# HG changeset patch
# User Sylvain Thénault
# Date 1299833205 -3600
# Node ID e4580e5f07037bd95bbfa4a8e8de02e0d06396fc
# Parent 48f468f33704e401a8e7907e258bf1ac61eb8407
# Parent 4ce9e536dd66a310aae6e21aa7e3a13843c63d8d
stable (3.10.X) is now oldstable
diff -r 48f468f33704 -r e4580e5f0703 .hgtags
--- a/.hgtags Fri Dec 10 12:17:18 2010 +0100
+++ b/.hgtags Fri Mar 11 09:46:45 2011 +0100
@@ -155,7 +155,29 @@
1c01f9dffd64d507863c9f8f68e3585b7aa24374 cubicweb-debian-version-3.9.7-1
eed788018b595d46a55805bd8d2054c401812b2b cubicweb-version-3.9.8
e4dba8ae963701a36be94ae58c790bc97ba029bb cubicweb-debian-version-3.9.8-1
+0793fe84651be36f8de9b4faba3781436dc07be0 cubicweb-version-3.10.0
+9ef1347f8d99e7daad290738ef93aa894a2c03ce cubicweb-debian-version-3.10.0-1
+6c6859a676732c845af69f92e74d4aafae12f83a cubicweb-version-3.10.1
+3abb41c47925f8fc6e327164d0ceca3773503ef9 cubicweb-debian-version-3.10.1-1
+3fc6b4aaaff301e482a92c61e39789621bd7ed3b cubicweb-version-3.10.2
+4a87c8af6f3ffe59c6048ebbdc1b6b204d0b9c7f cubicweb-debian-version-3.10.2-1
+8eb58d00a0cedcf7b275b1c7f43b08e2165f655c cubicweb-version-3.10.3
+303b150ebb7a92b2904efd52b446457999cab370 cubicweb-debian-version-3.10.3-1
+3829498510a754b1b8a40582cb8dcbca9145fc9d cubicweb-version-3.10.4
+49f1226f2fab6d9ff17eb27d5a66732a4e5b5add cubicweb-debian-version-3.10.4-1
df0b2de62cec10c84a2fff5233db05852cbffe93 cubicweb-version-3.9.9
1ba51b00fc44faa0d6d57448000aaa1fd5c6ab57 cubicweb-debian-version-3.9.9-1
b7db1f59355832a409d2032e19c84cfffdb3b265 cubicweb-debian-version-3.9.9-2
09c98763ae9d43616d047c1b25d82b4e41a4362f cubicweb-debian-version-3.9.9-3
+3829498510a754b1b8a40582cb8dcbca9145fc9d cubicweb-version-3.10.4
+d73733479a3af453f06b849ed88d120784ce9224 cubicweb-version-3.10.4
+49f1226f2fab6d9ff17eb27d5a66732a4e5b5add cubicweb-debian-version-3.10.4-1
+7b41930e1d32fea3989a85f6ea7281983300adb1 cubicweb-debian-version-3.10.4-1
+159d0dbe07d9eb1c6ace4c5e160d1ec6e6762086 cubicweb-version-3.10.5
+e2e7410e994777589aec218d31eef9ff8d893f92 cubicweb-debian-version-3.10.5-1
+3c81dbb58ac4d4a6f61b74eef4b943a8316c2f42 cubicweb-version-3.10.6
+1484257fe9aeb29d0210e635c12ae5b3d6118cfb cubicweb-debian-version-3.10.6-1
+1959d97ebf2e6a0f7cd05d4cc48bb955c4351da5 cubicweb-version-3.10.7
+bf5d9a1415e3c9abe6b68ba3b24a8ad741f9de3c cubicweb-debian-version-3.10.7-1
+e581a86a68f089946a98c966ebca7aee58a5718f cubicweb-version-3.10.8
+132b525de25bc75ed6389c45aee77e847cb3a437 cubicweb-debian-version-3.10.8-1
diff -r 48f468f33704 -r e4580e5f0703 MANIFEST.in
--- a/MANIFEST.in Fri Dec 10 12:17:18 2010 +0100
+++ b/MANIFEST.in Fri Mar 11 09:46:45 2011 +0100
@@ -21,7 +21,7 @@
recursive-include entities/test/data bootstrap_cubes *.py
recursive-include sobjects/test/data bootstrap_cubes *.py
recursive-include hooks/test/data bootstrap_cubes *.py
-recursive-include server/test/data bootstrap_cubes *.py source*
+recursive-include server/test/data bootstrap_cubes *.py source* *.conf.in *.ldif
recursive-include devtools/test/data bootstrap_cubes *.py *.txt *.js
recursive-include web/test/data bootstrap_cubes pouet.css *.py
diff -r 48f468f33704 -r e4580e5f0703 __pkginfo__.py
--- a/__pkginfo__.py Fri Dec 10 12:17:18 2010 +0100
+++ b/__pkginfo__.py Fri Mar 11 09:46:45 2011 +0100
@@ -1,5 +1,5 @@
-# pylint: disable-msg=W0622,C0103
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# pylint: disable=W0622,C0103
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -22,7 +22,7 @@
modname = distname = "cubicweb"
-numversion = (3, 9, 9)
+numversion = (3, 10, 9)
version = '.'.join(str(num) for num in numversion)
description = "a repository of entities / relations for knowledge management"
@@ -40,9 +40,9 @@
]
__depends__ = {
- 'logilab-common': '>= 0.51.0',
+ 'logilab-common': '>= 0.54.0',
'logilab-mtconverter': '>= 0.8.0',
- 'rql': '>= 0.26.2',
+ 'rql': '>= 0.28.0',
'yams': '>= 0.30.1',
'docutils': '>= 0.6',
#gettext # for xgettext, msgcat, etc...
@@ -52,7 +52,7 @@
'Twisted': '',
# XXX graphviz
# server dependencies
- 'logilab-database': '>= 1.3.2',
+ 'logilab-database': '>= 1.3.3',
'pysqlite': '>= 2.5.5', # XXX install pysqlite2
}
diff -r 48f468f33704 -r e4580e5f0703 _exceptions.py
--- a/_exceptions.py Fri Dec 10 12:17:18 2010 +0100
+++ b/_exceptions.py Fri Mar 11 09:46:45 2011 +0100
@@ -159,5 +159,5 @@
class ExecutionError(Exception):
"""server execution control error (already started, not running...)"""
-# pylint: disable-msg=W0611
+# pylint: disable=W0611
from logilab.common.clcommands import BadCommandUsage
diff -r 48f468f33704 -r e4580e5f0703 appobject.py
--- a/appobject.py Fri Dec 10 12:17:18 2010 +0100
+++ b/appobject.py Fri Mar 11 09:46:45 2011 +0100
@@ -214,6 +214,9 @@
return NotImplementedError("selector %s must implement its logic "
"in its __call__ method" % self.__class__)
+ def __repr__(self):
+        return u'<Selector %s at %x>' % (self.__class__.__name__, id(self))
+
class MultiSelector(Selector):
"""base class for compound selector classes"""
diff -r 48f468f33704 -r e4580e5f0703 common/mail.py
--- a/common/mail.py Fri Dec 10 12:17:18 2010 +0100
+++ b/common/mail.py Fri Mar 11 09:46:45 2011 +0100
@@ -16,7 +16,7 @@
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
"""pre 3.6 bw compat"""
-# pylint: disable-msg=W0614,W0401
+# pylint: disable=W0614,W0401
from warnings import warn
warn('moved to cubicweb.mail', DeprecationWarning, stacklevel=2)
from cubicweb.mail import *
diff -r 48f468f33704 -r e4580e5f0703 common/mixins.py
--- a/common/mixins.py Fri Dec 10 12:17:18 2010 +0100
+++ b/common/mixins.py Fri Mar 11 09:46:45 2011 +0100
@@ -16,7 +16,7 @@
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
"""pre 3.6 bw compat"""
-# pylint: disable-msg=W0614,W0401
+# pylint: disable=W0614,W0401
from warnings import warn
warn('moved to cubicweb.mixins', DeprecationWarning, stacklevel=2)
from cubicweb.mixins import *
diff -r 48f468f33704 -r e4580e5f0703 common/mttransforms.py
--- a/common/mttransforms.py Fri Dec 10 12:17:18 2010 +0100
+++ b/common/mttransforms.py Fri Mar 11 09:46:45 2011 +0100
@@ -16,7 +16,7 @@
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
"""pre 3.6 bw compat"""
-# pylint: disable-msg=W0614,W0401
+# pylint: disable=W0614,W0401
from warnings import warn
warn('moved to cubicweb.mttransforms', DeprecationWarning, stacklevel=2)
from cubicweb.mttransforms import *
diff -r 48f468f33704 -r e4580e5f0703 common/tags.py
--- a/common/tags.py Fri Dec 10 12:17:18 2010 +0100
+++ b/common/tags.py Fri Mar 11 09:46:45 2011 +0100
@@ -16,7 +16,7 @@
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
"""pre 3.6 bw compat"""
-# pylint: disable-msg=W0614,W0401
+# pylint: disable=W0614,W0401
from warnings import warn
warn('moved to cubicweb.tags', DeprecationWarning, stacklevel=2)
from cubicweb.tags import *
diff -r 48f468f33704 -r e4580e5f0703 common/uilib.py
--- a/common/uilib.py Fri Dec 10 12:17:18 2010 +0100
+++ b/common/uilib.py Fri Mar 11 09:46:45 2011 +0100
@@ -16,7 +16,7 @@
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
"""pre 3.6 bw compat"""
-# pylint: disable-msg=W0614,W0401
+# pylint: disable=W0614,W0401
from warnings import warn
warn('moved to cubicweb.uilib', DeprecationWarning, stacklevel=2)
from cubicweb.uilib import *
diff -r 48f468f33704 -r e4580e5f0703 cwconfig.py
--- a/cwconfig.py Fri Dec 10 12:17:18 2010 +0100
+++ b/cwconfig.py Fri Mar 11 09:46:45 2011 +0100
@@ -22,16 +22,11 @@
Resource mode
-------------
-A resource *mode* is a predifined set of settings for various resources
+A resource *mode* is a predefined set of settings for various resources
directories, such as cubes, instances, etc. to ease development with the
framework. There are two running modes with *CubicWeb*:
-* 'user', resources are searched / created in the user home directory:
-
- - instances are stored in :file:`~/etc/cubicweb.d`
- - temporary files (such as pid file) in :file:`/tmp`
-
-* 'system', resources are searched / created in the system directories (eg
+* **system**: resources are searched / created in the system directories (eg
usually requiring root access):
- instances are stored in :file:`/etc/cubicweb.d`
@@ -40,28 +35,34 @@
where `<prefix>` is the detected installation prefix ('/usr/local' for
instance).
+* **user**: resources are searched / created in the user home directory:
+
+ - instances are stored in :file:`~/etc/cubicweb.d`
+ - temporary files (such as pid file) in :file:`/tmp`
+
+
Notice that each resource path may be explicitly set using an environment
variable if the default doesn't suit your needs. Here are the default resource
directories that are affected according to mode:
-* 'system': ::
+* **system**: ::
CW_INSTANCES_DIR = /etc/cubicweb.d/
CW_INSTANCES_DATA_DIR = /var/lib/cubicweb/instances/
CW_RUNTIME_DIR = /var/run/cubicweb/
-* 'user': ::
+* **user**: ::
CW_INSTANCES_DIR = ~/etc/cubicweb.d/
CW_INSTANCES_DATA_DIR = ~/etc/cubicweb.d/
CW_RUNTIME_DIR = /tmp
-Cubes search path is also affected, see the :ref:Cube section.
+Cubes search path is also affected, see the :ref:`Cube` section.
-By default, the mode automatically set to 'user' if a :file:`.hg` directory is found
-in the cubicweb package, else it's set to 'system'. You can force this by setting
-the :envvar:`CW_MODE` environment variable to either 'user' or 'system' so you can
+By default, the mode is automatically set to `user` if a :file:`.hg` directory is found
+in the cubicweb package, else it's set to `system`. You can force this by setting
+the :envvar:`CW_MODE` environment variable to either `user` or `system` so you can
easily:
* use system wide installation but user specific instances and all, without root
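
As a usage note on the mode selection documented above: a minimal sketch of driving it through the environment. `CW_MODE` and `CW_INSTANCES_DIR` are the variables described in this docstring; the instance identifier 'myinstance' is only a placeholder::

    import os

    # force 'user' mode; CW_MODE is typically read when cwconfig is first
    # imported, so set it before importing the module
    os.environ['CW_MODE'] = 'user'
    # any resource directory may still be overridden individually
    os.environ['CW_INSTANCES_DIR'] = os.path.expanduser('~/etc/cubicweb.d')

    from cubicweb import cwconfig
    config = cwconfig.instance_configuration('myinstance')
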
@@ -158,14 +159,6 @@
SMTP_LOCK = Lock()
-class metaconfiguration(type):
- """metaclass to automaticaly register configuration"""
- def __new__(mcs, name, bases, classdict):
- cls = super(metaconfiguration, mcs).__new__(mcs, name, bases, classdict)
- if classdict.get('name'):
- CONFIGURATIONS.append(cls)
- return cls
-
def configuration_cls(name):
"""return the configuration class registered with the given name"""
try:
@@ -289,7 +282,6 @@
class CubicWebNoAppConfiguration(ConfigurationMixIn):
"""base class for cubicweb configuration without a specific instance directory
"""
- __metaclass__ = metaconfiguration
# to set in concrete configuration
name = None
# log messages format (see logging module documentation for available keys)
@@ -316,6 +308,12 @@
'help': 'server\'s log level',
'group': 'main', 'level': 1,
}),
+ ('umask',
+ {'type' : 'int',
+ 'default': 077,
+ 'help': 'permission umask for files created by the server',
+ 'group': 'main', 'level': 2,
+ }),
# pyro options
('pyro-instance-id',
{'type' : 'string',
@@ -443,14 +441,15 @@
@classmethod
def cube_dir(cls, cube):
- """return the cube directory for the given cube id,
- raise `ConfigurationError` if it doesn't exists
+ """return the cube directory for the given cube id, raise
+ `ConfigurationError` if it doesn't exist
"""
for directory in cls.cubes_search_path():
cubedir = join(directory, cube)
if exists(cubedir):
return cubedir
- raise ConfigurationError('no cube %s in %s' % (cube, cls.cubes_search_path()))
+ raise ConfigurationError('no cube %r in %s' % (
+ cube, cls.cubes_search_path()))
@classmethod
def cube_migration_scripts_dir(cls, cube):
@@ -588,6 +587,14 @@
return # cubes dir doesn't exists
@classmethod
+ def load_available_configs(cls):
+ from logilab.common.modutils import load_module_from_file
+ for conffile in ('web/webconfig.py', 'etwist/twconfig.py',
+ 'server/serverconfig.py',):
+ if exists(join(CW_SOFTWARE_ROOT, conffile)):
+ load_module_from_file(join(CW_SOFTWARE_ROOT, conffile))
+
+ @classmethod
def load_cwctl_plugins(cls):
from logilab.common.modutils import load_module_from_file
cls.cls_adjust_sys_path()
@@ -598,8 +605,8 @@
try:
load_module_from_file(join(CW_SOFTWARE_ROOT, ctlfile))
except ImportError, err:
- cls.info('could not import the command provider %s (cause : %s)' %
- (ctlfile, err))
+ cls.error('could not import the command provider %s: %s',
+ ctlfile, err)
cls.info('loaded cubicweb-ctl plugin %s', ctlfile)
for cube in cls.available_cubes():
oldpluginfile = join(cls.cube_dir(cube), 'ecplugin.py')
@@ -688,6 +695,7 @@
def __init__(self, debugmode=False):
register_stored_procedures()
+ self._cubes = None
super(CubicWebNoAppConfiguration, self).__init__()
self.debugmode = debugmode
self.adjust_sys_path()
@@ -763,7 +771,7 @@
self.debug('%s loaded', sitefile)
return module
- def eproperty_definitions(self):
+ def cwproperty_definitions(self):
cfg = self.persistent_options_configuration()
for section, options in cfg.options_by_section():
section = section.lower()
@@ -791,6 +799,31 @@
"""
return None
+ _cubes = None
+
+ def init_cubes(self, cubes):
+ assert self._cubes is None, self._cubes
+ self._cubes = self.reorder_cubes(cubes)
+ # load cubes'__init__.py file first
+ for cube in cubes:
+ __import__('cubes.%s' % cube)
+ self.load_site_cubicweb()
+
+ def cubes(self):
+ """return the list of cubes used by this instance
+
+ result is ordered from the top level cubes to inner dependencies
+ cubes
+ """
+ assert self._cubes is not None, 'cubes not initialized'
+ return self._cubes
+
+ def cubes_path(self):
+ """return the list of path to cubes used by this instance, from outer
+ most to inner most cubes
+ """
+ return [self.cube_dir(p) for p in self.cubes()]
+
class CubicWebConfiguration(CubicWebNoAppConfiguration):
"""base class for cubicweb server and web configurations"""
@@ -870,6 +903,7 @@
def config_for(cls, appid, config=None, debugmode=False):
"""return a configuration instance for the given instance identifier
"""
+ cls.load_available_configs()
config = config or guess_configuration(cls.instance_home(appid))
configcls = configuration_cls(config)
return configcls(appid, debugmode)
@@ -984,33 +1018,13 @@
return join(iddir, self.appid)
def init_cubes(self, cubes):
- assert self._cubes is None, self._cubes
- self._cubes = self.reorder_cubes(cubes)
- # load cubes'__init__.py file first
- for cube in cubes:
- __import__('cubes.%s' % cube)
- self.load_site_cubicweb()
+ super(CubicWebConfiguration, self).init_cubes(cubes)
# reload config file in cases options are defined in cubes __init__
# or site_cubicweb files
self.load_file_configuration(self.main_config_file())
# configuration initialization hook
self.load_configuration()
- def cubes(self):
- """return the list of cubes used by this instance
-
- result is ordered from the top level cubes to inner dependencies
- cubes
- """
- assert self._cubes is not None
- return self._cubes
-
- def cubes_path(self):
- """return the list of path to cubes used by this instance, from outer
- most to inner most cubes
- """
- return [self.cube_dir(p) for p in self.cubes()]
-
def add_cubes(self, cubes):
"""add given cubes to the list of used cubes"""
if not isinstance(cubes, list):
@@ -1265,7 +1279,9 @@
stack[0] = self.source_execute
def as_sql(self, backend, args):
- raise NotImplementedError('source only callback')
+ raise NotImplementedError(
+ 'This callback is only available for BytesFileSystemStorage '
+ 'managed attribute. Is FSPATH() argument BFSS managed?')
def source_execute(self, source, session, value):
fpath = source.binary_to_str(value)
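
The hunks above add an integer `umask` option and move the `init_cubes` / `cubes` / `cubes_path` API onto `CubicWebNoAppConfiguration`. A rough sketch of using them, assuming `config` is a loaded configuration object and 'blog' is a placeholder cube name::

    # option values may also be set programmatically, e.g. from a test or a
    # migration script (077 is the documented default, 007 is just an example)
    config.global_set_option('umask', 007)

    # cube handling is now available on the no-app base configuration as well
    config.init_cubes(['blog'])
    print config.cubes()        # ordered from outer cubes to inner dependencies
    print config.cubes_path()   # matching on-disk cube directories
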
diff -r 48f468f33704 -r e4580e5f0703 cwctl.py
--- a/cwctl.py Fri Dec 10 12:17:18 2010 +0100
+++ b/cwctl.py Fri Mar 11 09:46:45 2011 +0100
@@ -42,11 +42,18 @@
from logilab.common.shellutils import ASK
from cubicweb import ConfigurationError, ExecutionError, BadCommandUsage
+from cubicweb.utils import support_args
from cubicweb.cwconfig import CubicWebConfiguration as cwcfg, CWDEV, CONFIGURATIONS
from cubicweb.toolsutils import Command, rm, create_dir, underline_title
from cubicweb.__pkginfo__ import version
-CWCTL = CommandLine('cubicweb-ctl', 'The CubicWeb swiss-knife.', version=version)
+if support_args(CommandLine, 'check_duplicated_command'):
+ # don't check duplicated commands, it occurs when reloading site_cubicweb
+ CWCTL = CommandLine('cubicweb-ctl', 'The CubicWeb swiss-knife.',
+ version=version, check_duplicated_command=False)
+else:
+ CWCTL = CommandLine('cubicweb-ctl', 'The CubicWeb swiss-knife.',
+ version=version)
def wait_process_end(pid, maxtry=10, waittime=1):
"""wait for a process to actually die"""
@@ -235,9 +242,9 @@
tinfo = cwcfg.cube_pkginfo(cube)
tversion = tinfo.version
cfgpb.add_cube(cube, tversion)
- except ConfigurationError:
+ except (ConfigurationError, AttributeError), ex:
tinfo = None
- tversion = '[missing cube information]'
+ tversion = '[missing cube information: %s]' % ex
print '* %s %s' % (cube.ljust(namesize), tversion)
if self.config.verbose:
if tinfo:
@@ -494,7 +501,8 @@
msg = "%s seems to be running. Remove %s by hand if necessary or use \
the --force option."
raise ExecutionError(msg % (appid, pidf))
- helper.start_server(config)
+ if helper.start_server(config) == 1:
+ print 'instance %s started' % appid
def init_cmdline_log_threshold(config, loglevel):
@@ -656,10 +664,11 @@
name = 'upgrade'
actionverb = 'upgraded'
options = InstanceCommand.options + (
- ('force-componant-version',
- {'short': 't', 'type' : 'csv', 'metavar': 'cube1=X.Y.Z,cube2=X.Y.Z',
+ ('force-cube-version',
+ {'short': 't', 'type' : 'named', 'metavar': 'cube1:X.Y.Z,cube2:X.Y.Z',
'default': None,
- 'help': 'force migration from the indicated version for the specified cube.'}),
+ 'help': 'force migration from the indicated version for the specified cube(s).'}),
+
('force-cubicweb-version',
{'short': 'e', 'type' : 'string', 'metavar': 'X.Y.Z',
'default': None,
@@ -713,12 +722,9 @@
mih = config.migration_handler()
repo = mih.repo_connect()
vcconf = repo.get_versions()
- if self.config.force_componant_version:
- packversions = {}
- for vdef in self.config.force_componant_version:
- componant, version = vdef.split('=')
- packversions[componant] = Version(version)
- vcconf.update(packversions)
+ if self.config.force_cube_version:
+ for cube, version in self.config.force_cube_version.iteritems():
+ vcconf[cube] = Version(version)
toupgrade = []
for cube in config.cubes():
installedversion = config.cube_version(cube)
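
Since `force-cube-version` is now a 'named' option, the command-line value `cube1:X.Y.Z,cube2:X.Y.Z` reaches the command as a mapping. A small sketch of the override logic above, with placeholder cube names and versions, assuming `Version` is the logilab.common.changelog class used by cwctl::

    from logilab.common.changelog import Version

    # roughly what `-t blog:1.2.0,comment:0.4.1` would yield
    force_cube_version = {'blog': '1.2.0', 'comment': '0.4.1'}
    vcconf = {'blog': Version('1.1.0'), 'comment': Version('0.4.0')}
    for cube, version in force_cube_version.iteritems():
        vcconf[cube] = Version(version)   # override the recorded version
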
diff -r 48f468f33704 -r e4580e5f0703 cwvreg.py
--- a/cwvreg.py Fri Dec 10 12:17:18 2010 +0100
+++ b/cwvreg.py Fri Mar 11 09:46:45 2011 +0100
@@ -196,7 +196,7 @@
from warnings import warn
from logilab.common.decorators import cached, clear_cache
-from logilab.common.deprecation import deprecated
+from logilab.common.deprecation import deprecated, class_deprecated
from logilab.common.modutils import cleanup_sys_modules
from rql import RQLHelper
@@ -290,13 +290,18 @@
class ETypeRegistry(CWRegistry):
+ def clear_caches(self):
+ clear_cache(self, 'etype_class')
+ clear_cache(self, 'parent_classes')
+ from cubicweb import selectors
+ selectors._reset_is_instance_cache(self.vreg)
+
def initialization_completed(self):
"""on registration completed, clear etype_class internal cache
"""
super(ETypeRegistry, self).initialization_completed()
# clear etype cache if you don't want to run into deep weirdness
- clear_cache(self, 'etype_class')
- clear_cache(self, 'parent_classes')
+ self.clear_caches()
def register(self, obj, **kwargs):
oid = kwargs.get('oid') or class_regid(obj)
@@ -389,6 +394,8 @@
for vid, views in self.items():
if vid[0] == '_':
continue
+ views = [view for view in views
+ if not isinstance(view, class_deprecated)]
try:
view = self._select_best(views, req, rset=rset, **kwargs)
if view.linkable():
@@ -421,6 +428,56 @@
VRegistry.REGISTRY_FACTORY['actions'] = ActionsRegistry
+class CtxComponentsRegistry(CWRegistry):
+ def poss_visible_objects(self, *args, **kwargs):
+ """return an ordered list of possible components"""
+ context = kwargs.pop('context')
+ if '__cache' in kwargs:
+ cache = kwargs.pop('__cache')
+ elif kwargs.get('rset') is None:
+ cache = args[0]
+ else:
+ cache = kwargs['rset']
+ try:
+ cached = cache.__components_cache
+ except AttributeError:
+ ctxcomps = super(CtxComponentsRegistry, self).poss_visible_objects(
+ *args, **kwargs)
+ if cache is None:
+ components = []
+ for component in ctxcomps:
+ cctx = component.cw_propval('context')
+ if cctx == context:
+ component.cw_extra_kwargs['context'] = cctx
+ components.append(component)
+ return components
+ cached = cache.__components_cache = {}
+ for component in ctxcomps:
+ cctx = component.cw_propval('context')
+ component.cw_extra_kwargs['context'] = cctx
+ cached.setdefault(cctx, []).append(component)
+ thisctxcomps = cached.get(context, ())
+ # XXX set context for bw compat (should now be taken by comp.render())
+ for component in thisctxcomps:
+ component.cw_extra_kwargs['context'] = context
+ return thisctxcomps
+
+VRegistry.REGISTRY_FACTORY['ctxcomponents'] = CtxComponentsRegistry
+
+
+class BwCompatCWRegistry(object):
+ def __init__(self, vreg, oldreg, redirecttoreg):
+ self.vreg = vreg
+ self.oldreg = oldreg
+ self.redirecto = redirecttoreg
+
+ def __getattr__(self, attr):
+ warn('[3.10] you should now use the %s registry instead of the %s registry'
+ % (self.redirecto, self.oldreg), DeprecationWarning, stacklevel=2)
+ return getattr(self.vreg[self.redirecto], attr)
+
+ def clear(self): pass
+ def initialization_completed(self): pass
class CubicWebVRegistry(VRegistry):
"""Central registry for the cubicweb instance, extending the generic
@@ -433,15 +490,23 @@
stored objects. Currently we have the following registries of objects known
by the web instance (library may use some others additional registries):
- * etypes
- * views
- * components
- * actions
- * forms
- * formrenderers
- * controllers, which are directly plugged into the application
- object to handle request publishing XXX to merge with views
- * contentnavigation XXX to merge with components? to kill?
+ * 'etypes', entity type classes
+
+ * 'views', views and templates (e.g. layout views)
+
+ * 'components', non contextual components, like magic search, url evaluators
+
+ * 'ctxcomponents', contextual components like boxes and dynamic section
+
+ * 'actions', contextual actions, eg links to display in predefined places in
+ the ui
+
+ * 'forms', describing logic of HTML form
+
+ * 'formrenderers', rendering forms to html
+
+ * 'controllers', primary objects to handle request publishing, directly
+ plugged into the application
"""
def __init__(self, config, initlog=True):
@@ -456,6 +521,8 @@
# don't clear rtags during test, this may cause breakage with
# manually imported appobject modules
CW_EVENT_MANAGER.bind('before-registry-reload', clear_rtag_objects)
+ self['boxes'] = BwCompatCWRegistry(self, 'boxes', 'ctxcomponents')
+ self['contentnavigation'] = BwCompatCWRegistry(self, 'contentnavigation', 'ctxcomponents')
def setdefault(self, regid):
try:
@@ -487,7 +554,7 @@
if not self.initialized:
self['propertydefs'] = {}
self['propertyvalues'] = self.eprop_values = {}
- for key, propdef in self.config.eproperty_definitions():
+ for key, propdef in self.config.cwproperty_definitions():
self.register_property(key, **propdef)
CW_EVENT_MANAGER.emit('after-registry-reset', self)
@@ -713,7 +780,7 @@
vocab = pdef['vocabulary']
if vocab is not None:
if callable(vocab):
- vocab = vocab(key, None) # XXX need a req object
+ vocab = vocab(None) # XXX need a req object
if not value in vocab:
raise ValueError(_('unauthorized value'))
return value
@@ -751,7 +818,7 @@
def possible_actions(self, req, rset=None, **kwargs):
return self["actions"].possible_actions(req, rest=rset, **kwargs)
- @deprecated('[3.4] use vreg["boxes"].select_object(...)')
+ @deprecated('[3.4] use vreg["ctxcomponents"].select_object(...)')
def select_box(self, oid, *args, **kwargs):
return self['boxes'].select_object(oid, *args, **kwargs)
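
After this reorganisation, boxes and content navigation components live in the single 'ctxcomponents' registry, with the old registry names redirected (and a DeprecationWarning emitted) by `BwCompatCWRegistry`. A minimal sketch of what calling code sees, assuming `vreg`, `req` and `rset` are at hand and 'edit_box' is only an illustrative component id::

    # new style: contextual components are selected from 'ctxcomponents'
    comp = vreg['ctxcomponents'].select_object('edit_box', req, rset=rset)

    # old style still answers, redirected to 'ctxcomponents' with a warning
    comp = vreg['boxes'].select_object('edit_box', req, rset=rset)
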
diff -r 48f468f33704 -r e4580e5f0703 dataimport.py
--- a/dataimport.py Fri Dec 10 12:17:18 2010 +0100
+++ b/dataimport.py Fri Mar 11 09:46:45 2011 +0100
@@ -81,10 +81,11 @@
from logilab.common.deprecation import deprecated
from cubicweb.server.utils import eschema_eid
+from cubicweb.server.ssplanner import EditedEntity
def count_lines(stream_or_filename):
if isinstance(stream_or_filename, basestring):
- f = open(filename)
+ f = open(stream_or_filename)
else:
f = stream_or_filename
f.seek(0)
@@ -97,8 +98,8 @@
skipfirst=False, withpb=True):
"""same as ucsvreader but a progress bar is displayed as we iter on rows"""
if isinstance(stream_or_path, basestring):
- if not osp.exists(filepath):
- raise Exception("file doesn't exists: %s" % filepath)
+ if not osp.exists(stream_or_path):
+ raise Exception("file doesn't exists: %s" % stream_or_path)
stream = open(stream_or_path)
else:
stream = stream_or_path
@@ -305,11 +306,18 @@
self.items.append(item)
return len(self.items) - 1
- def add(self, type, item):
+ def create_entity(self, etype, **data):
+ data['eid'] = eid = self._put(etype, data)
+ self.eids[eid] = data
+ self.types.setdefault(etype, []).append(eid)
+ return data
+
+ @deprecated("[3.11] add is deprecated, use create_entity instead")
+ def add(self, etype, item):
assert isinstance(item, dict), 'item is not a dict but a %s' % type(item)
- eid = item['eid'] = self._put(type, item)
- self.eids[eid] = item
- self.types.setdefault(type, []).append(eid)
+ data = self.create_entity(etype, **item)
+ item['eid'] = data['eid']
+ return item
def relate(self, eid_from, rtype, eid_to, inlined=False):
"""Add new relation"""
@@ -331,6 +339,7 @@
def rql(self, *args):
if self._rql is not None:
return self._rql(*args)
+ return []
@property
def nb_inserted_entities(self):
@@ -420,7 +429,6 @@
ObjectStore.__init__(self)
if session is None:
sys.exit('please provide a session of run this script with cubicweb-ctl shell and pass cnx as session')
- session = cnx
if not hasattr(session, 'set_pool'):
# connection
cnx = session
@@ -453,15 +461,17 @@
return entity
def _put(self, type, item):
- query = ('INSERT %s X: ' % type) + ', '.join('X %s %%(%s)s' % (k, k)
- for k in item)
+ query = 'INSERT %s X' % type
+ if item:
+ query += ': ' + ', '.join('X %s %%(%s)s' % (k, k)
+ for k in item)
return self.rql(query, item)[0][0]
def relate(self, eid_from, rtype, eid_to, inlined=False):
eid_from, rtype, eid_to = super(RQLObjectStore, self).relate(
eid_from, rtype, eid_to)
self.rql('SET X %s Y WHERE X eid %%(x)s, Y eid %%(y)s' % rtype,
- {'x': int(eid_from), 'y': int(eid_to)}, ('x', 'y'))
+ {'x': int(eid_from), 'y': int(eid_to)})
# the import controller ########################################################
@@ -504,7 +514,6 @@
traceback.print_exc(file=tmp)
else:
traceback.print_exception(type, value, tb, file=tmp)
- print tmp.getvalue()
# use a list to avoid counting a errors instead of one
errorlog = self.errors.setdefault(key, [])
if msg is None:
@@ -612,8 +621,7 @@
entity = copy(entity)
entity.cw_clear_relation_cache()
self.metagen.init_entity(entity)
- entity.update(kwargs)
- entity.edited_attributes = set(entity)
+ entity.cw_edited.update(kwargs, skipsec=False)
session = self.session
self.source.add_entity(session, entity)
self.source.add_info(session, entity, self.source, None, complete=False)
@@ -651,6 +659,11 @@
class MetaGenerator(object):
+ META_RELATIONS = (META_RTYPES
+ - VIRTUAL_RTYPES
+ - set(('eid', 'cwuri',
+ 'is', 'is_instance_of', 'cw_source')))
+
def __init__(self, session, baseurl=None):
self.session = session
self.source = session.repo.system_source
@@ -669,25 +682,20 @@
#self.entity_rels = [] XXX not handled (YAGNI?)
schema = session.vreg.schema
rschema = schema.rschema
- for rtype in META_RTYPES:
- if rtype in ('eid', 'cwuri') or rtype in VIRTUAL_RTYPES:
- continue
+ for rtype in self.META_RELATIONS:
if rschema(rtype).final:
self.etype_attrs.append(rtype)
else:
self.etype_rels.append(rtype)
- if not schema._eid_index:
- # test schema loaded from the fs
- self.gen_is = self.test_gen_is
- self.gen_is_instance_of = self.test_gen_is_instanceof
@cached
def base_etype_dicts(self, etype):
entity = self.session.vreg['etypes'].etype_class(etype)(self.session)
# entity are "surface" copied, avoid shared dict between copies
del entity.cw_extra_kwargs
+ entity.cw_edited = EditedEntity(entity)
for attr in self.etype_attrs:
- entity[attr] = self.generate(entity, attr)
+ entity.cw_edited.edited_attribute(attr, self.generate(entity, attr))
rels = {}
for rel in self.etype_rels:
rels[rel] = self.generate(entity, rel)
@@ -696,7 +704,7 @@
def init_entity(self, entity):
entity.eid = self.source.create_eid(self.session)
for attr in self.entity_attrs:
- entity[attr] = self.generate(entity, attr)
+ entity.cw_edited.edited_attribute(attr, self.generate(entity, attr))
def generate(self, entity, rtype):
return getattr(self, 'gen_%s' % rtype)(entity)
@@ -709,26 +717,7 @@
def gen_modification_date(self, entity):
return self.time
- def gen_is(self, entity):
- return entity.e_schema.eid
- def gen_is_instance_of(self, entity):
- eids = []
- for etype in entity.e_schema.ancestors() + [entity.e_schema]:
- eids.append(entity.e_schema.eid)
- return eids
-
def gen_created_by(self, entity):
return self.session.user.eid
def gen_owned_by(self, entity):
return self.session.user.eid
-
- # implementations of gen_is / gen_is_instance_of to use during test where
- # schema has been loaded from the fs (hence entity type schema eids are not
- # known)
- def test_gen_is(self, entity):
- return eschema_eid(self.session, entity.e_schema)
- def test_gen_is_instanceof(self, entity):
- eids = []
- for eschema in entity.e_schema.ancestors() + [entity.e_schema]:
- eids.append(eschema_eid(self.session, eschema))
- return eids
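
The store API now favours `create_entity`, `add` being kept only as a deprecated wrapper. A short sketch of the new calling convention on the plain `ObjectStore`, with placeholder entity types, attributes and relation name::

    from cubicweb.dataimport import ObjectStore

    store = ObjectStore()
    # create_entity returns the attribute dict with its 'eid' key filled in
    person = store.create_entity('Person', name=u'Alice')
    city = store.create_entity('City', name=u'Paris')
    store.relate(person['eid'], 'lives_in', city['eid'])
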
diff -r 48f468f33704 -r e4580e5f0703 dbapi.py
--- a/dbapi.py Fri Dec 10 12:17:18 2010 +0100
+++ b/dbapi.py Fri Mar 11 09:46:45 2011 +0100
@@ -48,6 +48,9 @@
except KeyError:
return ''
+def fake(*args, **kwargs):
+ return None
+
def multiple_connections_fix():
"""some monkey patching necessary when an application has to deal with
several connections to different repositories. It tries to hide buggy class
@@ -165,16 +168,20 @@
where it's already initialized.
:kwargs:
- there goes authentication tokens. You usually have to specify for
- instance a password for the given user, using a named 'password' argument.
+ there goes authentication tokens. You usually have to specify a password
+ for the given user, using a named 'password' argument.
"""
- config = cwconfig.CubicWebNoAppConfiguration()
- if host:
- config.global_set_option('pyro-ns-host', host)
- if group:
- config.global_set_option('pyro-ns-group', group)
cnxprops = cnxprops or ConnectionProperties()
method = cnxprops.cnxtype
+ if method == 'pyro':
+ config = cwconfig.CubicWebNoAppConfiguration()
+ if host:
+ config.global_set_option('pyro-ns-host', host)
+ if group:
+ config.global_set_option('pyro-ns-group', group)
+ else:
+ assert database
+ config = cwconfig.instance_configuration(database)
repo = get_repository(method, database, config=config)
if method == 'inmemory':
vreg = repo.vreg
@@ -194,21 +201,30 @@
cnx.vreg = vreg
return cnx
-def in_memory_cnx(config, login, **kwargs):
- """usefull method for testing and scripting to get a dbapi.Connection
- object connected to an in-memory repository instance
- """
+def in_memory_repo(config):
+    """Return an in_memory Repository object from a config (or vreg)"""
if isinstance(config, cwvreg.CubicWebVRegistry):
vreg = config
config = None
else:
vreg = None
# get local access to the repository
- repo = get_repository('inmemory', config=config, vreg=vreg)
- # connection to the CubicWeb repository
+ return get_repository('inmemory', config=config, vreg=vreg)
+
+def in_memory_cnx(repo, login, **kwargs):
+    """Establish an in-memory connection to a <repo> for the user with <login>.
+
+    Additional credentials might be required."""
cnxprops = ConnectionProperties('inmemory')
- cnx = repo_connect(repo, login, cnxprops=cnxprops, **kwargs)
- return repo, cnx
+ return repo_connect(repo, login, cnxprops=cnxprops, **kwargs)
+
+def in_memory_repo_cnx(config, login, **kwargs):
+    """useful method for testing and scripting to get a dbapi.Connection
+ object connected to an in-memory repository instance
+ """
+ # connection to the CubicWeb repository
+ repo = in_memory_repo(config)
+ return repo, in_memory_cnx(repo, login, **kwargs)
class _NeedAuthAccessMock(object):
def __getattribute__(self, attr):
@@ -313,19 +329,17 @@
# low level session data management #######################################
- def get_shared_data(self, key, default=None, pop=False):
- """return value associated to `key` in shared data"""
- return self.cnx.get_shared_data(key, default, pop)
-
- def set_shared_data(self, key, value, querydata=False):
- """set value associated to `key` in shared data
+ def get_shared_data(self, key, default=None, pop=False, txdata=False):
+ """see :meth:`Connection.get_shared_data`"""
+ return self.cnx.get_shared_data(key, default, pop, txdata)
- if `querydata` is true, the value will be added to the repository
- session's query data which are cleared on commit/rollback of the current
- transaction, and won't be available through the connexion, only on the
- repository side.
- """
- return self.cnx.set_shared_data(key, value, querydata)
+ def set_shared_data(self, key, value, txdata=False, querydata=None):
+ """see :meth:`Connection.set_shared_data`"""
+ if querydata is not None:
+ txdata = querydata
+ warn('[3.10] querydata argument has been renamed to txdata',
+ DeprecationWarning, stacklevel=2)
+ return self.cnx.set_shared_data(key, value, txdata)
# server session compat layer #############################################
@@ -482,6 +496,7 @@
self.sessionid = cnxid
self._close_on_del = getattr(cnxprops, 'close_on_del', True)
self._cnxtype = getattr(cnxprops, 'cnxtype', 'pyro')
+ self._web_request = False
if cnxprops and cnxprops.log_queries:
self.executed_queries = []
self.cursor_class = LogCursor
@@ -534,9 +549,8 @@
esubpath = list(subpath)
esubpath.remove('views')
esubpath.append(join('web', 'views'))
- cubespath = [config.cube_dir(p) for p in cubes]
- config.load_site_cubicweb(cubespath)
- vpath = config.build_vregistry_path(reversed(cubespath),
+ config.init_cubes(cubes)
+ vpath = config.build_vregistry_path(reversed(config.cubes_path()),
evobjpath=esubpath,
tvobjpath=subpath)
self.vreg.register_objects(vpath)
@@ -547,35 +561,33 @@
You should call `load_appobjects` at some point to register those views.
"""
- from cubicweb.web.request import CubicWebRequestBase as cwrb
- DBAPIRequest.build_ajax_replace_url = cwrb.build_ajax_replace_url.im_func
- DBAPIRequest.ajax_replace_url = cwrb.ajax_replace_url.im_func
- DBAPIRequest.list_form_param = cwrb.list_form_param.im_func
DBAPIRequest.property_value = _fake_property_value
DBAPIRequest.next_tabindex = count().next
- DBAPIRequest.form = {}
- DBAPIRequest.data = {}
- fake = lambda *args, **kwargs: None
DBAPIRequest.relative_path = fake
DBAPIRequest.url = fake
- DBAPIRequest.next_tabindex = fake
DBAPIRequest.get_page_data = fake
DBAPIRequest.set_page_data = fake
- DBAPIRequest.add_js = fake #cwrb.add_js.im_func
- DBAPIRequest.add_css = fake #cwrb.add_css.im_func
# XXX could ask the repo for it's base-url configuration
self.vreg.config.set_option('base-url', baseurl)
+ self.vreg.config.uiprops = {}
+ self.vreg.config.datadir_url = baseurl + '/data'
# XXX why is this needed? if really needed, could be fetched by a query
if sitetitle is not None:
self.vreg['propertydefs']['ui.site-title'] = {'default': sitetitle}
+ self._web_request = True
- @check_not_closed
- def source_defs(self):
- """Return the definition of sources used by the repository.
-
- This is NOT part of the DB-API.
- """
- return self._repo.source_defs()
+ def request(self):
+ if self._web_request:
+ from cubicweb.web.request import CubicWebRequestBase
+ req = CubicWebRequestBase(self.vreg, False)
+ req.get_header = lambda x, default=None: default
+ req.set_session = lambda session, user=None: DBAPIRequest.set_session(
+ req, session, user)
+ req.relative_path = lambda includeparams=True: ''
+ else:
+ req = DBAPIRequest(self.vreg)
+ req.set_session(DBAPISession(self))
+ return req
@check_not_closed
def user(self, req=None, props=None):
@@ -593,21 +605,20 @@
else:
from cubicweb.entity import Entity
user = Entity(req, rset, row=0)
- user['login'] = login # cache login
+ user.cw_attr_cache['login'] = login # cache login
return user
@check_not_closed
def check(self):
- """raise `BadConnectionId` if the connection is no more valid"""
- self._repo.check_session(self.sessionid)
+ """raise `BadConnectionId` if the connection is no more valid, else
+ return its latest activity timestamp.
+ """
+ return self._repo.check_session(self.sessionid)
def _txid(self, cursor=None): # XXX could now handle various isolation level!
# return a dict as bw compat trick
return {'txid': currentThread().getName()}
- def request(self):
- return DBAPIRequest(self.vreg, DBAPISession(self))
-
# session data methods #####################################################
@check_not_closed
@@ -616,24 +627,35 @@
self._repo.set_session_props(self.sessionid, props)
@check_not_closed
- def get_shared_data(self, key, default=None, pop=False):
- """return value associated to `key` in shared data"""
- return self._repo.get_shared_data(self.sessionid, key, default, pop)
+ def get_shared_data(self, key, default=None, pop=False, txdata=False):
+ """return value associated to key in the session's data dictionary or
+ session's transaction's data if `txdata` is true.
+
+ If pop is True, value will be removed from the dictionnary.
+
+ If key isn't defined in the dictionnary, value specified by the
+ `default` argument will be returned.
+ """
+ return self._repo.get_shared_data(self.sessionid, key, default, pop, txdata)
@check_not_closed
- def set_shared_data(self, key, value, querydata=False):
+ def set_shared_data(self, key, value, txdata=False):
"""set value associated to `key` in shared data
- if `querydata` is true, the value will be added to the repository
+ if `txdata` is true, the value will be added to the repository
session's query data which are cleared on commit/rollback of the current
- transaction, and won't be available through the connexion, only on the
- repository side.
+ transaction.
"""
- return self._repo.set_shared_data(self.sessionid, key, value, querydata)
+ return self._repo.set_shared_data(self.sessionid, key, value, txdata)
# meta-data accessors ######################################################
@check_not_closed
+ def source_defs(self):
+ """Return the definition of sources used by the repository."""
+ return self._repo.source_defs()
+
+ @check_not_closed
def get_schema(self):
"""Return the schema currently used by the repository."""
return self._repo.get_schema()
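
The former `in_memory_cnx(config, login, **kwargs)` helper is split so the repository can be built once and reused; `in_memory_repo_cnx` keeps the previous one-call behaviour. A sketch assuming `config` is a valid instance configuration and the logins/passwords are placeholders::

    from cubicweb import dbapi

    # one call, as before (previously named in_memory_cnx)
    repo, cnx = dbapi.in_memory_repo_cnx(config, 'admin', password='admin')

    # or step by step, sharing the repository between several connections
    repo = dbapi.in_memory_repo(config)
    cnx1 = dbapi.in_memory_cnx(repo, 'admin', password='admin')
    cnx2 = dbapi.in_memory_cnx(repo, 'anon', password='anon')
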
diff -r 48f468f33704 -r e4580e5f0703 debian/changelog
--- a/debian/changelog Fri Dec 10 12:17:18 2010 +0100
+++ b/debian/changelog Fri Mar 11 09:46:45 2011 +0100
@@ -1,3 +1,57 @@
+cubicweb (3.10.8-1) unstable; urgency=low
+
+ * new upstream release
+
+ -- Sylvain Thénault Wed, 02 Feb 2011 11:09:22 +0100
+
+cubicweb (3.10.7-1) unstable; urgency=low
+
+ * new upstream release
+
+ -- Sylvain Thénault Wed, 12 Jan 2011 08:50:29 +0100
+
+cubicweb (3.10.6-1) unstable; urgency=low
+
+ * new upstream release
+
+ -- Nicolas Chauvat Tue, 30 Nov 2010 22:25:41 +0100
+
+cubicweb (3.10.5-1) unstable; urgency=low
+
+ * new upstream release
+
+ -- Sylvain Thénault Mon, 25 Oct 2010 18:22:20 +0200
+
+cubicweb (3.10.4-1) unstable; urgency=low
+
+ * new upstream release
+
+ -- Sylvain Thénault Fri, 22 Oct 2010 17:41:00 +0200
+
+cubicweb (3.10.3-1) unstable; urgency=low
+
+ * new upstream release
+
+ -- Sylvain Thénault Wed, 20 Oct 2010 16:00:33 +0200
+
+cubicweb (3.10.2-1) unstable; urgency=low
+
+ * new upstream release
+
+ -- Sylvain Thénault Mon, 18 Oct 2010 11:47:37 +0200
+
+cubicweb (3.10.1-1) unstable; urgency=low
+
+ * new upstream release
+
+ -- Sylvain Thénault Fri, 15 Oct 2010 12:08:58 +0200
+
+cubicweb (3.10.0-1) unstable; urgency=low
+
+ * new upstream release
+
+ -- Sylvain Thénault Wed, 13 Oct 2010 22:18:39 +0200
+
cubicweb (3.9.9-3) unstable; urgency=low
* cubicweb-common must actually include shared/i18n folder
diff -r 48f468f33704 -r e4580e5f0703 debian/control
--- a/debian/control Fri Dec 10 12:17:18 2010 +0100
+++ b/debian/control Fri Mar 11 09:46:45 2011 +0100
@@ -7,15 +7,15 @@
Adrien Di Mascio ,
Aurélien Campéas ,
Nicolas Chauvat
-Build-Depends: debhelper (>= 5), python-dev (>=2.5), python-central (>= 0.5)
-Standards-Version: 3.8.0
+Build-Depends: debhelper (>= 7), python (>= 2.5), python-central (>= 0.5)
+Standards-Version: 3.9.1
Homepage: http://www.cubicweb.org
XS-Python-Version: >= 2.5, << 2.7
Package: cubicweb
Architecture: all
XB-Python-Version: ${python:Versions}
-Depends: ${python:Depends}, cubicweb-server (= ${source:Version}), cubicweb-twisted (= ${source:Version})
+Depends: ${misc:Depends}, ${python:Depends}, cubicweb-server (= ${source:Version}), cubicweb-twisted (= ${source:Version})
XB-Recommends: (postgresql, postgresql-plpython) | mysql | sqlite3
Recommends: postgresql | mysql | sqlite3
Description: the complete CubicWeb framework
@@ -33,8 +33,8 @@
Conflicts: cubicweb-multisources
Replaces: cubicweb-multisources
Provides: cubicweb-multisources
-Depends: ${python:Depends}, cubicweb-common (= ${source:Version}), cubicweb-ctl (= ${source:Version}), python-logilab-database (>= 1.3.2), cubicweb-postgresql-support | cubicweb-mysql-support | python-pysqlite2
-Recommends: pyro (< 4.0.0), cubicweb-documentation (= ${source:Version})
+Depends: ${misc:Depends}, ${python:Depends}, cubicweb-common (= ${source:Version}), cubicweb-ctl (= ${source:Version}), python-logilab-database (>= 1.3.3), cubicweb-postgresql-support | cubicweb-mysql-support | python-pysqlite2
+Recommends: pyro (<< 4.0.0), cubicweb-documentation (= ${source:Version})
Description: server part of the CubicWeb framework
CubicWeb is a semantic web application framework.
.
@@ -46,7 +46,7 @@
Package: cubicweb-postgresql-support
Architecture: all
# postgresql-client packages for backup/restore of non local database
-Depends: python-psycopg2, postgresql-client
+Depends: ${misc:Depends}, python-psycopg2, postgresql-client
Description: postgres support for the CubicWeb framework
CubicWeb is a semantic web application framework.
.
@@ -56,7 +56,7 @@
Package: cubicweb-mysql-support
Architecture: all
# mysql-client packages for backup/restore of non local database
-Depends: python-mysqldb, mysql-client
+Depends: ${misc:Depends}, python-mysqldb, mysql-client
Description: mysql support for the CubicWeb framework
CubicWeb is a semantic web application framework.
.
@@ -68,8 +68,8 @@
Architecture: all
XB-Python-Version: ${python:Versions}
Provides: cubicweb-web-frontend
-Depends: ${python:Depends}, cubicweb-web (= ${source:Version}), cubicweb-ctl (= ${source:Version}), python-twisted-web
-Recommends: pyro (< 4.0.0), cubicweb-documentation (= ${source:Version})
+Depends: ${misc:Depends}, ${python:Depends}, cubicweb-web (= ${source:Version}), cubicweb-ctl (= ${source:Version}), python-twisted-web
+Recommends: pyro (<< 4.0.0), cubicweb-documentation (= ${source:Version})
Description: twisted-based web interface for the CubicWeb framework
CubicWeb is a semantic web application framework.
.
@@ -82,7 +82,7 @@
Package: cubicweb-web
Architecture: all
XB-Python-Version: ${python:Versions}
-Depends: ${python:Depends}, cubicweb-common (= ${source:Version}), python-simplejson (>= 1.3)
+Depends: ${misc:Depends}, ${python:Depends}, cubicweb-common (= ${source:Version}), python-simplejson (>= 1.3)
Recommends: python-docutils, python-vobject, fckeditor, python-fyzz, python-imaging
Description: web interface library for the CubicWeb framework
CubicWeb is a semantic web application framework.
@@ -97,7 +97,7 @@
Package: cubicweb-common
Architecture: all
XB-Python-Version: ${python:Versions}
-Depends: ${python:Depends}, graphviz, gettext, python-logilab-mtconverter (>= 0.8.0), python-logilab-common (>= 0.51.0), python-yams (>= 0.30.1), python-rql (>= 0.26.3), python-lxml
+Depends: ${misc:Depends}, ${python:Depends}, graphviz, gettext, python-logilab-mtconverter (>= 0.8.0), python-logilab-common (>= 0.54.0), python-yams (>= 0.30.1), python-rql (>= 0.28.0), python-lxml
Recommends: python-simpletal (>= 4.0), python-crypto
Conflicts: cubicweb-core
Replaces: cubicweb-core
@@ -111,7 +111,7 @@
Package: cubicweb-ctl
Architecture: all
XB-Python-Version: ${python:Versions}
-Depends: ${python:Depends}, cubicweb-common (= ${source:Version})
+Depends: ${misc:Depends}, ${python:Depends}, cubicweb-common (= ${source:Version})
Description: tool to manage the CubicWeb framework
CubicWeb is a semantic web application framework.
.
@@ -123,7 +123,7 @@
Package: cubicweb-dev
Architecture: all
XB-Python-Version: ${python:Versions}
-Depends: ${python:Depends}, cubicweb-server (= ${source:Version}), cubicweb-web (= ${source:Version}), python-pysqlite2
+Depends: ${misc:Depends}, ${python:Depends}, cubicweb-server (= ${source:Version}), cubicweb-web (= ${source:Version}), python-pysqlite2
Suggests: w3c-dtd-xhtml
Description: tests suite and development tools for the CubicWeb framework
CubicWeb is a semantic web application framework.
@@ -133,7 +133,6 @@
Package: cubicweb-documentation
-Architecture: all
Recommends: doc-base
Description: documentation for the CubicWeb framework
CubicWeb is a semantic web application framework.
diff -r 48f468f33704 -r e4580e5f0703 debian/cubicweb-ctl.cubicweb.init
--- a/debian/cubicweb-ctl.cubicweb.init Fri Dec 10 12:17:18 2010 +0100
+++ b/debian/cubicweb-ctl.cubicweb.init Fri Mar 11 09:46:45 2011 +0100
@@ -2,8 +2,8 @@
### BEGIN INIT INFO
# Provides: cubicweb
-# Required-Start: $syslog $local_fs $network
-# Required-Stop: $syslog $local_fs $network
+# Required-Start: $remote_fs $syslog $local_fs $network
+# Required-Stop: $remote_fs $syslog $local_fs $network
# Should-Start: $postgresql $pyro-nsd
# Should-Stop: $postgresql $pyro-nsd
# Default-Start: 2 3 4 5
@@ -24,12 +24,12 @@
case $1 in
force-reload)
- /usr/bin/cubicweb-ctl reload --force
+ python -W ignore /usr/bin/cubicweb-ctl reload --force
;;
status)
- /usr/bin/cubicweb-ctl status
+ python -W ignore /usr/bin/cubicweb-ctl status
;;
- *)
- /usr/bin/cubicweb-ctl $1 --force
+ start|stop|restart|*)
+ python -W ignore /usr/bin/cubicweb-ctl $1 --force
;;
esac
diff -r 48f468f33704 -r e4580e5f0703 debian/cubicweb-ctl.dirs
--- a/debian/cubicweb-ctl.dirs Fri Dec 10 12:17:18 2010 +0100
+++ b/debian/cubicweb-ctl.dirs Fri Mar 11 09:46:45 2011 +0100
@@ -4,7 +4,6 @@
etc/bash_completion.d
usr/bin
usr/share/doc/cubicweb-ctl
-var/run/cubicweb
var/log/cubicweb
var/lib/cubicweb/backup
var/lib/cubicweb/instances
diff -r 48f468f33704 -r e4580e5f0703 debian/cubicweb-ctl.prerm
--- a/debian/cubicweb-ctl.prerm Fri Dec 10 12:17:18 2010 +0100
+++ b/debian/cubicweb-ctl.prerm Fri Mar 11 09:46:45 2011 +0100
@@ -2,8 +2,7 @@
case "$1" in
purge)
- rm -rf /etc/cubicweb.d/
- rm -rf /var/run/cubicweb/
+ rm -rf /etc/cubicweb.d/
rm -rf /var/log/cubicweb/
rm -rf /var/lib/cubicweb/
;;
diff -r 48f468f33704 -r e4580e5f0703 debian/cubicweb-documentation.install.in
--- a/debian/cubicweb-documentation.install.in Fri Dec 10 12:17:18 2010 +0100
+++ b/debian/cubicweb-documentation.install.in Fri Mar 11 09:46:45 2011 +0100
@@ -1,2 +1,3 @@
doc/book usr/share/doc/cubicweb-documentation
+doc/html usr/share/doc/cubicweb-documentation
debian/cubicweb-doc usr/share/doc-base/cubicweb-doc
diff -r 48f468f33704 -r e4580e5f0703 devtools/__init__.py
--- a/devtools/__init__.py Fri Dec 10 12:17:18 2010 +0100
+++ b/devtools/__init__.py Fri Mar 11 09:46:45 2011 +0100
@@ -35,30 +35,19 @@
# db auto-population configuration #############################################
-SYSTEM_ENTITIES = schema.SCHEMA_TYPES | set((
- 'CWGroup', 'CWUser', 'CWProperty',
- 'Workflow', 'State', 'BaseTransition', 'Transition', 'WorkflowTransition',
- 'TrInfo', 'SubWorkflowExitPoint',
- ))
-
-SYSTEM_RELATIONS = schema.META_RTYPES | set((
- # workflow related
- 'workflow_of', 'state_of', 'transition_of', 'initial_state', 'default_workflow',
- 'allowed_transition', 'destination_state', 'from_state', 'to_state',
- 'condition', 'subworkflow', 'subworkflow_state', 'subworkflow_exit',
- 'custom_workflow', 'in_state', 'wf_info_for',
- # cwproperty
- 'for_user',
- # schema definition
- 'specializes',
- 'relation_type', 'from_entity', 'to_entity',
- 'constrained_by', 'cstrtype', 'widget',
- 'read_permission', 'update_permission', 'delete_permission', 'add_permission',
- # permission
- 'in_group', 'require_group', 'require_permission',
- # deducted from other relations
- 'primary_email',
- ))
+SYSTEM_ENTITIES = (schema.SCHEMA_TYPES
+ | schema.INTERNAL_TYPES
+ | schema.WORKFLOW_TYPES
+ | set(('CWGroup', 'CWUser',))
+ )
+SYSTEM_RELATIONS = (schema.META_RTYPES
+ | schema.WORKFLOW_RTYPES
+ | schema.WORKFLOW_DEF_RTYPES
+ | schema.SYSTEM_RTYPES
+ | schema.SCHEMA_TYPES
+ | set(('primary_email', # deducted from other relations
+ ))
+ )
# content validation configuration #############################################
@@ -96,22 +85,8 @@
read_instance_schema = False
init_repository = True
db_require_setup = True
- options = cwconfig.merge_options(ServerConfiguration.options + (
- ('anonymous-user',
- {'type' : 'string',
- 'default': None,
- 'help': 'login of the CubicWeb user account to use for anonymous user (if you want to allow anonymous)',
- 'group': 'main', 'level': 1,
- }),
- ('anonymous-password',
- {'type' : 'string',
- 'default': None,
- 'help': 'password of the CubicWeb user account matching login',
- 'group': 'main', 'level': 1,
- }),
- ))
- def __init__(self, appid, apphome=None, log_threshold=logging.CRITICAL+10):
+ def __init__(self, appid='data', apphome=None, log_threshold=logging.CRITICAL+10):
# must be set before calling parent __init__
if apphome is None:
if exists(appid):
@@ -124,7 +99,20 @@
# need this, usually triggered by cubicweb-ctl
self.load_cwctl_plugins()
- anonymous_user = TwistedConfiguration.anonymous_user.im_func
+    # By default anonymous logins are allowed, but some tests need to deny them
+    # or to change the default user. Set it to None to prevent anonymous login.
+ anonymous_credential = ('anon', 'anon')
+
+ def anonymous_user(self):
+ if not self.anonymous_credential:
+ return None, None
+ return self.anonymous_credential
+
+ def set_anonymous_allowed(self, allowed, anonuser='anon'):
+ if allowed:
+ self.anonymous_credential = (anonuser, anonuser)
+ else:
+ self.anonymous_credential = None
@property
def apphome(self):
@@ -133,8 +121,6 @@
def load_configuration(self):
super(TestServerConfiguration, self).load_configuration()
- self.global_set_option('anonymous-user', 'anon')
- self.global_set_option('anonymous-password', 'anon')
# no undo support in tests
self.global_set_option('undo-support', '')
@@ -170,6 +156,8 @@
sources = super(TestServerConfiguration, self).sources()
if not sources:
sources = DEFAULT_SOURCES
+ if 'admin' not in sources:
+ sources['admin'] = DEFAULT_SOURCES['admin']
return sources
# web config methods needed here for cases when we use this config as a web
@@ -184,6 +172,7 @@
class BaseApptestConfiguration(TestServerConfiguration, TwistedConfiguration):
repo_method = 'inmemory'
+    name = 'all-in-one' # so it searches for all-in-one.conf, not repository.conf
options = cwconfig.merge_options(TestServerConfiguration.options
+ TwistedConfiguration.options)
cubicweb_appobject_path = TestServerConfiguration.cubicweb_appobject_path | TwistedConfiguration.cubicweb_appobject_path
@@ -233,10 +222,10 @@
# test database handling #######################################################
-def init_test_database(config=None, configdir='data'):
+def init_test_database(config=None, appid='data', apphome=None):
"""init a test database for a specific driver"""
- from cubicweb.dbapi import in_memory_cnx
- config = config or TestServerConfiguration(configdir)
+ from cubicweb.dbapi import in_memory_repo_cnx
+ config = config or TestServerConfiguration(appid, apphome=apphome)
sources = config.sources()
driver = sources['system']['db-driver']
if config.db_require_setup:
@@ -247,7 +236,7 @@
else:
raise ValueError('no initialization function for driver %r' % driver)
config._cubes = None # avoid assertion error
- repo, cnx = in_memory_cnx(config, unicode(sources['admin']['login']),
+ repo, cnx = in_memory_repo_cnx(config, unicode(sources['admin']['login']),
password=sources['admin']['password'] or 'xxx')
if driver == 'sqlite':
install_sqlite_patch(repo.querier)
@@ -344,12 +333,13 @@
def init_test_database_sqlite(config):
"""initialize a fresh sqlite databse used for testing purpose"""
# remove database file if it exists
+ dbfile = join(config.apphome, config.sources()['system']['db-name'])
+ config.sources()['system']['db-name'] = dbfile
if not reset_test_database_sqlite(config):
# initialize the database
import shutil
from cubicweb.server import init_repository
init_repository(config, interactive=False)
- dbfile = config.sources()['system']['db-name']
shutil.copy(dbfile, '%s-template' % dbfile)
def install_sqlite_patch(querier):
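
Anonymous access in test configurations is now driven by the `anonymous_credential` pair instead of the removed `anonymous-user` / `anonymous-password` options. A minimal sketch of toggling it from a test, relying on the new default 'data' application id::

    from cubicweb.devtools import TestServerConfiguration

    config = TestServerConfiguration()      # appid now defaults to 'data'
    config.anonymous_user()                 # -> ('anon', 'anon')
    config.set_anonymous_allowed(False)     # deny anonymous login
    config.anonymous_user()                 # -> (None, None)
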
diff -r 48f468f33704 -r e4580e5f0703 devtools/cwwindmill.py
--- a/devtools/cwwindmill.py Fri Dec 10 12:17:18 2010 +0100
+++ b/devtools/cwwindmill.py Fri Mar 11 09:46:45 2011 +0100
@@ -29,120 +29,131 @@
import sys
# imported by default to simplify further import statements
-from logilab.common.testlib import TestCase, unittest_main
+from logilab.common.testlib import TestCase, unittest_main, Tags
-import windmill
-from windmill.dep import functest
-from windmill.bin.admin_lib import configure_global_settings, setup, teardown
+try:
+ import windmill
+ from windmill.dep import functest
+ from windmill.bin.admin_lib import configure_global_settings, setup, teardown
+except ImportError, ex:
+ windmill = None
from cubicweb.devtools.httptest import CubicWebServerTC, CubicWebServerConfig
+if windmill is None:
+ class CubicWebWindmillUseCase(CubicWebServerTC):
+ tags = CubicWebServerTC.tags & Tags(('windmill',))
-# Excerpt from :ref:`windmill.authoring.unit`
-class UnitTestReporter(functest.reports.FunctestReportInterface):
- def summary(self, test_list, totals_dict, stdout_capture):
- self.test_list = test_list
-
-unittestreporter = UnitTestReporter()
-functest.reports.register_reporter(unittestreporter)
+ def testWindmill(self):
+ self.skipTest("can't import windmill %s" % ex)
+else:
+ # Excerpt from :ref:`windmill.authoring.unit`
+ class UnitTestReporter(functest.reports.FunctestReportInterface):
+ def summary(self, test_list, totals_dict, stdout_capture):
+ self.test_list = test_list
-class CubicWebWindmillUseCase(CubicWebServerTC):
- """basic class for Windmill use case tests
+ unittestreporter = UnitTestReporter()
+ functest.reports.register_reporter(unittestreporter)
+
+ class CubicWebWindmillUseCase(CubicWebServerTC):
+ """basic class for Windmill use case tests
- If you want to change cubicweb test server parameters, define a new
- :class:`CubicWebServerConfig` and override the :var:`configcls`
- attribute:
+ If you want to change cubicweb test server parameters, define a new
+ :class:`CubicWebServerConfig` and override the :var:`configcls`
+ attribute:
- configcls = CubicWebServerConfig
+ configcls = CubicWebServerConfig
- From Windmill configuration:
+ From Windmill configuration:
- .. attribute:: browser
- identification string (firefox|ie|safari|chrome) (firefox by default)
- .. attribute :: edit_test
- load and edit test for debugging (False by default)
- .. attribute:: test_dir (optional)
- testing file path or directory (windmill directory under your unit case
- file by default)
+ .. attribute:: browser
+ identification string (firefox|ie|safari|chrome) (firefox by default)
+ .. attribute :: edit_test
+ load and edit test for debugging (False by default)
+ .. attribute:: test_dir (optional)
+ testing file path or directory (windmill directory under your unit case
+ file by default)
+
+ Examples:
- Examples:
+ browser = 'firefox'
+ test_dir = osp.join(__file__, 'windmill')
+ edit_test = False
+ If you prefer, you can put here the use cases recorded by windmill GUI
+ (services transformer) instead of the windmill sub-directory
+ You can change `test_dir` as following:
+
+ test_dir = __file__
+
+ Instead of toggle `edit_test` value, try `pytest -i`
+ """
browser = 'firefox'
- test_dir = osp.join(__file__, 'windmill')
- edit_test = False
-
- If you prefer, you can put here the use cases recorded by windmill GUI
- (services transformer) instead of the windmill sub-directory
- You can change `test_dir` as following:
+ edit_test = "-i" in sys.argv # detection for pytest invocation
+ # Windmill use case are written with no anonymous user
+ anonymous_allowed = False
- test_dir = __file__
-
- Instead of toggle `edit_test` value, try `pytest -i`
- """
- browser = 'firefox'
- edit_test = "-i" in sys.argv # detection for pytest invocation
- # Windmill use case are written with no anonymous user
- anonymous_logged = False
+ tags = CubicWebServerTC.tags & Tags(('windmill',))
- def _test_dir(self):
- """access to class attribute if possible or make assumption
- of expected directory"""
- try:
- return getattr(self, 'test_dir')
- except AttributeError:
- if os.path.basename(sys.argv[0]) == "pytest":
- test_dir = os.getcwd()
- else:
- import inspect
- test_dir = os.path.dirname(inspect.stack()[-1][1])
- return osp.join(test_dir, 'windmill')
+ def _test_dir(self):
+ """access to class attribute if possible or make assumption
+ of expected directory"""
+ try:
+ return getattr(self, 'test_dir')
+ except AttributeError:
+ if os.path.basename(sys.argv[0]) == "pytest":
+ test_dir = os.getcwd()
+ else:
+ import inspect
+ test_dir = os.path.dirname(inspect.stack()[-1][1])
+ return osp.join(test_dir, 'windmill')
- def setUp(self):
- # Start CubicWeb session before running the server to populate self.vreg
- CubicWebServerTC.setUp(self)
- # XXX reduce log output (should be done in a cleaner way)
- # windmill fu** up our logging configuration
- for logkey in ('windmill', 'logilab', 'cubicweb'):
- getLogger(logkey).setLevel(ERROR)
- self.test_dir = self._test_dir()
- msg = "provide a valid 'test_dir' as the given test file/dir (current: %s)"
- assert os.path.exists(self.test_dir), (msg % self.test_dir)
- # windmill setup
- windmill.stdout, windmill.stdin = sys.stdout, sys.stdin
- configure_global_settings()
- windmill.settings['TEST_URL'] = self.config['base-url']
- if hasattr(self,"windmill_settings"):
- for (setting,value) in self.windmill_settings.iteritems():
- windmill.settings[setting] = value
- self.windmill_shell_objects = setup()
+ def setUp(self):
+ # Start CubicWeb session before running the server to populate self.vreg
+ CubicWebServerTC.setUp(self)
+ # XXX reduce log output (should be done in a cleaner way)
+ # windmill messes up our logging configuration
+ for logkey in ('windmill', 'logilab', 'cubicweb'):
+ getLogger(logkey).setLevel(ERROR)
+ self.test_dir = self._test_dir()
+ msg = "provide a valid 'test_dir' as the given test file/dir (current: %s)"
+ assert os.path.exists(self.test_dir), (msg % self.test_dir)
+ # windmill setup
+ windmill.stdout, windmill.stdin = sys.stdout, sys.stdin
+ configure_global_settings()
+ windmill.settings['TEST_URL'] = self.config['base-url']
+ if hasattr(self, "windmill_settings"):
+ for (setting, value) in self.windmill_settings.iteritems():
+ windmill.settings[setting] = value
+ self.windmill_shell_objects = setup()
- def tearDown(self):
- teardown(self.windmill_shell_objects)
- CubicWebServerTC.tearDown(self)
+ def tearDown(self):
+ teardown(self.windmill_shell_objects)
+ CubicWebServerTC.tearDown(self)
- def testWindmill(self):
- if self.edit_test:
- # see windmill.bin.admin_options.Firebug
- windmill.settings['INSTALL_FIREBUG'] = 'firebug'
- windmill.settings.setdefault('MOZILLA_PLUGINS', []).extend(
- ['/usr/share/mozilla-extensions/',
- '/usr/share/xul-ext/'])
- controller = self.windmill_shell_objects['start_' + self.browser]()
- self.windmill_shell_objects['do_test'](self.test_dir,
- load=self.edit_test,
- threaded=False)
- # set a breakpoint to be able to debug windmill test
- if self.edit_test:
- import pdb; pdb.set_trace()
- return
+ def testWindmill(self):
+ if self.edit_test:
+ # see windmill.bin.admin_options.Firebug
+ windmill.settings['INSTALL_FIREBUG'] = 'firebug'
+ windmill.settings.setdefault('MOZILLA_PLUGINS', []).extend(
+ ['/usr/share/mozilla-extensions/',
+ '/usr/share/xul-ext/'])
+ controller = self.windmill_shell_objects['start_' + self.browser]()
+ self.windmill_shell_objects['do_test'](self.test_dir,
+ load=self.edit_test,
+ threaded=False)
+ # set a breakpoint to be able to debug windmill test
+ if self.edit_test:
+ import pdb; pdb.set_trace()
+ return
- # reporter
- for test in unittestreporter.test_list:
- msg = ""
- self._testMethodDoc = getattr(test, "__doc__", None)
- self._testMethodName = test.__name__
- # try to display a better message in case of failure
- if hasattr(test, "tb"):
- msg = '\n'.join(test.tb)
- self.assertEqual(test.result, True, msg=msg)
+ # reporter
+ for test in unittestreporter.test_list:
+ msg = ""
+ self._testMethodDoc = getattr(test, "__doc__", None)
+ self._testMethodName = test.__name__
+ # try to display a better message in case of failure
+ if hasattr(test, "tb"):
+ msg = '\n'.join(test.tb)
+ self.assertEqual(test.result, True, msg=msg)
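
The hunk above turns windmill into an optional test dependency: when the import fails, a stub test case is registered that merely skips with the import error, so test discovery still reports the windmill suite instead of crashing. A minimal, self-contained sketch of that pattern, with illustrative class names (not the actual CubicWeb ones):

.. sourcecode:: python

    # Sketch of the "optional dependency -> skipping test" pattern used above.
    import unittest

    try:
        import windmill                      # optional dependency, may be absent
    except ImportError as exc:
        windmill = None
        _import_error = exc

    if windmill is None:
        class OptionalToolTC(unittest.TestCase):
            def test_tool(self):
                # keep the test visible in reports, but mark it as skipped
                self.skipTest("can't import windmill: %s" % _import_error)
    else:
        class OptionalToolTC(unittest.TestCase):
            def test_tool(self):
                self.assertTrue(hasattr(windmill, 'settings'))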
diff -r 48f468f33704 -r e4580e5f0703 devtools/dataimport.py
--- a/devtools/dataimport.py Fri Dec 10 12:17:18 2010 +0100
+++ b/devtools/dataimport.py Fri Mar 11 09:46:45 2011 +0100
@@ -1,4 +1,4 @@
-# pylint: disable-msg=W0614,W0401
+# pylint: disable=W0614,W0401
# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
diff -r 48f468f33704 -r e4580e5f0703 devtools/devctl.py
--- a/devtools/devctl.py Fri Dec 10 12:17:18 2010 +0100
+++ b/devtools/devctl.py Fri Mar 11 09:46:45 2011 +0100
@@ -127,8 +127,7 @@
from copy import deepcopy
from cubicweb.i18n import add_msg
from cubicweb.web import uicfg
- from cubicweb.schema import META_RTYPES, SYSTEM_RTYPES, CONSTRAINTS
- no_context_rtypes = META_RTYPES | SYSTEM_RTYPES
+ from cubicweb.schema import NO_I18NCONTEXT, CONSTRAINTS
w('# schema pot file, generated on %s\n'
% datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
w('# \n')
@@ -217,13 +216,13 @@
else:
librschema = libschema.rschema(rtype)
# add context information only for non-metadata rtypes
- if rschema not in no_context_rtypes:
+ if rschema not in NO_I18NCONTEXT:
libsubjects = librschema and librschema.subjects() or ()
for subjschema in rschema.subjects():
if not subjschema in libsubjects:
add_msg(w, rtype, subjschema.type)
if not (schema.rschema(rtype).final or rschema.symmetric):
- if rschema not in no_context_rtypes:
+ if rschema not in NO_I18NCONTEXT:
libobjects = librschema and librschema.objects() or ()
for objschema in rschema.objects():
if not objschema in libobjects:
@@ -239,6 +238,8 @@
def _iter_vreg_objids(vreg, done):
for reg, objdict in vreg.items():
+ if reg in ('boxes', 'contentnavigation'):
+ continue
for objects in objdict.values():
for obj in objects:
objid = '%s_%s' % (reg, obj.__regid__)
@@ -345,7 +346,7 @@
print 'when you are done, run "cubicweb-ctl i18ncube yourcube".'
-class UpdateTemplateCatalogCommand(Command):
+class UpdateCubeCatalogCommand(Command):
"""Update i18n catalogs for cubes. If no cube is specified, update
catalogs of all registered cubes.
"""
@@ -782,7 +783,7 @@
print make_qunit_html(args[0], args[1:])
for cmdcls in (UpdateCubicWebCatalogCommand,
- UpdateTemplateCatalogCommand,
+ UpdateCubeCatalogCommand,
#LiveServerCommand,
NewCubeCommand,
ExamineLogCommand,
diff -r 48f468f33704 -r e4580e5f0703 devtools/fake.py
--- a/devtools/fake.py Fri Dec 10 12:17:18 2010 +0100
+++ b/devtools/fake.py Fri Mar 11 09:46:45 2011 +0100
@@ -127,6 +127,16 @@
def validate_cache(self):
pass
+ def build_url_params(self, **kwargs):
+ # overridden to get predictable results
+ args = []
+ for param, values in sorted(kwargs.iteritems()):
+ if not isinstance(values, (list, tuple)):
+ values = (values,)
+ for value in values:
+ assert value is not None
+ args.append(u'%s=%s' % (param, self.url_quote(value)))
+ return '&'.join(args)
class FakeUser(object):
login = 'toto'
@@ -170,6 +180,7 @@
self.config = config or FakeConfig()
self.vreg = vreg or CubicWebVRegistry(self.config, initlog=False)
self.vreg.schema = schema
+ self.sources = []
def internal_session(self):
return FakeSession(self)
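
The `build_url_params` override added to the fake request above exists to make generated URLs deterministic: keyword arguments are serialized in sorted order so tests can compare them literally. A rough standalone equivalent, where `quote` from the standard library stands in for the request's own `url_quote` method (an assumption for the sketch):

.. sourcecode:: python

    # Rough sketch of deterministic URL parameter building, as in FakeRequest above.
    try:
        from urllib import quote          # Python 2
    except ImportError:
        from urllib.parse import quote    # Python 3

    def build_url_params(**kwargs):
        args = []
        for param, values in sorted(kwargs.items()):   # sorted -> predictable order
            if not isinstance(values, (list, tuple)):
                values = (values,)
            for value in values:
                assert value is not None
                args.append(u'%s=%s' % (param, quote(value)))
        return u'&'.join(args)

    print(build_url_params(vid='index', rql='Any X'))  # rql=Any%20X&vid=index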
diff -r 48f468f33704 -r e4580e5f0703 devtools/fill.py
--- a/devtools/fill.py Fri Dec 10 12:17:18 2010 +0100
+++ b/devtools/fill.py Fri Mar 11 09:46:45 2011 +0100
@@ -27,7 +27,7 @@
from logilab.common import attrdict
from yams.constraints import (SizeConstraint, StaticVocabularyConstraint,
- IntervalBoundConstraint, BoundConstraint,
+ IntervalBoundConstraint, BoundaryConstraint,
Attribute, actual_value)
from rql.utils import decompose_b26 as base_decompose_b26
@@ -185,10 +185,12 @@
minvalue = maxvalue - (index * step) # i.e. randint(-index, 0)
return choice(list(custom_range(minvalue, maxvalue, step)))
- def _actual_boundary(self, entity, boundary):
+ def _actual_boundary(self, entity, attrname, boundary):
if isinstance(boundary, Attribute):
# ensure we've a value for this attribute
- self.generate_attribute_value(entity, boundary.attr)
+ entity[attrname] = None # infinite loop safety belt
+ if not boundary.attr in entity:
+ self.generate_attribute_value(entity, boundary.attr)
boundary = actual_value(boundary, entity)
return boundary
@@ -196,13 +198,13 @@
minvalue = maxvalue = None
for cst in self.eschema.rdef(attrname).constraints:
if isinstance(cst, IntervalBoundConstraint):
- minvalue = self._actual_boundary(entity, cst.minvalue)
- maxvalue = self._actual_boundary(entity, cst.maxvalue)
- elif isinstance(cst, BoundConstraint):
+ minvalue = self._actual_boundary(entity, attrname, cst.minvalue)
+ maxvalue = self._actual_boundary(entity, attrname, cst.maxvalue)
+ elif isinstance(cst, BoundaryConstraint):
if cst.operator[0] == '<':
- maxvalue = self._actual_boundary(entity, cst.boundary)
+ maxvalue = self._actual_boundary(entity, attrname, cst.boundary)
else:
- minvalue = self._actual_boundary(entity, cst.boundary)
+ minvalue = self._actual_boundary(entity, attrname, cst.boundary)
return minvalue, maxvalue
def get_choice(self, entity, attrname):
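
The fill.py change above passes the attribute name down to `_actual_boundary` so the generator can pre-assign `None` to the attribute being generated before recursing into the attribute its boundary depends on; that pre-assignment is the "infinite loop safety belt" when two attributes bound each other. A toy sketch of the idea, with the entity modelled as a plain dict and a constant standing in for the real generated value:

.. sourcecode:: python

    # Toy sketch of the "safety belt" above: pre-marking the attribute being
    # generated breaks mutual recursion between interdependent boundaries.
    def generate_value(entity, attrname, boundary_of):
        if attrname in entity:              # already generated (or in progress)
            return entity[attrname]
        entity[attrname] = None             # safety belt against infinite recursion
        dep = boundary_of.get(attrname)
        if dep is not None and dep not in entity:
            generate_value(entity, dep, boundary_of)
        entity[attrname] = 42               # stand-in for the real random value
        return entity[attrname]

    entity = {}
    # 'low' is bounded by 'high' and 'high' by 'low': generation still terminates
    generate_value(entity, 'low', {'low': 'high', 'high': 'low'})
    print(entity)                           # both attributes end up with a value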
diff -r 48f468f33704 -r e4580e5f0703 devtools/htmlparser.py
--- a/devtools/htmlparser.py Fri Dec 10 12:17:18 2010 +0100
+++ b/devtools/htmlparser.py Fri Mar 11 09:46:45 2011 +0100
@@ -15,16 +15,17 @@
#
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
-"""defines a validating HTML parser used in web application tests
-
-"""
+"""defines a validating HTML parser used in web application tests"""
import re
import sys
from lxml import etree
+from logilab.common.deprecation import class_deprecated
+
from cubicweb.view import STRICT_DOCTYPE, TRANSITIONAL_DOCTYPE
+
STRICT_DOCTYPE = str(STRICT_DOCTYPE)
TRANSITIONAL_DOCTYPE = str(TRANSITIONAL_DOCTYPE)
@@ -51,10 +52,7 @@
def __init__(self):
Validator.__init__(self)
# XXX understand what's happening under windows
- validate = True
- if sys.platform == 'win32':
- validate = False
- self.parser = etree.XMLParser(dtd_validation=validate)
+ self.parser = etree.XMLParser(dtd_validation=sys.platform != 'win32')
def preprocess_data(self, data):
"""used to fix potential blockquote mess generated by docutils"""
@@ -87,12 +85,14 @@
Validator.__init__(self)
self.parser = etree.XMLParser()
+
class XMLDemotingValidator(SaxOnlyValidator):
""" some views produce html instead of xhtml, using demote_to_html
this is typically related to the use of external dependencies
which do not produce valid xhtml (google maps, ...)
"""
+ __metaclass__ = class_deprecated
def preprocess_data(self, data):
if data.startswith(' appears in the page"""
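
`XMLDemotingValidator` is retired above by giving it the `class_deprecated` metaclass, so instantiating it warns instead of silently doing extra work. A toy sketch of what such a metaclass does; this is not logilab's implementation, and the three-argument `type` call is used only so the snippet runs under both Python 2 and 3 class syntaxes:

.. sourcecode:: python

    # Toy sketch of a "deprecated class" metaclass: instantiation emits a warning.
    import warnings

    class DeprecatedMeta(type):
        def __call__(cls, *args, **kwargs):
            warnings.warn('%s is deprecated' % cls.__name__,
                          DeprecationWarning, stacklevel=2)
            return super(DeprecatedMeta, cls).__call__(*args, **kwargs)

    # equivalent to `__metaclass__ = DeprecatedMeta` in Python 2 class syntax
    OldValidator = DeprecatedMeta('OldValidator', (object,), {})

    OldValidator()   # -> DeprecationWarning: OldValidator is deprecated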
diff -r 48f468f33704 -r e4580e5f0703 devtools/httptest.py
--- a/devtools/httptest.py Fri Dec 10 12:17:18 2010 +0100
+++ b/devtools/httptest.py Fri Mar 11 09:46:45 2011 +0100
@@ -89,12 +89,11 @@
"""Class for running test web server. See :class:`CubicWebServerConfig`.
Class attributes:
- * ` anonymous_logged`: flag telling ifs anonymous user should be log logged
- by default (True by default)
+ * `anonymous_allowed`: flag telling if anonymous browsing should be allowed
"""
configcls = CubicWebServerConfig
# anonymous is logged by default in cubicweb test cases
- anonymous_logged = True
+ anonymous_allowed = True
def start_server(self):
# use a semaphore to avoid starting test while the http server isn't
@@ -176,7 +175,7 @@
return response
def setUp(self):
- CubicWebTC.setUp(self)
+ super(CubicWebServerTC, self).setUp()
self.start_server()
def tearDown(self):
@@ -185,13 +184,9 @@
except error.ReactorNotRunning, err:
# Server could be launched manually
print err
- CubicWebTC.tearDown(self)
+ super(CubicWebServerTC, self).tearDown()
@classmethod
def init_config(cls, config):
+ config.set_anonymous_allowed(cls.anonymous_allowed)
super(CubicWebServerTC, cls).init_config(config)
- if not cls.anonymous_logged:
- config.global_set_option('anonymous-user', None)
- else:
- config.global_set_option('anonymous-user', 'anon')
- config.global_set_option('anonymous-password', 'anon')
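
The httptest change above replaces the `anonymous_logged` switch with `anonymous_allowed`, and pushes it into the configuration through `config.set_anonymous_allowed()` during `init_config`. From a test writer's point of view the switch stays a one-line class attribute; a hedged sketch (the base class comes from the hunk above, the test body is illustrative):

.. sourcecode:: python

    # Sketch: a server test case that forbids anonymous browsing with the new flag.
    from cubicweb.devtools.httptest import CubicWebServerTC

    class AuthenticatedOnlyTC(CubicWebServerTC):
        anonymous_allowed = False          # replaces the old `anonymous_logged`

        def test_denied_without_login(self):
            response = self.web_get()
            self.assertEqual(response.status, 403)   # httplib.FORBIDDEN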
diff -r 48f468f33704 -r e4580e5f0703 devtools/livetest.py
--- a/devtools/livetest.py Fri Dec 10 12:17:18 2010 +0100
+++ b/devtools/livetest.py Fri Mar 11 09:46:45 2011 +0100
@@ -35,7 +35,7 @@
from logilab.common.testlib import TestCase
-from cubicweb.dbapi import in_memory_cnx
+from cubicweb.dbapi import in_memory_repo_cnx
from cubicweb.etwist.server import CubicWebRootResource
from cubicweb.devtools import BaseApptestConfiguration, init_test_database
@@ -164,7 +164,7 @@
# build a config, and get a connection
self.config = LivetestConfiguration(self.cube, self.sourcefile)
_, user, passwd, _ = loadconf()
- self.repo, self.cnx = in_memory_cnx(self.config, user, password=passwd)
+ self.repo, self.cnx = in_memory_repo_cnx(self.config, user, password=passwd)
self.setup_db(self.cnx)
def tearDown(self):
diff -r 48f468f33704 -r e4580e5f0703 devtools/qunit.py
--- a/devtools/qunit.py Fri Dec 10 12:17:18 2010 +0100
+++ b/devtools/qunit.py Fri Mar 11 09:46:45 2011 +0100
@@ -8,7 +8,7 @@
from uuid import uuid4
# imported by default to simplify further import statements
-from logilab.common.testlib import unittest_main, with_tempdir, InnerTest
+from logilab.common.testlib import unittest_main, with_tempdir, InnerTest, Tags
from logilab.common.shellutils import getlogin
import cubicweb
@@ -86,6 +86,8 @@
class QUnitTestCase(CubicWebServerTC):
+ tags = CubicWebServerTC.tags | Tags(('qunit',))
+
# testfile, (dep_a, dep_b)
all_js_tests = ()
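
Both the windmill and qunit test classes above gain a `tags` class attribute built from `Tags`, so whole families of slow browser tests can be selected or excluded as a group. A minimal hedged sketch of the idiom, assuming (as the `|` and `&` expressions above suggest) that `Tags` behaves like a set of labels:

.. sourcecode:: python

    # Hedged sketch: labelling a test class with Tags, as in the hunks above.
    from logilab.common.testlib import TestCase, Tags

    BASE_TAGS = Tags(('cubicweb',))

    class TaggedTC(TestCase):
        tags = BASE_TAGS | Tags(('qunit',))   # same `|` combination as in qunit.py

        def test_noop(self):
            self.assertTrue(True)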
diff -r 48f468f33704 -r e4580e5f0703 devtools/repotest.py
--- a/devtools/repotest.py Fri Dec 10 12:17:18 2010 +0100
+++ b/devtools/repotest.py Fri Mar 11 09:46:45 2011 +0100
@@ -22,9 +22,11 @@
__docformat__ = "restructuredtext en"
+from copy import deepcopy
from pprint import pprint
from logilab.common.decorators import clear_cache
+from logilab.common.testlib import SkipTest
def tuplify(list):
for i in range(len(list)):
@@ -140,7 +142,7 @@
from rql import RQLHelper
from cubicweb.devtools.fake import FakeRepo, FakeSession
-from cubicweb.server import set_debug
+from cubicweb.server import set_debug, debugged
from cubicweb.server.querier import QuerierHelper
from cubicweb.server.session import Session
from cubicweb.server.sources.rql2sql import SQLGenerator, remove_unused_solutions
@@ -148,6 +150,15 @@
class RQLGeneratorTC(TestCase):
schema = backend = None # set this in concret test
+
+ @classmethod
+ def setUpClass(cls):
+ if cls.backend is not None:
+ try:
+ cls.dbhelper = get_db_helper(cls.backend)
+ except ImportError, ex:
+ raise SkipTest(str(ex))
+
def setUp(self):
self.repo = FakeRepo(self.schema)
self.repo.system_source = mock_object(dbdriver=self.backend)
@@ -158,11 +169,7 @@
ExecutionPlan._check_permissions = _dummy_check_permissions
rqlannotation._select_principal = _select_principal
if self.backend is not None:
- try:
- dbhelper = get_db_helper(self.backend)
- except ImportError, ex:
- self.skipTest(str(ex))
- self.o = SQLGenerator(self.schema, dbhelper)
+ self.o = SQLGenerator(self.schema, self.dbhelper)
def tearDown(self):
ExecutionPlan._check_permissions = _orig_check_permissions
@@ -170,6 +177,8 @@
def set_debug(self, debug):
set_debug(debug)
+ def debugged(self, debug):
+ return debugged(debug)
def _prepare(self, rql):
#print '******************** prepare', rql
@@ -221,6 +230,8 @@
def set_debug(self, debug):
set_debug(debug)
+ def debugged(self, debug):
+ return debugged(debug)
def _rqlhelper(self):
rqlhelper = self.repo.vreg.rqlhelper
@@ -284,8 +295,7 @@
self.repo.vreg.rqlhelper.backend = 'postgres' # so FTIRANK is considered
def add_source(self, sourcecls, uri):
- self.sources.append(sourcecls(self.repo, self.o.schema,
- {'uri': uri}))
+ self.sources.append(sourcecls(self.repo, {'uri': uri}))
self.repo.sources_by_uri[uri] = self.sources[-1]
setattr(self, uri, self.sources[-1])
self.newsources += 1
@@ -364,17 +374,17 @@
from cubicweb.server.msplanner import PartPlanInformation
except ImportError:
class PartPlanInformation(object):
- def merge_input_maps(self, *args):
+ def merge_input_maps(self, *args, **kwargs):
pass
def _choose_term(self, sourceterms):
pass
_orig_merge_input_maps = PartPlanInformation.merge_input_maps
_orig_choose_term = PartPlanInformation._choose_term
-def _merge_input_maps(*args):
- return sorted(_orig_merge_input_maps(*args))
+def _merge_input_maps(*args, **kwargs):
+ return sorted(_orig_merge_input_maps(*args, **kwargs))
-def _choose_term(self, sourceterms):
+def _choose_term(self, source, sourceterms):
# predictable order for test purpose
def get_key(x):
try:
@@ -387,8 +397,13 @@
except AttributeError:
# const
return x.value
- return _orig_choose_term(self, DumbOrderedDict2(sourceterms, get_key))
+ return _orig_choose_term(self, source, DumbOrderedDict2(sourceterms, get_key))
+from cubicweb.server.sources.pyrorql import PyroRQLSource
+_orig_syntax_tree_search = PyroRQLSource.syntax_tree_search
+
+def _syntax_tree_search(*args, **kwargs):
+ return deepcopy(_orig_syntax_tree_search(*args, **kwargs))
def do_monkey_patch():
RQLRewriter.insert_snippets = _insert_snippets
@@ -398,6 +413,7 @@
ExecutionPlan.init_temp_table = _init_temp_table
PartPlanInformation.merge_input_maps = _merge_input_maps
PartPlanInformation._choose_term = _choose_term
+ PyroRQLSource.syntax_tree_search = _syntax_tree_search
def undo_monkey_patch():
RQLRewriter.insert_snippets = _orig_insert_snippets
@@ -406,3 +422,4 @@
ExecutionPlan.init_temp_table = _orig_init_temp_table
PartPlanInformation.merge_input_maps = _orig_merge_input_maps
PartPlanInformation._choose_term = _orig_choose_term
+ PyroRQLSource.syntax_tree_search = _orig_syntax_tree_search
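
The repotest changes above extend an existing save/patch/restore scheme: `do_monkey_patch` swaps in wrappers that make multi-source planning deterministic (sorted merge maps, ordered term choice, `deepcopy` of `PyroRQLSource.syntax_tree_search` results), and `undo_monkey_patch` puts the originals back. A generic, self-contained sketch of that scheme with an illustrative target class:

.. sourcecode:: python

    # Generic sketch of the save/patch/restore scheme used by do_monkey_patch /
    # undo_monkey_patch above.  The Source class and wrapper are illustrative.
    from copy import deepcopy

    class Source(object):                       # stand-in for PyroRQLSource
        def syntax_tree_search(self, tree):
            return tree                         # pretend this is a cached object

    _orig_search = Source.syntax_tree_search

    def _patched_search(self, tree):
        # hand back a copy so test code can mutate results without side effects
        return deepcopy(_orig_search(self, tree))

    def do_monkey_patch():
        Source.syntax_tree_search = _patched_search

    def undo_monkey_patch():
        Source.syntax_tree_search = _orig_search

    do_monkey_patch()
    try:
        result = Source().syntax_tree_search({'where': []})
        result['where'].append('X is CWUser')   # the "cached" dict stays untouched
    finally:
        undo_monkey_patch()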
diff -r 48f468f33704 -r e4580e5f0703 devtools/test/unittest_httptest.py
--- a/devtools/test/unittest_httptest.py Fri Dec 10 12:17:18 2010 +0100
+++ b/devtools/test/unittest_httptest.py Fri Mar 11 09:46:45 2011 +0100
@@ -19,6 +19,7 @@
import httplib
+from logilab.common.testlib import Tags
from cubicweb.devtools.httptest import CubicWebServerTC, CubicWebServerConfig
@@ -40,7 +41,9 @@
class TwistedCWIdentTC(CubicWebServerTC):
- anonymous_logged = False
+
+ anonymous_allowed = False
+ tags = CubicWebServerTC.tags | Tags(('auth',))
def test_response_denied(self):
response = self.web_get()
@@ -49,7 +52,7 @@
def test_login(self):
response = self.web_get()
if response.status != httplib.FORBIDDEN:
- self.skipTest('Already authenticated')
+ self.skipTest('Already authenticated, "test_response_denied" must have failed')
# login
self.web_login(self.admlogin, self.admpassword)
response = self.web_get()
diff -r 48f468f33704 -r e4580e5f0703 devtools/test/unittest_testlib.py
--- a/devtools/test/unittest_testlib.py Fri Dec 10 12:17:18 2010 +0100
+++ b/devtools/test/unittest_testlib.py Fri Mar 11 09:46:45 2011 +0100
@@ -15,24 +15,23 @@
#
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
-"""unittests for gct.apptest module
-
-"""
+"""unittests for cw.devtools.testlib module"""
from cStringIO import StringIO
-from logilab.common.testlib import (TestCase, unittest_main, TestSuite,
- SkipAwareTextTestRunner)
+from unittest import TextTestRunner
+from logilab.common.testlib import TestSuite, TestCase, unittest_main
from cubicweb.devtools import htmlparser
from cubicweb.devtools.testlib import CubicWebTC
from cubicweb.pytestconf import clean_repo_test_cls
+
class WebTestTC(TestCase):
def setUp(self):
output = StringIO()
- self.runner = SkipAwareTextTestRunner(stream=output)
+ self.runner = TextTestRunner(stream=output)
def test_error_raised(self):
class MyWebTest(CubicWebTC):
diff -r 48f468f33704 -r e4580e5f0703 devtools/testlib.py
--- a/devtools/testlib.py Fri Dec 10 12:17:18 2010 +0100
+++ b/devtools/testlib.py Fri Mar 11 09:46:45 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -25,7 +25,7 @@
import sys
import re
import urlparse
-from os.path import dirname, join
+from os.path import dirname, join, abspath
from urllib import unquote
from math import log
from contextlib import contextmanager
@@ -38,7 +38,7 @@
from logilab.common.debugger import Debugger
from logilab.common.umessage import message_from_string
from logilab.common.decorators import cached, classproperty, clear_cache
-from logilab.common.deprecation import deprecated
+from logilab.common.deprecation import deprecated, class_deprecated
from logilab.common.shellutils import getlogin
from cubicweb import ValidationError, NoSelectableObject, AuthenticationError
@@ -185,6 +185,7 @@
* `repo`, the repository object
* `admlogin`, login of the admin user
* `admpassword`, password of the admin user
+ * `shell`, create and use a shell environment
"""
appid = 'data'
configcls = devtools.ApptestConfiguration
@@ -200,7 +201,7 @@
try:
return cls.__dict__['_config']
except KeyError:
- home = join(dirname(sys.modules[cls.__module__].__file__), cls.appid)
+ home = abspath(join(dirname(sys.modules[cls.__module__].__file__), cls.appid))
config = cls._config = cls.configcls(cls.appid, apphome=home)
config.mode = 'test'
return config
@@ -286,18 +287,29 @@
"""return current server side session (using default manager account)"""
return self.repo._sessions[self._orig_cnx[0].sessionid]
+ def shell(self):
+ """return a shell session object"""
+ from cubicweb.server.migractions import ServerMigrationHelper
+ return ServerMigrationHelper(None, repo=self.repo, cnx=self.cnx,
+ interactive=False,
+ # hack so it doesn't try to load fs schema
+ schema=1)
+
def set_option(self, optname, value):
self.config.global_set_option(optname, value)
def set_debug(self, debugmode):
server.set_debug(debugmode)
+ def debugged(self, debugmode):
+ return server.debugged(debugmode)
+
# default test setup and teardown #########################################
def setUp(self):
# monkey patch send mail operation so emails are sent synchronously
- self._old_mail_commit_event = SendMailOp.commit_event
- SendMailOp.commit_event = SendMailOp.sendmails
+ self._old_mail_postcommit_event = SendMailOp.postcommit_event
+ SendMailOp.postcommit_event = SendMailOp.sendmails
pause_tracing()
previous_failure = self.__class__.__dict__.get('_repo_init_failed')
if previous_failure is not None:
@@ -319,7 +331,7 @@
for cnx in self._cnxs:
if not cnx._closed:
cnx.close()
- SendMailOp.commit_event = self._old_mail_commit_event
+ SendMailOp.postcommit_event = self._old_mail_postcommit_event
def setup_database(self):
"""add your database setup code by overriding this method"""
@@ -344,7 +356,7 @@
user = req.create_entity('CWUser', login=unicode(login),
upassword=password, **kwargs)
req.execute('SET X in_group G WHERE X eid %%(x)s, G name IN(%s)'
- % ','.join(repr(g) for g in groups),
+ % ','.join(repr(str(g)) for g in groups),
{'x': user.eid})
user.cw_clear_relation_cache('in_group', 'subject')
if commit:
@@ -423,6 +435,21 @@
# other utilities #########################################################
+ def grant_permission(self, entity, group, pname, plabel=None):
+ """insert a permission on an entity. Will have to commit the main
+ connection to be considered
+ """
+ pname = unicode(pname)
+ plabel = plabel and unicode(plabel) or unicode(group)
+ e = entity.eid
+ with security_enabled(self.session, False, False):
+ peid = self.execute(
+ 'INSERT CWPermission X: X name %(pname)s, X label %(plabel)s,'
+ 'X require_group G, E require_permission X '
+ 'WHERE G name %(group)s, E eid %(e)s',
+ locals())[0][0]
+ return peid
+
@contextmanager
def temporary_appobjects(self, *appobjects):
self.vreg._loadedmods.setdefault(self.__module__, {})
@@ -434,7 +461,20 @@
for obj in appobjects:
self.vreg.unregister(obj)
- # vregistry inspection utilities ###########################################
+ def assertModificationDateGreater(self, entity, olddate):
+ entity.cw_attr_cache.pop('modification_date', None)
+ self.failUnless(entity.modification_date > olddate)
+
+
+ # workflow utilities #######################################################
+
+ def assertPossibleTransitions(self, entity, expected):
+ transitions = entity.cw_adapt_to('IWorkflowable').possible_transitions()
+ self.assertListEqual(sorted(tr.name for tr in transitions),
+ sorted(expected))
+
+
+ # views and actions registries inspection ##################################
def pviews(self, req, rset):
return sorted((a.__regid__, a.__class__)
@@ -468,9 +508,7 @@
def items(self):
return self
class fake_box(object):
- def mk_action(self, label, url, **kwargs):
- return (label, url)
- def box_action(self, action, **kwargs):
+ def action_link(self, action, **kwargs):
return (action.title, action.url())
submenu = fake_menu()
action.fill_menu(fake_box(), submenu)
@@ -489,7 +527,8 @@
continue
views = [view for view in views
if view.category != 'startupview'
- and not issubclass(view, notification.NotificationView)]
+ and not issubclass(view, notification.NotificationView)
+ and not isinstance(view, class_deprecated)]
if views:
try:
view = viewsvreg._select_best(views, req, rset=rset)
@@ -511,7 +550,7 @@
def list_boxes_for(self, rset):
"""returns the list of boxes that can be applied on `rset`"""
req = rset.req
- for box in self.vreg['boxes'].possible_objects(req, rset=rset):
+ for box in self.vreg['ctxcomponents'].possible_objects(req, rset=rset):
yield box
def list_startup_views(self):
@@ -620,6 +659,10 @@
def init_authentication(self, authmode, anonuser=None):
self.set_option('auth-mode', authmode)
self.set_option('anonymous-user', anonuser)
+ if anonuser is None:
+ self.config.anonymous_credential = None
+ else:
+ self.config.anonymous_credential = (anonuser, anonuser)
req = self.request()
origsession = req.session
req.session = req.cnx = None
@@ -721,10 +764,8 @@
:returns: an instance of `cubicweb.devtools.htmlparser.PageInfo`
encapsulation the generated HTML
"""
- output = None
try:
output = viewfunc(**kwargs)
- return self._check_html(output, view, template)
except (SystemExit, KeyboardInterrupt):
raise
except:
@@ -735,44 +776,107 @@
msg = '[%s in %s] %s' % (klass, view.__regid__, exc)
except:
msg = '[%s in %s] undisplayable exception' % (klass, view.__regid__)
- if output is not None:
- position = getattr(exc, "position", (0,))[0]
- if position:
- # define filter
- output = output.splitlines()
- width = int(log(len(output), 10)) + 1
- line_template = " %" + ("%i" % width) + "i: %s"
- # XXX no need to iterate the whole file except to get
- # the line number
- output = '\n'.join(line_template % (idx + 1, line)
- for idx, line in enumerate(output)
- if line_context_filter(idx+1, position))
- msg += '\nfor output:\n%s' % output
raise AssertionError, msg, tcbk
+ return self._check_html(output, view, template)
+ def get_validator(self, view=None, content_type=None, output=None):
+ if view is not None:
+ try:
+ return self.vid_validators[view.__regid__]()
+ except KeyError:
+ if content_type is None:
+ content_type = view.content_type
+ if content_type is None:
+ content_type = 'text/html'
+ if content_type in ('text/html', 'application/xhtml+xml'):
+ if output and output.startswith(' used in progress widget, unknown in html dtd
output = re.sub('', '', output)
- return validator.parse_string(output.strip())
+ return self.assertWellFormed(validator, output.strip(), context= view.__regid__)
+
+ def assertWellFormed(self, validator, content, context=None):
+ try:
+ return validator.parse_string(content)
+ except (SystemExit, KeyboardInterrupt):
+ raise
+ except:
+ # hijack exception: generative tests stop when the exception
+ # is not an AssertionError
+ klass, exc, tcbk = sys.exc_info()
+ if context is None:
+ msg = u'[%s]' % (klass,)
+ else:
+ msg = u'[%s in %s]' % (klass, context)
+ msg = msg.encode(sys.getdefaultencoding(), 'replace')
+
+ try:
+ str_exc = str(exc)
+ except:
+ str_exc = 'undisplayable exception'
+ msg += str_exc
+ if content is not None:
+ position = getattr(exc, "position", (0,))[0]
+ if position:
+ # define filter
+ if isinstance(content, str):
+ content = unicode(content, sys.getdefaultencoding(), 'replace')
+ content = content.splitlines()
+ width = int(log(len(content), 10)) + 1
+ line_template = " %" + ("%i" % width) + "i: %s"
+ # XXX no need to iterate the whole file except to get
+ # the line number
+ content = u'\n'.join(line_template % (idx + 1, line)
+ for idx, line in enumerate(content)
+ if line_context_filter(idx+1, position))
+ msg += u'\nfor content:\n%s' % content
+ raise AssertionError, msg, tcbk
+
+ def assertDocTestFile(self, testfile):
+ # doctest returns tuple (failure_count, test_count)
+ result = self.shell().process_script(testfile)
+ if result[0] and result[1]:
+ raise self.failureException("doctest file '%s' failed"
+ % testfile)
+
+ # notifications ############################################################
+
+ def assertSentEmail(self, subject, recipients=None, nb_msgs=None):
+ """test recipients in system mailbox for given email subject
+
+ :param subject: email subject to find in mailbox
+ :param recipients: list of email recipients
+ :param nb_msgs: expected number of entries
+ :returns: list of matched emails
+ """
+ messages = [email for email in MAILBOX
+ if email.message.get('Subject') == subject]
+ if recipients is not None:
+ sent_to = set()
+ for msg in messages:
+ sent_to.update(msg.recipients)
+ self.assertSetEqual(set(recipients), sent_to)
+ if nb_msgs is not None:
+ self.assertEqual(len(MAILBOX), nb_msgs)
+ return messages
# deprecated ###############################################################
@@ -966,7 +1070,8 @@
for action in self.list_actions_for(rset):
yield InnerTest(self._testname(rset, action.__regid__, 'action'), self._test_action, action)
for box in self.list_boxes_for(rset):
- yield InnerTest(self._testname(rset, box.__regid__, 'box'), box.render)
+ w = [].append
+ yield InnerTest(self._testname(rset, box.__regid__, 'box'), box.render, w)
@staticmethod
def _testname(rset, objid, objtype):
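
Taken together, the testlib additions above (`shell`, `grant_permission`, `assertPossibleTransitions`, `assertSentEmail`, ...) give test writers a few higher-level assertions. A hedged usage sketch inside a hypothetical test case; the entity type, transition names and email subject are made up for illustration and depend on the cube's schema and workflow:

.. sourcecode:: python

    # Hedged usage sketch for the helpers added above; schema names are hypothetical.
    from cubicweb.devtools.testlib import CubicWebTC

    class TicketSecurityTC(CubicWebTC):

        def test_permission_and_notification(self):
            req = self.request()
            ticket = req.create_entity('Ticket', title=u'crash on save')
            # let the 'users' group update this very entity
            self.grant_permission(ticket, 'users', u'update', plabel=u'ticket updaters')
            self.commit()
            # workflow helper: compare the names of the possible transitions
            self.assertPossibleTransitions(ticket, ['start', 'close'])
            # notification helper: inspect the synchronous test mailbox
            self.assertSentEmail(u'[ticket] crash on save', nb_msgs=1)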
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/admin/cubicweb-ctl.rst
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/book/en/admin/cubicweb-ctl.rst Fri Mar 11 09:46:45 2011 +0100
@@ -0,0 +1,122 @@
+.. -*- coding: utf-8 -*-
+
+.. _cubicweb-ctl:
+
+``cubicweb-ctl`` tool
+=====================
+
+`cubicweb-ctl` is the swiss knife to manage *CubicWeb* instances.
+The general syntax is ::
+
+ cubicweb-ctl [options command]
+
+To view available commands ::
+
+ cubicweb-ctl
+ cubicweb-ctl --help
+
+Please note that the available commands depend on the *CubicWeb* packages
+and cubes that have been installed.
+
+To view the help menu of a specific command ::
+
+ cubicweb-ctl <command> --help
+
+Listing available cubes and instances
+--------------------------------------
+
+* ``list``, provides a list of the available configurations, cubes
+ and instances.
+
+
+Creation of a new cube
+-----------------------
+
+Create your new cube ::
+
+ cubicweb-ctl newcube
+
+This will create a new cube in
+``/path/to/forest/cubicweb/cubes/`` for a Mercurial forest
+installation, or in ``/usr/share/cubicweb/cubes`` for a debian
+packages installation.
+
+Create an instance
+-------------------
+
+You must ensure `~/cubicweb.d/` exists prior to this. On Windows, the
+'~' part will probably expand to 'Documents and Settings/user'.
+
+To create an instance from an existing cube, execute the following
+command ::
+
+ cubicweb-ctl create
+
+This command will create the configuration files of an instance in
+``~/etc/cubicweb.d/``.
+
+The tool ``cubicweb-ctl`` executes the command ``db-create`` and
+``db-init`` when you run ``create`` so that you can complete an
+instance creation in a single command. But of course it is possible
+to issue these commands separately, at a later stage.
+
+Command to create/initialize an instance database
+-------------------------------------------------
+
+* ``db-create``, creates the system database of an instance (tables and
+ extensions only)
+* ``db-init``, initializes the system database of an instance
+ (schema, groups, users, workflows...)
+
+Commands to control instances
+-----------------------------
+
+* ``start``, starts one or more or all instances
+
+Of special interest::
+
+ start -D
+
+will start in debug mode (under Windows, starting without -D will not
+work; you instead need to set up your instance as a service).
+
+* ``stop``, stops one or more or all instances
+* ``restart``, restarts one or more or all instances
+* ``status``, returns the status of the instance(s)
+
+Commands to maintain instances
+------------------------------
+
+* ``upgrade``, launches the existing instances migration when a new version
+ of *CubicWeb* or the cubes installed is available
+* ``shell``, opens a (Python based) migration shell for manual maintenance of the instance
+* ``db-dump``, creates a dump of the system database
+* ``db-restore``, restores a dump of the system database
+* ``db-check``, checks data integrity of an instance. If the automatic correction
+ is activated, it is recommended to create a dump before this operation.
+* ``schema-sync``, synchronizes the persistent schema of an instance with
+ the instance schema. It is recommended to create a dump before this operation.
+
+Commands to maintain i18n catalogs
+----------------------------------
+* ``i18ncubicweb``, regenerates messages catalogs of the *CubicWeb* library
+* ``i18ncube``, regenerates the messages catalogs of a cube
+* ``i18ninstance``, recompiles the messages catalogs of an instance.
+ This is automatically done while upgrading.
+
+See also chapter :ref:`internationalization`.
+
+Other commands
+--------------
+* ``delete``, deletes an instance (configuration files and database)
+
+Command to create an instance for Google AppEngine datastore source
+-------------------------------------------------------------------
+* ``newgapp``, creates the configuration files for an instance
+
+This command needs to be followed by the commands responsible for
+the database initialization. As those are specific to the `datastore`,
+the Google AppEngine-specific database, they are not available for now
+in cubicweb-ctl, but they are available in the created instance.
+
+For more details, please see :ref:`GoogleAppEngineSource` .
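
The page above documents the command line, but the same maintenance operations are routinely scripted. A hedged Python sketch that drives `cubicweb-ctl` through `subprocess`, using only sub-commands listed above; the instance name is a placeholder:

.. sourcecode:: python

    # Hedged sketch: scripting documented cubicweb-ctl sub-commands from Python.
    import subprocess

    def ctl(*args):
        subprocess.check_call(('cubicweb-ctl',) + args)

    ctl('db-dump', 'myinstance')       # dump before risky operations, as advised
    ctl('upgrade', 'myinstance')       # migrate after installing a new version
    ctl('i18ninstance', 'myinstance')  # recompile message catalogs
    ctl('restart', 'myinstance')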
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/admin/index.rst
--- a/doc/book/en/admin/index.rst Fri Dec 10 12:17:18 2010 +0100
+++ b/doc/book/en/admin/index.rst Fri Mar 11 09:46:45 2011 +0100
@@ -14,6 +14,7 @@
:numbered:
setup
+ cubicweb-ctl
create-instance
instance-config
site-config
@@ -23,16 +24,5 @@
gae
migration
additional-tips
-
-RQL logs
---------
+ rql-logs
-You can configure the *CubicWeb* instance to keep a log
-of the queries executed against your database. To do so,
-edit the configuration file of your instance
-``.../etc/cubicweb.d/myapp/all-in-one.conf`` and uncomment the
-variable ``query-log-file``::
-
- # web instance query log file
- query-log-file=/tmp/rql-myapp.log
-
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/admin/instance-config.rst
--- a/doc/book/en/admin/instance-config.rst Fri Dec 10 12:17:18 2010 +0100
+++ b/doc/book/en/admin/instance-config.rst Fri Mar 11 09:46:45 2011 +0100
@@ -4,6 +4,11 @@
Configure an instance
=====================
+On a Unix system, the instances are usually stored in the directory
+:file:`/etc/cubicweb.d/`. During development, the
+:file:`~/etc/cubicweb.d/` directory is looked up, as well as the paths
+in the :envvar:`CW_INSTANCES_DIR` environment variable.
+
While creating an instance, a configuration file is generated in::
$ (CW_INSTANCES_DIR) / / .conf
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/admin/rql-logs.rst
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/book/en/admin/rql-logs.rst Fri Mar 11 09:46:45 2011 +0100
@@ -0,0 +1,14 @@
+.. -*- coding: utf-8 -*-
+
+RQL logs
+========
+
+You can configure the *CubicWeb* instance to keep a log
+of the queries executed against your database. To do so,
+edit the configuration file of your instance
+``.../etc/cubicweb.d/myapp/all-in-one.conf`` and uncomment the
+variable ``query-log-file``::
+
+ # web instance query log file
+ query-log-file=/tmp/rql-myapp.log
+
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/admin/setup.rst
--- a/doc/book/en/admin/setup.rst Fri Dec 10 12:17:18 2010 +0100
+++ b/doc/book/en/admin/setup.rst Fri Mar 11 09:46:45 2011 +0100
@@ -2,7 +2,7 @@
.. _SetUpEnv:
-Installation and set-up of a *CubicWeb* environment
+Installation and set-up of a |cubicweb| environment
===================================================
Installation of `Cubicweb` and its dependencies
@@ -68,8 +68,8 @@
`cubicweb with postgresql datatabase`_ and `cubicweb-mysql-support` contains
necessary dependency for using `cubicweb with mysql database`_ .
-There is also a wide variety of :ref:`cubes ` listed on the `CubicWeb.org Forge`_
-available as debian packages and tarball.
+There is also a wide variety of :ref:`cubes ` listed on the
+`CubicWeb.org Forge`_ available as debian packages and tarball.
The repositories are signed with `Logilab's gnupg key`_. To avoid warning on
"apt-get update":
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/annexes/cubicweb-ctl.rst
--- a/doc/book/en/annexes/cubicweb-ctl.rst Fri Dec 10 12:17:18 2010 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,122 +0,0 @@
-.. -*- coding: utf-8 -*-
-
-.. _cubicweb-ctl:
-
-``cubicweb-ctl`` tool
-=====================
-
-`cubicweb-ctl` is the swiss knife to manage *CubicWeb* instances.
-The general syntax is ::
-
- cubicweb-ctl [options command]
-
-To view available commands ::
-
- cubicweb-ctl
- cubicweb-ctl --help
-
-Please note that the commands available depends on the *CubicWeb* packages
-and cubes that have been installed.
-
-To view the help menu on specific command ::
-
- cubicweb-ctl --help
-
-Listing available cubes and instance
--------------------------------------
-
-* ``list``, provides a list of the available configuration, cubes
- and instances.
-
-
-Creation of a new cube
------------------------
-
-Create your new cube cube ::
-
- cubicweb-ctl newcube
-
-This will create a new cube in
-``/path/to/forest/cubicweb/cubes/`` for a Mercurial forest
-installation, or in ``/usr/share/cubicweb/cubes`` for a debian
-packages installation.
-
-Create an instance
--------------------
-
-You must ensure `~/cubicweb.d/` exists prior to this. On windows, the
-'~' part will probably expand to 'Documents and Settings/user'.
-
-To create an instance from an existing cube, execute the following
-command ::
-
- cubicweb-ctl create
-
-This command will create the configuration files of an instance in
-``~/etc/cubicweb.d/``.
-
-The tool ``cubicweb-ctl`` executes the command ``db-create`` and
-``db-init`` when you run ``create`` so that you can complete an
-instance creation in a single command. But of course it is possible
-to issue these separate commands separately, at a later stage.
-
-Command to create/initialize an instance database
--------------------------------------------------
-
-* ``db-create``, creates the system database of an instance (tables and
- extensions only)
-* ``db-init``, initializes the system database of an instance
- (schema, groups, users, workflows...)
-
-Commands to control instances
------------------------------
-
-* ``start``, starts one or more or all instances
-
-of special interest::
-
- start -D
-
-will start in debug mode (under windows, starting without -D will not
-work; you need instead to setup your instance as a service).
-
-* ``stop``, stops one or more or all instances
-* ``restart``, restarts one or more or all instances
-* ``status``, returns the status of the instance(s)
-
-Commands to maintain instances
-------------------------------
-
-* ``upgrade``, launches the existing instances migration when a new version
- of *CubicWeb* or the cubes installed is available
-* ``shell``, opens a migration shell for manual maintenance of the instance
-* ``db-dump``, creates a dump of the system database
-* ``db-restore``, restores a dump of the system database
-* ``db-check``, checks data integrity of an instance. If the automatic correction
- is activated, it is recommanded to create a dump before this operation.
-* ``schema-sync``, synchronizes the persistent schema of an instance with
- the instance schema. It is recommanded to create a dump before this operation.
-
-Commands to maintain i18n catalogs
-----------------------------------
-* ``i18ncubicweb``, regenerates messages catalogs of the *CubicWeb* library
-* ``i18ncube``, regenerates the messages catalogs of a cube
-* ``i18ninstance``, recompiles the messages catalogs of an instance.
- This is automatically done while upgrading.
-
-See also chapter :ref:`internationalization`.
-
-Other commands
---------------
-* ``delete``, deletes an instance (configuration files and database)
-
-Command to create an instance for Google AppEngine datastore source
--------------------------------------------------------------------
-* ``newgapp``, creates the configuration files for an instance
-
-This command needs to be followed by the commands responsible for
-the database initialization. As those are specific to the `datastore`,
-specific Google AppEgine database, they are not available for now
-in cubicweb-ctl, but they are available in the instance created.
-
-For more details, please see :ref:`GoogleAppEngineSource` .
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/annexes/faq.rst
--- a/doc/book/en/annexes/faq.rst Fri Dec 10 12:17:18 2010 +0100
+++ b/doc/book/en/annexes/faq.rst Fri Mar 11 09:46:45 2011 +0100
@@ -1,7 +1,7 @@
.. -*- coding: utf-8 -*-
-Frequently Asked Questions
-==========================
+Frequently Asked Questions (FAQ)
+================================
[XXX 'copy answer from forum' means reusing text from
http://groups.google.com/group/google-appengine/browse_frm/thread/c9476925f5f66ec6
@@ -73,7 +73,7 @@
Why is the RQL query language looking similar to X ?
------------------------------------------------------
+----------------------------------------------------
It may remind you of SQL but it is higher level than SQL, more like
SPARQL. Except that SPARQL did not exist when we started the project.
@@ -97,13 +97,57 @@
Which ajax library is CubicWeb using ?
--------------------------------------
-CubicWeb uses jQuery and provides a few helpers on top of
-that. Additionally, some jQuery plugins are provided (some are
-provided in specific cubes).
+CubicWeb uses jQuery_ and provides a few helpers on top of that. Additionally,
+some jQuery plugins are provided (some are provided in specific cubes).
+
+.. _jQuery: http://jquery.com
+
Development
```````````
+How to change the instance logo ?
+---------------------------------
+
+There are two ways of changing the logo.
+
+1. The easiest way to use a different logo is to replace the existing
+ ``logo.png`` in ``myapp/data`` by your preferred icon and refresh.
+ By default all instances will look for a ``logo.png`` to be
+ rendered in the logo section.
+
+ .. image:: ../images/lax-book_06-main-template-logo_en.png
+
+2. In your cube directory, you can specify which file to use for the logo.
+ This is configurable in ``mycube/uiprops.py``: ::
+
+ LOGO = data('mylogo.gif')
+
+ ``mylogo.gif`` is in the ``mycube/data`` directory.
+
+How to create an anonymous user ?
+---------------------------------
+
+This allows browsing the site without being authenticated. In the
+``all-in-one.conf`` file of your instance, define the anonymous user
+as follows ::
+
+ # login of the CubicWeb user account to use for anonymous user (if you want to
+ # allow anonymous)
+ anonymous-user=anon
+
+ # password of the CubicWeb user account matching login
+ anonymous-password=anon
+
+You must also ensure that this `anon` user is a registered user of
+the DB backend. If not, you can create it through the administration
+interface of your instance by adding a user in the group `guests`.
+
+.. note::
+ While creating a new instance, you can decide to allow access
+ to the anonymous user, which will automatically set up what is
+ described above.
+
How to load data from a script ?
--------------------------------
@@ -117,42 +161,29 @@
cnx = dbapi.connect(database='instance-id', user='admin', password='admin')
cur = cnx.cursor()
- for name in ('Personal', 'Professional', 'Computers'):
- cur.execute('INSERT Blog B: B name %s', name)
+ for name in (u'Personal', u'Professional', u'Computers'):
+ cur.execute('INSERT Tag T: T name %(n)s', {'n': name})
cnx.commit()
+Whether your instance has pyro activated or not, you can still achieve this by
+using cubicweb-ctl shell scripts.
How to format an entity date attribute ?
----------------------------------------
-If your schema has an attribute of type Date or Datetime, you might
-want to format it. First, you should define your preferred format using
-the site configuration panel ``http://appurl/view?vid=systempropertiesform``
-and then set ``ui.date`` and/or ``ui.datetime``.
-Then in the view code, use:
+If your schema has an attribute of type `Date` or `Datetime`, you usually want to
+format it when displaying it. First, you should define your preferred format
+using the site configuration panel
+``http://appurl/view?vid=systempropertiesform`` and then set ``ui.date`` and/or
+``ui.datetime``. Then in the view code, use:
.. sourcecode:: python
- self.format_date(entity.date_attribute)
-
-What is the CubicWeb datatype corresponding to GAE datastore's UserProperty ?
------------------------------------------------------------------------------
+ entity.printable_value(date_attribute)
-If you take a look at your instance schema and
-click on "display detailed view of metadata" you will see that there
-is a Euser entity in there. That's the one that is modeling users. The
-thing that corresponds to a UserProperty is a relationship between
-your entity and the Euser entity. As in:
-
-.. sourcecode:: python
-
- class TodoItem(EntityType):
- text = String()
- todo_by = SubjectRelation('Euser')
-
-[XXX check that cw handle users better by mapping Google Accounts to local Euser
-entities automatically]
-
+which will always return a string whatever the attribute's type (so it's
+recommended also for other attribute types). By default it expects to generate
+HTML, so it deals with rich text formatting, XML escaping...
How do I translate an msg id defined (and translated) in another cube ?
-----------------------------------------------------------------------
@@ -160,46 +191,6 @@
You should put these translations in the `i18n/static-messages.pot`
file of your own cube.
-
-What is `Error while publishing rest text ...` ?
-------------------------------------------------
-
-While modifying the description of an entity, you get an error message in
-the instance `Error while publishing ...` for Rest text and plain text.
-The server returns a traceback like as follows ::
-
- 2008-10-06 15:05:08 - (cubicweb.rest) ERROR: error while publishing ReST text
- Traceback (most recent call last):
- File "/home/user/src/blogdemo/cubicweb/common/rest.py", line 217, in rest_publish
- File "/usr/lib/python2.5/codecs.py", line 817, in open
- file = __builtin__.open(filename, mode, buffering)
- TypeError: __init__() takes at most 3 arguments (4 given)
-
-This can be fixed by applying the patch described in :
-http://code.google.com/p/googleappengine/issues/detail?id=48
-
-What are hooks used for ?
--------------------------
-
-Hooks are executed around (actually before or after) events. The
-most common events are data creation, update and deletion. They
-permit additional constraint checking (those not expressible at the
-schema level), pre and post computations depending on data
-movements.
-
-As such, they are a vital part of the framework.
-
-Other kinds of hooks, called Operations, are available
-for execution just before commit.
-
-When should you define an HTML template rather than define a graphical component ?
-----------------------------------------------------------------------------------
-
-An HTML template cannot contain code, hence it is only about static
-content. A component is made of code and operations that apply on a
-well defined context (request, result set). It enables much more
-dynamic views.
-
How to update a database after a schema modification ?
------------------------------------------------------
@@ -212,51 +203,76 @@
* add a relation: ``add_relation_definition('SubjRelation', 'MyRelation', 'ObjRelation')``.
+I get `NoSelectableObject` exceptions, how do I debug selectors ?
+-----------------------------------------------------------------
-How to create an anonymous user ?
----------------------------------
+You just need to put the appropriate context manager around view/component
+selection. One standard place for components is in cubicweb/vregistry.py:
+
+.. sourcecode:: python
-This allows to bypass authentication for your site. In the
-``all-in-one.conf`` file of your instance, define the anonymous user
-as follows ::
+ def possible_objects(self, *args, **kwargs):
+ """return an iterator on possible objects in this registry for the given
+ context
+ """
+ from cubicweb.selectors import traced_selection
+ with traced_selection():
+ for appobjects in self.itervalues():
+ try:
+ yield self._select_best(appobjects, *args, **kwargs)
+ except NoSelectableObject:
+ continue
- # login of the CubicWeb user account to use for anonymous user (if you want to
- # allow anonymous)
- anonymous-user=anon
+Don't forget the 'from __future__ import with_statement' at the module
+top-level if you're using python 2.5.
+
+This will yield additional WARNINGs, like this::
+
+ 2009-01-09 16:43:52 - (cubicweb.selectors) WARNING: selector one_line_rset returned 0 for
- # password of the CubicWeb user account matching login
- anonymous-password=anon
+For views, you can put this context in `cubicweb/web/views/basecontrollers.py` in
+the `ViewController`:
+
+.. sourcecode:: python
-You also must ensure that this `anon` user is a registered user of
-the DB backend. If not, you can create through the administation
-interface of your instance by adding a user with the role `guests`.
-This could be the admin account (for development
-purposes, of course).
+ def _select_view_and_rset(self, rset):
+ ...
+ try:
+ from cubicweb.selectors import traced_selection
+ with traced_selection():
+ view = self._cw.vreg['views'].select(vid, req, rset=rset)
+ except ObjectNotFound:
+ self.warning("the view %s could not be found", vid)
+ req.set_message(req._("The view %s could not be found") % vid)
+ vid = vid_from_rset(req, rset, self._cw.vreg.schema)
+ view = self._cw.vreg['views'].select(vid, req, rset=rset)
+ ...
-.. note::
- While creating a new instance, you can decide to allow access
- to anonymous user, which will automatically execute what is
- decribed above.
+I get "database is locked" when executing tests
+-----------------------------------------------
+
+If you have "database is locked" as error when you are executing security tests,
+it is usually because commit or rollback are missing before login() calls.
+
+You can also use a context manager, to avoid such errors, as described
+here: :ref:`securitytest`.
-How to change the instance logo ?
-------------------------------------
+What are hooks used for ?
+-------------------------
-There are two ways of changing the logo.
+Hooks are executed around (actually before or after) events. The most common
+events are data creation, update and deletion. They permit additional constraint
+checking (those not expressible at the schema level), pre and post computations
+depending on data movements.
-1. The easiest way to use a different logo is to replace the existing
- ``logo.png`` in ``myapp/data`` by your prefered icon and refresh.
- By default all instance will look for a ``logo.png`` to be
- rendered in the logo section.
-
- .. image:: ../images/lax-book_06-main-template-logo_en.png
+As such, they are a vital part of the framework.
-2. In your cube directory, you can specify which file to use for the logo.
- This is configurable in ``mycube/data/external_resources``: ::
+Other kinds of hooks, called Operations, are available
+for execution just before commit.
- LOGO = DATADIR/path/to/mylogo.gif
+For more information, read :ref:`hooks` section.
- where DATADIR is ``mycube/data``.
Configuration
`````````````
@@ -264,31 +280,7 @@
How to configure a LDAP source ?
--------------------------------
-Your instance's sources are defined in ``/etc/cubicweb.d/myapp/sources``.
-Configuring an LDAP source is about declaring that source in your
-instance configuration file such as: ::
-
- [ldapuser]
- adapter=ldapuser
- # ldap host
- host=myhost
- # base DN to lookup for usres
- user-base-dn=ou=People,dc=mydomain,dc=fr
- # user search scope
- user-scope=ONELEVEL
- # classes of user
- user-classes=top,posixAccount
- # attribute used as login on authentication
- user-login-attr=uid
- # name of a group in which ldap users will be by default
- user-default-group=users
- # map from ldap user attributes to cubicweb attributes
- user-attrs-map=gecos:email,uid:login
-
-Any change applied to configuration file requires to restart your
-instance.
-
-You can find additional information in the section :ref:`LDAP`.
+See :ref:`LDAP`.
How to import LDAP users in |cubicweb| ?
----------------------------------------
@@ -350,34 +342,6 @@
cnx.close()
-I get NoSelectableObject exceptions, how do I debug selectors ?
----------------------------------------------------------------
-
-You just need to put the appropriate context manager around view/component
-selection (one standard place in in vreg.py):
-
-.. sourcecode:: python
-
- def possible_objects(self, registry, *args, **kwargs):
- """return an iterator on possible objects in a registry for this result set
-
- actions returned are classes, not instances
- """
- from cubicweb.selectors import traced_selection
- with traced_selection():
- for vobjects in self.registry(registry).values():
- try:
- yield self.select(vobjects, *args, **kwargs)
- except NoSelectableObject:
- continue
-
-Don't forget the 'from __future__ import with_statement' at the module
-top-level.
-
-This will yield additional WARNINGs, like this::
-
- 2009-01-09 16:43:52 - (cubicweb.selectors) WARNING: selector one_line_rset returned 0 for
-
Security
````````
@@ -410,7 +374,7 @@
Be careful, the encryption algorithm is different on Windows and on
Unix. You cannot therefore use a hash generated on Unix to fill in a
-Windows database, nor the other way round.
+Windows database, nor the other way round.
You can prefer use a migration script similar to this shell invocation instead::
@@ -434,7 +398,8 @@
remove {'PR': 'Project', 'C': 'CWUser'} from solutions since your_user has no read access to cost
-This is because you have to put your user in the "users" group. The user has to be in both groups.
+This is because you have to put your user in the "users" group. The user has to
+be in both groups.
How is security implemented ?
------------------------------
@@ -491,9 +456,7 @@
Is it possible to bypass security from the UI (web front) part ?
----------------------------------------------------------------
-No.
-
-Only Hooks/Operations can do that.
+No. Only Hooks/Operations can do that.
Can PostgreSQL and CubicWeb authentication work with kerberos ?
----------------------------------------------------------------
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/annexes/index.rst
--- a/doc/book/en/annexes/index.rst Fri Dec 10 12:17:18 2010 +0100
+++ b/doc/book/en/annexes/index.rst Fri Mar 11 09:46:45 2011 +0100
@@ -13,9 +13,7 @@
:numbered:
faq
- cubicweb-ctl
rql/index
mercurial
depends
- javascript-api
docstrings-conventions
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/annexes/rql/index.rst
--- a/doc/book/en/annexes/rql/index.rst Fri Dec 10 12:17:18 2010 +0100
+++ b/doc/book/en/annexes/rql/index.rst Fri Mar 11 09:46:45 2011 +0100
@@ -1,4 +1,4 @@
-.. _RQLChapter
+.. _RQLChapter:
Relation Query Language (RQL)
=============================
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/devrepo/cubes/available-cubes.rst
--- a/doc/book/en/devrepo/cubes/available-cubes.rst Fri Dec 10 12:17:18 2010 +0100
+++ b/doc/book/en/devrepo/cubes/available-cubes.rst Fri Mar 11 09:46:45 2011 +0100
@@ -1,3 +1,4 @@
+.. _AvailableCubes:
Available cubes
---------------
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/devrepo/cubes/cc-newcube.rst
--- a/doc/book/en/devrepo/cubes/cc-newcube.rst Fri Dec 10 12:17:18 2010 +0100
+++ b/doc/book/en/devrepo/cubes/cc-newcube.rst Fri Mar 11 09:46:45 2011 +0100
@@ -1,5 +1,5 @@
-Creating a new cube from scratch using :command:`cubicweb-ctl newcube`
-----------------------------------------------------------------------
+Creating a new cube from scratch
+--------------------------------
Let's start by creating the cube environment in which we will develop ::
@@ -14,7 +14,7 @@
hg ci
If all went well, you should see the cube you just created in the list
-returned by ``cubicweb-ctl list`` in the *Available cubes* section.
+returned by ``cubicweb-ctl list`` in the *Available cubes* section.
If not, please refer to :ref:`ConfigurationEnv`.
To reuse an existing cube, add it to the list named
@@ -24,6 +24,14 @@
database for the instance is created (import_erschema('MyCube') will
not properly work otherwise).
+On a Unix system, the available cubes are usually stored in the
+directory :file:`/usr/share/cubicweb/cubes`. If you are using the
+cubicweb mercurial repository (:ref:`SourceInstallation`), cubes
+are looked up in the directory
+:file:`/path/to/cubicweb_toplevel/cubes`. In this configuration,
+cubicweb itself ought to be located at
+:file:`/path/to/cubicweb_toplevel/cubicweb`.
+
.. note::
Please note that if you do not wish to use default directory for your cubes
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/devrepo/datamodel/define-workflows.rst
--- a/doc/book/en/devrepo/datamodel/define-workflows.rst Fri Dec 10 12:17:18 2010 +0100
+++ b/doc/book/en/devrepo/datamodel/define-workflows.rst Fri Mar 11 09:46:45 2011 +0100
@@ -8,14 +8,13 @@
General
-------
-A workflow describes how certain entities have to evolve between
-different states. Hence we have a set of states, and a "transition
-graph", i.e. a set of possible transitions from one state to another
-state.
+A workflow describes how certain entities have to evolve between different
+states. Hence we have a set of states, and a "transition graph", i.e. a set of
+possible transitions from one state to another state.
-We will define a simple workflow for a blog, with only the following
-two states: `submitted` and `published`. So first, we create a simple
-|cubicweb| instance in five minutes (see :ref:`BlogFiveMinutes`).
+We will define a simple workflow for a blog, with only the following two states:
+`submitted` and `published`. You may want to take a look at :ref:`TutosBase` if
+you want to quickly set up an instance running a blog.
Setting up a workflow
---------------------
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/devrepo/datamodel/definition.rst
--- a/doc/book/en/devrepo/datamodel/definition.rst Fri Dec 10 12:17:18 2010 +0100
+++ b/doc/book/en/devrepo/datamodel/definition.rst Fri Mar 11 09:46:45 2011 +0100
@@ -304,7 +304,7 @@
* we associate rights at the entities/relations schema level
-* the default groups are: `administrators`, `users` and `guests`
+* the default groups are: `managers`, `users` and `guests`
* users belong to the `users` group
@@ -334,6 +334,34 @@
provided if the user is in one of the listed groups or if one of the RQL condition
is satisfied.
+Default permissions
+```````````````````
+
+The default permissions for ``EntityType`` are:
+
+.. sourcecode:: python
+
+ __permissions__ = {
+ 'read': ('managers', 'users', 'guests',),
+ 'update': ('managers', 'owners',),
+ 'delete': ('managers', 'owners'),
+ 'add': ('managers', 'users',)
+ }
+
+The default permissions for relations are:
+
+.. sourcecode:: python
+
+ __permissions__ = {'read': ('managers', 'users', 'guests',),
+ 'delete': ('managers', 'users'),
+ 'add': ('managers', 'users',)}
+
+The default permissions for attributes are:
+
+.. sourcecode:: python
+
+ __permissions__ = {'read': ('managers', 'users', 'guests',),
+ 'update': ('managers', ERQLExpression('U has_update_permission X')),}
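+
+These defaults can be overridden per entity type or relation in a cube's
+schema. Here is a minimal sketch; the `Ticket` entity type and its attribute
+are purely illustrative:
+
+.. sourcecode:: python
+
+    from yams.buildobjs import EntityType, String
+
+    class Ticket(EntityType):
+        # read restricted to authenticated users, write to managers and owners
+        __permissions__ = {
+            'read':   ('managers', 'users'),
+            'add':    ('managers', 'users'),
+            'update': ('managers', 'owners'),
+            'delete': ('managers', 'owners'),
+            }
+        title = String(required=True, maxsize=128)
+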
The standard user groups
````````````````````````
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/devrepo/devcore/cwconfig.rst
--- a/doc/book/en/devrepo/devcore/cwconfig.rst Fri Dec 10 12:17:18 2010 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,5 +0,0 @@
-Configuration
--------------
-
-.. automodule:: cubicweb.cwconfig
- :members:
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/devrepo/devcore/index.rst
--- a/doc/book/en/devrepo/devcore/index.rst Fri Dec 10 12:17:18 2010 +0100
+++ b/doc/book/en/devrepo/devcore/index.rst Fri Mar 11 09:46:45 2011 +0100
@@ -6,5 +6,4 @@
dbapi.rst
reqbase.rst
- cwconfig.rst
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/devrepo/repo/hooks.rst
--- a/doc/book/en/devrepo/repo/hooks.rst Fri Dec 10 12:17:18 2010 +0100
+++ b/doc/book/en/devrepo/repo/hooks.rst Fri Mar 11 09:46:45 2011 +0100
@@ -1,162 +1,27 @@
.. -*- coding: utf-8 -*-
-
.. _hooks:
Hooks and Operations
====================
-Generalities
-------------
-
-Paraphrasing the `emacs`_ documentation, let us say that hooks are an
-important mechanism for customizing an application. A hook is
-basically a list of functions to be called on some well-defined
-occasion (this is called `running the hook`).
-
-.. _`emacs`: http://www.gnu.org/software/emacs/manual/html_node/emacs/Hooks.html
-
-In CubicWeb, hooks are subclasses of the Hook class in
-`server/hook.py`, implementing their own `call` method, and selected
-over a set of pre-defined `events` (and possibly more conditions,
-hooks being selectable AppObjects like views and components).
-
-There are two families of events: data events and server events. In a
-typical application, most of the Hooks are defined over data
-events.
-
-The purpose of data hooks is to complement the data model as defined
-in the schema.py, which is static by nature, with dynamic or value
-driven behaviours. It is functionally equivalent to a `database
-trigger`_, except that database triggers definition languages are not
-standardized, hence not portable (for instance, PL/SQL works with
-Oracle and PostgreSQL but not SqlServer nor Sqlite).
-
-.. _`database trigger`: http://en.wikipedia.org/wiki/Database_trigger
-
-Data hooks can serve the following purposes:
-
-* enforcing constraints that the static schema cannot express
- (spanning several entities/relations, exotic value ranges and
- cardinalities, etc.)
-
-* implement computed attributes
-
-Operations are Hook-like objects that may be created by Hooks and
-scheduled to happen just before (or after) the `commit` event. Hooks
-being fired immediately on data operations, it is sometime necessary
-to delay the actual work down to a time where all other Hooks have
-run, for instance a validation check which needs that all relations be
-already set on an entity. Also while the order of execution of Hooks
-is data dependant (and thus hard to predict), it is possible to force
-an order on Operations.
-
-Operations also may be used to process various side effects associated
-with a transaction such as filesystem udpates, mail notifications,
-etc.
-
-Operations are subclasses of the Operation class in `server/hook.py`,
-implementing `precommit_event` and other standard methods (wholly
-described in :ref:`operations_api`).
-
-.. hint::
-
- It is a good practice, to write unit tests for each hook. See an example in :ref:`hook_test`
-
-Events
-------
-
-Hooks are mostly defined and used to handle `dataflow`_ operations. It
-means as data gets in (entities added, updated, relations set or
-unset), specific events are issued and the Hooks matching these events
-are called.
-
-.. _`dataflow`: http://en.wikipedia.org/wiki/Dataflow
-
-Below comes a list of the dataflow events related to entities operations:
-
-* before_add_entity
-
-* before_update_entity
-
-* before_delete_entity
-
-* after_add_entity
-
-* after_update_entity
-
-* after_delete_entity
-
-These define ENTTIES HOOKS. RELATIONS HOOKS are defined
-over the following events:
-
-* after_add_relation
-
-* after_delete_relation
-
-* before_add_relation
-
-* before_delete_relation
-
-This is an occasion to remind us that relations support the add/delete
-operation, but no update.
-
-Non data events also exist. These are called SYSTEM HOOKS.
-
-* server_startup
-
-* server_shutdown
-
-* server_maintenance
-
-* server_backup
-
-* server_restore
-
-* session_open
-
-* session_close
+.. autodocstring:: cubicweb.server.hook
-Using dataflow Hooks
---------------------
-
-Dataflow hooks either automate data operations or maintain the
-consistency of the data model. In the later case, we must use a
-specific exception named ValidationError
-
-Validation Errors
-~~~~~~~~~~~~~~~~~
-
-When a condition is not met in a Hook/Operation, it must raise a
-`ValidationError`. Raising anything but a (subclass of)
-ValidationError is a programming error. Raising a ValidationError
-entails aborting the current transaction.
+Example using dataflow hooks
+----------------------------
-The ValidationError exception is used to convey enough information up
-to the user interface. Hence its constructor is different from the
-default Exception constructor. It accepts, positionally:
-
-* an entity eid,
-
-* a dict whose keys represent attribute (or relation) names and values
- an end-user facing message (hence properly translated) relating the
- problem.
-
-An entity hook
-~~~~~~~~~~~~~~
-
-We will use a very simple example to show hooks usage. Let us start
-with the following schema.
+We will use a very simple example to show hooks usage. Let us start with the
+following schema.
.. sourcecode:: python
class Person(EntityType):
age = Int(required=True)
-We would like to add a range constraint over a person's age. Let's
-write an hook. It shall be placed into mycube/hooks.py. If this file
-were to grow too much, we can easily have a mycube/hooks/... package
-containing hooks in various modules.
+We would like to add a range constraint over a person's age. Let's write a hook
+for this (assuming, for the sake of the example, that yams cannot express this
+natively, although it actually can). It shall be placed into `mycube/hooks.py`.
+If this file were to grow too much, we can easily turn it into a
+`mycube/hooks/...` package containing hooks in various modules.
.. sourcecode:: python
@@ -166,68 +31,30 @@
class PersonAgeRange(Hook):
__regid__ = 'person_age_range'
+ __select__ = Hook.__select__ & is_instance('Person')
events = ('before_add_entity', 'before_update_entity')
- __select__ = Hook.__select__ & is_instance('Person')
def __call__(self):
- if 0 >= self.entity.age <= 120:
- return
- msg = self._cw._('age must be between 0 and 120')
- raise ValidationError(self.entity.eid, {'age': msg})
-
-Hooks being AppObjects like views, they have a __regid__ and a
-__select__ class attribute. The base __select__ is augmented with an
-`is_instance` selector matching the desired entity type. The `events`
-tuple is used by the Hook.__select__ base selector to dispatch the
-hook on the right events. In an entity hook, it is possible to
-dispatch on any entity event (e.g. 'before_add_entity',
-'before_update_entity') at once if needed.
+ if 'age' in self.entity.cw_edited:
+ if 0 <= self.entity.age <= 120:
+ return
+ msg = self._cw._('age must be between 0 and 120')
+ raise ValidationError(self.entity.eid, {'age': msg})
-Like all appobjects, hooks have the `self._cw` attribute which
-represents the current session. In entity hooks, a `self.entity`
-attribute is also present.
-
-
-A relation hook
-~~~~~~~~~~~~~~~
-
-Let us add another entity type with a relation to person (in
-mycube/schema.py).
-
-.. sourcecode:: python
+In our example the base `__select__` is augmented with an `is_instance` selector
+matching the desired entity type.
- class Company(EntityType):
- name = String(required=True)
- boss = SubjectRelation('Person', cardinality='1*')
+The `events` tuple is used to specify that our hook should be called before the
+entity is added or updated.
-We would like to constrain the company's bosses to have a minimum
-(legal) age. Let's write an hook for this, which will be fired when
-the `boss` relation is established.
-
-.. sourcecode:: python
-
- class CompanyBossLegalAge(Hook):
- __regid__ = 'company_boss_legal_age'
- events = ('before_add_relation',)
- __select__ = Hook.__select__ & match_rtype('boss')
+Then in the hook's `__call__` method, we:
- def __call__(self):
- boss = self._cw.entity_from_eid(self.eidto)
- if boss.age < 18:
- msg = self._cw._('the minimum age for a boss is 18')
- raise ValidationError(self.eidfrom, {'boss': msg})
-
-We use the `match_rtype` selector to select the proper relation type.
+* check whether the 'age' attribute is being edited
+* if so, check that the value is in the allowed range
+* if it is not, raise a :exc:`ValidationError`
-The essential difference with respect to an entity hook is that there
-is no self.entity, but `self.eidfrom` and `self.eidto` hook attributes
-which represent the subject and object eid of the relation.
-
-
-Using Operations
-----------------
-
-Let's augment our example with a new `subsidiary_of` relation on Company.
+Now let's augment our schema with a new `Company` entity type with a relation to
+`Person` (in `mycube/schema.py`).
.. sourcecode:: python
@@ -236,16 +63,41 @@
boss = SubjectRelation('Person', cardinality='1*')
subsidiary_of = SubjectRelation('Company', cardinality='*?')
-Base example
-~~~~~~~~~~~~
-We would like to check that there is no cycle by the `subsidiary_of`
-relation. This is best achieved in an Operation since all relations
-are likely to be set at commit time.
+We would like to constrain the company's bosses to have a minimum (legal)
+age. Let's write a hook for this, which will be fired when the `boss` relation
+is established (still supposing we could not specify that kind of thing in the
+schema).
.. sourcecode:: python
- from cubicweb.server.hook import Hook, Operation, match_rtype
+ class CompanyBossLegalAge(Hook):
+ __regid__ = 'company_boss_legal_age'
+ __select__ = Hook.__select__ & match_rtype('boss')
+ events = ('before_add_relation',)
+
+ def __call__(self):
+ boss = self._cw.entity_from_eid(self.eidto)
+ if boss.age < 18:
+ msg = self._cw._('the minimum age for a boss is 18')
+ raise ValidationError(self.eidfrom, {'boss': msg})
+
+.. Note::
+
+ We use the :class:`~cubicweb.server.hook.match_rtype` selector to select the
+ proper relation type.
+
+ The essential difference with respect to an entity hook is that there is no
+ self.entity, but `self.eidfrom` and `self.eidto` hook attributes which
+ represent the subject and object **eid** of the relation.
+
+Suppose we want to check that there is no cycle through the `subsidiary_of`
+relation. This is best achieved in an operation since all relations are likely to
+be set at commit time.
+
+.. sourcecode:: python
+
+ from cubicweb.server.hook import Hook, DataOperationMixIn, Operation, match_rtype
def check_cycle(self, session, eid, rtype, role='subject'):
parents = set([eid])
@@ -257,7 +109,8 @@
raise ValidationError(eid, {rtype: msg})
parents.add(parent.eid)
- class CheckSubsidiaryCycleOp(Operation):
+
+ class CheckSubsidiaryCycleOp(DataOperationMixIn, Operation):
def precommit_event(self):
check_cycle(self.session, self.eidto, 'subsidiary_of')
@@ -265,30 +118,20 @@
class CheckSubsidiaryCycleHook(Hook):
__regid__ = 'check_no_subsidiary_cycle'
+ __select__ = Hook.__select__ & match_rtype('subsidiary_of')
events = ('after_add_relation',)
- __select__ = Hook.__select__ & match_rtype('subsidiary_of')
def __call__(self):
CheckSubsidiaryCycleOp(self._cw, eidto=self.eidto)
-The operation is instantiated in the Hook.__call__ method.
-An operation always takes a session object as first argument
-(accessible as `.session` from the operation instance), and optionally
-all keyword arguments needed by the operation. These keyword arguments
-will be accessible as attributes from the operation instance.
+Like in hooks, :exc:`~cubicweb.ValidationError` can be raised in operations. Other
+exceptions are usually programming errors.
-Like in Hooks, ValidationError can be raised in Operations. Other
-exceptions are programming errors.
-
-Notice how our hook will instantiate an operation each time the Hook
-is called, i.e. each time the `subsidiary_of` relation is set.
-
-Using set_operation
-~~~~~~~~~~~~~~~~~~~
-
-There is an alternative method to schedule an Operation from a Hook,
-using the `set_operation` function.
+In the above example, our hook will instantiate an operation each time the hook
+is called, i.e. each time the `subsidiary_of` relation is set. There is an
+alternative way to schedule an operation from a hook, using the
+:meth:`get_instance` class method provided by :class:`DataOperationMixIn`.
.. sourcecode:: python
@@ -300,143 +143,98 @@
__select__ = Hook.__select__ & match_rtype('subsidiary_of')
def __call__(self):
- set_operation(self._cw, 'subsidiary_cycle_detection', self.eidto,
- CheckSubsidiaryCycleOp, rtype=self.rtype)
+ CheckSubsidiaryCycleOp.get_instance(self._cw).add_data(self.eidto)
class CheckSubsidiaryCycleOp(Operation):
def precommit_event(self):
- for eid in self.session.transaction_data['subsidiary_cycle_detection']:
+ for eid in self.get_data():
check_cycle(self.session, eid, self.rtype)
-Here, we call set_operation with a session object, a specially forged
-key, a value that is the actual payload of an individual operation (in
-our case, the object of the subsidiary_of relation) , the class of the
-Operation, and more optional parameters to give to the operation (here
-the rtype which do not vary accross operations).
-
-The body of the operation must then iterate over the values that have
-been mapped in the transaction_data dictionary to the forged key.
-This mechanism is especially useful on two occasions (not shown in our
-example):
+Here, we call :meth:`get_instance` and :meth:`add_data` so that we simply
+accumulate the eids of the entities to check, and check them all at commit time
+in a single `CheckSubsidiaryCycleOp` operation. Values are stored in a set
+kept in the transaction data; the set initialization and the operation
+instantiation are handled transparently by :class:`DataOperationMixIn`.
-* massive data import (reduced memory consumption within a large
- transaction)
+A more realistic example can be found in the advanced tutorial chapter
+:ref:`adv_tuto_security_propagation`.
-* when several hooks need to instantiate the same operation (e.g. an
- entity and a relation hook).
-
-.. note::
- A more realistic example can be found in the advanced tutorial
- chapter :ref:`adv_tuto_security_propagation`.
+Hooks writing tips
+------------------
-.. _operations_api:
-
-Operation: a small API overview
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Reminder
+~~~~~~~~
-.. autoclass:: cubicweb.server.hook.Operation
-.. autoclass:: cubicweb.server.hook.LateOperation
-.. autofunction:: cubicweb.server.hook.set_operation
+You should never use the `entity.foo = 42` notation to update an
+entity. It will not do what you expect (updating the
+database). Instead, use the :meth:`set_attributes` and
+:meth:`set_relations` methods.
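+
+For instance (a minimal sketch; the eids and the `works_for` relation are only
+examples, the `age` attribute comes from the example above):
+
+.. sourcecode:: python
+
+    # from server-side code where self._cw is the current session
+    person = self._cw.entity_from_eid(person_eid)
+    # goes through the regular edition machinery, hooks included
+    person.set_attributes(age=42)
+    # same idea for relations, given the eid of the related entity
+    person.set_relations(works_for=company_eid)
+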
-Hooks writing rules
--------------------
-
-Remainder
-~~~~~~~~~
-
-Never, ever use the `entity.foo = 42` notation to update an entity. It
-will not work.
How to choose between a before and an after event ?
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-Before hooks give you access to the old attribute (or relation)
-values. By definition the database is not yet updated in a before
-hook.
+`before_*` hooks give you access to the old attribute (or relation)
+values. In the case of an entity modification, you can also intercept and
+update the edited values before they reach the database.
+
+Otherwise the question is: do I need to do things before or after the actual
+modification? If the answer is "it doesn't matter", use an 'after' event.
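+
+As an illustrative sketch, here is a `before` hook tweaking an edited value
+before it reaches the database (the `Person` type comes from the example above;
+the `name` attribute is hypothetical, and we assume the `cw_edited` mapping seen
+earlier also supports item assignment):
+
+.. sourcecode:: python
+
+    from cubicweb.server.hook import Hook
+    from cubicweb.selectors import is_instance
+
+    class NormalizePersonName(Hook):
+        __regid__ = 'normalize_person_name'
+        __select__ = Hook.__select__ & is_instance('Person')
+        events = ('before_add_entity', 'before_update_entity')
+
+        def __call__(self):
+            # strip surrounding whitespace before the value is stored
+            if 'name' in self.entity.cw_edited:
+                self.entity.cw_edited['name'] = self.entity.cw_edited['name'].strip()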
+
+
+Validation Errors
+~~~~~~~~~~~~~~~~~
-To access old and new values in an before_update_entity hook, one can
-use the `server.hook.entity_oldnewvalue` function which returns a
-tuple of the old and new values. This function takes an entity and an
-attribute name as parameters.
+When a hook which is responsible for maintaining the consistency of the
+data model detects an error, it must use a specific exception named
+:exc:`~cubicweb.ValidationError`. Raising anything but a (subclass of)
+:exc:`~cubicweb.ValidationError` is a programming error. Raising it
+entails aborting the current transaction.
-In a 'before_add|update_entity' hook the self.entity contains the new
-values. One is allowed to further modify them before database
-operations, using the dictionary notation.
+This exception is used to convey enough information up to the user
+interface. Hence its constructor is different from the default Exception
+constructor. It accepts, positionally:
+
+* an entity eid,
+
+* a dict whose keys represent attribute (or relation) names and values
+ an end-user facing message (hence properly translated) relating the
+ problem.
.. sourcecode:: python
- self.entity['age'] = 42
-
-This is because using self.entity.set_attributes(age=42) will
-immediately update the database (which does not make sense in a
-pre-database hook), and will trigger any existing
-before_add|update_entity hook, thus leading to infinite hook loops or
-such awkward situations.
+ raise ValidationError(earth.eid, {'sea_level': self._cw._('too high'),
+ 'temperature': self._cw._('too hot')})
-Beyond these specific cases, updating an entity attribute or relation
-must *always* be done using `set_attributes` and `set_relations`
-methods.
-(Of course, ValidationError will always abort the current transaction,
-whetever the event).
-
-Peculiarities of inlined relations
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Checking for object created/deleted in the current transaction
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-Some relations are defined in the schema as `inlined` (see
-:ref:`RelationType` for details). In this case, they are inserted in
-the database at the same time as entity attributes.
-
-Hence in the case of before_add_relation, such relations already exist
-in the database.
-
-Edited attributes
-~~~~~~~~~~~~~~~~~
+In hooks, you can use the
+:meth:`~cubicweb.server.session.Session.added_in_transaction` or
+:meth:`~cubicweb.server.session.Session.deleted_in_transaction` methods of the
+session object to check whether an eid has been created or deleted during the
+hook's transaction.
-On udpates, it is possible to ask the `entity.edited_attributes`
-variable whether one attribute has been updated.
-
-.. sourcecode:: python
-
- if 'age' not in entity.edited_attribute:
- return
-
-Deleted in transaction
-~~~~~~~~~~~~~~~~~~~~~~
-
-The session object has a deleted_in_transaction method, which can help
-writing deletion Hooks.
+This is useful to enable or disable some processing depending on whether an
+entity is being added or deleted, as in the snippet below.
.. sourcecode:: python
if self._cw.deleted_in_transaction(self.eidto):
return
-Given this predicate, we can avoid scheduling an operation.
-Disabling hooks
-~~~~~~~~~~~~~~~
-
-It is sometimes convenient to disable some hooks. For instance to
-avoid infinite Hook loops. One uses the `hooks_control` context
-manager.
-
-This can be controlled more finely through the `category` Hook class
-attribute, which is a string.
+Peculiarities of inlined relations
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-.. sourcecode:: python
-
- with hooks_control(self.session, self.session.HOOKS_ALLOW_ALL, ):
- # ... do stuff
-
-.. autoclass:: cubicweb.server.session.hooks_control
+Relations which are defined in the schema as `inlined` (see :ref:`RelationType`
+for details) are inserted in the database at the same time as entity attributes.
-The existing categories are: ``email``, ``syncsession``,
-``syncschema``, ``bookmark``, ``security``, ``worfklow``,
-``metadata``, ``notification``, ``integrity``, ``activeintegrity``.
-
-Nothing precludes one to invent new categories and use the
-hooks_control context manager to filter them (in or out).
+This may have side effects: for instance, when creating an entity and setting
+an inlined relation in the same RQL query, the relation will already exist in
+the database at `before_add_relation` time (which is otherwise not the case).
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/devrepo/testing.rst
--- a/doc/book/en/devrepo/testing.rst Fri Dec 10 12:17:18 2010 +0100
+++ b/doc/book/en/devrepo/testing.rst Fri Mar 11 09:46:45 2011 +0100
@@ -109,6 +109,8 @@
.. _apycot: http://www.logilab.org/project/apycot
+.. _securitytest:
+
Managing connections or users
+++++++++++++++++++++++++++++
@@ -194,13 +196,13 @@
blog_entry_2 = req.create_entity('BlogEntry', title=u'yes',
content=u'cubicweb yes')
blog_entry_2.set_relations(entry_of=cubicweb_blog)
- self.assertEquals(len(MAILBOX), 0)
+ self.assertEqual(len(MAILBOX), 0)
self.commit()
- self.assertEquals(len(MAILBOX), 2)
+ self.assertEqual(len(MAILBOX), 2)
mail = MAILBOX[0]
- self.assertEquals(mail.subject, '[data] hop')
+ self.assertEqual(mail.subject, '[data] hop')
mail = MAILBOX[1]
- self.assertEquals(mail.subject, '[data] yes')
+ self.assertEqual(mail.subject, '[data] yes')
Visible actions tests
`````````````````````
@@ -227,7 +229,7 @@
def test_admin(self):
req = self.request()
rset = req.execute('Any C WHERE C is Conference')
- self.assertListEquals(self.pactions(req, rset),
+ self.assertListEqual(self.pactions(req, rset),
[('workflow', workflow.WorkflowActions),
('edit', confactions.ModifyAction),
('managepermission', actions.ManagePermissionsAction),
@@ -236,7 +238,7 @@
('generate_badge_action', badges.GenerateBadgeAction),
('addtalkinconf', confactions.AddTalkInConferenceAction)
])
- self.assertListEquals(self.action_submenu(req, rset, 'addrelated'),
+ self.assertListEqual(self.action_submenu(req, rset, 'addrelated'),
[(u'add Track in_conf Conference object',
u'http://testing.fr/cubicweb/add/Track'
u'?__linkto=in_conf%%3A%(conf)s%%3Asubject&'
@@ -341,6 +343,60 @@
therefore making new entity types and relations available to the
tests.
+Literate programming
+--------------------
+
+CubicWeb provides some literate programming capabilities. The :ref:`cubicweb-ctl`
+`shell` command accepts files in different formats. If your file ends with `.txt`
+or `.rst`, the file will be parsed by :func:`doctest.testfile` with the CubicWeb
+:ref:`migration` API enabled in it.
+
+Create a `scenario.txt` file in the `test/` directory and fill it with some
+content. Please refer to the :func:`doctest.testfile` `documentation`_.
+
+.. _documentation: http://docs.python.org/library/doctest.html
+
+Then, you can run it directly by::
+
+ $ cubicweb-ctl shell test/scenario.txt
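+
+A hypothetical `scenario.txt` could contain something as simple as the check
+below (the `rql` function comes from the migration API enabled in the shell)::
+
+    >>> rset = rql('Any X WHERE X is CWUser')
+    >>> rset.rowcount > 0
+    True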
+
+When your scenario file is ready, put it in a new test case to be able to run
+it automatically.
+
+.. sourcecode:: python
+
+ from os.path import dirname, join
+ from logilab.common.testlib import unittest_main
+ from cubicweb.devtools.testlib import CubicWebTC
+
+ class AcceptanceTC(CubicWebTC):
+
+ def test_scenario(self):
+ self.assertDocTestFile(join(dirname(__file__), 'scenario.txt'))
+
+ if __name__ == '__main__':
+ unittest_main()
+
+Skipping a scenario
+```````````````````
+
+If you want to set up initial conditions that you can't put in your unit test
+case, you have to use a :exc:`KeyboardInterrupt` exception, only because the
+:mod:`doctest` module catches all other exceptions internally.
+
+    >>> if condition_not_met:
+    ...     raise KeyboardInterrupt('please, check your fixture.')
+
+Passing parameters
+``````````````````
+
+Using extra arguments to parametrize your scenario is possible by prepending
+them with double dashes.
+
+Please refer to the `cubicweb-ctl shell --help` usage.
+
+.. important::
+ Your scenario file must be utf-8 encoded.
+
Test APIS
---------
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/devrepo/vreg.rst
--- a/doc/book/en/devrepo/vreg.rst Fri Dec 10 12:17:18 2010 +0100
+++ b/doc/book/en/devrepo/vreg.rst Fri Mar 11 09:46:45 2011 +0100
@@ -38,6 +38,7 @@
.. autoclass:: cubicweb.selectors.match_kwargs
.. autoclass:: cubicweb.selectors.appobject_selectable
.. autoclass:: cubicweb.selectors.adaptable
+.. autoclass:: cubicweb.selectors.configuration_values
Result set selectors
@@ -77,6 +78,8 @@
.. autoclass:: cubicweb.selectors.has_permission
.. autoclass:: cubicweb.selectors.has_add_permission
.. autoclass:: cubicweb.selectors.has_mimetype
+.. autoclass:: cubicweb.selectors.is_in_state
+.. autoclass:: cubicweb.selectors.on_transition
.. autoclass:: cubicweb.selectors.implements
@@ -100,11 +103,13 @@
.. autoclass:: cubicweb.selectors.match_view
.. autoclass:: cubicweb.selectors.primary_view
.. autoclass:: cubicweb.selectors.specified_etype_implements
+.. autoclass:: cubicweb.selectors.attribute_edited
Other selectors
~~~~~~~~~~~~~~~
.. autoclass:: cubicweb.selectors.match_transition
+.. autoclass:: cubicweb.selectors.debug_mode
You'll also find some other (very) specific selectors hidden in other modules
than :mod:`cubicweb.selectors`.
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/devweb/edition/examples.rst
--- a/doc/book/en/devweb/edition/examples.rst Fri Dec 10 12:17:18 2010 +0100
+++ b/doc/book/en/devweb/edition/examples.rst Fri Mar 11 09:46:45 2011 +0100
@@ -117,7 +117,7 @@
set to 'sendmail', which is our form DOM id as specified by its `domid`
attribute), another to cancel the form which will go back to the previous page
using another javascript call. Also we specify an image to use as button icon as a
-resource identifier (see :ref:`external_resources`) given as last argument to
+resource identifier (see :ref:`uiprops`) given as last argument to
:class:`cubicweb.web.formwidgets.ImgButton`.
To see this form, we still have to wrap it in a view. This is pretty simple:
@@ -131,12 +131,13 @@
def call(self):
form = self._cw.vreg['forms'].select('massmailing', self._cw,
rset=self.cw_rset)
- self.w(form.render())
+ form.render(w=self.w)
As you see, we simply define a view with proper selector so it only apply to a
result set containing :class:`IEmailable` entities, and so that only users in the
managers or users group can use it. Then in the `call()` method for this view we
-simply select the above form and write what its `.render()` method returns.
+simply select the above form and call its `.render()` method with our output
+stream as argument.
When this form is submitted, a controller with id 'sendmail' will be called (as
specified using `action`). This controller will be responsible to actually send
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/devweb/request.rst
--- a/doc/book/en/devweb/request.rst Fri Dec 10 12:17:18 2010 +0100
+++ b/doc/book/en/devweb/request.rst Fri Mar 11 09:46:45 2011 +0100
@@ -1,5 +1,5 @@
-The `Request` class (`cubicweb.web`)
-------------------------------------
+The `Request` class (`cubicweb.web.request`)
+--------------------------------------------
Overview
````````
@@ -7,7 +7,8 @@
A request instance is created when an HTTP request is sent to the web
server. It contains informations such as form parameters,
authenticated user, etc. It is a very prevalent object and is used
-throughout all of the framework and applications.
+throughout all of the framework and applications, as you'll access
+almost every resource through it.
**A request represents a user query, either through HTTP or not (we
also talk about RQL queries on the server side for example).**
@@ -24,8 +25,8 @@
* `User and identification`:
- * `user`, instance of `cubicweb.common.utils.User` corresponding to
- the authenticated user
+ * `user`, instance of `cubicweb.entities.authobjs.CWUser` corresponding to the
+ authenticated user
* `Session data handling`
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/devweb/views/basetemplates.rst
--- a/doc/book/en/devweb/views/basetemplates.rst Fri Dec 10 12:17:18 2010 +0100
+++ b/doc/book/en/devweb/views/basetemplates.rst Fri Mar 11 09:46:45 2011 +0100
@@ -1,7 +1,5 @@
.. -*- coding: utf-8 -*-
-.. |cubicweb| replace:: *CubicWeb*
-
.. _templates:
Templates
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/devweb/views/primary.rst
--- a/doc/book/en/devweb/views/primary.rst Fri Dec 10 12:17:18 2010 +0100
+++ b/doc/book/en/devweb/views/primary.rst Fri Mar 11 09:46:45 2011 +0100
@@ -226,8 +226,6 @@
We'll show you now an example of a ``primary`` view and how to customize it.
-We continue along the basic tutorial :ref:`tuto_blog`.
-
If you want to change the way a ``BlogEntry`` is displayed, just
override the method ``cell_call()`` of the view ``primary`` in
``BlogDemo/views.py``.
@@ -247,7 +245,7 @@
The above source code defines a new primary view for
-``BlogEntry``. The `id` class attribute is not repeated there since it
+``BlogEntry``. The `__regid__` class attribute is not repeated there since it
is inherited through the `primary.PrimaryView` class.
The selector for this view chains the selector of the inherited class
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/devweb/views/views.rst
--- a/doc/book/en/devweb/views/views.rst Fri Dec 10 12:17:18 2010 +0100
+++ b/doc/book/en/devweb/views/views.rst Fri Mar 11 09:46:45 2011 +0100
@@ -88,7 +88,7 @@
Other basic view classes
````````````````````````
-Here are some of the subclasses of `View` defined in `cubicweb.common.view`
+Here are some of the subclasses of `View` defined in `cubicweb.view`
that are more concrete as they relate to data rendering within the application:
* `EntityView`, view applying to lines or cell containing an entity (e.g. an eid)
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/images/blog-demo-first-page.png
Binary file doc/book/en/images/blog-demo-first-page.png has changed
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/images/cbw-add-relation-entryof_en.png
Binary file doc/book/en/images/cbw-add-relation-entryof_en.png has changed
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/images/cbw-create-blog_en.png
Binary file doc/book/en/images/cbw-create-blog_en.png has changed
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/images/cbw-detail-one-blogentry_en.png
Binary file doc/book/en/images/cbw-detail-one-blogentry_en.png has changed
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/images/cbw-list-one-blog_en.png
Binary file doc/book/en/images/cbw-list-one-blog_en.png has changed
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/images/cbw-list-two-blog_en.png
Binary file doc/book/en/images/cbw-list-two-blog_en.png has changed
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/images/cbw-schema_en.png
Binary file doc/book/en/images/cbw-schema_en.png has changed
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/images/cbw-update-primary-view_en.png
Binary file doc/book/en/images/cbw-update-primary-view_en.png has changed
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/images/lax-book_06-header-no-login_en.png
Binary file doc/book/en/images/lax-book_06-header-no-login_en.png has changed
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/images/lax-book_06-main-template-layout_en.png
Binary file doc/book/en/images/lax-book_06-main-template-layout_en.png has changed
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/images/lax-book_06-simple-main-template_en.png
Binary file doc/book/en/images/lax-book_06-simple-main-template_en.png has changed
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/images/login-form.png
Binary file doc/book/en/images/login-form.png has changed
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/images/tutos-base_blog-form_en.png
Binary file doc/book/en/images/tutos-base_blog-form_en.png has changed
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/images/tutos-base_blog-primary-after-post-creation_en.png
Binary file doc/book/en/images/tutos-base_blog-primary-after-post-creation_en.png has changed
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/images/tutos-base_blog-primary_en.png
Binary file doc/book/en/images/tutos-base_blog-primary_en.png has changed
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/images/tutos-base_blogs-list_en.png
Binary file doc/book/en/images/tutos-base_blogs-list_en.png has changed
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/images/tutos-base_form-generic-relations_en.png
Binary file doc/book/en/images/tutos-base_form-generic-relations_en.png has changed
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/images/tutos-base_index_en.png
Binary file doc/book/en/images/tutos-base_index_en.png has changed
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/images/tutos-base_login-form_en.png
Binary file doc/book/en/images/tutos-base_login-form_en.png has changed
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/images/tutos-base_myblog-blogentry-taggable-commentable-primary_en.png
Binary file doc/book/en/images/tutos-base_myblog-blogentry-taggable-commentable-primary_en.png has changed
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/images/tutos-base_myblog-community-custom-primary_en.png
Binary file doc/book/en/images/tutos-base_myblog-community-custom-primary_en.png has changed
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/images/tutos-base_myblog-community-default-primary_en.png
Binary file doc/book/en/images/tutos-base_myblog-community-default-primary_en.png has changed
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/images/tutos-base_myblog-community-taggable-primary_en.png
Binary file doc/book/en/images/tutos-base_myblog-community-taggable-primary_en.png has changed
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/images/tutos-base_myblog-custom-footer_en.png
Binary file doc/book/en/images/tutos-base_myblog-custom-footer_en.png has changed
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/images/tutos-base_myblog-schema_en.png
Binary file doc/book/en/images/tutos-base_myblog-schema_en.png has changed
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/images/tutos-base_myblog-siteinfo_en.png
Binary file doc/book/en/images/tutos-base_myblog-siteinfo_en.png has changed
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/images/tutos-base_schema_en.png
Binary file doc/book/en/images/tutos-base_schema_en.png has changed
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/images/tutos-base_siteconfig_en.png
Binary file doc/book/en/images/tutos-base_siteconfig_en.png has changed
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/images/tutos-base_user-menu_en.png
Binary file doc/book/en/images/tutos-base_user-menu_en.png has changed
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/images/tutos-photowebsite_background-image.png
Binary file doc/book/en/images/tutos-photowebsite_background-image.png has changed
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/images/tutos-photowebsite_boxes.png
Binary file doc/book/en/images/tutos-photowebsite_boxes.png has changed
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/images/tutos-photowebsite_breadcrumbs.png
Binary file doc/book/en/images/tutos-photowebsite_breadcrumbs.png has changed
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/images/tutos-photowebsite_facets.png
Binary file doc/book/en/images/tutos-photowebsite_facets.png has changed
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/images/tutos-photowebsite_grey-box.png
Binary file doc/book/en/images/tutos-photowebsite_grey-box.png has changed
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/images/tutos-photowebsite_index-after.png
Binary file doc/book/en/images/tutos-photowebsite_index-after.png has changed
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/images/tutos-photowebsite_index-before.png
Binary file doc/book/en/images/tutos-photowebsite_index-before.png has changed
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/images/tutos-photowebsite_login-box.png
Binary file doc/book/en/images/tutos-photowebsite_login-box.png has changed
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/images/tutos-photowebsite_prevnext.png
Binary file doc/book/en/images/tutos-photowebsite_prevnext.png has changed
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/images/tutos-photowebsite_ui1.png
Binary file doc/book/en/images/tutos-photowebsite_ui1.png has changed
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/images/tutos-photowebsite_ui2.png
Binary file doc/book/en/images/tutos-photowebsite_ui2.png has changed
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/images/tutos-photowebsite_ui3.png
Binary file doc/book/en/images/tutos-photowebsite_ui3.png has changed
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/index.rst
--- a/doc/book/en/index.rst Fri Dec 10 12:17:18 2010 +0100
+++ b/doc/book/en/index.rst Fri Mar 11 09:46:45 2011 +0100
@@ -13,19 +13,28 @@
Its main features are:
-* an engine driven by the explicit :ref:`data model ` of the application,
+* an engine driven by the explicit :ref:`data model
+ ` of the application,
+
* a query language named :ref:`RQL ` similar to W3C's SPARQL,
-* a :ref:`selection+view ` mechanism for semi-automatic XHTML/XML/JSON/text generation,
-* a library of reusable :ref:`components ` (data model and views) that fulfill common needs,
+
+* a :ref:`selection+view `
+ mechanism for semi-automatic XHTML/XML/JSON/text generation,
+
+* a library of reusable :ref:`components ` (data model and views) that
+ fulfill common needs,
+
* the power and flexibility of the Python_ programming language,
-* the reliability of SQL databases, LDAP directories, Subversion and Mercurial for storage backends.
+
+* the reliability of SQL databases, LDAP directories, Subversion and Mercurial
+ for storage backends.
Built since 2000 from an R&D effort still continued, supporting 100,000s of
daily visits at some production sites, |cubicweb| is a proven end to end solution
for semantic web application development that promotes quality, reusability and
efficiency.
-The unbeliever will read the :ref:`Tutorial`.
+The unbeliever will read the :ref:`Tutorials`.
The hacker will join development at the forge_.
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/intro/concepts.rst
--- a/doc/book/en/intro/concepts.rst Fri Dec 10 12:17:18 2010 +0100
+++ b/doc/book/en/intro/concepts.rst Fri Mar 11 09:46:45 2011 +0100
@@ -1,3 +1,4 @@
+
.. -*- coding: utf-8 -*-
.. _Concepts:
@@ -27,14 +28,10 @@
The `CubicWeb.org Forge`_ offers a large number of cubes developed by the community
and available under a free software license.
-The command :command:`cubicweb-ctl list` displays the list of cubes installed on
-your system.
+.. note::
-On a Unix system, the available cubes are usually stored in the directory
-:file:`/usr/share/cubicweb/cubes`. If you're using the cubicweb forest
-(:ref:SourceInstallation), the cubes are searched in the directory
-:file:`/path/to/cubicweb_forest/cubes`. The environment variable
-:envvar:`CW_CUBES_PATH` gives additionnal locations where to search for cubes.
+   The command :command:`cubicweb-ctl list` displays the list of cubes
+   installed on your system.
.. _`CubicWeb.org Forge`: http://www.cubicweb.org/project/
.. _`cubicweb-blog`: http://www.cubicweb.org/project/cubicweb-blog
@@ -64,12 +61,6 @@
The command :command:`cubicweb-ctl list` also displays the list of instances
installed on your system.
-On a Unix system, the instances are usually stored in the directory
-:file:`/etc/cubicweb.d/`. During development, the :file:`~/etc/cubicweb.d/`
-directory is looked up, as well as the paths in :envvar:`CW_INSTANCES_DIR`
-environment variable.
-
-
.. note::
The term application is used to refer to "something that should do something as
@@ -83,28 +74,20 @@
Data Repository
---------------
-The data repository [1]_ provides access to one or more data sources (including
-SQL databases, LDAP repositories, other |cubicweb| instance repositories, GAE's
+The data repository [1]_ encapsulates and groups access to one or
+more data sources (including SQL databases, LDAP repositories, other
+|cubicweb| instance repositories, filesystems, Google AppEngine's
DataStore, etc).
-All interactions with the repository are done using the Relation Query Language
+All interactions with the repository are done using the `Relation Query Language`
(:ref:`RQL`). The repository federates the data sources and hides them from the
-querier, which does not realize when a query spans accross several data sources
+querier, which does not realize when a query spans several data sources
and requires running sub-queries and merges to complete.
-It is common to run the web engine and the repository in the same process (see
-instances of type all-in-one above), but this is not a requirement. A repository
-can be set up to be accessed remotely using Pyro (`Python Remote Objects`_) and
-act as a server. However, it's important to know if code you're writing is
-executed on the repository side, on our client side (the web engine being a
-client for instance): you don't have the same abilities on both side. On the
-repository side, you can for instance by-pass security checks, which isn't
-possible from client code.
-
-Some logic can be attached to events that happen in the repository,
-like creation of entities, deletion of relations, etc. This is used
-for example to send email notifications when the state of an object
-changes. See :ref:`HookIntro` below.
+Application logic can be mapped to data events happening within the
+repository, like creation of entities, deletion of relations,
+etc. This is used for example to send email notifications when the
+state of an object changes. See :ref:`HookIntro` below.
.. [1] not to be confused with a Mercurial repository or a Debian repository.
.. _`Python Remote Objects`: http://pyro.sourceforge.net/
@@ -114,14 +97,19 @@
Web Engine
----------
-The web engine replies to http requests and runs the user interface
-and most of the application logic.
+The web engine replies to http requests and runs the user interface.
By default the web engine provides a `CRUD`_ user interface based on
the data model of the instance. Entities can be created, displayed,
updated and deleted. As the default user interface is not very fancy,
it is usually necessary to develop your own.
+It is common to run the web engine and the repository in the same
+process (see instances of type all-in-one above), but this is not a
+requirement. A repository can be set up to be accessed remotely using
+Pyro (`Python Remote Objects`_) and act as a standalone server, which
+can be accessed directly or through a standalone web engine.
+
.. _`CRUD`: http://en.wikipedia.org/wiki/Create,_read,_update_and_delete
.. _SchemaIntro:
@@ -134,24 +122,24 @@
.. _yams: http://www.logilab.org/project/yams/
-An `entity type` defines a set of attributes and is used in some relations.
-Attributes may be of the following types: `String`, `Int`, `Float`, `Boolean`,
-`Date`, `Time`, `Datetime`, `Interval`, `Password`, `Bytes`, `RichString`.
+An `entity type` defines a sequence of attributes. Attributes may be
+of the following types: `String`, `Int`, `Float`, `Boolean`, `Date`,
+`Time`, `Datetime`, `Interval`, `Password`, `Bytes`, `RichString`.
-A `relation type` is used to define an oriented binary relation between two
-entity types. The left-hand part of a relation is named the `subject` and the
-right-hand part is named the `object`.
+A `relation type` is used to define an oriented binary relation
+between entity types. The left-hand part of a relation is named the
+`subject` and the right-hand part is named the `object`.
A `relation definition` is a triple (*subject entity type*, *relation type*, *object
entity type*) associated with a set of properties such as cardinality,
constraints, etc.
-Permissions can be set on entity types and relation definition to control who
+Permissions can be set on entity types or relation definitions to control who
will be able to create, read, update or delete entities and relations. Permissions
-are granted to groups (to which users may belong) or using rql expression (if the
+are granted to groups (to which users may belong) or using rql expressions (if the
rql expression returns some results, the permission is granted).
-Some meta-data necessary to the system is added to the data model. That includes
+Some meta-data necessary to the system are added to the data model. That includes
entities like users and groups, the entities used to store the data model
itself and attributes like unique identifier, creation date, creator, etc.
@@ -169,19 +157,15 @@
Application objects
~~~~~~~~~~~~~~~~~~~
-Beside a few core functionalities, almost every feature of the framework is
+Besides a few core functionalities, almost every feature of the framework is
achieved by dynamic objects (`application objects` or `appobjects`) stored in a
-two-levels registry (the `vregistry`). Each object is affected to a registry with
+two-level registry. Each object is assigned to a registry with
an identifier in this registry. You may have more than one object sharing an
-identifier in the same registry. At runtime, appobjects are selected in a
-registry according to the context. Selection is done by comparing the *score*
-returned by each appobject's *selector*.
-
-Application objects are stored in the vregistry using a two-level hierarchy :
+identifier in the same registry:
object's `__registry__` : object's `__regid__` : [list of app objects]
-In other words, the `vregistry` contains several (sub-)registries which hold a
+In other words, the `registry` contains several (sub-)registries which hold a
list of appobjects associated to an identifier.
The base class of appobjects is :class:`cubicweb.appobject.AppObject`.
@@ -189,10 +173,14 @@
Selectors
~~~~~~~~~
-Each appobject has a selector that is used to compute how well the object fits a
-given context. The better the object fits the context, the higher the score. Scores
-are the glue that ties appobjects to the data model. Using them appropriately is
-an essential part of the construction of well behaved cubes.
+At runtime, appobjects can be selected in a registry according to some
+contextual information. Selection is done by comparing the *score*
+returned by each appobject's *selector*.
+
+The better the object fits the context, the higher the score. Scores
+are the glue that ties appobjects to the data model. Using them
+appropriately is an essential part of the construction of well behaved
+cubes.
|cubicweb| provides a set of basic selectors that may be parametrized. Also,
selectors can be combined with the `~` unary operator (negation) and the binary
@@ -200,35 +188,36 @@
selectors. Of course complex selectors may be combined too. Last but not least, you
can write your own selectors.
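+
+As a hypothetical sketch (the view identifier and entity type are only
+examples), a view reserved to managers looking at blog entries could combine
+two basic selectors:
+
+.. sourcecode:: python
+
+    from cubicweb.selectors import is_instance, match_user_groups
+    from cubicweb.view import EntityView
+
+    class ManagerBlogEntryView(EntityView):
+        __regid__ = 'manager-blogentry'
+        # selected only for BlogEntry result sets browsed by a manager
+        __select__ = (EntityView.__select__
+                      & is_instance('BlogEntry')
+                      & match_user_groups('managers'))
+
+        def cell_call(self, row, col):
+            self.w(self.cw_rset.get_entity(row, col).dc_title())
+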
-The `vregistry`
+The `registry`
~~~~~~~~~~~~~~~
-At startup, the `vregistry` inspects a number of directories looking for
-compatible classes definition. After a recording process, the objects are
-assigned to registries so that they can be selected dynamically while the
-instance is running.
+At startup, the `registry` inspects a number of directories looking
+for compatible class definitions. After a recording process, the
+objects are assigned to registries and become available through the
+selection process.
In a cube, application object classes are looked in the following modules or
packages:
- `entities`
- `views`
+- `hooks`
- `sobjects`
-
-Once initialized, there are three common ways to retrieve some application object
-from a registry:
+There are three common ways to look up some application object from a
+registry:
-* get the most appropriate object by specifying an identifier. In that case, the
- object with the greatest score is selected. There should always be a single
- appobject with a greater score than others for a particular context.
+* get the most appropriate object by specifying an identifier and
+ context objects. The object with the greatest score is
+ selected. There should always be a single appobject with a greater
+ score than others for a particular context.
-* get all objects applying to a context by specifying a registry. In that case, a
- list of objects will be returned containing the object with the highest score
- (> 0) for each identifier in that registry.
+* get all objects applying to a context by specifying a registry. A
+ list of objects will be returned containing the object with the
+ highest score (> 0) for each identifier in that registry.
-* get the object within a particular registry/identifier. In that case no
- selection process is involved, the vregistry will expect to find a single
+* get the object within a particular registry/identifier. No selection
+ process is involved: the registry will expect to find a single
object in that cell.
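+
+As a hypothetical sketch of the first and third lookup styles, from any
+application object where `self._cw` is the current request or session and
+`rset` is some result set:
+
+.. sourcecode:: python
+
+    # best matching object for the 'primary' identifier of the 'views' registry
+    view = self._cw.vreg['views'].select('primary', self._cw, rset=rset)
+
+    # raw access to a registry cell: no selection process involved
+    primary_view_classes = self._cw.vreg['views']['primary']
+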
@@ -247,21 +236,6 @@
emphasize browsing relations.
-DB-API
-~~~~~~
-
-The repository exposes a `db-api`_ like api but using the RQL instead of SQL.
-
-.. _`db-api`: http://www.python.org/dev/peps/pep-0249/
-
-You basically get a connection using :func:`cubicweb.dbapi.connect` , then
-get a cursor to call its `execute` method which will return result set for the
-given rql query.
-
-You can also get additional information through the connection, such as the
-repository'schema, version configuration, etc.
-
-
Result set
~~~~~~~~~~
@@ -285,18 +259,10 @@
something, eg producing some html, text, xml, pdf, or whatsover that can be
displayed to a user.
-The two main entry points of a view are:
-
-* `call()`, used to render a view on a context with no result set, or on a whole
- result set
-
-* `cell_call(row, col)`, used to render a view on a the cell with index `row` and
- `col` of the context's result set (remember result set may be seen as a two
- dimensions array).
-
-Then view may gets refined into different kind of objects such as `template`,
-`boxes`, `components`, which are more high-level abstraction useful to build
-the user interface in an object oriented way.
+Views are actually partitioned into different kinds of objects such as
+`templates`, `boxes`, `components` and proper `views`, which are
+higher-level abstractions useful to build the user interface in an
+object-oriented way.
.. _HookIntro:
@@ -312,7 +278,7 @@
* managing computed attributes
-* enforcing complicated structural invariants
+* enforcing complicated business rules
* real-world side-effects linked to data events (email notification
being a prime example)
@@ -326,10 +292,9 @@
* it is well-coupled to the rest of the framework
-Hooks are also application objects registered on events such as after/before
-add/update/delete on entities/relations, server startup or shutdown, etc. As all
-application objects, they have a selector defining when they should be called or
-not.
+Hooks are also application objects (in the `hooks` registry) and are
+selected on events such as after/before add/update/delete on
+entities/relations, server startup or shutdown, etc.
`Operations` may be instantiated by hooks to do further processing at different
steps of the transaction's commit / rollback, which usually can not be done
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/makefile
--- a/doc/book/en/makefile Fri Dec 10 12:17:18 2010 +0100
+++ b/doc/book/en/makefile Fri Mar 11 09:46:45 2011 +0100
@@ -1,16 +1,11 @@
-MKHTML=mkdoc
-MKHTMLOPTS=--doctype article --target html --stylesheet standard
SRC=.
-TXTFILES:= $(wildcard *.txt)
-TARGET := $(TXTFILES:.txt=.html)
-
# You can set these sphinx variables from the command line.
SPHINXOPTS =
SPHINXBUILD = sphinx-build
PAPER =
#BUILDDIR = build
-BUILDDIR = ~/tmp/cwdoc
+BUILDDIR = ../..
CWDIR = ../../..
JSDIR = ${CWDIR}/web/data
JSTORST = ${CWDIR}/doc/tools/pyjsrest.py
@@ -28,7 +23,6 @@
help:
@echo "Please use \`make ' where is one of"
@echo " all to make standalone HTML files, developer manual and API doc"
- @echo " apidoc to make API doc"
@echo " html to make standalone HTML files"
@echo "--- "
@echo " pickle to make pickle files (usable by e.g. sphinx-web)"
@@ -38,20 +32,11 @@
@echo " linkcheck to check all external links for integrity"
clean:
- rm -rf apidoc/
rm -f *.html
- -rm -rf ${BUILDDIR}/*
+ -rm -rf ${BUILDDIR}/html ${BUILDDIR}/doctrees
-rm -rf ${BUILDJS}
-all: ${TARGET} apidoc html
-
-%.html: %.txt
- ${MKHTML} ${MKHTMLOPTS} $<
-
-#apydoc:
-# epydoc --html -o epydoc/ -n ../server/*.py ../core/*.py ../common/*.py ../server/*/*.py ../modpython/*/*.py ../common/*/*.py
-apidoc:
- epydoc --html -o apidoc -n "cubicweb" --exclude=setup --exclude=__pkginfo__ ../../../
+all: html
# run sphinx ###
html: js
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/tutorials/advanced/index.rst
--- a/doc/book/en/tutorials/advanced/index.rst Fri Dec 10 12:17:18 2010 +0100
+++ b/doc/book/en/tutorials/advanced/index.rst Fri Mar 11 09:46:45 2011 +0100
@@ -1,7 +1,8 @@
-.. _advanced_tutorial:
+
+.. _TutosPhotoWebSite:
-Building a photo gallery with CubicWeb
-======================================
+Building a photo gallery with |cubicweb|
+========================================
Desired features
----------------
@@ -16,574 +17,13 @@
* advanced security (not everyone can see everything). More on this later.
-Cube creation and schema definition
------------------------------------
-
-.. _adv_tuto_create_new_cube:
-
-Step 1: creating a new cube for my web site
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-One note about my development environment: I wanted to use the packaged
-version of CubicWeb and cubes while keeping my cube in my user
-directory, let's say `~src/cubes`. I achieve this by setting the
-following environment variables::
-
- CW_CUBES_PATH=~/src/cubes
- CW_MODE=user
-
-I can now create the cube which will hold custom code for this web
-site using::
-
- cubicweb-ctl newcube --directory=~/src/cubes sytweb
-
-
-.. _adv_tuto_assemble_cubes:
-
-Step 2: pick building blocks into existing cubes
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Almost everything I want to handle in my web-site is somehow already modelized in
-existing cubes that I'll extend for my need. So I'll pick the following cubes:
-
-* `folder`, containing the `Folder` entity type, which will be used as
- both 'album' and a way to map file system folders. Entities are
- added to a given folder using the `filed_under` relation.
-
-* `file`, containing `File` and `Image` entity types, gallery view,
- and a file system import utility.
-
-* `zone`, containing the `Zone` entity type for hierarchical geographical
- zones. Entities (including sub-zones) are added to a given zone using the
- `situated_in` relation.
-
-* `person`, containing the `Person` entity type plus some basic views.
-
-* `comment`, providing a full commenting system allowing one to comment entity types
- supporting the `comments` relation by adding a `Comment` entity.
-
-* `tag`, providing a full tagging system as an easy and powerful way to classify
- entities supporting the `tags` relation by linking the to `Tag` entities. This
- will allows navigation into a large number of picture.
-
-Ok, now I'll tell my cube requires all this by editing cubes/sytweb/__pkginfo__.py:
-
- .. sourcecode:: python
-
- __depends_cubes__ = {'file': '>= 1.2.0',
- 'folder': '>= 1.1.0',
- 'person': '>= 1.2.0',
- 'comment': '>= 1.2.0',
- 'tag': '>= 1.2.0',
- 'zone': None,
- }
- __depends__ = {'cubicweb': '>= 3.5.10',
- }
- for key,value in __depends_cubes__.items():
- __depends__['cubicweb-'+key] = value
- __use__ = tuple(__depends_cubes__)
-
-Notice that you can express minimal version of the cube that should be used,
-`None` meaning whatever version available.
+.. toctree::
+ :maxdepth: 2
-Step 3: glue everything together in my cube's schema
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-.. sourcecode:: python
-
- from yams.buildobjs import RelationDefinition
-
- class comments(RelationDefinition):
- subject = 'Comment'
- object = ('File', 'Image')
- cardinality = '1*'
- composite = 'object'
-
- class tags(RelationDefinition):
- subject = 'Tag'
- object = ('File', 'Image')
-
- class filed_under(RelationDefinition):
- subject = ('File', 'Image')
- object = 'Folder'
-
- class situated_in(RelationDefinition):
- subject = 'Image'
- object = 'Zone'
-
- class displayed_on(RelationDefinition):
- subject = 'Person'
- object = 'Image'
-
-
-This schema:
-
-* allows to comment and tag on `File` and `Image` entity types by adding the
- `comments` and `tags` relations. This should be all we've to do for this
- feature since the related cubes provide 'pluggable section' which are
- automatically displayed on the primary view of entity types supporting the
- relation.
-
-* adds a `situated_in` relation definition so that image entities can be
- geolocalized.
-
-* add a new relation `displayed_on` relation telling who can be seen on a
- picture.
-
-This schema will probably have to evolve as time goes (for security handling at
-least), but since the possibility to let a schema evolve is one of CubicWeb's
-features (and goals), we won't worry about it for now and see that later when needed.
-
-
-Step 4: creating the instance
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Now that I have a schema, I want to create an instance. To
-do so using this new 'sytweb' cube, I run::
-
- cubicweb-ctl create sytweb sytweb_instance
-
-Hint: if you get an error while the database is initialized, you can
-avoid having to answer the questions again by running::
-
- cubicweb-ctl db-create sytweb_instance
-
-This will use your already configured instance and start directly from the create
-database step, thus skipping questions asked by the 'create' command.
-
-Once the instance and database are fully initialized, run ::
-
- cubicweb-ctl start sytweb_instance
-
-to start the instance, check you can connect on it, etc...
+ part01_create-cube
+ part02_security
+ part03_bfss
+ part04_ui-base
+ part05_ui-advanced
-Security, testing and migration
--------------------------------
-
-This part will cover various topics:
-
-* configuring security
-* migrating existing instance
-* writing some unit tests
-
-Here is the ``read`` security model I want:
-
-* folders, files, images and comments should have one of the following visibility:
- - ``public``, everyone can see it
- - ``authenticated``, only authenticated users can see it
- - ``restricted``, only a subset of authenticated users can see it
-* managers (e.g. me) can see everything
-* only authenticated users can see people
-* everyone can see classifier entities, such as tag and zone
-
-Also, unless explicitly specified, the visibility of an image should be the same as
-its parent folder, as well as visibility of a comment should be the same as the
-commented entity. If there is no parent entity, the default visibility is
-``authenticated``.
-
-Regarding write security, that's much easier:
-* anonymous can't write anything
-* authenticated users can only add comment
-* managers will add the remaining stuff
-
-Now, let's implement that!
-
-Proper security in CubicWeb is done at the schema level, so you don't have to
-bother with it in views: users will only see what they can see automatically.
-
-.. _adv_tuto_security:
-
-Step 1: configuring security into the schema
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-In schema, you can grant access according to groups, or to some RQL expressions:
-users get access if the expression returns some results. To implement the read
-security defined earlier, groups are not enough, we'll need some RQL expression. Here
-is the idea:
-
-* add a `visibility` attribute on Folder, Image and Comment, which may be one of
- the value explained above
-
-* add a `may_be_read_by` relation from Folder, Image and Comment to users,
- which will define who can see the entity
-
-* security propagation will be done in hook.
-
-So the first thing to do is to modify my cube's schema.py to define those
-relations:
-
-.. sourcecode:: python
-
- from yams.constraints import StaticVocabularyConstraint
-
- class visibility(RelationDefinition):
- subject = ('Folder', 'File', 'Image', 'Comment')
- object = 'String'
- constraints = [StaticVocabularyConstraint(('public', 'authenticated',
- 'restricted', 'parent'))]
- default = 'parent'
- cardinality = '11' # required
-
- class may_be_read_by(RelationDefinition):
- subject = ('Folder', 'File', 'Image', 'Comment',)
- object = 'CWUser'
-
-We can note the following points:
-
-* we've added a new `visibility` attribute to folder, file, image and comment
- using a `RelationDefinition`
-
-* `cardinality = '11'` means this attribute is required. This is usually hidden
- under the `required` argument given to the `String` constructor, but we can
- rely on this here (same thing for StaticVocabularyConstraint, which is usually
- hidden by the `vocabulary` argument)
-
-* the `parent` possible value will be used for visibility propagation
-
-Now, we should be able to define security rules in the schema, based on these new
-attribute and relation. Here is the code to add to *schema.py*:
-
-.. sourcecode:: python
-
- from cubicweb.schema import ERQLExpression
-
- VISIBILITY_PERMISSIONS = {
- 'read': ('managers',
- ERQLExpression('X visibility "public"'),
- ERQLExpression('X may_be_read_by U')),
- 'add': ('managers',),
- 'update': ('managers', 'owners',),
- 'delete': ('managers', 'owners'),
- }
- AUTH_ONLY_PERMISSIONS = {
- 'read': ('managers', 'users'),
- 'add': ('managers',),
- 'update': ('managers', 'owners',),
- 'delete': ('managers', 'owners'),
- }
- CLASSIFIERS_PERMISSIONS = {
- 'read': ('managers', 'users', 'guests'),
- 'add': ('managers',),
- 'update': ('managers', 'owners',),
- 'delete': ('managers', 'owners'),
- }
-
- from cubes.folder.schema import Folder
- from cubes.file.schema import File, Image
- from cubes.comment.schema import Comment
- from cubes.person.schema import Person
- from cubes.zone.schema import Zone
- from cubes.tag.schema import Tag
-
- Folder.__permissions__ = VISIBILITY_PERMISSIONS
- File.__permissions__ = VISIBILITY_PERMISSIONS
- Image.__permissions__ = VISIBILITY_PERMISSIONS
- Comment.__permissions__ = VISIBILITY_PERMISSIONS.copy()
- Comment.__permissions__['add'] = ('managers', 'users',)
- Person.__permissions__ = AUTH_ONLY_PERMISSIONS
- Zone.__permissions__ = CLASSIFIERS_PERMISSIONS
- Tag.__permissions__ = CLASSIFIERS_PERMISSIONS
-
-What's important in there:
-
-* `VISIBILITY_PERMISSIONS` provides read access to managers group, if
- `visibility` attribute's value is 'public', or if user (designed by the 'U'
- variable in the expression) is linked to the entity (the 'X' variable) through
- the `may_read` permission
-
-* we modify permissions of the entity types we use by importing them and
- modifying their `__permissions__` attribute
-
-* notice the `.copy()`: we only want to modify 'add' permission for `Comment`,
- not for all entity types using `VISIBILITY_PERMISSIONS`!
-
-* the remaining part of the security model is done using regular groups:
-
- - `users` is the group to which all authenticated users will belong
- - `guests` is the group of anonymous users
-
-
-.. _adv_tuto_security_propagation:
-
-Step 2: security propagation in hooks
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-To fullfill the requirements, we have to implement::
-
- Also, unless explicity specified, visibility of an image should be the same as
- its parent folder, as well as visibility of a comment should be the same as the
- commented entity.
-
-This kind of `active` rule will be done using CubicWeb's hook
-system. Hooks are triggered on database event such as addition of new
-entity or relation.
-
-The tricky part of the requirement is in *unless explicitly specified*, notably
-because when the entity is added, we don't know yet its 'parent'
-entity (e.g. Folder of an Image, Image commented by a Comment). To handle such things,
-CubicWeb provides `Operation`, which allow to schedule things to do at commit time.
-
-In our case we will:
-
-* on entity creation, schedule an operation that will set default visibility
-
-* when a "parent" relation is added, propagate parent's visibility unless the
- child already has a visibility set
-
-Here is the code in cube's *hooks.py*:
-
-.. sourcecode:: python
-
- from cubicweb.selectors import is_instance
- from cubicweb.server import hook
-
- class SetVisibilityOp(hook.Operation):
- def precommit_event(self):
- for eid in self.session.transaction_data.pop('pending_visibility'):
- entity = self.session.entity_from_eid(eid)
- if entity.visibility == 'parent':
- entity.set_attributes(visibility=u'authenticated')
-
- class SetVisibilityHook(hook.Hook):
- __regid__ = 'sytweb.setvisibility'
- __select__ = hook.Hook.__select__ & is_instance('Folder', 'File', 'Image', 'Comment')
- events = ('after_add_entity',)
- def __call__(self):
- hook.set_operation(self._cw, 'pending_visibility', self.entity.eid,
- SetVisibilityOp)
-
- class SetParentVisibilityHook(hook.Hook):
- __regid__ = 'sytweb.setparentvisibility'
- __select__ = hook.Hook.__select__ & hook.match_rtype('filed_under', 'comments')
- events = ('after_add_relation',)
-
- def __call__(self):
- parent = self._cw.entity_from_eid(self.eidto)
- child = self._cw.entity_from_eid(self.eidfrom)
- if child.visibility == 'parent':
- child.set_attributes(visibility=parent.visibility)
-
-Notice:
-
-* hooks are application objects, hence have selectors that should match entity or
- relation types to which the hook applies. To match a relation type, we use the
- hook specific `match_rtype` selector.
-
-* usage of `set_operation`: instead of adding an operation for each added entity,
- set_operation allows to create a single one and to store entity's eids to be
- processed in session's transaction data. This is a good pratice to avoid heavy
- operations manipulation cost when creating a lot of entities in the same
- transaction.
-
-* the `precommit_event` method of the operation will be called at transaction's
- commit time.
-
-* in a hook, `self._cw` is the repository session, not a web request as usually
- in views
-
-* according to hook's event, you have access to different attributes on the hook
- instance. Here:
-
- - `self.entity` is the newly added entity on 'after_add_entity' events
-
- - `self.eidfrom` / `self.eidto` are the eid of the subject / object entity on
- 'after_add_relatiohn' events (you may also get the relation type using
- `self.rtype`)
-
-The `parent` visibility value is used to tell "propagate using parent security"
-because we want that attribute to be required, so we can't use None value else
-we'll get an error before we get any chance to propagate...
-
-Now, we also want to propagate the `may_be_read_by` relation. Fortunately,
-CubicWeb provides some base hook classes for such things, so we only have to add
-the following code to *hooks.py*:
-
-.. sourcecode:: python
-
- # relations where the "parent" entity is the subject
- S_RELS = set()
- # relations where the "parent" entity is the object
- O_RELS = set(('filed_under', 'comments',))
-
- class AddEntitySecurityPropagationHook(hook.PropagateSubjectRelationHook):
- """propagate permissions when new entity are added"""
- __regid__ = 'sytweb.addentity_security_propagation'
- __select__ = (hook.PropagateSubjectRelationHook.__select__
- & hook.match_rtype_sets(S_RELS, O_RELS))
- main_rtype = 'may_be_read_by'
- subject_relations = S_RELS
- object_relations = O_RELS
-
- class AddPermissionSecurityPropagationHook(hook.PropagateSubjectRelationAddHook):
- """propagate permissions when new entity are added"""
- __regid__ = 'sytweb.addperm_security_propagation'
- __select__ = (hook.PropagateSubjectRelationAddHook.__select__
- & hook.match_rtype('may_be_read_by',))
- subject_relations = S_RELS
- object_relations = O_RELS
-
- class DelPermissionSecurityPropagationHook(hook.PropagateSubjectRelationDelHook):
- __regid__ = 'sytweb.delperm_security_propagation'
- __select__ = (hook.PropagateSubjectRelationDelHook.__select__
- & hook.match_rtype('may_be_read_by',))
- subject_relations = S_RELS
- object_relations = O_RELS
-
-* the `AddEntitySecurityPropagationHook` will propagate the relation
- when `filed_under` or `comments` relations are added
-
- - the `S_RELS` and `O_RELS` set as well as the `match_rtype_sets` selector are
- used here so that if my cube is used by another one, it'll be able to
- configure security propagation by simply adding relation to one of the two
- sets.
-
-* the two others will propagate permissions changes on parent entities to
- children entities
-
-
-.. _adv_tuto_tesing_security:
-
-Step 3: testing our security
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Security is tricky. Writing some tests for it is a very good idea. You should
-even write them first, as Test Driven Development recommends!
-
-Here is a small test case that will check the basis of our security
-model, in *test/unittest_sytweb.py*:
-
-.. sourcecode:: python
-
- from cubicweb.devtools.testlib import CubicWebTC
- from cubicweb import Binary
-
- class SecurityTC(CubicWebTC):
-
- def test_visibility_propagation(self):
- # create a user for later security checks
- toto = self.create_user('toto')
- # init some data using the default manager connection
- req = self.request()
- folder = req.create_entity('Folder',
- name=u'restricted',
- visibility=u'restricted')
- photo1 = req.create_entity('Image',
- data_name=u'photo1.jpg',
- data=Binary('xxx'),
- filed_under=folder)
- self.commit()
- photo1.clear_all_caches() # good practice, avoid request cache effects
- # visibility propagation
- self.assertEquals(photo1.visibility, 'restricted')
- # unless explicitly specified
- photo2 = req.create_entity('Image',
- data_name=u'photo2.jpg',
- data=Binary('xxx'),
- visibility=u'public',
- filed_under=folder)
- self.commit()
- self.assertEquals(photo2.visibility, 'public')
- # test security
- self.login('toto')
- req = self.request()
- self.assertEquals(len(req.execute('Image X')), 1) # only the public one
- self.assertEquals(len(req.execute('Folder X')), 0) # restricted...
- # may_be_read_by propagation
- self.restore_connection()
- folder.set_relations(may_be_read_by=toto)
- self.commit()
- photo1.clear_all_caches()
- self.failUnless(photo1.may_be_read_by)
- # test security with permissions
- self.login('toto')
- req = self.request()
- self.assertEquals(len(req.execute('Image X')), 2) # now toto has access to photo2
- self.assertEquals(len(req.execute('Folder X')), 1) # and to restricted folder
-
- if __name__ == '__main__':
- from logilab.common.testlib import unittest_main
- unittest_main()
-
-It's not complete, but show most things you'll want to do in tests: adding some
-content, creating users and connecting as them in the test, etc...
-
-To run it type:
-
-.. sourcecode:: bash
-
- $ pytest unittest_sytweb.py
- ======================== unittest_sytweb.py ========================
- -> creating tables [....................]
- -> inserting default user and default groups.
- -> storing the schema in the database [....................]
- -> database for instance data initialized.
- .
- ----------------------------------------------------------------------
- Ran 1 test in 22.547s
-
- OK
-
-
-The first execution is taking time, since it creates a sqlite database for the
-test instance. The second one will be much quicker:
-
-.. sourcecode:: bash
-
- $ pytest unittest_sytweb.py
- ======================== unittest_sytweb.py ========================
- .
- ----------------------------------------------------------------------
- Ran 1 test in 2.662s
-
- OK
-
-If you do some changes in your schema, you'll have to force regeneration of that
-database. You do that by removing the tmpdb files before running the test: ::
-
- $ rm tmpdb*
-
-
-.. Note::
- pytest is a very convenient utility used to control test execution. It is available from the `logilab-common`_ package.
-
-.. _`logilab-common`: http://www.logilab.org/project/logilab-common
-
-.. _adv_tuto_migration_script:
-
-Step 4: writing the migration script and migrating the instance
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Prior to those changes, I created an instance, feeded it with some data, so I
-don't want to create a new one, but to migrate the existing one. Let's see how to
-do that.
-
-Migration commands should be put in the cube's *migration* directory, in a
-file named file:`_Any.py` ('Any' being there mostly for historical reason).
-
-Here I'll create a *migration/0.2.0_Any.py* file containing the following
-instructions:
-
-.. sourcecode:: python
-
- add_relation_type('may_be_read_by')
- add_relation_type('visibility')
- sync_schema_props_perms()
-
-Then I update the version number in cube's *__pkginfo__.py* to 0.2.0. And
-that's it! Those instructions will:
-
-* update the instance's schema by adding our two new relations and update the
- underlying database tables accordingly (the two first instructions)
-
-* update schema's permissions definition (the last instruction)
-
-
-To migrate my instance I simply type::
-
- cubicweb-ctl upgrade sytweb
-
-I'll then be asked some questions to do the migration step by step. You should say
-YES when it asks if a backup of your database should be done, so you can get back
-to initial state if anything goes wrong...
-
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/tutorials/advanced/part01_create-cube.rst
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/book/en/tutorials/advanced/part01_create-cube.rst Fri Mar 11 09:46:45 2011 +0100
@@ -0,0 +1,155 @@
+.. _TutosPhotoWebSiteCubeCreation:
+
+Cube creation and schema definition
+-----------------------------------
+
+.. _adv_tuto_create_new_cube:
+
+Step 1: creating a new cube for my web site
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+One note about my development environment: I wanted to use the packaged
+version of CubicWeb and cubes while keeping my cube in my user
+directory, let's say `~/src/cubes`. I achieve this by setting the
+following environment variables::
+
+ CW_CUBES_PATH=~/src/cubes
+ CW_MODE=user
+
+I can now create the cube which will hold custom code for this web
+site using::
+
+ cubicweb-ctl newcube --directory=~/src/cubes sytweb
+
+
+.. _adv_tuto_assemble_cubes:
+
+Step 2: pick building blocks into existing cubes
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Almost everything I want to handle in my web-site is somehow already modeled in
+existing cubes that I'll extend for my needs. So I'll pick the following cubes:
+
+* `folder`, containing the `Folder` entity type, which will be used as
+ both 'album' and a way to map file system folders. Entities are
+ added to a given folder using the `filed_under` relation.
+
+* `file`, containing the `File` entity type, a gallery view, and a file system
+  import utility.
+
+* `zone`, containing the `Zone` entity type for hierarchical geographical
+ zones. Entities (including sub-zones) are added to a given zone using the
+ `situated_in` relation.
+
+* `person`, containing the `Person` entity type plus some basic views.
+
+* `comment`, providing a full commenting system allowing one to comment on entity
+  types supporting the `comments` relation by adding a `Comment` entity.
+
+* `tag`, providing a full tagging system as an easy and powerful way to classify
+  entities supporting the `tags` relation by linking them to `Tag` entities. This
+  will allow navigation through a large number of pictures.
+
+Ok, now I'll declare that my cube requires all this by editing :file:`cubes/sytweb/__pkginfo__.py`:
+
+ .. sourcecode:: python
+
+ __depends__ = {'cubicweb': '>= 3.10.0',
+ 'cubicweb-file': '>= 1.9.0',
+ 'cubicweb-folder': '>= 1.1.0',
+ 'cubicweb-person': '>= 1.2.0',
+ 'cubicweb-comment': '>= 1.2.0',
+ 'cubicweb-tag': '>= 1.2.0',
+ 'cubicweb-zone': None}
+
+Notice that you can express the minimal version of a cube that should be used,
+`None` meaning whatever version is available. All packages whose name starts with
+'cubicweb-' will be recognized as cubes, not bare python packages. You can still
+specify this explicitly using the `__depends_cubes__` dictionary instead, which
+should contain cube names without the prefix. The dependencies above would then
+be written as:
+
+ .. sourcecode:: python
+
+ __depends__ = {'cubicweb': '>= 3.10.0'}
+ __depends_cubes__ = {'file': '>= 1.9.0',
+ 'folder': '>= 1.1.0',
+ 'person': '>= 1.2.0',
+ 'comment': '>= 1.2.0',
+ 'tag': '>= 1.2.0',
+ 'zone': None}
+
+If your cube is packaged for debian, it's a good idea to update the
+`debian/control` file at the same time, so you won't forget it.
+
+
+Step 3: glue everything together in my cube's schema
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. sourcecode:: python
+
+ from yams.buildobjs import RelationDefinition
+
+ class comments(RelationDefinition):
+ subject = 'Comment'
+ object = 'File'
+ cardinality = '1*'
+ composite = 'object'
+
+ class tags(RelationDefinition):
+ subject = 'Tag'
+ object = 'File'
+
+ class filed_under(RelationDefinition):
+ subject = 'File'
+ object = 'Folder'
+
+ class situated_in(RelationDefinition):
+ subject = 'File'
+ object = 'Zone'
+
+ class displayed_on(RelationDefinition):
+ subject = 'Person'
+ object = 'File'
+
+
+This schema:
+
+* allows commenting and tagging on the `File` entity type by adding the `comments`
+  and `tags` relations. This should be all we have to do for this feature since the
+  related cubes provide 'pluggable sections' which are automatically displayed on
+  the primary view of entity types supporting the relation.
+
+* adds a `situated_in` relation definition so that image entities can be
+ geolocalized.
+
+* adds a new `displayed_on` relation telling who can be seen on a
+  picture.
+
+This schema will probably have to evolve as time goes by (for security handling at
+least), but since letting a schema evolve is one of CubicWeb's features (and
+goals), we won't worry about it for now and will deal with that later when needed.
+
+
+Step 4: creating the instance
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Now that I have a schema, I want to create an instance. To
+do so using this new 'sytweb' cube, I run::
+
+ cubicweb-ctl create sytweb sytweb_instance
+
+Hint: if you get an error while the database is initialized, you can
+avoid having to answer the questions again by running::
+
+ cubicweb-ctl db-create sytweb_instance
+
+This will use your already configured instance and start directly from the create
+database step, thus skipping questions asked by the 'create' command.
+
+Once the instance and database are fully initialized, run ::
+
+ cubicweb-ctl start sytweb_instance
+
+to start the instance, check that you can connect to it, etc...
+
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/tutorials/advanced/part02_security.rst
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/book/en/tutorials/advanced/part02_security.rst Fri Mar 11 09:46:45 2011 +0100
@@ -0,0 +1,440 @@
+.. _TutosPhotoWebSiteSecurity:
+
+Security, testing and migration
+-------------------------------
+
+This part will cover various topics:
+
+* configuring security
+* migrating an existing instance
+* writing some unit tests
+
+Here is the ``read`` security model I want:
+
+* folders, files, images and comments should have one of the following visibility settings:
+
+ - ``public``, everyone can see it
+ - ``authenticated``, only authenticated users can see it
+ - ``restricted``, only a subset of authenticated users can see it
+
+* managers (e.g. me) can see everything
+* only authenticated users can see people
+* everyone can see classifier entities, such as tag and zone
+
+Also, unless explicitly specified, the visibility of an image should be the same as
+that of its parent folder, and the visibility of a comment should be the same as
+that of the commented entity. If there is no parent entity, the default visibility is
+``authenticated``.
+
+Regarding write security, that's much easier:
+
+* anonymous users can't write anything
+* authenticated users can only add comments
+* managers will add the remaining stuff
+
+Now, let's implement that!
+
+Proper security in CubicWeb is done at the schema level, so you don't have to
+bother with it in views: users will only see what they can see automatically.
+
+.. _adv_tuto_security:
+
+Step 1: configuring security into the schema
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+In the schema, you can grant access according to groups, or to some RQL expressions:
+users get access if the expression returns some results. To implement the read
+security defined earlier, groups are not enough, we'll need some RQL expressions. Here
+is the idea:
+
+* add a `visibility` attribute on Folder, File and Comment, which may be one of
+  the values explained above
+
+* add a `may_be_read_by` relation from Folder, File and Comment to users,
+ which will define who can see the entity
+
+* security propagation will be done in hooks.
+
+So the first thing to do is to modify my cube's schema.py to define those
+relations:
+
+.. sourcecode:: python
+
+ from yams.constraints import StaticVocabularyConstraint
+
+ class visibility(RelationDefinition):
+ subject = ('Folder', 'File', 'Comment')
+ object = 'String'
+ constraints = [StaticVocabularyConstraint(('public', 'authenticated',
+ 'restricted', 'parent'))]
+ default = 'parent'
+ cardinality = '11' # required
+
+ class may_be_read_by(RelationDefinition):
+ __permissions__ = {
+ 'read': ('managers', 'users'),
+ 'add': ('managers',),
+ 'delete': ('managers',),
+ }
+
+ subject = ('Folder', 'File', 'Comment',)
+ object = 'CWUser'
+
+We can note the following points:
+
+* we've added a new `visibility` attribute to folder, file and comment
+ using a `RelationDefinition`
+
+* `cardinality = '11'` means this attribute is required. This is usually hidden
+  by the `required` argument given to the `String` constructor, but since we use
+  an explicit `RelationDefinition` here we have to set it ourselves (same thing for
+  `StaticVocabularyConstraint`, which is usually hidden by the `vocabulary`
+  argument); an equivalent shortcut form is sketched after this list
+
+* the `parent` possible value will be used for visibility propagation
+
+* remember to secure the `may_be_read_by` permissions, else any user could add or
+  delete that relation by default, which would somewhat break our security model...
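+
+For comparison, had `visibility` been an attribute of a brand new entity type of
+our own (instead of being added to entity types coming from other cubes), the
+usual shortcut form would express the same thing. Here is a sketch, with a
+made-up entity type:
+
+.. sourcecode:: python
+
+    from yams.buildobjs import EntityType, String
+
+    class Snapshot(EntityType):  # hypothetical entity type
+        # equivalent to the RelationDefinition above: a required attribute
+        # with a fixed vocabulary and a default value
+        visibility = String(required=True, default='parent',
+                            vocabulary=('public', 'authenticated',
+                                        'restricted', 'parent'))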
+
+Now, we should be able to define security rules in the schema, based on this new
+attribute and relation. Here is the code to add to *schema.py*:
+
+.. sourcecode:: python
+
+ from cubicweb.schema import ERQLExpression
+
+ VISIBILITY_PERMISSIONS = {
+ 'read': ('managers',
+ ERQLExpression('X visibility "public"'),
+ ERQLExpression('X may_be_read_by U')),
+ 'add': ('managers',),
+ 'update': ('managers', 'owners',),
+ 'delete': ('managers', 'owners'),
+ }
+ AUTH_ONLY_PERMISSIONS = {
+ 'read': ('managers', 'users'),
+ 'add': ('managers',),
+ 'update': ('managers', 'owners',),
+ 'delete': ('managers', 'owners'),
+ }
+ CLASSIFIERS_PERMISSIONS = {
+ 'read': ('managers', 'users', 'guests'),
+ 'add': ('managers',),
+ 'update': ('managers', 'owners',),
+ 'delete': ('managers', 'owners'),
+ }
+
+ from cubes.folder.schema import Folder
+ from cubes.file.schema import File
+ from cubes.comment.schema import Comment
+ from cubes.person.schema import Person
+ from cubes.zone.schema import Zone
+ from cubes.tag.schema import Tag
+
+ Folder.__permissions__ = VISIBILITY_PERMISSIONS
+ File.__permissions__ = VISIBILITY_PERMISSIONS
+ Comment.__permissions__ = VISIBILITY_PERMISSIONS.copy()
+ Comment.__permissions__['add'] = ('managers', 'users',)
+ Person.__permissions__ = AUTH_ONLY_PERMISSIONS
+ Zone.__permissions__ = CLASSIFIERS_PERMISSIONS
+ Tag.__permissions__ = CLASSIFIERS_PERMISSIONS
+
+What's important in there:
+
+* `VISIBILITY_PERMISSIONS` provides read access to the managers group, to anyone if
+  the `visibility` attribute's value is 'public', or to the user (designated by the
+  'U' variable in the expression) if he is linked to the entity (the 'X' variable)
+  through the `may_be_read_by` relation
+
+* we modify permissions of the entity types we use by importing them and
+ modifying their `__permissions__` attribute
+
+* notice the `.copy()`: we only want to modify 'add' permission for `Comment`,
+ not for all entity types using `VISIBILITY_PERMISSIONS`!
+
+* the remaining part of the security model is done using regular groups:
+
+ - `users` is the group to which all authenticated users will belong
+ - `guests` is the group of anonymous users
+
+
+.. _adv_tuto_security_propagation:
+
+Step 2: security propagation in hooks
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+To fulfill the requirements, we have to implement::
+
+    Also, unless explicitly specified, visibility of an image should be the same as
+ its parent folder, as well as visibility of a comment should be the same as the
+ commented entity.
+
+This kind of `active` rule will be done using CubicWeb's hook
+system. Hooks are triggered on database event such as addition of new
+entity or relation.
+
+The tricky part of the requirement is in *unless explicitly specified*, notably
+because when the entity is added, we don't know its 'parent'
+entity yet (e.g. the Folder of a File, or the File commented on by a Comment). To
+handle such things, CubicWeb provides `Operation`, which allows scheduling things
+to do at commit time.
+
+In our case we will:
+
+* on entity creation, schedule an operation that will set default visibility
+
+* when a "parent" relation is added, propagate parent's visibility unless the
+ child already has a visibility set
+
+Here is the code in cube's *hooks.py*:
+
+.. sourcecode:: python
+
+ from cubicweb.selectors import is_instance
+ from cubicweb.server import hook
+
+ class SetVisibilityOp(hook.Operation):
+ def precommit_event(self):
+ for eid in self.session.transaction_data.pop('pending_visibility'):
+ entity = self.session.entity_from_eid(eid)
+ if entity.visibility == 'parent':
+ entity.set_attributes(visibility=u'authenticated')
+
+ class SetVisibilityHook(hook.Hook):
+ __regid__ = 'sytweb.setvisibility'
+ __select__ = hook.Hook.__select__ & is_instance('Folder', 'File', 'Comment')
+ events = ('after_add_entity',)
+ def __call__(self):
+ hook.set_operation(self._cw, 'pending_visibility', self.entity.eid,
+ SetVisibilityOp)
+
+ class SetParentVisibilityHook(hook.Hook):
+ __regid__ = 'sytweb.setparentvisibility'
+ __select__ = hook.Hook.__select__ & hook.match_rtype('filed_under', 'comments')
+ events = ('after_add_relation',)
+
+ def __call__(self):
+ parent = self._cw.entity_from_eid(self.eidto)
+ child = self._cw.entity_from_eid(self.eidfrom)
+ if child.visibility == 'parent':
+ child.set_attributes(visibility=parent.visibility)
+
+Notice:
+
+* hooks are application objects, hence have selectors that should match entity or
+ relation types to which the hook applies. To match a relation type, we use the
+ hook specific `match_rtype` selector.
+
+* usage of `set_operation`: instead of adding an operation for each added entity,
+  set_operation allows creating a single one and storing the entities' eids to be
+  processed in the session's transaction data. This is a good practice to avoid the
+  cost of manipulating many operations when creating a lot of entities in the same
+  transaction.
+
+* the `precommit_event` method of the operation will be called at transaction's
+ commit time.
+
+* in a hook, `self._cw` is the repository session, not a web request as is usual
+  in views
+
+* according to the hook's event, you have access to different attributes on the hook
+ instance. Here:
+
+ - `self.entity` is the newly added entity on 'after_add_entity' events
+
+  - `self.eidfrom` / `self.eidto` are the eids of the subject / object entities on
+    'after_add_relation' events (you may also get the relation type using
+    `self.rtype`)
+
+The `parent` visibility value is used to tell "propagate using parent security"
+because we want that attribute to be required, so we can't use a None value, else
+we'll get an error before we get any chance to propagate...
+
+Now, we also want to propagate the `may_be_read_by` relation. Fortunately,
+CubicWeb provides some base hook classes for such things, so we only have to add
+the following code to *hooks.py*:
+
+.. sourcecode:: python
+
+ # relations where the "parent" entity is the subject
+ S_RELS = set()
+ # relations where the "parent" entity is the object
+ O_RELS = set(('filed_under', 'comments',))
+
+ class AddEntitySecurityPropagationHook(hook.PropagateSubjectRelationHook):
+ """propagate permissions when new entity are added"""
+ __regid__ = 'sytweb.addentity_security_propagation'
+ __select__ = (hook.PropagateSubjectRelationHook.__select__
+ & hook.match_rtype_sets(S_RELS, O_RELS))
+ main_rtype = 'may_be_read_by'
+ subject_relations = S_RELS
+ object_relations = O_RELS
+
+ class AddPermissionSecurityPropagationHook(hook.PropagateSubjectRelationAddHook):
+ """propagate permissions when new entity are added"""
+ __regid__ = 'sytweb.addperm_security_propagation'
+ __select__ = (hook.PropagateSubjectRelationAddHook.__select__
+ & hook.match_rtype('may_be_read_by',))
+ subject_relations = S_RELS
+ object_relations = O_RELS
+
+ class DelPermissionSecurityPropagationHook(hook.PropagateSubjectRelationDelHook):
+ __regid__ = 'sytweb.delperm_security_propagation'
+ __select__ = (hook.PropagateSubjectRelationDelHook.__select__
+ & hook.match_rtype('may_be_read_by',))
+ subject_relations = S_RELS
+ object_relations = O_RELS
+
+* the `AddEntitySecurityPropagationHook` will propagate the relation
+ when `filed_under` or `comments` relations are added
+
+  - the `S_RELS` and `O_RELS` sets as well as the `match_rtype_sets` selector are
+    used here so that if my cube is used by another one, it will be able to
+    configure security propagation by simply adding relations to one of the two
+    sets (see the sketch after this list).
+
+* the two others will propagate permission changes on parent entities to their
+  child entities
+
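+As an illustration of that extension point, another cube depending on this one
+could simply do something like the following in its own *hooks.py* (a sketch:
+the 'sytblog' cube and its 'in_blog' relation are made up):
+
+.. sourcecode:: python
+
+    # hypothetical cubes/sytblog/hooks.py
+    from cubes.sytweb import hooks as sytweb_hooks
+
+    # entities linked to a blog through 'in_blog' now get the same
+    # may_be_read_by propagation as entities filed under a folder
+    sytweb_hooks.O_RELS.add('in_blog')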
+
+.. _adv_tuto_tesing_security:
+
+Step 3: testing our security
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Security is tricky. Writing some tests for it is a very good idea. You should
+even write them first, as Test Driven Development recommends!
+
+Here is a small test case that will check the basis of our security
+model, in *test/unittest_sytweb.py*:
+
+.. sourcecode:: python
+
+ from cubicweb.devtools.testlib import CubicWebTC
+ from cubicweb import Binary
+
+ class SecurityTC(CubicWebTC):
+
+ def test_visibility_propagation(self):
+ # create a user for later security checks
+ toto = self.create_user('toto')
+ # init some data using the default manager connection
+ req = self.request()
+ folder = req.create_entity('Folder',
+ name=u'restricted',
+ visibility=u'restricted')
+ photo1 = req.create_entity('File',
+ data_name=u'photo1.jpg',
+ data=Binary('xxx'),
+ filed_under=folder)
+ self.commit()
+ photo1.clear_all_caches() # good practice, avoid request cache effects
+ # visibility propagation
+ self.assertEquals(photo1.visibility, 'restricted')
+ # unless explicitly specified
+ photo2 = req.create_entity('File',
+ data_name=u'photo2.jpg',
+ data=Binary('xxx'),
+ visibility=u'public',
+ filed_under=folder)
+ self.commit()
+ self.assertEquals(photo2.visibility, 'public')
+ # test security
+ self.login('toto')
+ req = self.request()
+ self.assertEquals(len(req.execute('File X')), 1) # only the public one
+ self.assertEquals(len(req.execute('Folder X')), 0) # restricted...
+ # may_be_read_by propagation
+ self.restore_connection()
+ folder.set_relations(may_be_read_by=toto)
+ self.commit()
+ photo1.clear_all_caches()
+ self.failUnless(photo1.may_be_read_by)
+ # test security with permissions
+ self.login('toto')
+ req = self.request()
+ self.assertEquals(len(req.execute('File X')), 2) # now toto has access to photo2
+ self.assertEquals(len(req.execute('Folder X')), 1) # and to restricted folder
+
+ if __name__ == '__main__':
+ from logilab.common.testlib import unittest_main
+ unittest_main()
+
+It's not complete, but it shows most of the things you'll want to do in tests:
+adding some content, creating users and connecting as them in the test, etc...
+
+To run it, type:
+
+.. sourcecode:: bash
+
+ $ pytest unittest_sytweb.py
+ ======================== unittest_sytweb.py ========================
+ -> creating tables [....................]
+ -> inserting default user and default groups.
+ -> storing the schema in the database [....................]
+ -> database for instance data initialized.
+ .
+ ----------------------------------------------------------------------
+ Ran 1 test in 22.547s
+
+ OK
+
+
+The first execution takes some time, since it creates a sqlite database for the
+test instance. The second one will be much quicker:
+
+.. sourcecode:: bash
+
+ $ pytest unittest_sytweb.py
+ ======================== unittest_sytweb.py ========================
+ .
+ ----------------------------------------------------------------------
+ Ran 1 test in 2.662s
+
+ OK
+
+If you make some changes to your schema, you'll have to force regeneration of that
+database. You do that by removing the tmpdb files before running the test::
+
+ $ rm data/tmpdb*
+
+
+.. Note::
+ pytest is a very convenient utility used to control test execution. It is available from the `logilab-common`_ package.
+
+.. _`logilab-common`: http://www.logilab.org/project/logilab-common
+
+.. _adv_tuto_migration_script:
+
+Step 4: writing the migration script and migrating the instance
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Prior to those changes, I had created an instance and fed it with some data, so I
+don't want to create a new one, but to migrate the existing one. Let's see how to
+do that.
+
+Migration commands should be put in the cube's *migration* directory, in a
+file named :file:`<version>_Any.py` ('Any' being there mostly for historical reasons).
+
+Here I'll create a *migration/0.2.0_Any.py* file containing the following
+instructions:
+
+.. sourcecode:: python
+
+ add_relation_type('may_be_read_by')
+ add_relation_type('visibility')
+ sync_schema_props_perms()
+
+Then I update the version number in the cube's *__pkginfo__.py* to 0.2.0 (see the
+sketch below). And that's it! Those instructions will:
+
+* update the instance's schema by adding our two new relations and update the
+  underlying database tables accordingly (the first two instructions)
+
+* update schema's permissions definition (the last instruction)
+
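+The version bump itself is typically the usual two lines near the top of
+*__pkginfo__.py* (a sketch matching the layout generated by `cubicweb-ctl
+newcube`):
+
+.. sourcecode:: python
+
+    numversion = (0, 2, 0)
+    version = '.'.join(str(num) for num in numversion)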
+
+To migrate my instance I simply type::
+
+ cubicweb-ctl upgrade sytweb
+
+You'll then be asked some questions to do the migration step by step. You should say
+YES when it asks if a backup of your database should be done, so you can get back
+to the initial state if anything goes wrong...
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/tutorials/advanced/part03_bfss.rst
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/book/en/tutorials/advanced/part03_bfss.rst Fri Mar 11 09:46:45 2011 +0100
@@ -0,0 +1,131 @@
+Storing images on the file-system
+---------------------------------
+
+Step 1: configuring the BytesFileSystem storage
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+To avoid cluttering my database, and to ease file manipulation, I don't want files
+to be stored in the database. I want to be able to create File entities for some
+files on the server file system, where those files will be accessed to get the
+entities' data. To do so, I have to set a custom :class:`BytesFileSystemStorage`
+storage for the File 'data' attribute, which holds the actual file content.
+
+Since the function to register a custom storage needs to have a repository
+instance as its first argument, we have to call it in a server startup hook. So I
+added the following to `cubes/sytweb/hooks.py`:
+
+.. sourcecode:: python
+
+ from os import makedirs
+ from os.path import join, exists
+
+ from cubicweb.server import hook
+    from cubicweb.server.sources import storages
+
+ class ServerStartupHook(hook.Hook):
+ __regid__ = 'sytweb.serverstartup'
+ events = ('server_startup', 'server_maintenance')
+
+ def __call__(self):
+ bfssdir = join(self.repo.config.appdatahome, 'bfss')
+ if not exists(bfssdir):
+ makedirs(bfssdir)
+ print 'created', bfssdir
+ storage = storages.BytesFileSystemStorage(bfssdir)
+            storages.set_attribute_storage(self.repo, 'File', 'data', storage)
+
+.. Note::
+
+  * how we built the hook's registry identifier (`__regid__`): you can introduce
+    'namespaces' by using python module-like naming identifiers there. This is
+    especially important for hooks, where you usually want a new custom hook, not
+    one overriding / specializing an existing one, but the concept may be applied
+    to any application objects
+
+  * we catch two events here: "server_startup" and "server_maintenance". The first
+    is called on regular repository startup (e.g. as a server), the other for
+    maintenance tasks such as shell or upgrade. In both cases, we need to have
+    the storage set, else we'll be in trouble...
+
+  * the path given to the storage is the place where files added through the ui
+    (or in the database before migration) will be located
+
+  * beware that by doing this, you can no longer write queries that try to
+    restrict on the File `data` attribute. Hopefully we don't usually do that
+    on a file's content, or more generally on attributes of the Bytes type
+
+Now, if you've already added some photos through the web ui, you'll have to
+migrate existing data so that the files' content is stored on the file-system
+instead of the database. There is a migration command to do so; let's run it in the
+cubicweb shell (in real life, you'd put it in a migration script as we saw in the
+previous part; a sketch follows the shell session):
+
+::
+
+ $ cubicweb-ctl shell sytweb
+ entering the migration python shell
+ just type migration commands or arbitrary python code and type ENTER to execute it
+ type "exit" or Ctrl-D to quit the shell and resume operation
+ >>> storage_changed('File', 'data')
+ [........................]
+
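+For the record, in a versioned migration script (e.g. a hypothetical
+*migration/0.3.0_Any.py*), the same operation boils down to a single
+instruction:
+
+.. sourcecode:: python
+
+    storage_changed('File', 'data')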
+
+That's it. Now, files added through the web ui will have their content stored on
+the file-system, and you'll also be able to import files from the file-system as
+explained in the next part.
+
+Step 2: importing some data into the instance
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Hey, we're starting to have some nice features, let's give this new web
+site a try. For instance, if I have a 'photos/201005WePyrenees' directory containing
+pictures for a particular event, I can import it into my web site by typing ::
+
+ $ cubicweb-ctl fsimport -F sytweb photos/201005WePyrenees/
+ ** importing directory /home/syt/photos/201005WePyrenees
+ importing IMG_8314.JPG
+ importing IMG_8274.JPG
+ importing IMG_8286.JPG
+ importing IMG_8308.JPG
+ importing IMG_8304.JPG
+
+.. Note::
+  The -F option tells that folders should be mapped, hence my photos will all
+  end up under a Folder entity corresponding to the file-system folder.
+
+Let's take a look at the web ui:
+
+.. image:: ../../images/tutos-photowebsite_ui1.png
+
+Nothing different, I can't see the new folder... But remember our security model!
+By default, files are only accessible to authenticated users, and I'm looking at
+the site as anonymous, i.e. not authenticated. If I log in, I can now see:
+
+.. image:: ../../images/tutos-photowebsite_ui2.png
+
+Yeah, it's there! You can also notice that I can see some entities as well as
+folders and images the anonymous user can't. It just works **everywhere in the
+ui** since it's handled at the repository level, thanks to our security model.
+
+Now if I click on the newly inserted folder, I can see
+
+.. image:: ../../images/tutos-photowebsite_ui3.png
+
+Great! My pictures are even there in the folder. I can now give this folder a
+nicer name (provided I don't intend to import from it anymore, else already
+imported photos would be reimported), change permissions, set titles for some
+pictures, etc... Having good content is much more difficult than having a good
+web site ;)
+
+
+Conclusion
+~~~~~~~~~~
+
+We started to see here an advanced feature of our repository: the ability
+to store some parts of our data model in a custom storage, outside the
+database. There is currently only the :class:`BytesFileSystemStorage` available,
+but you can expect to see more in the near future (or write your own!).
+
+Also, we can now start to feed our web site with some nice pictures!
+The site isn't perfect (far from it actually), but it's usable, and we can
+now start using it and improving it along the way. The Incremental Cubic Way :)
diff -r 48f468f33704 -r e4580e5f0703 doc/book/en/tutorials/advanced/part04_ui-base.rst
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/book/en/tutorials/advanced/part04_ui-base.rst Fri Mar 11 09:46:45 2011 +0100
@@ -0,0 +1,365 @@
+Let's make it more user friendly
+================================
+
+
+Step 1: let's improve site's usability for our visitors
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The first thing I've noticed is that people to whom I send links to photos with
+some login/password authentication get lost, because they don't grasp that they
+have to log in by clicking on the 'authenticate' link. That's most probably because
+they only get a 404 when trying to access an unauthorized folder, and the site
+doesn't make it clear that 1. you're not authenticated, and 2. you could get more
+content by authenticating yourself.
+
+So, to improve this situation, I decided that I should:
+
+* make a login box appear for anonymous users, so they see at first glance a place
+  to put the login / password information I provided
+
+* customize the 404 page, inviting anonymous users to log in.
+
+Here is the code, samples from my cube's `views.py` file:
+
+.. sourcecode:: python
+
+    from cubicweb.selectors import is_instance, anonymous_user
+ from cubicweb.web import component
+ from cubicweb.web.views import error
+
+ class FourOhFour(error.FourOhFour):
+ __select__ = error.FourOhFour.__select__ & anonymous_user()
+
+ def call(self):
+ self.w(u"
\n'))
+
+ def test_rql_role_without_vid(self):
+ context = self.context()
+ out = rest_publish(context, ':rql:`Any X WHERE X is CWUser`')
+ self.assertEqual(out, u'
Dieses Schema des Datenmodells enthält<>keine Meta-Daten, aber Sie "
+"können ein vollständiges Schema mit Meta-Daten anzeigen."
+"div>"
+
+msgid ""
+msgstr ""
+
+msgid "?*"
+msgstr "0..1 0..n"
+
+msgid "?+"
+msgstr "0..1 1..n"
+
+msgid "?1"
+msgstr "0..1 1"
+
+msgid "??"
+msgstr "0..1 0..1"
+
+msgid "AND"
+msgstr "UND"
+
+msgid "About this site"
+msgstr "Über diese Seite"
+
+msgid "Any"
+msgstr "irgendein"
+
+msgid "Attributes permissions:"
+msgstr "Rechte der Attribute"
+
+msgid "Attributes with non default permissions:"
+msgstr "Attribute mit nicht-standard-Berechtigungen"
+
+# schema pot file, generated on 2009-09-16 16:46:55
+#
+# singular and plural forms for each entity type
+msgid "BaseTransition"
+msgstr "Übergang (abstrakt)"
+
+msgid "BaseTransition_plural"
+msgstr "Übergänge (abstrakt)"
+
+msgid "Bookmark"
+msgstr "Lesezeichen"
+
+msgid "Bookmark_plural"
+msgstr "Lesezeichen"
+
+msgid "Boolean"
+msgstr "Boolean"
+
+msgid "Boolean_plural"
+msgstr "Booleans"
+
+msgid "BoundConstraint"
+msgstr "gebundene Einschränkung"
+
+msgid "BoundaryConstraint"
+msgstr "Rand-einschränkung"
+
+msgid "Browse by category"
+msgstr "nach Kategorien navigieren"
+
+msgid "Browse by entity type"
+msgstr "nach Identitätstyp navigieren"
+
+msgid "Bytes"
+msgstr "Bytes"
+
+msgid "Bytes_plural"
+msgstr "Bytes"
+
+msgid "CWAttribute"
+msgstr "Attribut"
+
+msgid "CWAttribute_plural"
+msgstr "Attribute"
+
+msgid "CWCache"
+msgstr "Cache"
+
+msgid "CWCache_plural"
+msgstr "Caches"
+
+msgid "CWConstraint"
+msgstr "Einschränkung"
+
+msgid "CWConstraintType"
+msgstr "Einschränkungstyp"
+
+msgid "CWConstraintType_plural"
+msgstr "Einschränkungstypen"
+
+msgid "CWConstraint_plural"
+msgstr "Einschränkungen"
+
+msgid "CWEType"
+msgstr "Entitätstyp"
+
+msgctxt "inlined:CWRelation.from_entity.subject"
+msgid "CWEType"
+msgstr "Entitätstyp"
+
+msgctxt "inlined:CWRelation.to_entity.subject"
+msgid "CWEType"
+msgstr "Entitätstyp"
+
+msgid "CWEType_plural"
+msgstr "Entitätstypen"
+
+msgid "CWGroup"
+msgstr "Gruppe"
+
+msgid "CWGroup_plural"
+msgstr "Gruppen"
+
+msgid "CWPermission"
+msgstr "Berechtigung"
+
+msgid "CWPermission_plural"
+msgstr "Berechtigungen"
+
+msgid "CWProperty"
+msgstr "Eigenschaft"
+
+msgid "CWProperty_plural"
+msgstr "Eigenschaften"
+
+msgid "CWRType"
+msgstr "Relationstyp"
+
+msgctxt "inlined:CWRelation.relation_type.subject"
+msgid "CWRType"
+msgstr "Relationstyp"
+
+msgid "CWRType_plural"
+msgstr "Relationstypen"
+
+msgid "CWRelation"
+msgstr "Relation"
+
+msgid "CWRelation_plural"
+msgstr "Relationen"
+
+msgid "CWSource"
+msgstr ""
+
+msgid "CWSourceHostConfig"
+msgstr ""
+
+msgid "CWSourceHostConfig_plural"
+msgstr ""
+
+msgid "CWSource_plural"
+msgstr ""
+
+msgid "CWUniqueTogetherConstraint"
+msgstr "unique-together-Einschränkung"
+
+msgid "CWUniqueTogetherConstraint_plural"
+msgstr "unique-together-Einschränkungen"
+
+msgid "CWUser"
+msgstr "Nutzer"
+
+msgid "CWUser_plural"
+msgstr "Nutzer"
+
+#, python-format
+msgid ""
+"Can't restore %(role)s relation %(rtype)s to entity %(eid)s which is already "
+"linked using this relation."
+msgstr ""
+"Kann die Relation %(role)s %(rtype)s zu einer Entität %(eid)s nicht wieder "
+"herstellen, die durch diese Relation bereits mit einer anderen Entität "
+"verbunden ist."
+
+#, python-format
+msgid ""
+"Can't restore relation %(rtype)s between %(subj)s and %(obj)s, that relation "
+"does not exists anymore in the schema."
+msgstr ""
+"Kann die Relation %(rtype)s zwischen %(subj)s und %(obj)s nicht wieder "
+"herstellen, diese Relation existiert nicht mehr in dem Schema."
+
+#, python-format
+msgid ""
+"Can't restore relation %(rtype)s of entity %(eid)s, this relation does not "
+"exists anymore in the schema."
+msgstr ""
+"Kann die Relation %(rtype)s der Entität %(eid)s nicht wieder herstellen, "
+"diese Relation existiert nicht mehr in dem Schema."
+
+#, python-format
+msgid ""
+"Can't restore relation %(rtype)s, %(role)s entity %(eid)s doesn't exist "
+"anymore."
+msgstr ""
+"Kann die Relation %(rtype)s nicht wieder herstellen, die Entität %(role)s "
+"%(eid)s existiert nicht mehr."
+
+#, python-format
+msgid ""
+"Can't undo addition of relation %(rtype)s from %(subj)s to %(obj)s, doesn't "
+"exist anymore"
+msgstr ""
+"Kann das Hinzufügen der Relation %(rtype)s von %(subj)s zu %(obj)s nicht "
+"rückgängig machen , diese Relation existiert nicht mehr."
+
+#, python-format
+msgid ""
+"Can't undo creation of entity %(eid)s of type %(etype)s, type no more "
+"supported"
+msgstr ""
+"Kann die Erstelllung der Entität %(eid)s vom Typ %(etype)s nicht rückgängig "
+"machen, dieser Typ existiert nicht mehr."
+
+#, python-format
+msgid "Data connection graph for %s"
+msgstr "Graf der Datenverbindungen für %s"
+
+msgid "Date"
+msgstr "Datum"
+
+msgid "Date_plural"
+msgstr "Daten"
+
+msgid "Datetime"
+msgstr "Datum und Uhrzeit"
+
+msgid "Datetime_plural"
+msgstr "Daten und Uhrzeiten"
+
+msgid "Decimal"
+msgstr "Dezimalzahl"
+
+msgid "Decimal_plural"
+msgstr "Dezimalzahlen"
+
+msgid "Do you want to delete the following element(s) ?"
+msgstr "Wollen Sie das/die folgend(n) Element(e) löschen?"
+
+msgid "Download schema as OWL"
+msgstr "Herunterladen des Schemas im OWL-Format"
+
+msgid "EmailAddress"
+msgstr "Email-Adresse"
+
+msgctxt "inlined:CWUser.use_email.subject"
+msgid "EmailAddress"
+msgstr "Email-Adresse"
+
+msgid "EmailAddress_plural"
+msgstr "Email-Adressen"
+
+msgid "Entities"
+msgstr "Entitäten"
+
+msgid "Entity types"
+msgstr "Entitätstypen"
+
+msgid "ExternalUri"
+msgstr "Externer Uri"
+
+msgid "ExternalUri_plural"
+msgstr "Externe Uris"
+
+msgid "Float"
+msgstr "Gleitkommazahl"
+
+msgid "Float_plural"
+msgstr "Gleitkommazahlen"
+
+# schema pot file, generated on 2009-12-03 09:22:35
+#
+# singular and plural forms for each entity type
+msgid "FormatConstraint"
+msgstr "Format-Einschränkung"
+
+msgid "From:"
+msgstr "Von:"
+
+msgid "Garbage collection information"
+msgstr "Information zur Speicherbereinigung"
+
+msgid "Got rhythm?"
+msgstr "Hast Du Rhythmus ?"
+
+msgid "Help"
+msgstr "Hilfe"
+
+msgid "Index"
+msgstr "Index"
+
+msgid "Instance"
+msgstr "Instanz"
+
+msgid "Int"
+msgstr "Ganzzahl"
+
+msgid "Int_plural"
+msgstr "Ganzzahlen"
+
+msgid "Interval"
+msgstr "Zeitraum"
+
+msgid "IntervalBoundConstraint"
+msgstr "interval-Einschränkung"
+
+msgid "Interval_plural"
+msgstr "Intervalle"
+
+msgid "Looked up classes"
+msgstr "gesuchte Klassen"
+
+msgid "Most referenced classes"
+msgstr "meist-referenzierte Klassen"
+
+msgid "New BaseTransition"
+msgstr "neuer Übergang (abstrakt)"
+
+msgid "New Bookmark"
+msgstr "Neues Lesezeichen"
+
+msgid "New CWAttribute"
+msgstr "Neue finale Relationsdefinition"
+
+msgid "New CWCache"
+msgstr "Neuer Anwendungs-Cache"
+
+msgid "New CWConstraint"
+msgstr "Neue Einschränkung"
+
+msgid "New CWConstraintType"
+msgstr "Neuer Einschränkungstyp"
+
+msgid "New CWEType"
+msgstr "Neuer Entitätstyp"
+
+msgid "New CWGroup"
+msgstr "Neue Gruppe"
+
+msgid "New CWPermission"
+msgstr "Neue Berechtigung"
+
+msgid "New CWProperty"
+msgstr "Neue Eigenschaft"
+
+msgid "New CWRType"
+msgstr "Neuer Relationstyp"
+
+msgid "New CWRelation"
+msgstr "Neue Relation"
+
+msgid "New CWSource"
+msgstr ""
+
+msgid "New CWSourceHostConfig"
+msgstr ""
+
+msgid "New CWUniqueTogetherConstraint"
+msgstr "Neue unique-together-Einschränkung"
+
+msgid "New CWUser"
+msgstr "Neuer Nutzer"
+
+msgid "New EmailAddress"
+msgstr "Neue Email-Adresse"
+
+msgid "New ExternalUri"
+msgstr "Neuer externer URI"
+
+msgid "New RQLExpression"
+msgstr "Neuer RQL Ausdruck"
+
+msgid "New State"
+msgstr "Neuer Zustand"
+
+msgid "New SubWorkflowExitPoint"
+msgstr "Neuer subworkflow-Endpunkt"
+
+msgid "New TrInfo"
+msgstr "Neue Übergangsinformation"
+
+msgid "New Transition"
+msgstr "Neuer Übergang"
+
+msgid "New Workflow"
+msgstr "Neuer workflow"
+
+msgid "New WorkflowTransition"
+msgstr "Neuer workflow-Übergang"
+
+#, python-format
+msgid "No account? Try public access at %s"
+msgstr "Kein Konto? Zur öffentlichen Website: %s"
+
+msgid "No result matching query"
+msgstr "Ihre Suche ergab keine Treffer."
+
+msgid "Non exhaustive list of views that may apply to entities of this type"
+msgstr ""
+"nicht abschließende Liste von Ansichten, die auf Entitäten dieses Typs "
+"Anwendung finden"
+
+msgid "OR"
+msgstr "oder"
+
+msgid "Parent class:"
+msgstr "Elternklasse"
+
+msgid "Password"
+msgstr "Passwort"
+
+msgid "Password_plural"
+msgstr "Passwörter"
+
+msgid "Permissions for entity types"
+msgstr "Berechtigungen für Entitätstypen"
+
+msgid "Permissions for relations"
+msgstr "Berechtigungen für Relationen"
+
+msgid "Please note that this is only a shallow copy"
+msgstr "Achtung: dies ist nur eine flache Kopie!"
+
+msgid "Powered by CubicWeb"
+msgstr "Powered by CubicWeb"
+
+msgid "RQLConstraint"
+msgstr "RQL-Einschränkung"
+
+msgid "RQLExpression"
+msgstr "RQL-Ausdruck"
+
+msgid "RQLExpression_plural"
+msgstr "RQL-Ausdrücke"
+
+msgid "RQLUniqueConstraint"
+msgstr "RQL Einschränkung bzgl. Eindeutigkeit"
+
+msgid "RQLVocabularyConstraint"
+msgstr "RQL Wortschatz-Einschränkung"
+
+msgid "Recipients:"
+msgstr "Adressaten:"
+
+msgid "RegexpConstraint"
+msgstr "regulärer Ausdruck Einschränkung"
+
+msgid "Registry's content"
+msgstr "Inhalt der Registry"
+
+msgid "Relation types"
+msgstr "Relationstypen"
+
+msgid "Relations"
+msgstr "Relationen"
+
+msgid "Repository"
+msgstr "Ablage"
+
+#, python-format
+msgid "Schema %s"
+msgstr "Schema %s"
+
+msgid "Schema of the data model"
+msgstr "Schema des Datenmodells"
+
+msgid "Search for"
+msgstr "Suchen"
+
+msgid "SizeConstraint"
+msgstr "Größeneinschränkung"
+
+msgid ""
+"Source's configuration for a particular host. One key=value per line, "
+"authorized keys depending on the source's type, overriding values defined on "
+"the source."
+msgstr ""
+
+msgid "Startup views"
+msgstr "Startansichten"
+
+msgid "State"
+msgstr "Zustand"
+
+msgid "State_plural"
+msgstr "Zustände"
+
+msgid "StaticVocabularyConstraint"
+msgstr "Wortschatz-Einschränkung"
+
+msgid "String"
+msgstr "String"
+
+msgid "String_plural"
+msgstr "Strings"
+
+msgid "Sub-classes:"
+msgstr "Unterklassen"
+
+msgid "SubWorkflowExitPoint"
+msgstr "Subworkflow Endpunkt"
+
+msgid "SubWorkflowExitPoint_plural"
+msgstr "subworkflow Endpunkte"
+
+msgid "Subject:"
+msgstr "Subjekt :"
+
+msgid "Submit bug report"
+msgstr "Fehlerbericht senden"
+
+msgid "Submit bug report by mail"
+msgstr "Diesen Bericht als E-Mail senden"
+
+#, python-format
+msgid "The view %s can not be applied to this query"
+msgstr "Die Ansicht %s ist auf diese Anfrage nicht anwendbar."
+
+#, python-format
+msgid "The view %s could not be found"
+msgstr "Die Ansicht %s konnte nicht gefunden werden."
+
+msgid "There is no default workflow"
+msgstr "Dieser Entitätstyp hat standardmäßig keinen Workflow."
+
+msgid "This BaseTransition"
+msgstr "Diese abstracte Transition"
+
+msgid "This Bookmark"
+msgstr "Dieses Lesezeichen"
+
+msgid "This CWAttribute"
+msgstr "diese finale Relationsdefinition"
+
+msgid "This CWCache"
+msgstr "Dieser Anwendungs-Cache"
+
+msgid "This CWConstraint"
+msgstr "diese Einschränkung"
+
+msgid "This CWConstraintType"
+msgstr "Dieser Einschränkungstyp"
+
+msgid "This CWEType"
+msgstr "Dieser Entitätstyp"
+
+msgid "This CWGroup"
+msgstr "Diese Gruppe"
+
+msgid "This CWPermission"
+msgstr "Diese Berechtigung"
+
+msgid "This CWProperty"
+msgstr "Diese Eigenschaft"
+
+msgid "This CWRType"
+msgstr "Dieser Relationstyp"
+
+msgid "This CWRelation"
+msgstr "Diese Relation"
+
+msgid "This CWSource"
+msgstr ""
+
+msgid "This CWSourceHostConfig"
+msgstr ""
+
+msgid "This CWUniqueTogetherConstraint"
+msgstr "Diese unique-together-Einschränkung"
+
+msgid "This CWUser"
+msgstr "Dieser Nutzer"
+
+msgid "This EmailAddress"
+msgstr "Diese E-Mail-Adresse"
+
+msgid "This ExternalUri"
+msgstr "dieser externe URI"
+
+msgid "This RQLExpression"
+msgstr "Dieser RQL-Ausdruck"
+
+msgid "This State"
+msgstr "Dieser Zustand"
+
+msgid "This SubWorkflowExitPoint"
+msgstr "Dieser Subworkflow Endpunkt"
+
+msgid "This TrInfo"
+msgstr "Diese Übergangs-Information"
+
+msgid "This Transition"
+msgstr "Dieser Übergang"
+
+msgid "This Workflow"
+msgstr "Dieser Workflow"
+
+msgid "This WorkflowTransition"
+msgstr "Dieser Workflow-Übergang"
+
+msgid "This entity type permissions:"
+msgstr "Berechtigungen für diesen Entitätstyp"
+
+msgid "Time"
+msgstr "Zeit"
+
+msgid "Time_plural"
+msgstr "Zeiten"
+
+msgid "TrInfo"
+msgstr "Übergangs-Information"
+
+msgid "TrInfo_plural"
+msgstr "Übergangs-Informationen"
+
+msgid "Transition"
+msgstr "Übergang"
+
+msgid "Transition_plural"
+msgstr "Übergänge"
+
+msgid "UniqueConstraint"
+msgstr "eindeutige Einschränkung"
+
+msgid "Unreachable objects"
+msgstr "unzugängliche Objekte"
+
+msgid "Used by:"
+msgstr "benutzt von:"
+
+msgid "Web server"
+msgstr "Web-Server"
+
+msgid "What's new?"
+msgstr "Was ist neu?"
+
+msgid "Workflow"
+msgstr "Workflow"
+
+msgid "Workflow history"
+msgstr "Workflow-Chronik"
+
+msgid "WorkflowTransition"
+msgstr "Workflow-Übergang"
+
+msgid "WorkflowTransition_plural"
+msgstr "Workflow-Übergänge"
+
+msgid "Workflow_plural"
+msgstr "Workflows"
+
+msgid ""
+"You can either submit a new file using the browse button above, or choose to "
+"remove already uploaded file by checking the \"detach attached file\" check-"
+"box, or edit file content online with the widget below."
+msgstr ""
+"Sie können entweder mit dem bouton\n"
+"\"Durchsuchen\" oberhalb eine neue Datei hochladen, eine bereits "
+"hochgeladene Datei durch anklicken des Kästchens \"angehängte Datei abtrennen"
+"\" entfernen, oder den Datei-Inhalt mit dem Widget unterhalb editieren."
+
+msgid ""
+"You can either submit a new file using the browse button above, or edit file "
+"content online with the widget below."
+msgstr ""
+"Sie können entweder mit dem bouton\n"
+"\"Durchsuchen\" oberhalb eine neue Datei hochladen, oder den Datei-Inhalt "
+"mit dem Widget unterhalb editieren."
+
+msgid "You can use any of the following substitutions in your text"
+msgstr "Sie können die folgenden Ersetzungen in Ihrem Text verwenden:"
+
+msgid ""
+"You have no access to this view or it can not be used to display the current "
+"data."
+msgstr ""
+"Sie haben entweder keinen Zugriff auf diese Ansicht, oder die Ansicht kann "
+"nicht zur Anzeite dieser Daten verwendet werden."
+
+msgid ""
+"You're not authorized to access this page. If you think you should, please "
+"contact the site administrator."
+msgstr ""
+"Sie haben keinen Zugriff auf diese Seite.Bitte wenden Sie sich ggfs. an "
+"Ihren Administrator."
+
+#, python-format
+msgid "[%s supervision] changes summary"
+msgstr "[%s supervision] Beschreibung der Änderungen"
+
+msgid ""
+"a RQL expression which should return some results, else the transition won't "
+"be available. This query may use X and U variables that will respectivly "
+"represents the current entity and the current user"
+msgstr ""
+"ein RQL-Ausdruck, der einige Treffer liefern sollte, sonst wird der Übergang "
+"nicht verfügbar sein. Diese Abfrage kann X und U Variable benutzen, die "
+"jeweils die aktuelle Entität und den aktuellen Nutzer repräsentieren."
+
+msgid "a URI representing an object in external data store"
+msgstr "ein URI, der ein Objekt in einem externen Data-Store repräsentiert"
+
+msgid "a float is expected"
+msgstr "Eine Dezimalzahl (float) wird erwartet."
+
+msgid ""
+"a simple cache entity characterized by a name and a validity date. The "
+"target application is responsible for updating timestamp when necessary to "
+"invalidate the cache (typically in hooks). Also, checkout the AppObject."
+"get_cache() method."
+msgstr ""
+
+msgid "abstract base class for transitions"
+msgstr "abstrakte Basisklasse für Übergänge"
+
+msgid "action(s) on this selection"
+msgstr "Aktionen(en) bei dieser Auswahl"
+
+msgid "actions"
+msgstr "Aktionen"
+
+msgid "activate"
+msgstr "aktivieren"
+
+msgid "activated"
+msgstr "aktiviert"
+
+msgid "add"
+msgstr "hinzufügen"
+
+msgid "add Bookmark bookmarked_by CWUser object"
+msgstr "Lesezeichen"
+
+msgid "add CWAttribute constrained_by CWConstraint subject"
+msgstr "Einschränkung"
+
+msgid "add CWAttribute read_permission RQLExpression subject"
+msgstr "RQL-Ausdruck zum lesen"
+
+msgid "add CWAttribute relation_type CWRType object"
+msgstr "Attributdefinition"
+
+msgid "add CWAttribute update_permission RQLExpression subject"
+msgstr "RQL-Ausdruck für Berechtigung zum Aktualisieren"
+
+msgid "add CWEType add_permission RQLExpression subject"
+msgstr "RQL-Ausdruck für Berechtigung zum Hinzufügen"
+
+msgid "add CWEType delete_permission RQLExpression subject"
+msgstr "RQL-Ausdruck für Berechtigung zum Löschen"
+
+msgid "add CWEType read_permission RQLExpression subject"
+msgstr "RQL-Ausdruck für Berechtigung zum Lesen"
+
+msgid "add CWEType update_permission RQLExpression subject"
+msgstr "RQL-Ausdruck für Berechtigung zum Aktualisieren"
+
+msgid "add CWProperty for_user CWUser object"
+msgstr "Eigenschaft"
+
+msgid "add CWRelation add_permission RQLExpression subject"
+msgstr "RQL-Ausdruck hinzufügen"
+
+msgid "add CWRelation constrained_by CWConstraint subject"
+msgstr "Einschränkung"
+
+msgid "add CWRelation delete_permission RQLExpression subject"
+msgstr "RQL-Ausdruck löschen"
+
+msgid "add CWRelation read_permission RQLExpression subject"
+msgstr "RQL-Ausdruck lesen"
+
+msgid "add CWRelation relation_type CWRType object"
+msgstr "Relationsdefinition"
+
+msgid "add CWSourceHostConfig cw_host_config_of CWSource object"
+msgstr ""
+
+msgid "add CWUniqueTogetherConstraint constraint_of CWEType object"
+msgstr "unique-together-Einschränkung hinzufügen"
+
+msgid "add CWUser in_group CWGroup object"
+msgstr "Nutzer"
+
+msgid "add CWUser use_email EmailAddress subject"
+msgstr "Email-Adresse"
+
+msgid "add State allowed_transition Transition object"
+msgstr "Anfangszustand"
+
+msgid "add State allowed_transition Transition subject"
+msgstr "erlaubter Übergang"
+
+msgid "add State allowed_transition WorkflowTransition subject"
+msgstr "Workflow-Übergang"
+
+msgid "add State state_of Workflow object"
+msgstr "Status"
+
+msgid "add Transition condition RQLExpression subject"
+msgstr "Bedingung"
+
+msgid "add Transition destination_state State object"
+msgstr "ausstehender Übergang"
+
+msgid "add Transition destination_state State subject"
+msgstr "Zielstatus"
+
+msgid "add Transition transition_of Workflow object"
+msgstr "Übergang"
+
+msgid "add WorkflowTransition condition RQLExpression subject"
+msgstr "Workflow-Übergang"
+
+msgid "add WorkflowTransition subworkflow_exit SubWorkflowExitPoint subject"
+msgstr "Subworkflow Exit-Punkt"
+
+msgid "add WorkflowTransition transition_of Workflow object"
+msgstr "Workflow-Übergang"
+
+msgctxt "inlined:CWRelation.from_entity.subject"
+msgid "add a CWEType"
+msgstr "einen Entitätstyp hinzufügen"
+
+msgctxt "inlined:CWRelation.to_entity.subject"
+msgid "add a CWEType"
+msgstr "einen Entitätstyp hinzufügen"
+
+msgctxt "inlined:CWRelation.relation_type.subject"
+msgid "add a CWRType"
+msgstr "einen Relationstyp hinzufügen"
+
+msgctxt "inlined:CWUser.use_email.subject"
+msgid "add a EmailAddress"
+msgstr "Email-Adresse hinzufügen"
+
+msgid "add a new permission"
+msgstr "eine Berechtigung hinzufügen"
+
+# subject and object forms for each relation type
+# (no object form for final relation types)
+msgid "add_permission"
+msgstr "kann hinzugefügt werden durch"
+
+# subject and object forms for each relation type
+# (no object form for final relation types)
+msgctxt "CWEType"
+msgid "add_permission"
+msgstr "Berechtigung hinzufügen"
+
+msgctxt "CWRelation"
+msgid "add_permission"
+msgstr "Berechtigung hinzufügen"
+
+msgid "add_permission_object"
+msgstr "hat die Berechtigung zum Hinzufügen"
+
+msgctxt "CWGroup"
+msgid "add_permission_object"
+msgstr "kann hinzufügen"
+
+msgctxt "RQLExpression"
+msgid "add_permission_object"
+msgstr "benutzt, um die Hinzufüge-Berechtigung zu festzulegen für"
+
+msgid "add_relation"
+msgstr "hinzufügen"
+
+#, python-format
+msgid "added %(etype)s #%(eid)s (%(title)s)"
+msgstr "Hinzufügen der Entität %(etype)s #%(eid)s (%(title)s)"
+
+#, python-format
+msgid ""
+"added relation %(rtype)s from %(frometype)s #%(eidfrom)s to %(toetype)s #"
+"%(eidto)s"
+msgstr ""
+"Die Relation %(rtype)s von %(frometype)s #%(eidfrom)s zu %(toetype)s #"
+"%(eidto)s wurde hinzugefügt."
+
+msgid "addrelated"
+msgstr "hinzufügen"
+
+msgid "address"
+msgstr "Adresse"
+
+msgctxt "EmailAddress"
+msgid "address"
+msgstr "Adresse"
+
+msgid "alias"
+msgstr "Alias"
+
+msgctxt "EmailAddress"
+msgid "alias"
+msgstr "Alias"
+
+msgid "allow to set a specific workflow for an entity"
+msgstr "erlaube, einen bestimmten Workflow für eine Entität zu setzen"
+
+msgid "allowed transitions from this state"
+msgstr "erlaubte Übergänge von diesem Zustand"
+
+msgid "allowed_transition"
+msgstr "erlaubter Übergang"
+
+msgctxt "State"
+msgid "allowed_transition"
+msgstr "erlaubter Übergang"
+
+msgid "allowed_transition_object"
+msgstr "ausstehende Zustände"
+
+msgctxt "BaseTransition"
+msgid "allowed_transition_object"
+msgstr "ausstehende Zustände"
+
+msgctxt "Transition"
+msgid "allowed_transition_object"
+msgstr "ausstehende Zustände"
+
+msgctxt "WorkflowTransition"
+msgid "allowed_transition_object"
+msgstr "ausstehende Zustände"
+
+msgid "am/pm calendar (month)"
+msgstr "am/pm Kalender (Monat)"
+
+msgid "am/pm calendar (semester)"
+msgstr "am/pm Kalender (Halbjahr)"
+
+msgid "am/pm calendar (week)"
+msgstr "am/pm Kalender (Woche)"
+
+msgid "am/pm calendar (year)"
+msgstr "am/pm Kalender (Jahr)"
+
+msgid "an electronic mail address associated to a short alias"
+msgstr "Eine E-Mail-Adresse wurde mit einem Alias verknüpft."
+
+msgid "an error occurred"
+msgstr "Es ist ein Fehler aufgetreten."
+
+msgid "an error occurred while processing your request"
+msgstr "Während der Bearbeitung Ihrer Anfrage ist ein Fehler aufgetreten."
+
+msgid "an error occurred, the request cannot be fulfilled"
+msgstr ""
+"Es ist ein Fehler aufgetreten, Ihre Anfrage kann nicht bearbeitet werden."
+
+msgid "an integer is expected"
+msgstr "Ganze Zahl (integer) erwartet."
+
+msgid "and linked"
+msgstr "und verknüpft"
+
+msgid "and/or between different values"
+msgstr "und/oder zwischen verschiedenen Werten"
+
+msgid "anonymous"
+msgstr "anonym"
+
+msgid "application entities"
+msgstr "Anwendungs-Entitäten"
+
+msgid "april"
+msgstr "April"
+
+#, python-format
+msgid "archive for %(author)s"
+msgstr ""
+
+#, python-format
+msgid "archive for %(month)s/%(year)s"
+msgstr ""
+
+#, python-format
+msgid "at least one relation %(rtype)s is required on %(etype)s (%(eid)s)"
+msgstr ""
+"Die Entität %(eid)s ´vom Typ %(etype)s muss mindestens mit einer \n"
+"anderen durch die Relation %(rtype)s verknüpft sein."
+
+msgid "attribute"
+msgstr "Attribut"
+
+msgid "august"
+msgstr "August"
+
+msgid "authentication failure"
+msgstr "Nutzername oder Passwort falsch"
+
+msgid "auto"
+msgstr "automatisch"
+
+msgid "automatic"
+msgstr "automatisch"
+
+msgid "bad value"
+msgstr "Unzulässiger Wert"
+
+msgid "base url"
+msgstr "Basis-URL"
+
+msgid "bookmark has been removed"
+msgstr "Das Lesezeichen wurde gelöscht."
+
+msgid "bookmark this page"
+msgstr "diese Seite merken"
+
+msgid "bookmark this search"
+msgstr "diese Suche merken"
+
+msgid "bookmarked_by"
+msgstr "Lesezeichen angelegt durch"
+
+msgctxt "Bookmark"
+msgid "bookmarked_by"
+msgstr "Lesezeichen angelegt durch"
+
+msgid "bookmarked_by_object"
+msgstr "hat Lesezeichen"
+
+msgctxt "CWUser"
+msgid "bookmarked_by_object"
+msgstr "verwendet Lesezeichen"
+
+msgid "bookmarks"
+msgstr "Lesezeichen"
+
+msgid "bookmarks are used to have user's specific internal links"
+msgstr "Lesezeichen werden für nutzer-spezifische interne Links verwendet"
+
+msgid "boxes"
+msgstr "Boxen"
+
+msgid "bug report sent"
+msgstr "Fehlerbericht gesendet"
+
+msgid "button_apply"
+msgstr "Anwenden"
+
+msgid "button_cancel"
+msgstr "Abbrechen"
+
+msgid "button_delete"
+msgstr "Löschen"
+
+msgid "button_ok"
+msgstr "OK"
+
+msgid "by"
+msgstr "durch"
+
+msgid "by relation"
+msgstr "durch die Relation"
+
+msgid "by_transition"
+msgstr "je Übergang"
+
+msgctxt "TrInfo"
+msgid "by_transition"
+msgstr "je Übergang"
+
+msgid "by_transition_object"
+msgstr "Übergangsinformation"
+
+msgctxt "BaseTransition"
+msgid "by_transition_object"
+msgstr "Übergangsinformation"
+
+msgctxt "Transition"
+msgid "by_transition_object"
+msgstr "Übergangsinformation"
+
+msgctxt "WorkflowTransition"
+msgid "by_transition_object"
+msgstr "Übergangsinformation"
+
+msgid "calendar"
+msgstr "Kalender anzeigen"
+
+msgid "calendar (month)"
+msgstr "Kalender (monatlich)"
+
+msgid "calendar (semester)"
+msgstr "Kalender (halbjährlich)"
+
+msgid "calendar (week)"
+msgstr "Kalender (wöchentlich)"
+
+msgid "calendar (year)"
+msgstr "Kalender (jährlich)"
+
+msgid "can not resolve entity types:"
+msgstr "Die Typen konnten nicht ermittelt werden:"
+
+msgid "can't be changed"
+msgstr "kann nicht geändert werden"
+
+msgid "can't be deleted"
+msgstr "kann nicht entfernt werden"
+
+#, python-format
+msgid "can't change the %s attribute"
+msgstr "Kann das Attribut %s nicht ändern."
+
+#, python-format
+msgid "can't connect to source %s, some data may be missing"
+msgstr "Keine Verbindung zu der Quelle %s, einige Daten könnten fehlen"
+
+#, python-format
+msgid "can't display data, unexpected error: %s"
+msgstr "Kann die Daten aufgrund des folgenden Fehlers nicht anzeigen: %s"
+
+msgid "can't have multiple exits on the same state"
+msgstr "Mehrere Ausgänge aus demselben Zustand nicht möglich."
+
+#, python-format
+msgid "can't parse %(value)r (expected %(format)s)"
+msgstr ""
+"Kann den Wert %(value)r nicht analysieren (erwartetes Format: %(format)s)"
+
+#, python-format
+msgid ""
+"can't set inlined=True, %(stype)s %(rtype)s %(otype)s has cardinality="
+"%(card)s"
+msgstr ""
+
+msgid "cancel"
+msgstr ""
+
+msgid "cancel select"
+msgstr "Auswahl aufheben"
+
+msgid "cancel this insert"
+msgstr "diese Einfügung aufheben"
+
+msgid "cardinality"
+msgstr "Kardinalität"
+
+msgctxt "CWAttribute"
+msgid "cardinality"
+msgstr "Kardinalität"
+
+msgctxt "CWRelation"
+msgid "cardinality"
+msgstr "Kardinalität"
+
+msgid "category"
+msgstr "Kategorie"
+
+#, python-format
+msgid "changed state of %(etype)s #%(eid)s (%(title)s)"
+msgstr "Änderung des Zustands von %(etype)s #%(eid)s (%(title)s)"
+
+msgid "changes applied"
+msgstr "Änderungen übernommen"
+
+msgid "click here to see created entity"
+msgstr "Hier klicken, um die angelegte Entität anzusehen"
+
+msgid "click here to see edited entity"
+msgstr ""
+
+msgid "click on the box to cancel the deletion"
+msgstr "Klicken Sie die Box an, um das Löschen rückgängig zu machen."
+
+msgid "click to add a value"
+msgstr "Klicken Sie, um einen Wert hinzuzufügen"
+
+msgid "click to delete this value"
+msgstr "Klicken Sie, um diesen Wert zu löschen"
+
+msgid "click to edit this field"
+msgstr "Klicken Sie, um dieses Feld zu editieren"
+
+msgid "comment"
+msgstr "Kommentar"
+
+msgctxt "TrInfo"
+msgid "comment"
+msgstr "Kommentar"
+
+msgid "comment_format"
+msgstr "Format"
+
+msgctxt "TrInfo"
+msgid "comment_format"
+msgstr "Format"
+
+msgid "components"
+msgstr "Komponenten"
+
+msgid "components_etypenavigation"
+msgstr "nach Typ filtern"
+
+msgid "components_etypenavigation_description"
+msgstr "Erlaubt die Sortierung von Suchergebnissen nach Entitätstyp"
+
+msgid "components_navigation"
+msgstr "Seitennavigation"
+
+msgid "components_navigation_description"
+msgstr "Paginierungs-Komponente für große Ergebnismengen"
+
+msgid "components_rqlinput"
+msgstr "rql Eingabefeld"
+
+msgid "components_rqlinput_description"
+msgstr "das rql-Eingabefeld im Seitenkopf"
+
+msgid "composite"
+msgstr ""
+
+msgctxt "CWRelation"
+msgid "composite"
+msgstr "composite"
+
+msgid "condition"
+msgstr "Bedingung"
+
+msgctxt "BaseTransition"
+msgid "condition"
+msgstr "Bedingung"
+
+msgctxt "Transition"
+msgid "condition"
+msgstr "Bedingung"
+
+msgctxt "WorkflowTransition"
+msgid "condition"
+msgstr "Bedingung"
+
+msgid "condition_object"
+msgstr "Bedingung von"
+
+msgctxt "RQLExpression"
+msgid "condition_object"
+msgstr "Bedingung von"
+
+msgid "conditions"
+msgstr "Bedingungen"
+
+msgid "config"
+msgstr ""
+
+msgctxt "CWSource"
+msgid "config"
+msgstr ""
+
+msgctxt "CWSourceHostConfig"
+msgid "config"
+msgstr ""
+
+msgid "config mode"
+msgstr "Konfigurationsmodus"
+
+msgid "config type"
+msgstr "Konfigurationstyp"
+
+msgid "confirm password"
+msgstr "Passwort bestätigen"
+
+msgid "constrained_by"
+msgstr "eingeschränkt durch"
+
+msgctxt "CWAttribute"
+msgid "constrained_by"
+msgstr "eingeschränkt durch"
+
+msgctxt "CWRelation"
+msgid "constrained_by"
+msgstr "eingeschränkt durch"
+
+msgid "constrained_by_object"
+msgstr "Einschränkungen"
+
+msgctxt "CWConstraint"
+msgid "constrained_by_object"
+msgstr "Einschränkungen"
+
+msgid "constraint factory"
+msgstr "Einschränkungs-Factory"
+
+msgid "constraint_of"
+msgstr ""
+
+msgctxt "CWUniqueTogetherConstraint"
+msgid "constraint_of"
+msgstr ""
+
+msgid "constraint_of_object"
+msgstr ""
+
+msgctxt "CWEType"
+msgid "constraint_of_object"
+msgstr ""
+
+msgid "constraints"
+msgstr "Einschränkungen"
+
+msgid "constraints applying on this relation"
+msgstr "auf diese Relation angewandte Einschränkung"
+
+msgid "content type"
+msgstr "MIME-Typ"
+
+msgid "context"
+msgstr "Kontext"
+
+msgid "context where this box should be displayed"
+msgstr "Kontext, in dem diese Box angezeigt werden soll"
+
+msgid "context where this component should be displayed"
+msgstr "Kontext, in dem diese Komponente angezeigt werden soll"
+
+msgid "context where this facet should be displayed, leave empty for both"
+msgstr ""
+"Kontext, wo diese Nachricht angezeigt werden soll; für beides: frei lassen."
+
+msgid "control subject entity's relations order"
+msgstr ""
+
+msgid "copy"
+msgstr "kopieren"
+
+msgid "core relation indicating a user's groups"
+msgstr "Kernrelation für die Gruppen eines Nutzers"
+
+msgid ""
+"core relation indicating owners of an entity. This relation implicitly put "
+"the owner into the owners group for the entity"
+msgstr ""
+
+msgid "core relation indicating the original creator of an entity"
+msgstr "Kernrelation für den Urheber einer Entität"
+
+msgid "core relation indicating the type of an entity"
+msgstr "Kernrelation für den Identitätstyp"
+
+msgid ""
+"core relation indicating the types (including specialized types) of an entity"
+msgstr ""
+
+msgid "cost"
+msgstr "Kosten"
+
+msgid "could not connect to the SMTP server"
+msgstr "Keine Verbindung mit dem SMTP-Server"
+
+msgid "create an index for quick search on this attribute"
+msgstr "Erstelle einen Index zur schnellen Suche über dieses Attribut"
+
+msgid "create an index page"
+msgstr "Eine Index-Seite anlegen"
+
+msgid "created on"
+msgstr "angelegt am"
+
+msgid "created_by"
+msgstr "erstellt von"
+
+msgid "created_by_object"
+msgstr "hat erstellt"
+
+msgid "creating Bookmark (Bookmark bookmarked_by CWUser %(linkto)s)"
+msgstr "Erstelle Lesezeichen für %(linkto)s"
+
+msgid "creating CWAttribute (CWAttribute relation_type CWRType %(linkto)s)"
+msgstr "Erstelle Attribut %(linkto)s"
+
+msgid ""
+"creating CWConstraint (CWAttribute %(linkto)s constrained_by CWConstraint)"
+msgstr "Erstelle Einschränkung für attribute %(linkto)s"
+
+msgid ""
+"creating CWConstraint (CWRelation %(linkto)s constrained_by CWConstraint)"
+msgstr "Erstelle Einschränkung für Relation %(linkto)s"
+
+msgid "creating CWProperty (CWProperty for_user CWUser %(linkto)s)"
+msgstr "Erstelle Eigenschaft für Nutzer %(linkto)s"
+
+msgid "creating CWRelation (CWRelation relation_type CWRType %(linkto)s)"
+msgstr "Erstelle Relation %(linkto)s"
+
+msgid ""
+"creating CWSourceHostConfig (CWSourceHostConfig cw_host_config_of CWSource "
+"%(linkto)s)"
+msgstr ""
+
+msgid ""
+"creating CWUniqueTogetherConstraint (CWUniqueTogetherConstraint "
+"constraint_of CWEType %(linkto)s)"
+msgstr ""
+
+msgid "creating CWUser (CWUser in_group CWGroup %(linkto)s)"
+msgstr "Erstelle neuen Nutzer in Gruppe %(linkto)s"
+
+msgid "creating EmailAddress (CWUser %(linkto)s use_email EmailAddress)"
+msgstr "Erstelle E-Mail-Adresse für Nutzer %(linkto)s"
+
+msgid ""
+"creating RQLExpression (CWAttribute %(linkto)s read_permission RQLExpression)"
+msgstr "RQL-Ausdruck für Leseberechtigung für %(linkto)s"
+
+msgid ""
+"creating RQLExpression (CWAttribute %(linkto)s update_permission "
+"RQLExpression)"
+msgstr "RQL Ausdruck für Aktualisierungs-Berechtigung für %(linkto)s"
+
+msgid ""
+"creating RQLExpression (CWEType %(linkto)s add_permission RQLExpression)"
+msgstr "Erstelle rql-Ausdruck für Hinzufüge-Berechtigung für %(linkto)s"
+
+msgid ""
+"creating RQLExpression (CWEType %(linkto)s delete_permission RQLExpression)"
+msgstr "Erstelle rql-Ausdruck für Lösch-Berechtigung für %(linkto)s"
+
+msgid ""
+"creating RQLExpression (CWEType %(linkto)s read_permission RQLExpression)"
+msgstr "Erstelle rql-Ausdruck für Lese-Berechtigung für %(linkto)s"
+
+msgid ""
+"creating RQLExpression (CWEType %(linkto)s update_permission RQLExpression)"
+msgstr "Erstelle rql-Ausdruck für Aktualisierungs-Berechtigung für %(linkto)s"
+
+msgid ""
+"creating RQLExpression (CWRelation %(linkto)s add_permission RQLExpression)"
+msgstr "RQL-Ausdruck zur Vergabe der Hinzufüge-Berechtigung für %(linkto)s"
+
+msgid ""
+"creating RQLExpression (CWRelation %(linkto)s delete_permission "
+"RQLExpression)"
+msgstr "RQL-Ausdruck zur Vergabe der Lösch-Berechtigung für %(linkto)s"
+
+msgid ""
+"creating RQLExpression (CWRelation %(linkto)s read_permission RQLExpression)"
+msgstr "RQL-Ausdruck zur Vergabe der Lese-Berechtigung für %(linkto)s"
+
+msgid "creating RQLExpression (Transition %(linkto)s condition RQLExpression)"
+msgstr "Erstelle RQL-Ausdruck für Übergang %(linkto)s"
+
+msgid ""
+"creating RQLExpression (WorkflowTransition %(linkto)s condition "
+"RQLExpression)"
+msgstr "Erstelle RQL-Ausdruck für Workflow-Übergang %(linkto)s"
+
+msgid "creating State (State allowed_transition Transition %(linkto)s)"
+msgstr "Erstelle einen zustand, der den Übergang %(linkto)s auslösen kann."
+
+msgid "creating State (State state_of Workflow %(linkto)s)"
+msgstr "Erstelle Zustand des Workflows %(linkto)s"
+
+msgid "creating State (Transition %(linkto)s destination_state State)"
+msgstr "Erstelle Zielzustand für Übergang %(linkto)s"
+
+msgid ""
+"creating SubWorkflowExitPoint (WorkflowTransition %(linkto)s "
+"subworkflow_exit SubWorkflowExitPoint)"
+msgstr "Erstelle Subworkflow Exit-Punkt für Workflow-Übergang %(linkto)s"
+
+msgid "creating Transition (State %(linkto)s allowed_transition Transition)"
+msgstr "Erstelle auslösbaren Übergang für Zustand %(linkto)s"
+
+msgid "creating Transition (Transition destination_state State %(linkto)s)"
+msgstr "Erstelle Übergang, der zu Zustand %(linkto)s führt."
+
+msgid "creating Transition (Transition transition_of Workflow %(linkto)s)"
+msgstr "Erstelle Übergang des Workflows %(linkto)s"
+
+msgid ""
+"creating WorkflowTransition (State %(linkto)s allowed_transition "
+"WorkflowTransition)"
+msgstr "Erstelle Workflow-Übergang, der zum Zustand %(linkto)s führt."
+
+msgid ""
+"creating WorkflowTransition (WorkflowTransition transition_of Workflow "
+"%(linkto)s)"
+msgstr "Erstelle Workflow-Übergang des Workflows %(linkto)s"
+
+msgid "creation"
+msgstr "Erstellung"
+
+msgid "creation date"
+msgstr "Erstellungsdatum"
+
+msgid "creation time of an entity"
+msgstr "Erstellungszeitpunkt einer Entität"
+
+msgid "creation_date"
+msgstr "Erstellungsdatum"
+
+msgid "cstrtype"
+msgstr "Typ der Einschränkung"
+
+msgctxt "CWConstraint"
+msgid "cstrtype"
+msgstr "Einschränkungstyp"
+
+msgid "cstrtype_object"
+msgstr "benutzt von"
+
+msgctxt "CWConstraintType"
+msgid "cstrtype_object"
+msgstr "Einschränkungstyp von"
+
+msgid "csv entities export"
+msgstr "CSV-Export von Entitäten"
+
+msgid "csv export"
+msgstr "CSV-Export"
+
+msgid "ctxcomponents"
+msgstr "Kontext-Komponenten"
+
+msgid "ctxcomponents_anonuserlink"
+msgstr ""
+
+msgid "ctxcomponents_anonuserlink_description"
+msgstr ""
+
+msgid "ctxcomponents_appliname"
+msgstr ""
+
+msgid "ctxcomponents_appliname_description"
+msgstr ""
+
+msgid "ctxcomponents_bookmarks_box"
+msgstr "Lesezeichen-Box"
+
+msgid "ctxcomponents_bookmarks_box_description"
+msgstr "Box mit einer Liste der Lesezeichen des Nutzers"
+
+msgid "ctxcomponents_breadcrumbs"
+msgstr "Brotkrumen"
+
+msgid "ctxcomponents_breadcrumbs_description"
+msgstr ""
+"Anzeigen eines Pfads zur Lokalisierung der aktuellen Seite innerhalb der Site"
+
+msgid "ctxcomponents_download_box"
+msgstr "Download-Box"
+
+msgid "ctxcomponents_download_box_description"
+msgstr ""
+
+msgid "ctxcomponents_edit_box"
+msgstr "Aktionsbox"
+
+msgid "ctxcomponents_edit_box_description"
+msgstr "Box mit verfügbaren Aktionen für die angezeigten Daten"
+
+msgid "ctxcomponents_facet.filters"
+msgstr "Filter"
+
+msgid "ctxcomponents_facet.filters_description"
+msgstr "Box mit Filter für aktuelle Suchergebnis-Funktionalität"
+
+msgid "ctxcomponents_logo"
+msgstr "Icon"
+
+msgid "ctxcomponents_logo_description"
+msgstr "Das Anwendungs-Ikon angezeigt im Bildschirmkopf"
+
+msgid "ctxcomponents_metadata"
+msgstr "Metadaten für Entität metadata"
+
+msgid "ctxcomponents_metadata_description"
+msgstr ""
+
+msgid "ctxcomponents_possible_views_box"
+msgstr "Box mit möglichen Ansichten"
+
+msgid "ctxcomponents_possible_views_box_description"
+msgstr "Box mit möglichen Ansichten für die angezeigten Daten"
+
+msgid "ctxcomponents_prevnext"
+msgstr "vorherige/nächste Entität"
+
+msgid "ctxcomponents_prevnext_description"
+msgstr ""
+"display link to go from one entity to another on entities implementing the "
+"\"previous/next\" interface."
+
+msgid "ctxcomponents_rss"
+msgstr "RSS-Box"
+
+msgid "ctxcomponents_rss_description"
+msgstr "RSS icon um die angezeigten Daten als RSS-Thread zu erhalten"
+
+msgid "ctxcomponents_search_box"
+msgstr "Suchbox"
+
+msgid "ctxcomponents_search_box_description"
+msgstr "Suchbox"
+
+msgid "ctxcomponents_startup_views_box"
+msgstr "Box für Start-Ansicht"
+
+msgid "ctxcomponents_startup_views_box_description"
+msgstr "Box mit möglichen Start-Ansichten"
+
+msgid "ctxcomponents_userstatus"
+msgstr ""
+
+msgid "ctxcomponents_userstatus_description"
+msgstr ""
+
+msgid "ctxcomponents_wfhistory"
+msgstr "Workflow-Chronik"
+
+msgid "ctxcomponents_wfhistory_description"
+msgstr "Zeite die Workflow-Chronik."
+
+msgid "ctxtoolbar"
+msgstr "Werkzeugleiste"
+
+msgid "custom_workflow"
+msgstr "angepasster Workflow"
+
+msgid "custom_workflow_object"
+msgstr "angepasster Workflow von"
+
+msgid "cw_dont_cross"
+msgstr ""
+
+msgctxt "CWSource"
+msgid "cw_dont_cross"
+msgstr ""
+
+msgid "cw_dont_cross_object"
+msgstr ""
+
+msgctxt "CWRType"
+msgid "cw_dont_cross_object"
+msgstr ""
+
+msgid "cw_host_config_of"
+msgstr ""
+
+msgctxt "CWSourceHostConfig"
+msgid "cw_host_config_of"
+msgstr ""
+
+msgid "cw_host_config_of_object"
+msgstr ""
+
+msgctxt "CWSource"
+msgid "cw_host_config_of_object"
+msgstr ""
+
+msgid "cw_may_cross"
+msgstr ""
+
+msgctxt "CWSource"
+msgid "cw_may_cross"
+msgstr ""
+
+msgid "cw_may_cross_object"
+msgstr ""
+
+msgctxt "CWRType"
+msgid "cw_may_cross_object"
+msgstr ""
+
+msgid "cw_source"
+msgstr ""
+
+msgid "cw_source_object"
+msgstr ""
+
+msgid "cw_support"
+msgstr ""
+
+msgctxt "CWSource"
+msgid "cw_support"
+msgstr ""
+
+msgid "cw_support_object"
+msgstr ""
+
+msgctxt "CWEType"
+msgid "cw_support_object"
+msgstr ""
+
+msgctxt "CWRType"
+msgid "cw_support_object"
+msgstr ""
+
+msgid "cwetype-box"
+msgstr "Box-Ansicht"
+
+msgid "cwetype-description"
+msgstr "Beschreibung"
+
+msgid "cwetype-permissions"
+msgstr "Berechtigungen"
+
+msgid "cwetype-views"
+msgstr "Ansichten"
+
+msgid "cwetype-workflow"
+msgstr "Workflow"
+
+msgid "cwgroup-main"
+msgstr "Beschreibung"
+
+msgid "cwgroup-permissions"
+msgstr "Berechtigungen"
+
+msgid "cwrtype-description"
+msgstr "Beschreibung"
+
+msgid "cwrtype-permissions"
+msgstr "Berechtigungen"
+
+msgid "cwuri"
+msgstr "interner URI"
+
+msgid "data directory url"
+msgstr "URL des Daten-Pools"
+
+msgid "date"
+msgstr "Datum"
+
+msgid "deactivate"
+msgstr "deaktivieren"
+
+msgid "deactivated"
+msgstr "deaktiviert"
+
+msgid "december"
+msgstr "Dezember"
+
+msgid "default"
+msgstr "Standardwert"
+
+msgid "default text format for rich text fields."
+msgstr "Standardformat für Textfelder"
+
+msgid "default user workflow"
+msgstr "Standard-Workflow für Nutzer"
+
+msgid "default value"
+msgstr "Standardwert"
+
+msgid "default workflow for an entity type"
+msgstr "Standard-Workflow eines Entitätstyps"
+
+msgid "default_workflow"
+msgstr "Standard-Workflow"
+
+msgctxt "CWEType"
+msgid "default_workflow"
+msgstr "Standard-Workflow"
+
+msgid "default_workflow_object"
+msgstr "Standard-Workflow von"
+
+msgctxt "Workflow"
+msgid "default_workflow_object"
+msgstr "Standard-Workflow von"
+
+msgid "defaultval"
+msgstr "Standard-Wert"
+
+msgctxt "CWAttribute"
+msgid "defaultval"
+msgstr "Standard-Wert"
+
+msgid "define a CubicWeb user"
+msgstr "Einen CubicWeb-Nutzer definieren"
+
+msgid "define a CubicWeb users group"
+msgstr "Eine CubicWeb-Nutzergruppe definieren"
+
+msgid ""
+"define a final relation: link a final relation type from a non final entity "
+"to a final entity type. used to build the instance schema"
+msgstr ""
+
+msgid ""
+"define a non final relation: link a non final relation type from a non final "
+"entity to a non final entity type. used to build the instance schema"
+msgstr ""
+
+msgid "define a relation type, used to build the instance schema"
+msgstr ""
+"Definieren eines Relationstyps, der zur Erstellung des Instanz-Schemas "
+"benutzt wird."
+
+msgid "define a rql expression used to define permissions"
+msgstr "Definieren eines RQL-Ausdrucks zur Festlegung von Berechtigungen."
+
+msgid "define a schema constraint"
+msgstr "Eine Schema-Einschränkung definieren"
+
+msgid "define a schema constraint type"
+msgstr "den Typ einer Schema-Einschränkung definieren"
+
+msgid "define an entity type, used to build the instance schema"
+msgstr "definieren eines Entitätstyps zur Erstellung des Instanz-Schemas"
+
+msgid "define how we get out from a sub-workflow"
+msgstr "Definieren, wie man aus einem Sub-Workflow herauskommt"
+
+msgid "defines a sql-level multicolumn unique index"
+msgstr "definiert auf SQL-Ebene einen eindeutigen Index über mehrere Spalten"
+
+msgid ""
+"defines what's the property is applied for. You must select this first to be "
+"able to set value"
+msgstr ""
+"definiert, worauf die Eigenschaft angewendet wird. Sie müssen dies zunächst "
+"markieren,um den Wert zuzuweisen."
+
+msgid "delete"
+msgstr "löschen"
+
+msgid "delete this bookmark"
+msgstr "dieses Lesezeichen löschen"
+
+msgid "delete this permission"
+msgstr "dieses Recht löschen"
+
+msgid "delete this relation"
+msgstr "diese Relation löschen"
+
+msgid "delete_permission"
+msgstr "kann gelöscht werden durch"
+
+msgctxt "CWEType"
+msgid "delete_permission"
+msgstr "Lösch-Berechtigung"
+
+msgctxt "CWRelation"
+msgid "delete_permission"
+msgstr "Lösch-Berechtigung"
+
+msgid "delete_permission_object"
+msgstr "hat Lösch-Berechtigung"
+
+msgctxt "CWGroup"
+msgid "delete_permission_object"
+msgstr "hat Lösch-Berechtigung für"
+
+msgctxt "RQLExpression"
+msgid "delete_permission_object"
+msgstr "hat die Berechtigung, zu löschen"
+
+#, python-format
+msgid "deleted %(etype)s #%(eid)s (%(title)s)"
+msgstr "Löschen der Entität %(etype)s #%(eid)s (%(title)s)"
+
+#, python-format
+msgid ""
+"deleted relation %(rtype)s from %(frometype)s #%(eidfrom)s to %(toetype)s #"
+"%(eidto)s"
+msgstr ""
+"Relation %(rtype)s von %(frometype)s #%(eidfrom)s zu %(toetype)s #%(eidto)s "
+"gelöscht"
+
+msgid "depends on the constraint type"
+msgstr "hängt vom Einschränkungsgyp ab"
+
+msgid "description"
+msgstr "Beschreibung"
+
+msgctxt "BaseTransition"
+msgid "description"
+msgstr "Beschreibung"
+
+msgctxt "CWAttribute"
+msgid "description"
+msgstr "Beschreibung"
+
+msgctxt "CWEType"
+msgid "description"
+msgstr "Beschreibung"
+
+msgctxt "CWRType"
+msgid "description"
+msgstr "Beschreibung"
+
+msgctxt "CWRelation"
+msgid "description"
+msgstr "Beschreibung"
+
+msgctxt "State"
+msgid "description"
+msgstr "Beschreibung"
+
+msgctxt "Transition"
+msgid "description"
+msgstr "Beschreibung"
+
+msgctxt "Workflow"
+msgid "description"
+msgstr "Beschreibung"
+
+msgctxt "WorkflowTransition"
+msgid "description"
+msgstr "Beschreibung"
+
+msgid "description_format"
+msgstr "Format"
+
+msgctxt "BaseTransition"
+msgid "description_format"
+msgstr "Format"
+
+msgctxt "CWAttribute"
+msgid "description_format"
+msgstr "Format"
+
+msgctxt "CWEType"
+msgid "description_format"
+msgstr "Format"
+
+msgctxt "CWRType"
+msgid "description_format"
+msgstr "Format"
+
+msgctxt "CWRelation"
+msgid "description_format"
+msgstr "Format"
+
+msgctxt "State"
+msgid "description_format"
+msgstr "Format"
+
+msgctxt "Transition"
+msgid "description_format"
+msgstr "Format"
+
+msgctxt "Workflow"
+msgid "description_format"
+msgstr "Format"
+
+msgctxt "WorkflowTransition"
+msgid "description_format"
+msgstr "Format"
+
+msgid "destination state for this transition"
+msgstr "Zielzustand dieses Übergangs"
+
+msgid "destination state must be in the same workflow as our parent transition"
+msgstr "Zielzustand muss im selben Workflow sein wie unser Parent-Übergang"
+
+msgid "destination state of a transition"
+msgstr "Zielzustand eines Übergangs"
+
+msgid ""
+"destination state. No destination state means that transition should go back "
+"to the state from which we've entered the subworkflow."
+msgstr ""
+"Zielzustand. Kein Zielzustand bedeutet, dass der Übergang in den Zustand "
+"zurückführen soll, von dem aus der Subworkflow erreicht wurde."
+
+msgid "destination_state"
+msgstr "Zielzustand"
+
+msgctxt "SubWorkflowExitPoint"
+msgid "destination_state"
+msgstr "Zielzustand"
+
+msgctxt "Transition"
+msgid "destination_state"
+msgstr "Zielzustand"
+
+msgid "destination_state_object"
+msgstr "Ziel von"
+
+msgctxt "State"
+msgid "destination_state_object"
+msgstr "Ziel von"
+
+msgid "detach attached file"
+msgstr "angehängte Datei abtrennen"
+
+msgid "display order of the box"
+msgstr "angezeigte Reihenfolge der Boxen"
+
+msgid "display order of the component"
+msgstr "angezeigte Reihenfolge der Komponenten"
+
+msgid "display order of the facet"
+msgstr "angezeigte Reihenfolge der Facetten"
+
+msgid "display the box or not"
+msgstr "die Box anzeigen oder nicht"
+
+msgid "display the component or not"
+msgstr "die Komponente anzeigen oder nicht"
+
+msgid "display the facet or not"
+msgstr "die Facette anzeigen oder nicht"
+
+msgid ""
+"distinct label to distinguate between other permission entity of the same "
+"name"
+msgstr ""
+"Zusätzliches Label, um von anderen Berechtigungsentitäten unterscheiden zu "
+"können."
+
+msgid "download"
+msgstr "Herunterladen"
+
+#, python-format
+msgid "download %s"
+msgstr "Herunterladen von %s"
+
+msgid "download icon"
+msgstr "Ikone 'herunterladen'"
+
+msgid "download schema as owl"
+msgstr "Schema als OWL herunterladen"
+
+msgid "edit bookmarks"
+msgstr "Lesezeichen bearbeiten"
+
+msgid "edit canceled"
+msgstr "Änderungen verwerfen"
+
+msgid "edit the index page"
+msgstr "Index-Seite bearbeiten"
+
+msgid "editable-table"
+msgstr "bearbeitbare Tabelle"
+
+msgid "eid"
+msgstr ""
+
+msgid "email address to use for notification"
+msgstr "E-Mail-Adresse für Mitteilungen."
+
+msgid "emails successfully sent"
+msgstr "E-Mails erfolgreich versandt."
+
+msgid "embed"
+msgstr "einbetten"
+
+msgid "embedded html"
+msgstr "HTML-Inhalt"
+
+msgid "embedding this url is forbidden"
+msgstr "Einbettung dieses URLs ist nicht erlaubt."
+
+msgid "entities deleted"
+msgstr "Entitäten gelöscht"
+
+msgid "entity copied"
+msgstr "Entität kopiert"
+
+msgid "entity created"
+msgstr "Entität erstellt"
+
+msgid "entity creation"
+msgstr "Erstellung der Entität"
+
+msgid "entity deleted"
+msgstr "Entität gelöscht"
+
+msgid "entity deletion"
+msgstr "Löschen der Entität"
+
+msgid "entity edited"
+msgstr "Entität bearbeitet"
+
+msgid "entity has no workflow set"
+msgstr "Entität hat keinen Workflow"
+
+msgid "entity linked"
+msgstr "Entität verknüpft"
+
+msgid "entity type"
+msgstr "Entitätstyp"
+
+msgid ""
+"entity type that may be used to construct some advanced security "
+"configuration"
+msgstr ""
+"Entitätstyp zum Aufbau einer fortgeschrittenen Sicherheitskonfiguration."
+
+msgid "entity types which may use this workflow"
+msgstr "Entitätstypen, die diesen Workflow benutzen können."
+
+msgid "entity update"
+msgstr "Aktualisierung der Entität"
+
+msgid "error while embedding page"
+msgstr "Fehler beim Einbetten der Seite"
+
+msgid "error while publishing ReST text"
+msgstr "Fehler beim Übersetzen von reST"
+
+#, python-format
+msgid "error while querying source %s, some data may be missing"
+msgstr ""
+"Fehler beim Zugriff auf Quelle %s, möglicherweise sind die Daten "
+"unvollständig."
+
+msgid "eta_date"
+msgstr "Enddatum"
+
+msgid "exit state must be a subworkflow state"
+msgstr "Exit-Zustand muss ein Subworkflow-Zustand sein."
+
+msgid "exit_point"
+msgstr "Exit-Punkt "
+
+msgid "exit_point_object"
+msgstr "Exit-Punkt für"
+
+#, python-format
+msgid "exiting from subworkflow %s"
+msgstr "verlasse Subworkflow %s"
+
+msgid "expected:"
+msgstr "erwartet:"
+
+msgid "expression"
+msgstr "Ausdruck"
+
+msgctxt "RQLExpression"
+msgid "expression"
+msgstr "Ausdruck"
+
+msgid "exprtype"
+msgstr "Typ des Ausdrucks"
+
+msgctxt "RQLExpression"
+msgid "exprtype"
+msgstr "Typ des Ausdrucks"
+
+msgid "external page"
+msgstr "externe Seite"
+
+msgid "facet.filters"
+msgstr ""
+
+msgid "facetbox"
+msgstr "Facetten-Box"
+
+msgid "facets_created_by-facet"
+msgstr "\"erstellt durch\" facet"
+
+msgid "facets_created_by-facet_description"
+msgstr ""
+
+msgid "facets_cw_source-facet"
+msgstr ""
+
+msgid "facets_cw_source-facet_description"
+msgstr ""
+
+msgid "facets_cwfinal-facet"
+msgstr "\"finaler Entitäts- oder Relationstyp\" facet"
+
+msgid "facets_cwfinal-facet_description"
+msgstr ""
+
+msgid "facets_etype-facet"
+msgstr "\"Entitätstyp\" facet"
+
+msgid "facets_etype-facet_description"
+msgstr ""
+
+msgid "facets_has_text-facet"
+msgstr "\"hat Text\" facet"
+
+msgid "facets_has_text-facet_description"
+msgstr ""
+
+msgid "facets_in_group-facet"
+msgstr "\"in Gruppe\" facet"
+
+msgid "facets_in_group-facet_description"
+msgstr ""
+
+msgid "facets_in_state-facet"
+msgstr "\"in Zustand\" facet"
+
+msgid "facets_in_state-facet_description"
+msgstr ""
+
+#, python-format
+msgid "failed to uniquify path (%s, %s)"
+msgstr "Konnte keinen eindeutigen Dateinamen erzeugen (%s, %s)"
+
+msgid "february"
+msgstr "Februar"
+
+msgid "file tree view"
+msgstr "Baumansicht (Dateien)"
+
+msgid "final"
+msgstr ""
+
+msgctxt "CWEType"
+msgid "final"
+msgstr ""
+
+msgctxt "CWRType"
+msgid "final"
+msgstr ""
+
+msgid "first name"
+msgstr "Vorname"
+
+msgid "firstname"
+msgstr "Vorname"
+
+msgctxt "CWUser"
+msgid "firstname"
+msgstr "Vorname"
+
+msgid "foaf"
+msgstr "FOAF"
+
+msgid "follow"
+msgstr "dem Link folgen"
+
+#, python-format
+msgid "follow this link for more information on this %s"
+msgstr "Folgend Sie dem Link für mehr Informationen über %s"
+
+msgid "follow this link if javascript is deactivated"
+msgstr "Folgen Sie diesem Link, falls Javascript deaktiviert ist."
+
+msgid "for_user"
+msgstr "für den Nutzer"
+
+msgctxt "CWProperty"
+msgid "for_user"
+msgstr "für Nutzer"
+
+msgid "for_user_object"
+msgstr "hat als Eigenschaft"
+
+msgctxt "CWUser"
+msgid "for_user_object"
+msgstr "verwendet die Eigenschaften"
+
+msgid "friday"
+msgstr "Freitag"
+
+msgid "from"
+msgstr "von"
+
+#, python-format
+msgid "from %(date)s"
+msgstr "vom %(date)s"
+
+msgid "from_entity"
+msgstr "der Entität"
+
+msgctxt "CWAttribute"
+msgid "from_entity"
+msgstr "Attribut der Entität"
+
+msgctxt "CWRelation"
+msgid "from_entity"
+msgstr "Relation der Entität"
+
+msgid "from_entity_object"
+msgstr "der Entität"
+
+msgctxt "CWEType"
+msgid "from_entity_object"
+msgstr "Subjektrelation"
+
+msgid "from_interval_start"
+msgstr "Von"
+
+msgid "from_state"
+msgstr "des Zustands"
+
+msgctxt "TrInfo"
+msgid "from_state"
+msgstr "Anfangszustand"
+
+msgid "from_state_object"
+msgstr "Übergänge aus diesem Zustand"
+
+msgctxt "State"
+msgid "from_state_object"
+msgstr "Anfangszustand von"
+
+msgid "full text or RQL query"
+msgstr "Volltextsuche oder RQL-Anfrage"
+
+msgid "fulltext_container"
+msgstr "Container des indizierten Textes"
+
+msgctxt "CWRType"
+msgid "fulltext_container"
+msgstr "zu indizierendes Objekt"
+
+msgid "fulltextindexed"
+msgstr "Indizierung des Textes"
+
+msgctxt "CWAttribute"
+msgid "fulltextindexed"
+msgstr "indizierter Text"
+
+msgid "generic plot"
+msgstr "generischer Plot"
+
+msgid "generic relation to link one entity to another"
+msgstr "generische Relation zur Verbindung einer Entität mit einer anderen"
+
+msgid ""
+"generic relation to specify that an external entity represent the same "
+"object as a local one: http://www.w3.org/TR/owl-ref/#sameAs-def"
+msgstr ""
+"generische Relation, die anzeigt, dass eine Entität mit einer anderen Web-"
+"Ressource identisch ist (siehe http://www.w3.org/TR/owl-ref/#sameAs-def)."
+
+msgid "go back to the index page"
+msgstr "Zurück zur Index-Seite"
+
+msgid "granted to groups"
+msgstr "an Gruppen gewährt"
+
+#, python-format
+msgid "graphical representation of %(appid)s data model"
+msgstr "graphische Darstellung des Datenmodells von %(appid)s"
+
+#, fuzzy, python-format
+msgid ""
+"graphical representation of the %(etype)s entity type from %(appid)s data "
+"model"
+msgstr ""
+"graphische Darstellung des Datenmodells des Entitätstyps (etype)s von "
+"%(appid)s"
+
+#, python-format
+msgid ""
+"graphical representation of the %(rtype)s relation type from %(appid)s data "
+"model"
+msgstr ""
+"graphische Darstellung des Datenmodells des Relationstyps %(rtype)s von "
+"%(appid)s"
+
+msgid "group in which a user should be to be allowed to pass this transition"
+msgstr ""
+"Gruppe, zu welcher der Nutzer gehören muss, um die Transaktion durchzuführen"
+
+msgid "groups"
+msgstr "Gruppen"
+
+msgid "groups grant permissions to the user"
+msgstr "die Gruppen geben dem Nutzer Rechte"
+
+msgid "groups to which the permission is granted"
+msgstr "Gruppen, denen dieses Recht verliehen ist"
+
+msgid "guests"
+msgstr "Gäste"
+
+msgid "hCalendar"
+msgstr "hCalendar"
+
+msgid "has_text"
+msgstr "enthält Text"
+
+msgid "header-left"
+msgstr ""
+
+msgid "header-right"
+msgstr ""
+
+msgid "hide filter form"
+msgstr "Filter verbergen"
+
+msgid ""
+"how to format date and time in the ui (\"man strftime\" for format "
+"description)"
+msgstr ""
+"Wie formatiert man das Datum Interface im (\"man strftime\" für die "
+"Beschreibung des neuen Formats"
+
+msgid "how to format date in the ui (\"man strftime\" for format description)"
+msgstr ""
+"Wie formatiert man das Datum im Interface (\"man strftime\" für die "
+"Beschreibung des Formats)"
+
+msgid "how to format float numbers in the ui"
+msgstr "Wie man Dezimalzahlen (float) im Interface formatiert"
+
+msgid "how to format time in the ui (\"man strftime\" for format description)"
+msgstr ""
+"Wie man die Uhrzeit im Interface (\"man strftime\" für die "
+"Formatbeschreibung)"
+
+msgid "i18n_bookmark_url_fqs"
+msgstr "Parameter"
+
+msgid "i18n_bookmark_url_path"
+msgstr "Pfad"
+
+msgid "i18n_login_popup"
+msgstr "Anmelden"
+
+msgid "i18ncard_*"
+msgstr "0..n"
+
+msgid "i18ncard_+"
+msgstr "1..n"
+
+msgid "i18ncard_1"
+msgstr "1"
+
+msgid "i18ncard_?"
+msgstr "0..1"
+
+msgid "i18nprevnext_next"
+msgstr "Weiter"
+
+msgid "i18nprevnext_previous"
+msgstr "Zurück"
+
+msgid "i18nprevnext_up"
+msgstr "eine Ebene auf"
+
+msgid "iCalendar"
+msgstr "iCalendar"
+
+msgid "id of main template used to render pages"
+msgstr "id der Hauptvorlage"
+
+msgid "identical to"
+msgstr "identisch mit"
+
+msgid "identical_to"
+msgstr "identisch mit"
+
+msgid "identity"
+msgstr "ist identisch mit"
+
+msgid "identity_object"
+msgstr "ist identisch mit"
+
+msgid ""
+"if full text content of subject/object entity should be added to other side "
+"entity (the container)."
+msgstr ""
+"falls der indizierte Text der Subjekt/Objekt-Entität der anderen Seite der "
+"Relation (dem Container) hinzugefügt werden muss"
+
+msgid "image"
+msgstr "Bild"
+
+msgid "in_group"
+msgstr "in der Gruppe"
+
+msgctxt "CWUser"
+msgid "in_group"
+msgstr "gehört zu der Gruppe"
+
+msgid "in_group_object"
+msgstr "Mitglieder"
+
+msgctxt "CWGroup"
+msgid "in_group_object"
+msgstr "enthält die Nutzer"
+
+msgid "in_state"
+msgstr "Zustand"
+
+msgid "in_state_object"
+msgstr "Zustand von"
+
+msgid "incontext"
+msgstr "im Kontext"
+
+msgid "incorrect captcha value"
+msgstr "Unzulässiger Wert für Überschrift"
+
+#, python-format
+msgid "incorrect value (%(value)s) for type \"%(type)s\""
+msgstr "Wert %(value)s ungültig für den Typ \"%(type)s\""
+
+msgid "index this attribute's value in the plain text index"
+msgstr "indizieren des Wertes dieses Attributs im Volltext-Index"
+
+msgid "indexed"
+msgstr "Index"
+
+msgctxt "CWAttribute"
+msgid "indexed"
+msgstr "indiziert"
+
+msgid "indicate the current state of an entity"
+msgstr "zeigt den aktuellen Zustand einer Entität an"
+
+msgid ""
+"indicate which state should be used by default when an entity using states "
+"is created"
+msgstr ""
+"zeigt an, welcher Zustand standardmäßig benutzt werden soll, wenn eine "
+"Entität erstellt wird"
+
+msgid "info"
+msgstr "Information"
+
+#, python-format
+msgid "initial estimation %s"
+msgstr "Erste Schätzung %s"
+
+msgid "initial state for this workflow"
+msgstr "Anfangszustand für diesen Workflow"
+
+msgid "initial_state"
+msgstr "Anfangszustand"
+
+msgctxt "Workflow"
+msgid "initial_state"
+msgstr "Anfangszustand"
+
+msgid "initial_state_object"
+msgstr "Anfangszustand von"
+
+msgctxt "State"
+msgid "initial_state_object"
+msgstr "Anfangszustand von"
+
+msgid "inlined"
+msgstr "eingereiht"
+
+msgctxt "CWRType"
+msgid "inlined"
+msgstr "eingereiht"
+
+msgid "instance home"
+msgstr "Startseite der Instanz"
+
+msgid "instance schema"
+msgstr "Schema der Instanz"
+
+msgid "internal entity uri"
+msgstr "interner URI"
+
+msgid "internationalizable"
+msgstr "internationalisierbar"
+
+msgctxt "CWAttribute"
+msgid "internationalizable"
+msgstr "internationalisierbar"
+
+#, python-format
+msgid "invalid action %r"
+msgstr "Ungültige Aktion %r"
+
+#, python-format
+msgid "invalid value %(value)s, it must be one of %(choices)s"
+msgstr "Wert %(value)s ungültig, er muss zwischen %(choices)s"
+
+msgid "is"
+msgstr "vom Typ"
+
+msgid "is object of:"
+msgstr "ist Objekt von"
+
+msgid "is subject of:"
+msgstr "ist Subjekt von"
+
+msgid ""
+"is the subject/object entity of the relation composed of the other ? This "
+"implies that when the composite is deleted, composants are also deleted."
+msgstr ""
+"Ist die Subjekt/Objekt-Entität der Relation aus der anderen Seite "
+"zusammengesetzt?Falls ja, werden beim Löschen der Entität auch deren "
+"Bausteine gelöscht."
+
+msgid "is this attribute's value translatable"
+msgstr "Ist der Wert dieses Attributs übersetzbar?"
+
+msgid "is this relation equivalent in both direction ?"
+msgstr "Ist diese Relation in beiden Richtungen äquivalent?"
+
+msgid ""
+"is this relation physically inlined? you should know what you're doing if "
+"you are changing this!"
+msgstr ""
+"Ist diese Relation in die Datenbank eingereiht? Sie sollten wissen, was Sie "
+"tun, wenn Sie dies ändern."
+
+msgid "is_instance_of"
+msgstr "ist eine Instanz von"
+
+msgid "is_instance_of_object"
+msgstr "Typ von"
+
+msgid "is_object"
+msgstr "hat als Instanz"
+
+msgid "january"
+msgstr "Januar"
+
+msgid "july"
+msgstr "Juli"
+
+msgid "june"
+msgstr "Juni"
+
+msgid "label"
+msgstr "gekennzeichnet"
+
+msgctxt "CWPermission"
+msgid "label"
+msgstr "gekennzeichnet"
+
+msgid "language of the user interface"
+msgstr "Sprache der Nutzer-Schnittstelle"
+
+msgid "last connection date"
+msgstr "Datum der letzten Verbindung"
+
+msgid "last login time"
+msgstr "Datum der letzten Verbindung"
+
+msgid "last name"
+msgstr "Name"
+
+msgid "last usage"
+msgstr "letzte Benutzung"
+
+msgid "last_login_time"
+msgstr "Datum der letzten Verbindung"
+
+msgctxt "CWUser"
+msgid "last_login_time"
+msgstr "Datum der letzten Verbindung"
+
+msgid "latest modification time of an entity"
+msgstr "Datum der letzten Änderung einer Entität"
+
+msgid "latest update on"
+msgstr "letzte Änderung am"
+
+msgid "left"
+msgstr "links"
+
+msgid ""
+"link a permission to the entity. This permission should be used in the "
+"security definition of the entity's type to be useful."
+msgstr ""
+"verknüpft eine Berechtigung mit einer Entität. Um Nützlich zu sein, sollte "
+"diese Berechtigung in der Sicherheitsdefinition des Entitätstyps benutzt "
+"werden."
+
+msgid ""
+"link a property to the user which want this property customization. Unless "
+"you're a site manager, this relation will be handled automatically."
+msgstr ""
+"verknüpft eine Eigenschaft mit einem Nutzer, der diese Personalisierung "
+"wünscht. Sofern Sie nicht Site-Manager sind, wird diese Relation automatisch "
+"behandelt."
+
+msgid "link a relation definition to its object entity type"
+msgstr "verknüpft eine Relationsdefinition mit dem Entitätstyp ihres Objekts"
+
+msgid "link a relation definition to its relation type"
+msgstr "verknüpft eine Relationsdefinition mit ihrem Relationstyp"
+
+msgid "link a relation definition to its subject entity type"
+msgstr "verknüpft eine Relationsdefinition mit dem Entitätstyp ihres Subjekts"
+
+msgid "link a state to one or more workflow"
+msgstr "verknüpft einen Zustand mit einem oder mehreren Workflows"
+
+msgid "link a transition information to its object"
+msgstr "verknüpft eine Übergangsinformation mit ihrem Objekt"
+
+msgid "link a transition to one or more workflow"
+msgstr "verknüpft einen Übergang mit einem oder mehreren Workflows"
+
+msgid "link a workflow to one or more entity type"
+msgstr "verknüpft einen Workflow mit einem oder mehreren Entitätstypen"
+
+msgid "list"
+msgstr "Liste"
+
+msgid "log in"
+msgstr "anmelden"
+
+msgid "log out first"
+msgstr "Melden Sie sich zuerst ab."
+
+msgid "login"
+msgstr "Anmeldung"
+
+msgctxt "CWUser"
+msgid "login"
+msgstr "Anmeldung"
+
+msgid "login / password"
+msgstr ""
+
+msgid "login or email"
+msgstr "Nutzername oder E-Mail-Adresse"
+
+msgid "login_action"
+msgstr "Melden Sie sich an"
+
+msgid "logout"
+msgstr "Abmelden"
+
+#, python-format
+msgid "loop in %(rel)s relation (%(eid)s)"
+msgstr ""
+"Endlosschleife gefunden in der Relation %(rel)s von der Entität #%(eid)s"
+
+msgid "main informations"
+msgstr "Allgemeine Informationen"
+
+msgid "main_tab"
+msgstr ""
+
+msgid "mainvars"
+msgstr "Hauptvariablen"
+
+msgctxt "RQLExpression"
+msgid "mainvars"
+msgstr "Hauptvariablen"
+
+msgid "manage"
+msgstr "Verwalten"
+
+msgid "manage bookmarks"
+msgstr "Lesezeichen verwalten"
+
+msgid "manage permissions"
+msgstr "Rechte verwalten"
+
+msgid "manage security"
+msgstr "Sicherheitsverwaltung"
+
+msgid "managers"
+msgstr "Administratoren"
+
+msgid "mandatory relation"
+msgstr "obligatorische Relation"
+
+msgid "march"
+msgstr "März"
+
+msgid "match_host"
+msgstr ""
+
+msgctxt "CWSourceHostConfig"
+msgid "match_host"
+msgstr ""
+
+msgid "maximum number of characters in short description"
+msgstr "Maximale Anzahl von Zeichen in der Kurzbeschreibung"
+
+msgid "maximum number of entities to display in related combo box"
+msgstr "maximale Anzahl Entitäten zur Anzeige im Listenfeld"
+
+msgid "maximum number of objects displayed by page of results"
+msgstr "maximale Anzahl pro Seite angezeigter Objekte"
+
+msgid "maximum number of related entities to display in the primary view"
+msgstr "maximale anzahl verknüpfter Entitäten zur Anzeige auf der hauptseite"
+
+msgid "may"
+msgstr "Mai"
+
+msgid "memory leak debugging"
+msgstr "Fehlersuche bei Speicherlöschern"
+
+msgid "milestone"
+msgstr "Meilenstein"
+
+#, python-format
+msgid "missing parameters for entity %s"
+msgstr "Fehlende Parameter für Entität %s"
+
+msgid "modification"
+msgstr "Änderung"
+
+msgid "modification_date"
+msgstr "Datum der Änderung"
+
+msgid "modify"
+msgstr "ändern"
+
+msgid "monday"
+msgstr "Montag"
+
+msgid "more actions"
+msgstr "weitere Aktionen"
+
+msgid "more info about this workflow"
+msgstr "mehr Informationen zu diesem Workflow"
+
+msgid "multiple edit"
+msgstr "mehrfache Bearbeitung"
+
+msgid "my custom search"
+msgstr "meine personalisierte Suche"
+
+msgid "name"
+msgstr "Name"
+
+msgctxt "BaseTransition"
+msgid "name"
+msgstr "Name"
+
+msgctxt "CWCache"
+msgid "name"
+msgstr "Name"
+
+msgctxt "CWConstraintType"
+msgid "name"
+msgstr "Name"
+
+msgctxt "CWEType"
+msgid "name"
+msgstr "Name"
+
+msgctxt "CWGroup"
+msgid "name"
+msgstr "Name"
+
+msgctxt "CWPermission"
+msgid "name"
+msgstr "Name"
+
+msgctxt "CWRType"
+msgid "name"
+msgstr "Name"
+
+msgctxt "CWSource"
+msgid "name"
+msgstr ""
+
+msgctxt "State"
+msgid "name"
+msgstr "Name"
+
+msgctxt "Transition"
+msgid "name"
+msgstr "Name"
+
+msgctxt "Workflow"
+msgid "name"
+msgstr "Name"
+
+msgctxt "WorkflowTransition"
+msgid "name"
+msgstr "Name"
+
+msgid "name of the cache"
+msgstr "Name des Caches"
+
+msgid ""
+"name of the main variables which should be used in the selection if "
+"necessary (comma separated)"
+msgstr ""
+"Name der Hauptvariablen, die in der Auswahl benutzt werden sollten (mehrere "
+"Variablen durch ',' trennen!)"
+
+msgid "name of the source"
+msgstr ""
+
+msgid "name or identifier of the permission"
+msgstr "Name (oder Bezeichner) der Berechtigung"
+
+msgid "navbottom"
+msgstr "zum Seitenende"
+
+msgid "navcontentbottom"
+msgstr "zum Hauptinhalt"
+
+msgid "navcontenttop"
+msgstr "zum Seitenanfang"
+
+msgid "navigation"
+msgstr "Navigation"
+
+msgid "navigation.combobox-limit"
+msgstr "Anzahl Entitäten pro Listenfeld"
+
+msgid "navigation.page-size"
+msgstr "Anzahl Suchergebnisse"
+
+msgid "navigation.related-limit"
+msgstr "Anzahl Entitäten in der Hauptansicht"
+
+msgid "navigation.short-line-size"
+msgstr "Kurzbeschreibung"
+
+msgid "navtop"
+msgstr "zum Hauptinhalt springen"
+
+msgid "new"
+msgstr "neu"
+
+msgid "next_results"
+msgstr "weitere Ergebnisse"
+
+msgid "no"
+msgstr "Nein"
+
+msgid "no associated permissions"
+msgstr "keine entsprechende Berechtigung"
+
+#, python-format
+msgid "no edited fields specified for entity %s"
+msgstr "kein Eingabefeld spezifiziert Für Entität %s"
+
+msgid "no related entity"
+msgstr "keine verknüpfte Entität"
+
+msgid "no related project"
+msgstr "kein verknüpftes Projekt"
+
+msgid "no repository sessions found"
+msgstr "keine Datenbank-Sitzung gefunden"
+
+msgid "no selected entities"
+msgstr "keine Entitäten ausgewählt"
+
+#, python-format
+msgid "no such entity type %s"
+msgstr "Der Entitätstyp '%s' existiert nicht."
+
+msgid "no version information"
+msgstr "Keine Versionsangaben."
+
+msgid "no web sessions found"
+msgstr "Keine Sitzung gefunden."
+
+msgid "normal"
+msgstr "normal"
+
+msgid "not authorized"
+msgstr "nicht authrisiert"
+
+msgid "not selected"
+msgstr "nicht ausgewählt"
+
+msgid "november"
+msgstr "November"
+
+msgid "object"
+msgstr "Objekt"
+
+msgid "object type"
+msgstr "Objekttyp"
+
+msgid "october"
+msgstr "Oktober"
+
+msgid "one month"
+msgstr "ein Monat"
+
+msgid "one week"
+msgstr "eine Woche"
+
+msgid "oneline"
+msgstr "eine Zeile"
+
+msgid "only select queries are authorized"
+msgstr "Nur Auswahl-Anfragen sind erlaubt."
+
+msgid "open all"
+msgstr "alle öffnen"
+
+msgid "opened sessions"
+msgstr "offene Sitzungen"
+
+msgid "opened web sessions"
+msgstr "offene Web-Sitzungen"
+
+msgid "options"
+msgstr "Optionen"
+
+msgid "order"
+msgstr "Reihenfolge"
+
+msgid "ordernum"
+msgstr "Reihenfolge"
+
+msgctxt "CWAttribute"
+msgid "ordernum"
+msgstr "Ordnungszahl"
+
+msgctxt "CWRelation"
+msgid "ordernum"
+msgstr "Ordnungszahl"
+
+msgid "owl"
+msgstr "OWL"
+
+msgid "owlabox"
+msgstr "OWL ABox"
+
+msgid "owned_by"
+msgstr "gehört zu"
+
+msgid "owned_by_object"
+msgstr "besitzt"
+
+msgid "owners"
+msgstr "Besitzer"
+
+msgid "ownership"
+msgstr "Eigentum"
+
+msgid "ownerships have been changed"
+msgstr "Die Eigentumsrechte sind geändert worden."
+
+msgid "pageid-not-found"
+msgstr ""
+"Notwendige Daten scheinen nicht mehr gültig zu sein. Bitte laden Sie die "
+"Seite neu und beginnen Sie von vorn."
+
+msgid "password"
+msgstr "Passwort"
+
+msgid "password and confirmation don't match"
+msgstr "Das Passwort stimmt nicht mit der Bestätigung überein."
+
+msgid "path"
+msgstr "Pfad"
+
+msgctxt "Bookmark"
+msgid "path"
+msgstr "Pfad"
+
+msgid "permission"
+msgstr "Recht"
+
+msgid "permissions"
+msgstr "Rechte"
+
+msgid "permissions for this entity"
+msgstr "Rechte für diese Entität"
+
+msgid "pick existing bookmarks"
+msgstr "Wählen Sie aus den bestehenden lesezeichen aus"
+
+msgid "pkey"
+msgstr "Schlüssel"
+
+msgctxt "CWProperty"
+msgid "pkey"
+msgstr "code der Eigenschaft"
+
+msgid "please correct errors below"
+msgstr "Bitte die nachstehenden Fehler korrigieren"
+
+msgid "please correct the following errors:"
+msgstr "Bitte korrigieren Sie die folgenden Fehler:"
+
+msgid "possible views"
+msgstr "Mögliche Ansichten"
+
+msgid "prefered_form"
+msgstr "bevorzugte form"
+
+msgctxt "EmailAddress"
+msgid "prefered_form"
+msgstr "bevorzugte form"
+
+msgid "prefered_form_object"
+msgstr "bevorzugte form vor"
+
+msgctxt "EmailAddress"
+msgid "prefered_form_object"
+msgstr "bevorzugte form von"
+
+msgid "preferences"
+msgstr "Einstellungen"
+
+msgid "previous_results"
+msgstr "vorige Ergebnisse"
+
+msgid "primary"
+msgstr "primär"
+
+msgid "primary_email"
+msgstr "primäre E-Mail-Adresse"
+
+msgctxt "CWUser"
+msgid "primary_email"
+msgstr "primäre E-Mail-Adresse"
+
+msgid "primary_email_object"
+msgstr "Objekt der primären E-Mail-Adresse"
+
+msgctxt "EmailAddress"
+msgid "primary_email_object"
+msgstr "primäre E-Mail-Adresse von"
+
+msgid "profile"
+msgstr "Profil"
+
+msgid "progress"
+msgstr "Fortschritt"
+
+msgid "progress bar"
+msgstr "Fortschrittsbalken"
+
+msgid "project"
+msgstr "Projekt"
+
+msgid "rdef-description"
+msgstr "Beschreibung"
+
+msgid "rdef-permissions"
+msgstr "Rechte"
+
+msgid "read"
+msgstr "Lesen"
+
+msgid "read_permission"
+msgstr "Leseberechtigung"
+
+msgctxt "CWAttribute"
+msgid "read_permission"
+msgstr "Leseberechtigung"
+
+msgctxt "CWEType"
+msgid "read_permission"
+msgstr "Leseberechtigung"
+
+msgctxt "CWRelation"
+msgid "read_permission"
+msgstr "Leseberechtigung"
+
+msgid "read_permission_object"
+msgstr "hat eine Leseberechtigung"
+
+msgctxt "CWGroup"
+msgid "read_permission_object"
+msgstr "kann lesen"
+
+msgctxt "RQLExpression"
+msgid "read_permission_object"
+msgstr "kann lesen"
+
+msgid "regexp matching host(s) to which this config applies"
+msgstr ""
+
+msgid "registry"
+msgstr "Registratur"
+
+msgid "related entity has no state"
+msgstr "Verknüpfte Entität hat keinen Zustand"
+
+msgid "related entity has no workflow set"
+msgstr "Verknüpfte Entität hat keinen Workflow"
+
+msgid "relation"
+msgstr "Relation"
+
+#, python-format
+msgid "relation %(relname)s of %(ent)s"
+msgstr "Relation %(relname)s von %(ent)s"
+
+msgid "relation add"
+msgstr "Relation hinzufügen"
+
+msgid "relation removal"
+msgstr "Relation entfernen"
+
+msgid "relation_type"
+msgstr "Relationstyp"
+
+msgctxt "CWAttribute"
+msgid "relation_type"
+msgstr "Relationstyp"
+
+msgctxt "CWRelation"
+msgid "relation_type"
+msgstr "Relationstyp"
+
+msgid "relation_type_object"
+msgstr "Definition"
+
+msgctxt "CWRType"
+msgid "relation_type_object"
+msgstr "definition"
+
+msgid "relations"
+msgstr "Relationen"
+
+msgctxt "CWUniqueTogetherConstraint"
+msgid "relations"
+msgstr "Relationen"
+
+msgid "relations deleted"
+msgstr "Relationen entfernt"
+
+msgid "relations_object"
+msgstr "Relationen von"
+
+msgctxt "CWRType"
+msgid "relations_object"
+msgstr ""
+
+msgid "relative url of the bookmarked page"
+msgstr "URL relativ zu der Seite"
+
+msgid "remove-inlined-entity-form"
+msgstr "Entfernen"
+
+msgid "require_group"
+msgstr "benötigt die Gruppe"
+
+msgctxt "BaseTransition"
+msgid "require_group"
+msgstr "auf Gruppe beschränkt"
+
+msgctxt "CWPermission"
+msgid "require_group"
+msgstr "auf Gruppe beschränkt"
+
+msgctxt "Transition"
+msgid "require_group"
+msgstr "auf Gruppe beschränkt"
+
+msgctxt "WorkflowTransition"
+msgid "require_group"
+msgstr "auf Gruppe beschränkt"
+
+msgid "require_group_object"
+msgstr "hat die Rechte"
+
+msgctxt "CWGroup"
+msgid "require_group_object"
+msgstr "hat die Rechte"
+
+msgid "require_permission"
+msgstr "erfordert Berechtigung"
+
+msgid "require_permission_object"
+msgstr "Berechtigung von"
+
+msgid "required"
+msgstr "erforderlich"
+
+msgid "required attribute"
+msgstr "erforderliches Attribut"
+
+msgid "required field"
+msgstr "Pflichtfeld"
+
+msgid "resources usage"
+msgstr "genutzte Ressourcen"
+
+msgid ""
+"restriction part of a rql query. For entity rql expression, X and U are "
+"predefined respectivly to the current object and to the request user. For "
+"relation rql expression, S, O and U are predefined respectivly to the "
+"current relation'subject, object and to the request user. "
+msgstr ""
+"Restriktionsteil einer RQL-Abfrage. Für einen Ausdruck, der für eine Entität "
+"gilt,X und U sind jeweils für die Entität und den Nutzer vordefiniert."
+"respectivement prédéfinis au sujet/objet de la relation et à l'utilisateur "
+
+msgid "revert changes"
+msgstr "Änderungen rückgängig machen"
+
+msgid "right"
+msgstr "rechts"
+
+msgid "rql expressions"
+msgstr "RQL-Ausdrücke"
+
+msgid "rss"
+msgstr "RSS"
+
+msgid "same_as"
+msgstr "identisch mit"
+
+msgid "sample format"
+msgstr "Beispiel"
+
+msgid "saturday"
+msgstr "Samstag"
+
+msgid "schema entities"
+msgstr "Entitäten, die das Schema definieren"
+
+msgid "schema's permissions definitions"
+msgstr "Im Schema definierte Rechte"
+
+msgid "schema-diagram"
+msgstr "Diagramm"
+
+msgid "schema-entity-types"
+msgstr "Entitätstypen"
+
+msgid "schema-relation-types"
+msgstr "Relationstypen"
+
+msgid "schema-security"
+msgstr "Rechte"
+
+msgid "search"
+msgstr "suchen"
+
+msgid "search for association"
+msgstr "nach verwandten Ergebnissen suchen"
+
+msgid "searching for"
+msgstr "Suche nach"
+
+msgid "secondary"
+msgstr "sekundär"
+
+msgid "security"
+msgstr "Sicherheit"
+
+msgid "see more"
+msgstr ""
+
+msgid "see them all"
+msgstr "Alle ansehen"
+
+msgid "see_also"
+msgstr "Siehe auch"
+
+msgid "select"
+msgstr "auswählen"
+
+msgid "select a"
+msgstr "wählen Sie einen"
+
+msgid "select a key first"
+msgstr "Wählen Sie zuerst einen Schlüssel."
+
+msgid "select a relation"
+msgstr "Wählen Sie eine Relation."
+
+msgid "select this entity"
+msgstr "Wählen Sie diese Entität"
+
+msgid "selected"
+msgstr "ausgewählt"
+
+msgid "semantic description of this attribute"
+msgstr "Semantische Beschreibung dieses Attributs"
+
+msgid "semantic description of this entity type"
+msgstr "Semantische Beschreibung dieses Entitätstyps"
+
+msgid "semantic description of this relation"
+msgstr "Semantische Beschreibung dieser Relation"
+
+msgid "semantic description of this relation type"
+msgstr "Semantische Beschreibung dieses Relationstyps"
+
+msgid "semantic description of this state"
+msgstr "Semantische Beschreibung dieses Zustands"
+
+msgid "semantic description of this transition"
+msgstr "Semantische Beschreibung dieses Übergangs"
+
+msgid "semantic description of this workflow"
+msgstr "Semantische Beschreibung dieses Workflows"
+
+msgid "send email"
+msgstr "E-Mail senden"
+
+msgid "september"
+msgstr "September"
+
+msgid "server information"
+msgstr "Server-Informationen"
+
+msgid ""
+"should html fields being edited using fckeditor (a HTML WYSIWYG editor). "
+"You should also select text/html as default text format to actually get "
+"fckeditor."
+msgstr ""
+"Bestimmt, ob HTML-Felder mit fckeditor (ein WYSIWYG-HTML-Editor)\n"
+"bearbeitet werden müssen. Es wird auch empfohlen, Text/HTML\n"
+"als Standard-Textformat festzulegen, um Text mit fckeditor zu bearbeiten."
+
+#, python-format
+msgid "show %s results"
+msgstr "Zeige %s Ergebnisse"
+
+msgid "show advanced fields"
+msgstr "Zeige detaillierte Felder"
+
+msgid "show filter form"
+msgstr "Filter zeigen"
+
+msgid "sioc"
+msgstr "sioc"
+
+msgid "site configuration"
+msgstr "Konfiguration der Website"
+
+msgid "site documentation"
+msgstr "Dokumentation der Website"
+
+msgid "site schema"
+msgstr "Schema der Website"
+
+msgid "site title"
+msgstr "Titel der Website"
+
+msgid "site-wide property can't be set for user"
+msgstr ""
+"Eine Eigenschaft für die gesamte Website kann nicht für einen Nutzer gesetzt "
+"werden."
+
+msgid "some errors occurred:"
+msgstr "Einige Fehler sind aufgetreten"
+
+msgid "some later transaction(s) touch entity, undo them first"
+msgstr ""
+"Eine oder mehrere frühere Transaktion(en) betreffen die Tntität. Machen Sie "
+"sie zuerst rückgängig."
+
+msgid "sorry, the server is unable to handle this query"
+msgstr "Der Server kann diese Anfrage leider nicht bearbeiten."
+
+msgid ""
+"source's configuration. One key=value per line, authorized keys depending on "
+"the source's type"
+msgstr ""
+
+msgid "sparql xml"
+msgstr "Sparql XML"
+
+msgid "special transition allowing to go through a sub-workflow"
+msgstr "Spezieller Übergang, um in einen Subworkflow hineinzugehen"
+
+msgid "specializes"
+msgstr "leitet sich ab von"
+
+msgctxt "CWEType"
+msgid "specializes"
+msgstr "spezialisiert"
+
+msgid "specializes_object"
+msgstr "Vorgänger von"
+
+msgctxt "CWEType"
+msgid "specializes_object"
+msgstr "Vorgänger von"
+
+msgid "startup views"
+msgstr "Start-Ansichten"
+
+msgid "state"
+msgstr "Zustand"
+
+msgid "state and transition don't belong the the same workflow"
+msgstr "Zustand und Übergang gehören nicht zum selben Workflow."
+
+msgid "state doesn't apply to this entity's type"
+msgstr "Zustand gilt nicht für diesen Entitätstyp."
+
+msgid "state doesn't belong to entity's current workflow"
+msgstr "Der Zustand gehört nicht zum aktuellen Workflow der Entität."
+
+msgid "state doesn't belong to entity's workflow"
+msgstr "Der Zustand gehört nicht zum Workflow der Entität."
+
+msgid ""
+"state doesn't belong to entity's workflow. You may want to set a custom "
+"workflow for this entity first."
+msgstr ""
+"Der Zustand gehört nicht zum Workflow der Entität.Bitte bestimmen Sie zuerst "
+"einen Workflow für diese Entität."
+
+msgid "state doesn't belong to this workflow"
+msgstr "Zustand gehört nicht zu diesem Workflow."
+
+msgid "state_of"
+msgstr "Zustand von"
+
+msgctxt "State"
+msgid "state_of"
+msgstr "Zustand von"
+
+msgid "state_of_object"
+msgstr "hat als Zustand"
+
+msgctxt "Workflow"
+msgid "state_of_object"
+msgstr "enthält die Zustände"
+
+msgid "status change"
+msgstr "Zustand ändern"
+
+msgid "status changed"
+msgstr "Zustand geändert"
+
+#, python-format
+msgid "status will change from %(st1)s to %(st2)s"
+msgstr "Entität wird vom Zustand %(st1)s in zustand %(st2)s übergehen."
+
+msgid "subject"
+msgstr "Subjekt"
+
+msgid "subject type"
+msgstr "Subjekttyp"
+
+msgid "subject/object cardinality"
+msgstr "Subjekt/Objekt Kardinalität"
+
+msgid "subworkflow"
+msgstr "Subworkflow"
+
+msgctxt "WorkflowTransition"
+msgid "subworkflow"
+msgstr "Subworkflow"
+
+msgid ""
+"subworkflow isn't a workflow for the same types as the transition's workflow"
+msgstr ""
+"Dieser Subworkflow gilt nicht für dieselben Typen wie der Workflow dieses "
+"Übergangs."
+
+msgid "subworkflow state"
+msgstr "Zustand des Subworkflows"
+
+msgid "subworkflow_exit"
+msgstr "Ende des Subworkflows"
+
+msgctxt "WorkflowTransition"
+msgid "subworkflow_exit"
+msgstr "Ende des Subworkflows"
+
+msgid "subworkflow_exit_object"
+msgstr "Endzustand"
+
+msgctxt "SubWorkflowExitPoint"
+msgid "subworkflow_exit_object"
+msgstr "Endzustände"
+
+msgid "subworkflow_object"
+msgstr "verwendet vom Übergang"
+
+msgctxt "Workflow"
+msgid "subworkflow_object"
+msgstr "Subworkflow von"
+
+msgid "subworkflow_state"
+msgstr "Zustand des Subworkflows"
+
+msgctxt "SubWorkflowExitPoint"
+msgid "subworkflow_state"
+msgstr "Zustand"
+
+msgid "subworkflow_state_object"
+msgstr "Endzustand von"
+
+msgctxt "State"
+msgid "subworkflow_state_object"
+msgstr "Endzustand von"
+
+msgid "sunday"
+msgstr "Sonntag"
+
+msgid "surname"
+msgstr "Name"
+
+msgctxt "CWUser"
+msgid "surname"
+msgstr "Nachname"
+
+msgid "symmetric"
+msgstr "symmetrisch"
+
+msgctxt "CWRType"
+msgid "symmetric"
+msgstr "symmetrisch"
+
+msgid "system entities"
+msgstr "System-Entitäten"
+
+msgid "table"
+msgstr "Tabelle"
+
+msgid "tablefilter"
+msgstr "Tabellenfilter"
+
+msgid "task progression"
+msgstr "Fortschritt der Aufgabe"
+
+msgid "text"
+msgstr "Text"
+
+msgid "text/cubicweb-page-template"
+msgstr "dynamischer Inhalt"
+
+msgid "text/html"
+msgstr "html"
+
+msgid "text/plain"
+msgstr "Nur Text"
+
+msgid "text/rest"
+msgstr "reST"
+
+msgid "the URI of the object"
+msgstr "der URI des Objekts"
+
+msgid "the prefered email"
+msgstr "primäre E-Mail-Adresse"
+
+#, python-format
+msgid "the value \"%s\" is already used, use another one"
+msgstr ""
+"Der Wert \"%s\" wird bereits benutzt, bitte verwenden Sie einen anderen Wert"
+
+msgid "this action is not reversible!"
+msgstr "Achtung! Diese Aktion ist unumkehrbar."
+
+msgid "this entity is currently owned by"
+msgstr "Diese Entität gehört:"
+
+msgid "this resource does not exist"
+msgstr "cette ressource est introuvable"
+
+msgid "thursday"
+msgstr "Donnerstag"
+
+msgid "timeline"
+msgstr "Zeitleiste"
+
+msgid "timestamp"
+msgstr "Datum"
+
+msgctxt "CWCache"
+msgid "timestamp"
+msgstr "gültig seit"
+
+msgid "timestamp of the latest source synchronization."
+msgstr "Zeitstempel der letzten Synchronisierung mit der Quelle."
+
+msgid "timetable"
+msgstr "Zeitplan"
+
+msgid "title"
+msgstr "titel"
+
+msgctxt "Bookmark"
+msgid "title"
+msgstr "bezeichnet"
+
+msgid "to"
+msgstr "zu"
+
+#, python-format
+msgid "to %(date)s"
+msgstr "bis zum %(date)s"
+
+msgid "to associate with"
+msgstr "zu verknüpfen mit"
+
+msgid "to_entity"
+msgstr "zu der Entität"
+
+msgctxt "CWAttribute"
+msgid "to_entity"
+msgstr "für die Entität"
+
+msgctxt "CWRelation"
+msgid "to_entity"
+msgstr "für die Entität"
+
+msgid "to_entity_object"
+msgstr "Objekt der Relation"
+
+msgctxt "CWEType"
+msgid "to_entity_object"
+msgstr "Objekt der Relation"
+
+msgid "to_interval_end"
+msgstr "bis"
+
+msgid "to_state"
+msgstr "zum Zustand"
+
+msgctxt "TrInfo"
+msgid "to_state"
+msgstr "Zielstatus"
+
+msgid "to_state_object"
+msgstr "Übergänge zu dem Zustand"
+
+msgctxt "State"
+msgid "to_state_object"
+msgstr "Übergang zu diesem Zustand"
+
+msgid "todo_by"
+msgstr "zu erledigen bis"
+
+msgid "toggle check boxes"
+msgstr "Kontrollkästchen umkehren"
+
+msgid "tr_count"
+msgstr ""
+
+msgctxt "TrInfo"
+msgid "tr_count"
+msgstr ""
+
+msgid "transaction undoed"
+msgstr "Transaktion rückgängig gemacht"
+
+#, python-format
+msgid "transition %(tr)s isn't allowed from %(st)s"
+msgstr "Der Übergang %(tr)s ist aus dem Zustand %(st)s nicht erlaubt."
+
+msgid "transition doesn't belong to entity's workflow"
+msgstr "Übergang gehört nicht zum Workflow der Entität."
+
+msgid "transition isn't allowed"
+msgstr "Der Übergang ist nicht erleubt."
+
+msgid "transition may not be fired"
+msgstr "Der Übergang kann nicht ausgelöst werden."
+
+msgid "transition_of"
+msgstr "Übergang des/der"
+
+msgctxt "BaseTransition"
+msgid "transition_of"
+msgstr "Übergang des/der"
+
+msgctxt "Transition"
+msgid "transition_of"
+msgstr "Übergang des/der"
+
+msgctxt "WorkflowTransition"
+msgid "transition_of"
+msgstr "Übergang des/der"
+
+msgid "transition_of_object"
+msgstr "hat als Übergang"
+
+msgctxt "Workflow"
+msgid "transition_of_object"
+msgstr "hat als Übergang"
+
+msgid "tree view"
+msgstr "Baumansicht"
+
+msgid "tuesday"
+msgstr "Dienstag"
+
+msgid "type"
+msgstr "Typ"
+
+msgctxt "BaseTransition"
+msgid "type"
+msgstr "Typ"
+
+msgctxt "CWSource"
+msgid "type"
+msgstr ""
+
+msgctxt "Transition"
+msgid "type"
+msgstr "Typ"
+
+msgctxt "WorkflowTransition"
+msgid "type"
+msgstr "Typ"
+
+msgid "type here a sparql query"
+msgstr "Geben sie eine sparql-Anfrage ein"
+
+msgid "type of the source"
+msgstr ""
+
+msgid "ui"
+msgstr "Allgemeinen Eigenschaften der Nutzerschnittstelle"
+
+msgid "ui.date-format"
+msgstr "Datumsformat"
+
+msgid "ui.datetime-format"
+msgstr "Format von Datum und Zeit"
+
+msgid "ui.default-text-format"
+msgstr "Textformat"
+
+msgid "ui.encoding"
+msgstr "Kodierung"
+
+msgid "ui.fckeditor"
+msgstr "Editor"
+
+msgid "ui.float-format"
+msgstr "Format von Dezimalzahlen (float)"
+
+msgid "ui.language"
+msgstr "Sprache"
+
+msgid "ui.main-template"
+msgstr "Hauptvorlage"
+
+msgid "ui.site-title"
+msgstr "Titel der Website"
+
+msgid "ui.time-format"
+msgstr "Zeitformat"
+
+msgid "unable to check captcha, please try again"
+msgstr "Kann capcha nicht bestätigen. Bitte noch einmal versuchen."
+
+msgid "unaccessible"
+msgstr "nicnt zugänglich"
+
+msgid "unauthorized value"
+msgstr "ungültiger Wert"
+
+msgid "undo"
+msgstr "rückgängig machen"
+
+msgid "unique identifier used to connect to the application"
+msgstr "eindeutiger Bezeichner zur Verbindung mit der Anwendung"
+
+msgid "unknown external entity"
+msgstr "(Externe) Entität nicht gefunden"
+
+#, python-format
+msgid "unknown property key %s"
+msgstr "Unbekannter Eigentumsschlüssel %s"
+
+msgid "unknown vocabulary:"
+msgstr "Unbekanntes Wörterbuch : "
+
+msgid "up"
+msgstr "nach oben"
+
+msgid "upassword"
+msgstr "Passwort"
+
+msgctxt "CWUser"
+msgid "upassword"
+msgstr "Passwort"
+
+msgid "update"
+msgstr "Aktualisierung"
+
+msgid "update_permission"
+msgstr "Änderungsrecht"
+
+msgctxt "CWAttribute"
+msgid "update_permission"
+msgstr "Änderungsrecht"
+
+msgctxt "CWEType"
+msgid "update_permission"
+msgstr "Änderungsrecht"
+
+msgid "update_permission_object"
+msgstr "hat die Änderungsberechtigung"
+
+msgctxt "CWGroup"
+msgid "update_permission_object"
+msgstr "kann ändern"
+
+msgctxt "RQLExpression"
+msgid "update_permission_object"
+msgstr "kann ändern"
+
+msgid "update_relation"
+msgstr "aktualisieren"
+
+msgid "updated"
+msgstr "aktualisiert"
+
+#, python-format
+msgid "updated %(etype)s #%(eid)s (%(title)s)"
+msgstr "Entität %(etype)s #%(eid)s (%(title)s) aktualisiert"
+
+msgid "uri"
+msgstr "URI"
+
+msgctxt "ExternalUri"
+msgid "uri"
+msgstr "URI"
+
+msgid "use template languages"
+msgstr "Verwenden Sie Templating-Sprachen"
+
+msgid ""
+"use to define a transition from one or multiple states to a destination "
+"states in workflow's definitions. Transition without destination state will "
+"go back to the state from which we arrived to the current state."
+msgstr ""
+"verwendet, um einen Übergang von einem oder mehreren Zuständenin einen "
+"Zielzustand eines Workflows zu definieren.Ein Übergang ohne Zielzustand "
+"führt in den Zustand zurück, der dem aktuellen zustand vorausgeht."
+
+msgid "use_email"
+msgstr "E-Mail-Adresse"
+
+msgctxt "CWUser"
+msgid "use_email"
+msgstr "verwendet die E-Mail-Adresse"
+
+msgid "use_email_object"
+msgstr "Adresse verwendet von"
+
+msgctxt "EmailAddress"
+msgid "use_email_object"
+msgstr "verwendet von"
+
+msgid "use_template_format"
+msgstr "Benutzung des 'cubicweb template'-Formats"
+
+msgid ""
+"used for cubicweb configuration. Once a property has been created you can't "
+"change the key."
+msgstr ""
+"konfiguriert CubicWeb. Nachdem eine Eigenschafterstellt wurde, können Sie "
+"den Schlüssel nicht mehr ändern."
+
+msgid ""
+"used to associate simple states to an entity type and/or to define workflows"
+msgstr ""
+"assoziiert einfache Zustände mit einem Entitätstyp und/oder definiert "
+"Workflows"
+
+msgid "used to grant a permission to a group"
+msgstr "gibt einer Gruppe eine Berechtigung"
+
+msgid "user"
+msgstr "Nutzer"
+
+#, python-format
+msgid ""
+"user %s has made the following change(s):\n"
+"\n"
+msgstr ""
+"Nutzer %s hat die folgende(n) Änderung(en) vorgenommen:\n"
+"\n"
+
+msgid "user interface encoding"
+msgstr "Kodierung für die Nutzerschnittstelle"
+
+msgid "user preferences"
+msgstr "Nutzereinstellungen"
+
+msgid "users"
+msgstr "Nutzer"
+
+msgid "users using this bookmark"
+msgstr "Nutzer, die dieses Lesezeichen verwenden"
+
+msgid "validate modifications on selected items"
+msgstr "Überprüfen der Änderungen an den ausgewählten Elementen"
+
+msgid "validating..."
+msgstr "Überprüfung läuft..."
+
+msgid "value"
+msgstr "Wert"
+
+msgctxt "CWConstraint"
+msgid "value"
+msgstr "Einschränkung"
+
+msgctxt "CWProperty"
+msgid "value"
+msgstr "Wert"
+
+msgid "value associated to this key is not editable manually"
+msgstr ""
+"Der mit diesem Schlüssele verbundene Wert kann n icht manuell geändert "
+"werden."
+
+#, python-format
+msgid "value must be %(op)s %(boundary)s"
+msgstr "Der Wert muss %(op)s %(boundary)s sein."
+
+#, python-format
+msgid "value must be <= %(boundary)s"
+msgstr "Der Wert muss <= %(boundary)s sein."
+
+#, python-format
+msgid "value must be >= %(boundary)s"
+msgstr "Der Wert muss >= %(boundary)s sein."
+
+#, python-format
+msgid "value should have maximum size of %s"
+msgstr "Der Wert darf höchstens %s betragen."
+
+#, python-format
+msgid "value should have minimum size of %s"
+msgstr "Der Wert muss mindestens %s betragen."
+
+msgid "vcard"
+msgstr "VCard"
+
+msgid "versions configuration"
+msgstr "Versionskonfiguration"
+
+msgid "view"
+msgstr "ansehen"
+
+msgid "view all"
+msgstr "alle ansehen"
+
+msgid "view detail for this entity"
+msgstr "Details für diese Entität ansehen"
+
+msgid "view history"
+msgstr "Chronik ansehen"
+
+msgid "view identifier"
+msgstr "Nutzername"
+
+msgid "view title"
+msgstr "Titel"
+
+msgid "view workflow"
+msgstr "mögliche Zustände ansehen"
+
+msgid "view_index"
+msgstr "Index-Seite"
+
+#, python-format
+msgid "violates unique_together constraints (%s)"
+msgstr "Verletzung der unique_together-Einschränkung (%s)"
+
+msgid "visible"
+msgstr "sichtbar"
+
+msgid "we are not yet ready to handle this query"
+msgstr "Momentan können wir diese sparql-Anfrage noch nicht ausführen."
+
+msgid "wednesday"
+msgstr "Mittwoch"
+
+msgid "week"
+msgstr "Woche"
+
+#, python-format
+msgid "welcome %s !"
+msgstr "Willkommen %s !"
+
+msgid "wf_info_for"
+msgstr "Chronik von"
+
+msgid "wf_info_for_object"
+msgstr "Chronik der Übergänge"
+
+msgid "wf_tab_info"
+msgstr "Beschreibung"
+
+msgid "wfgraph"
+msgstr "Grafik des Workflows"
+
+msgid ""
+"when multiple addresses are equivalent (such as python-projects@logilab.org "
+"and python-projects@lists.logilab.org), set this to indicate which is the "
+"preferred form."
+msgstr ""
+"Wenn mehrere Adressen ähnlich sind (comme python-projects@logilab.org und "
+"python-projects@lists.logilab.org), bestimmen Sie die bevorzugte Form."
+
+msgid "workflow"
+msgstr "Workflow"
+
+msgid "workflow already have a state of that name"
+msgstr "Der Workflow hat bereits einen Zustand desselben Namens."
+
+msgid "workflow already have a transition of that name"
+msgstr "Der Workflow hat bereits einen Übergang desselben Namens."
+
+#, python-format
+msgid "workflow changed to \"%s\""
+msgstr "Workflow geändert in \"%s\""
+
+msgid "workflow has no initial state"
+msgstr "Workflow hat keinen Anfangszustand"
+
+msgid "workflow history item"
+msgstr "Beginn der Chronik des Workflows"
+
+msgid "workflow isn't a workflow for this type"
+msgstr "Der Workflow gilt nicht für diesen Entitätstyp."
+
+msgid "workflow to which this state belongs"
+msgstr "Workflow, zu dem dieser Zustand gehört"
+
+msgid "workflow to which this transition belongs"
+msgstr "Workflow, zu dem dieser Übergang gehört"
+
+msgid "workflow_of"
+msgstr "Workflow von"
+
+msgctxt "Workflow"
+msgid "workflow_of"
+msgstr "Workflow von"
+
+msgid "workflow_of_object"
+msgstr "hat als Workflow"
+
+msgctxt "CWEType"
+msgid "workflow_of_object"
+msgstr "hat als Workflow"
+
+#, python-format
+msgid "wrong query parameter line %s"
+msgstr "Falscher Anfrage-Parameter Zeile %s"
+
+msgid "xbel"
+msgstr "XBEL"
+
+msgid "xml"
+msgstr "XML"
+
+msgid "xml export"
+msgstr "XML-Export"
+
+msgid "yes"
+msgstr "Ja"
+
+msgid "you have been logged out"
+msgstr "Sie sind jetzt abgemeldet."
+
+msgid "you should probably delete that property"
+msgstr "Sie sollten diese Eigenschaft wahrscheinlich löschen."
+
+#~ msgid ""
+#~ "can't set inlined=%(inlined)s, %(stype)s %(rtype)s %(otype)s has "
+#~ "cardinality=%(card)s"
+#~ msgstr ""
+#~ "Kann 'inlined' = %(inlined)s nicht zuweisen, %(stype)s %(rtype)s "
+#~ "%(otype)s hat die Kardinalität %(card)s"
diff -r 48f468f33704 -r e4580e5f0703 i18n/en.po
--- a/i18n/en.po Fri Dec 10 12:17:18 2010 +0100
+++ b/i18n/en.po Fri Mar 11 09:46:45 2011 +0100
@@ -47,6 +47,10 @@
msgstr ""
#, python-format
+msgid "%(etype)s by %(author)s"
+msgstr ""
+
+#, python-format
msgid "%(firstname)s %(surname)s"
msgstr ""
@@ -200,6 +204,9 @@
msgid "AND"
msgstr ""
+msgid "About this site"
+msgstr ""
+
msgid "Any"
msgstr ""
@@ -320,11 +327,23 @@
msgid "CWRelation_plural"
msgstr "Relations"
+msgid "CWSource"
+msgstr "Data source"
+
+msgid "CWSourceHostConfig"
+msgstr "Host configuration"
+
+msgid "CWSourceHostConfig_plural"
+msgstr "Host configurations"
+
+msgid "CWSource_plural"
+msgstr "Data sources"
+
msgid "CWUniqueTogetherConstraint"
-msgstr ""
+msgstr "Unicity constraint"
msgid "CWUniqueTogetherConstraint_plural"
-msgstr ""
+msgstr "Unicity constraints"
msgid "CWUser"
msgstr "User"
@@ -505,8 +524,14 @@
msgid "New CWRelation"
msgstr "New relation"
+msgid "New CWSource"
+msgstr "New source"
+
+msgid "New CWSourceHostConfig"
+msgstr "New host configuration"
+
msgid "New CWUniqueTogetherConstraint"
-msgstr ""
+msgstr "New unicity constraint"
msgid "New CWUser"
msgstr "New user"
@@ -569,6 +594,9 @@
msgid "Please note that this is only a shallow copy"
msgstr ""
+msgid "Powered by CubicWeb"
+msgstr ""
+
msgid "RQLConstraint"
msgstr "RQL constraint"
@@ -615,6 +643,12 @@
msgid "SizeConstraint"
msgstr "size constraint"
+msgid ""
+"Source's configuration for a particular host. One key=value per line, "
+"authorized keys depending on the source's type, overriding values defined on "
+"the source."
+msgstr ""
+
msgid "Startup views"
msgstr ""
@@ -698,8 +732,14 @@
msgid "This CWRelation"
msgstr "This relation"
+msgid "This CWSource"
+msgstr "This data source"
+
+msgid "This CWSourceHostConfig"
+msgstr "This host configuration"
+
msgid "This CWUniqueTogetherConstraint"
-msgstr ""
+msgstr "This unicity constraint"
msgid "This CWUser"
msgstr "This user"
@@ -829,9 +869,6 @@
"get_cache() method."
msgstr ""
-msgid "about this site"
-msgstr ""
-
msgid "abstract base class for transitions"
msgstr ""
@@ -895,8 +932,11 @@
msgid "add CWRelation relation_type CWRType object"
msgstr "relation definition"
+msgid "add CWSourceHostConfig cw_host_config_of CWSource object"
+msgstr "host configuration"
+
msgid "add CWUniqueTogetherConstraint constraint_of CWEType object"
-msgstr ""
+msgstr "unicity constraint"
msgid "add CWUser in_group CWGroup object"
msgstr "user"
@@ -1083,6 +1123,14 @@
msgstr ""
#, python-format
+msgid "archive for %(author)s"
+msgstr ""
+
+#, python-format
+msgid "archive for %(month)s/%(year)s"
+msgstr ""
+
+#, python-format
msgid "at least one relation %(rtype)s is required on %(etype)s (%(eid)s)"
msgstr ""
@@ -1139,54 +1187,6 @@
msgid "boxes"
msgstr ""
-msgid "boxes_bookmarks_box"
-msgstr "bookmarks box"
-
-msgid "boxes_bookmarks_box_description"
-msgstr "box listing the user's bookmarks"
-
-msgid "boxes_download_box"
-msgstr "download box"
-
-msgid "boxes_download_box_description"
-msgstr ""
-
-msgid "boxes_edit_box"
-msgstr "actions box"
-
-msgid "boxes_edit_box_description"
-msgstr "box listing the applicable actions on the displayed data"
-
-msgid "boxes_filter_box"
-msgstr "filter"
-
-msgid "boxes_filter_box_description"
-msgstr "box providing filter within current search results functionality"
-
-msgid "boxes_possible_views_box"
-msgstr "possible views box"
-
-msgid "boxes_possible_views_box_description"
-msgstr "box listing the possible views for the displayed data"
-
-msgid "boxes_rss"
-msgstr "rss box"
-
-msgid "boxes_rss_description"
-msgstr "RSS icon to get displayed data as a RSS thread"
-
-msgid "boxes_search_box"
-msgstr "search box"
-
-msgid "boxes_search_box_description"
-msgstr "search box"
-
-msgid "boxes_startup_views_box"
-msgstr "startup views box"
-
-msgid "boxes_startup_views_box_description"
-msgstr "box listing the possible start pages"
-
msgid "bug report sent"
msgstr ""
@@ -1275,10 +1275,13 @@
#, python-format
msgid ""
-"can't set inlined=%(inlined)s, %(stype)s %(rtype)s %(otype)s has cardinality="
+"can't set inlined=True, %(stype)s %(rtype)s %(otype)s has cardinality="
"%(card)s"
msgstr ""
+msgid "cancel"
+msgstr ""
+
msgid "cancel select"
msgstr ""
@@ -1309,6 +1312,9 @@
msgid "click here to see created entity"
msgstr ""
+msgid "click here to see edited entity"
+msgstr ""
+
msgid "click on the box to cancel the deletion"
msgstr ""
@@ -1338,45 +1344,12 @@
msgid "components"
msgstr ""
-msgid "components_appliname"
-msgstr "application title"
-
-msgid "components_appliname_description"
-msgstr "display the application title in the page's header"
-
-msgid "components_breadcrumbs"
-msgstr "breadcrumbs"
-
-msgid "components_breadcrumbs_description"
-msgstr "breadcrumbs bar that display a path locating the page in the site"
-
msgid "components_etypenavigation"
msgstr "filtering by type"
msgid "components_etypenavigation_description"
msgstr "permit to filter search results by entity type"
-msgid "components_help"
-msgstr "help button"
-
-msgid "components_help_description"
-msgstr "the help button on the top right-hand corner"
-
-msgid "components_loggeduserlink"
-msgstr "user link"
-
-msgid "components_loggeduserlink_description"
-msgstr ""
-"for anonymous users, this is a link pointing to authentication form, for "
-"logged in users, this is a link that makes a box appear and listing some "
-"possible user actions"
-
-msgid "components_logo"
-msgstr "icon"
-
-msgid "components_logo_description"
-msgstr "the application's icon displayed in the page's header"
-
msgid "components_navigation"
msgstr "page navigation"
@@ -1421,6 +1394,17 @@
msgid "conditions"
msgstr ""
+msgid "config"
+msgstr ""
+
+msgctxt "CWSource"
+msgid "config"
+msgstr ""
+
+msgctxt "CWSourceHostConfig"
+msgid "config"
+msgstr ""
+
msgid "config mode"
msgstr ""
@@ -1452,18 +1436,18 @@
msgstr ""
msgid "constraint_of"
-msgstr ""
+msgstr "constraint of"
msgctxt "CWUniqueTogetherConstraint"
msgid "constraint_of"
-msgstr ""
+msgstr "constraint of"
msgid "constraint_of_object"
-msgstr ""
+msgstr "constrained by"
msgctxt "CWEType"
msgid "constraint_of_object"
-msgstr ""
+msgstr "constrained by"
msgid "constraints"
msgstr ""
@@ -1474,43 +1458,6 @@
msgid "content type"
msgstr ""
-msgid "contentnavigation"
-msgstr "contextual components"
-
-msgid "contentnavigation_breadcrumbs"
-msgstr "breadcrumb"
-
-msgid "contentnavigation_breadcrumbs_description"
-msgstr "breadcrumbs bar that display a path locating the page in the site"
-
-msgid "contentnavigation_metadata"
-msgstr "entity's metadata"
-
-msgid "contentnavigation_metadata_description"
-msgstr ""
-
-msgid "contentnavigation_prevnext"
-msgstr "previous / next entity"
-
-msgid "contentnavigation_prevnext_description"
-msgstr ""
-"display link to go from one entity to another on entities implementing the "
-"\"previous/next\" interface."
-
-msgid "contentnavigation_seealso"
-msgstr "see also"
-
-msgid "contentnavigation_seealso_description"
-msgstr ""
-"section containing entities related by the \"see also\" relation on entities "
-"supporting it."
-
-msgid "contentnavigation_wfhistory"
-msgstr "workflow history"
-
-msgid "contentnavigation_wfhistory_description"
-msgstr "show the workflow's history."
-
msgid "context"
msgstr ""
@@ -1589,9 +1536,14 @@
msgstr "creating relation %(linkto)s"
msgid ""
+"creating CWSourceHostConfig (CWSourceHostConfig cw_host_config_of CWSource "
+"%(linkto)s)"
+msgstr "creating host configuration for source %(linkto)s"
+
+msgid ""
"creating CWUniqueTogetherConstraint (CWUniqueTogetherConstraint "
"constraint_of CWEType %(linkto)s)"
-msgstr ""
+msgstr "creating unique together constraint for entity type %(linkto)s"
msgid "creating CWUser (CWUser in_group CWGroup %(linkto)s)"
msgstr "creating a new user in group %(linkto)s"
@@ -1710,6 +1662,110 @@
msgid "csv export"
msgstr ""
+msgid "ctxcomponents"
+msgstr "contextual components"
+
+msgid "ctxcomponents_anonuserlink"
+msgstr "user link"
+
+msgid "ctxcomponents_anonuserlink_description"
+msgstr ""
+"for anonymous users, this is a link pointing to authentication form, for "
+"logged in users, this is a link that makes a box appear and listing some "
+"possible user actions"
+
+msgid "ctxcomponents_appliname"
+msgstr "application title"
+
+msgid "ctxcomponents_appliname_description"
+msgstr "display the application title in the page's header"
+
+msgid "ctxcomponents_bookmarks_box"
+msgstr "bookmarks box"
+
+msgid "ctxcomponents_bookmarks_box_description"
+msgstr "box listing the user's bookmarks"
+
+msgid "ctxcomponents_breadcrumbs"
+msgstr "breadcrumb"
+
+msgid "ctxcomponents_breadcrumbs_description"
+msgstr "breadcrumbs bar that display a path locating the page in the site"
+
+msgid "ctxcomponents_download_box"
+msgstr "download box"
+
+msgid "ctxcomponents_download_box_description"
+msgstr ""
+
+msgid "ctxcomponents_edit_box"
+msgstr "actions box"
+
+msgid "ctxcomponents_edit_box_description"
+msgstr "box listing the applicable actions on the displayed data"
+
+msgid "ctxcomponents_facet.filters"
+msgstr "facets box"
+
+msgid "ctxcomponents_facet.filters_description"
+msgstr "box providing filter within current search results functionality"
+
+msgid "ctxcomponents_logo"
+msgstr "logo"
+
+msgid "ctxcomponents_logo_description"
+msgstr "the application's icon displayed in the page's header"
+
+msgid "ctxcomponents_metadata"
+msgstr "entity's metadata"
+
+msgid "ctxcomponents_metadata_description"
+msgstr ""
+
+msgid "ctxcomponents_possible_views_box"
+msgstr "possible views box"
+
+msgid "ctxcomponents_possible_views_box_description"
+msgstr "box listing the possible views for the displayed data"
+
+msgid "ctxcomponents_prevnext"
+msgstr "previous / next entity"
+
+msgid "ctxcomponents_prevnext_description"
+msgstr ""
+"display link to go from one entity to another on entities implementing the "
+"\"previous/next\" interface."
+
+msgid "ctxcomponents_rss"
+msgstr "rss box"
+
+msgid "ctxcomponents_rss_description"
+msgstr "RSS icon to get displayed data as a RSS thread"
+
+msgid "ctxcomponents_search_box"
+msgstr "search box"
+
+msgid "ctxcomponents_search_box_description"
+msgstr "search box"
+
+msgid "ctxcomponents_startup_views_box"
+msgstr "startup views box"
+
+msgid "ctxcomponents_startup_views_box_description"
+msgstr "box listing the possible start pages"
+
+msgid "ctxcomponents_userstatus"
+msgstr ""
+
+msgid "ctxcomponents_userstatus_description"
+msgstr ""
+
+msgid "ctxcomponents_wfhistory"
+msgstr "workflow history"
+
+msgid "ctxcomponents_wfhistory_description"
+msgstr "show the workflow's history."
+
msgid "ctxtoolbar"
msgstr "toolbar"
@@ -1719,6 +1775,72 @@
msgid "custom_workflow_object"
msgstr "custom workflow of"
+msgid "cw_dont_cross"
+msgstr ""
+
+msgctxt "CWSource"
+msgid "cw_dont_cross"
+msgstr ""
+
+msgid "cw_dont_cross_object"
+msgstr ""
+
+msgctxt "CWRType"
+msgid "cw_dont_cross_object"
+msgstr ""
+
+msgid "cw_host_config_of"
+msgstr ""
+
+msgctxt "CWSourceHostConfig"
+msgid "cw_host_config_of"
+msgstr ""
+
+msgid "cw_host_config_of_object"
+msgstr ""
+
+msgctxt "CWSource"
+msgid "cw_host_config_of_object"
+msgstr ""
+
+msgid "cw_may_cross"
+msgstr ""
+
+msgctxt "CWSource"
+msgid "cw_may_cross"
+msgstr ""
+
+msgid "cw_may_cross_object"
+msgstr ""
+
+msgctxt "CWRType"
+msgid "cw_may_cross_object"
+msgstr ""
+
+msgid "cw_source"
+msgstr ""
+
+msgid "cw_source_object"
+msgstr ""
+
+msgid "cw_support"
+msgstr ""
+
+msgctxt "CWSource"
+msgid "cw_support"
+msgstr ""
+
+msgid "cw_support_object"
+msgstr ""
+
+msgctxt "CWEType"
+msgid "cw_support_object"
+msgstr ""
+
+msgctxt "CWRType"
+msgid "cw_support_object"
+msgstr ""
+
msgid "cwetype-box"
msgstr "\"box\" view"
@@ -2112,10 +2234,6 @@
msgid "error while embedding page"
msgstr ""
-#, python-format
-msgid "error while handling __method: %s"
-msgstr "error while handling method %s"
-
msgid "error while publishing ReST text"
msgstr ""
@@ -2159,8 +2277,11 @@
msgid "external page"
msgstr ""
+msgid "facet.filters"
+msgstr "filter"
+
msgid "facetbox"
-msgstr ""
+msgstr "facettes"
msgid "facets_created_by-facet"
msgstr "\"created by\" facet"
@@ -2168,6 +2289,12 @@
msgid "facets_created_by-facet_description"
msgstr ""
+msgid "facets_cw_source-facet"
+msgstr "data source"
+
+msgid "facets_cw_source-facet_description"
+msgstr ""
+
msgid "facets_cwfinal-facet"
msgstr "\"final entity or relation type\" facet"
@@ -2372,8 +2499,11 @@
msgid "has_text"
msgstr "has text"
-msgid "help"
-msgstr ""
+msgid "header-left"
+msgstr "header (left)"
+
+msgid "header-right"
+msgstr "header (right)"
msgid "hide filter form"
msgstr ""
@@ -2679,6 +2809,9 @@
msgid "login"
msgstr "login"
+msgid "login / password"
+msgstr ""
+
msgid "login or email"
msgstr ""
@@ -2695,6 +2828,9 @@
msgid "main informations"
msgstr ""
+msgid "main_tab"
+msgstr "description"
+
msgid "mainvars"
msgstr "main vars"
@@ -2723,6 +2859,13 @@
msgid "march"
msgstr ""
+msgid "match_host"
+msgstr ""
+
+msgctxt "CWSourceHostConfig"
+msgid "match_host"
+msgstr ""
+
msgid "maximum number of characters in short description"
msgstr ""
@@ -2803,6 +2946,10 @@
msgid "name"
msgstr "name"
+msgctxt "CWSource"
+msgid "name"
+msgstr ""
+
msgctxt "State"
msgid "name"
msgstr "name"
@@ -2827,6 +2974,9 @@
"necessary (comma separated)"
msgstr ""
+msgid "name of the source"
+msgstr ""
+
msgid "name or identifier of the permission"
msgstr ""
@@ -3020,9 +3170,6 @@
msgid "possible views"
msgstr ""
-msgid "powered by CubicWeb"
-msgstr ""
-
msgid "prefered_form"
msgstr "prefered form"
@@ -3107,6 +3254,9 @@
msgid "read_permission_object"
msgstr "can be read by"
+msgid "regexp matching host(s) to which this config applies"
+msgstr ""
+
msgid "registry"
msgstr ""
@@ -3158,15 +3308,11 @@
msgstr ""
msgid "relations_object"
-msgstr ""
-
-msgctxt "CWAttribute"
+msgstr "constrained by"
+
+msgctxt "CWRType"
msgid "relations_object"
-msgstr ""
-
-msgctxt "CWRelation"
-msgid "relations_object"
-msgstr ""
+msgstr "constrained by"
msgid "relative url of the bookmarked page"
msgstr ""
@@ -3279,6 +3425,9 @@
msgid "security"
msgstr ""
+msgid "see more"
+msgstr ""
+
msgid "see them all"
msgstr ""
@@ -3376,6 +3525,11 @@
msgid "sorry, the server is unable to handle this query"
msgstr ""
+msgid ""
+"source's configuration. One key=value per line, authorized keys depending on "
+"the source's type"
+msgstr ""
+
msgid "sparql xml"
msgstr ""
@@ -3644,6 +3798,13 @@
msgid "toggle check boxes"
msgstr ""
+msgid "tr_count"
+msgstr "transition number"
+
+msgctxt "TrInfo"
+msgid "tr_count"
+msgstr "transition number"
+
msgid "transaction undoed"
msgstr ""
@@ -3695,6 +3856,10 @@
msgid "type"
msgstr "type"
+msgctxt "CWSource"
+msgid "type"
+msgstr ""
+
msgctxt "Transition"
msgid "type"
msgstr "type"
@@ -3706,6 +3871,9 @@
msgid "type here a sparql query"
msgstr ""
+msgid "type of the source"
+msgstr ""
+
msgid "ui"
msgstr ""
@@ -3757,7 +3925,8 @@
msgid "unknown external entity"
msgstr ""
-msgid "unknown property key"
+#, python-format
+msgid "unknown property key %s"
msgstr ""
msgid "unknown vocabulary:"
@@ -3971,10 +4140,10 @@
msgstr "workflow history"
msgid "wf_tab_info"
-msgstr ""
+msgstr "states and transitions"
msgid "wfgraph"
-msgstr ""
+msgstr "graph"
msgid ""
"when multiple addresses are equivalent (such as python-projects@logilab.org "
@@ -4045,15 +4214,3 @@
msgid "you should probably delete that property"
msgstr ""
-
-#~ msgid "add_perm"
-#~ msgstr "add permission"
-
-#~ msgid "delete_perm"
-#~ msgstr "delete permission"
-
-#~ msgid "read_perm"
-#~ msgstr "read permission"
-
-#~ msgid "update_perm"
-#~ msgstr "update permission"
diff -r 48f468f33704 -r e4580e5f0703 i18n/es.po
--- a/i18n/es.po Fri Dec 10 12:17:18 2010 +0100
+++ b/i18n/es.po Fri Mar 11 09:46:45 2011 +0100
@@ -53,6 +53,10 @@
msgstr "el valor %(value)r no satisface la condición %(cstr)s"
#, python-format
+msgid "%(etype)s by %(author)s"
+msgstr ""
+
+#, python-format
msgid "%(firstname)s %(surname)s"
msgstr "%(firstname)s %(surname)s"
@@ -209,6 +213,9 @@
msgid "AND"
msgstr "Y"
+msgid "About this site"
+msgstr "Información del Sistema"
+
msgid "Any"
msgstr "Cualquiera"
@@ -329,6 +336,18 @@
msgid "CWRelation_plural"
msgstr "Relaciones"
+msgid "CWSource"
+msgstr ""
+
+msgid "CWSourceHostConfig"
+msgstr ""
+
+msgid "CWSourceHostConfig_plural"
+msgstr ""
+
+msgid "CWSource_plural"
+msgstr ""
+
msgid "CWUniqueTogetherConstraint"
msgstr ""
@@ -526,6 +545,12 @@
msgid "New CWRelation"
msgstr "Nueva definición de relación final"
+msgid "New CWSource"
+msgstr ""
+
+msgid "New CWSourceHostConfig"
+msgstr ""
+
msgid "New CWUniqueTogetherConstraint"
msgstr ""
@@ -590,6 +615,9 @@
msgid "Please note that this is only a shallow copy"
msgstr "Recuerde que sólo es una copia superficial"
+msgid "Powered by CubicWeb"
+msgstr "Potenciado en CubicWeb"
+
msgid "RQLConstraint"
msgstr "Restricción RQL"
@@ -636,6 +664,12 @@
msgid "SizeConstraint"
msgstr "Restricción de tamaño"
+msgid ""
+"Source's configuration for a particular host. One key=value per line, "
+"authorized keys depending on the source's type, overriding values defined on "
+"the source."
+msgstr ""
+
msgid "Startup views"
msgstr "Vistas de inicio"
@@ -719,6 +753,12 @@
msgid "This CWRelation"
msgstr "Esta definición de relación no final"
+msgid "This CWSource"
+msgstr ""
+
+msgid "This CWSourceHostConfig"
+msgstr ""
+
msgid "This CWUniqueTogetherConstraint"
msgstr ""
@@ -871,9 +911,6 @@
"Para recuperar un caché, hace falta utilizar el método\n"
"get_cache(cachename)."
-msgid "about this site"
-msgstr "Información del Sistema"
-
msgid "abstract base class for transitions"
msgstr "Clase de base abstracta para la transiciones"
@@ -937,6 +974,9 @@
msgid "add CWRelation relation_type CWRType object"
msgstr "Definición de relación"
+msgid "add CWSourceHostConfig cw_host_config_of CWSource object"
+msgstr ""
+
msgid "add CWUniqueTogetherConstraint constraint_of CWEType object"
msgstr ""
@@ -1127,6 +1167,14 @@
msgstr "Abril"
#, python-format
+msgid "archive for %(author)s"
+msgstr ""
+
+#, python-format
+msgid "archive for %(month)s/%(year)s"
+msgstr ""
+
+#, python-format
msgid "at least one relation %(rtype)s is required on %(etype)s (%(eid)s)"
msgstr ""
"La entidad #%(eid)s de tipo %(etype)s debe necesariamente tener almenos una "
@@ -1185,56 +1233,6 @@
msgid "boxes"
msgstr "Cajas"
-msgid "boxes_bookmarks_box"
-msgstr "Caja de Favoritos"
-
-msgid "boxes_bookmarks_box_description"
-msgstr "Muestra y permite administrar los favoritos del usuario"
-
-msgid "boxes_download_box"
-msgstr "Configuración de caja de descargas"
-
-msgid "boxes_download_box_description"
-msgstr "Caja que contiene los elementos descargados"
-
-msgid "boxes_edit_box"
-msgstr "Caja de Acciones"
-
-msgid "boxes_edit_box_description"
-msgstr "Muestra las acciones posibles a ejecutar para los datos seleccionados"
-
-msgid "boxes_filter_box"
-msgstr "Filtros"
-
-msgid "boxes_filter_box_description"
-msgstr "Muestra los filtros aplicables a una búsqueda realizada"
-
-msgid "boxes_possible_views_box"
-msgstr "Caja de Vistas Posibles"
-
-msgid "boxes_possible_views_box_description"
-msgstr "Muestra las vistas posibles a aplicar a los datos seleccionados"
-
-msgid "boxes_rss"
-msgstr "Ícono RSS"
-
-msgid "boxes_rss_description"
-msgstr "Muestra el ícono RSS para vistas RSS"
-
-msgid "boxes_search_box"
-msgstr "Caja de búsqueda"
-
-msgid "boxes_search_box_description"
-msgstr ""
-"Permite realizar una búsqueda simple para cualquier tipo de dato en la "
-"aplicación"
-
-msgid "boxes_startup_views_box"
-msgstr "Caja Vistas de inicio"
-
-msgid "boxes_startup_views_box_description"
-msgstr "Muestra las vistas de inicio de la aplicación"
-
msgid "bug report sent"
msgstr "Reporte de error enviado"
@@ -1323,11 +1321,14 @@
#, python-format
msgid ""
-"can't set inlined=%(inlined)s, %(stype)s %(rtype)s %(otype)s has cardinality="
+"can't set inlined=True, %(stype)s %(rtype)s %(otype)s has cardinality="
"%(card)s"
msgstr ""
-"no puede poner 'inlined' = %(inlined)s, %(stype)s %(rtype)s %(otype)s tiene "
-"cardinalidad %(card)s"
+"no puede poner 'inlined' = True, %(stype)s %(rtype)s %(otype)s "
+ "tiene cardinalidad %(card)s"
+
+msgid "cancel"
+msgstr ""
msgid "cancel select"
msgstr "Cancelar la selección"
@@ -1359,6 +1360,9 @@
msgid "click here to see created entity"
msgstr "Ver la entidad creada"
+msgid "click here to see edited entity"
+msgstr ""
+
msgid "click on the box to cancel the deletion"
msgstr "Seleccione la zona de edición para cancelar la eliminación"
@@ -1388,44 +1392,12 @@
msgid "components"
msgstr "Componentes"
-msgid "components_appliname"
-msgstr "Nombre de la aplicación"
-
-msgid "components_appliname_description"
-msgstr "Muestra el nombre de la aplicación en el encabezado de la página"
-
-msgid "components_breadcrumbs"
-msgstr "Ruta de Navegación"
-
-msgid "components_breadcrumbs_description"
-msgstr "Muestra el lugar donde se encuentra la página actual en el Sistema"
-
msgid "components_etypenavigation"
msgstr "Filtar por tipo"
msgid "components_etypenavigation_description"
msgstr "Permite filtrar por tipo de entidad los resultados de una búsqueda"
-msgid "components_help"
-msgstr "Botón de ayuda"
-
-msgid "components_help_description"
-msgstr "El botón de ayuda, en el encabezado de la página"
-
-msgid "components_loggeduserlink"
-msgstr "Liga usuario"
-
-msgid "components_loggeduserlink_description"
-msgstr ""
-"Muestra un enlace hacia el formulario de conexión para los usuarios "
-"anónimos, o una caja que contiene los enlaces del usuario conectado. "
-
-msgid "components_logo"
-msgstr "logo"
-
-msgid "components_logo_description"
-msgstr "El logo de la aplicación, en el encabezado de página"
-
msgid "components_navigation"
msgstr "Navigación por página"
@@ -1472,6 +1444,17 @@
msgid "conditions"
msgstr "condiciones"
+msgid "config"
+msgstr ""
+
+msgctxt "CWSource"
+msgid "config"
+msgstr ""
+
+msgctxt "CWSourceHostConfig"
+msgid "config"
+msgstr ""
+
msgid "config mode"
msgstr "Modo de configuración"
@@ -1525,45 +1508,6 @@
msgid "content type"
msgstr "tipo MIME"
-msgid "contentnavigation"
-msgstr "Componentes contextuales"
-
-msgid "contentnavigation_breadcrumbs"
-msgstr "Ruta de Navegación"
-
-msgid "contentnavigation_breadcrumbs_description"
-msgstr "Muestra la ruta que permite localizar la página actual en el Sistema"
-
-msgid "contentnavigation_metadata"
-msgstr "Metadatos de la Entidad"
-
-msgid "contentnavigation_metadata_description"
-msgstr ""
-
-msgid "contentnavigation_prevnext"
-msgstr "Elemento anterior / siguiente"
-
-msgid "contentnavigation_prevnext_description"
-msgstr ""
-"Muestra las ligas que permiten pasar de una entidad a otra en las entidades "
-"que implementan la interface \"anterior/siguiente\"."
-
-msgid "contentnavigation_seealso"
-msgstr "Vea también"
-
-msgid "contentnavigation_seealso_description"
-msgstr ""
-"sección que muestra las entidades relacionadas por la relación \"vea también"
-"\" , si la entidad soporta esta relación."
-
-msgid "contentnavigation_wfhistory"
-msgstr "Histórico del workflow."
-
-msgid "contentnavigation_wfhistory_description"
-msgstr ""
-"Sección que muestra el reporte histórico de las transiciones del workflow. "
-"Aplica solo en entidades con workflow."
-
msgid "context"
msgstr "Contexto"
@@ -1649,6 +1593,11 @@
msgstr "Creación de la relación %(linkto)s"
msgid ""
+"creating CWSourceHostConfig (CWSourceHostConfig cw_host_config_of CWSource "
+"%(linkto)s)"
+msgstr ""
+
+msgid ""
"creating CWUniqueTogetherConstraint (CWUniqueTogetherConstraint "
"constraint_of CWEType %(linkto)s)"
msgstr ""
@@ -1774,6 +1723,113 @@
msgid "csv export"
msgstr "Exportar en CSV"
+msgid "ctxcomponents"
+msgstr "Componentes contextuales"
+
+msgid "ctxcomponents_anonuserlink"
+msgstr "Liga usuario"
+
+msgid "ctxcomponents_anonuserlink_description"
+msgstr ""
+"Muestra un enlace hacia el formulario de conexión para los usuarios "
+"anónimos, o una caja que contiene los enlaces del usuario conectado. "
+
+msgid "ctxcomponents_appliname"
+msgstr "Nombre de la aplicación"
+
+msgid "ctxcomponents_appliname_description"
+msgstr "Muestra el nombre de la aplicación en el encabezado de la página"
+
+msgid "ctxcomponents_bookmarks_box"
+msgstr "Caja de Favoritos"
+
+msgid "ctxcomponents_bookmarks_box_description"
+msgstr "Muestra y permite administrar los favoritos del usuario"
+
+msgid "ctxcomponents_breadcrumbs"
+msgstr "Ruta de Navegación"
+
+msgid "ctxcomponents_breadcrumbs_description"
+msgstr "Muestra la ruta que permite localizar la página actual en el Sistema"
+
+msgid "ctxcomponents_download_box"
+msgstr "Configuración de caja de descargas"
+
+msgid "ctxcomponents_download_box_description"
+msgstr "Caja que contiene los elementos descargados"
+
+msgid "ctxcomponents_edit_box"
+msgstr "Caja de Acciones"
+
+msgid "ctxcomponents_edit_box_description"
+msgstr "Muestra las acciones posibles a ejecutar para los datos seleccionados"
+
+msgid "ctxcomponents_facet.filters"
+msgstr "Filtros"
+
+msgid "ctxcomponents_facet.filters_description"
+msgstr "Muestra los filtros aplicables a una búsqueda realizada"
+
+msgid "ctxcomponents_logo"
+msgstr "logo"
+
+msgid "ctxcomponents_logo_description"
+msgstr "El logo de la aplicación, en el encabezado de página"
+
+msgid "ctxcomponents_metadata"
+msgstr "Metadatos de la Entidad"
+
+msgid "ctxcomponents_metadata_description"
+msgstr ""
+
+msgid "ctxcomponents_possible_views_box"
+msgstr "Caja de Vistas Posibles"
+
+msgid "ctxcomponents_possible_views_box_description"
+msgstr "Muestra las vistas posibles a aplicar a los datos seleccionados"
+
+msgid "ctxcomponents_prevnext"
+msgstr "Elemento anterior / siguiente"
+
+msgid "ctxcomponents_prevnext_description"
+msgstr ""
+"Muestra las ligas que permiten pasar de una entidad a otra en las entidades "
+"que implementan la interface \"anterior/siguiente\"."
+
+msgid "ctxcomponents_rss"
+msgstr "Ícono RSS"
+
+msgid "ctxcomponents_rss_description"
+msgstr "Muestra el ícono RSS para vistas RSS"
+
+msgid "ctxcomponents_search_box"
+msgstr "Caja de búsqueda"
+
+msgid "ctxcomponents_search_box_description"
+msgstr ""
+"Permite realizar una búsqueda simple para cualquier tipo de dato en la "
+"aplicación"
+
+msgid "ctxcomponents_startup_views_box"
+msgstr "Caja Vistas de inicio"
+
+msgid "ctxcomponents_startup_views_box_description"
+msgstr "Muestra las vistas de inicio de la aplicación"
+
+msgid "ctxcomponents_userstatus"
+msgstr ""
+
+msgid "ctxcomponents_userstatus_description"
+msgstr ""
+
+msgid "ctxcomponents_wfhistory"
+msgstr "Histórico del workflow."
+
+msgid "ctxcomponents_wfhistory_description"
+msgstr ""
+"Sección que muestra el reporte histórico de las transiciones del workflow. "
+"Aplica solo en entidades con workflow."
+
msgid "ctxtoolbar"
msgstr "Barra de herramientas"
@@ -1783,6 +1839,72 @@
msgid "custom_workflow_object"
msgstr "Workflow de"
+msgid "cw_dont_cross"
+msgstr ""
+
+msgctxt "CWSource"
+msgid "cw_dont_cross"
+msgstr ""
+
+msgid "cw_dont_cross_object"
+msgstr ""
+
+msgctxt "CWRType"
+msgid "cw_dont_cross_object"
+msgstr ""
+
+msgid "cw_host_config_of"
+msgstr ""
+
+msgctxt "CWSourceHostConfig"
+msgid "cw_host_config_of"
+msgstr ""
+
+msgid "cw_host_config_of_object"
+msgstr ""
+
+msgctxt "CWSource"
+msgid "cw_host_config_of_object"
+msgstr ""
+
+msgid "cw_may_cross"
+msgstr ""
+
+msgctxt "CWSource"
+msgid "cw_may_cross"
+msgstr ""
+
+msgid "cw_may_cross_object"
+msgstr ""
+
+msgctxt "CWRType"
+msgid "cw_may_cross_object"
+msgstr ""
+
+msgid "cw_source"
+msgstr ""
+
+msgid "cw_source_object"
+msgstr ""
+
+msgid "cw_support"
+msgstr ""
+
+msgctxt "CWSource"
+msgid "cw_support"
+msgstr ""
+
+msgid "cw_support_object"
+msgstr ""
+
+msgctxt "CWEType"
+msgid "cw_support_object"
+msgstr ""
+
+msgctxt "CWRType"
+msgid "cw_support_object"
+msgstr ""
+
msgid "cwetype-box"
msgstr "Vista \"caja\""
@@ -2198,10 +2320,6 @@
msgid "error while embedding page"
msgstr "Error durante la inclusión de la página"
-#, python-format
-msgid "error while handling __method: %s"
-msgstr "Error ocurrido durante el tratamiento del formulario (%s)"
-
msgid "error while publishing ReST text"
msgstr ""
"Se ha producido un error durante la interpretación del texto en formato ReST"
@@ -2248,6 +2366,9 @@
msgid "external page"
msgstr "Página externa"
+msgid "facet.filters"
+msgstr "Filtros"
+
msgid "facetbox"
msgstr "Caja de facetas"
@@ -2257,6 +2378,12 @@
msgid "facets_created_by-facet_description"
msgstr "Faceta creada por"
+msgid "facets_cw_source-facet"
+msgstr ""
+
+msgid "facets_cw_source-facet_description"
+msgstr ""
+
msgid "facets_cwfinal-facet"
msgstr "Faceta \"final\""
@@ -2467,8 +2594,11 @@
msgid "has_text"
msgstr "Contiene el texto"
-msgid "help"
-msgstr "Ayuda"
+msgid "header-left"
+msgstr ""
+
+msgid "header-right"
+msgstr ""
msgid "hide filter form"
msgstr "Esconder el filtro"
@@ -2793,6 +2923,9 @@
msgid "login"
msgstr "Usuario"
+msgid "login / password"
+msgstr ""
+
msgid "login or email"
msgstr "Usuario o dirección de correo"
@@ -2809,6 +2942,9 @@
msgid "main informations"
msgstr "Informaciones Generales"
+msgid "main_tab"
+msgstr ""
+
msgid "mainvars"
msgstr "Variables principales"
@@ -2837,6 +2973,13 @@
msgid "march"
msgstr "Marzo"
+msgid "match_host"
+msgstr ""
+
+msgctxt "CWSourceHostConfig"
+msgid "match_host"
+msgstr ""
+
msgid "maximum number of characters in short description"
msgstr "Máximo de caracteres en las descripciones cortas"
@@ -2917,6 +3060,10 @@
msgid "name"
msgstr "Nombre"
+msgctxt "CWSource"
+msgid "name"
+msgstr ""
+
msgctxt "State"
msgid "name"
msgstr "Nombre"
@@ -2943,6 +3090,9 @@
"Nombre de las variables principales que deberían ser utilizadas en la "
"selección de ser necesario (separarlas con comas)"
+msgid "name of the source"
+msgstr ""
+
msgid "name or identifier of the permission"
msgstr "Nombre o identificador del permiso"
@@ -3135,9 +3285,6 @@
msgid "possible views"
msgstr "Vistas posibles"
-msgid "powered by CubicWeb"
-msgstr "Potenciado en CubicWeb"
-
msgid "prefered_form"
msgstr "Forma preferida"
@@ -3222,6 +3369,9 @@
msgid "read_permission_object"
msgstr "Puede leer"
+msgid "regexp matching host(s) to which this config applies"
+msgstr ""
+
msgid "registry"
msgstr "Registro"
@@ -3275,11 +3425,7 @@
msgid "relations_object"
msgstr ""
-msgctxt "CWAttribute"
-msgid "relations_object"
-msgstr ""
-
-msgctxt "CWRelation"
+msgctxt "CWRType"
msgid "relations_object"
msgstr ""
@@ -3398,6 +3544,9 @@
msgid "security"
msgstr "Seguridad"
+msgid "see more"
+msgstr ""
+
msgid "see them all"
msgstr "Ver todos"
@@ -3500,6 +3649,11 @@
msgid "sorry, the server is unable to handle this query"
msgstr "Lo sentimos, el servidor no puede manejar esta consulta"
+msgid ""
+"source's configuration. One key=value per line, authorized keys depending on "
+"the source's type"
+msgstr ""
+
msgid "sparql xml"
msgstr "XML Sparql"
@@ -3772,6 +3926,13 @@
msgid "toggle check boxes"
msgstr "Cambiar valor"
+msgid "tr_count"
+msgstr ""
+
+msgctxt "TrInfo"
+msgid "tr_count"
+msgstr ""
+
msgid "transaction undoed"
msgstr "Transacciones Anuladas"
@@ -3823,6 +3984,10 @@
msgid "type"
msgstr "Tipo"
+msgctxt "CWSource"
+msgid "type"
+msgstr ""
+
msgctxt "Transition"
msgid "type"
msgstr "Tipo"
@@ -3834,6 +3999,9 @@
msgid "type here a sparql query"
msgstr "Escriba aquí su consulta en Sparql"
+msgid "type of the source"
+msgstr ""
+
msgid "ui"
msgstr "Interfaz Genérica"
@@ -3885,8 +4053,9 @@
msgid "unknown external entity"
msgstr "Entidad externa desconocida"
-msgid "unknown property key"
-msgstr "Clave de Propiedad desconocida"
+#, python-format
+msgid "unknown property key %s"
+msgstr "Clave de Propiedad desconocida: %s"
msgid "unknown vocabulary:"
msgstr "Vocabulario desconocido: "
@@ -4185,24 +4354,3 @@
msgid "you should probably delete that property"
msgstr "Debería probablamente suprimir esta propriedad"
-
-#~ msgid "add_perm"
-#~ msgstr "Agregado"
-
-#~ msgid "delete_perm"
-#~ msgstr "Eliminar"
-
-#~ msgid "edition"
-#~ msgstr "Edición"
-
-#~ msgid "graphical workflow for %s"
-#~ msgstr "Gráfica del workflow por %s"
-
-#~ msgid "personnal informations"
-#~ msgstr "Información personal"
-
-#~ msgid "read_perm"
-#~ msgstr "Lectura"
-
-#~ msgid "update_perm"
-#~ msgstr "Permiso de Modificar"
diff -r 48f468f33704 -r e4580e5f0703 i18n/fr.po
--- a/i18n/fr.po Fri Dec 10 12:17:18 2010 +0100
+++ b/i18n/fr.po Fri Mar 11 09:46:45 2011 +0100
@@ -4,7 +4,7 @@
msgid ""
msgstr ""
"Project-Id-Version: cubicweb 2.46.0\n"
-"PO-Revision-Date: 2010-09-15 15:12+0200\n"
+"PO-Revision-Date: 2011-01-03 14:35+0100\n"
"Last-Translator: Logilab Team \n"
"Language-Team: fr \n"
"Language: \n"
@@ -52,6 +52,10 @@
msgstr "la valeur %(value)r ne satisfait pas la contrainte %(cstr)s"
#, python-format
+msgid "%(etype)s by %(author)s"
+msgstr "%(etype)s par %(author)s"
+
+#, python-format
msgid "%(firstname)s %(surname)s"
msgstr "%(firstname)s %(surname)s"
@@ -207,8 +211,11 @@
msgid "AND"
msgstr "ET"
+msgid "About this site"
+msgstr "À propos de ce site"
+
msgid "Any"
-msgstr "N'importe"
+msgstr "Tous"
msgid "Attributes permissions:"
msgstr "Permissions des attributs"
@@ -327,11 +334,23 @@
msgid "CWRelation_plural"
msgstr "Relations"
+msgid "CWSource"
+msgstr "Source de données"
+
+msgid "CWSourceHostConfig"
+msgstr "Configuration de source"
+
+msgid "CWSourceHostConfig_plural"
+msgstr "Configurations de source"
+
+msgid "CWSource_plural"
+msgstr "Source de données"
+
msgid "CWUniqueTogetherConstraint"
-msgstr "Contrainte unique_together"
+msgstr "Contrainte d'unicité"
msgid "CWUniqueTogetherConstraint_plural"
-msgstr "Contraintes unique_together"
+msgstr "Contraintes d'unicité"
msgid "CWUser"
msgstr "Utilisateur"
@@ -360,8 +379,8 @@
"Can't restore relation %(rtype)s of entity %(eid)s, this relation does not "
"exists anymore in the schema."
msgstr ""
-"Ne peut restaurer la relation %(rtype)s de l'entité %(eid)s, cette "
-"relationn'existe plus dans le schéma"
+"Ne peut restaurer la relation %(rtype)s de l'entité %(eid)s, cette relation "
+"n'existe plus dans le schéma"
#, python-format
msgid ""
@@ -524,6 +543,12 @@
msgid "New CWRelation"
msgstr "Nouvelle définition de relation non finale"
+msgid "New CWSource"
+msgstr "Nouvelle source"
+
+msgid "New CWSourceHostConfig"
+msgstr "Nouvelle configuration de source"
+
msgid "New CWUniqueTogetherConstraint"
msgstr "Nouvelle contrainte unique_together"
@@ -588,6 +613,9 @@
msgid "Please note that this is only a shallow copy"
msgstr "Attention, cela n'effectue qu'une copie de surface"
+msgid "Powered by CubicWeb"
+msgstr "Construit avec CubicWeb"
+
msgid "RQLConstraint"
msgstr "contrainte rql"
@@ -634,6 +662,15 @@
msgid "SizeConstraint"
msgstr "contrainte de taille"
+msgid ""
+"Source's configuration for a particular host. One key=value per line, "
+"authorized keys depending on the source's type, overriding values defined on "
+"the source."
+msgstr ""
+"Configuration de la source pour un hôte spécifique. Une clé=valeur par "
+"ligne, les clés autorisées dépendantes du type de source. Les valeur "
+"surchargent celles définies sur la source."
+
msgid "Startup views"
msgstr "Vues de départ"
@@ -717,6 +754,12 @@
msgid "This CWRelation"
msgstr "Cette définition de relation"
+msgid "This CWSource"
+msgstr "Cette source"
+
+msgid "This CWSourceHostConfig"
+msgstr "Cette configuration de source"
+
msgid "This CWUniqueTogetherConstraint"
msgstr "Cette contrainte unique_together"
@@ -869,9 +912,6 @@
"Pour récupérer un cache, il faut utiliser utiliser la méthode\n"
"get_cache(cachename)."
-msgid "about this site"
-msgstr "à propos de ce site"
-
msgid "abstract base class for transitions"
msgstr "classe de base abstraite pour les transitions"
@@ -935,6 +975,9 @@
msgid "add CWRelation relation_type CWRType object"
msgstr "définition de relation"
+msgid "add CWSourceHostConfig cw_host_config_of CWSource object"
+msgstr "configuration d'hôte"
+
msgid "add CWUniqueTogetherConstraint constraint_of CWEType object"
msgstr "contrainte unique_together"
@@ -1125,6 +1168,14 @@
msgstr "avril"
#, python-format
+msgid "archive for %(author)s"
+msgstr "archive pour l'auteur %(author)s"
+
+#, python-format
+msgid "archive for %(month)s/%(year)s"
+msgstr "archive pour le mois %(month)s/%(year)s"
+
+#, python-format
msgid "at least one relation %(rtype)s is required on %(etype)s (%(eid)s)"
msgstr ""
"l'entité #%(eid)s de type %(etype)s doit nécessairement être reliée à une\n"
@@ -1184,55 +1235,6 @@
msgid "boxes"
msgstr "boîtes"
-msgid "boxes_bookmarks_box"
-msgstr "boîte signets"
-
-msgid "boxes_bookmarks_box_description"
-msgstr "boîte contenant les signets de l'utilisateur"
-
-msgid "boxes_download_box"
-msgstr "boîte de téléchargement"
-
-msgid "boxes_download_box_description"
-msgstr "boîte contenant un lien permettant de télécharger la ressource"
-
-msgid "boxes_edit_box"
-msgstr "boîte d'actions"
-
-msgid "boxes_edit_box_description"
-msgstr ""
-"boîte affichant les différentes actions possibles sur les données affichées"
-
-msgid "boxes_filter_box"
-msgstr "filtrer"
-
-msgid "boxes_filter_box_description"
-msgstr "boîte permettant de filtrer parmi les résultats d'une recherche"
-
-msgid "boxes_possible_views_box"
-msgstr "boîte des vues possibles"
-
-msgid "boxes_possible_views_box_description"
-msgstr "boîte affichant les vues possibles pour les données courantes"
-
-msgid "boxes_rss"
-msgstr "icône RSS"
-
-msgid "boxes_rss_description"
-msgstr "l'icône RSS permettant de récupérer la vue RSS des données affichées"
-
-msgid "boxes_search_box"
-msgstr "boîte de recherche"
-
-msgid "boxes_search_box_description"
-msgstr "boîte avec un champ de recherche simple"
-
-msgid "boxes_startup_views_box"
-msgstr "boîte des vues de départs"
-
-msgid "boxes_startup_views_box_description"
-msgstr "boîte affichant les vues de départs de l'application"
-
msgid "bug report sent"
msgstr "rapport d'erreur envoyé"
@@ -1321,11 +1323,14 @@
#, python-format
msgid ""
-"can't set inlined=%(inlined)s, %(stype)s %(rtype)s %(otype)s has cardinality="
+"can't set inlined=True, %(stype)s %(rtype)s %(otype)s has cardinality="
"%(card)s"
msgstr ""
-"ne peut mettre 'inlined' = %(inlined)s, %(stype)s %(rtype)s %(otype)s a pour "
-"cardinalité %(card)s"
+"ne peut mettre 'inlined'=Vrai, %(stype)s %(rtype)s %(otype)s a "
+"pour cardinalité %(card)s"
+
+msgid "cancel"
+msgstr "annuler"
msgid "cancel select"
msgstr "annuler la sélection"
@@ -1357,6 +1362,9 @@
msgid "click here to see created entity"
msgstr "cliquez ici pour voir l'entité créée"
+msgid "click here to see edited entity"
+msgstr "cliquez ici pour voir l'entité modifiée"
+
msgid "click on the box to cancel the deletion"
msgstr "cliquez dans la zone d'édition pour annuler la suppression"
@@ -1386,46 +1394,12 @@
msgid "components"
msgstr "composants"
-msgid "components_appliname"
-msgstr "titre de l'application"
-
-msgid "components_appliname_description"
-msgstr "affiche le titre de l'application dans l'en-tête de page"
-
-msgid "components_breadcrumbs"
-msgstr "fil d'ariane"
-
-msgid "components_breadcrumbs_description"
-msgstr ""
-"affiche un chemin permettant de localiser la page courante dans le site"
-
msgid "components_etypenavigation"
msgstr "filtrage par type"
msgid "components_etypenavigation_description"
msgstr "permet de filtrer par type d'entité les résultats d'une recherche"
-msgid "components_help"
-msgstr "bouton aide"
-
-msgid "components_help_description"
-msgstr "le bouton d'aide, dans l'en-tête de page"
-
-msgid "components_loggeduserlink"
-msgstr "lien utilisateur"
-
-msgid "components_loggeduserlink_description"
-msgstr ""
-"affiche un lien vers le formulaire d'authentification pour les utilisateurs "
-"anonymes, sinon une boite contenant notamment des liens propres à "
-"l'utilisateur connectés"
-
-msgid "components_logo"
-msgstr "logo"
-
-msgid "components_logo_description"
-msgstr "le logo de l'application, dans l'en-tête de page"
-
msgid "components_navigation"
msgstr "navigation par page"
@@ -1472,6 +1446,17 @@
msgid "conditions"
msgstr "conditions"
+msgid "config"
+msgstr "configuration"
+
+msgctxt "CWSource"
+msgid "config"
+msgstr "configuration"
+
+msgctxt "CWSourceHostConfig"
+msgid "config"
+msgstr "configuration"
+
msgid "config mode"
msgstr "mode de configuration"
@@ -1525,46 +1510,6 @@
msgid "content type"
msgstr "type MIME"
-msgid "contentnavigation"
-msgstr "composants contextuels"
-
-msgid "contentnavigation_breadcrumbs"
-msgstr "fil d'ariane"
-
-msgid "contentnavigation_breadcrumbs_description"
-msgstr ""
-"affiche un chemin permettant de localiser la page courante dans le site"
-
-msgid "contentnavigation_metadata"
-msgstr "méta-données de l'entité"
-
-msgid "contentnavigation_metadata_description"
-msgstr ""
-
-msgid "contentnavigation_prevnext"
-msgstr "élément précedent / suivant"
-
-msgid "contentnavigation_prevnext_description"
-msgstr ""
-"affiche des liens permettant de passer d'une entité à une autre sur les "
-"entités implémentant l'interface \"précédent/suivant\"."
-
-msgid "contentnavigation_seealso"
-msgstr "voir aussi"
-
-msgid "contentnavigation_seealso_description"
-msgstr ""
-"section affichant les entités liées par la relation \"voir aussi\" si "
-"l'entité supporte cette relation."
-
-msgid "contentnavigation_wfhistory"
-msgstr "historique du workflow."
-
-msgid "contentnavigation_wfhistory_description"
-msgstr ""
-"section affichant l'historique du workflow pour les entités ayant un "
-"workflow."
-
msgid "context"
msgstr "contexte"
@@ -1651,6 +1596,11 @@
msgstr "création relation %(linkto)s"
msgid ""
+"creating CWSourceHostConfig (CWSourceHostConfig cw_host_config_of CWSource "
+"%(linkto)s)"
+msgstr "création d'une configuration d'hôte pour la source %(linkto)s"
+
+msgid ""
"creating CWUniqueTogetherConstraint (CWUniqueTogetherConstraint "
"constraint_of CWEType %(linkto)s)"
msgstr "création d'une contrainte unique_together sur %(linkto)s"
@@ -1776,6 +1726,116 @@
msgid "csv export"
msgstr "export CSV"
+msgid "ctxcomponents"
+msgstr "composants contextuels"
+
+msgid "ctxcomponents_anonuserlink"
+msgstr "lien utilisateur"
+
+msgid "ctxcomponents_anonuserlink_description"
+msgstr ""
+"affiche un lien vers le formulaire d'authentification pour les utilisateurs "
+"anonymes, sinon une boite contenant notamment des liens propres à "
+"l'utilisateur connectés"
+
+msgid "ctxcomponents_appliname"
+msgstr "titre de l'application"
+
+msgid "ctxcomponents_appliname_description"
+msgstr "affiche le titre de l'application dans l'en-tête de page"
+
+msgid "ctxcomponents_bookmarks_box"
+msgstr "boîte signets"
+
+msgid "ctxcomponents_bookmarks_box_description"
+msgstr "boîte contenant les signets de l'utilisateur"
+
+msgid "ctxcomponents_breadcrumbs"
+msgstr "fil d'ariane"
+
+msgid "ctxcomponents_breadcrumbs_description"
+msgstr ""
+"affiche un chemin permettant de localiser la page courante dans le site"
+
+msgid "ctxcomponents_download_box"
+msgstr "boîte de téléchargement"
+
+msgid "ctxcomponents_download_box_description"
+msgstr "boîte contenant un lien permettant de télécharger la ressource"
+
+msgid "ctxcomponents_edit_box"
+msgstr "boîte d'actions"
+
+msgid "ctxcomponents_edit_box_description"
+msgstr ""
+"boîte affichant les différentes actions possibles sur les données affichées"
+
+msgid "ctxcomponents_facet.filters"
+msgstr "boîte à facettes"
+
+msgid "ctxcomponents_facet.filters_description"
+msgstr ""
+"boîte permettant de filtrer parmi les résultats d'une recherche à l'aide de "
+"facettes"
+
+msgid "ctxcomponents_logo"
+msgstr "logo"
+
+msgid "ctxcomponents_logo_description"
+msgstr "le logo de l'application, dans l'en-tête de page"
+
+msgid "ctxcomponents_metadata"
+msgstr "méta-données de l'entité"
+
+msgid "ctxcomponents_metadata_description"
+msgstr ""
+
+msgid "ctxcomponents_possible_views_box"
+msgstr "boîte des vues possibles"
+
+msgid "ctxcomponents_possible_views_box_description"
+msgstr "boîte affichant les vues possibles pour les données courantes"
+
+msgid "ctxcomponents_prevnext"
+msgstr "élément précedent / suivant"
+
+msgid "ctxcomponents_prevnext_description"
+msgstr ""
+"affiche des liens permettant de passer d'une entité à une autre sur les "
+"entités implémentant l'interface \"précédent/suivant\"."
+
+msgid "ctxcomponents_rss"
+msgstr "icône RSS"
+
+msgid "ctxcomponents_rss_description"
+msgstr "l'icône RSS permettant de récupérer la vue RSS des données affichées"
+
+msgid "ctxcomponents_search_box"
+msgstr "boîte de recherche"
+
+msgid "ctxcomponents_search_box_description"
+msgstr "boîte avec un champ de recherche simple"
+
+msgid "ctxcomponents_startup_views_box"
+msgstr "boîte des vues de départs"
+
+msgid "ctxcomponents_startup_views_box_description"
+msgstr "boîte affichant les vues de départs de l'application"
+
+msgid "ctxcomponents_userstatus"
+msgstr "état de l'utilisateur"
+
+msgid "ctxcomponents_userstatus_description"
+msgstr ""
+
+msgid "ctxcomponents_wfhistory"
+msgstr "historique du workflow."
+
+msgid "ctxcomponents_wfhistory_description"
+msgstr ""
+"section affichant l'historique du workflow pour les entités ayant un "
+"workflow."
+
msgid "ctxtoolbar"
msgstr "barre d'outils"
@@ -1785,6 +1845,72 @@
msgid "custom_workflow_object"
msgstr "workflow de"
+msgid "cw_dont_cross"
+msgstr "don't cross"
+
+msgctxt "CWSource"
+msgid "cw_dont_cross"
+msgstr "don't cross"
+
+msgid "cw_dont_cross_object"
+msgstr "can't be crossed with"
+
+msgctxt "CWRType"
+msgid "cw_dont_cross_object"
+msgstr "can't be crossed with"
+
+msgid "cw_host_config_of"
+msgstr "host configuration of"
+
+msgctxt "CWSourceHostConfig"
+msgid "cw_host_config_of"
+msgstr "host configuration of"
+
+msgid "cw_host_config_of_object"
+msgstr "has host configuration"
+
+msgctxt "CWSource"
+msgid "cw_host_config_of_object"
+msgstr "has host configuration"
+
+msgid "cw_may_cross"
+msgstr "may cross"
+
+msgctxt "CWSource"
+msgid "cw_may_cross"
+msgstr "may cross"
+
+msgid "cw_may_cross_object"
+msgstr "may be crossed with"
+
+msgctxt "CWRType"
+msgid "cw_may_cross_object"
+msgstr "may be crossed with"
+
+msgid "cw_source"
+msgstr "from data source"
+
+msgid "cw_source_object"
+msgstr "entities"
+
+msgid "cw_support"
+msgstr "support"
+
+msgctxt "CWSource"
+msgid "cw_support"
+msgstr "support"
+
+msgid "cw_support_object"
+msgstr "supported by"
+
+msgctxt "CWEType"
+msgid "cw_support_object"
+msgstr "supported by"
+
+msgctxt "CWRType"
+msgid "cw_support_object"
+msgstr "supported by"
+
msgid "cwetype-box"
msgstr "vue \"boîte\""
@@ -2195,10 +2321,6 @@
msgid "error while embedding page"
msgstr "erreur pendant l'inclusion de la page"
-#, python-format
-msgid "error while handling __method: %s"
-msgstr "erreur survenue lors du traitement de formulaire (%s)"
-
msgid "error while publishing ReST text"
msgstr ""
"une erreur s'est produite lors de l'interprétation du texte au format ReST"
@@ -2245,6 +2367,9 @@
msgid "external page"
msgstr "page externe"
+msgid "facet.filters"
+msgstr "facettes"
+
msgid "facetbox"
msgstr "boîte à facettes"
@@ -2254,6 +2379,12 @@
msgid "facets_created_by-facet_description"
msgstr ""
+msgid "facets_cw_source-facet"
+msgstr "facette \"source de données\""
+
+msgid "facets_cw_source-facet_description"
+msgstr ""
+
msgid "facets_cwfinal-facet"
msgstr "facette \"type d'entité ou de relation final\""
@@ -2465,8 +2596,11 @@
msgid "has_text"
msgstr "contient le texte"
-msgid "help"
-msgstr "aide"
+msgid "header-left"
+msgstr "en-tête (gauche)"
+
+msgid "header-right"
+msgstr "en-tête (droite)"
msgid "hide filter form"
msgstr "cacher le filtre"
@@ -2791,6 +2925,9 @@
msgid "login"
msgstr "identifiant"
+msgid "login / password"
+msgstr "identifiant / mot de passe"
+
msgid "login or email"
msgstr "identifiant ou email"
@@ -2807,6 +2944,9 @@
msgid "main informations"
msgstr "Informations générales"
+msgid "main_tab"
+msgstr "description"
+
msgid "mainvars"
msgstr "variables principales"
@@ -2835,6 +2975,13 @@
msgid "march"
msgstr "mars"
+msgid "match_host"
+msgstr "pour l'hôte"
+
+msgctxt "CWSourceHostConfig"
+msgid "match_host"
+msgstr "pour l'hôte"
+
msgid "maximum number of characters in short description"
msgstr "nombre maximum de caractères dans les descriptions courtes"
@@ -2915,6 +3062,10 @@
msgid "name"
msgstr "nom"
+msgctxt "CWSource"
+msgid "name"
+msgstr "nom"
+
msgctxt "State"
msgid "name"
msgstr "nom"
@@ -2941,6 +3092,9 @@
"nom des variables principales qui devrait être utilisées dans la sélection "
"si nécessaire (les séparer par des virgules)"
+msgid "name of the source"
+msgstr "nom de la source"
+
msgid "name or identifier of the permission"
msgstr "nom (identifiant) de la permission"
@@ -3135,9 +3289,6 @@
msgid "possible views"
msgstr "vues possibles"
-msgid "powered by CubicWeb"
-msgstr "construit avec CubicWeb"
-
msgid "prefered_form"
msgstr "forme préférée"
@@ -3222,6 +3373,10 @@
msgid "read_permission_object"
msgstr "peut lire"
+msgid "regexp matching host(s) to which this config applies"
+msgstr ""
+"expression régulière des noms d'hôtes auxquels cette configuration s'applique"
+
msgid "registry"
msgstr "registre"
@@ -3275,13 +3430,9 @@
msgid "relations_object"
msgstr "relations de"
-msgctxt "CWAttribute"
+msgctxt "CWRType"
msgid "relations_object"
-msgstr "contraint par"
-
-msgctxt "CWRelation"
-msgid "relations_object"
-msgstr "contraint par"
+msgstr "relations de"
msgid "relative url of the bookmarked page"
msgstr "url relative de la page"
@@ -3399,6 +3550,9 @@
msgid "security"
msgstr "sécurité"
+msgid "see more"
+msgstr "voir plus"
+
msgid "see them all"
msgstr "les voir toutes"
@@ -3500,6 +3654,14 @@
msgid "sorry, the server is unable to handle this query"
msgstr "désolé, le serveur ne peut traiter cette requête"
+msgid ""
+"source's configuration. One key=value per line, authorized keys depending on "
+"the source's type"
+msgstr ""
+"Configuration de la source. Une clé=valeur par ligne, les clés autorisées "
+"dépendantes du type de source. Les valeur surchargent celles définies sur la "
+"source."
+
msgid "sparql xml"
msgstr "XML Sparql"
@@ -3773,6 +3935,13 @@
msgid "toggle check boxes"
msgstr "inverser les cases à cocher"
+msgid "tr_count"
+msgstr "n° de transition"
+
+msgctxt "TrInfo"
+msgid "tr_count"
+msgstr "n° de transition"
+
msgid "transaction undoed"
msgstr "transaction annulées"
@@ -3824,6 +3993,10 @@
msgid "type"
msgstr "type"
+msgctxt "CWSource"
+msgid "type"
+msgstr "type"
+
msgctxt "Transition"
msgid "type"
msgstr "type"
@@ -3835,6 +4008,9 @@
msgid "type here a sparql query"
msgstr "Tapez une requête sparql"
+msgid "type of the source"
+msgstr "type de la source"
+
msgid "ui"
msgstr "propriétés génériques de l'interface"
@@ -3886,8 +4062,9 @@
msgid "unknown external entity"
msgstr "entité (externe) introuvable"
-msgid "unknown property key"
-msgstr "clé de propriété inconnue"
+#, python-format
+msgid "unknown property key %s"
+msgstr "clé de propriété inconnue : %s"
msgid "unknown vocabulary:"
msgstr "vocabulaire inconnu : "
@@ -4185,27 +4362,3 @@
msgid "you should probably delete that property"
msgstr "vous devriez probablement supprimer cette propriété"
-
-#~ msgid "add_perm"
-#~ msgstr "ajout"
-
-#~ msgid "delete_perm"
-#~ msgstr "suppression"
-
-#~ msgid "edition"
-#~ msgstr "édition"
-
-#~ msgid "graphical workflow for %s"
-#~ msgstr "graphique du workflow pour %s"
-
-#~ msgid "personnal informations"
-#~ msgstr "informations personnelles"
-
-#~ msgid "read_perm"
-#~ msgstr "lecture"
-
-#~ msgid "update_perm"
-#~ msgstr "modification"
-
-#~ msgid "yams type, rdf type or mime type of the object"
-#~ msgstr "type yams, vocabulaire rdf ou type mime de l'objet"
diff -r 48f468f33704 -r e4580e5f0703 migration.py
--- a/migration.py Fri Dec 10 12:17:18 2010 +0100
+++ b/migration.py Fri Mar 11 09:46:45 2011 +0100
@@ -334,14 +334,15 @@
if not self.execscript_confirm(migrscript):
return
scriptlocals = self._create_context().copy()
+ scriptlocals.update({'__file__': migrscript,
+ '__args__': kwargs.pop("scriptargs", [])})
self._context_stack.append(scriptlocals)
if script_mode == 'python':
if funcname is None:
pyname = '__main__'
else:
pyname = splitext(basename(migrscript))[0]
- scriptlocals.update({'__file__': migrscript, '__name__': pyname,
- '__args__': kwargs.pop("scriptargs", [])})
+ scriptlocals['__name__'] = pyname
execfile(migrscript, scriptlocals)
if funcname is not None:
try:
@@ -358,8 +359,13 @@
self.commit()
else: # script_mode == 'doctest'
import doctest
- doctest.testfile(migrscript, module_relative=False,
- optionflags=doctest.ELLIPSIS, globs=scriptlocals)
+ return doctest.testfile(migrscript, module_relative=False,
+ optionflags=doctest.ELLIPSIS,
+ # verbose mode when user input is expected
+ verbose=self.verbosity==2,
+ report=True,
+ encoding='utf-8',
+ globs=scriptlocals)
self._context_stack.pop()
def cmd_option_renamed(self, oldname, newname):
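
Note on the migration.py change above: `__file__` and `__args__` are now injected into every
migration script's namespace (not only Python ones), so maintenance scripts can take
parameters -- drop_external_entities.py below already relies on this with `source, = __args__`.
A minimal sketch, assuming the caller forwards arguments through the `scriptargs` keyword; the
script name and the cache-purge logic are made up for illustration:

# purge_cwcache.py -- hypothetical maintenance script; rql, commit and __args__
# are provided by the migration environment built from scriptlocals above
from datetime import datetime, timedelta

days, = __args__   # forwarded by the caller through 'scriptargs'
limit = datetime.now() - timedelta(days=int(days))
rql('DELETE CWCache X WHERE X timestamp < %(limit)s', {'limit': limit})
commit()
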
diff -r 48f468f33704 -r e4580e5f0703 misc/migration/3.10.0_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/misc/migration/3.10.0_Any.py Fri Mar 11 09:46:45 2011 +0100
@@ -0,0 +1,38 @@
+from __future__ import with_statement
+
+from cubicweb.server.session import hooks_control
+
+for uri, cfg in config.sources().items():
+ if uri in ('system', 'admin'):
+ continue
+ repo.sources_by_uri[uri] = repo.get_source(cfg['adapter'], uri, cfg)
+
+add_entity_type('CWSource')
+add_relation_definition('CWSource', 'cw_source', 'CWSource')
+add_entity_type('CWSourceHostConfig')
+
+with hooks_control(session, session.HOOKS_ALLOW_ALL, 'cw.sources'):
+ create_entity('CWSource', type=u'native', name=u'system')
+commit()
+
+sql('INSERT INTO cw_source_relation(eid_from,eid_to) '
+ 'SELECT e.eid,s.cw_eid FROM entities as e, cw_CWSource as s '
+ 'WHERE s.cw_name=e.type')
+commit()
+
+for uri, cfg in config.sources().items():
+ if uri in ('system', 'admin'):
+ continue
+ repo.sources_by_uri.pop(uri)
+ config = u'\n'.join('%s=%s' % (key, value) for key, value in cfg.items()
+ if key != 'adapter' and value is not None)
+ create_entity('CWSource', name=unicode(uri), type=unicode(cfg['adapter']),
+ config=config)
+commit()
+
+# rename cwprops for boxes/contentnavigation
+for x in rql('Any X,XK WHERE X pkey XK, '
+ 'X pkey ~= "boxes.%s" OR '
+ 'X pkey ~= "contentnavigation.%s"').entities():
+ x.set_attributes(pkey=u'ctxcomponents.' + x.pkey.split('.', 1)[1])
+
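
The last loop of this script renames existing CWProperty keys to follow the boxes /
contentnavigation to ctxcomponents merge also visible in the .po files above. A rough
illustration of the mapping (the two keys are merely plausible examples of old-style
properties):

# illustration only: how the pkey rewrite above maps old property keys
for pkey in (u'boxes.edit_box.context', u'contentnavigation.wfhistory.visible'):
    print pkey, '->', u'ctxcomponents.' + pkey.split('.', 1)[1]
# boxes.edit_box.context -> ctxcomponents.edit_box.context
# contentnavigation.wfhistory.visible -> ctxcomponents.wfhistory.visible
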
diff -r 48f468f33704 -r e4580e5f0703 misc/migration/3.10.0_common.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/misc/migration/3.10.0_common.py Fri Mar 11 09:46:45 2011 +0100
@@ -0,0 +1,1 @@
+option_group_changed('cleanup-session-time', 'web', 'main')
diff -r 48f468f33704 -r e4580e5f0703 misc/migration/3.10.4_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/misc/migration/3.10.4_Any.py Fri Mar 11 09:46:45 2011 +0100
@@ -0,0 +1,8 @@
+for eschema in schema.entities():
+ if not (eschema.final or 'cw_source' in eschema.subjrels):
+ add_relation_definition(eschema.type, 'cw_source', 'CWSource', ask_confirm=False)
+
+sql('INSERT INTO cw_source_relation(eid_from, eid_to) '
+ 'SELECT e.eid,s.cw_eid FROM entities as e, cw_CWSource as s '
+ 'WHERE s.cw_name=e.source AND NOT EXISTS(SELECT 1 FROM cw_source_relation WHERE eid_from=e.eid AND eid_to=s.cw_eid)')
+commit()
diff -r 48f468f33704 -r e4580e5f0703 misc/migration/3.10.5_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/misc/migration/3.10.5_Any.py Fri Mar 11 09:46:45 2011 +0100
@@ -0,0 +1,6 @@
+sync_schema_props_perms('CWSourceHostConfig', syncperms=False)
+
+sql('INSERT INTO cw_source_relation(eid_from, eid_to) '
+ 'SELECT e.eid,s.cw_eid FROM entities as e, cw_CWSource as s '
+ 'WHERE s.cw_name=e.source AND NOT EXISTS(SELECT 1 FROM cw_source_relation WHERE eid_from=e.eid AND eid_to=s.cw_eid)')
+commit()
diff -r 48f468f33704 -r e4580e5f0703 misc/migration/3.10.7_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/misc/migration/3.10.7_Any.py Fri Mar 11 09:46:45 2011 +0100
@@ -0,0 +1,2 @@
+add_attribute('TrInfo', 'tr_count')
+sync_schema_props_perms('TrInfo')
diff -r 48f468f33704 -r e4580e5f0703 misc/migration/3.10.8_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/misc/migration/3.10.8_Any.py Fri Mar 11 09:46:45 2011 +0100
@@ -0,0 +1,1 @@
+sync_schema_props_perms('CWSource', syncprops=False)
diff -r 48f468f33704 -r e4580e5f0703 misc/migration/3.10.9_Any.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/misc/migration/3.10.9_Any.py Fri Mar 11 09:46:45 2011 +0100
@@ -0,0 +1,32 @@
+from __future__ import with_statement
+import sys
+
+# fix some corrupted entities noticed on several instances
+rql('DELETE CWConstraint X WHERE NOT E constrained_by X')
+rql('SET X is_instance_of Y WHERE X is Y, NOT X is_instance_of Y')
+commit()
+
+if confirm('fix existing cwuri?'):
+ from logilab.common.shellutils import ProgressBar
+ from cubicweb.server.session import hooks_control
+ rset = rql('Any X, XC WHERE X cwuri XC, X cwuri ~= "%/eid/%"')
+ if sys.stdout.isatty():
+ pb = ProgressBar(nbops=rset.rowcount, size=70)
+ else:
+ pb = None
+ with hooks_control(session, session.HOOKS_DENY_ALL, 'integrity'):
+ for i, e in enumerate(rset.entities()):
+ e.set_attributes(cwuri=e.cwuri.replace('/eid', ''))
+            if i % 100 == 0: # commit every 100 entities to limit memory consumption
+ commit(ask_confirm=False)
+ if pb is not None:
+ pb.update()
+ commit(ask_confirm=False)
+
+try:
+ from cubicweb import devtools
+ option_group_changed('anonymous-user', 'main', 'web')
+ option_group_changed('anonymous-password', 'main', 'web')
+except ImportError:
+ # cubicweb-dev unavailable, nothing needed
+ pass
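
For reference, the cwuri fixup above only drops the spurious '/eid' path segment matched by
the ~= "%/eid/%" pattern; a one-line illustration with a made-up base url:

# illustration of the rewrite applied by set_attributes(cwuri=...) above
print u'http://www.example.org/instance/eid/1234'.replace('/eid', '')
# -> http://www.example.org/instance/1234
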
diff -r 48f468f33704 -r e4580e5f0703 misc/migration/bootstrapmigration_repository.py
--- a/misc/migration/bootstrapmigration_repository.py Fri Dec 10 12:17:18 2010 +0100
+++ b/misc/migration/bootstrapmigration_repository.py Fri Mar 11 09:46:45 2011 +0100
@@ -97,6 +97,14 @@
if applcubicwebversion < (3, 9, 6) and cubicwebversion >= (3, 9, 6):
add_entity_type('CWUniqueTogetherConstraint')
+if not ('CWUniqueTogetherConstraint', 'CWRType') in schema['relations'].rdefs:
+ add_relation_definition('CWUniqueTogetherConstraint', 'relations', 'CWRType')
+ rql('SET C relations RT WHERE C relations RDEF, RDEF relation_type RT')
+ commit()
+ drop_relation_definition('CWUniqueTogetherConstraint', 'relations', 'CWAttribute')
+ drop_relation_definition('CWUniqueTogetherConstraint', 'relations', 'CWRelation')
+
+
if applcubicwebversion < (3, 4, 0) and cubicwebversion >= (3, 4, 0):
with hooks_control(session, session.HOOKS_ALLOW_ALL, 'integrity'):
diff -r 48f468f33704 -r e4580e5f0703 misc/migration/postcreate.py
--- a/misc/migration/postcreate.py Fri Dec 10 12:17:18 2010 +0100
+++ b/misc/migration/postcreate.py Fri Mar 11 09:46:45 2011 +0100
@@ -15,9 +15,8 @@
#
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see .
-"""cubicweb post creation script, set user's workflow
+"""cubicweb post creation script, set user's workflow"""
-"""
# insert versions
create_entity('CWProperty', pkey=u'system.version.cubicweb',
value=unicode(config.cubicweb_version()))
diff -r 48f468f33704 -r e4580e5f0703 misc/scripts/cwuser_ldap2system.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/misc/scripts/cwuser_ldap2system.py Fri Mar 11 09:46:45 2011 +0100
@@ -0,0 +1,40 @@
+import base64
+from cubicweb.server.utils import crypt_password
+
+dbdriver = config.sources()['system']['db-driver']
+from logilab.database import get_db_helper
+dbhelper = get_db_helper(dbdriver)
+
+insert = ('INSERT INTO cw_cwuser (cw_creation_date,'
+ ' cw_eid,'
+ ' cw_modification_date,'
+ ' cw_login,'
+ ' cw_firstname,'
+ ' cw_surname,'
+ ' cw_last_login_time,'
+ ' cw_upassword,'
+ ' cw_cwuri) '
+ "VALUES (%(mtime)s, %(eid)s, %(mtime)s, %(login)s, "
+ " %(firstname)s, %(surname)s, %(mtime)s, %(pwd)s, 'foo');")
+update = "UPDATE entities SET source='system' WHERE eid=%(eid)s;"
+rset = sql("SELECT eid,type,source,extid,mtime FROM entities WHERE source!='system'", ask_confirm=False)
+for eid, type, source, extid, mtime in rset:
+ if type != 'CWUser':
+ print "don't know what to do with entity type", type
+ continue
+ if not source.lower().startswith('ldap'):
+ print "don't know what to do with source type", source
+ continue
+ extid = base64.decodestring(extid)
+ ldapinfos = [x.strip().split('=') for x in extid.split(',')]
+ login = ldapinfos[0][1]
+ firstname = login.capitalize()
+ surname = login.capitalize()
+ args = dict(eid=eid, type=type, source=source, login=login,
+ firstname=firstname, surname=surname, mtime=mtime,
+ pwd=dbhelper.binary_value(crypt_password('toto')))
+ print args
+ sql(insert, args)
+ sql(update, args)
+
+commit()
diff -r 48f468f33704 -r e4580e5f0703 misc/scripts/drop_external_entities.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/misc/scripts/drop_external_entities.py Fri Mar 11 09:46:45 2011 +0100
@@ -0,0 +1,23 @@
+from cubicweb import UnknownEid
+source, = __args__
+
+sql("DELETE FROM entities WHERE type='Int'")
+
+ecnx = session.pool.connection(source)
+for e in rql('Any X WHERE X cw_source S, S name %(name)s', {'name': source}).entities():
+ meta = e.cw_metainformation()
+ assert meta['source']['uri'] == source
+ try:
+ suri = ecnx.describe(meta['extid'])[1]
+ except UnknownEid:
+ print 'cant describe', e.__regid__, e.eid, meta
+ continue
+ if suri != 'system':
+ try:
+ print 'deleting', e.__regid__, e.eid, suri, e.dc_title().encode('utf8')
+ repo.delete_info(session, e, suri, meta['extid'], scleanup=True)
+ except UnknownEid:
+ print ' cant delete', e.__regid__, e.eid, meta
+
+
+commit()
diff -r 48f468f33704 -r e4580e5f0703 misc/scripts/repair_file_1-9_migration.py
--- a/misc/scripts/repair_file_1-9_migration.py Fri Dec 10 12:17:18 2010 +0100
+++ b/misc/scripts/repair_file_1-9_migration.py Fri Mar 11 09:46:45 2011 +0100
@@ -19,9 +19,9 @@
sourcescfg = repo.config.sources()
backupcfg = cwconfig.instance_configuration(backupinstance)
backupcfg.repairing = True
-backuprepo, backupcnx = dbapi.in_memory_cnx(backupcfg, sourcescfg['admin']['login'],
- password=sourcescfg['admin']['password'],
- host='localhost')
+backuprepo, backupcnx = dbapi.in_memory_repo_cnx(backupcfg, sourcescfg['admin']['login'],
+ password=sourcescfg['admin']['password'],
+ host='localhost')
backupcu = backupcnx.cursor()
with hooks_control(session, session.HOOKS_DENY_ALL):
diff -r 48f468f33704 -r e4580e5f0703 mixins.py
--- a/mixins.py Fri Dec 10 12:17:18 2010 +0100
+++ b/mixins.py Fri Mar 11 09:46:45 2011 +0100
@@ -35,7 +35,7 @@
benefit from this default implementation
"""
__metaclass__ = class_deprecated
- __deprecation_warning__ = '[3.9] TreeMixIn is deprecated, use/override ITreeAdapter instead'
+ __deprecation_warning__ = '[3.9] TreeMixIn is deprecated, use/override ITreeAdapter instead (%(cls)s)'
tree_attribute = None
# XXX misnamed
@@ -205,7 +205,7 @@
class TreeViewMixIn(object):
"""a recursive tree view"""
__metaclass__ = class_deprecated
- __deprecation_warning__ = '[3.9] TreeViewMixIn is deprecated, use/override BaseTreeView instead'
+ __deprecation_warning__ = '[3.9] TreeViewMixIn is deprecated, use/override BaseTreeView instead (%(cls)s)'
__regid__ = 'tree'
__select__ = implements(ITree, warn=False)
@@ -244,7 +244,7 @@
class TreePathMixIn(object):
"""a recursive path view"""
__metaclass__ = class_deprecated
- __deprecation_warning__ = '[3.9] TreePathMixIn is deprecated, use/override TreePathView instead'
+ __deprecation_warning__ = '[3.9] TreePathMixIn is deprecated, use/override TreePathView instead (%(cls)s)'
__regid__ = 'path'
item_vid = 'oneline'
separator = u' > '
@@ -270,7 +270,7 @@
class ProgressMixIn(object):
"""provide a default implementations for IProgress interface methods"""
__metaclass__ = class_deprecated
- __deprecation_warning__ = '[3.9] ProgressMixIn is deprecated, use/override IProgressAdapter instead'
+ __deprecation_warning__ = '[3.9] ProgressMixIn is deprecated, use/override IProgressAdapter instead (%(cls)s)'
@property
def cost(self):
diff -r 48f468f33704 -r e4580e5f0703 req.py
--- a/req.py Fri Dec 10 12:17:18 2010 +0100
+++ b/req.py Fri Mar 11 09:46:45 2011 +0100
@@ -210,8 +210,7 @@
if not isinstance(values, (list, tuple)):
values = (values,)
for value in values:
- if value is None:
- raise ValueError(_('unauthorized value'))
+ assert value is not None
args.append(u'%s=%s' % (param, self.url_quote(value)))
return '&'.join(args)
diff -r 48f468f33704 -r e4580e5f0703 rset.py
--- a/rset.py Fri Dec 10 12:17:18 2010 +0100
+++ b/rset.py Fri Mar 11 09:46:45 2011 +0100
@@ -386,6 +386,19 @@
if self.rows[i][col] is not None:
yield self.get_entity(i, col)
+ def iter_rows_with_entities(self):
+ """ iterates over rows, and for each row
+ eids are converted to plain entities
+ """
+ for i, row in enumerate(self):
+ _row = []
+ for j, col in enumerate(row):
+ try:
+ _row.append(self.get_entity(i, j) if col is not None else col)
+ except NotAnEntity:
+ _row.append(col)
+ yield _row
+
def complete_entity(self, row, col=0, skip_bytes=True):
"""short cut to get an completed entity instance for a particular
row (all instance's attributes have been fetched)
@@ -401,9 +414,9 @@
.. warning::
- Due to the cache wrapping this function, you should NEVER
- give row as a named parameter (i.e. rset.get_entity(req, 0)
- is OK but rset.get_entity(row=0, req=req) isn't)
+ Due to the cache wrapping this function, you should NEVER give row as
+ a named parameter (i.e. `rset.get_entity(0, 1)` is OK but
+ `rset.get_entity(row=0, col=1)` isn't)
:type row,col: int, int
:param row,col:
@@ -421,11 +434,11 @@
return self._build_entity(row, col)
def _build_entity(self, row, col):
- """internal method to get a single entity, returns a
- partially initialized Entity instance.
+ """internal method to get a single entity, returns a partially
+ initialized Entity instance.
- partially means that only attributes selected in the RQL
- query will be directly assigned to the entity.
+ partially means that only attributes selected in the RQL query will be
+ directly assigned to the entity.
:type row,col: int, int
:param row,col:
@@ -474,24 +487,21 @@
select = rqlst
# take care, due to outer join support, we may find None
# values for non final relation
- for i, attr, role in attr_desc_iterator(select, col):
- outerselidx = rqlst.subquery_selection_index(select, i)
- if outerselidx is None:
- continue
+ for i, attr, role in attr_desc_iterator(select, col, entity.cw_col):
if role == 'subject':
rschema = eschema.subjrels[attr]
if rschema.final:
if attr == 'eid':
- entity.eid = rowvalues[outerselidx]
+ entity.eid = rowvalues[i]
else:
- entity[attr] = rowvalues[outerselidx]
+ entity.cw_attr_cache[attr] = rowvalues[i]
continue
else:
rschema = eschema.objrels[attr]
rdef = eschema.rdef(attr, role)
# only keep value if it can't be multivalued
if rdef.role_cardinality(role) in '1?':
- if rowvalues[outerselidx] is None:
+ if rowvalues[i] is None:
if role == 'subject':
rql = 'Any Y WHERE X %s Y, X eid %s'
else:
@@ -499,7 +509,7 @@
rrset = ResultSet([], rql % (attr, entity.eid))
rrset.req = req
else:
- rrset = self._build_entity(row, outerselidx).as_rset()
+ rrset = self._build_entity(row, i).as_rset()
entity.cw_set_relation_cache(attr, role, rrset)
return entity
@@ -637,8 +647,13 @@
return rhs.eval(self.args)
return None
+def _get_variable(term):
+ # XXX rewritten const
+ # use iget_nodes for (hack) case where we have things like MAX(V)
+ for vref in term.iget_nodes(nodes.VariableRef):
+ return vref.variable
-def attr_desc_iterator(rqlst, index=0):
+def attr_desc_iterator(select, selectidx, rootidx):
"""return an iterator on a list of 2-uple (index, attr_relation)
localizing attribute relations of the main variable in a result's row
@@ -649,25 +664,33 @@
a generator on (index, relation, target) describing column being
attribute of the main variable
"""
- main = rqlst.selection[index]
- for i, term in enumerate(rqlst.selection):
- if i == index:
+ rootselect = select
+ while rootselect.parent.parent is not None:
+ rootselect = rootselect.parent.parent.parent
+ rootmain = rootselect.selection[selectidx]
+ rootmainvar = _get_variable(rootmain)
+ assert rootmainvar
+ root = rootselect.parent
+ selectmain = select.selection[selectidx]
+ for i, term in enumerate(rootselect.selection):
+ rootvar = _get_variable(term)
+ if rootvar is None:
continue
- # XXX rewritten const
- # use iget_nodes for (hack) case where we have things like MAX(V)
- for vref in term.iget_nodes(nodes.VariableRef):
- var = vref.variable
- break
- else:
+ if rootvar.name == rootmainvar.name:
+ continue
+ if select is not rootselect:
+ term = select.selection[root.subquery_selection_index(select, i)]
+ var = _get_variable(term)
+ if var is None:
continue
for ref in var.references():
rel = ref.relation()
if rel is None or rel.is_types_restriction():
continue
lhs, rhs = rel.get_variable_parts()
- if main.is_equivalent(lhs):
+ if selectmain.is_equivalent(lhs):
if rhs.is_equivalent(term):
yield (i, rel.r_type, 'subject')
- elif main.is_equivalent(rhs):
+ elif selectmain.is_equivalent(rhs):
if lhs.is_equivalent(term):
yield (i, rel.r_type, 'object')
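
The rset.py hunk above introduces iter_rows_with_entities(); a minimal usage sketch, where
`req` stands for any CubicWeb request or session able to execute RQL:

# each eid column is resolved to an entity object, other columns (or columns
# that cannot be turned into entities) are yielded unchanged
rset = req.execute('Any U,G WHERE U in_group G')
for user, group in rset.iter_rows_with_entities():
    print user.login, group.name
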
diff -r 48f468f33704 -r e4580e5f0703 rtags.py
--- a/rtags.py Fri Dec 10 12:17:18 2010 +0100
+++ b/rtags.py Fri Mar 11 09:46:45 2011 +0100
@@ -105,6 +105,8 @@
def apply(self, schema, func):
for eschema in schema.entities():
+ if eschema.final:
+ continue
for rschema, tschemas, role in eschema.relation_definitions(True):
for tschema in tschemas:
if role == 'subject':
@@ -216,6 +218,9 @@
def name(self):
return self.__class__.name
+    # tag_subject_of / tag_object_of issue a warning if '*' is not given as the
+    # target type, while tag_relation handles it silently since it may be used
+    # during initialization
def tag_subject_of(self, key, tag):
subj, rtype, obj = key
if obj != '*':
@@ -232,5 +237,14 @@
self.name, rtype, obj, subj, rtype, obj)
super(NoTargetRelationTagsDict, self).tag_object_of(('*', rtype, obj), tag)
-
+ def tag_relation(self, key, tag):
+ if key[-1] == 'subject' and key[-2] != '*':
+ if isinstance(key, tuple):
+ key = list(key)
+ key[-2] = '*'
+ elif key[-1] == 'object' and key[0] != '*':
+ if isinstance(key, tuple):
+ key = list(key)
+ key[0] = '*'
+ super(NoTargetRelationTagsDict, self).tag_relation(key, tag)
set_log_methods(RelationTags, logging.getLogger('cubicweb.rtags'))
diff -r 48f468f33704 -r e4580e5f0703 schema.py
--- a/schema.py Fri Dec 10 12:17:18 2010 +0100
+++ b/schema.py Fri Mar 11 09:46:45 2011 +0100
@@ -49,16 +49,28 @@
# set of meta-relations available for every entity types
META_RTYPES = set((
'owned_by', 'created_by', 'is', 'is_instance_of', 'identity',
- 'eid', 'creation_date', 'modification_date', 'has_text', 'cwuri',
+ 'eid', 'creation_date', 'cw_source', 'modification_date', 'has_text', 'cwuri',
))
WORKFLOW_RTYPES = set(('custom_workflow', 'in_state', 'wf_info_for'))
-SYSTEM_RTYPES = set(('require_permission',)) | WORKFLOW_RTYPES
+WORKFLOW_DEF_RTYPES = set(('workflow_of', 'state_of', 'transition_of',
+ 'initial_state', 'default_workflow',
+ 'allowed_transition', 'destination_state',
+ 'from_state', 'to_state', 'condition',
+ 'subworkflow', 'subworkflow_state', 'subworkflow_exit',
+ ))
+SYSTEM_RTYPES = set(('in_group', 'require_group', 'require_permission',
+ # cwproperty
+ 'for_user',
+ )) | WORKFLOW_RTYPES
+NO_I18NCONTEXT = META_RTYPES | WORKFLOW_RTYPES
+NO_I18NCONTEXT.add('require_permission')
# set of entity and relation types used to build the schema
SCHEMA_TYPES = set((
'CWEType', 'CWRType', 'CWAttribute', 'CWRelation',
'CWConstraint', 'CWConstraintType', 'CWUniqueTogetherConstraint',
'RQLExpression',
+ 'specializes',
'relation_type', 'from_entity', 'to_entity',
'constrained_by', 'cstrtype',
'constraint_of', 'relations',
@@ -70,7 +82,9 @@
'WorkflowTransition', 'BaseTransition',
'SubWorkflowExitPoint'))
-INTERNAL_TYPES = set(('CWProperty', 'CWPermission', 'CWCache', 'ExternalUri'))
+INTERNAL_TYPES = set(('CWProperty', 'CWPermission', 'CWCache', 'ExternalUri',
+ 'CWSource', 'CWSourceHostConfig',
+))
_LOGGER = getLogger('cubicweb.schemaloader')
@@ -536,7 +550,11 @@
def add_entity_type(self, edef):
edef.name = edef.name.encode()
edef.name = bw_normalize_etype(edef.name)
- assert re.match(r'[A-Z][A-Za-z0-9]*[a-z]+[0-9]*$', edef.name), repr(edef.name)
+ if not re.match(r'[A-Z][A-Za-z0-9]*[a-z]+[0-9]*$', edef.name):
+ raise BadSchemaDefinition(
+ '%r is not a valid name for an entity type. It should start '
+ 'with an upper cased letter and be followed by at least a '
+ 'lower cased letter' % edef.name)
eschema = super(CubicWebSchema, self).add_entity_type(edef)
if not eschema.final:
# automatically add the eid relation to non final entity types
@@ -551,7 +569,11 @@
return eschema
def add_relation_type(self, rdef):
- rdef.name = rdef.name.lower().encode()
+ if not rdef.name.islower():
+ raise BadSchemaDefinition(
+ '%r is not a valid name for a relation type. It should be '
+ 'lower cased' % rdef.name)
+ rdef.name = rdef.name.encode()
rschema = super(CubicWebSchema, self).add_relation_type(rdef)
self._eid_index[rschema.eid] = rschema
return rschema
diff -r 48f468f33704 -r e4580e5f0703 schemas/base.py
--- a/schemas/base.py Fri Dec 10 12:17:18 2010 +0100
+++ b/schemas/base.py Fri Mar 11 09:46:45 2011 +0100
@@ -20,8 +20,8 @@
__docformat__ = "restructuredtext en"
_ = unicode
-from yams.buildobjs import (EntityType, RelationType, SubjectRelation,
- String, Datetime, Password)
+from yams.buildobjs import (EntityType, RelationType, RelationDefinition,
+ SubjectRelation, String, Datetime, Password)
from cubicweb.schema import (
RQLConstraint, WorkflowableEntityType, ERQLExpression, RRQLExpression,
PUB_SYSTEM_ENTITY_PERMS, PUB_SYSTEM_REL_PERMS, PUB_SYSTEM_ATTR_PERMS)
@@ -62,7 +62,7 @@
}
alias = String(fulltextindexed=True, maxsize=56)
- address = String(required=True, fulltextindexed=True,
+ address = String(required=True, fulltextindexed=True,
indexed=True, unique=True, maxsize=128)
prefered_form = SubjectRelation('EmailAddress', cardinality='?*',
description=_('when multiple addresses are equivalent \
@@ -198,6 +198,7 @@
uri = String(required=True, unique=True, maxsize=256,
description=_('the URI of the object'))
+
class same_as(RelationType):
"""generic relation to specify that an external entity represent the same
object as a local one:
@@ -216,6 +217,7 @@
# in the cube's schema.
object = 'ExternalUri'
+
class CWCache(EntityType):
"""a simple cache entity characterized by a name and
a validity date.
@@ -234,12 +236,81 @@
'delete': ('managers',),
}
- name = String(required=True, unique=True, indexed=True, maxsize=128,
+ name = String(required=True, unique=True, maxsize=128,
description=_('name of the cache'))
timestamp = Datetime(default='NOW')
-# "abtract" relation types, not used in cubicweb itself
+class CWSource(EntityType):
+ __permissions__ = {
+ 'read': ('managers', 'users', 'guests'),
+ 'add': ('managers',),
+ 'update': ('managers',),
+ 'delete': ('managers',),
+ }
+ name = String(required=True, unique=True, maxsize=128,
+ description=_('name of the source'))
+ type = String(required=True, maxsize=20, description=_('type of the source'))
+ config = String(description=_('source\'s configuration. One key=value per '
+ 'line, authorized keys depending on the '
+ 'source\'s type'),
+ __permissions__={
+ 'read': ('managers',),
+ 'update': ('managers',),
+ })
+
+
+class CWSourceHostConfig(EntityType):
+ __permissions__ = {
+ 'read': ('managers',),
+ 'add': ('managers',),
+ 'update': ('managers',),
+ 'delete': ('managers',),
+ }
+ __unique_together__ = [('match_host', 'cw_host_config_of')]
+ match_host = String(required=True, maxsize=128,
+ description=_('regexp matching host(s) to which this config applies'))
+ config = String(required=True,
+ description=_('Source\'s configuration for a particular host. '
+ 'One key=value per line, authorized keys '
+ 'depending on the source\'s type, overriding '
+ 'values defined on the source.'),
+ __permissions__={
+ 'read': ('managers',),
+ 'update': ('managers',),
+ })
+
+
+class cw_host_config_of(RelationDefinition):
+ subject = 'CWSourceHostConfig'
+ object = 'CWSource'
+ cardinality = '1*'
+ composite = 'object'
+ inlined = True
+
+class cw_source(RelationDefinition):
+ __permissions__ = {
+ 'read': ('managers', 'users', 'guests'),
+ 'add': (),
+ 'delete': (),
+ }
+ subject = '*'
+ object = 'CWSource'
+ cardinality = '1*'
+
+class cw_support(RelationDefinition):
+ subject = 'CWSource'
+ object = ('CWEType', 'CWRType')
+
+class cw_dont_cross(RelationDefinition):
+ subject = 'CWSource'
+ object = 'CWRType'
+
+class cw_may_cross(RelationDefinition):
+ subject = 'CWSource'
+ object = 'CWRType'
+
+# "abtract" relation types, no definition in cubicweb itself ###################
class identical_to(RelationType):
"""identical to"""
diff -r 48f468f33704 -r e4580e5f0703 schemas/bootstrap.py
--- a/schemas/bootstrap.py Fri Dec 10 12:17:18 2010 +0100
+++ b/schemas/bootstrap.py Fri Mar 11 09:46:45 2011 +0100
@@ -159,10 +159,10 @@
__permissions__ = PUB_SYSTEM_ENTITY_PERMS
constraint_of = SubjectRelation('CWEType', cardinality='1*', composite='object',
inlined=True)
- relations = SubjectRelation(('CWAttribute', 'CWRelation'), cardinality='+*',
- constraints=[RQLConstraint(
- 'O from_entity X, S constraint_of X, O relation_type T, '
- 'T final TRUE OR (T final FALSE AND T inlined TRUE)')])
+ relations = SubjectRelation('CWRType', cardinality='+*',
+ constraints=[RQLConstraint(
+ 'S constraint_of ET, RDEF relation_type O, RDEF from_entity ET, '
+ 'O final TRUE OR (O final FALSE AND O inlined TRUE)')])
class CWConstraintType(EntityType):
diff -r 48f468f33704 -r e4580e5f0703 schemas/workflow.py
--- a/schemas/workflow.py Fri Dec 10 12:17:18 2010 +0100
+++ b/schemas/workflow.py Fri Mar 11 09:46:45 2011 +0100
@@ -22,7 +22,7 @@
_ = unicode
from yams.buildobjs import (EntityType, RelationType, SubjectRelation,
- RichString, String)
+ RichString, String, Int)
from cubicweb.schema import RQLConstraint, RQLUniqueConstraint
from cubicweb.schemas import (META_ETYPE_PERMS, META_RTYPE_PERMS,
HOOKS_RTYPE_PERMS)
@@ -159,13 +159,21 @@
'delete': (), # XXX should we allow managers to delete TrInfo?
'update': ('managers', 'owners',),
}
-
- from_state = SubjectRelation('State', cardinality='1*')
- to_state = SubjectRelation('State', cardinality='1*')
+    # The unique_together constraint ensures that 2 repositories sharing the
+    # db won't be able to fire a transition simultaneously on the same entity.
+    # tr_count is filled in the FireTransitionHook with the number of TrInfo
+    # already attached to the entity on which we attempt to fire a transition.
+    # In other words, it contains the rank of the TrInfo for that entity, and
+    # the constraint says we cannot have 2 TrInfo with the same rank.
+ __unique_together__ = [('tr_count', 'wf_info_for')]
+ from_state = SubjectRelation('State', cardinality='1*', inlined=True)
+ to_state = SubjectRelation('State', cardinality='1*', inlined=True)
# make by_transition optional because we want to allow managers to set
# entity into an arbitrary state without having to respect wf transition
by_transition = SubjectRelation('BaseTransition', cardinality='?*')
comment = RichString(fulltextindexed=True)
+ tr_count = Int(description='autocomputed attribute used to ensure transition coherency')
# get actor and date time using owned_by and creation_date
class from_state(RelationType):
diff -r 48f468f33704 -r e4580e5f0703 selectors.py
--- a/selectors.py Fri Dec 10 12:17:18 2010 +0100
+++ b/selectors.py Fri Mar 11 09:46:45 2011 +0100
@@ -60,9 +60,9 @@
.. sourcecode:: python
- class RSSIconBox(ExtResourcesBoxTemplate):
+ class RSSIconBox(box.Box):
''' just display the RSS icon on uniform result set '''
- __select__ = ExtResourcesBoxTemplate.__select__ & non_final_entity()
+ __select__ = box.Box.__select__ & non_final_entity()
It takes into account:
@@ -479,6 +479,31 @@
return score + 0.5
return score
+
+class configuration_values(Selector):
+ """Return 1 if the instance has an option set to a given value(s) in its
+ configuration file.
+ """
+ # XXX this selector could be evaluated on startup
+ def __init__(self, key, values):
+ self._key = key
+ if not isinstance(values, (tuple, list)):
+ values = (values,)
+ self._values = frozenset(values)
+
+ @lltrace
+ def __call__(self, cls, req, **kwargs):
+ try:
+ return self._score
+ except AttributeError:
+ if req is None:
+ config = kwargs['repo'].config
+ else:
+ config = req.vreg.config
+ self._score = config[self._key] in self._values
+ return self._score
+
+
# rset selectors ##############################################################
@objectify_selector
@@ -526,6 +551,8 @@
"""Return 1 if the result set is of size 1, or greater but a specific row in
the result set is specified ('row' argument).
"""
+ if rset is None and 'entity' in kwargs:
+ return 1
if rset is not None and (row is not None or rset.rowcount == 1):
return 1
return 0
@@ -534,12 +561,12 @@
class multi_lines_rset(Selector):
"""Return 1 if the operator expression matches between `num` elements
in the result set and the `expected` value if defined.
-
+
By default, multi_lines_rset(expected) matches equality expression:
`nb` row(s) in result set equals to expected value
But, you can perform richer comparisons by overriding default operator:
multi_lines_rset(expected, operator.gt)
-
+
If `expected` is None, return 1 if the result set contains *at least*
two rows.
If rset is None, return 0.
@@ -605,7 +632,7 @@
@lltrace
def sorted_rset(cls, req, rset=None, **kwargs):
"""Return 1 for sorted result set (e.g. from an RQL query containing an
- :ref:ORDERBY clause), with exception that it will return 0 if the rset is
+ ORDERBY clause), with exception that it will return 0 if the rset is
'ORDERBY FTIRANK(VAR)' (eg sorted by rank value of the has_text index).
"""
if rset is None:
@@ -752,7 +779,11 @@
def score_class(self, eclass, req):
# cache on vreg to avoid reloading issues
- cache = req.vreg._is_instance_selector_cache
+ try:
+ cache = req.vreg._is_instance_selector_cache
+ except AttributeError:
+ # XXX 'before-registry-reset' not called for db-api connections
+ cache = req.vreg._is_instance_selector_cache = {}
try:
expected_eclasses = cache[self]
except KeyError:
@@ -788,6 +819,9 @@
This is a very useful selector that will usually interest you since it
allows a lot of things without having to write a specific selector.
+    The function can return an arbitrary value which will be cast to an integer
+    value at the end.
+
See :class:`~cubicweb.selectors.EntitySelector` documentation for entity
lookup / score rules according to the input context.
"""
@@ -802,21 +836,6 @@
return 1
self.score_entity = intscore
-class attribute_edited(EntitySelector):
- """Scores if the specified attribute has been edited
- This is useful for selection of forms by the edit controller.
- The initial use case is on a form, in conjunction with match_transition,
- which will not score at edit time::
-
- is_instance('Version') & (match_transition('ready') |
- attribute_edited('publication_date'))
- """
- def __init__(self, attribute, once_is_enough=False):
- super(attribute_edited, self).__init__(once_is_enough)
- self._attribute = attribute
-
- def score_entity(self, entity):
- return eid_param(role_name(self._attribute, 'subject'), entity.eid) in entity._cw.form
class has_mimetype(EntitySelector):
"""Return 1 if the entity adapt to IDownloadable and has the given MIME type.
@@ -1128,27 +1147,121 @@
must use 'X' variable to represent the context entity and may use 'U' to
represent the request's user.
+ .. warning::
+       If you are simply testing the value of some attribute/relation of the
+       context entity (X), you should rather use the :class:`score_entity`
+       selector, which will benefit from the ORM's request entities cache.
+
See :class:`~cubicweb.selectors.EntitySelector` documentation for entity
lookup / score rules according to the input context.
"""
def __init__(self, expression, once_is_enough=False):
super(rql_condition, self).__init__(once_is_enough)
if 'U' in frozenset(split_expression(expression)):
- rql = 'Any X WHERE X eid %%(x)s, U eid %%(u)s, %s' % expression
+ rql = 'Any COUNT(X) WHERE X eid %%(x)s, U eid %%(u)s, %s' % expression
else:
- rql = 'Any X WHERE X eid %%(x)s, %s' % expression
+ rql = 'Any COUNT(X) WHERE X eid %%(x)s, %s' % expression
self.rql = rql
- def __repr__(self):
- return u'' % (self.rql, id(self))
+ def __str__(self):
+ return '%s(%r)' % (self.__class__.__name__, self.rql)
def score(self, req, rset, row, col):
try:
- return len(req.execute(self.rql, {'x': rset[row][col],
- 'u': req.user.eid}))
+ return req.execute(self.rql, {'x': rset[row][col],
+ 'u': req.user.eid})[0][0]
except Unauthorized:
return 0
+
+class is_in_state(score_entity):
+ """Return 1 if entity is in one of the states given as argument list
+
+ You should use this instead of your own :class:`score_entity` selector to
+ avoid some gotchas:
+
+ * possible views gives a fake entity with no state
+ * you must use the latest tr info thru the workflow adapter for repository
+ side checking of the current state
+
+ In debug mode, this selector can raise:
+ :raises: :exc:`ValueError` for unknown states names
+ (etype workflow only not checked in custom workflow)
+
+ :rtype: int
+ """
+ def __init__(self, *expected):
+ assert expected, self
+ self.expected = frozenset(expected)
+ def score(entity, expected=self.expected):
+ adapted = entity.cw_adapt_to('IWorkflowable')
+ # in debug mode only (time consuming)
+ if entity._cw.vreg.config.debugmode:
+ # validation can only be done for generic etype workflow because
+ # expected transition list could have been changed for a custom
+ # workflow (for the current entity)
+ if not entity.custom_workflow:
+ self._validate(adapted)
+ return self._score(adapted)
+ super(is_in_state, self).__init__(score)
+
+ def _score(self, adapted):
+ trinfo = adapted.latest_trinfo()
+        if trinfo is None: # entity is probably in its initial state
+ statename = adapted.state
+ else:
+ statename = trinfo.new_state.name
+ return statename in self.expected
+
+ def _validate(self, adapted):
+ wf = adapted.current_workflow
+ valid = [n.name for n in wf.reverse_state_of]
+ unknown = sorted(self.expected.difference(valid))
+ if unknown:
+ raise ValueError("%s: unknown state(s): %s"
+ % (wf.name, ",".join(unknown)))
+
+ def __str__(self):
+ return '%s(%s)' % (self.__class__.__name__,
+ ','.join(str(s) for s in self.expected))
+
+
+class on_transition(is_in_state):
+ """Return 1 if entity is in one of the transitions given as argument list
+
+    Especially useful to match the fired transition to enable notifications when
+    your workflow allows several transitions to the same state.
+
+    Note that this selector will not be triggered if the workflow's
+    `change_state` adapter method is used.
+
+ You should use this instead of your own :class:`score_entity` selector to
+ avoid some gotchas:
+
+ * possible views gives a fake entity with no state
+ * you must use the latest tr info thru the workflow adapter for repository
+ side checking of the current state
+
+ In debug mode, this selector can raise:
+ :raises: :exc:`ValueError` for unknown transition names
+        (checked for the etype workflow only, not for custom workflows)
+
+ :rtype: int
+ """
+ def _score(self, adapted):
+ trinfo = adapted.latest_trinfo()
+ if trinfo and trinfo.by_transition:
+ return trinfo.by_transition[0].name in self.expected
+
+ def _validate(self, adapted):
+ wf = adapted.current_workflow
+ valid = [n.name for n in wf.reverse_transition_of]
+ unknown = sorted(self.expected.difference(valid))
+ if unknown:
+ raise ValueError("%s: unknown transition(s): %s"
+ % (wf.name, ",".join(unknown)))
+
+
# logged user selectors ########################################################
@objectify_selector
@@ -1183,7 +1296,6 @@
"""
return ~ authenticated_user()
-
class match_user_groups(ExpectedValueSelector):
"""Return a non-zero score if request's user is in at least one of the
groups given as initializer argument. Returned score is the number of groups
@@ -1213,9 +1325,9 @@
score = all(user.owns(r[col]) for r in rset)
return score
-
# Web request selectors ########################################################
+# XXX deprecate
@objectify_selector
@lltrace
def primary_view(cls, req, view=None, **kwargs):
@@ -1233,6 +1345,15 @@
return 1
+@objectify_selector
+@lltrace
+def contextual(cls, req, view=None, **kwargs):
+ """Return 1 if view's contextual property is true"""
+ if view is not None and view.contextual:
+ return 1
+ return 0
+
+
class match_view(ExpectedValueSelector):
"""Return 1 if a view is specified an as its registry id is in one of the
expected view id given to the initializer.
@@ -1244,6 +1365,19 @@
return 1
+class match_context(ExpectedValueSelector):
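+    """Return 1 if the `context` argument given in the input context is one of
+    the expected values given to the initializer.
+    """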
+
+ @lltrace
+ def __call__(self, cls, req, context=None, **kwargs):
+ try:
+ if not context in self.expected:
+ return 0
+ except AttributeError:
+ return 1 # class doesn't care about search state, accept it
+ return 1
+
+
+# XXX deprecate
@objectify_selector
@lltrace
def match_context_prop(cls, req, context=None, **kwargs):
@@ -1264,8 +1398,6 @@
return 1
propval = req.property_value('%s.%s.context' % (cls.__registry__,
cls.__regid__))
- if not propval:
- propval = cls.context
if propval and context != propval:
return 0
return 1
@@ -1347,43 +1479,62 @@
return 0
-# Other selectors ##############################################################
+class attribute_edited(EntitySelector):
+ """Scores if the specified attribute has been edited This is useful for
+ selection of forms by the edit controller.
+
+ The initial use case is on a form, in conjunction with match_transition,
+ which will not score at edit time::
+
+ is_instance('Version') & (match_transition('ready') |
+ attribute_edited('publication_date'))
+ """
+ def __init__(self, attribute, once_is_enough=False):
+ super(attribute_edited, self).__init__(once_is_enough)
+ self._attribute = attribute
+
+ def score_entity(self, entity):
+ return eid_param(role_name(self._attribute, 'subject'), entity.eid) in entity._cw.form
+# Other selectors ##############################################################
+
+# XXX deprecated ? maybe use on_transition selector instead ?
class match_transition(ExpectedValueSelector):
- """Return 1 if `transition` argument is found in the input context
- which has a `.name` attribute matching one of the expected names
- given to the initializer
+ """Return 1 if `transition` argument is found in the input context which has
+ a `.name` attribute matching one of the expected names given to the
+ initializer.
"""
@lltrace
def __call__(self, cls, req, transition=None, **kwargs):
# XXX check this is a transition that apply to the object?
+ if transition is None:
+ treid = req.form.get('treid', None)
+ if treid:
+ transition = req.entity_from_eid(treid)
if transition is not None and getattr(transition, 'name', None) in self.expected:
return 1
return 0
-class is_in_state(score_entity):
- """return 1 if entity is in one of the states given as argument list
-
- you should use this instead of your own :class:`score_entity` selector to
- avoid some gotchas:
- * possible views gives a fake entity with no state
- * you must use the latest tr info, not entity.in_state for repository side
- checking of the current state
+class match_exception(ExpectedValueSelector):
+ """Return 1 if a view is specified an as its registry id is in one of the
+ expected view id given to the initializer.
"""
- def __init__(self, *states):
- def score(entity, states=set(states)):
- trinfo = entity.cw_adapt_to('IWorkflowable').latest_trinfo()
- try:
- return trinfo.new_state.name in states
- except AttributeError:
- return None
- super(is_in_state, self).__init__(score)
+ def __init__(self, *expected):
+ assert expected, self
+ self.expected = expected
+
+ @lltrace
+ def __call__(self, cls, req, exc=None, **kwargs):
+ if exc is not None and isinstance(exc, self.expected):
+ return 1
+ return 0
+
@objectify_selector
def debug_mode(cls, req, rset=None, **kwargs):
- """Return 1 if running in debug mode"""
+ """Return 1 if running in debug mode."""
return req.vreg.config.debugmode and 1 or 0
## deprecated stuff ############################################################
diff -r 48f468f33704 -r e4580e5f0703 server/__init__.py
--- a/server/__init__.py Fri Dec 10 12:17:18 2010 +0100
+++ b/server/__init__.py Fri Mar 11 09:46:45 2011 +0100
@@ -19,8 +19,8 @@
(repository) side
This module contains functions to initialize a new repository.
+"""
-"""
from __future__ import with_statement
__docformat__ = "restructuredtext en"
@@ -61,7 +61,6 @@
else:
DEBUG |= debugmode
-
class debugged(object):
"""repository debugging context manager / decorator
@@ -122,7 +121,7 @@
with the minimal set of entities (ie at least the schema, base groups and
a initial user)
"""
- from cubicweb.dbapi import in_memory_cnx
+ from cubicweb.dbapi import in_memory_repo_cnx
from cubicweb.server.repository import Repository
from cubicweb.server.utils import manager_userpasswd
from cubicweb.server.sqlutils import sqlexec, sqlschema, sqldropschema
@@ -132,7 +131,6 @@
config.consider_user_state = False
config.set_language = False
# only enable the system source at initialization time
- config.enabled_sources = ('system',)
repo = Repository(config, vreg=vreg)
schema = repo.schema
sourcescfg = config.sources()
@@ -162,6 +160,12 @@
sqlcnx.commit()
sqlcnx.close()
session = repo.internal_session()
+ # insert entity representing the system source
+ ssource = session.create_entity('CWSource', type=u'native', name=u'system')
+ repo.system_source.eid = ssource.eid
+ session.execute('SET X cw_source X WHERE X eid %(x)s', {'x': ssource.eid})
+ # insert base groups and default admin
+ print '-> inserting default user and default groups.'
try:
login = unicode(sourcescfg['admin']['login'])
pwd = sourcescfg['admin']['password']
@@ -171,17 +175,18 @@
login, pwd = manager_userpasswd(msg=msg, confirm=True)
else:
login, pwd = unicode(source['db-user']), source['db-password']
- print '-> inserting default user and default groups.'
# sort for eid predicatability as expected in some server tests
for group in sorted(BASE_GROUPS):
- session.execute('INSERT CWGroup X: X name %(name)s',
- {'name': unicode(group)})
- create_user(session, login, pwd, 'managers')
+ session.create_entity('CWGroup', name=unicode(group))
+ admin = create_user(session, login, pwd, 'managers')
+ session.execute('SET X owned_by U WHERE X is IN (CWGroup,CWSource), U eid %(u)s',
+ {'u': admin.eid})
session.commit()
repo.shutdown()
# reloging using the admin user
config._cubes = None # avoid assertion error
- repo, cnx = in_memory_cnx(config, login, password=pwd)
+ repo, cnx = in_memory_repo_cnx(config, login, password=pwd)
+ repo.system_source.eid = ssource.eid # redo this manually
# trigger vreg initialisation of entity classes
config.cubicweb_appobject_path = set(('entities',))
config.cube_appobject_path = set(('entities',))
@@ -197,13 +202,7 @@
initialize_schema(config, schema, handler)
# yoo !
cnx.commit()
- config.enabled_sources = None
- for uri, source_config in config.sources().items():
- if uri in ('admin', 'system'):
- # not an actual source or init_creating already called
- continue
- source = repo.get_source(uri, source_config)
- source.init_creating()
+ repo.system_source.init_creating()
cnx.commit()
cnx.close()
session.close()
diff -r 48f468f33704 -r e4580e5f0703 server/checkintegrity.py
--- a/server/checkintegrity.py Fri Dec 10 12:17:18 2010 +0100
+++ b/server/checkintegrity.py Fri Mar 11 09:46:45 2011 +0100
@@ -36,6 +36,12 @@
from cubicweb.server.sqlutils import SQL_PREFIX
from cubicweb.server.session import security_enabled
+def notify_fixed(fix):
+ if fix:
+ print >> sys.stderr, ' [FIXED]'
+ else:
+ print >> sys.stderr
+
def has_eid(session, sqlcursor, eid, eids):
"""return true if the eid is a valid eid"""
if eid in eids:
@@ -131,8 +137,8 @@
# attribute to their current value
source = repo.system_source
for eschema in etypes:
- for entity in session.execute('Any X WHERE X is %s' % eschema).entities():
- source.fti_index_entity(session, entity)
+ rset = session.execute('Any X WHERE X is %s' % eschema)
+ source.fti_index_entities(session, rset.entities())
if withpb:
pb.update()
@@ -169,9 +175,7 @@
print >> sys.stderr, msg % eid,
if fix:
session.system_sql('DELETE FROM appears WHERE uid=%s;' % eid)
- print >> sys.stderr, ' [FIXED]'
- else:
- print >> sys.stderr
+ notify_fixed(fix)
def check_entities(schema, session, eids, fix=1):
@@ -185,9 +189,7 @@
print >> sys.stderr, msg % eid,
if fix:
session.system_sql('DELETE FROM entities WHERE eid=%s;' % eid)
- print >> sys.stderr, ' [FIXED]'
- else:
- print >> sys.stderr
+ notify_fixed(fix)
print 'Checking entities tables'
for eschema in schema.entities():
if eschema.final:
@@ -204,22 +206,19 @@
print >> sys.stderr, msg % (eid, eschema.type),
if fix:
session.system_sql('DELETE FROM %s WHERE %s=%s;' % (table, column, eid))
- print >> sys.stderr, ' [FIXED]'
- else:
- print >> sys.stderr
+ notify_fixed(fix)
def bad_related_msg(rtype, target, eid, fix):
msg = ' A relation %s with %s eid %s exists but no such entity in sources'
print >> sys.stderr, msg % (rtype, target, eid),
- if fix:
- print >> sys.stderr, ' [FIXED]'
- else:
- print >> sys.stderr
+ notify_fixed(fix)
def check_relations(schema, session, eids, fix=1):
- """check all relations registered in the repo system table"""
+ """check that eids referenced by relations are registered in the repo system
+ table
+ """
print 'Checking relations'
for rschema in schema.relations():
if rschema.final or rschema in PURE_VIRTUAL_RTYPES:
@@ -265,6 +264,54 @@
session.system_sql(sql)
+def check_mandatory_relations(schema, session, eids, fix=1):
+ """check entities missing some mandatory relation"""
+ print 'Checking mandatory relations'
+ for rschema in schema.relations():
+ if rschema.final or rschema in PURE_VIRTUAL_RTYPES:
+ continue
+ smandatory = set()
+ omandatory = set()
+ for rdef in rschema.rdefs.values():
+ if rdef.cardinality[0] in '1+':
+ smandatory.add(rdef.subject)
+ if rdef.cardinality[1] in '1+':
+ omandatory.add(rdef.object)
+ for role, etypes in (('subject', smandatory), ('object', omandatory)):
+ for etype in etypes:
+ if role == 'subject':
+ rql = 'Any X WHERE NOT X %s Y, X is %s' % (rschema, etype)
+ else:
+ rql = 'Any X WHERE NOT Y %s X, X is %s' % (rschema, etype)
+ for entity in session.execute(rql).entities():
+ print >> sys.stderr, '%s #%s is missing mandatory %s relation %s' % (
+ entity.__regid__, entity.eid, role, rschema)
+ if fix:
+ #if entity.cw_describe()['source']['uri'] == 'system': XXX
+ entity.delete()
+ notify_fixed(fix)
+
+
+def check_mandatory_attributes(schema, session, eids, fix=1):
+ """check for entities stored in the system source missing some mandatory
+ attribute
+ """
+ print 'Checking mandatory attributes'
+ for rschema in schema.relations():
+ if not rschema.final or rschema in VIRTUAL_RTYPES:
+ continue
+ for rdef in rschema.rdefs.values():
+ if rdef.cardinality[0] in '1+':
+ rql = 'Any X WHERE X %s NULL, X is %s, X cw_source S, S name "system"' % (
+ rschema, rdef.subject)
+ for entity in session.execute(rql).entities():
+ print >> sys.stderr, '%s #%s is missing mandatory attribute %s' % (
+ entity.__regid__, entity.eid, rschema)
+ if fix:
+ entity.delete()
+ notify_fixed(fix)
+
+
def check_metadata(schema, session, eids, fix=1):
"""check entities has required metadata
@@ -287,9 +334,7 @@
session.system_sql("UPDATE %s SET %s=%%(v)s WHERE %s=%s ;"
% (table, column, eidcolumn, eid),
{'v': default})
- print >> sys.stderr, ' [FIXED]'
- else:
- print >> sys.stderr
+ notify_fixed(fix)
cursor = session.system_sql('SELECT MIN(%s) FROM %sCWUser;' % (eidcolumn,
SQL_PREFIX))
default_user_eid = cursor.fetchone()[0]
@@ -305,9 +350,7 @@
if fix:
session.system_sql('INSERT INTO %s_relation VALUES (%s, %s) ;'
% (rel, eid, default))
- print >> sys.stderr, ' [FIXED]'
- else:
- print >> sys.stderr
+ notify_fixed(fix)
def check(repo, cnx, checks, reindex, fix, withpb=True):
@@ -336,6 +379,11 @@
cnx.commit()
+def info(msg, *args):
+ if args:
+ msg = msg % args
+ print 'INFO: %s' % msg
+
def warning(msg, *args):
if args:
msg = msg % args
@@ -374,13 +422,13 @@
# check relation in dont_cross_relations aren't in support_relations
for rschema in mapping['dont_cross_relations']:
if rschema in mapping['support_relations']:
- warning('relation %s is in dont_cross_relations and in support_relations',
- rschema)
+ info('relation %s is in dont_cross_relations and in support_relations',
+ rschema)
# check relation in cross_relations are in support_relations
for rschema in mapping['cross_relations']:
if rschema not in mapping['support_relations']:
- warning('relation %s is in cross_relations but not in support_relations',
- rschema)
+ info('relation %s is in cross_relations but not in support_relations',
+ rschema)
# check for relation in both cross_relations and dont_cross_relations
for rschema in mapping['cross_relations'] & mapping['dont_cross_relations']:
error('relation %s is in both cross_relations and dont_cross_relations',
@@ -410,7 +458,7 @@
if role == 'subject' and rschema.inlined:
error('inlined relation %s of %s should be supported',
rschema, eschema)
- elif not somethingprinted and rschema not in seen:
+ elif not somethingprinted and rschema not in seen and rschema not in mapping['cross_relations']:
print 'you may want to specify something for %s' % rschema
seen.add(rschema)
else:
diff -r 48f468f33704 -r e4580e5f0703 server/hook.py
--- a/server/hook.py Fri Dec 10 12:17:18 2010 +0100
+++ b/server/hook.py Fri Mar 11 09:46:45 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -15,37 +15,233 @@
#
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
-"""Hooks management
+"""
+Generalities
+------------
+
+Paraphrasing the `emacs`_ documentation, let us say that hooks are an important
+mechanism for customizing an application. A hook is basically a list of
+functions to be called on some well-defined occasion (this is called `running
+the hook`).
+
+.. _`emacs`: http://www.gnu.org/software/emacs/manual/html_node/emacs/Hooks.html
+
+Hooks
+~~~~~
+
+In |cubicweb|, hooks are subclasses of the :class:`~cubicweb.server.hook.Hook`
+class. They are selected over a set of pre-defined `events` (and possibly more
+conditions, hooks being selectable appobjects like views and components). They
+should implement a :meth:`~cubicweb.server.hook.Hook.__call__` method that will
+be called when the hook is triggered.
-This module defined the `Hook` class and registry and a set of abstract classes
-for operations.
+There are two families of events: data events (before / after any individual
+update of an entity / or a relation in the repository) and server events (such
+as server startup or shutdown). In a typical application, most of the hooks are
+defined over data events.
+
+Also, some :class:`~cubicweb.server.hook.Operation` may be registered by hooks,
+which will be fired when the transaction is committed or rolled back.
+
+The purpose of data event hooks is usually to complement the data model as
+defined in the schema, which is static by nature and only provides a restricted
+built-in set of dynamic constraints, with dynamic or value-driven behaviours.
+For instance they can serve the following purposes:
+
+* enforcing constraints that the static schema cannot express (spanning several
+ entities/relations, exotic value ranges and cardinalities, etc.)
+
+* implementing computed attributes
+
+It is functionally equivalent to a `database trigger`_, except that database
+trigger definition languages are not standardized, hence not portable (for
+instance, PL/SQL works with Oracle and PostgreSQL but not SQL Server or SQLite).
+
+.. _`database trigger`: http://en.wikipedia.org/wiki/Database_trigger
-Hooks are called before / after any individual update of entities / relations
-in the repository and on special events such as server startup or shutdown.
+.. hint::
+
+ It is a good practice to write unit tests for each hook. See an example in
+ :ref:`hook_test`
+
+Operations
+~~~~~~~~~~
+
+Operations are subclasses of the :class:`~cubicweb.server.hook.Operation` class
+that may be created by hooks and scheduled to happen just before (or after) the
+`precommit`, `postcommit` or `rollback` event. Hooks are fired immediately on
+data operations, and it is sometimes necessary to delay the actual work until
+all other hooks have run. Also, while the order of execution of hooks is data
+dependent (and thus hard to predict), it is possible to force an order on
+operations.
+
+Operations may be used to:
+
+* implement a validation check which requires that all relations already be
+  set on an entity (see the sketch after this list)
+
+* process various side effects associated with a transaction such as filesystem
+  updates, mail notifications, etc.
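+
+For instance, here is a minimal sketch of a hook delaying a consistency check to
+`precommit` time (the ``Invoice`` entity type, the ``line_of`` relation and the
+regids are hypothetical, they are not part of |cubicweb| itself):
+
+.. sourcecode:: python
+
+    from cubicweb import ValidationError
+    from cubicweb.selectors import is_instance
+    from cubicweb.server.hook import Hook, Operation
+
+    class CheckInvoiceLinesOp(Operation):
+        """raise ValidationError at commit time if the invoice has no line"""
+        def precommit_event(self):
+            # runs once every other hook of the transaction has been fired
+            rset = self.session.execute(
+                'Any COUNT(L) WHERE L line_of X, X eid %(x)s',
+                {'x': self.invoice_eid})
+            if not rset[0][0]:
+                raise ValidationError(self.invoice_eid,
+                                      {None: 'an invoice needs at least one line'})
+
+    class InvoiceAddedHook(Hook):
+        __regid__ = 'myapp.invoice_added'
+        __select__ = Hook.__select__ & is_instance('Invoice')
+        events = ('after_add_entity',)
+
+        def __call__(self):
+            # keyword arguments become attributes of the operation instance
+            CheckInvoiceLinesOp(self._cw, invoice_eid=self.entity.eid)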
-Operations may be registered by hooks during a transaction, which will be
-fired when the pool is commited or rollbacked.
+Events
+------
+
+Hooks are mostly defined and used to handle `dataflow`_ operations. It means
+that as data gets in (entities added or updated, relations set or unset),
+specific events are issued and the hooks matching these events are called.
+
+You can get the event that triggered a hook by accessing its :attr:`event`
+attribute.
+
+.. _`dataflow`: http://en.wikipedia.org/wiki/Dataflow
-Entity hooks (eg before_add_entity, after_add_entity, before_update_entity,
-after_update_entity, before_delete_entity, after_delete_entity) all have an
-`entity` attribute
+Entity modification related events
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+When called for one of these events, the hook will have an `entity` attribute
+containing the entity instance.
+
+* `before_add_entity`, `before_update_entity`:
+
+ - on those events, you can check what attributes of the entity are modified in
+ `entity.cw_edited` (by definition the database is not yet updated in a before
+ event)
+
+ - you are allowed to further modify the entity before database
+ operations, using the dictionary notation on `cw_edited`. By doing
+ this, you'll avoid the need for a whole new rql query processing;
+ the only difference is that the underlying backend query (eg
+ usually sql) will contain the additional data. For example:
+
+ .. sourcecode:: python
+
+ self.entity.set_attributes(age=42)
+
+ will set the `age` attribute of the entity to 42. But to do so, it will
+ generate a rql query that will have to be processed, then trigger some
+ hooks, and so on (potentially leading to infinite hook loops or other
+ awkward situations). You can avoid this by doing the modification this way:
+
+ .. sourcecode:: python
+
+ self.entity.cw_edited['age'] = 42
+
+ Here the attribute will simply be edited in the same query as the one
+ that triggered the hook.
-Relation (eg before_add_relation, after_add_relation, before_delete_relation,
-after_delete_relation) all have `eidfrom`, `rtype`, `eidto` attributes.
+ Similarly, removing an attribute from `cw_edited` will cancel its
+ modification.
+
+ - on the `before_update_entity` event, you can access old and new values in
+ the hook by using `entity.cw_edited.oldnewvalue(attr)` (see the sketch at
+ the end of this section)
+
+
+* `after_add_entity`, `after_update_entity`
+
+ - on those events, you can still check which attributes of the entity were
+ modified in `entity.cw_edited`, but you can no longer get the old value,
+ nor modify it.
+
+* `before_delete_entity`, `after_delete_entity`
+
+ - on those events, the entity has no `cw_edited` set.
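+
+Putting these rules together, here is a minimal sketch of a `before` hook (the
+``Person`` entity type and its ``login`` attribute are hypothetical):
+
+.. sourcecode:: python
+
+    from cubicweb.selectors import is_instance
+    from cubicweb.server.hook import Hook
+
+    class NormalizeLoginHook(Hook):
+        __regid__ = 'myapp.normalize_login'
+        __select__ = Hook.__select__ & is_instance('Person')
+        events = ('before_add_entity', 'before_update_entity')
+
+        def __call__(self):
+            edited = self.entity.cw_edited
+            if 'login' in edited:
+                # edit the value in place: no additional rql query is issued
+                edited['login'] = edited['login'].strip().lower()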
+
+
+Relation modification related events
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+When called for one of these events, the hook will have `eidfrom`, `rtype` and
+`eidto` attributes containing respectively the eid of the subject entity, the
+relation type and the eid of the object entity.
+
+* `before_add_relation`, `before_delete_relation`
+
+ - on those events, you can still get the original relation by issuing a rql query
+
+* `after_add_relation`, `after_delete_relation`
+
+This is an occasion to remind us that relations support the add / delete
+operations, but not update.
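+
+A minimal sketch (the ``filed_under`` relation type and the regid are
+hypothetical):
+
+.. sourcecode:: python
+
+    from cubicweb.server.hook import Hook, match_rtype
+
+    class FiledUnderAddedHook(Hook):
+        __regid__ = 'myapp.filed_under_added'
+        __select__ = Hook.__select__ & match_rtype('filed_under')
+        events = ('after_add_relation',)
+
+        def __call__(self):
+            # eidfrom / rtype / eidto describe the relation that was just added
+            self.info('relation %s added from #%s to #%s',
+                      self.rtype, self.eidfrom, self.eidto)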
+
+
+Non data events
+~~~~~~~~~~~~~~~
-Server start/maintenance/stop hooks (eg server_startup, server_maintenance,
-server_shutdown) have a `repo` attribute, but *their `_cw` attribute is None*.
-The `server_startup` is called on regular startup, while `server_maintenance`
-is called on cubicweb-ctl upgrade or shell commands. `server_shutdown` is
-called anyway.
+Hooks called on server start/maintenance/stop events (eg `server_startup`,
+`server_maintenance`, `server_shutdown`) have a `repo` attribute, but *their
+`_cw` attribute is None*. The `server_startup` hook is called on regular
+startup, while `server_maintenance` is called on cubicweb-ctl upgrade or shell
+commands. `server_shutdown` is called in both cases.
+
+Hooks called on backup/restore events (eg 'server_backup', 'server_restore')
+have `repo` and `timestamp` attributes, but *their `_cw` attribute is None*.
+
+Hooks called on session events (eg `session_open`, `session_close`) have no
+special attribute.
+
+
+API
+---
+
+Hooks control
+~~~~~~~~~~~~~
+
+It is sometimes convenient to explicitly enable or disable some hooks, for
+instance to disable some integrity checking hooks. This can be controlled
+finely through the `category` class attribute, which is a string giving a
+category name. One can then use the
+:class:`~cubicweb.server.session.hooks_control` context manager to explicitly
+enable or disable some categories.
+
+.. autoclass:: cubicweb.server.session.hooks_control
+
+
+The existing categories are:
+
+* ``security``, security checking hooks
+
+* ``workflow``, workflow handling hooks
-Backup/restore hooks (eg server_backup, server_restore) have a `repo` and a
-`timestamp` attributes, but *their `_cw` attribute is None*.
+* ``metadata``, hooks setting meta-data on newly created entities
+
+* ``notification``, email notification hooks
+
+* ``integrity``, data integrity checking hooks
+
+* ``activeintegrity``, data integrity consistency hooks, that you should **never**
+ want to disable
+
+* ``syncsession``, hooks synchronizing existing sessions
+
+* ``syncschema``, hooks synchronizing instance schema (including the physical database)
+
+* ``email``, email address handling hooks
+
+* ``bookmark``, bookmark entities handling hooks
+
-Session hooks (eg session_open, session_close) have no special attribute.
+Nothing precludes one from inventing new categories and using the
+:class:`~cubicweb.server.session.hooks_control` context manager to filter them
+in or out. Note that ending the transaction with commit() or rollback() will
+restore the hooks.
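+
+A minimal sketch, assuming the `HOOKS_ALLOW_ALL` mode disables only the listed
+categories for the duration of the block (`session` is a repository-side
+session and `mass_import_entities` a placeholder for your own code):
+
+.. sourcecode:: python
+
+    from cubicweb.server.session import hooks_control, HOOKS_ALLOW_ALL
+
+    with hooks_control(session, HOOKS_ALLOW_ALL, 'integrity'):
+        # hooks of the 'integrity' category are not fired in this block
+        mass_import_entities(session)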
+
+
+Hook-specific selectors
+~~~~~~~~~~~~~~~~~~~~~~~
+.. autoclass:: cubicweb.server.hook.match_rtype
+.. autoclass:: cubicweb.server.hook.match_rtype_sets
+
+
+Hooks and operations classes
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+.. autoclass:: cubicweb.server.hook.Hook
+.. autoclass:: cubicweb.server.hook.Operation
+.. autoclass:: cubicweb.server.hook.LateOperation
+.. autoclass:: cubicweb.server.hook.DataOperationMixIn
"""
from __future__ import with_statement
@@ -61,6 +257,7 @@
from logilab.common.logging_ext import set_log_methods
from cubicweb import RegistryNotFound
+from cubicweb.vregistry import classid
from cubicweb.cwvreg import CWRegistry, VRegistry
from cubicweb.selectors import (objectify_selector, lltrace, ExpectedValueSelector,
is_instance)
@@ -77,13 +274,21 @@
'session_open', 'session_close'))
ALL_HOOKS = ENTITIES_HOOKS | RELATIONS_HOOKS | SYSTEM_HOOKS
+def _iter_kwargs(entities, kwargs):
+ if not entities:
+ yield kwargs
+ else:
+ for entity in entities:
+ kwargs['entity'] = entity
+ yield kwargs
+
class HooksRegistry(CWRegistry):
def initialization_completed(self):
for appobjects in self.values():
for cls in appobjects:
if not cls.enabled:
- warn('[3.6] %s: enabled is deprecated' % cls)
+ warn('[3.6] %s: enabled is deprecated' % classid(cls))
self.unregister(cls)
def register(self, obj, **kwargs):
@@ -91,19 +296,30 @@
super(HooksRegistry, self).register(obj, **kwargs)
def call_hooks(self, event, session=None, **kwargs):
+ """call `event` hooks for an entity or a list of entities (passed
+ respectively as the `entity` or ``entities`` keyword argument).
+ """
kwargs['event'] = event
- if session is None:
+ if session is None: # True for events such as server_start
for hook in sorted(self.possible_objects(session, **kwargs),
key=lambda x: x.order):
hook()
else:
+ if 'entities' in kwargs:
+ assert 'entity' not in kwargs, \
+ 'can\'t pass "entities" and "entity" arguments simultaneously'
+ entities = kwargs.pop('entities')
+ else:
+ entities = []
# by default, hooks are executed with security turned off
with security_enabled(session, read=False):
- hooks = sorted(self.possible_objects(session, **kwargs),
- key=lambda x: x.order)
- with security_enabled(session, write=False):
- for hook in hooks:
- hook()
+ for _kwargs in _iter_kwargs(entities, kwargs):
+ hooks = sorted(self.possible_objects(session, **_kwargs),
+ key=lambda x: x.order)
+ with security_enabled(session, write=False):
+ for hook in hooks:
+ #print hook.category, hook.__regid__
+ hook()
class HooksManager(object):
def __init__(self, vreg):
@@ -111,29 +327,18 @@
def call_hooks(self, event, session=None, **kwargs):
try:
- self.vreg['%s_hooks' % event].call_hooks(event, session, **kwargs)
+ registry = self.vreg['%s_hooks' % event]
except RegistryNotFound:
- pass # no hooks for this event
+ return # no hooks for this event
+ registry.call_hooks(event, session, **kwargs)
for event in ALL_HOOKS:
VRegistry.REGISTRY_FACTORY['%s_hooks' % event] = HooksRegistry
-_MARKER = object()
+@deprecated('[3.10] use entity.cw_edited.oldnewvalue(attr)')
def entity_oldnewvalue(entity, attr):
- """returns the couple (old attr value, new attr value)
-
- NOTE: will only work in a before_update_entity hook
- """
- # get new value and remove from local dict to force a db query to
- # fetch old value
- newvalue = entity.pop(attr, _MARKER)
- oldvalue = getattr(entity, attr)
- if newvalue is not _MARKER:
- entity[attr] = newvalue
- else:
- newvalue = oldvalue
- return oldvalue, newvalue
+ return entity.cw_edited.oldnewvalue(attr)
# some hook specific selectors #################################################
@@ -170,6 +375,7 @@
self.expected = expected
self.frometypes = more.pop('frometypes', None)
self.toetypes = more.pop('toetypes', None)
+ assert not more, "unexpected kwargs in match_rtype: %s" % more
@lltrace
def __call__(self, cls, req, *args, **kwargs):
@@ -185,8 +391,23 @@
class match_rtype_sets(ExpectedValueSelector):
- """accept if parameters specified as initializer arguments are specified
- in named arguments given to the selector
+ """accept if the relation type is in one of the sets given as initializer
+ argument. The goal of this selector is that it keeps reference to original sets,
+ so modification to thoses sets are considered by the selector. For instance
+
+ MYSET = set()
+
+ class Hook1(Hook):
+ __regid__ = 'hook1'
+ __select__ = Hook.__select__ & match_rtype_sets(MYSET)
+ ...
+
+ class Hook2(Hook):
+ __regid__ = 'hook2'
+ __select__ = Hook.__select__ & match_rtype_sets(MYSET)
+
+ Client code can now change `MYSET`; this will change the selection criteria
+ of :class:`Hook1` and :class:`Hook2`.
"""
def __init__(self, *expected):
@@ -203,6 +424,29 @@
# base class for hook ##########################################################
class Hook(AppObject):
+ """Base class for hook.
+
+ Hooks being appobjects like views, they have a `__regid__` and a `__select__`
+ class attribute. Like all appobjects, hooks have the `self._cw` attribute which
+ represents the current session. In entity hooks, a `self.entity` attribute is
+ also present.
+
+ The `events` tuple is used by the base class selector to dispatch the hook
+ on the right events. It is possible to dispatch on multiple events at once
+ if needed (though take care, as hook attributes may vary as described above).
+
+ .. Note::
+
+ Do not forget to extend the base class selectors as in:
+
+ .. sourcecode:: python
+
+ class MyHook(Hook):
+ __regid__ = 'whatever'
+ __select__ = Hook.__select__ & is_instance('Person')
+
+ otherwise your hooks will be called wildly, whatever the event.
+ """
__select__ = enabled_category()
# set this in derivated classes
events = None
@@ -231,16 +475,16 @@
@classproperty
def __regid__(cls):
- warn('[3.6] %s.%s: please specify an id for your hook'
- % (cls.__module__, cls.__name__), DeprecationWarning)
+ warn('[3.6] %s: please specify an id for your hook' % classid(cls),
+ DeprecationWarning)
return str(id(cls))
@classmethod
def __registered__(cls, reg):
super(Hook, cls).__registered__(reg)
if getattr(cls, 'accepts', None):
- warn('[3.6] %s.%s: accepts is deprecated, define proper __select__'
- % (cls.__module__, cls.__name__), DeprecationWarning)
+ warn('[3.6] %s: accepts is deprecated, define proper __select__'
+ % classid(cls), DeprecationWarning)
rtypes = []
for ertype in cls.accepts:
if ertype.islower():
@@ -261,9 +505,8 @@
def __call__(self):
if hasattr(self, 'call'):
- cls = self.__class__
- warn('[3.6] %s.%s: call is deprecated, implement __call__'
- % (cls.__module__, cls.__name__), DeprecationWarning)
+ warn('[3.6] %s: call is deprecated, implement __call__'
+ % classid(self.__class__), DeprecationWarning)
if self.event.endswith('_relation'):
self.call(self._cw, self.eidfrom, self.rtype, self.eidto)
elif 'delete' in self.event:
@@ -392,40 +635,53 @@
# abstract classes for operation ###############################################
class Operation(object):
- """an operation is triggered on connections pool events related to
+ """Base class for operations.
+
+ Operations may be instantiated in a hook's `__call__` method. An operation
+ always takes a session object as first argument (accessible as `.session`
+ from the operation instance), and optionally any keyword arguments needed by
+ the operation. These keyword arguments will be accessible as attributes of
+ the operation instance.
+
+ An operation is triggered on connections pool events related to
commit / rollback transations. Possible events are:
- precommit:
- the pool is preparing to commit. You shouldn't do anything which
- has to be reverted if the commit fails at this point, but you can freely
- do any heavy computation or raise an exception if the commit can't go.
- You can add some new operations during this phase but their precommit
- event won't be triggered
+ * `precommit`:
- commit:
- the pool is preparing to commit. You should avoid to do to expensive
- stuff or something that may cause an exception in this event
+ the transaction is being prepared for commit. You can freely do any heavy
+ computation, raise an exception if the commit can't go on, or even add some
+ new operations during this phase. If you do anything which has to be
+ reverted if the commit fails afterwards (eg altering the file system),
+ you'll have to support the 'revertprecommit' event to revert things by
+ yourself.
- revertcommit:
- if an operation failed while commited, this event is triggered for
- all operations which had their commit event already to let them
- revert things (including the operation which made fail the commit)
+ * `revertprecommit`:
+
+ if an operation failed while being pre-committed, this event is triggered
+ for all operations which already had their 'precommit' event fired, to let
+ them revert things (including the operation which made the commit fail)
+
+ * `rollback`:
- rollback:
the transaction has been either rollbacked either:
+
* intentionaly
- * a precommit event failed, all operations are rollbacked
- * a commit event failed, all operations which are not been triggered for
- commit are rollbacked
+ * a 'precommit' event failed, in which case all operations are rollbacked
+ once 'revertprecommit' has been called
+
+ * `postcommit`:
- postcommit:
- The transaction is over. All the ORM entities are
- invalid. If you need to work on the database, you need to stard
- a new transaction, for instance using a new internal_session,
- which you will need to commit (and close!).
+ the transaction is over. All the ORM entities accessed by the earlier
+ transaction are invalid. If you need to work on the database, you need to
+ start a new transaction, for instance using a new internal session, which
+ you will need to commit (and close!).
- order of operations may be important, and is controlled according to
- the insert_index's method output
+ For an operation to support an event, one has to implement the `<event name>_event` method with no arguments.
+
+ The order of operations may be important, and is controlled according to
+ the output of the `insert_index` method (whose implementation varies
+ according to the base class used).
"""
def __init__(self, session, **kwargs):
@@ -455,6 +711,10 @@
def handle_event(self, event):
"""delegate event handling to the opertaion"""
+ if event == 'postcommit_event' and hasattr(self, 'commit_event'):
+ warn('[3.10] %s: commit_event method has been replaced by postcommit_event'
+ % classid(self.__class__), DeprecationWarning)
+ self.commit_event()
getattr(self, event)()
def precommit_event(self):
@@ -467,16 +727,6 @@
been all considered if it's this operation which failed
"""
- def commit_event(self):
- """the observed connections pool is commiting"""
-
- def revertcommit_event(self):
- """an error went when commiting this operation or a later one
-
- should revert commit's changes but take care, they may have not
- been all considered if it's this operation which failed
- """
-
def rollback_event(self):
"""the observed connections pool has been rollbacked
@@ -512,21 +762,153 @@
def _container_add(container, value):
{set: set.add, list: list.append}[container.__class__](container, value)
-def set_operation(session, datakey, value, opcls, containercls=set, **opkwargs):
- """Search for session.transaction_data[`datakey`] (expected to be a set):
+
+class DataOperationMixIn(object):
+ """Mix-in class to ease applying a single operation on a set of data,
+ avoiding to create as many as operation as they are individual modification.
+ The body of the operation must then iterate over the values that have been
+ stored in a single operation instance.
+
+ You should try to use this instead of creating one operation for each
+ `value`, since handling operations becomes costly on massive data import.
+
+ Usage looks like:
+
+ .. sourcecode:: python
+
+ class MyEntityHook(Hook):
+ __regid__ = 'my.entity.hook'
+ __select__ = Hook.__select__ & is_instance('MyEntity')
+ events = ('after_add_entity',)
+
+ def __call__(self):
+ MyOperation.get_instance(self._cw).add_data(self.entity)
+
+
+ class MyOperation(DataOperationMixIn, Operation):
+ def precommit_event(self):
+ for bucket in self.get_data():
+ process(bucket)
+
+ You can modify the `containercls` class attribute, which defines the
+ container class that should be instantiated to hold payloads. An instance is
+ created on instantiation, and then the :meth:`add_data` method will add the
+ given data to the existing container. Defaults to a `set`. Give `list` if you
+ want to keep arrival ordering. You can also use another kind of container by
+ redefining :meth:`_build_container` and :meth:`add_data`.
- * if found, simply append `value`
+ More optional parameters can be given to the `get_instance` method; they
+ will be passed on to the operation constructor (though those parameters
+ should not vary across different calls to this method for the same
+ operation, for obvious reasons).
+
+ .. Note::
+ For sanity reasons, `get_data` will reset the operation, so that once
+ the operation has started its treatment, if some hook wants to push
+ additional data to this same operation, a new instance will be created
+ (else that data would most likely never be treated). This implies:
+
+ * you should **always** call `get_data` when starting treatment
+
+ * you should **never** call `get_data` for another reason.
+ """
+ containercls = set
+
+ @classproperty
+ def data_key(cls):
+ return ('cw.dataops', cls.__name__)
+
+ @classmethod
+ def get_instance(cls, session, **kwargs):
+ # no need to lock: transaction_data already comes from thread's local storage
+ try:
+ return session.transaction_data[cls.data_key]
+ except KeyError:
+ op = session.transaction_data[cls.data_key] = cls(session, **kwargs)
+ return op
+
+ def __init__(self, *args, **kwargs):
+ super(DataOperationMixIn, self).__init__(*args, **kwargs)
+ self._container = self._build_container()
+ self._processed = False
- * else, initialize it to containercls([`value`]) and instantiate the given
- `opcls` operation class with additional keyword arguments. `containercls`
- is a set by default. Give `list` if you want to keep arrival ordering.
+ def __contains__(self, value):
+ return value in self._container
+
+ def _build_container(self):
+ return self.containercls()
+
+ def add_data(self, data):
+ assert not self._processed, """Trying to add data to a closed operation.
+Iterating over operation data closed it and should be reserved to precommit /
+postcommit method of the operation."""
+ _container_add(self._container, data)
+
+ def remove_data(self, data):
+ assert not self._processed, """Trying to remove data from a closed operation.
+Iterating over operation data closed it and should be reserved to precommit /
+postcommit method of the operation."""
+ self._container.remove(data)
+
+ def get_data(self):
+ assert not self._processed, """Trying to get data from a closed operation.
+Iterating over operation data closed it and should be reserved to precommit /
+postcommit method of the operation."""
+ self._processed = True
+ op = self.session.transaction_data.pop(self.data_key)
+ assert op is self, "Bad handling of operation data, found %s instead of %s for key %s" % (
+ op, self, self.data_key)
+ return self._container
+
- You should use this instead of creating on operation for each `value`,
+@deprecated('[3.10] use opcls.get_instance(session, **opkwargs).add_data(value)')
+def set_operation(session, datakey, value, opcls, containercls=set, **opkwargs):
+ """Function to ease applying a single operation on a set of data, avoiding
+ to create as many as operation as they are individual modification. You
+ should try to use this instead of creating on operation for each `value`,
since handling operations becomes coslty on massive data import.
+
+ Arguments are:
+
+ * the `session` object
+
+ * `datakey`, a specially forged key that will be used as key in
+ session.transaction_data
+
+ * `value` that is the actual payload of an individual operation
+
+ * `opcls`, the class of the operation. An instance is created on the first
+ call for the given key, and then subsequent calls will simply add the
+ payload to the container (hence `opkwargs` is only used on that first
+ call)
+
+ * `containercls`, the container class that should be instantiated to hold
+ payloads. An instance is created on the first call for the given key, and
+ then subsequent calls will add the data to the existing container. Default
+ to a set. Give `list` if you want to keep arrival ordering.
+
+ * more optional parameters to give to the operation (e.g. the rtype, which
+ does not vary across operations).
+
+ The body of the operation must then iterate over the values that have been mapped
+ in the transaction_data dictionary to the forged key, e.g.:
+
+ .. sourcecode:: python
+
+ for value in self._cw.transaction_data.pop(datakey):
+ ...
+
+ .. Note::
+ **popping** the key from `transaction_data` is not an option, else you may
+ get unexpected data loss in some cases of nested hooks.
"""
try:
+ # Search for session.transaction_data[`datakey`] (expected to be a set):
+ # if found, simply append `value`
_container_add(session.transaction_data[datakey], value)
except KeyError:
+ # else, initialize it to containercls([`value`]) and instantiate the given
+ # `opcls` operation class with additional keyword arguments
opcls(session, **opkwargs)
session.transaction_data[datakey] = containercls()
_container_add(session.transaction_data[datakey], value)
@@ -551,8 +933,12 @@
return -(i + 1)
-class SingleOperation(Operation):
- """special operation which should be called once"""
+
+class SingleLastOperation(Operation):
+ """special operation which should be called once and after all other
+ operations
+ """
+
def register(self, session):
"""override register to handle cases where this operation has already
been added
@@ -573,11 +959,6 @@
return -(i+1)
return None
-
-class SingleLastOperation(SingleOperation):
- """special operation which should be called once and after all other
- operations
- """
def insert_index(self):
return None
@@ -599,7 +980,7 @@
if previous:
self.to_send = previous.to_send + self.to_send
- def commit_event(self):
+ def postcommit_event(self):
self.session.repo.threaded_task(self.sendmails)
def sendmails(self):
@@ -613,7 +994,7 @@
execute(*rql)
-class CleanupNewEidsCacheOp(SingleLastOperation):
+class CleanupNewEidsCacheOp(DataOperationMixIn, SingleLastOperation):
"""on rollback of a insert query we have to remove from repository's
type/source cache eids of entities added in that transaction.
@@ -623,28 +1004,27 @@
too expensive. Notice that there is no pb when using args to specify eids
instead of giving them into the rql string.
"""
+ data_key = 'neweids'
def rollback_event(self):
"""the observed connections pool has been rollbacked,
remove inserted eid from repository type/source cache
"""
try:
- self.session.repo.clear_caches(
- self.session.transaction_data['neweids'])
+ self.session.repo.clear_caches(self.get_data())
except KeyError:
pass
-class CleanupDeletedEidsCacheOp(SingleLastOperation):
+class CleanupDeletedEidsCacheOp(DataOperationMixIn, SingleLastOperation):
"""on commit of delete query, we have to remove from repository's
type/source cache eids of entities deleted in that transaction.
"""
-
- def commit_event(self):
+ data_key = 'pendingeids'
+ def postcommit_event(self):
"""the observed connections pool has been rollbacked,
remove inserted eid from repository type/source cache
"""
try:
- self.session.repo.clear_caches(
- self.session.transaction_data['pendingeids'])
+ self.session.repo.clear_caches(self.get_data())
except KeyError:
pass
diff -r 48f468f33704 -r e4580e5f0703 server/migractions.py
--- a/server/migractions.py Fri Dec 10 12:17:18 2010 +0100
+++ b/server/migractions.py Fri Mar 11 09:46:45 2011 +0100
@@ -41,12 +41,14 @@
from glob import glob
from copy import copy
from warnings import warn
+from contextlib import contextmanager
from logilab.common.deprecation import deprecated
from logilab.common.decorators import cached, clear_cache
from yams.constraints import SizeConstraint
from yams.schema2sql import eschema2sql, rschema2sql
+from yams.schema import RelationDefinitionSchema
from cubicweb import AuthenticationError, ExecutionError
from cubicweb.selectors import is_instance
@@ -60,7 +62,7 @@
from cubicweb.server import hook
try:
from cubicweb.server import SOURCE_TYPES, schemaserial as ss
- from cubicweb.server.utils import manager_userpasswd, ask_source_config
+ from cubicweb.server.utils import manager_userpasswd
from cubicweb.server.sqlutils import sqlexec, SQL_PREFIX
except ImportError: # LAX
pass
@@ -181,7 +183,7 @@
open(backupfile,'w').close() # kinda lock
os.chmod(backupfile, 0600)
# backup
- tmpdir = tempfile.mkdtemp(dir=instbkdir)
+ tmpdir = tempfile.mkdtemp()
try:
for source in repo.sources:
try:
@@ -534,38 +536,21 @@
unique_together = set([frozenset(ut)
for ut in eschema._unique_together])
for ut in repo_unique_together - unique_together:
- restrictions = ', '.join(['C relations R%(i)d, '
- 'R%(i)d relation_type T%(i)d, '
- 'R%(i)d from_entity X, '
- 'T%(i)d name %%(T%(i)d)s' % {'i': i,
- 'col':col}
- for (i, col) in enumerate(ut)])
- substs = {'etype': etype}
+ restrictions = []
+ substs = {'x': repoeschema.eid}
for i, col in enumerate(ut):
+ restrictions.append('C relations T%(i)d, '
+ 'T%(i)d name %%(T%(i)d)s' % {'i': i})
substs['T%d'%i] = col
self.rqlexec('DELETE CWUniqueTogetherConstraint C '
'WHERE C constraint_of E, '
- ' E name %%(etype)s,'
- ' %s' % restrictions,
+ ' E eid %%(x)s,'
+ ' %s' % ', '.join(restrictions),
substs)
for ut in unique_together - repo_unique_together:
- relations = ', '.join(['C relations R%d' % i
- for (i, col) in enumerate(ut)])
- restrictions = ', '.join(['R%(i)d relation_type T%(i)d, '
- 'R%(i)d from_entity E, '
- 'T%(i)d name %%(T%(i)d)s' % {'i': i,
- 'col':col}
- for (i, col) in enumerate(ut)])
- substs = {'etype': etype}
- for i, col in enumerate(ut):
- substs['T%d'%i] = col
- self.rqlexec('INSERT CWUniqueTogetherConstraint C:'
- ' C constraint_of E, '
- ' %s '
- 'WHERE '
- ' E name %%(etype)s,'
- ' %s' % (relations, restrictions),
- substs)
+ rql, substs = ss.uniquetogether2rql(eschema, ut)
+ substs['x'] = repoeschema.eid
+ self.rqlexec(rql, substs)
def _synchronize_rdef_schema(self, subjtype, rtype, objtype,
syncperms=True, syncprops=True):
@@ -643,13 +628,6 @@
for cube in newcubes:
self.cmd_set_property('system.version.'+cube,
self.config.cube_version(cube))
- if cube in SOURCE_TYPES:
- # don't use config.sources() in case some sources have been
- # disabled for migration
- sourcescfg = self.config.read_sources_file()
- sourcescfg[cube] = ask_source_config(cube)
- self.config.write_sources_file(sourcescfg)
- clear_cache(self.config, 'read_sources_file')
# ensure added cube is in config cubes
# XXX worth restoring on error?
if not cube in self.config._cubes:
@@ -961,8 +939,7 @@
# get some validation error on commit since integrity hooks
# may think some required relation is missing... This also ensure
# repository caches are properly cleanup
- hook.set_operation(session, 'pendingeids', eid,
- hook.CleanupDeletedEidsCacheOp)
+ hook.CleanupDeletedEidsCacheOp.get_instance(session).add_data(eid)
# and don't forget to remove record from system tables
self.repo.system_source.delete_info(
session, session.entity_from_eid(eid, rdeftype),
@@ -1119,11 +1096,20 @@
"""synchronize the persistent schema against the current definition
schema.
+ `ertype` can be:
+ - None, in which case everything will be synced;
+ - a string, which should be an entity type or a relation type; in that
+ case, only the corresponding entities / relations will be synced;
+ - an rdef object, to synchronize only this specific relation definition.
+
It will synch common stuff between the definition schema and the
actual persistent schema, it won't add/remove any entity or relation.
"""
assert syncperms or syncprops, 'nothing to do'
if ertype is not None:
+ if isinstance(ertype, RelationDefinitionSchema):
+ ertype = ertype.as_triple()
if isinstance(ertype, (tuple, list)):
assert len(ertype) == 3, 'not a relation definition'
self._synchronize_rdef_schema(ertype[0], ertype[1], ertype[2],
@@ -1215,8 +1201,14 @@
# Workflows handling ######################################################
+ def cmd_make_workflowable(self, etype):
+ """add workflow relations to an entity type to make it workflowable"""
+ self.cmd_add_relation_definition(etype, 'in_state', 'State')
+ self.cmd_add_relation_definition(etype, 'custom_workflow', 'Workflow')
+ self.cmd_add_relation_definition('TrInfo', 'wf_info_for', etype)
+
def cmd_add_workflow(self, name, wfof, default=True, commit=False,
- **kwargs):
+ ensure_workflowable=True, **kwargs):
"""
create a new workflow and links it to entity types
:type name: unicode
@@ -1236,7 +1228,14 @@
**kwargs)
if not isinstance(wfof, (list, tuple)):
wfof = (wfof,)
+ def _missing_wf_rel(etype):
+ return 'missing workflow relations, see make_workflowable(%s)' % etype
for etype in wfof:
+ eschema = self.repo.schema[etype]
+ if ensure_workflowable:
+ assert 'in_state' in eschema.subjrels, _missing_wf_rel(etype)
+ assert 'custom_workflow' in eschema.subjrels, _missing_wf_rel(etype)
+ assert 'wf_info_for' in eschema.objrels, _missing_wf_rel(etype)
rset = self.rqlexec(
'SET X workflow_of ET WHERE X eid %(x)s, ET name %(et)s',
{'x': wf.eid, 'et': etype}, ask_confirm=False)
@@ -1380,6 +1379,40 @@
"""add a new entity of the given type"""
return self.cmd_create_entity(etype, *args, **kwargs).eid
+ @contextmanager
+ def cmd_dropped_constraints(self, etype, attrname, cstrtype,
+ droprequired=False):
+ """context manager to drop constraints temporarily on fs_schema
+
+ `cstrtype` should be a constraint class (or a tuple of classes)
+ and will be passed to isinstance directly
+
+ For instance::
+
+ >>> with dropped_constraints('MyType', 'myattr',
+ ... UniqueConstraint, droprequired=True):
+ ... add_attribute('MyType', 'myattr')
+ ... # + instructions to fill MyType.myattr column
+ ...
+ >>>
+
+ """
+ rdef = self.fs_schema.eschema(etype).rdef(attrname)
+ original_constraints = rdef.constraints
+ # remove constraints
+ rdef.constraints = [cstr for cstr in original_constraints
+ if not (cstrtype and isinstance(cstr, cstrtype))]
+ if droprequired:
+ original_cardinality = rdef.cardinality
+ rdef.cardinality = '?' + rdef.cardinality[1]
+ yield
+ # restore original constraints
+ rdef.constraints = original_constraints
+ if droprequired:
+ rdef.cardinality = original_cardinality
+ # update repository schema
+ self.cmd_sync_schema_props_perms(rdef, syncperms=False)
+
def sqlexec(self, sql, args=None, ask_confirm=True):
"""execute the given sql if confirmed
@@ -1424,7 +1457,7 @@
return res
def rqliter(self, rql, kwargs=None, ask_confirm=True):
- return ForRqlIterator(self, rql, None, ask_confirm)
+ return ForRqlIterator(self, rql, kwargs, ask_confirm)
# broken db commands ######################################################
diff -r 48f468f33704 -r e4580e5f0703 server/msplanner.py
--- a/server/msplanner.py Fri Dec 10 12:17:18 2010 +0100
+++ b/server/msplanner.py Fri Mar 11 09:46:45 2011 +0100
@@ -84,9 +84,8 @@
1. return the result of Any X WHERE X owned_by Y from system source, that's
enough (optimization of the sql querier will avoid join on CWUser, so we
will directly get local eids)
-
+"""
-"""
__docformat__ = "restructuredtext en"
from itertools import imap, ifilterfalse
@@ -94,6 +93,7 @@
from logilab.common.compat import any
from logilab.common.decorators import cached
+from rql import BadRQLQuery
from rql.stmts import Union, Select
from rql.nodes import (VariableRef, Comparison, Relation, Constant, Variable,
Not, Exists, SortTerm, Function)
@@ -434,10 +434,13 @@
# add source for relations
rschema = self._schema.rschema
termssources = {}
+ sourcerels = []
for rel in self.rqlst.iget_nodes(Relation):
# process non final relations only
# note: don't try to get schema for 'is' relation (not available
# during bootstrap)
+ if rel.r_type == 'cw_source':
+ sourcerels.append(rel)
if not (rel.is_types_restriction() or rschema(rel.r_type).final):
# nothing to do if relation is not supported by multiple sources
# or if some source has it listed in its cross_relations
@@ -469,6 +472,75 @@
self._handle_cross_relation(rel, relsources, termssources)
self._linkedterms.setdefault(lhsv, set()).add((rhsv, rel))
self._linkedterms.setdefault(rhsv, set()).add((lhsv, rel))
+ # extract information from cw_source relation
+ for srel in sourcerels:
+ vref = srel.children[1].children[0]
+ sourceeids, sourcenames = [], []
+ if isinstance(vref, Constant):
+ # simplified variable
+ sourceeids = None, (vref.eval(self.plan.args),)
+ var = vref
+ else:
+ var = vref.variable
+ for rel in var.stinfo['relations'] - var.stinfo['rhsrelations']:
+ if rel.r_type in ('eid', 'name'):
+ if rel.r_type == 'eid':
+ slist = sourceeids
+ else:
+ slist = sourcenames
+ sources = [cst.eval(self.plan.args)
+ for cst in rel.children[1].get_nodes(Constant)]
+ if sources:
+ if slist:
+ # don't attempt to do anything
+ sourcenames = sourceeids = None
+ break
+ slist[:] = (rel, sources)
+ if sourceeids:
+ rel, values = sourceeids
+ sourcesdict = self._repo.sources_by_eid
+ elif sourcenames:
+ rel, values = sourcenames
+ sourcesdict = self._repo.sources_by_uri
+ else:
+ sourcesdict = None
+ if sourcesdict is not None:
+ lhs = srel.children[0]
+ try:
+ sources = [sourcesdict[key] for key in values]
+ except KeyError:
+ raise BadRQLQuery('source conflict for term %s' % lhs.as_string())
+ if isinstance(lhs, Constant):
+ source = self._session.source_from_eid(lhs.eval(self.plan.args))
+ if not source in sources:
+ raise BadRQLQuery('source conflict for term %s' % lhs.as_string())
+ else:
+ lhs = getattr(lhs, 'variable', lhs)
+ invariant = getattr(lhs, '_q_invariant', False)
+ # XXX NOT NOT
+ neged = srel.neged(traverse_scope=True) or (rel and rel.neged(strict=True))
+ if neged:
+ for source in sources:
+ if invariant and source is self.system_source:
+ continue
+ self._remove_source_term(source, lhs)
+ usesys = self.system_source not in sources
+ else:
+ for source, terms in sourcesterms.items():
+ if lhs in terms and not source in sources:
+ if invariant and source is self.system_source:
+ continue
+ self._remove_source_term(source, lhs)
+ usesys = self.system_source in sources
+ if rel is None or (len(var.stinfo['relations']) == 2 and
+ not var.stinfo['selected']):
+ self._remove_source_term(self.system_source, var)
+ if not (len(sources) > 1 or usesys or invariant):
+ if rel is None:
+ srel.parent.remove(srel)
+ else:
+ self.rqlst.undefine_variable(var)
+ self._remove_source_term(self.system_source, srel)
return termssources
def _handle_cross_relation(self, rel, relsources, termssources):
@@ -673,6 +745,15 @@
and self._need_ext_source_access(term, rel):
self.needsplit = True
return
+ else:
+ # remove sources only accessing to constant nodes
+ for source, terms in self._sourcesterms.items():
+ if source is self.system_source:
+ continue
+ if not any(x for x in terms if not isinstance(x, Constant)):
+ del self._sourcesterms[source]
+ if len(self._sourcesterms) < 2:
+ self.needsplit = False
@cached
def _need_ext_source_access(self, var, rel):
@@ -713,9 +794,16 @@
assert isinstance(term, (rqlb.BaseNode, Variable)), repr(term)
continue # may occur with subquery column alias
if not sourcesterms[source][term]:
- del sourcesterms[source][term]
- if not sourcesterms[source]:
- del sourcesterms[source]
+ self._remove_source_term(source, term)
+
+ def _remove_source_term(self, source, term):
+ try:
+ poped = self._sourcesterms[source].pop(term, None)
+ except KeyError:
+ pass
+ else:
+ if not self._sourcesterms[source]:
+ del self._sourcesterms[source]
def crossed_relation(self, source, relation):
return relation in self._crossrelations.get(source, ())
@@ -736,7 +824,7 @@
while sourceterms:
# take a term randomly, and all terms supporting the
# same solutions
- term, solindices = self._choose_term(sourceterms)
+ term, solindices = self._choose_term(source, sourceterms)
if source.uri == 'system':
# ensure all variables are available for the latest step
# (missing one will be available from temporary tables
@@ -766,8 +854,24 @@
# set of terms which should be additionaly selected when
# possible
needsel = set()
- if not self._sourcesterms:
+ if not self._sourcesterms and scope is select:
terms += scope.defined_vars.values() + scope.aliases.values()
+ if isinstance(term, Relation) and len(sources) > 1:
+ variants = set()
+ partterms = [term]
+ for vref in term.get_nodes(VariableRef):
+ if not vref.variable._q_invariant:
+ variants.add(vref.name)
+ if len(variants) == 2:
+ # we need an extra-step to fetch relations from each source
+ # before a join with prefetched inputs
+ # (see test_crossed_relation_noeid_needattr in
+ # unittest_msplanner / unittest_multisources)
+ lhs, rhs = term.get_variable_parts()
+ steps.append( (sources, [term, getattr(lhs, 'variable', lhs),
+ getattr(rhs, 'variable', rhs)],
+ solindices, scope, variants, False) )
+ sources = [self.system_source]
final = True
else:
# suppose this is a final step until the contrary is proven
@@ -785,7 +889,7 @@
else:
needsel.add(var.name)
final = False
- # check where all relations are supported by the sources
+ # check all relations are supported by the sources
for rel in scope.iget_nodes(Relation):
if rel.is_types_restriction():
continue
@@ -799,7 +903,7 @@
break
else:
if not scope is select:
- self._exists_relation(rel, terms, needsel)
+ self._exists_relation(rel, terms, needsel, source)
# if the relation is supported by all sources and one of
# its lhs/rhs variables isn't in "terms", and the
# other end *is* in "terms", mark it as having to be
@@ -843,9 +947,14 @@
self._cleanup_sourcesterms(sources, solindices)
steps.append((sources, terms, solindices, scope, needsel, final)
)
+ if not steps[-1][-1]:
+ # add a final step
+ terms = select.defined_vars.values() + select.aliases.values()
+ steps.append( ([self.system_source], terms, set(self._solindices),
+ select, set(), True) )
return steps
- def _exists_relation(self, rel, terms, needsel):
+ def _exists_relation(self, rel, terms, needsel, source):
rschema = self._schema.rschema(rel.r_type)
lhs, rhs = rel.get_variable_parts()
try:
@@ -858,13 +967,24 @@
# variable is refed by an outer scope and should be substituted
# using an 'identity' relation (else we'll get a conflict of
# temporary tables)
- if rhsvar in terms and not lhsvar in terms and ms_scope(lhsvar) is lhsvar.stmt:
- self._identity_substitute(rel, lhsvar, terms, needsel)
- elif lhsvar in terms and not rhsvar in terms and ms_scope(rhsvar) is rhsvar.stmt:
- self._identity_substitute(rel, rhsvar, terms, needsel)
+ relscope = ms_scope(rel)
+ lhsscope = ms_scope(lhsvar)
+ rhsscope = ms_scope(rhsvar)
+ if rhsvar in terms and not lhsvar in terms and lhsscope is lhsvar.stmt:
+ self._identity_substitute(rel, lhsvar, terms, needsel, relscope)
+ elif lhsvar in terms and not rhsvar in terms and rhsscope is rhsvar.stmt:
+ self._identity_substitute(rel, rhsvar, terms, needsel, relscope)
+ elif self.crossed_relation(source, rel):
+ if lhsscope is not relscope:
+ self._identity_substitute(rel, lhsvar, terms, needsel,
+ relscope, lhsscope)
+ if rhsscope is not relscope:
+ self._identity_substitute(rel, rhsvar, terms, needsel,
+ relscope, rhsscope)
- def _identity_substitute(self, relation, var, terms, needsel):
- newvar = self._insert_identity_variable(ms_scope(relation), var)
+ def _identity_substitute(self, relation, var, terms, needsel, exist,
+ idrelscope=None):
+ newvar = self._insert_identity_variable(exist, var, idrelscope)
# ensure relation is using '=' operator, else we rely on a
# sqlgenerator side effect (it won't insert an inequality operator
# in this case)
@@ -872,12 +992,28 @@
terms.append(newvar)
needsel.add(newvar.name)
- def _choose_term(self, sourceterms):
+ def _choose_term(self, source, sourceterms):
"""pick one term among terms supported by a source, which will be used
as a base to generate an execution step
"""
secondchoice = None
if len(self._sourcesterms) > 1:
+ # first, return the non-invariant variable of a crossed relation, then the
+ # crossed relation itself
+ for term in sourceterms:
+ if (isinstance(term, Relation)
+ and self.crossed_relation(source, term)
+ and not ms_scope(term) is self.rqlst):
+ for vref in term.get_variable_parts():
+ try:
+ var = vref.variable
+ except AttributeError:
+ # Constant
+ continue
+ if ((len(var.stinfo['relations']) > 1 or var.stinfo['selected'])
+ and var in sourceterms):
+ return var, sourceterms.pop(var)
+ return term, sourceterms.pop(term)
# priority to variable from subscopes
for term in sourceterms:
if not ms_scope(term) is self.rqlst:
@@ -962,7 +1098,7 @@
if isinstance(term, Relation) and term in cross_rels:
cross_terms = cross_rels.pop(term)
base_accept_term = accept_term
- accept_term = lambda x: (accept_term(x) or x in cross_terms)
+ accept_term = lambda x: (base_accept_term(x) or x in cross_terms)
for refed in cross_terms:
if not refed in candidates:
terms.append(refed)
@@ -1015,7 +1151,7 @@
if not sourceterms:
del self._sourcesterms[source]
- def merge_input_maps(self, allsolindices):
+ def merge_input_maps(self, allsolindices, complete=True):
"""inputmaps is a dictionary with tuple of solution indices as key with
an associated input map as value. This function computes for each
solution its necessary input map and returns them grouped
@@ -1029,14 +1165,17 @@
"""
if not self._inputmaps:
return [(allsolindices, None)]
+ _allsolindices = allsolindices.copy()
mapbysol = {}
# compute a single map for each solution
for solindices, basemap in self._inputmaps.iteritems():
for solindex in solindices:
+ if not (complete or solindex in allsolindices):
+ continue
solmap = mapbysol.setdefault(solindex, {})
solmap.update(basemap)
try:
- allsolindices.remove(solindex)
+ _allsolindices.remove(solindex)
except KeyError:
continue # already removed
# group results by identical input map
@@ -1048,14 +1187,14 @@
break
else:
result.append( ([solindex], solmap) )
- if allsolindices:
- result.append( (list(allsolindices), None) )
+ if _allsolindices:
+ result.append( (list(_allsolindices), None) )
return result
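
As a rough illustration of what merge_input_maps computes (sample data only, no planner objects involved): each solution index accumulates the union of the input maps registered for it, solutions ending up with an identical map are grouped, and solutions with no map at all are returned as a final group with None.

def merge_input_maps(inputmaps, allsolindices, complete=True):
    remaining = set(allsolindices)
    mapbysol = {}
    for solindices, basemap in inputmaps.items():
        for solindex in solindices:
            if not (complete or solindex in allsolindices):
                continue
            mapbysol.setdefault(solindex, {}).update(basemap)
            remaining.discard(solindex)
    result = []
    for solindex, solmap in mapbysol.items():
        for grouped, commonmap in result:
            if commonmap == solmap:
                grouped.append(solindex)
                break
        else:
            result.append(([solindex], solmap))
    if remaining:
        result.append((sorted(remaining), None))
    return result

# solutions 0 and 1 share the same input map, solution 2 has none
assert merge_input_maps({(0, 1): {'X': 'T00.C0'}}, set([0, 1, 2])) == \
       [([0, 1], {'X': 'T00.C0'}), ([2], None)]
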
def build_final_part(self, select, solindices, inputmap, sources,
insertedvars):
solutions = [self._solutions[i] for i in solindices]
- if self._conflicts:
+ if self._conflicts and inputmap:
for varname, mappedto in self._conflicts:
var = select.defined_vars[varname]
newvar = select.make_variable()
@@ -1080,7 +1219,7 @@
inputmapkey = tuple(sorted(solindices))
inputmap = self._inputmaps.setdefault(inputmapkey, {})
for varname, mapping in step.outputmap.iteritems():
- if varname in inputmap and \
+ if varname in inputmap and not '.' in varname and \
not (mapping == inputmap[varname] or
self._schema.eschema(solutions[0][varname]).final):
self._conflicts.append((varname, inputmap[varname]))
@@ -1212,13 +1351,15 @@
ppi.temptable = atemptable
vfilter = TermsFiltererVisitor(self.schema, ppi)
steps = []
+ multifinal = len([x for x in stepdefs if x[-1]]) >= 2
for sources, terms, solindices, scope, needsel, final in stepdefs:
# extract an executable query using only the specified terms
if sources[0].uri == 'system':
# in this case we have to merge input maps before the call to
# filter so already processed restrictions are correctly
# removed
- solsinputmaps = ppi.merge_input_maps(solindices)
+ solsinputmaps = ppi.merge_input_maps(
+ solindices, complete=not (final and multifinal))
for solindices, inputmap in solsinputmaps:
minrqlst, insertedvars = vfilter.filter(
sources, terms, scope, set(solindices), needsel, final)
@@ -1235,7 +1376,8 @@
minrqlst, insertedvars = vfilter.filter(
sources, terms, scope, solindices, needsel, final)
if final:
- solsinputmaps = ppi.merge_input_maps(solindices)
+ solsinputmaps = ppi.merge_input_maps(
+ solindices, complete=not (final and multifinal))
if len(solsinputmaps) > 1:
refrqlst = minrqlst
for solindices, inputmap in solsinputmaps:
@@ -1455,7 +1597,7 @@
def visit_relation(self, node, newroot, terms):
if not node.is_types_restriction():
- if node in self.skip and self.solindices.issubset(self.skip[node]):
+ if not node in terms and node in self.skip and self.solindices.issubset(self.skip[node]):
if not self.schema.rschema(node.r_type).final:
# can't really skip the relation if one variable is selected
# and only referenced by this relation
diff -r 48f468f33704 -r e4580e5f0703 server/pool.py
--- a/server/pool.py Fri Dec 10 12:17:18 2010 +0100
+++ b/server/pool.py Fri Mar 11 09:46:45 2011 +0100
@@ -34,7 +34,7 @@
# dictionary of (source, connection), indexed by the sources' uri
self.source_cnxs = {}
for source in sources:
- self.source_cnxs[source.uri] = (source, source.get_connection())
+ self.add_source(source)
if not 'system' in self.source_cnxs:
self.source_cnxs['system'] = self.source_cnxs[sources[0].uri]
self._cursors = {}
@@ -50,6 +50,15 @@
self._cursors[uri] = cursor
return cursor
+ def add_source(self, source):
+ assert not source.uri in self.source_cnxs
+ self.source_cnxs[source.uri] = (source, source.get_connection())
+
+ def remove_source(self, source):
+ source, cnx = self.source_cnxs.pop(source.uri)
+ cnx.close()
+ self._cursors.pop(source.uri, None)
+
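
A condensed sketch of the pool-level bookkeeping involved (FakeConnection and MiniPool are made up for illustration, not CubicWeb classes): one (source, connection) pair per source uri, plus a cursor cache that must be invalidated when a source goes away.

class FakeConnection(object):
    def __init__(self, uri):
        self.uri = uri
        self.closed = False
    def close(self):
        self.closed = True

class MiniPool(object):
    def __init__(self):
        self.source_cnxs = {}   # uri -> (source, connection)
        self._cursors = {}      # uri -> cached cursor

    def add_source(self, uri):
        assert uri not in self.source_cnxs
        self.source_cnxs[uri] = (uri, FakeConnection(uri))

    def remove_source(self, uri):
        _source, cnx = self.source_cnxs.pop(uri)
        cnx.close()                       # close the underlying connection
        self._cursors.pop(uri, None)      # and forget any cached cursor

pool = MiniPool()
pool.add_source('system')
pool.add_source('myldap')
pool.remove_source('myldap')
assert list(pool.source_cnxs) == ['system']
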
def commit(self):
"""commit the current transaction for this user"""
# FIXME: what happens if a commit fails
@@ -144,11 +153,9 @@
self._cursors.pop(source.uri, None)
-from cubicweb.server.hook import (Operation, LateOperation, SingleOperation,
- SingleLastOperation)
+from cubicweb.server.hook import Operation, LateOperation, SingleLastOperation
from logilab.common.deprecation import class_moved, class_renamed
Operation = class_moved(Operation)
PreCommitOperation = class_renamed('PreCommitOperation', Operation)
LateOperation = class_moved(LateOperation)
-SingleOperation = class_moved(SingleOperation)
SingleLastOperation = class_moved(SingleLastOperation)
diff -r 48f468f33704 -r e4580e5f0703 server/querier.py
--- a/server/querier.py Fri Dec 10 12:17:18 2010 +0100
+++ b/server/querier.py Fri Mar 11 09:46:45 2011 +0100
@@ -38,7 +38,7 @@
from cubicweb.server.utils import cleanup_solutions
from cubicweb.server.rqlannotation import SQLGenAnnotator, set_qdata
-from cubicweb.server.ssplanner import READ_ONLY_RTYPES, add_types_restriction
+from cubicweb.server.ssplanner import READ_ONLY_RTYPES, add_types_restriction, EditedEntity
from cubicweb.server.session import security_enabled
def empty_rset(rql, args, rqlst=None):
@@ -450,7 +450,7 @@
# save originally selected variable, we may modify this
# dictionary for substitution (query parameters)
self.selected = rqlst.selection
- # list of new or updated entities definition (utils.Entity)
+ # list of rows of entities definition (ssplanner.EditedEntity)
self.e_defs = [[]]
# list of new relation definitions (3-tuple (from_eid, r_type, to_eid))
self.r_defs = set()
@@ -461,7 +461,6 @@
def add_entity_def(self, edef):
"""add an entity definition to build"""
- edef.querier_pending_relations = {}
self.e_defs[-1].append(edef)
def add_relation_def(self, rdef):
@@ -493,8 +492,9 @@
self.e_defs[i][colidx] = edefs[0]
samplerow = self.e_defs[i]
for edef_ in edefs[1:]:
- row = samplerow[:]
- row[colidx] = edef_
+ row = [ed.clone() for i, ed in enumerate(samplerow)
+ if i != colidx]
+ row.insert(colidx, edef_)
self.e_defs.append(row)
# now, see if this entity def is referenced as subject in some relation
# definition
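
The row expansion above can be pictured with plain lists (Def and its clone() method below are hypothetical stand-ins for ssplanner.EditedEntity): when one column of the e_defs matrix expands into several definitions, each extra definition gets its own row and the other columns are cloned so rows do not share objects.

class Def(object):
    def __init__(self, name):
        self.name = name
    def clone(self):
        return Def(self.name)

def expand_column(rows, rowidx, colidx, new_defs):
    rows[rowidx][colidx] = new_defs[0]
    samplerow = rows[rowidx]
    for extra in new_defs[1:]:
        row = [d.clone() for i, d in enumerate(samplerow) if i != colidx]
        row.insert(colidx, extra)
        rows.append(row)

rows = [[Def('a'), Def('b')]]
expand_column(rows, 0, 1, [Def('b1'), Def('b2')])
assert [[d.name for d in row] for row in rows] == [['a', 'b1'], ['a', 'b2']]
assert rows[0][0] is not rows[1][0]   # column 'a' was cloned for the new row
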
@@ -560,15 +560,16 @@
if isinstance(subj, basestring):
subj = typed_eid(subj)
elif not isinstance(subj, (int, long)):
- subj = subj.eid
+ subj = subj.entity.eid
if isinstance(obj, basestring):
obj = typed_eid(obj)
elif not isinstance(obj, (int, long)):
- obj = obj.eid
+ obj = obj.entity.eid
if repo.schema.rschema(rtype).inlined:
entity = session.entity_from_eid(subj)
- entity[rtype] = obj
- repo.glob_update_entity(session, entity, set((rtype,)))
+ edited = EditedEntity(entity)
+ edited.edited_attribute(rtype, obj)
+ repo.glob_update_entity(session, edited)
else:
repo.glob_add_relation(session, subj, rtype, obj)
@@ -585,12 +586,12 @@
def set_schema(self, schema):
self.schema = schema
repo = self._repo
- # rql st and solution cache. Don't bother using a Cache instance: we
- # should have a limited number of queries in there, since there are no
- # entries in this cache for user queries (which have no args)
- self._rql_cache = {}
- # rql cache key cache
- self._rql_ck_cache = Cache(repo.config['rql-cache-size'])
+ # rql st and solution cache.
+ self._rql_cache = Cache(repo.config['rql-cache-size'])
+ # rql cache key cache. Don't bother using a Cache instance: we should
+ # have a limited number of queries in there, since there are no entries
+ # in this cache for user queries (which have no args)
+ self._rql_ck_cache = {}
# some cache usage stats
self.cache_hit, self.cache_miss = 0, 0
# rql parsing / analysing helper
@@ -601,9 +602,7 @@
self._parse = rqlhelper.parse
self._annotate = rqlhelper.annotate
# rql planner
- # note: don't use repo.sources, may not be built yet, and also "admin"
- # isn't an actual source
- if len([uri for uri in repo.config.sources() if uri != 'admin']) < 2:
+ if len(repo.sources) < 2:
from cubicweb.server.ssplanner import SSPlanner
self._planner = SSPlanner(schema, rqlhelper)
else:
@@ -612,6 +611,14 @@
# sql generation annotator
self.sqlgen_annotate = SQLGenAnnotator(schema).annotate
+ def set_planner(self):
+ if len(self._repo.sources) < 2:
+ from cubicweb.server.ssplanner import SSPlanner
+ self._planner = SSPlanner(self.schema, self._repo.vreg.rqlhelper)
+ else:
+ from cubicweb.server.msplanner import MSPlanner
+ self._planner = MSPlanner(self.schema, self._repo.vreg.rqlhelper)
+
def parse(self, rql, annotate=False):
"""return a rql syntax tree for the given rql"""
try:
@@ -649,11 +656,15 @@
print '*'*80
print 'querier input', rql, args
# parse the query and binds variables
+ cachekey = rql
try:
- cachekey = rql
if args:
+ # search for named args in query which are eids (hence
+ # influencing query's solutions)
eidkeys = self._rql_ck_cache[rql]
if eidkeys:
+ # if there are some, we need a better cache key, eg (rql +
+ # entity type of each eid)
try:
cachekey = self._repo.querier_cache_key(session, rql,
args, eidkeys)
@@ -667,15 +678,20 @@
self.cache_miss += 1
rqlst = self.parse(rql)
try:
+ # compute solutions for rqlst and return named args in query
+ # which are eids. Notice that even if you don't need `eidkeys`, we
+ # have to compute solutions anyway (kept as annotation on the
+ # tree)
eidkeys = self.solutions(session, rqlst, args)
except UnknownEid:
# we want queries such as "Any X WHERE X eid 9999" return an
# empty result instead of raising UnknownEid
return empty_rset(rql, args, rqlst)
- self._rql_ck_cache[rql] = eidkeys
- if eidkeys:
- cachekey = self._repo.querier_cache_key(session, rql, args,
- eidkeys)
+ if args and not rql in self._rql_ck_cache:
+ self._rql_ck_cache[rql] = eidkeys
+ if eidkeys:
+ cachekey = self._repo.querier_cache_key(session, rql, args,
+ eidkeys)
self._rql_cache[cachekey] = rqlst
orig_rqlst = rqlst
if rqlst.TYPE != 'select':
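
To make the caching comments above concrete, here is a simplified, standalone sketch of the cache key scheme (querier_cache_key and type_from_eid below are illustrative, not the actual repository helpers): the plain rql string is enough as a key unless some named arguments are eids, in which case the entity type of each such eid is appended, since different types lead to different solutions.

def querier_cache_key(rql, args, eidkeys, type_from_eid):
    # rql string extended with the entity type of each eid argument
    return (rql,) + tuple(type_from_eid(args[key]) for key in sorted(eidkeys))

types = {123: 'CWUser', 456: 'Blog'}     # pretend eid -> entity type mapping
eidkeys = ['x']                          # 'x' is known to hold an eid
key1 = querier_cache_key('Any X WHERE X eid %(x)s', {'x': 123}, eidkeys, types.get)
key2 = querier_cache_key('Any X WHERE X eid %(x)s', {'x': 456}, eidkeys, types.get)
assert key1 != key2   # same rql, different entity types: distinct cache entries
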
diff -r 48f468f33704 -r e4580e5f0703 server/repository.py
--- a/server/repository.py Fri Dec 10 12:17:18 2010 +0100
+++ b/server/repository.py Fri Mar 11 09:46:45 2011 +0100
@@ -34,17 +34,19 @@
import sys
import threading
import Queue
+from itertools import chain
from os.path import join
from datetime import datetime
from time import time, localtime, strftime
-from logilab.common.decorators import cached
+from logilab.common.decorators import cached, clear_cache
from logilab.common.compat import any
from logilab.common import flatten
from yams import BadSchemaDefinition
from yams.schema import role_name
from rql import RQLSyntaxError
+from rql.utils import rqlvar_maker
from cubicweb import (CW_SOFTWARE_ROOT, CW_MIGRATION_MAP, QueryError,
UnknownEid, AuthenticationError, ExecutionError,
@@ -55,7 +57,29 @@
from cubicweb.server import utils, hook, pool, querier, sources
from cubicweb.server.session import Session, InternalSession, InternalManager, \
security_enabled
-_ = unicode
+from cubicweb.server.ssplanner import EditedEntity
+
+def prefill_entity_caches(entity, relations):
+ session = entity._cw
+ # prefill entity relation caches
+ for rschema in entity.e_schema.subject_relations():
+ rtype = str(rschema)
+ if rtype in schema.VIRTUAL_RTYPES:
+ continue
+ if rschema.final:
+ entity.cw_attr_cache.setdefault(rtype, None)
+ else:
+ entity.cw_set_relation_cache(rtype, 'subject',
+ session.empty_rset())
+ for rschema in entity.e_schema.object_relations():
+ rtype = str(rschema)
+ if rtype in schema.VIRTUAL_RTYPES:
+ continue
+ entity.cw_set_relation_cache(rtype, 'object', session.empty_rset())
+ # set inlined relation cache before call to after_add_entity
+ for attr, value in relations:
+ session.update_rel_cache_add(entity.eid, attr, value)
+ del_existing_rel_if_needed(session, entity.eid, attr, value)
def del_existing_rel_if_needed(session, eidfrom, rtype, eidto):
"""delete existing relation when adding a new one if card is 1 or ?
@@ -120,27 +144,15 @@
# initial schema, should be built or replaced later
self.schema = schema.CubicWebSchema(config.appid)
self.vreg.schema = self.schema # until actual schema is loaded...
- # querier helper, need to be created after sources initialization
- self.querier = querier.QuerierHelper(self, self.schema)
- # sources
- self.sources = []
- self.sources_by_uri = {}
# shutdown flag
self.shutting_down = False
- # FIXME: store additional sources info in the system database ?
- # FIXME: sources should be ordered (add_entity priority)
- for uri, source_config in config.sources().items():
- if uri == 'admin':
- # not an actual source
- continue
- source = self.get_source(uri, source_config)
- self.sources_by_uri[uri] = source
- if config.source_enabled(uri):
- self.sources.append(source)
- self.system_source = self.sources_by_uri['system']
- # ensure system source is the first one
- self.sources.remove(self.system_source)
- self.sources.insert(0, self.system_source)
+ # sources (additional sources info in the system database)
+ self.system_source = self.get_source('native', 'system',
+ config.sources()['system'])
+ self.sources = [self.system_source]
+ self.sources_by_uri = {'system': self.system_source}
+ # querier helper, need to be created after sources initialization
+ self.querier = querier.QuerierHelper(self, self.schema)
# cache eid -> type / source
self._type_source_cache = {}
# cache (extid, source uri) -> eid
@@ -192,6 +204,7 @@
config.bootstrap_cubes()
self.set_schema(config.load_schema())
if not config.creating:
+ self.init_sources_from_database()
if 'CWProperty' in self.schema:
self.vreg.init_properties(self.properties())
# call source's init method to complete their initialisation if
@@ -208,7 +221,7 @@
# close initialization pool and reopen fresh ones for proper
# initialization now that we know cubes
self._get_pool().close(True)
- # list of available pools (we can't iterated on Queue instance)
+ # list of available pools (we can't iterate on Queue instance)
self.pools = []
for i in xrange(config['connections-pool-size']):
self.pools.append(pool.ConnectionsPool(self.sources))
@@ -219,9 +232,60 @@
# internals ###############################################################
- def get_source(self, uri, source_config):
+ def init_sources_from_database(self):
+ self.sources_by_eid = {}
+ if self.config.quick_start \
+ or not 'CWSource' in self.schema: # 3.10 migration
+ return
+ session = self.internal_session()
+ try:
+ # FIXME: sources should be ordered (add_entity priority)
+ for sourceent in session.execute(
+ 'Any S, SN, SA, SC WHERE S is CWSource, '
+ 'S name SN, S type SA, S config SC').entities():
+ if sourceent.name == 'system':
+ self.system_source.eid = sourceent.eid
+ self.sources_by_eid[sourceent.eid] = self.system_source
+ continue
+ self.add_source(sourceent, add_to_pools=False)
+ finally:
+ session.close()
+
+ def _clear_planning_caches(self):
+ for cache in ('source_defs', 'is_multi_sources_relation',
+ 'can_cross_relation', 'rel_type_sources'):
+ clear_cache(self, cache)
+
+ def add_source(self, sourceent, add_to_pools=True):
+ source = self.get_source(sourceent.type, sourceent.name,
+ sourceent.host_config)
+ source.eid = sourceent.eid
+ self.sources_by_eid[sourceent.eid] = source
+ self.sources_by_uri[sourceent.name] = source
+ if self.config.source_enabled(source):
+ self.sources.append(source)
+ self.querier.set_planner()
+ if add_to_pools:
+ for pool in self.pools:
+ pool.add_source(source)
+ self._clear_planning_caches()
+
+ def remove_source(self, uri):
+ source = self.sources_by_uri.pop(uri)
+ del self.sources_by_eid[source.eid]
+ if self.config.source_enabled(source):
+ self.sources.remove(source)
+ self.querier.set_planner()
+ for pool in self.pools:
+ pool.remove_source(source)
+ self._clear_planning_caches()
+
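
Stripped of the actual Source objects and pools, the add_source / remove_source bookkeeping boils down to something like the sketch below (MiniRepo is purely illustrative); the point is that the planner is re-chosen whenever the number of enabled sources crosses one, mirroring set_planner in the querier.

class MiniRepo(object):
    def __init__(self):
        self.sources = ['system']               # enabled sources, system first
        self.sources_by_uri = {'system': None}
        self.planner = 'SSPlanner'

    def _set_planner(self):
        self.planner = 'SSPlanner' if len(self.sources) < 2 else 'MSPlanner'

    def add_source(self, uri, enabled=True):
        self.sources_by_uri[uri] = None
        if enabled:
            self.sources.append(uri)
            self._set_planner()

    def remove_source(self, uri):
        del self.sources_by_uri[uri]
        if uri in self.sources:
            self.sources.remove(uri)
            self._set_planner()

repo = MiniRepo()
repo.add_source('myldap')
assert repo.planner == 'MSPlanner'   # two sources: multi-sources planner
repo.remove_source('myldap')
assert repo.planner == 'SSPlanner'   # back to the single-source planner
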
+ def get_source(self, type, uri, source_config):
+ # set uri and type in the source config so they are available through
+ # source_defs()
source_config['uri'] = uri
- return sources.get_source(source_config, self.schema, self)
+ source_config['type'] = type
+ return sources.get_source(type, source_config, self)
def set_schema(self, schema, resetvreg=True, rebuildinfered=True):
if rebuildinfered:
@@ -270,7 +334,10 @@
# call instance level initialisation hooks
self.hm.call_hooks('server_startup', repo=self)
# register a task to cleanup expired session
- self.looping_task(self.config['session-time']/3., self.clean_sessions)
+ self.cleanup_session_time = self.config['cleanup-session-time'] or 60 * 60 * 24
+ assert self.cleanup_session_time > 0
+ cleanup_session_interval = min(60*60, self.cleanup_session_time / 3)
+ self.looping_task(cleanup_session_interval, self.clean_sessions)
assert isinstance(self._looping_tasks, list), 'already started'
for i, (interval, func, args) in enumerate(self._looping_tasks):
self._looping_tasks[i] = task = utils.LoopTask(interval, func, args)
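
A small worked example of the scheduling arithmetic above (values are illustrative): the cleanup task runs every min(1 hour, cleanup_session_time / 3) seconds, so short expiration delays are still checked often enough while the default 24h setting only triggers an hourly scan.

def cleanup_interval(cleanup_session_time):
    return min(60 * 60, cleanup_session_time / 3)

assert cleanup_interval(24 * 60 * 60) == 60 * 60   # default 24h -> hourly check
assert cleanup_interval(30 * 60) == 10 * 60        # 30min -> check every 10min
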
@@ -400,7 +467,7 @@
rset = session.execute(rql, {'x': eid})
assert len(rset) == 1, rset
cwuser = rset.get_entity(0, 0)
- # pylint: disable-msg=W0104
+ # pylint: disable=W0104
# prefetch / cache cwuser's groups and properties. This is especially
# useful for internal sessions to avoid security insertions
cwuser.groups
@@ -476,7 +543,13 @@
return self.config[option]
pool = self._get_pool()
try:
- return pool.connection(sourceuri).get_option_value(option, extid)
+ cnx = pool.connection(sourceuri)
+ # needed to check connection is valid and usable by the current
+ # thread
+ newcnx = self.sources_by_uri[sourceuri].check_connection(cnx)
+ if newcnx is not None:
+ cnx = newcnx
+ return cnx.get_option_value(option, extid)
finally:
self._free_pool(pool)
@@ -520,14 +593,10 @@
This is a public method, not requiring a session id.
"""
- sources = self.config.sources().copy()
- # remove manager information
- sources.pop('admin', None)
+ sources = {}
# remove sensitive information
- for uri, sourcedef in sources.iteritems():
- sourcedef = sourcedef.copy()
- self.sources_by_uri[uri].remove_sensitive_information(sourcedef)
- sources[uri] = sourcedef
+ for uri, source in self.sources_by_uri.iteritems():
+ sources[uri] = source.public_config
return sources
def properties(self):
@@ -568,8 +637,7 @@
password = password.encode('UTF8')
kwargs['login'] = login
kwargs['upassword'] = password
- user.update(kwargs)
- self.glob_add_entity(session, user)
+ self.glob_add_entity(session, EditedEntity(user, **kwargs))
session.execute('SET X in_group G WHERE X eid %(x)s, G name "users"',
{'x': user.eid})
if email or '@' in login:
@@ -586,6 +654,39 @@
session.close()
return True
+ def find_users(self, fetch_attrs, **query_attrs):
+ """yield user attributes for cwusers matching the given query_attrs
+ (the result set cannot survive this method call)
+
+ This can be used by low-privilege accounts (anonymous comes to
+ mind).
+
+ `fetch_attrs`: tuple of attributes to be fetched
+ `query_attrs`: dict of attr/values to restrict the query
+ """
+ assert query_attrs
+ if not hasattr(self, '_cwuser_attrs'):
+ cwuser = self.schema['CWUser']
+ self._cwuser_attrs = set(str(rschema)
+ for rschema, _eschema in cwuser.attribute_definitions()
+ if not rschema.meta)
+ cwuserattrs = self._cwuser_attrs
+ for k in chain(fetch_attrs, query_attrs.iterkeys()):
+ if k not in cwuserattrs:
+ raise Exception('bad input for find_user')
+ session = self.internal_session()
+ try:
+ varmaker = rqlvar_maker()
+ vars = [(attr, varmaker.next()) for attr in fetch_attrs]
+ rql = 'Any %s WHERE X is CWUser, ' % ','.join(var[1] for var in vars)
+ rql += ','.join('X %s %s' % (var[0], var[1]) for var in vars) + ','
+ rset = session.execute(rql + ','.join('X %s %%(%s)s' % (attr, attr)
+ for attr in query_attrs.iterkeys()),
+ query_attrs)
+ return rset.rows
+ finally:
+ session.close()
+
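
The RQL string assembled by find_users can be illustrated with a standalone helper (build_find_users_rql below is only a sketch, using a simple counter instead of rql.utils.rqlvar_maker):

def build_find_users_rql(fetch_attrs, query_attrs):
    variables = ['V%d' % i for i, _ in enumerate(fetch_attrs)]
    rql = 'Any %s WHERE X is CWUser, ' % ','.join(variables)
    rql += ', '.join('X %s %s' % (attr, var)
                     for attr, var in zip(fetch_attrs, variables)) + ', '
    rql += ', '.join('X %s %%(%s)s' % (attr, attr) for attr in query_attrs)
    return rql

assert build_find_users_rql(('login', 'firstname'), {'surname': 'Doe'}) == \
       'Any V0,V1 WHERE X is CWUser, X login V0, X firstname V1, X surname %(surname)s'
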
def connect(self, login, **kwargs):
"""open a connection for a given user
@@ -660,24 +761,32 @@
session.reset_pool()
def check_session(self, sessionid):
- """raise `BadConnectionId` if the connection is no more valid"""
- self._get_session(sessionid, setpool=False)
+ """raise `BadConnectionId` if the connection is no longer valid, else
+ return its latest activity timestamp.
+ """
+ return self._get_session(sessionid, setpool=False).timestamp
+
+ def get_shared_data(self, sessionid, key, default=None, pop=False, txdata=False):
+ """return value associated to key in the session's data dictionary or
+ session's transaction's data if `txdata` is true.
- def get_shared_data(self, sessionid, key, default=None, pop=False):
- """return the session's data dictionary"""
+ If pop is True, the value will be removed from the dictionary.
+
+ If key isn't defined in the dictionary, the value specified by the
+ `default` argument will be returned.
+ """
session = self._get_session(sessionid, setpool=False)
- return session.get_shared_data(key, default, pop)
+ return session.get_shared_data(key, default, pop, txdata)
- def set_shared_data(self, sessionid, key, value, querydata=False):
+ def set_shared_data(self, sessionid, key, value, txdata=False):
"""set value associated to `key` in shared data
- if `querydata` is true, the value will be added to the repository
- session's query data which are cleared on commit/rollback of the current
- transaction, and won't be available through the connexion, only on the
- repository side.
+ if `txdata` is true, the value will be added to the repository session's
+ transaction's data, which is cleared on commit/rollback of the current
+ transaction.
"""
session = self._get_session(sessionid, setpool=False)
- session.set_shared_data(key, value, querydata)
+ session.set_shared_data(key, value, txdata)
def commit(self, sessionid, txid=None):
"""commit transaction for the session with the given id"""
@@ -809,7 +918,7 @@
"""close sessions not used since an amount of time specified in the
configuration
"""
- mintime = time() - self.config['session-time']
+ mintime = time() - self.cleanup_session_time
self.debug('cleaning session unused since %s',
strftime('%T', localtime(mintime)))
nbclosed = 0
@@ -964,7 +1073,6 @@
self._extid_cache[cachekey] = eid
self._type_source_cache[eid] = (etype, source.uri, extid)
entity = source.before_entity_insertion(session, extid, etype, eid)
- entity.edited_attributes = set(entity.cw_attr_cache)
if source.should_call_hooks:
self.hm.call_hooks('before_add_entity', session, entity=entity)
# XXX call add_info with complete=False ?
@@ -972,10 +1080,6 @@
source.after_entity_insertion(session, extid, entity)
if source.should_call_hooks:
self.hm.call_hooks('after_add_entity', session, entity=entity)
- else:
- # minimal meta-data
- session.execute('SET X is E WHERE X eid %(x)s, E name %(name)s',
- {'x': entity.eid, 'name': entity.__regid__})
session.commit(reset_pool)
return eid
except:
@@ -987,22 +1091,30 @@
and index the entity with the full text index
"""
# begin by inserting eid/type/source/extid into the entities table
- hook.set_operation(session, 'neweids', entity.eid,
- hook.CleanupNewEidsCacheOp)
+ hook.CleanupNewEidsCacheOp.get_instance(session).add_data(entity.eid)
self.system_source.add_info(session, entity, source, extid, complete)
- def delete_info(self, session, entity, sourceuri, extid):
+ def delete_info(self, session, entity, sourceuri, extid, scleanup=False):
"""called by external source when some entity known by the system source
has been deleted in the external source
"""
# mark eid as being deleted in session info and setup cache update
# operation
- hook.set_operation(session, 'pendingeids', entity.eid,
- hook.CleanupDeletedEidsCacheOp)
- self._delete_info(session, entity, sourceuri, extid)
+ hook.CleanupDeletedEidsCacheOp.get_instance(session).add_data(entity.eid)
+ self._delete_info(session, entity, sourceuri, extid, scleanup)
- def _delete_info(self, session, entity, sourceuri, extid):
- # attributes=None, relations=None):
+ def delete_info_multi(self, session, entities, sourceuri, extids, scleanup=False):
+ """same as delete_info but accepts a list of entities and
+ extids with the same etype and belonging to the same source
+ """
+ # mark eid as being deleted in session info and setup cache update
+ # operation
+ op = hook.CleanupDeletedEidsCacheOp.get_instance(session)
+ for entity in entities:
+ op.add_data(entity.eid)
+ self._delete_info_multi(session, entities, sourceuri, extids, scleanup)
+
+ def _delete_info(self, session, entity, sourceuri, extid, scleanup=False):
"""delete system information on deletion of an entity:
* delete all remaining relations from/to this entity
* call delete info on the system source which will transfer record from
@@ -1023,9 +1135,49 @@
rql = 'DELETE X %s Y WHERE X eid %%(x)s' % rtype
else:
rql = 'DELETE Y %s X WHERE X eid %%(x)s' % rtype
- session.execute(rql, {'x': eid}, build_descr=False)
+ if scleanup:
+ # source cleaning: only delete relations stored locally
+ rql += ', NOT (Y cw_source S, S name %(source)s)'
+ try:
+ session.execute(rql, {'x': eid, 'source': sourceuri},
+ build_descr=False)
+ except:
+ self.exception('error while cascading delete for entity %s '
+ 'from %s. RQL: %s', entity, sourceuri, rql)
self.system_source.delete_info(session, entity, sourceuri, extid)
+ def _delete_info_multi(self, session, entities, sourceuri, extids, scleanup=False):
+ """same as _delete_info but accepts a list of entities with
+ the same etype and belonging to the same source.
+ """
+ pendingrtypes = session.transaction_data.get('pendingrtypes', ())
+ # delete remaining relations: if user can delete the entity, he can
+ # delete all its relations without security checking
+ assert entities and len(entities) == len(extids)
+ with security_enabled(session, read=False, write=False):
+ eids = [_e.eid for _e in entities]
+ in_eids = ','.join((str(eid) for eid in eids))
+ for rschema, _, role in entities[0].e_schema.relation_definitions():
+ rtype = rschema.type
+ if rtype in schema.VIRTUAL_RTYPES or rtype in pendingrtypes:
+ continue
+ if role == 'subject':
+ # don't skip inlined relations so they are regularly
+ # deleted and so hooks are correctly called
+ rql = 'DELETE X %s Y WHERE X eid IN (%s)' % (rtype, in_eids)
+ else:
+ rql = 'DELETE Y %s X WHERE X eid IN (%s)' % (rtype, in_eids)
+ if scleanup:
+ # source cleaning: only delete relations stored locally
+ rql += ', NOT (Y cw_source S, S name %(source)s)'
+ try:
+ session.execute(rql, {'source': sourceuri},
+ build_descr=False)
+ except:
+ self.exception('error while cascading delete for entity %s '
+ 'from %s. RQL: %s', entities, sourceuri, rql)
+ self.system_source.delete_info_multi(session, entities, sourceuri, extids)
+
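
As an illustration of the batching done by _delete_info_multi (plain strings, no schema objects; the relation names are sample data): one DELETE statement is issued per relation type for the whole group of entities, instead of one per entity.

def batched_delete_rqls(rdefs, eids, skipped=()):
    in_eids = ','.join(str(eid) for eid in eids)
    for rtype, role in rdefs:
        if rtype in skipped:
            continue
        if role == 'subject':
            yield 'DELETE X %s Y WHERE X eid IN (%s)' % (rtype, in_eids)
        else:
            yield 'DELETE Y %s X WHERE X eid IN (%s)' % (rtype, in_eids)

assert list(batched_delete_rqls([('use_email', 'subject'), ('wf_info_for', 'object')],
                                [12, 13, 14])) == [
    'DELETE X use_email Y WHERE X eid IN (12,13,14)',
    'DELETE Y wf_info_for X WHERE X eid IN (12,13,14)']
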
def locate_relation_source(self, session, subject, rtype, object):
subjsource = self.source_from_eid(subject, session)
objsource = self.source_from_eid(object, session)
@@ -1067,15 +1219,16 @@
self._type_source_cache[entity.eid] = (entity.__regid__, suri, extid)
return extid
- def glob_add_entity(self, session, entity):
+ def glob_add_entity(self, session, edited):
"""add an entity to the repository
the entity eid should originally be None and a unique eid is assigned to
the entity instance
"""
- # init edited_attributes before calling before_add_entity hooks
+ entity = edited.entity
entity._cw_is_saved = False # entity has an eid but is not yet saved
- entity.edited_attributes = set(entity.cw_attr_cache) # XXX cw_edited_attributes
+ # init edited_attributes before calling before_add_entity hooks
+ entity.cw_edited = edited
eschema = entity.e_schema
source = self.locate_etype_source(entity.__regid__)
# allocate an eid to the entity before calling hooks
@@ -1083,49 +1236,26 @@
# set caches asap
extid = self.init_entity_caches(session, entity, source)
if server.DEBUG & server.DBG_REPO:
- print 'ADD entity', self, entity.__regid__, entity.eid, entity.cw_attr_cache
+ print 'ADD entity', self, entity.__regid__, entity.eid, edited
relations = []
if source.should_call_hooks:
self.hm.call_hooks('before_add_entity', session, entity=entity)
- # XXX use entity.keys here since edited_attributes is not updated for
- # inline relations XXX not true, right? (see edited_attributes
- # affectation above)
- for attr in entity.cw_attr_cache.iterkeys():
+ for attr in edited.iterkeys():
rschema = eschema.subjrels[attr]
if not rschema.final: # inlined relation
- relations.append((attr, entity[attr]))
- entity._cw_set_defaults()
+ relations.append((attr, edited[attr]))
+ edited.set_defaults()
if session.is_hook_category_activated('integrity'):
- entity._cw_check(creation=True)
+ edited.check(creation=True)
+ prefill_entity_caches(entity, relations)
try:
source.add_entity(session, entity)
except UniqueTogetherError, exc:
- etype, rtypes = exc.args
- problems = {}
- for col in rtypes:
- problems[col] = _('violates unique_together constraints (%s)') % (','.join(rtypes))
- raise ValidationError(entity.eid, problems)
+ userhdlr = session.vreg['adapters'].select(
+ 'IUserFriendlyError', session, entity=entity, exc=exc)
+ userhdlr.raise_user_exception()
self.add_info(session, entity, source, extid, complete=False)
- entity._cw_is_saved = True # entity has an eid and is saved
- # prefill entity relation caches
- for rschema in eschema.subject_relations():
- rtype = str(rschema)
- if rtype in schema.VIRTUAL_RTYPES:
- continue
- if rschema.final:
- entity.setdefault(rtype, None)
- else:
- entity.cw_set_relation_cache(rtype, 'subject',
- session.empty_rset())
- for rschema in eschema.object_relations():
- rtype = str(rschema)
- if rtype in schema.VIRTUAL_RTYPES:
- continue
- entity.cw_set_relation_cache(rtype, 'object', session.empty_rset())
- # set inlined relation cache before call to after_add_entity
- for attr, value in relations:
- session.update_rel_cache_add(entity.eid, attr, value)
- del_existing_rel_if_needed(session, entity.eid, attr, value)
+ edited.saved = entity._cw_is_saved = True
# trigger after_add_entity after after_add_relation
if source.should_call_hooks:
self.hm.call_hooks('after_add_entity', session, entity=entity)
@@ -1137,23 +1267,24 @@
eidfrom=entity.eid, rtype=attr, eidto=value)
return entity.eid
- def glob_update_entity(self, session, entity, edited_attributes):
+ def glob_update_entity(self, session, edited):
"""replace an entity in the repository
the type and the eid of an entity must not be changed
"""
+ entity = edited.entity
if server.DEBUG & server.DBG_REPO:
print 'UPDATE entity', entity.__regid__, entity.eid, \
- entity.cw_attr_cache, edited_attributes
+ entity.cw_attr_cache, edited
hm = self.hm
eschema = entity.e_schema
session.set_entity_cache(entity)
- orig_edited_attributes = getattr(entity, 'edited_attributes', None)
- entity.edited_attributes = edited_attributes
+ orig_edited = getattr(entity, 'cw_edited', None)
+ entity.cw_edited = edited
try:
only_inline_rels, need_fti_update = True, False
relations = []
source = self.source_from_eid(entity.eid, session)
- for attr in list(edited_attributes):
+ for attr in list(edited):
if attr == 'eid':
continue
rschema = eschema.subjrels[attr]
@@ -1166,13 +1297,13 @@
previous_value = entity.related(attr) or None
if previous_value is not None:
previous_value = previous_value[0][0] # got a result set
- if previous_value == entity[attr]:
+ if previous_value == entity.cw_attr_cache[attr]:
previous_value = None
elif source.should_call_hooks:
hm.call_hooks('before_delete_relation', session,
eidfrom=entity.eid, rtype=attr,
eidto=previous_value)
- relations.append((attr, entity[attr], previous_value))
+ relations.append((attr, edited[attr], previous_value))
if source.should_call_hooks:
# call hooks for inlined relations
for attr, value, _t in relations:
@@ -1181,16 +1312,16 @@
if not only_inline_rels:
hm.call_hooks('before_update_entity', session, entity=entity)
if session.is_hook_category_activated('integrity'):
- entity._cw_check()
+ edited.check()
try:
source.update_entity(session, entity)
+ edited.saved = True
except UniqueTogetherError, exc:
etype, rtypes = exc.args
problems = {}
for col in rtypes:
- problems[col] = _('violates unique_together constraints (%s)') % (','.join(rtypes))
+ problems[col] = session._('violates unique_together constraints (%s)') % (','.join(rtypes))
raise ValidationError(entity.eid, problems)
-
self.system_source.update_info(session, entity, need_fti_update)
if source.should_call_hooks:
if not only_inline_rels:
@@ -1212,22 +1343,43 @@
hm.call_hooks('after_add_relation', session,
eidfrom=entity.eid, rtype=attr, eidto=value)
finally:
- if orig_edited_attributes is not None:
- entity.edited_attributes = orig_edited_attributes
+ if orig_edited is not None:
+ entity.cw_edited = orig_edited
+
+
+ def glob_delete_entities(self, session, eids):
+ """delete a list of entities and all related entities from the repository"""
+ data_by_etype_source = {} # values are ([list of eids],
+ # [list of extid],
+ # [list of entities])
+ #
+ # WARNING: the way this dictionary is populated is heavily optimized
+ # and does not use setdefault on purpose. Unless a new release
+ # of the Python interpreter advertises large perf improvements
+ # in setdefault, this should not be changed without profiling.
- def glob_delete_entity(self, session, eid):
- """delete an entity and all related entities from the repository"""
- entity = session.entity_from_eid(eid)
- etype, sourceuri, extid = self.type_and_source_from_eid(eid, session)
- if server.DEBUG & server.DBG_REPO:
- print 'DELETE entity', etype, eid
- source = self.sources_by_uri[sourceuri]
- if source.should_call_hooks:
- self.hm.call_hooks('before_delete_entity', session, entity=entity)
- self._delete_info(session, entity, sourceuri, extid)
- source.delete_entity(session, entity)
- if source.should_call_hooks:
- self.hm.call_hooks('after_delete_entity', session, entity=entity)
+ for eid in eids:
+ etype, sourceuri, extid = self.type_and_source_from_eid(eid, session)
+ entity = session.entity_from_eid(eid, etype)
+ _key = (etype, sourceuri)
+ if _key not in data_by_etype_source:
+ data_by_etype_source[_key] = ([eid], [extid], [entity])
+ else:
+ _data = data_by_etype_source[_key]
+ _data[0].append(eid)
+ _data[1].append(extid)
+ _data[2].append(entity)
+ for (etype, sourceuri), (eids, extids, entities) in data_by_etype_source.iteritems():
+ if server.DEBUG & server.DBG_REPO:
+ print 'DELETE entities', etype, eids
+ #print 'DELETE entities', etype, len(eids)
+ source = self.sources_by_uri[sourceuri]
+ if source.should_call_hooks:
+ self.hm.call_hooks('before_delete_entity', session, entities=entities)
+ self._delete_info_multi(session, entities, sourceuri, extids) # xxx
+ source.delete_entities(session, entities)
+ if source.should_call_hooks:
+ self.hm.call_hooks('after_delete_entity', session, entities=entities)
# don't clear cache here this is done in a hook on commit
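
The grouping strategy used by glob_delete_entities can be shown in isolation (the rows below are made-up data): entities are bucketed by (etype, source) so that hooks and source calls run once per homogeneous batch, and the dictionary is filled without setdefault, as the warning comment explains.

def group_by_etype_source(rows):
    """rows: iterable of (eid, etype, sourceuri, extid) tuples"""
    data = {}   # (etype, sourceuri) -> ([eids], [extids])
    for eid, etype, sourceuri, extid in rows:
        key = (etype, sourceuri)
        if key not in data:
            data[key] = ([eid], [extid])
        else:
            data[key][0].append(eid)
            data[key][1].append(extid)
    return data

rows = [(1, 'CWUser', 'system', None),
        (2, 'CWUser', 'system', None),
        (3, 'Card', 'myexternal', 'ext-3')]
grouped = group_by_etype_source(rows)
assert grouped[('CWUser', 'system')] == ([1, 2], [None, None])
assert grouped[('Card', 'myexternal')] == ([3], ['ext-3'])
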
def glob_add_relation(self, session, subject, rtype, object):
@@ -1268,8 +1420,9 @@
# pyro handling ###########################################################
- def pyro_register(self, host=''):
- """register the repository as a pyro object"""
+ @property
+ @cached
+ def pyro_appid(self):
from logilab.common import pyro_ext as pyro
config = self.config
appid = '%s.%s' % pyro.ns_group_and_id(
@@ -1277,13 +1430,27 @@
config['pyro-ns-group'])
# ensure config['pyro-instance-id'] is a fully qualified pyro name
config['pyro-instance-id'] = appid
- daemon = pyro.register_object(self, appid,
- daemonhost=config['pyro-host'],
- nshost=config['pyro-ns-host'])
- self.info('repository registered as a pyro object %s', appid)
+ return appid
+
+ def pyro_register(self, host=''):
+ """register the repository as a pyro object"""
+ from logilab.common import pyro_ext as pyro
+ daemon = pyro.register_object(self, self.pyro_appid,
+ daemonhost=self.config['pyro-host'],
+ nshost=self.config['pyro-ns-host'])
+ self.info('repository registered as a pyro object %s', self.pyro_appid)
self.pyro_registered = True
+ # register a looping task to regularly ensure we're still registered
+ # with the pyro name server
+ self.looping_task(60*10, self._ensure_pyro_ns)
return daemon
+ def _ensure_pyro_ns(self):
+ from logilab.common import pyro_ext as pyro
+ pyro.ns_reregister(self.pyro_appid, nshost=self.config['pyro-ns-host'])
+ self.info('repository re-registered as a pyro object %s',
+ self.pyro_appid)
+
# multi-sources planner helpers ###########################################
@cached
diff -r 48f468f33704 -r e4580e5f0703 server/rqlannotation.py
--- a/server/rqlannotation.py Fri Dec 10 12:17:18 2010 +0100
+++ b/server/rqlannotation.py Fri Mar 11 09:46:45 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -135,7 +135,12 @@
# priority should be given to relation which are not in inner queries
# (eg exists)
try:
- stinfo['principal'] = _select_principal(var.scope, joins)
+ stinfo['principal'] = principal = _select_principal(var.scope, joins)
+ if getrschema(principal.r_type).inlined:
+ # the scope of the lhs variable must be the same as or enclose the
+ # rhs variable's scope (since it's retrieved from lhs's table)
+ sstinfo = principal.children[0].variable.stinfo
+ sstinfo['scope'] = common_parent(sstinfo['scope'], stinfo['scope']).scope
except CantSelectPrincipal:
stinfo['invariant'] = False
rqlst.need_distinct = need_distinct
@@ -195,7 +200,7 @@
for rel in sorted(relations, key=lambda x: (x.children[0].name, x.r_type)):
# only equality relation with a variable as rhs may be principal
if rel.operator() not in ('=', 'IS') \
- or not isinstance(rel.children[1].children[0], VariableRef):
+ or not isinstance(rel.children[1].children[0], VariableRef) or rel.neged(strict=True):
continue
if rel.scope is rel.stmt:
return rel
diff -r 48f468f33704 -r e4580e5f0703 server/schemaserial.py
--- a/server/schemaserial.py Fri Dec 10 12:17:18 2010 +0100
+++ b/server/schemaserial.py Fri Mar 11 09:46:45 2011 +0100
@@ -235,7 +235,14 @@
uniquecstreid, eeid, releid = values
eschema = schema.schema_by_eid(eeid)
relations = unique_togethers.setdefault(uniquecstreid, (eschema, []))
- relations[1].append(ertidx[releid].rtype.type)
+ rel = ertidx[releid]
+ if isinstance(rel, schemamod.RelationDefinitionSchema):
+ # not yet migrated 3.9 database ('relations' target type changed
+ # to CWRType in 3.10)
+ rtype = rel.rtype.type
+ else:
+ rtype = str(rel)
+ relations[1].append(rtype)
for eschema, unique_together in unique_togethers.itervalues():
eschema._unique_together.append(tuple(sorted(unique_together)))
schema.infer_specialization_rules()
@@ -355,6 +362,7 @@
for eschema in eschemas:
for unique_together in eschema._unique_together:
execschemarql(execute, eschema, [uniquetogether2rql(eschema, unique_together)])
+ # serialize yams inheritance relationships
for rql, kwargs in specialize2rql(schema):
execute(rql, kwargs, build_descr=False)
if pb is not None:
@@ -417,23 +425,17 @@
restrictions = []
substs = {}
for i, name in enumerate(unique_together):
- rschema = eschema.rdef(name)
- var = 'R%d' % i
+ rschema = eschema.schema.rschema(name)
rtype = 'T%d' % i
- substs[rtype] = rschema.rtype.type
- relations.append('C relations %s' % var)
- restrictions.append('%(var)s from_entity X, '
- '%(var)s relation_type %(rtype)s, '
- '%(rtype)s name %%(%(rtype)s)s' \
- % {'var': var,
- 'rtype':rtype})
+ substs[rtype] = rschema.type
+ relations.append('C relations %s' % rtype)
+ restrictions.append('%(rtype)s name %%(%(rtype)s)s' % {'rtype': rtype})
relations = ', '.join(relations)
restrictions = ', '.join(restrictions)
rql = ('INSERT CWUniqueTogetherConstraint C: '
' C constraint_of X, %s '
'WHERE '
- ' X eid %%(x)s, %s' )
-
+ ' X eid %%(x)s, %s')
return rql % (relations, restrictions), substs
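
A standalone sketch of the simplified RQL now produced for unique-together constraints (uniquetogether_rql and the attribute names below are illustrative only; the real function takes schema objects): each column maps straight to a CWRType name.

def uniquetogether_rql(unique_together):
    relations, restrictions, substs = [], [], {}
    for i, name in enumerate(unique_together):
        rtype = 'T%d' % i
        substs[rtype] = name
        relations.append('C relations %s' % rtype)
        restrictions.append('%(rtype)s name %%(%(rtype)s)s' % {'rtype': rtype})
    rql = ('INSERT CWUniqueTogetherConstraint C: '
           ' C constraint_of X, %s '
           'WHERE '
           ' X eid %%(x)s, %s')
    return rql % (', '.join(relations), ', '.join(restrictions)), substs

rql, substs = uniquetogether_rql(('nom', 'prenom'))
assert 'C relations T0, C relations T1' in rql
assert 'T0 name %(T0)s, T1 name %(T1)s' in rql
assert substs == {'T0': 'nom', 'T1': 'prenom'}
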
diff -r 48f468f33704 -r e4580e5f0703 server/serverconfig.py
--- a/server/serverconfig.py Fri Dec 10 12:17:18 2010 +0100
+++ b/server/serverconfig.py Fri Mar 11 09:46:45 2011 +0100
@@ -19,14 +19,15 @@
__docformat__ = "restructuredtext en"
+import sys
from os.path import join, exists
+from StringIO import StringIO
-from logilab.common.configuration import REQUIRED, Method, Configuration, \
- ini_format_section
+import logilab.common.configuration as lgconfig
from logilab.common.decorators import wproperty, cached
from cubicweb.toolsutils import read_config, restrict_perms_to_user
-from cubicweb.cwconfig import CubicWebConfiguration, merge_options
+from cubicweb.cwconfig import CONFIGURATIONS, CubicWebConfiguration, merge_options
from cubicweb.server import SOURCE_TYPES
@@ -38,13 +39,13 @@
'level': 0,
}),
('password', {'type' : 'password',
- 'default': REQUIRED,
+ 'default': lgconfig.REQUIRED,
'help': "cubicweb manager account's password",
'level': 0,
}),
)
-class SourceConfiguration(Configuration):
+class SourceConfiguration(lgconfig.Configuration):
def __init__(self, appconfig, options):
self.appconfig = appconfig # has to be done before super call
super(SourceConfiguration, self).__init__(options=options)
@@ -54,54 +55,36 @@
return self.appconfig.appid
def input_option(self, option, optdict, inputlevel):
- if self['db-driver'] == 'sqlite':
- if option in ('db-user', 'db-password'):
- return
- if option == 'db-name':
- optdict = optdict.copy()
- optdict['help'] = 'path to the sqlite database'
- optdict['default'] = join(self.appconfig.appdatahome,
- self.appconfig.appid + '.sqlite')
+ try:
+ dbdriver = self['db-driver']
+ except lgconfig.OptionError:
+ pass
+ else:
+ if dbdriver == 'sqlite':
+ if option in ('db-user', 'db-password'):
+ return
+ if option == 'db-name':
+ optdict = optdict.copy()
+ optdict['help'] = 'path to the sqlite database'
+ optdict['default'] = join(self.appconfig.appdatahome,
+ self.appconfig.appid + '.sqlite')
super(SourceConfiguration, self).input_option(option, optdict, inputlevel)
-def generate_sources_file(appconfig, sourcesfile, sourcescfg, keys=None):
- """serialize repository'sources configuration into a INI like file
+
+def ask_source_config(appconfig, type, inputlevel=0):
+ options = SOURCE_TYPES[type].options
+ sconfig = SourceConfiguration(appconfig, options=options)
+ sconfig.input_config(inputlevel=inputlevel)
+ return sconfig
- the `keys` parameter may be used to sort sections
- """
- if keys is None:
- keys = sourcescfg.keys()
- else:
- for key in sourcescfg:
- if not key in keys:
- keys.append(key)
- stream = open(sourcesfile, 'w')
- for uri in keys:
- sconfig = sourcescfg[uri]
- if isinstance(sconfig, dict):
- # get a Configuration object
- if uri == 'admin':
- options = USER_OPTIONS
- else:
- options = SOURCE_TYPES[sconfig['adapter']].options
- _sconfig = SourceConfiguration(appconfig, options=options)
- for attr, val in sconfig.items():
- if attr == 'uri':
- continue
- if attr == 'adapter':
- _sconfig.adapter = val
- else:
- _sconfig.set_option(attr, val)
- sconfig = _sconfig
- optsbysect = list(sconfig.options_by_section())
- assert len(optsbysect) == 1, 'all options for a source should be in the same group'
- ini_format_section(stream, uri, optsbysect[0][1])
- if hasattr(sconfig, 'adapter'):
- print >> stream
- print >> stream, '# adapter for this source (YOU SHOULD NOT CHANGE THIS)'
- print >> stream, 'adapter=%s' % sconfig.adapter
- print >> stream
+def generate_source_config(sconfig, encoding=sys.stdin.encoding):
+ """serialize a repository source configuration as text"""
+ stream = StringIO()
+ optsbysect = list(sconfig.options_by_section())
+ assert len(optsbysect) == 1, 'all options for a source should be in the same group'
+ lgconfig.ini_format(stream, optsbysect[0][1], encoding)
+ return stream.getvalue()
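
For reference, the per-section output format that write_sources_file now produces looks like the following (generate_ini_section is a plain-stdlib stand-in for the logilab.common ini_format call, shown only to illustrate the '[section]' plus option block layout):

from io import StringIO

def generate_ini_section(name, options):
    stream = StringIO()
    stream.write(u'[%s]\n' % name)
    for option, value in options:
        stream.write(u'%s=%s\n' % (option, value))
    return stream.getvalue()

assert generate_ini_section('system', [('db-driver', 'postgres'),
                                       ('db-name', 'mydb')]) == \
       u'[system]\ndb-driver=postgres\ndb-name=mydb\n'
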
class ServerConfiguration(CubicWebConfiguration):
@@ -121,7 +104,7 @@
}),
('pid-file',
{'type' : 'string',
- 'default': Method('default_pid_file'),
+ 'default': lgconfig.Method('default_pid_file'),
'help': 'repository\'s pid file',
'group': 'main', 'level': 2,
}),
@@ -132,10 +115,16 @@
the repository rather than the user running the command',
'group': 'main', 'level': (CubicWebConfiguration.mode == 'installed') and 0 or 1,
}),
- ('session-time',
+ ('cleanup-session-time',
{'type' : 'time',
- 'default': '30min',
- 'help': 'session expiration time, default to 30 minutes',
+ 'default': '24h',
+ 'help': 'duration of inactivity after which a session '
+ 'will be closed, to limit memory consumption (avoids sessions that '
+ 'never expire and cause memory leaks when http-session-time is 0, or '
+ 'when bad clients never close their connection). '
+ 'Note that even if http-session-time is 0 and the user never '
+ 'closes his browser, he will have to reauthenticate after this '
+ 'period of inactivity. Defaults to 24h.',
'group': 'main', 'level': 3,
}),
('connections-pool-size',
@@ -276,16 +265,48 @@
"""
return self.read_sources_file()
- def source_enabled(self, uri):
- return not self.enabled_sources or uri in self.enabled_sources
+ def source_enabled(self, source):
+ if self.sources_mode is not None:
+ if 'migration' in self.sources_mode:
+ assert len(self.sources_mode) == 1
+ if source.connect_for_migration:
+ return True
+ print 'not connecting to source', source.uri, 'during migration'
+ return False
+ if 'all' in self.sources_mode:
+ assert len(self.sources_mode) == 1
+ return True
+ return source.uri in self.sources_mode
+ if self.quick_start:
+ return False
+ return (not source.disabled and (
+ not self.enabled_sources or source.uri in self.enabled_sources))
def write_sources_file(self, sourcescfg):
+ """serialize the repository's sources configuration into an INI-like file"""
sourcesfile = self.sources_file()
if exists(sourcesfile):
import shutil
shutil.copy(sourcesfile, sourcesfile + '.bak')
- generate_sources_file(self, sourcesfile, sourcescfg,
- ['admin', 'system'])
+ stream = open(sourcesfile, 'w')
+ for section in ('admin', 'system'):
+ sconfig = sourcescfg[section]
+ if isinstance(sconfig, dict):
+ # get a Configuration object
+ assert section == 'system'
+ _sconfig = SourceConfiguration(
+ self, options=SOURCE_TYPES['native'].options)
+ for attr, val in sconfig.items():
+ try:
+ _sconfig.set_option(attr, val)
+ except lgconfig.OptionError:
+ # skip adapter, may be present on pre 3.10 instances
+ if attr != 'adapter':
+ self.error('skip unknown option %s in sources file', attr)
+ sconfig = _sconfig
+ print >> stream, '[%s]' % section
+ print >> stream, generate_source_config(sconfig)
+ print >> stream
restrict_perms_to_user(sourcesfile)
def pyro_enabled(self):
@@ -312,27 +333,9 @@
schema.name = 'bootstrap'
return schema
+ sources_mode = None
def set_sources_mode(self, sources):
- if 'migration' in sources:
- from cubicweb.server.sources import source_adapter
- assert len(sources) == 1
- enabled_sources = []
- for uri, config in self.sources().iteritems():
- if uri == 'admin':
- continue
- if source_adapter(config).connect_for_migration:
- enabled_sources.append(uri)
- else:
- print 'not connecting to source', uri, 'during migration'
- elif 'all' in sources:
- assert len(sources) == 1
- enabled_sources = None
- else:
- known_sources = self.sources()
- for uri in sources:
- assert uri in known_sources, uri
- enabled_sources = sources
- self.enabled_sources = enabled_sources
+ self.sources_mode = sources
def migration_handler(self, schema=None, interactive=True,
cnx=None, repo=None, connect=True, verbosity=None):
@@ -343,3 +346,6 @@
return ServerMigrationHelper(self, schema, interactive=interactive,
cnx=cnx, repo=repo, connect=connect,
verbosity=verbosity)
+
+
+CONFIGURATIONS.append(ServerConfiguration)
diff -r 48f468f33704 -r e4580e5f0703 server/serverctl.py
--- a/server/serverctl.py Fri Dec 10 12:17:18 2010 +0100
+++ b/server/serverctl.py Fri Mar 11 09:46:45 2011 +0100
@@ -25,6 +25,7 @@
import sys
import os
+from logilab.common import nullobject
from logilab.common.configuration import Configuration
from logilab.common.shellutils import ASK
@@ -32,8 +33,9 @@
from cubicweb.toolsutils import Command, CommandHandler, underline_title
from cubicweb.cwctl import CWCTL
from cubicweb.server import SOURCE_TYPES
-from cubicweb.server.serverconfig import (USER_OPTIONS, ServerConfiguration,
- SourceConfiguration)
+from cubicweb.server.serverconfig import (
+ USER_OPTIONS, ServerConfiguration, SourceConfiguration,
+ ask_source_config, generate_source_config)
# utility functions ###########################################################
@@ -55,16 +57,14 @@
else:
print dbname,
if dbhelper.users_support:
- if not verbose or (not special_privs and source.get('db-user')):
+ if not special_privs and source.get('db-user'):
user = source['db-user']
if verbose:
print 'as', user
- if source.get('db-password'):
- password = source['db-password']
- else:
- password = getpass('password: ')
+ password = source.get('db-password')
else:
- print
+ if verbose:
+ print
if special_privs:
print 'WARNING'
print ('the user will need the following special access rights '
@@ -73,9 +73,9 @@
print
default_user = source.get('db-user', os.environ.get('USER', ''))
user = raw_input('Connect as user ? [%r]: ' % default_user)
- user = user or default_user
- if user == source.get('db-user') and source.get('db-password'):
- password = source['db-password']
+ user = user.strip() or default_user
+ if user == source.get('db-user'):
+ password = source.get('db-password')
else:
password = getpass('password: ')
else:
@@ -107,22 +107,18 @@
return source_cnx(source, system_db, special_privs=special_privs, verbose=verbose)
return source_cnx(source, special_privs=special_privs, verbose=verbose)
-def _db_sys_cnx(source, what, db=None, user=None, verbose=True):
- """return a connection on the RDMS system table (to create/drop a user
- or a database
+def _db_sys_cnx(source, special_privs, verbose=True):
+ """return a connection on the RDBMS system database (to create/drop a user or a
+ database)
"""
import logilab.common as lgp
from logilab.database import get_db_helper
lgp.USE_MX_DATETIME = False
- special_privs = ''
driver = source['db-driver']
helper = get_db_helper(driver)
- if user is not None and helper.users_support:
- special_privs += '%s USER' % what
- if db is not None:
- special_privs += ' %s DATABASE' % what
# connect on the dbms system base to create our base
- cnx = system_source_cnx(source, True, special_privs=special_privs, verbose=verbose)
+ cnx = system_source_cnx(source, True, special_privs=special_privs,
+ verbose=verbose)
# disable autocommit (isolation_level(1)) because DROP and
# CREATE DATABASE can't be executed in a transaction
try:
@@ -134,7 +130,7 @@
def repo_cnx(config):
"""return a in-memory repository and a db api connection it"""
- from cubicweb.dbapi import in_memory_cnx
+ from cubicweb.dbapi import in_memory_repo_cnx
from cubicweb.server.utils import manager_userpasswd
try:
login = config.sources()['admin']['login']
@@ -143,7 +139,7 @@
login, pwd = manager_userpasswd()
while True:
try:
- return in_memory_cnx(config, login, password=pwd)
+ return in_memory_repo_cnx(config, login, password=pwd)
except AuthenticationError:
print '-> Error: wrong user/password.'
# reset cubes else we'll have an assertion error on next retry
@@ -161,7 +157,6 @@
"""create an instance by copying files from the given cube and by asking
information necessary to build required configuration files
"""
- from cubicweb.server.utils import ask_source_config
config = self.config
print underline_title('Configuring the repository')
config.input_config('email', inputlevel)
@@ -176,37 +171,9 @@
# defs (in native.py)
sconfig = SourceConfiguration(config,
options=SOURCE_TYPES['native'].options)
- sconfig.adapter = 'native'
sconfig.input_config(inputlevel=inputlevel)
sourcescfg = {'system': sconfig}
- for cube in cubes:
- # if a source is named as the cube containing it, we need the
- # source to use the cube, so add it.
- if cube in SOURCE_TYPES:
- sourcescfg[cube] = ask_source_config(cube, inputlevel)
print
- while ASK.confirm('Enter another source ?', default_is_yes=False):
- available = sorted(stype for stype in SOURCE_TYPES
- if not stype in cubes)
- while True:
- sourcetype = raw_input('source type (%s): ' % ', '.join(available))
- if sourcetype in available:
- break
- print '-> unknown source type, use one of the available types.'
- while True:
- sourceuri = raw_input('source identifier (a unique name used to tell sources apart): ').strip()
- if sourceuri != 'admin' and sourceuri not in sourcescfg:
- break
- print '-> uri already used, choose another one.'
- sourcescfg[sourceuri] = ask_source_config(sourcetype, inputlevel)
- sourcemodule = SOURCE_TYPES[sourcetype].module
- if not sourcemodule.startswith('cubicweb.'):
- # module names look like cubes.mycube.themodule
- sourcecube = SOURCE_TYPES[sourcetype].module.split('.', 2)[1]
- # if the source adapter is coming from an external component,
- # ensure it's specified in used cubes
- if not sourcecube in cubes:
- cubes.append(sourcecube)
sconfig = Configuration(options=USER_OPTIONS)
sconfig.input_config(inputlevel=inputlevel)
sourcescfg['admin'] = sconfig
@@ -222,6 +189,16 @@
print ('-> nevermind, you can do it later with '
'"cubicweb-ctl db-create %s".' % self.config.appid)
+ERROR = nullobject()
+
+def confirm_on_error_or_die(msg, func, *args, **kwargs):
+ try:
+ return func(*args, **kwargs)
+ except Exception, ex:
+ print 'ERROR', ex
+ if not ASK.confirm('An error occurred while %s. Continue anyway?' % msg):
+ raise ExecutionError(str(ex))
+ return ERROR
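
The pattern confirm_on_error_or_die implements can be reproduced with nothing but the standard library (the sentinel, the stubbed confirm callback and RuntimeError below stand in for logilab.common's nullobject, ASK.confirm and ExecutionError):

ERROR = object()

def confirm_on_error_or_die(msg, func, confirm, *args, **kwargs):
    try:
        return func(*args, **kwargs)
    except Exception as ex:
        print('ERROR %s' % ex)
        if not confirm('An error occurred while %s. Continue anyway?' % msg):
            raise RuntimeError(str(ex))
        return ERROR

result = confirm_on_error_or_die('removing a missing file',
                                 lambda: open('/no/such/file'),
                                 confirm=lambda question: True)
assert result is ERROR   # callers test the sentinel and skip follow-up work
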
class RepositoryDeleteHandler(CommandHandler):
cmdname = 'delete'
@@ -235,19 +212,29 @@
helper = get_db_helper(source['db-driver'])
if ASK.confirm('Delete database %s ?' % dbname):
if source['db-driver'] == 'sqlite':
- os.unlink(source['db-name'])
+ if confirm_on_error_or_die(
+ 'deleting database file %s' % dbname,
+ os.unlink, source['db-name']) is not ERROR:
+ print '-> database %s dropped.' % dbname
return
user = source['db-user'] or None
- cnx = _db_sys_cnx(source, 'DROP DATABASE', user=user)
+ cnx = confirm_on_error_or_die('connecting to database %s' % dbname,
+ _db_sys_cnx, source, 'DROP DATABASE')
+ if cnx is ERROR:
+ return
cursor = cnx.cursor()
try:
- cursor.execute('DROP DATABASE %s' % dbname)
- print '-> database %s dropped.' % dbname
+ if confirm_on_error_or_die(
+ 'dropping database %s' % dbname,
+ cursor.execute, 'DROP DATABASE "%s"' % dbname) is not ERROR:
+ print '-> database %s dropped.' % dbname
# XXX should check we are not connected as user
if user and helper.users_support and \
ASK.confirm('Delete user %s ?' % user, default_is_yes=False):
- cursor.execute('DROP USER %s' % user)
- print '-> user %s dropped.' % user
+ if confirm_on_error_or_die(
+ 'dropping user %s' % user,
+ cursor.execute, 'DROP USER %s' % user) is not ERROR:
+ print '-> user %s dropped.' % user
cnx.commit()
except:
cnx.rollback()
@@ -265,6 +252,7 @@
command.append('--loglevel %s' % config['log-threshold'].lower())
command.append(config.appid)
os.system(' '.join(command))
+ return 1
class RepositoryStopHandler(CommandHandler):
@@ -294,7 +282,7 @@
You will be prompted for a login / password to use to connect to
the system database. The given user should have almost all rights
- on the database (ie a super user on the dbms allowed to create
+ on the database (ie a super user on the DBMS allowed to create
database, users, languages...).
@@ -340,14 +328,15 @@
elif self.config.create_db:
print '\n'+underline_title('Creating the system database')
# connect on the dbms system base to create our base
- dbcnx = _db_sys_cnx(source, 'CREATE DATABASE and / or USER', verbose=verbose)
+ dbcnx = _db_sys_cnx(source, 'CREATE/DROP DATABASE and / or USER',
+ verbose=verbose)
cursor = dbcnx.cursor()
try:
if helper.users_support:
user = source['db-user']
if not helper.user_exists(cursor, user) and (automatic or \
ASK.confirm('Create db user %s ?' % user, default_is_yes=False)):
- helper.create_user(source['db-user'], source['db-password'])
+ helper.create_user(source['db-user'], source.get('db-password'))
print '-> user %s created.' % user
if dbname in helper.list_databases(cursor):
if automatic or ASK.confirm('Database %s already exists -- do you want to drop it ?' % dbname):
@@ -360,7 +349,8 @@
except:
dbcnx.rollback()
raise
- cnx = system_source_cnx(source, special_privs='LANGUAGE C', verbose=verbose)
+ cnx = system_source_cnx(source, special_privs='CREATE LANGUAGE',
+ verbose=verbose)
cursor = cnx.cursor()
helper.init_fti_extensions(cursor)
# postgres specific stuff
@@ -383,9 +373,8 @@
class InitInstanceCommand(Command):
"""Initialize the system database of an instance (run after 'db-create').
- You will be prompted for a login / password to use to connect to
- the system database. The given user should have the create tables,
- and grant permissions.
+    Notice this will be done using the user specified in the sources file, so this
+    user should have permissions to create tables and grant rights on the database.
the identifier of the instance to initialize.
@@ -399,6 +388,10 @@
'default': False,
'help': 'insert drop statements to remove previously existant \
tables, indexes... (no by default)'}),
+ ('config-level',
+ {'short': 'l', 'type': 'int', 'default': 1,
+ 'help': 'level threshold for questions asked when configuring another source'
+ }),
)
def run(self, args):
@@ -422,6 +415,68 @@
'the %s file. Resolve this first (error: %s).'
% (config.sources_file(), str(ex).strip()))
init_repository(config, drop=self.config.drop)
+ while ASK.confirm('Enter another source ?', default_is_yes=False):
+            CWCTL.run(['add-source', '--config-level',
+                       str(self.config.config_level), config.appid])
+
+
+class AddSourceCommand(Command):
+ """Add a data source to an instance.
+
+    <instance>
+      the identifier of the instance to add a source to.
+ """
+ name = 'add-source'
+    arguments = '<instance>'
+ min_args = max_args = 1
+ options = (
+ ('config-level',
+ {'short': 'l', 'type': 'int', 'default': 1,
+ 'help': 'level threshold for questions asked when configuring another source'
+ }),
+ )
+
+ def run(self, args):
+ appid = args[0]
+ config = ServerConfiguration.config_for(appid)
+ config.quick_start = True
+ repo, cnx = repo_cnx(config)
+ req = cnx.request()
+ used = set(n for n, in req.execute('Any SN WHERE S is CWSource, S name SN'))
+ cubes = repo.get_cubes()
+ while True:
+ type = raw_input('source type (%s): '
+ % ', '.join(sorted(SOURCE_TYPES)))
+ if type not in SOURCE_TYPES:
+ print '-> unknown source type, use one of the available types.'
+ continue
+ sourcemodule = SOURCE_TYPES[type].module
+ if not sourcemodule.startswith('cubicweb.'):
+ # module names look like cubes.mycube.themodule
+ sourcecube = SOURCE_TYPES[type].module.split('.', 2)[1]
+ # if the source adapter is coming from an external component,
+ # ensure it's specified in used cubes
+ if not sourcecube in cubes:
+                        print ('-> this source type requires the %s cube, which is '
+                               'not used by the instance.' % sourcecube)
+ continue
+ break
+ while True:
+ sourceuri = raw_input('source identifier (a unique name used to '
+ 'tell sources apart): ').strip()
+ if not sourceuri:
+ print '-> mandatory.'
+ else:
+ sourceuri = unicode(sourceuri, sys.stdin.encoding)
+ if sourceuri in used:
+ print '-> uri already used, choose another one.'
+ else:
+ break
+ # XXX configurable inputlevel
+ sconfig = ask_source_config(config, type, inputlevel=self.config.config_level)
+ cfgstr = unicode(generate_source_config(sconfig), sys.stdin.encoding)
+ req.create_entity('CWSource', name=sourceuri,
+ type=unicode(type), config=cfgstr)
+ cnx.commit()
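For reference, a hedged usage sketch of the command defined above; 'myinstance' is a placeholder application id, not something defined by this patch. From the shell this corresponds to running: cubicweb-ctl add-source --config-level 2 myinstance. Programmatically it can be driven the same way InitInstanceCommand does above:

    # assumes CWCTL is available as in the rest of this module
    CWCTL.run(['add-source', '--config-level', '2', 'myinstance'])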
class GrantUserOnInstanceCommand(Command):
@@ -486,6 +541,9 @@
print '-> Error: could not get cubicweb administrator login.'
sys.exit(1)
cnx = source_cnx(sourcescfg['system'])
+ driver = sourcescfg['system']['db-driver']
+ from logilab.database import get_db_helper
+ dbhelper = get_db_helper(driver)
cursor = cnx.cursor()
# check admin exists
cursor.execute("SELECT * FROM cw_CWUser WHERE cw_login=%(l)s",
@@ -501,7 +559,7 @@
passwdmsg='new password for %s' % adminlogin)
try:
cursor.execute("UPDATE cw_CWUser SET cw_upassword=%(p)s WHERE cw_login=%(l)s",
- {'p': buffer(crypt_password(passwd)), 'l': adminlogin})
+ {'p': dbhelper.binary_value(crypt_password(passwd)), 'l': adminlogin})
sconfig = Configuration(options=USER_OPTIONS)
sconfig['login'] = adminlogin
sconfig['password'] = passwd
@@ -519,7 +577,7 @@
class StartRepositoryCommand(Command):
- """Start an CubicWeb RQL server for a given instance.
+ """Start a CubicWeb RQL server for a given instance.
The server will be accessible through pyro
@@ -562,7 +620,7 @@
# go ! (don't daemonize in debug mode)
if not os.path.exists(piddir):
os.makedirs(piddir)
- if not debug and daemonize(pidfile):
+ if not debug and daemonize(pidfile, umask=config['umask']):
return
uid = config['uid']
if uid is not None:
@@ -791,9 +849,12 @@
options = (
('checks',
{'short': 'c', 'type' : 'csv', 'metavar' : '',
- 'default' : ('entities', 'relations', 'metadata', 'schema', 'text_index'),
+ 'default' : ('entities', 'relations',
+ 'mandatory_relations', 'mandatory_attributes',
+ 'metadata', 'schema', 'text_index'),
'help': 'Comma separated list of check to run. By default run all \
-checks, i.e. entities, relations, text_index and metadata.'}
+checks, i.e. entities, relations, mandatory_relations, mandatory_attributes, \
+metadata, text_index and schema.'}
),
('autofix',
@@ -897,7 +958,7 @@
GrantUserOnInstanceCommand, ResetAdminPasswordCommand,
StartRepositoryCommand,
DBDumpCommand, DBRestoreCommand, DBCopyCommand,
- CheckRepositoryCommand, RebuildFTICommand,
+ AddSourceCommand, CheckRepositoryCommand, RebuildFTICommand,
SynchronizeInstanceSchemaCommand,
CheckMappingCommand,
):
diff -r 48f468f33704 -r e4580e5f0703 server/session.py
--- a/server/session.py Fri Dec 10 12:17:18 2010 +0100
+++ b/server/session.py Fri Mar 11 09:46:45 2011 +0100
@@ -28,6 +28,7 @@
from warnings import warn
from logilab.common.deprecation import deprecated
+from rql import CoercionError
from rql.nodes import ETYPE_PYOBJ_MAP, etype_from_pyobj
from yams import BASE_TYPES
@@ -46,6 +47,7 @@
# anyway in the later case
NO_UNDO_TYPES.add('is')
NO_UNDO_TYPES.add('is_instance_of')
+NO_UNDO_TYPES.add('cw_source')
# XXX rememberme,forgotpwd,apycot,vcsfile
def _make_description(selected, args, solution):
@@ -64,6 +66,14 @@
If mode is session.`HOOKS_ALLOW_ALL`, given hooks categories will
be disabled.
+
+ .. sourcecode:: python
+
+ with hooks_control(self.session, self.session.HOOKS_ALLOW_ALL, 'integrity'):
+ # ... do stuff with all but 'integrity' hooks activated
+
+ with hooks_control(self.session, self.session.HOOKS_DENY_ALL, 'integrity'):
+ # ... do stuff with none but 'integrity' hooks activated
"""
def __init__(self, session, mode, *categories):
self.session = session
@@ -618,16 +628,20 @@
# shared data handling ###################################################
- def get_shared_data(self, key, default=None, pop=False):
+ def get_shared_data(self, key, default=None, pop=False, txdata=False):
"""return value associated to `key` in session data"""
- if pop:
- return self.data.pop(key, default)
+ if txdata:
+ data = self.transaction_data
else:
- return self.data.get(key, default)
+ data = self.data
+ if pop:
+ return data.pop(key, default)
+ else:
+ return data.get(key, default)
- def set_shared_data(self, key, value, querydata=False):
+ def set_shared_data(self, key, value, txdata=False):
"""set value associated to `key` in session data"""
- if querydata:
+ if txdata:
self.transaction_data[key] = value
else:
self.data[key] = value
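A short sketch of the txdata switch added above; the key name is purely illustrative and 'session' is assumed to be a repository Session:

    # stash a value for the duration of the current transaction only
    session.set_shared_data('my.counter', 1, txdata=True)
    # later in the same transaction, read it back and discard it
    value = session.get_shared_data('my.counter', default=0,
                                    pop=True, txdata=True)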
@@ -738,51 +752,50 @@
try:
# by default, operations are executed with security turned off
with security_enabled(self, False, False):
- for trstate in ('precommit', 'commit'):
- processed = []
- self.commit_state = trstate
- try:
- while self.pending_operations:
- operation = self.pending_operations.pop(0)
- operation.processed = trstate
- processed.append(operation)
- operation.handle_event('%s_event' % trstate)
- self.pending_operations[:] = processed
- self.debug('%s session %s done', trstate, self.id)
- except:
- # if error on [pre]commit:
- #
- # * set .failed = True on the operation causing the failure
- # * call revert_event on processed operations
- # * call rollback_event on *all* operations
- #
- # that seems more natural than not calling rollback_event
- # for processed operations, and allow generic rollback
- # instead of having to implements rollback, revertprecommit
- # and revertcommit, that will be enough in mont case.
- operation.failed = True
- for operation in reversed(processed):
- try:
- operation.handle_event('revert%s_event' % trstate)
- except:
- self.critical('error while reverting %sing', trstate,
- exc_info=True)
- # XXX use slice notation since self.pending_operations is a
- # read-only property.
- self.pending_operations[:] = processed + self.pending_operations
- self.rollback(reset_pool)
- raise
+ processed = []
+ self.commit_state = 'precommit'
+ try:
+ while self.pending_operations:
+ operation = self.pending_operations.pop(0)
+ operation.processed = 'precommit'
+ processed.append(operation)
+ operation.handle_event('precommit_event')
+ self.pending_operations[:] = processed
+ self.debug('precommit session %s done', self.id)
+ except:
+ # if error on [pre]commit:
+ #
+ # * set .failed = True on the operation causing the failure
+ # * call revert_event on processed operations
+ # * call rollback_event on *all* operations
+ #
+ # that seems more natural than not calling rollback_event
+ # for processed operations, and allow generic rollback
+                # instead of having to implement rollback, revertprecommit
+                # and revertcommit, which will be enough in most cases.
+ operation.failed = True
+ for operation in reversed(processed):
+ try:
+ operation.handle_event('revertprecommit_event')
+ except:
+ self.critical('error while reverting precommit',
+ exc_info=True)
+ # XXX use slice notation since self.pending_operations is a
+ # read-only property.
+ self.pending_operations[:] = processed + self.pending_operations
+ self.rollback(reset_pool)
+ raise
self.pool.commit()
- self.commit_state = trstate = 'postcommit'
+ self.commit_state = 'postcommit'
while self.pending_operations:
operation = self.pending_operations.pop(0)
- operation.processed = trstate
+ operation.processed = 'postcommit'
try:
- operation.handle_event('%s_event' % trstate)
+ operation.handle_event('postcommit_event')
except:
- self.critical('error while %sing', trstate,
+                    self.critical('error during postcommit',
exc_info=sys.exc_info())
- self.debug('%s session %s done', trstate, self.id)
+ self.debug('postcommit session %s done', self.id)
return self.transaction_uuid(set=False)
finally:
self._touch()
@@ -841,6 +854,10 @@
del self.__threaddata
del self._tx_data
+ @property
+ def closed(self):
+ return not hasattr(self, '_tx_data')
+
# transaction data/operations management ##################################
@property
diff -r 48f468f33704 -r e4580e5f0703 server/sources/__init__.py
--- a/server/sources/__init__.py Fri Dec 10 12:17:18 2010 +0100
+++ b/server/sources/__init__.py Fri Mar 11 09:46:45 2011 +0100
@@ -22,10 +22,12 @@
from os.path import join, splitext
from datetime import datetime, timedelta
from logging import getLogger
+import itertools
from cubicweb import set_log_methods, server
from cubicweb.schema import VIRTUAL_RTYPES
from cubicweb.server.sqlutils import SQL_PREFIX
+from cubicweb.server.ssplanner import EditedEntity
def dbg_st_search(uri, union, varmap, args, cachekey=None, prefix='rql for'):
@@ -54,7 +56,7 @@
def __init__(self, ttl):
# time to live in seconds
if ttl <= 0:
- raise ValueError('TimedCache initialized with a ttl of %ss' % self.ttl.seconds)
+            raise ValueError('TimedCache initialized with a ttl of %ss' % ttl)
self.ttl = timedelta(seconds=ttl)
def __setitem__(self, key, value):
@@ -98,13 +100,18 @@
dont_cross_relations = ()
cross_relations = ()
+ # force deactivation (configuration error for instance)
+ disabled = False
- def __init__(self, repo, appschema, source_config, *args, **kwargs):
+ def __init__(self, repo, source_config, *args, **kwargs):
self.repo = repo
self.uri = source_config['uri']
set_log_methods(self, getLogger('cubicweb.sources.'+self.uri))
- self.set_schema(appschema)
+ self.set_schema(repo.schema)
self.support_relations['identity'] = False
+ self.eid = None
+ self.public_config = source_config.copy()
+ self.remove_sensitive_information(self.public_config)
def init_creating(self):
"""method called by the repository once ready to create a new instance"""
@@ -218,7 +225,7 @@
def extid2eid(self, value, etype, session=None, **kwargs):
return self.repo.extid2eid(self, value, etype, session, **kwargs)
- PUBLIC_KEYS = ('adapter', 'uri')
+ PUBLIC_KEYS = ('type', 'uri')
def remove_sensitive_information(self, sourcedef):
"""remove sensitive information such as login / password from source
definition
@@ -343,6 +350,7 @@
"""
entity = self.repo.vreg['etypes'].etype_class(etype)(session)
entity.eid = eid
+ entity.cw_edited = EditedEntity(entity)
return entity
def after_entity_insertion(self, session, lid, entity):
@@ -365,6 +373,11 @@
"""update an entity in the source"""
raise NotImplementedError()
+ def delete_entities(self, session, entities):
+ """delete several entities from the source"""
+ for entity in entities:
+ self.delete_entity(session, entity)
+
def delete_entity(self, session, entity):
"""delete an entity from the source"""
raise NotImplementedError()
@@ -394,12 +407,19 @@
"""mark entity as being modified, fulltext reindex if needed"""
raise NotImplementedError()
- def delete_info(self, session, entity, uri, extid, attributes, relations):
+ def delete_info(self, session, entity, uri, extid):
"""delete system information on deletion of an entity by transfering
record from the entities table to the deleted_entities table
"""
raise NotImplementedError()
+ def delete_info_multi(self, session, entities, uri, extids):
+ """ame as delete_info but accepts a list of entities with
+ the same etype and belinging to the same source.
+ """
+ for entity, extid in itertools.izip(entities, extids):
+ self.delete_info(session, entity, uri, extid)
+
def modified_entities(self, session, etypes, mtime):
"""return a 2-uple:
* list of (etype, eid) of entities of the given types which have been
@@ -416,14 +436,13 @@
"""
raise NotImplementedError()
- def fti_unindex_entity(self, session, eid):
- """remove text content for entity with the given eid from the full text
- index
+ def fti_unindex_entities(self, session, entities):
+ """remove text content for entities from the full text index
"""
raise NotImplementedError()
- def fti_index_entity(self, session, entity):
- """add text content of a created/modified entity to the full text index
+ def fti_index_entities(self, session, entities):
+ """add text content of created/modified entities to the full text index
"""
raise NotImplementedError()
@@ -505,18 +524,20 @@
pass
def cursor(self):
return None # no actual cursor support
+ def close(self):
+ if hasattr(self.cnx, 'close'):
+ self.cnx.close()
from cubicweb.server import SOURCE_TYPES
-def source_adapter(source_config):
- adapter_type = source_config['adapter'].lower()
+def source_adapter(source_type):
try:
- return SOURCE_TYPES[adapter_type]
+ return SOURCE_TYPES[source_type]
except KeyError:
- raise RuntimeError('Unknown adapter %r' % adapter_type)
+ raise RuntimeError('Unknown source type %r' % source_type)
-def get_source(source_config, global_schema, repo):
+def get_source(type, source_config, repo):
"""return a source adapter according to the adapter field in the
source's configuration
"""
- return source_adapter(source_config)(repo, global_schema, source_config)
+ return source_adapter(type)(repo, source_config)
diff -r 48f468f33704 -r e4580e5f0703 server/sources/ldapuser.py
--- a/server/sources/ldapuser.py Fri Dec 10 12:17:18 2010 +0100
+++ b/server/sources/ldapuser.py Fri Mar 11 09:46:45 2011 +0100
@@ -97,13 +97,13 @@
{'type' : 'string',
'default': '',
'help': 'user dn to use to open data connection to the ldap (eg used \
-to respond to rql queries).',
+to respond to rql queries). Leave empty for anonymous bind.',
'group': 'ldap-source', 'level': 1,
}),
('data-cnx-password',
{'type' : 'string',
'default': '',
- 'help': 'password to use to open data connection to the ldap (eg used to respond to rql queries).',
+ 'help': 'password to use to open data connection to the ldap (eg used to respond to rql queries). Leave empty for anonymous bind.',
'group': 'ldap-source', 'level': 1,
}),
@@ -111,19 +111,19 @@
{'type' : 'string',
'default': 'ou=People,dc=logilab,dc=fr',
'help': 'base DN to lookup for users',
- 'group': 'ldap-source', 'level': 0,
+ 'group': 'ldap-source', 'level': 1,
}),
('user-scope',
{'type' : 'choice',
'default': 'ONELEVEL',
'choices': ('BASE', 'ONELEVEL', 'SUBTREE'),
- 'help': 'user search scope',
+ 'help': 'user search scope (valid values: "BASE", "ONELEVEL", "SUBTREE")',
'group': 'ldap-source', 'level': 1,
}),
('user-classes',
{'type' : 'csv',
'default': ('top', 'posixAccount'),
- 'help': 'classes of user',
+ 'help': 'classes of user (with Active Directory, you want to say "user" here)',
'group': 'ldap-source', 'level': 1,
}),
('user-filter',
@@ -135,7 +135,7 @@
('user-login-attr',
{'type' : 'string',
'default': 'uid',
- 'help': 'attribute used as login on authentication',
+ 'help': 'attribute used as login on authentication (with Active Directory, you want to use "sAMAccountName" here)',
'group': 'ldap-source', 'level': 1,
}),
('user-default-group',
@@ -148,7 +148,7 @@
('user-attrs-map',
{'type' : 'named',
'default': {'uid': 'login', 'gecos': 'email'},
- 'help': 'map from ldap user attributes to cubicweb attributes',
+ 'help': 'map from ldap user attributes to cubicweb attributes (with Active Directory, you want to use sAMAccountName:login,mail:email,givenName:firstname,sn:surname)',
'group': 'ldap-source', 'level': 1,
}),
@@ -168,9 +168,8 @@
)
- def __init__(self, repo, appschema, source_config, *args, **kwargs):
- AbstractSource.__init__(self, repo, appschema, source_config,
- *args, **kwargs)
+ def __init__(self, repo, source_config, *args, **kwargs):
+ AbstractSource.__init__(self, repo, source_config, *args, **kwargs)
self.host = source_config['host']
self.protocol = source_config.get('protocol', 'ldap')
self.authmode = source_config.get('auth-mode', 'simple')
@@ -178,7 +177,7 @@
self.cnx_dn = source_config.get('data-cnx-dn') or ''
self.cnx_pwd = source_config.get('data-cnx-password') or ''
self.user_base_scope = globals()[source_config['user-scope']]
- self.user_base_dn = source_config['user-base-dn']
+ self.user_base_dn = str(source_config['user-base-dn'])
self.user_base_scope = globals()[source_config['user-scope']]
self.user_classes = splitstrip(source_config['user-classes'])
self.user_login_attr = source_config['user-login-attr']
@@ -203,7 +202,7 @@
def _make_base_filters(self):
filters = [filter_format('(%s=%s)', ('objectClass', o))
- for o in self.user_classes]
+ for o in self.user_classes]
if self.user_filter:
filters += [self.user_filter]
return filters
@@ -280,7 +279,10 @@
def get_connection(self):
"""open and return a connection to the source"""
if self._conn is None:
- self._connect()
+ try:
+ self._connect()
+ except:
+ self.exception('unable to connect to ldap:')
return ConnectionWrapper(self._conn)
def authenticate(self, session, login, password=None, **kwargs):
@@ -325,7 +327,7 @@
return None
def prepare_columns(self, mainvars, rqlst):
- """return two list describin how to build the final results
+ """return two list describing how to build the final results
from the result of an ldap search (ie a list of dictionnary)
"""
columns = []
@@ -379,8 +381,14 @@
try:
results = self._query_cache[rqlkey]
except KeyError:
- results = self.rqlst_search(session, rqlst, args)
- self._query_cache[rqlkey] = results
+ try:
+ results = self.rqlst_search(session, rqlst, args)
+ self._query_cache[rqlkey] = results
+ except ldap.SERVER_DOWN:
+                # can't connect to the server
+ msg = session._("can't connect to source %s, some data may be missing")
+ session.set_shared_data('sources_error', msg % self.uri)
+ return []
return results
def rqlst_search(self, session, rqlst, args):
@@ -523,6 +531,8 @@
searchstr='(objectClass=*)', attrs=()):
"""make an ldap query"""
self.debug('ldap search %s %s %s %s %s', self.uri, base, scope, searchstr, list(attrs))
+ # XXX for now, we do not have connection pool support for LDAP, so
+ # this is always self._conn
cnx = session.pool.connection(self.uri).cnx
try:
res = cnx.search_s(base, scope, searchstr, attrs)
@@ -586,15 +596,16 @@
entity = super(LDAPUserSource, self).before_entity_insertion(session, lid, etype, eid)
res = self._search(session, lid, BASE)[0]
for attr in entity.e_schema.indexable_attributes():
- entity[attr] = res[self.user_rev_attrs[attr]]
+ entity.cw_edited[attr] = res[self.user_rev_attrs[attr]]
return entity
- def after_entity_insertion(self, session, dn, entity):
+ def after_entity_insertion(self, session, lid, entity):
"""called by the repository after an entity stored here has been
inserted in the system table.
"""
self.debug('ldap after entity insertion')
- super(LDAPUserSource, self).after_entity_insertion(session, dn, entity)
+ super(LDAPUserSource, self).after_entity_insertion(session, lid, entity)
+ dn = lid
for group in self.user_default_groups:
session.execute('SET X in_group G WHERE X eid %(x)s, G name %(group)s',
{'x': entity.eid, 'group': group})
diff -r 48f468f33704 -r e4580e5f0703 server/sources/native.py
--- a/server/sources/native.py Fri Dec 10 12:17:18 2010 +0100
+++ b/server/sources/native.py Fri Mar 11 09:46:45 2011 +0100
@@ -35,6 +35,7 @@
from contextlib import contextmanager
from os.path import abspath
import re
+import itertools
from logilab.common.compat import any
from logilab.common.cache import Cache
@@ -55,6 +56,7 @@
from cubicweb.server.rqlannotation import set_qdata
from cubicweb.server.hook import CleanupDeletedEidsCacheOp
from cubicweb.server.session import hooks_control, security_enabled
+from cubicweb.server.ssplanner import EditedEntity
from cubicweb.server.sources import AbstractSource, dbg_st_search, dbg_results
from cubicweb.server.sources.rql2sql import SQLGenerator
@@ -262,13 +264,12 @@
}),
)
- def __init__(self, repo, appschema, source_config, *args, **kwargs):
+ def __init__(self, repo, source_config, *args, **kwargs):
SQLAdapterMixIn.__init__(self, source_config)
self.authentifiers = [LoginPasswordAuthentifier(self)]
- AbstractSource.__init__(self, repo, appschema, source_config,
- *args, **kwargs)
+ AbstractSource.__init__(self, repo, source_config, *args, **kwargs)
# sql generator
- self._rql_sqlgen = self.sqlgen_class(appschema, self.dbhelper,
+ self._rql_sqlgen = self.sqlgen_class(self.schema, self.dbhelper,
ATTR_MAP.copy())
# full text index helper
self.do_fti = not repo.config['delay-full-text-indexation']
@@ -547,25 +548,30 @@
# on the filesystem. To make the entity.data usage absolutely
# transparent, we'll have to reset entity.data to its binary
# value once the SQL query will be executed
- restore_values = {}
- etype = entity.__regid__
+ restore_values = []
+ if isinstance(entity, list):
+ entities = entity
+ else:
+ entities = [entity]
+ etype = entities[0].__regid__
for attr, storage in self._storages.get(etype, {}).items():
- try:
- edited = entity.edited_attributes
- except AttributeError:
- assert event == 'deleted'
- getattr(storage, 'entity_deleted')(entity, attr)
- else:
- if attr in edited:
- handler = getattr(storage, 'entity_%s' % event)
- real_value = handler(entity, attr)
- restore_values[attr] = real_value
+ for entity in entities:
+ try:
+ edited = entity.cw_edited
+ except AttributeError:
+ assert event == 'deleted'
+ getattr(storage, 'entity_deleted')(entity, attr)
+ else:
+ if attr in edited:
+ handler = getattr(storage, 'entity_%s' % event)
+ to_restore = handler(entity, attr)
+ restore_values.append((entity, attr, to_restore))
try:
yield # 2/ execute the source's instructions
finally:
# 3/ restore original values
- for attr, value in restore_values.items():
- entity[attr] = value
+ for entity, attr, value in restore_values:
+ entity.cw_edited.edited_attribute(attr, value)
def add_entity(self, session, entity):
"""add a new entity to the source"""
@@ -844,8 +850,8 @@
if self._eid_creation_cnx is None:
self._eid_creation_cnx = self.get_connection()
cnx = self._eid_creation_cnx
- cursor = cnx.cursor()
try:
+ cursor = cnx.cursor()
for sql in self.dbhelper.sqls_increment_sequence('entities_id_seq'):
cursor.execute(sql)
eid = cursor.fetchone()[0]
@@ -880,6 +886,21 @@
attrs = {'type': entity.__regid__, 'eid': entity.eid, 'extid': extid,
'source': source.uri, 'mtime': datetime.now()}
self.doexec(session, self.sqlgen.insert('entities', attrs), attrs)
+ # insert core relations: is, is_instance_of and cw_source
+ if not hasattr(entity, '_cw_recreating'):
+ try:
+ self.doexec(session, 'INSERT INTO is_relation(eid_from,eid_to) VALUES (%s,%s)'
+ % (entity.eid, eschema_eid(session, entity.e_schema)))
+ except IndexError:
+ # during schema serialization, skip
+ pass
+ else:
+ for eschema in entity.e_schema.ancestors() + [entity.e_schema]:
+ self.doexec(session, 'INSERT INTO is_instance_of_relation(eid_from,eid_to) VALUES (%s,%s)'
+ % (entity.eid, eschema_eid(session, eschema)))
+ if 'CWSource' in self.schema and source.eid is not None: # else, cw < 3.10
+ self.doexec(session, 'INSERT INTO cw_source_relation(eid_from,eid_to) '
+ 'VALUES (%s,%s)' % (entity.eid, source.eid))
# now we can update the full text index
if self.do_fti and self.need_fti_indexation(entity.__regid__):
if complete:
@@ -904,7 +925,7 @@
* transfer it to the deleted_entities table if the entity's type is
multi-sources
"""
- self.fti_unindex_entity(session, entity.eid)
+ self.fti_unindex_entities(session, [entity])
attrs = {'eid': entity.eid}
self.doexec(session, self.sqlgen.delete('entities', attrs), attrs)
if not entity.__regid__ in self.multisources_etypes:
@@ -916,6 +937,27 @@
'source': uri, 'dtime': datetime.now()}
self.doexec(session, self.sqlgen.insert('deleted_entities', attrs), attrs)
+ def delete_info_multi(self, session, entities, uri, extids):
+ """delete system information on deletion of an entity:
+ * update the fti
+ * remove record from the entities table
+ * transfer it to the deleted_entities table if the entity's type is
+ multi-sources
+ """
+ self.fti_unindex_entities(session, entities)
+ attrs = {'eid': '(%s)' % ','.join([str(_e.eid) for _e in entities])}
+ self.doexec(session, self.sqlgen.delete_many('entities', attrs), attrs)
+ if entities[0].__regid__ not in self.multisources_etypes:
+ return
+ attrs = {'type': entities[0].__regid__,
+ 'source': uri, 'dtime': datetime.now()}
+ for entity, extid in itertools.izip(entities, extids):
+ if extid is not None:
+ assert isinstance(extid, str), type(extid)
+ extid = b64encode(extid)
+ attrs.update({'eid': entity.eid, 'extid': extid})
+ self.doexec(session, self.sqlgen.insert('deleted_entities', attrs), attrs)
+
def modified_entities(self, session, etypes, mtime):
"""return a 2-uple:
* list of (etype, eid) of entities of the given types which have been
@@ -926,7 +968,7 @@
"""
for etype in etypes:
if not etype in self.multisources_etypes:
- self.critical('%s not listed as a multi-sources entity types. '
+                self.error('%s not listed as a multi-sources entity type. '
'Modify your configuration' % etype)
self.multisources_etypes.add(etype)
modsql = _modified_sql('entities', etypes)
@@ -1127,6 +1169,7 @@
err("can't restore entity %s of type %s, type no more supported"
% (eid, etype))
return errors
+ entity.cw_edited = edited = EditedEntity(entity)
# check for schema changes, entities linked through inlined relation
# still exists, rewrap binary values
eschema = entity.e_schema
@@ -1143,27 +1186,19 @@
assert value is None
elif eschema.destination(rtype) in ('Bytes', 'Password'):
action.changes[column] = self._binary(value)
- entity[rtype] = Binary(value)
+ edited[rtype] = Binary(value)
elif isinstance(value, str):
- entity[rtype] = unicode(value, session.encoding, 'replace')
+ edited[rtype] = unicode(value, session.encoding, 'replace')
else:
- entity[rtype] = value
+ edited[rtype] = value
entity.eid = eid
session.repo.init_entity_caches(session, entity, self)
- entity.edited_attributes = set(entity)
- entity._cw_check()
+ edited.check()
self.repo.hm.call_hooks('before_add_entity', session, entity=entity)
# restore the entity
action.changes['cw_eid'] = eid
sql = self.sqlgen.insert(SQL_PREFIX + etype, action.changes)
self.doexec(session, sql, action.changes)
- # add explicitly is / is_instance_of whose deletion is not recorded for
- # consistency with addition (done by sql in hooks)
- self.doexec(session, 'INSERT INTO is_relation(eid_from, eid_to) '
- 'VALUES(%s, %s)' % (eid, eschema_eid(session, eschema)))
- for eschema in entity.e_schema.ancestors() + [entity.e_schema]:
- self.doexec(session, 'INSERT INTO is_instance_of_relation(eid_from,'
- 'eid_to) VALUES(%s, %s)' % (eid, eschema_eid(session, eschema)))
# restore record in entities (will update fti if needed)
self.add_info(session, entity, self, None, True)
# remove record from deleted_entities if entity's type is multi-sources
@@ -1220,13 +1255,13 @@
"no more supported" % {'eid': eid, 'etype': etype})]
entity.eid = eid
# for proper eid/type cache update
- hook.set_operation(session, 'pendingeids', eid,
- CleanupDeletedEidsCacheOp)
+ CleanupDeletedEidsCacheOp.get_instance(session).add_data(eid)
self.repo.hm.call_hooks('before_delete_entity', session, entity=entity)
# remove is / is_instance_of which are added using sql by hooks, hence
# unvisible as transaction action
self.doexec(session, 'DELETE FROM is_relation WHERE eid_from=%s' % eid)
self.doexec(session, 'DELETE FROM is_instance_of_relation WHERE eid_from=%s' % eid)
+        self.doexec(session, 'DELETE FROM cw_source_relation WHERE eid_from=%s' % eid)
# XXX check removal of inlined relation?
# delete the entity
attrs = {'cw_eid': eid}
@@ -1288,32 +1323,37 @@
"""create an operation to [re]index textual content of the given entity
on commit
"""
- hook.set_operation(session, 'ftindex', entity.eid, FTIndexEntityOp)
+ FTIndexEntityOp.get_instance(session).add_data(entity.eid)
- def fti_unindex_entity(self, session, eid):
- """remove text content for entity with the given eid from the full text
- index
+ def fti_unindex_entities(self, session, entities):
+ """remove text content for entities from the full text index
"""
+ cursor = session.pool['system']
+ cursor_unindex_object = self.dbhelper.cursor_unindex_object
try:
- self.dbhelper.cursor_unindex_object(eid, session.pool['system'])
+ for entity in entities:
+ cursor_unindex_object(entity.eid, cursor)
except Exception: # let KeyboardInterrupt / SystemExit propagate
- self.exception('error while unindexing %s', eid)
+ self.exception('error while unindexing %s', entity)
+
- def fti_index_entity(self, session, entity):
- """add text content of a created/modified entity to the full text index
+ def fti_index_entities(self, session, entities):
+ """add text content of created/modified entities to the full text index
"""
- self.debug('reindexing %r', entity.eid)
+ cursor_index_object = self.dbhelper.cursor_index_object
+ cursor = session.pool['system']
try:
# use cursor_index_object, not cursor_reindex_object since
# unindexing done in the FTIndexEntityOp
- self.dbhelper.cursor_index_object(entity.eid,
- entity.cw_adapt_to('IFTIndexable'),
- session.pool['system'])
+ for entity in entities:
+ cursor_index_object(entity.eid,
+ entity.cw_adapt_to('IFTIndexable'),
+ cursor)
except Exception: # let KeyboardInterrupt / SystemExit propagate
- self.exception('error while reindexing %s', entity)
+ self.exception('error while indexing %s', entity)
-class FTIndexEntityOp(hook.LateOperation):
+class FTIndexEntityOp(hook.DataOperationMixIn, hook.LateOperation):
"""operation to delay entity full text indexation to commit
since fti indexing may trigger discovery of other entities, it should be
@@ -1326,17 +1366,17 @@
source = session.repo.system_source
pendingeids = session.transaction_data.get('pendingeids', ())
done = session.transaction_data.setdefault('indexedeids', set())
- for eid in session.transaction_data.pop('ftindex', ()):
+ to_reindex = set()
+ for eid in self.get_data():
if eid in pendingeids or eid in done:
# entity added and deleted in the same transaction or already
# processed
- return
+ continue
done.add(eid)
iftindexable = session.entity_from_eid(eid).cw_adapt_to('IFTIndexable')
- for container in iftindexable.fti_containers():
- source.fti_unindex_entity(session, container.eid)
- source.fti_index_entity(session, container)
-
+ to_reindex |= set(iftindexable.fti_containers())
+ source.fti_unindex_entities(session, to_reindex)
+ source.fti_index_entities(session, to_reindex)
def sql_schema(driver):
helper = get_db_helper(driver)
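Several operations touched by this changeset (FTIndexEntityOp above, CleanupDeletedEidsCacheOp, AddFileOp and DeleteFileOp in storages.py) move from hook.set_operation() to the DataOperationMixIn API. A minimal sketch of that pattern, where MyDataOp is a made-up operation and 'session' / 'eid' are assumed to come from the calling hook:

    class MyDataOp(hook.DataOperationMixIn, hook.Operation):
        # made-up operation: collect eids during the transaction and
        # process them once at precommit time
        def precommit_event(self):
            for eid in self.get_data():
                self.session.debug('processing %s', eid)

    # anywhere during the transaction, possibly from several hooks;
    # get_instance() returns the single instance bound to this session
    MyDataOp.get_instance(session).add_data(eid)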
diff -r 48f468f33704 -r e4580e5f0703 server/sources/pyrorql.py
--- a/server/sources/pyrorql.py Fri Dec 10 12:17:18 2010 +0100
+++ b/server/sources/pyrorql.py Fri Mar 11 09:46:45 2011 +0100
@@ -18,6 +18,7 @@
"""Source to query another RQL repository using pyro"""
__docformat__ = "restructuredtext en"
+_ = unicode
import threading
from os.path import join
@@ -28,6 +29,7 @@
from Pyro.errors import PyroError, ConnectionClosedError
from logilab.common.configuration import REQUIRED
+from logilab.common.optik_ext import check_yn
from rql.nodes import Constant
from rql.utils import rqlvar_maker
@@ -64,7 +66,7 @@
assert not unknown, 'unknown mapping attribute(s): %s' % unknown
# relations that are necessarily not crossed
mapping['dont_cross_relations'] |= set(('owned_by', 'created_by'))
- for rtype in ('is', 'is_instance_of'):
+ for rtype in ('is', 'is_instance_of', 'cw_source'):
assert rtype not in mapping['dont_cross_relations'], \
'%s relation should not be in dont_cross_relations' % rtype
assert rtype not in mapping['support_relations'], \
@@ -119,6 +121,12 @@
'to generate external link to entities from this repository',
'group': 'pyro-source', 'level': 1,
}),
+ ('skip-external-entities',
+ {'type' : 'yn',
+ 'default': False,
+     'help': 'should entities not local to the source be skipped (yes) or considered (no)',
+ 'group': 'pyro-source', 'level': 0,
+ }),
('pyro-ns-host',
{'type' : 'string',
'default': None,
@@ -131,7 +139,7 @@
'default': None,
'help': 'Pyro name server\'s group where the repository will be \
registered. If not set, default to the value from all_in_one.conf.',
- 'group': 'pyro-source', 'level': 1,
+ 'group': 'pyro-source', 'level': 2,
}),
('synchronization-interval',
{'type' : 'int',
@@ -146,17 +154,26 @@
PUBLIC_KEYS = AbstractSource.PUBLIC_KEYS + ('base-url',)
_conn = None
- def __init__(self, repo, appschema, source_config, *args, **kwargs):
- AbstractSource.__init__(self, repo, appschema, source_config,
- *args, **kwargs)
+ def __init__(self, repo, source_config, *args, **kwargs):
+ AbstractSource.__init__(self, repo, source_config, *args, **kwargs)
mappingfile = source_config['mapping-file']
if not mappingfile[0] == '/':
mappingfile = join(repo.config.apphome, mappingfile)
- mapping = load_mapping_file(mappingfile)
- self.support_entities = mapping['support_entities']
- self.support_relations = mapping['support_relations']
- self.dont_cross_relations = mapping['dont_cross_relations']
- self.cross_relations = mapping['cross_relations']
+ try:
+ mapping = load_mapping_file(mappingfile)
+ except IOError:
+ self.disabled = True
+            self.error("can't read mapping file %s, source disabled",
+ mappingfile)
+ self.support_entities = {}
+ self.support_relations = {}
+ self.dont_cross_relations = set()
+ self.cross_relations = set()
+ else:
+ self.support_entities = mapping['support_entities']
+ self.support_relations = mapping['support_relations']
+ self.dont_cross_relations = mapping['dont_cross_relations']
+ self.cross_relations = mapping['cross_relations']
baseurl = source_config.get('base-url')
if baseurl and not baseurl.endswith('/'):
source_config['base-url'] += '/'
@@ -169,6 +186,8 @@
}),)
register_persistent_options(myoptions)
self._query_cache = TimedCache(1800)
+ self._skip_externals = check_yn(None, 'skip-external-entities',
+ source_config.get('skip-external-entities', False))
def reset_caches(self):
"""method called during test to reset potential source caches"""
@@ -176,10 +195,10 @@
def last_update_time(self):
pkey = u'sources.%s.latest-update-time' % self.uri
- rql = 'Any V WHERE X is CWProperty, X value V, X pkey %(k)s'
session = self.repo.internal_session()
try:
- rset = session.execute(rql, {'k': pkey})
+ rset = session.execute('Any V WHERE X is CWProperty, X value V, X pkey %(k)s',
+ {'k': pkey})
if not rset:
# insert it
session.execute('INSERT CWProperty X: X pkey %(k)s, X value %(v)s',
@@ -200,6 +219,18 @@
self.repo.looping_task(self._query_cache.ttl.seconds/10,
self._query_cache.clear_expired)
+ def local_eid(self, cnx, extid, session):
+ etype, dexturi, dextid = cnx.describe(extid)
+ if dexturi == 'system' or not (
+ dexturi in self.repo.sources_by_uri or self._skip_externals):
+ return self.repo.extid2eid(self, str(extid), etype, session), True
+ if dexturi in self.repo.sources_by_uri:
+ source = self.repo.sources_by_uri[dexturi]
+ cnx = session.pool.connection(source.uri)
+ eid = source.local_eid(cnx, dextid, session)[0]
+ return eid, False
+ return None, None
+
def synchronize(self, mtime=None):
"""synchronize content known by this repository with content in the
external repository
@@ -224,9 +255,8 @@
try:
for etype, extid in modified:
try:
- exturi = cnx.describe(extid)[1]
- if exturi == 'system' or not exturi in repo.sources_by_uri:
- eid = self.extid2eid(str(extid), etype, session)
+ eid = self.local_eid(cnx, extid, session)[0]
+ if eid is not None:
rset = session.eid_rset(eid, etype)
entity = rset.get_entity(0, 0)
entity.complete(entity.e_schema.indexable_attributes())
@@ -242,7 +272,8 @@
# entity has been deleted from external repository but is not known here
if eid is not None:
entity = session.entity_from_eid(eid, etype)
- repo.delete_info(session, entity, self.uri, extid)
+ repo.delete_info(session, entity, self.uri, extid,
+ scleanup=True)
except:
self.exception('while updating %s with external id %s of source %s',
etype, extid, self.uri)
@@ -323,8 +354,9 @@
msg = session._("can't connect to source %s, some data may be missing")
session.set_shared_data('sources_error', msg % self.uri)
return []
+ translator = RQL2RQL(self)
try:
- rql = RQL2RQL(self).generate(session, union, args)
+ rql = translator.generate(session, union, args)
except UnknownEid, ex:
if server.DEBUG:
print ' unknown eid', ex, 'no results'
@@ -350,19 +382,27 @@
cnx = session.pool.connection(self.uri)
for rowindex in xrange(rset.rowcount - 1, -1, -1):
row = rows[rowindex]
+ localrow = False
for colindex in needtranslation:
if row[colindex] is not None: # optional variable
- etype = descr[rowindex][colindex]
- exttype, exturi, extid = cnx.describe(row[colindex])
- if exturi == 'system' or not exturi in self.repo.sources_by_uri:
- eid = self.extid2eid(str(row[colindex]), etype,
- session)
+ eid, local = self.local_eid(cnx, row[colindex], session)
+ if local:
+ localrow = True
+ if eid is not None:
row[colindex] = eid
else:
# skip this row
del rows[rowindex]
del descr[rowindex]
break
+ else:
+ # skip row if it only contains eids of entities which
+ # are actually from a source we also know locally,
+                # except if some args are specified (XXX should actually
+ # check if there are some args local to the source)
+ if not (translator.has_local_eid or localrow):
+ del rows[rowindex]
+ del descr[rowindex]
results = rows
else:
results = []
@@ -371,7 +411,7 @@
def _entity_relations_and_kwargs(self, session, entity):
relations = []
kwargs = {'x': self.eid2extid(entity.eid, session)}
- for key, val in entity.iteritems():
+ for key, val in entity.cw_attr_cache.iteritems():
relations.append('X %s %%(%s)s' % (key, key))
kwargs[key] = val
return relations, kwargs
@@ -434,6 +474,7 @@
self._session = session
self.kwargs = args
self.need_translation = False
+ self.has_local_eid = False
return self.visit_union(rqlst)
def visit_union(self, node):
@@ -580,6 +621,7 @@
def visit_constant(self, node):
if self.need_translation or node.uidtype:
if node.type == 'Int':
+ self.has_local_eid = True
return str(self.eid2extid(node.value))
if node.type == 'Substitute':
key = node.value
@@ -587,6 +629,7 @@
if not key in self._const_var:
self.kwargs[key] = self.eid2extid(self.kwargs[key])
self._const_var[key] = None
+ self.has_local_eid = True
return node.as_string()
def visit_variableref(self, node):
diff -r 48f468f33704 -r e4580e5f0703 server/sources/rql2sql.py
--- a/server/sources/rql2sql.py Fri Dec 10 12:17:18 2010 +0100
+++ b/server/sources/rql2sql.py Fri Mar 11 09:46:45 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -365,6 +365,11 @@
yield 1
return
thisexistssols, thisexistsvars = self.existssols[exists]
+    # when iterating over solutions inner to an EXISTS subquery, we should
+ # reset variables which have this exists node as scope at each iteration
+ for var in exists.stmt.defined_vars.itervalues():
+ if var.scope is exists:
+ thisexistsvars.add(var.name)
origsol = self.solution
origtables = self.tables
done = self.done
@@ -416,7 +421,7 @@
p = compnode.parent
oor = None
while not isinstance(p, Select):
- if isinstance(p, Or):
+ if isinstance(p, (Or, Not)):
oor = p
p = p.parent
if oor is not None:
@@ -434,7 +439,7 @@
while not isinstance(p, Select):
if p in ors or p is None: # p is None for nodes already in fakehaving
break
- if isinstance(p, Or):
+ if isinstance(p, (Or, Not)):
oor = p
p = p.parent
else:
@@ -508,7 +513,7 @@
select.need_distinct = True
return self.__union_sql(union, needalias)
- def union_sql(self, union, needalias=False): # pylint: disable-msg=E0202
+ def union_sql(self, union, needalias=False): # pylint: disable=E0202
if len(union.children) == 1:
return self.select_sql(union.children[0], needalias)
sqls = ('(%s)' % self.select_sql(select, needalias)
@@ -991,24 +996,21 @@
unification (eg X attr1 A, Y attr2 A). In case of selection,
nothing to do here.
"""
- contextrels = {}
for var in rhs_vars:
if var.name in self._varmap:
# ensure table is added
self._var_info(var.variable)
principal = var.variable.stinfo.get('principal')
if principal is not None and principal is not relation:
- contextrels[var.name] = relation
- if not contextrels:
- return ''
- # we have to generate unification expression
- lhssql = self._inlined_var_sql(relation.children[0].variable,
- relation.r_type)
- try:
- self._state.ignore_varmap = True
- return '%s%s' % (lhssql, relation.children[1].accept(self))
- finally:
- self._state.ignore_varmap = False
+ # we have to generate unification expression
+ lhssql = self._inlined_var_sql(relation.children[0].variable,
+ relation.r_type)
+ try:
+ self._state.ignore_varmap = True
+ return '%s%s' % (lhssql, relation.children[1].accept(self))
+ finally:
+ self._state.ignore_varmap = False
+ return ''
def _visit_attribute_relation(self, rel):
"""generate SQL for an attribute relation"""
@@ -1337,10 +1339,10 @@
# tables handling #########################################################
- def alias_and_add_table(self, tablename):
+ def alias_and_add_table(self, tablename, scope=-1):
alias = '%s%s' % (tablename, self._state.count)
self._state.count += 1
- self.add_table('%s AS %s' % (tablename, alias), alias)
+ self.add_table('%s AS %s' % (tablename, alias), alias, scope)
return alias
def add_table(self, table, key=None, scope=-1):
@@ -1437,6 +1439,7 @@
except AttributeError:
pass
self._state.done.add(relation)
- alias = self.alias_and_add_table(self.dbhelper.fti_table)
+ scope = self._state.scopes[relation.scope]
+ alias = self.alias_and_add_table(self.dbhelper.fti_table, scope=scope)
relation._q_sqltable = alias
return alias
diff -r 48f468f33704 -r e4580e5f0703 server/sources/storages.py
--- a/server/sources/storages.py Fri Dec 10 12:17:18 2010 +0100
+++ b/server/sources/storages.py Fri Mar 11 09:46:45 2011 +0100
@@ -24,6 +24,8 @@
from cubicweb import Binary, ValidationError
from cubicweb.server import hook
+from cubicweb.server.ssplanner import EditedEntity
+
def set_attribute_storage(repo, etype, attr, storage):
repo.system_source.set_storage(etype, attr, storage)
@@ -31,6 +33,7 @@
def unset_attribute_storage(repo, etype, attr):
repo.system_source.unset_storage(etype, attr)
+
class Storage(object):
"""abstract storage
@@ -126,14 +129,14 @@
def entity_added(self, entity, attr):
"""an entity using this storage for attr has been added"""
if entity._cw.transaction_data.get('fs_importing'):
- binary = Binary(file(entity[attr].getvalue(), 'rb').read())
+ binary = Binary(file(entity.cw_edited[attr].getvalue(), 'rb').read())
else:
- binary = entity.pop(attr)
+ binary = entity.cw_edited.pop(attr)
fpath = self.new_fs_path(entity, attr)
# bytes storage used to store file's path
- entity[attr] = Binary(fpath)
+ entity.cw_edited.edited_attribute(attr, Binary(fpath))
file(fpath, 'wb').write(binary.getvalue())
- hook.set_operation(entity._cw, 'bfss_added', fpath, AddFileOp)
+ AddFileOp.get_instance(entity._cw).add_data(fpath)
return binary
def entity_updated(self, entity, attr):
@@ -144,7 +147,7 @@
# If we are importing from the filesystem, the file already exists.
# We do not need to create it but we need to fetch the content of
# the file as the actual content of the attribute
- fpath = entity[attr].getvalue()
+ fpath = entity.cw_edited[attr].getvalue()
binary = Binary(file(fpath, 'rb').read())
else:
# We must store the content of the attributes
@@ -156,7 +159,7 @@
# went ok.
#
# fetch the current attribute value in memory
- binary = entity.pop(attr)
+ binary = entity.cw_edited.pop(attr)
# Get filename for it
fpath = self.new_fs_path(entity, attr)
assert not osp.exists(fpath)
@@ -164,20 +167,19 @@
file(fpath, 'wb').write(binary.getvalue())
# Mark the new file as added during the transaction.
# The file will be removed on rollback
- hook.set_operation(entity._cw, 'bfss_added', fpath, AddFileOp)
+ AddFileOp.get_instance(entity._cw).add_data(fpath)
if oldpath != fpath:
# register the new location for the file.
- entity[attr] = Binary(fpath)
+ entity.cw_edited.edited_attribute(attr, Binary(fpath))
# Mark the old file as useless so the file will be removed at
# commit.
- hook.set_operation(entity._cw, 'bfss_deleted', oldpath,
- DeleteFileOp)
+ DeleteFileOp.get_instance(entity._cw).add_data(oldpath)
return binary
def entity_deleted(self, entity, attr):
"""an entity using this storage for attr has been deleted"""
fpath = self.current_fs_path(entity, attr)
- hook.set_operation(entity._cw, 'bfss_deleted', fpath, DeleteFileOp)
+ DeleteFileOp.get_instance(entity._cw).add_data(fpath)
def new_fs_path(self, entity, attr):
# We try to get some hint about how to name the file using attribute's
@@ -209,7 +211,7 @@
def migrate_entity(self, entity, attribute):
"""migrate an entity attribute to the storage"""
- entity.edited_attributes = set()
+ entity.cw_edited = EditedEntity(entity, **entity.cw_attr_cache)
self.entity_added(entity, attribute)
session = entity._cw
source = session.repo.system_source
@@ -217,19 +219,20 @@
sql = source.sqlgen.update('cw_' + entity.__regid__, attrs,
['cw_eid'])
source.doexec(session, sql, attrs)
+ entity.cw_edited = None
-class AddFileOp(hook.Operation):
+class AddFileOp(hook.DataOperationMixIn, hook.Operation):
def rollback_event(self):
- for filepath in self.session.transaction_data.pop('bfss_added'):
+ for filepath in self.get_data():
try:
unlink(filepath)
except Exception, ex:
self.error('cant remove %s: %s' % (filepath, ex))
-class DeleteFileOp(hook.Operation):
- def commit_event(self):
- for filepath in self.session.transaction_data.pop('bfss_deleted'):
+class DeleteFileOp(hook.DataOperationMixIn, hook.Operation):
+ def postcommit_event(self):
+ for filepath in self.get_data():
try:
unlink(filepath)
except Exception, ex:
diff -r 48f468f33704 -r e4580e5f0703 server/sqlutils.py
--- a/server/sqlutils.py Fri Dec 10 12:17:18 2010 +0100
+++ b/server/sqlutils.py Fri Mar 11 09:46:45 2011 +0100
@@ -165,7 +165,7 @@
self.OperationalError = dbapi_module.OperationalError
self.InterfaceError = dbapi_module.InterfaceError
self.DbapiError = dbapi_module.Error
- self._binary = dbapi_module.Binary
+ self._binary = self.dbhelper.binary_value
self._process_value = dbapi_module.process_value
self._dbencoding = dbencoding
@@ -260,8 +260,7 @@
"""
attrs = {}
eschema = entity.e_schema
- for attr in entity.edited_attributes:
- value = entity[attr]
+ for attr, value in entity.cw_edited.iteritems():
if value is not None and eschema.subjrels[attr].final:
atype = str(entity.e_schema.destination(attr))
if atype == 'Boolean':
diff -r 48f468f33704 -r e4580e5f0703 server/ssplanner.py
--- a/server/ssplanner.py Fri Dec 10 12:17:18 2010 +0100
+++ b/server/ssplanner.py Fri Mar 11 09:46:45 2011 +0100
@@ -21,6 +21,8 @@
__docformat__ = "restructuredtext en"
+from copy import copy
+
from rql.stmts import Union, Select
from rql.nodes import Constant, Relation
@@ -55,11 +57,11 @@
if isinstance(rhs, Constant) and not rhs.uid:
# add constant values to entity def
value = rhs.eval(plan.args)
- eschema = edef.e_schema
+ eschema = edef.entity.e_schema
attrtype = eschema.subjrels[rtype].objects(eschema)[0]
if attrtype == 'Password' and isinstance(value, unicode):
value = value.encode('UTF8')
- edef[rtype] = value
+ edef.edited_attribute(rtype, value)
elif to_build.has_key(str(rhs)):
# create a relation between two newly created variables
plan.add_relation_def((edef, rtype, to_build[rhs.name]))
@@ -126,6 +128,132 @@
return select
+_MARKER = object()
+
+class dict_protocol_catcher(object):
+ def __init__(self, entity):
+ self.__entity = entity
+ def __getitem__(self, attr):
+ return self.__entity.cw_edited[attr]
+ def __setitem__(self, attr, value):
+ self.__entity.cw_edited[attr] = value
+ def __getattr__(self, attr):
+ return getattr(self.__entity, attr)
+
+
+class EditedEntity(dict):
+ """encapsulate entities attributes being written by an RQL query"""
+ def __init__(self, entity, **kwargs):
+ dict.__init__(self, **kwargs)
+ self.entity = entity
+ self.skip_security = set()
+ self.querier_pending_relations = {}
+ self.saved = False
+
+ def __hash__(self):
+ # dict|set keyable
+ return hash(id(self))
+
+ def __cmp__(self, other):
+ # we don't want comparison by value inherited from dict
+ return cmp(id(self), id(other))
+
+ def __setitem__(self, attr, value):
+ assert attr != 'eid'
+ # don't add attribute into skip_security if already in edited
+        # attributes, else we may accidentally skip a desired security check
+ if attr not in self:
+ self.skip_security.add(attr)
+ self.edited_attribute(attr, value)
+
+ def __delitem__(self, attr):
+ assert not self.saved, 'too late to modify edited attributes'
+ super(EditedEntity, self).__delitem__(attr)
+ self.entity.cw_attr_cache.pop(attr, None)
+
+ def pop(self, attr, *args):
+ # don't update skip_security by design (think to storage api)
+ assert not self.saved, 'too late to modify edited attributes'
+ value = super(EditedEntity, self).pop(attr, *args)
+ self.entity.cw_attr_cache.pop(attr, *args)
+ return value
+
+ def setdefault(self, attr, default):
+ assert attr != 'eid'
+ # don't add attribute into skip_security if already in edited
+        # attributes, else we may accidentally skip a desired security check
+ if attr not in self:
+ self[attr] = default
+ return self[attr]
+
+ def update(self, values, skipsec=True):
+ if skipsec:
+ setitem = self.__setitem__
+ else:
+ setitem = self.edited_attribute
+ for attr, value in values.iteritems():
+ setitem(attr, value)
+
+ def edited_attribute(self, attr, value):
+ """attribute being edited by a rql query: should'nt be added to
+ skip_security
+ """
+ assert not self.saved, 'too late to modify edited attributes'
+ super(EditedEntity, self).__setitem__(attr, value)
+ self.entity.cw_attr_cache[attr] = value
+
+ def oldnewvalue(self, attr):
+ """returns the couple (old attr value, new attr value)
+
+ NOTE: will only work in a before_update_entity hook
+ """
+ assert not self.saved, 'too late to get the old value'
+ # get new value and remove from local dict to force a db query to
+ # fetch old value
+ newvalue = self.entity.cw_attr_cache.pop(attr, _MARKER)
+ oldvalue = getattr(self.entity, attr)
+ if newvalue is not _MARKER:
+ self.entity.cw_attr_cache[attr] = newvalue
+ else:
+ newvalue = oldvalue
+ return oldvalue, newvalue
+
+ def set_defaults(self):
+ """set default values according to the schema"""
+ for attr, value in self.entity.e_schema.defaults():
+ if not attr in self:
+ self[str(attr)] = value
+
+ def check(self, creation=False):
+ """check the entity edition against its schema. Only final relation
+ are checked here, constraint on actual relations are checked in hooks
+ """
+ entity = self.entity
+ if creation:
+ # on creations, we want to check all relations, especially
+ # required attributes
+ relations = [rschema for rschema in entity.e_schema.subject_relations()
+ if rschema.final and rschema.type != 'eid']
+ else:
+ relations = [entity._cw.vreg.schema.rschema(rtype)
+ for rtype in self]
+ from yams import ValidationError
+ try:
+ entity.e_schema.check(dict_protocol_catcher(entity),
+ creation=creation, _=entity._cw._,
+ relations=relations)
+ except ValidationError, ex:
+ ex.entity = self.entity
+ raise
+
+ def clone(self):
+ thecopy = EditedEntity(copy(self.entity))
+ thecopy.entity.cw_attr_cache = copy(self.entity.cw_attr_cache)
+ thecopy.entity._cw_related_cache = {}
+ thecopy.update(self, skipsec=False)
+ return thecopy
+
+
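A brief sketch of how the EditedEntity API above is used elsewhere in this changeset; the attribute name and value are illustrative and 'entity' is assumed to come from a hook's context (entity.cw_edited is an EditedEntity):

    # dict-style assignment marks the attribute so security checks are skipped
    entity.cw_edited['login'] = u'babar'
    # edited_attribute() records the edit without bypassing security (RQL path)
    entity.cw_edited.edited_attribute('login', u'babar')
    # only meaningful before the entity is saved, e.g. in a before_update hook
    oldvalue, newvalue = entity.cw_edited.oldnewvalue('login')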
class SSPlanner(object):
"""SingleSourcePlanner: build execution plan for rql queries
@@ -162,7 +290,7 @@
etype_class = session.vreg['etypes'].etype_class
for etype, var in rqlst.main_variables:
# need to do this since entity class is shared w. web client code !
- to_build[var.name] = etype_class(etype)(session)
+ to_build[var.name] = EditedEntity(etype_class(etype)(session))
plan.add_entity_def(to_build[var.name])
# add constant values to entity def, mark variables to be selected
to_select = _extract_const_attributes(plan, rqlst, to_build)
@@ -177,7 +305,7 @@
for edef, rdefs in to_select.items():
# create a select rql st to fetch needed data
select = Select()
- eschema = edef.e_schema
+ eschema = edef.entity.e_schema
for i, (rtype, term, reverse) in enumerate(rdefs):
if getattr(term, 'variable', None) in eidconsts:
value = eidconsts[term.variable]
@@ -284,10 +412,8 @@
rhsinfo = selectedidx[rhskey][:-1] + (None,)
rschema = getrschema(relation.r_type)
updatedefs.append( (lhsinfo, rhsinfo, rschema) )
- if rschema.final or rschema.inlined:
- attributes.add(relation.r_type)
# the update step
- step = UpdateStep(plan, updatedefs, attributes)
+ step = UpdateStep(plan, updatedefs)
# when necessary add substep to fetch yet unknown values
select = _build_substep_query(select, rqlst)
if select is not None:
@@ -476,7 +602,7 @@
result = [[]]
for row in result:
# get a new entity definition for this row
- edef = base_edef.cw_copy()
+ edef = base_edef.clone()
# complete this entity def using row values
index = 0
for rtype, rorder, value in self.rdefs:
@@ -484,7 +610,7 @@
value = row[index]
index += 1
if rorder == InsertRelationsStep.FINAL:
- edef._cw_rql_set_value(rtype, value)
+ edef.edited_attribute(rtype, value)
elif rorder == InsertRelationsStep.RELATION:
self.plan.add_relation_def( (edef, rtype, value) )
edef.querier_pending_relations[(rtype, 'subject')] = value
@@ -495,6 +621,7 @@
self.plan.substitute_entity_def(base_edef, edefs)
return result
+
class InsertStep(Step):
"""step consisting in inserting new entities / relations"""
@@ -518,21 +645,16 @@
def execute(self):
"""execute this step"""
results = self.execute_child()
- todelete = frozenset(typed_eid(eid) for eid, in results)
- session = self.plan.session
- delete = session.repo.glob_delete_entity
- # mark eids as being deleted in session info and setup cache update
- # operation (register pending eids before actual deletion to avoid
- # multiple call to glob_delete_entity)
- try:
- pending = session.transaction_data['pendingeids']
- except KeyError:
- pending = session.transaction_data['pendingeids'] = set()
- CleanupDeletedEidsCacheOp(session)
- actual = todelete - pending
- pending |= actual
- for eid in actual:
- delete(session, eid)
+ if results:
+ todelete = frozenset(typed_eid(eid) for eid, in results)
+ session = self.plan.session
+ # mark eids as being deleted in session info and set up the cache update
+ # operation (register pending eids before actual deletion to avoid
+ # multiple calls to glob_delete_entities)
+ op = CleanupDeletedEidsCacheOp.get_instance(session)
+ actual = todelete - op._container
+ op._container |= actual
+ session.repo.glob_delete_entities(session, actual)
return results
class DeleteRelationsStep(Step):
@@ -555,10 +677,9 @@
definitions and from results fetched in previous step
"""
- def __init__(self, plan, updatedefs, attributes):
+ def __init__(self, plan, updatedefs):
Step.__init__(self, plan)
self.updatedefs = updatedefs
- self.attributes = attributes
def execute(self):
"""execute this step"""
@@ -578,16 +699,17 @@
if rschema.final or rschema.inlined:
eid = typed_eid(lhsval)
try:
- edef = edefs[eid]
+ edited = edefs[eid]
except KeyError:
- edefs[eid] = edef = session.entity_from_eid(eid)
- edef._cw_rql_set_value(str(rschema), rhsval)
+ edef = session.entity_from_eid(eid)
+ edefs[eid] = edited = EditedEntity(edef)
+ edited.edited_attribute(str(rschema), rhsval)
else:
repo.glob_add_relation(session, lhsval, str(rschema), rhsval)
result[i] = newrow
# update entities
- for eid, edef in edefs.iteritems():
- repo.glob_update_entity(session, edef, set(self.attributes))
+ for eid, edited in edefs.iteritems():
+ repo.glob_update_entity(session, edited)
return result
def _handle_relterm(info, row, newrow):
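
The UpdateStep change above reduces to a new calling convention for glob_update_entity: instead of an entity plus a separate set of attribute names, it now receives the EditedEntity, which already knows which attributes were touched. Roughly (session, repo and eid are assumed to be in scope as in execute(), and 'name' is an illustrative attribute):

    edited = EditedEntity(session.entity_from_eid(eid))
    edited.edited_attribute('name', u'new value')
    repo.glob_update_entity(session, edited)   # no separate attribute set anymore
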
diff -r 48f468f33704 -r e4580e5f0703 server/test/data/extern_mapping.py
--- a/server/test/data/extern_mapping.py Fri Dec 10 12:17:18 2010 +0100
+++ b/server/test/data/extern_mapping.py Fri Mar 11 09:46:45 2011 +0100
@@ -15,8 +15,9 @@
#
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see .
-"""
+"""mapping file for source used in unittest_multisources.py"""
-"""
support_entities = {'Card': True, 'Affaire': True, 'State': True}
support_relations = {'in_state': True, 'documented_by': True, 'multisource_inlined_rel': True}
+
+cross_relations = set( ('documented_by',) )
diff -r 48f468f33704 -r e4580e5f0703 server/test/data/ldap_test.ldif
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/data/ldap_test.ldif Fri Mar 11 09:46:45 2011 +0100
@@ -0,0 +1,55 @@
+dn: dc=cubicweb,dc=test
+structuralObjectClass: organization
+objectClass: dcObject
+objectClass: organization
+o: cubicweb
+dc: cubicweb
+
+dn: ou=People,dc=cubicweb,dc=test
+objectClass: organizationalUnit
+ou: People
+structuralObjectClass: organizationalUnit
+
+dn: uid=syt,ou=People,dc=cubicweb,dc=test
+loginShell: /bin/bash
+objectClass: inetOrgPerson
+objectClass: posixAccount
+objectClass: top
+objectClass: shadowAccount
+structuralObjectClass: inetOrgPerson
+cn: Sylvain Thenault
+sn: Thenault
+shadowMax: 99999
+gidNumber: 1004
+uid: syt
+homeDirectory: /home/syt
+shadowFlag: 134538764
+uidNumber: 1004
+givenName: Sylvain
+telephoneNumber: 106
+displayName: sthenault
+gecos: Sylvain Thenault
+mail: sylvain.thenault@logilab.fr
+mail: syt@logilab.fr
+
+dn: uid=adim,ou=People,dc=cubicweb,dc=test
+loginShell: /bin/bash
+objectClass: inetOrgPerson
+objectClass: posixAccount
+objectClass: top
+objectClass: shadowAccount
+cn: Adrien Di Mascio
+sn: Di Mascio
+shadowMax: 99999
+gidNumber: 1006
+uid: adim
+homeDirectory: /home/adim
+uidNumber: 1006
+structuralObjectClass: inetOrgPerson
+givenName: Adrien
+telephoneNumber: 109
+displayName: adimascio
+gecos: Adrien Di Mascio
+mail: adim@logilab.fr
+mail: adrien.dimascio@logilab.fr
+
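
The entries above are the data served by the slapd instance the test suite now starts itself. To poke at the directory by hand one can use python-ldap against the port picked at test start (9001 below is illustrative; the base dn and scope match the CONFIG string in unittest_ldapuser.py):

    import ldap

    conn = ldap.initialize('ldap://localhost:9001')
    conn.simple_bind_s()   # anonymous bind, the test slapd.conf defines no ACLs
    print conn.search_s('ou=People,dc=cubicweb,dc=test', ldap.SCOPE_ONELEVEL,
                        '(uid=syt)', ['uid', 'mail'])
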
diff -r 48f468f33704 -r e4580e5f0703 server/test/data/migratedapp/schema.py
--- a/server/test/data/migratedapp/schema.py Fri Dec 10 12:17:18 2010 +0100
+++ b/server/test/data/migratedapp/schema.py Fri Mar 11 09:46:45 2011 +0100
@@ -15,9 +15,7 @@
#
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see .
-"""
-
-"""
+"""cw.server.migraction test"""
from yams.buildobjs import (EntityType, RelationType, RelationDefinition,
SubjectRelation,
RichString, String, Int, Boolean, Datetime, Date)
@@ -66,8 +64,9 @@
whatever = Int(default=2) # keep it before `date` for unittest_migraction.test_add_attribute_int
date = Datetime()
type = String(maxsize=1)
+ unique_id = String(maxsize=1, required=True, unique=True)
mydate = Date(default='TODAY')
- shortpara = String(maxsize=64)
+ shortpara = String(maxsize=64, default='hop')
ecrit_par = SubjectRelation('Personne', constraints=[RQLConstraint('S concerne A, O concerne A')])
attachment = SubjectRelation('File')
diff -r 48f468f33704 -r e4580e5f0703 server/test/data/schema.py
--- a/server/test/data/schema.py Fri Dec 10 12:17:18 2010 +0100
+++ b/server/test/data/schema.py Fri Mar 11 09:46:45 2011 +0100
@@ -17,7 +17,8 @@
# with CubicWeb. If not, see .
from yams.buildobjs import (EntityType, RelationType, RelationDefinition,
- SubjectRelation, RichString, String, Int, Boolean, Datetime)
+ SubjectRelation, RichString, String, Int, Float,
+ Boolean, Datetime)
from yams.constraints import SizeConstraint
from cubicweb.schema import (WorkflowableEntityType,
RQLConstraint, RQLUniqueConstraint,
@@ -40,7 +41,7 @@
description=_('more detailed description'))
duration = Int()
- invoiced = Int()
+ invoiced = Float()
depends_on = SubjectRelation('Affaire')
require_permission = SubjectRelation('CWPermission')
diff -r 48f468f33704 -r e4580e5f0703 server/test/data/slapd.conf.in
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/data/slapd.conf.in Fri Mar 11 09:46:45 2011 +0100
@@ -0,0 +1,53 @@
+# This is the main slapd configuration file. See slapd.conf(5) for more
+# info on the configuration options.
+
+#######################################################################
+# Global Directives:
+
+# Features to permit
+#allow bind_v2
+
+# Schema and objectClass definitions
+include /etc/ldap/schema/core.schema
+include /etc/ldap/schema/cosine.schema
+include /etc/ldap/schema/nis.schema
+include /etc/ldap/schema/inetorgperson.schema
+include /etc/ldap/schema/openldap.schema
+include /etc/ldap/schema/misc.schema
+
+# Where the pid file is put. The init.d script
+# will not stop the server if you change this.
+pidfile %(apphome)s/test-slapd.pid
+
+# List of arguments that were passed to the server
+argsfile %(apphome)s/slapd.args
+
+# Read slapd.conf(5) for possible values
+loglevel sync
+# none
+
+# Where the dynamically loaded modules are stored
+modulepath /usr/lib/ldap
+moduleload back_hdb
+moduleload back_bdb
+moduleload back_monitor
+
+# The maximum number of entries that are returned for a search operation
+sizelimit 500
+
+# The tool-threads parameter sets the actual number of CPUs that are used
+# for indexing.
+tool-threads 1
+
+database bdb
+
+# The base of your directory in database #1
+suffix "dc=cubicweb,dc=test"
+
+# rootdn directive for specifying a superuser on the database. This is needed
+# for syncrepl.
+#rootdn "cn=admin,dc=cubicweb,dc=test"
+#rootpw "cubicwebrocks"
+# Where the database files are physically stored for database #1
+directory "%(apphome)s/ldapdb"
+
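
The %(apphome)s markers above are ordinary Python string-formatting keys; create_slapd_configuration() in unittest_ldapuser.py (further down in this patch) instantiates the template essentially like this (paths are illustrative):

    template = open('server/test/data/slapd.conf.in').read()
    conf = template % {'apphome': '/tmp/cw-test-apphome'}
    open('/tmp/cw-test-apphome/slapd.conf', 'w').write(conf)
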
diff -r 48f468f33704 -r e4580e5f0703 server/test/data/sources
--- a/server/test/data/sources Fri Dec 10 12:17:18 2010 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,14 +0,0 @@
-[system]
-
-db-driver = sqlite
-db-host =
-adapter = native
-db-name = tmpdb
-db-encoding = UTF-8
-db-user = admin
-db-password = gingkow
-
-[admin]
-login = admin
-password = gingkow
-
diff -r 48f468f33704 -r e4580e5f0703 server/test/data/sources_extern
--- a/server/test/data/sources_extern Fri Dec 10 12:17:18 2010 +0100
+++ b/server/test/data/sources_extern Fri Mar 11 09:46:45 2011 +0100
@@ -1,13 +1,4 @@
[system]
-
db-driver = sqlite
-db-host =
-adapter = native
db-name = tmpdb-extern
db-encoding = UTF-8
-db-user = admin
-db-password = gingkow
-
-[admin]
-login = admin
-password = gingkow
diff -r 48f468f33704 -r e4580e5f0703 server/test/data/sources_ldap1
--- a/server/test/data/sources_ldap1 Fri Dec 10 12:17:18 2010 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,35 +0,0 @@
-[system]
-adapter=native
-# database driver (postgres or sqlite)
-db-driver=sqlite
-# database host
-db-host=
-# database name
-db-name=tmpdb
-# database user
-db-user=admin
-# database password
-db-password=gingkow
-# database encoding
-db-encoding=utf8
-
-[admin]
-login = admin
-password = gingkow
-
-[ldapuser]
-adapter=ldapuser
-# ldap host
-host=ldap1
-# base DN to lookup for usres
-user-base-dn=ou=People,dc=logilab,dc=fr
-# user search scope
-user-scope=ONELEVEL
-# classes of user
-user-classes=top,posixAccount
-# attribute used as login on authentication
-user-login-attr=uid
-# name of a group in which ldap users will be by default
-user-default-group=users
-# map from ldap user attributes to cubicweb attributes
-user-attrs-map=gecos:email,uid:login
diff -r 48f468f33704 -r e4580e5f0703 server/test/data/sources_ldap2
--- a/server/test/data/sources_ldap2 Fri Dec 10 12:17:18 2010 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,35 +0,0 @@
-[system]
-adapter=native
-# database driver (postgres or sqlite)
-db-driver=sqlite
-# database host
-db-host=
-# database name
-db-name=tmpdb
-# database user
-db-user=admin
-# database password
-db-password=gingkow
-# database encoding
-db-encoding=utf8
-
-[admin]
-login = admin
-password = gingkow
-
-[ldapuser]
-adapter=ldapuser
-# ldap host
-host=ldap1
-# base DN to lookup for usres
-user-base-dn=ou=People,dc=logilab,dc=net
-# user search scope
-user-scope=ONELEVEL
-# classes of user
-user-classes=top,OpenLDAPperson
-# attribute used as login on authentication
-user-login-attr=uid
-# name of a group in which ldap users will be by default
-user-default-group=users
-# map from ldap user attributes to cubicweb attributes
-user-attrs-map=mail:email,uid:login
diff -r 48f468f33704 -r e4580e5f0703 server/test/data/sources_multi
--- a/server/test/data/sources_multi Fri Dec 10 12:17:18 2010 +0100
+++ b/server/test/data/sources_multi Fri Mar 11 09:46:45 2011 +0100
@@ -1,28 +1,5 @@
[system]
-
db-driver = sqlite
-db-host =
adapter = native
db-name = tmpdb-multi
db-encoding = UTF-8
-db-user = admin
-db-password = gingkow
-
-[extern]
-adapter = pyrorql
-pyro-ns-id = extern
-cubicweb-user = admin
-cubicweb-password = gingkow
-mapping-file = extern_mapping.py
-base-url=http://extern.org/
-
-[extern-multi]
-adapter = pyrorql
-pyro-ns-id = extern-multi
-cubicweb-user = admin
-cubicweb-password = gingkow
-mapping-file = extern_mapping.py
-
-[admin]
-login = admin
-password = gingkow
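
The [admin], [ldapuser] and pyrorql sections removed from these files are not lost: sources other than the system database are now expected to be registered at runtime as CWSource entities, as add_ldap_source() in unittest_ldapuser.py does further down in this patch. The pattern is:

    # CONFIG holds the key=value pairs that used to live in the [ldapuser] section
    cnx.request().create_entity('CWSource', name=u'ldapuser', type=u'ldapuser',
                                config=CONFIG)
    cnx.commit()
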
diff -r 48f468f33704 -r e4580e5f0703 server/test/unittest_checkintegrity.py
--- a/server/test/unittest_checkintegrity.py Fri Dec 10 12:17:18 2010 +0100
+++ b/server/test/unittest_checkintegrity.py Fri Mar 11 09:46:45 2011 +0100
@@ -26,7 +26,7 @@
class CheckIntegrityTC(TestCase):
def setUp(self):
- self.repo, self.cnx = init_test_database()
+ self.repo, self.cnx = init_test_database(apphome=self.datadir)
self.execute = self.cnx.cursor().execute
self.session = self.repo._sessions[self.cnx.sessionid]
sys.stderr = sys.stdout = StringIO()
diff -r 48f468f33704 -r e4580e5f0703 server/test/unittest_fti.py
--- a/server/test/unittest_fti.py Fri Dec 10 12:17:18 2010 +0100
+++ b/server/test/unittest_fti.py Fri Mar 11 09:46:45 2011 +0100
@@ -2,18 +2,23 @@
import socket
+from logilab.common.testlib import SkipTest
+
from cubicweb.devtools import ApptestConfiguration
from cubicweb.devtools.testlib import CubicWebTC
from cubicweb.selectors import is_instance
from cubicweb.entities.adapters import IFTIndexableAdapter
+AT_LOGILAB = socket.gethostname().endswith('.logilab.fr')
+
+
class PostgresFTITC(CubicWebTC):
config = ApptestConfiguration('data', sourcefile='sources_fti')
- def setUp(self):
- if not socket.gethostname().endswith('.logilab.fr'):
- self.skipTest('XXX require logilab configuration')
- super(PostgresFTITC, self).setUp()
+ @classmethod
+ def setUpClass(cls):
+ if not AT_LOGILAB:
+ raise SkipTest('XXX %s: requires logilab configuration' % cls.__name__)
def test_occurence_count(self):
req = self.request()
@@ -44,7 +49,6 @@
self.assertEqual(req.execute('Card X ORDERBY FTIRANK(X) DESC WHERE X has_text "cubicweb"').rows,
[[c3.eid], [c1.eid], [c2.eid]])
-
def test_entity_weight(self):
class PersonneIFTIndexableAdapter(IFTIndexableAdapter):
__select__ = is_instance('Personne')
@@ -58,6 +62,7 @@
self.assertEqual(req.execute('Any X ORDERBY FTIRANK(X) DESC WHERE X has_text "cubicweb"').rows,
[[c1.eid], [c3.eid], [c2.eid]])
+
if __name__ == '__main__':
from logilab.common.testlib import unittest_main
unittest_main()
diff -r 48f468f33704 -r e4580e5f0703 server/test/unittest_hook.py
--- a/server/test/unittest_hook.py Fri Dec 10 12:17:18 2010 +0100
+++ b/server/test/unittest_hook.py Fri Mar 11 09:46:45 2011 +0100
@@ -18,6 +18,8 @@
# with CubicWeb. If not, see .
"""unit/functional tests for cubicweb.server.hook"""
+from __future__ import with_statement
+
from logilab.common.testlib import TestCase, unittest_main, mock_object
@@ -79,7 +81,7 @@
config.bootstrap_cubes()
schema = config.load_schema()
-def teardown_module(*args):
+def tearDownModule(*args):
global config, schema
del config, schema
@@ -101,20 +103,23 @@
def test_register_bad_hook1(self):
class _Hook(hook.Hook):
events = ('before_add_entiti',)
- ex = self.assertRaises(Exception, self.o.register, _Hook)
- self.assertEqual(str(ex), 'bad event before_add_entiti on %s._Hook' % __name__)
+ with self.assertRaises(Exception) as cm:
+ self.o.register(_Hook)
+ self.assertEqual(str(cm.exception), 'bad event before_add_entiti on %s._Hook' % __name__)
def test_register_bad_hook2(self):
class _Hook(hook.Hook):
events = None
- ex = self.assertRaises(Exception, self.o.register, _Hook)
- self.assertEqual(str(ex), 'bad .events attribute None on %s._Hook' % __name__)
+ with self.assertRaises(Exception) as cm:
+ self.o.register(_Hook)
+ self.assertEqual(str(cm.exception), 'bad .events attribute None on %s._Hook' % __name__)
def test_register_bad_hook3(self):
class _Hook(hook.Hook):
events = 'before_add_entity'
- ex = self.assertRaises(Exception, self.o.register, _Hook)
- self.assertEqual(str(ex), 'bad event b on %s._Hook' % __name__)
+ with self.assertRaises(Exception) as cm:
+ self.o.register(_Hook)
+ self.assertEqual(str(cm.exception), 'bad event b on %s._Hook' % __name__)
def test_call_hook(self):
self.o.register(AddAnyHook)
diff -r 48f468f33704 -r e4580e5f0703 server/test/unittest_ldapuser.py
--- a/server/test/unittest_ldapuser.py Fri Dec 10 12:17:18 2010 +0100
+++ b/server/test/unittest_ldapuser.py Fri Mar 11 09:46:45 2011 +0100
@@ -17,25 +17,31 @@
# with CubicWeb. If not, see .
"""cubicweb.server.sources.ldapusers unit and functional tests"""
-import socket
+import os
+import shutil
+import time
+from os.path import abspath, join, exists
+import subprocess
+from socket import socket, error as socketerror
from logilab.common.testlib import TestCase, unittest_main, mock_object
-from cubicweb.devtools import TestServerConfiguration
from cubicweb.devtools.testlib import CubicWebTC
from cubicweb.devtools.repotest import RQLGeneratorTC
+from cubicweb.devtools.httptest import get_available_port
from cubicweb.server.sources.ldapuser import *
-if '17.1' in socket.gethostbyname('ldap1'):
- SYT = 'syt'
- SYT_EMAIL = 'Sylvain Thenault'
- ADIM = 'adim'
- SOURCESFILE = 'data/sources_ldap1'
-else:
- SYT = 'sthenault'
- SYT_EMAIL = 'sylvain.thenault@logilab.fr'
- ADIM = 'adimascio'
- SOURCESFILE = 'data/sources_ldap2'
+SYT = 'syt'
+SYT_EMAIL = 'Sylvain Thenault'
+ADIM = 'adim'
+CONFIG = u'''host=%s
+user-base-dn=ou=People,dc=cubicweb,dc=test
+user-scope=ONELEVEL
+user-classes=top,posixAccount
+user-login-attr=uid
+user-default-group=users
+user-attrs-map=gecos:email,uid:login
+'''
def nopwd_authenticate(self, session, login, password):
@@ -57,25 +63,80 @@
# don't check upassword !
return self.extid2eid(user['dn'], 'CWUser', session)
+def setUpModule(*args):
+ create_slapd_configuration(LDAPUserSourceTC.config)
+ global repo
+ try:
+ LDAPUserSourceTC._init_repo()
+ repo = LDAPUserSourceTC.repo
+ add_ldap_source(LDAPUserSourceTC.cnx)
+ except:
+ terminate_slapd()
+ raise
+def tearDownModule(*args):
+ global repo
+ repo.shutdown()
+ del repo
+ terminate_slapd()
+
+def add_ldap_source(cnx):
+ cnx.request().create_entity('CWSource', name=u'ldapuser', type=u'ldapuser',
+ config=CONFIG)
+ cnx.commit()
+
+def create_slapd_configuration(config):
+ global slapd_process, CONFIG
+ basedir = join(config.apphome, "ldapdb")
+ slapdconf = join(config.apphome, "slapd.conf")
+ confin = file(join(config.apphome, "slapd.conf.in")).read()
+ confstream = file(slapdconf, 'w')
+ confstream.write(confin % {'apphome': config.apphome})
+ confstream.close()
+ if not exists(basedir):
+ os.makedirs(basedir)
+ # fill ldap server with some data
+ ldiffile = join(config.apphome, "ldap_test.ldif")
+ print "Initing ldap database"
+ cmdline = "/usr/sbin/slapadd -f %s -l %s -c" % (slapdconf, ldiffile)
+ subprocess.call(cmdline, shell=True)
+
+
+ #ldapuri = 'ldapi://' + join(basedir, "ldapi").replace('/', '%2f')
+ port = get_available_port(xrange(9000, 9100))
+ host = 'localhost:%s' % port
+ ldapuri = 'ldap://%s' % host
+ cmdline = ["/usr/sbin/slapd", "-f", slapdconf, "-h", ldapuri, "-d", "0"]
+ print "Starting slapd on", ldapuri
+ slapd_process = subprocess.Popen(cmdline)
+ time.sleep(0.2)
+ if slapd_process.poll() is None:
+ print "slapd started with pid %s" % slapd_process.pid
+ else:
+ raise EnvironmentError('Cannot start slapd with cmdline="%s" (from directory "%s")' %
+ (" ".join(cmdline), os.getcwd()))
+ CONFIG = CONFIG % host
+
+def terminate_slapd():
+ global slapd_process
+ if slapd_process.returncode is None:
+ print "terminating slapd"
+ if hasattr(slapd_process, 'terminate'):
+ slapd_process.terminate()
+ else:
+ import os, signal
+ os.kill(slapd_process.pid, signal.SIGTERM)
+ slapd_process.wait()
+ print "DONE"
+
+ del slapd_process
class LDAPUserSourceTC(CubicWebTC):
- config = TestServerConfiguration('data')
- config.sources_file = lambda: SOURCESFILE
def patch_authenticate(self):
self._orig_authenticate = LDAPUserSource.authenticate
LDAPUserSource.authenticate = nopwd_authenticate
- def setup_database(self):
- # XXX: need this first query else we get 'database is locked' from
- # sqlite since it doesn't support multiple connections on the same
- # database
- # so doing, ldap inserted users don't get removed between each test
- rset = self.sexecute('CWUser X')
- # check we get some users from ldap
- self.assert_(len(rset) > 1)
-
def tearDown(self):
if hasattr(self, '_orig_authenticate'):
LDAPUserSource.authenticate = self._orig_authenticate
@@ -93,7 +154,8 @@
def test_base(self):
# check a known one
- e = self.sexecute('CWUser X WHERE X login %(login)s', {'login': SYT}).get_entity(0, 0)
+ rset = self.sexecute('CWUser X WHERE X login %(login)s', {'login': SYT})
+ e = rset.get_entity(0, 0)
self.assertEqual(e.login, SYT)
e.complete()
self.assertEqual(e.creation_date, None)
@@ -382,19 +444,10 @@
res = trfunc.apply([[1, 2], [2, 4], [3, 6], [1, 5]])
self.assertEqual(res, [[1, 5], [2, 4], [3, 6]])
-# XXX
-LDAPUserSourceTC._init_repo()
-repo = LDAPUserSourceTC.repo
-
-def teardown_module(*args):
- global repo
- del repo
- del RQL2LDAPFilterTC.schema
-
class RQL2LDAPFilterTC(RQLGeneratorTC):
- schema = repo.schema
def setUp(self):
+ self.schema = repo.schema
RQLGeneratorTC.setUp(self)
ldapsource = repo.sources[-1]
self.pool = repo._get_pool()
diff -r 48f468f33704 -r e4580e5f0703 server/test/unittest_migractions.py
--- a/server/test/unittest_migractions.py Fri Dec 10 12:17:18 2010 +0100
+++ b/server/test/unittest_migractions.py Fri Mar 11 09:46:45 2011 +0100
@@ -15,29 +15,35 @@
#
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see .
-"""unit tests for module cubicweb.server.migractions
-"""
+"""unit tests for module cubicweb.server.migractions"""
+
+from __future__ import with_statement
from copy import deepcopy
from datetime import date
from os.path import join
-from logilab.common.testlib import TestCase, unittest_main
+from logilab.common.testlib import TestCase, unittest_main, Tags, tag
-from cubicweb import ConfigurationError
+from yams.constraints import UniqueConstraint
+
+from cubicweb import ConfigurationError, ValidationError
from cubicweb.devtools.testlib import CubicWebTC
from cubicweb.schema import CubicWebSchemaLoader
from cubicweb.server.sqlutils import SQL_PREFIX
from cubicweb.server.migractions import *
migrschema = None
-def teardown_module(*args):
+def tearDownModule(*args):
global migrschema
del migrschema
- del MigrationCommandsTC.origschema
+ if hasattr(MigrationCommandsTC, 'origschema'):
+ del MigrationCommandsTC.origschema
class MigrationCommandsTC(CubicWebTC):
+ tags = CubicWebTC.tags | Tags(('server', 'migration', 'migractions'))
+
@classmethod
def init_config(cls, config):
super(MigrationCommandsTC, cls).init_config(config)
@@ -47,9 +53,11 @@
cls.origschema = deepcopy(cls.repo.schema)
# hack to read the schema from data/migrschema
config.appid = join('data', 'migratedapp')
+ config._apphome = cls.datapath('migratedapp')
global migrschema
migrschema = config.load_schema()
config.appid = 'data'
+ config._apphome = cls.datadir
assert 'Folder' in migrschema
@classmethod
@@ -72,6 +80,10 @@
assert self.cnx is self.mh._cnx
assert self.session is self.mh.session, (self.session.id, self.mh.session.id)
+ def tearDown(self):
+ CubicWebTC.tearDown(self)
+ self.repo.vreg['etypes'].clear_caches()
+
def test_add_attribute_int(self):
self.failIf('whatever' in self.schema)
self.request().create_entity('Note')
@@ -83,8 +95,12 @@
self.assertEqual(self.schema['whatever'].subjects(), ('Note',))
self.assertEqual(self.schema['whatever'].objects(), ('Int',))
self.assertEqual(self.schema['Note'].default('whatever'), 2)
+ # test default value set on existing entities
note = self.execute('Note X').get_entity(0, 0)
self.assertEqual(note.whatever, 2)
+ # test default value set for next entities
+ self.assertEqual(self.request().create_entity('Note').whatever, 2)
+ # test attribute order
orderdict2 = dict(self.mh.rqlexec('Any RTN, O WHERE X name "Note", RDEF from_entity X, '
'RDEF relation_type RT, RDEF ordernum O, RT name RTN'))
whateverorder = migrschema['whatever'].rdef('Note', 'Int').order
@@ -103,6 +119,9 @@
self.mh.rollback()
def test_add_attribute_varchar(self):
+ self.failIf('whatever' in self.schema)
+ self.request().create_entity('Note')
+ self.commit()
self.failIf('shortpara' in self.schema)
self.mh.cmd_add_attribute('Note', 'shortpara')
self.failUnless('shortpara' in self.schema)
@@ -112,6 +131,11 @@
notesql = self.mh.sqlexec("SELECT sql FROM sqlite_master WHERE type='table' and name='%sNote'" % SQL_PREFIX)[0][0]
fields = dict(x.strip().split()[:2] for x in notesql.split('(', 1)[1].rsplit(')', 1)[0].split(','))
self.assertEqual(fields['%sshortpara' % SQL_PREFIX], 'varchar(64)')
+ req = self.request()
+ # test default value set on existing entities
+ self.assertEqual(req.execute('Note X').get_entity(0, 0).shortpara, 'hop')
+ # test default value set for next entities
+ self.assertEqual(req.create_entity('Note').shortpara, 'hop')
self.mh.rollback()
def test_add_datetime_with_default_value_attribute(self):
@@ -130,6 +154,29 @@
self.assertEqual(d2, testdate)
self.mh.rollback()
+ def test_drop_chosen_constraints_ctxmanager(self):
+ with self.mh.cmd_dropped_constraints('Note', 'unique_id', UniqueConstraint):
+ self.mh.cmd_add_attribute('Note', 'unique_id')
+ # make sure the maxsize constraint is not dropped
+ self.assertRaises(ValidationError,
+ self.mh.rqlexec,
+ 'INSERT Note N: N unique_id "xyz"')
+ self.mh.rollback()
+ # make sure the unique constraint is dropped
+ self.mh.rqlexec('INSERT Note N: N unique_id "x"')
+ self.mh.rqlexec('INSERT Note N: N unique_id "x"')
+ self.mh.rqlexec('DELETE Note N')
+ self.mh.rollback()
+
+ def test_drop_required_ctxmanager(self):
+ with self.mh.cmd_dropped_constraints('Note', 'unique_id', cstrtype=None,
+ droprequired=True):
+ self.mh.cmd_add_attribute('Note', 'unique_id')
+ self.mh.rqlexec('INSERT Note N')
+ # make sure the required=True was restored
+ self.assertRaises(ValidationError, self.mh.rqlexec, 'INSERT Note N')
+ self.mh.rollback()
+
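
Condensed, the migration-handler usage these two tests exercise is the following (attribute and type names come from the test schema; mh stands for the migration handler, as in the tests):

    from yams.constraints import UniqueConstraint

    # add the attribute with its UniqueConstraint temporarily dropped, e.g. so
    # existing rows can be given values first; maxsize and other constraints
    # stay active, and droprequired=True would also relax required=True
    with mh.cmd_dropped_constraints('Note', 'unique_id', UniqueConstraint):
        mh.cmd_add_attribute('Note', 'unique_id')
    # on exit the dropped constraints declared in the schema are restored
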
def test_rename_attribute(self):
self.failIf('civility' in self.schema)
eid1 = self.mh.rqlexec('INSERT Personne X: X nom "lui", X sexe "M"')[0][0]
@@ -145,7 +192,8 @@
def test_workflow_actions(self):
- wf = self.mh.cmd_add_workflow(u'foo', ('Personne', 'Email'))
+ wf = self.mh.cmd_add_workflow(u'foo', ('Personne', 'Email'),
+ ensure_workflowable=False)
for etype in ('Personne', 'Email'):
s1 = self.mh.rqlexec('Any N WHERE WF workflow_of ET, ET name "%s", WF name N' %
etype)[0][0]
@@ -164,7 +212,7 @@
self.failUnless(self.execute('CWRType X WHERE X name "filed_under2"'))
self.schema.rebuild_infered_relations()
self.assertEqual(sorted(str(rs) for rs in self.schema['Folder2'].subject_relations()),
- ['created_by', 'creation_date', 'cwuri',
+ ['created_by', 'creation_date', 'cw_source', 'cwuri',
'description', 'description_format',
'eid',
'filed_under2', 'has_text',
@@ -181,7 +229,8 @@
def test_add_drop_entity_type(self):
self.mh.cmd_add_entity_type('Folder2')
- wf = self.mh.cmd_add_workflow(u'folder2 wf', 'Folder2')
+ wf = self.mh.cmd_add_workflow(u'folder2 wf', 'Folder2',
+ ensure_workflowable=False)
todo = wf.add_state(u'todo', initial=True)
done = wf.add_state(u'done')
wf.add_transition(u'redoit', done, todo)
@@ -297,6 +346,7 @@
self.mh.cmd_change_relation_props('Personne', 'adel', 'String',
fulltextindexed=False)
+ @tag('longrun')
def test_sync_schema_props_perms(self):
cursor = self.mh.session
cursor.set_pool()
@@ -309,7 +359,7 @@
migrschema['titre'].rdefs[('Personne', 'String')].description = 'title for this person'
delete_concerne_rqlexpr = self._rrqlexpr_rset('delete', 'concerne')
add_concerne_rqlexpr = self._rrqlexpr_rset('add', 'concerne')
-
+
self.mh.cmd_sync_schema_props_perms(commit=False)
self.assertEqual(cursor.execute('Any D WHERE X name "Personne", X description D')[0][0],
@@ -386,9 +436,9 @@
self.assertEqual(len(self.schema.eschema('Personne')._unique_together), 1)
self.assertItemsEqual(self.schema.eschema('Personne')._unique_together[0],
('nom', 'prenom', 'datenaiss'))
- rset = cursor.execute('Any C WHERE C is CWUniqueTogetherConstraint')
+ rset = cursor.execute('Any C WHERE C is CWUniqueTogetherConstraint, C constraint_of ET, ET name "Personne"')
self.assertEqual(len(rset), 1)
- relations = [r.rtype.name for r in rset.get_entity(0,0).relations]
+ relations = [r.name for r in rset.get_entity(0, 0).relations]
self.assertItemsEqual(relations, ('nom', 'prenom', 'datenaiss'))
def _erqlexpr_rset(self, action, ertype):
@@ -418,6 +468,7 @@
finally:
self.mh.cmd_set_size_constraint('CWEType', 'description', None)
+ @tag('longrun')
def test_add_remove_cube_and_deps(self):
cubes = set(self.config.cubes())
schema = self.repo.schema
@@ -481,6 +532,7 @@
self.commit()
+ @tag('longrun')
def test_add_remove_cube_no_deps(self):
cubes = set(self.config.cubes())
schema = self.repo.schema
@@ -508,9 +560,11 @@
self.commit()
def test_remove_dep_cube(self):
- ex = self.assertRaises(ConfigurationError, self.mh.cmd_remove_cube, 'file')
- self.assertEqual(str(ex), "can't remove cube file, used as a dependency")
+ with self.assertRaises(ConfigurationError) as cm:
+ self.mh.cmd_remove_cube('file')
+ self.assertEqual(str(cm.exception), "can't remove cube file, used as a dependency")
+ @tag('longrun')
def test_introduce_base_class(self):
self.mh.cmd_add_entity_type('Para')
self.mh.repo.schema.rebuild_infered_relations()
@@ -524,7 +578,7 @@
self.assertEqual(self.schema['Text'].specializes().type, 'Para')
# test columns have been actually added
text = self.execute('INSERT Text X: X para "hip", X summary "hop", X newattr "momo"').get_entity(0, 0)
- note = self.execute('INSERT Note X: X para "hip", X shortpara "hop", X newattr "momo"').get_entity(0, 0)
+ note = self.execute('INSERT Note X: X para "hip", X shortpara "hop", X newattr "momo", X unique_id "x"').get_entity(0, 0)
aff = self.execute('INSERT Affaire X').get_entity(0, 0)
self.failUnless(self.execute('SET X newnotinlined Y WHERE X eid %(x)s, Y eid %(y)s',
{'x': text.eid, 'y': aff.eid}))
diff -r 48f468f33704 -r e4580e5f0703 server/test/unittest_msplanner.py
--- a/server/test/unittest_msplanner.py Fri Dec 10 12:17:18 2010 +0100
+++ b/server/test/unittest_msplanner.py Fri Mar 11 09:46:45 2011 +0100
@@ -15,9 +15,15 @@
#
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see .
+"""unit tests for module cubicweb.server.msplanner"""
+
+from __future__ import with_statement
from logilab.common.decorators import clear_cache
+from yams.buildobjs import RelationDefinition
+from rql import BadRQLQuery
+
from cubicweb.devtools import init_test_database
from cubicweb.devtools.repotest import BasePlannerTC, test_plan
@@ -59,8 +65,9 @@
{'X': 'Bookmark'}, {'X': 'CWAttribute'}, {'X': 'CWCache'},
{'X': 'CWConstraint'}, {'X': 'CWConstraintType'}, {'X': 'CWEType'},
{'X': 'CWGroup'}, {'X': 'CWPermission'}, {'X': 'CWProperty'},
- {'X': 'CWRType'}, {'X': 'CWRelation'}, {'X': 'CWUser'},
- {'X': 'CWUniqueTogetherConstraint'},
+ {'X': 'CWRType'}, {'X': 'CWRelation'},
+ {'X': 'CWSource'}, {'X': 'CWSourceHostConfig'},
+ {'X': 'CWUser'}, {'X': 'CWUniqueTogetherConstraint'},
{'X': 'Card'}, {'X': 'Comment'}, {'X': 'Division'},
{'X': 'Email'}, {'X': 'EmailAddress'}, {'X': 'EmailPart'},
{'X': 'EmailThread'}, {'X': 'ExternalUri'}, {'X': 'File'},
@@ -72,9 +79,11 @@
# keep cnx so it's not garbage collected and the associated session is closed
-repo, cnx = init_test_database()
+def setUpModule(*args):
+ global repo, cnx
+ repo, cnx = init_test_database(apphome=BaseMSPlannerTC.datadir)
-def teardown_module(*args):
+def tearDownModule(*args):
global repo, cnx
del repo, cnx
@@ -86,9 +95,9 @@
* ldap source supporting CWUser
* rql source supporting Card
"""
- repo = repo
def setUp(self):
+ self.__class__.repo = repo
#_QuerierTC.setUp(self)
self.setup()
# hijack Affaire security
@@ -537,7 +546,7 @@
[self.ldap, self.system], None,
{'AA': 'table0.C1', 'X': 'table0.C0', 'X.modification_date': 'table0.C1'}, []),
('OneFetchStep',
- [('Any X,AA ORDERBY AA WHERE 5 owned_by X, X modification_date AA, X is CWUser',
+ [('Any X,AA ORDERBY AA WHERE %s owned_by X, X modification_date AA, X is CWUser' % ueid,
[{'AA': 'Datetime', 'X': 'CWUser'}])],
None, None, [self.system],
{'AA': 'table0.C1', 'X': 'table0.C0', 'X.modification_date': 'table0.C1'}, []),
@@ -687,7 +696,7 @@
def test_complex_optional(self):
ueid = self.session.user.eid
self._test('Any U WHERE WF wf_info_for X, X eid %(x)s, WF owned_by U?, WF from_state FS',
- [('OneFetchStep', [('Any U WHERE WF wf_info_for 5, WF owned_by U?, WF from_state FS',
+ [('OneFetchStep', [('Any U WHERE WF wf_info_for %s, WF owned_by U?, WF from_state FS' % ueid,
[{'WF': 'TrInfo', 'FS': 'State', 'U': 'CWUser'}])],
None, None, [self.system], {}, [])],
{'x': ueid})
@@ -695,7 +704,7 @@
def test_complex_optional(self):
ueid = self.session.user.eid
self._test('Any U WHERE WF wf_info_for X, X eid %(x)s, WF owned_by U?, WF from_state FS',
- [('OneFetchStep', [('Any U WHERE WF wf_info_for 5, WF owned_by U?, WF from_state FS',
+ [('OneFetchStep', [('Any U WHERE WF wf_info_for %s, WF owned_by U?, WF from_state FS' % ueid,
[{'WF': 'TrInfo', 'FS': 'State', 'U': 'CWUser'}])],
None, None, [self.system], {}, [])],
{'x': ueid})
@@ -751,9 +760,10 @@
])
def test_not_identity(self):
- self._test('Any X WHERE NOT X identity U, U eid %s' % self.session.user.eid,
+ ueid = self.session.user.eid
+ self._test('Any X WHERE NOT X identity U, U eid %s' % ueid,
[('OneFetchStep',
- [('Any X WHERE NOT X identity 5, X is CWUser', [{'X': 'CWUser'}])],
+ [('Any X WHERE NOT X identity %s, X is CWUser' % ueid, [{'X': 'CWUser'}])],
None, None,
[self.ldap, self.system], {}, [])
])
@@ -777,18 +787,19 @@
def test_security_has_text(self):
# use a guest user
self.session = self.user_groups_session('guests')
+ ueid = self.session.user.eid
self._test('Any X WHERE X has_text "bla"',
[('FetchStep', [('Any E WHERE E type "X", E is Note', [{'E': 'Note'}])],
[self.cards, self.system], None, {'E': 'table0.C0'}, []),
('UnionStep', None, None,
[('OneFetchStep',
- [(u'Any X WHERE X has_text "bla", (EXISTS(X owned_by 5)) OR ((((EXISTS(D concerne C?, C owned_by 5, C type "X", X identity D, C is Division, D is Affaire)) OR (EXISTS(H concerne G?, G owned_by 5, G type "X", X identity H, G is SubDivision, H is Affaire))) OR (EXISTS(I concerne F?, F owned_by 5, F type "X", X identity I, F is Societe, I is Affaire))) OR (EXISTS(J concerne E?, E owned_by 5, X identity J, E is Note, J is Affaire))), X is Affaire',
+ [(u'Any X WHERE X has_text "bla", (EXISTS(X owned_by %(ueid)s)) OR ((((EXISTS(D concerne C?, C owned_by %(ueid)s, C type "X", X identity D, C is Division, D is Affaire)) OR (EXISTS(H concerne G?, G owned_by %(ueid)s, G type "X", X identity H, G is SubDivision, H is Affaire))) OR (EXISTS(I concerne F?, F owned_by %(ueid)s, F type "X", X identity I, F is Societe, I is Affaire))) OR (EXISTS(J concerne E?, E owned_by %(ueid)s, X identity J, E is Note, J is Affaire))), X is Affaire' % {'ueid': ueid},
[{'C': 'Division', 'E': 'Note', 'D': 'Affaire', 'G': 'SubDivision', 'F': 'Societe', 'I': 'Affaire', 'H': 'Affaire', 'J': 'Affaire', 'X': 'Affaire'}])],
None, None, [self.system], {'E': 'table0.C0'}, []),
('OneFetchStep',
- [('Any X WHERE X has_text "bla", EXISTS(X owned_by 5), X is Basket',
+ [('Any X WHERE X has_text "bla", EXISTS(X owned_by %s), X is Basket' % ueid,
[{'X': 'Basket'}]),
- ('Any X WHERE X has_text "bla", EXISTS(X owned_by 5), X is CWUser',
+ ('Any X WHERE X has_text "bla", EXISTS(X owned_by %s), X is CWUser' % ueid,
[{'X': 'CWUser'}]),
('Any X WHERE X has_text "bla", X is IN(Card, Comment, Division, Email, EmailThread, File, Folder, Note, Personne, Societe, SubDivision, Tag)',
[{'X': 'Card'}, {'X': 'Comment'},
@@ -803,18 +814,20 @@
def test_security_has_text_limit_offset(self):
# use a guest user
self.session = self.user_groups_session('guests')
- # note: same as the above query but because of the subquery usage, the display differs (not printing solutions for each union)
+ ueid = self.session.user.eid
+ # note: same as the above query but because of the subquery usage, the
+ # display differs (not printing solutions for each union)
self._test('Any X LIMIT 10 OFFSET 10 WHERE X has_text "bla"',
[('FetchStep', [('Any E WHERE E type "X", E is Note', [{'E': 'Note'}])],
[self.cards, self.system], None, {'E': 'table1.C0'}, []),
('UnionFetchStep', [
- ('FetchStep', [('Any X WHERE X has_text "bla", (EXISTS(X owned_by 5)) OR ((((EXISTS(D concerne C?, C owned_by 5, C type "X", X identity D, C is Division, D is Affaire)) OR (EXISTS(H concerne G?, G owned_by 5, G type "X", X identity H, G is SubDivision, H is Affaire))) OR (EXISTS(I concerne F?, F owned_by 5, F type "X", X identity I, F is Societe, I is Affaire))) OR (EXISTS(J concerne E?, E owned_by 5, X identity J, E is Note, J is Affaire))), X is Affaire',
+ ('FetchStep', [('Any X WHERE X has_text "bla", (EXISTS(X owned_by %(ueid)s)) OR ((((EXISTS(D concerne C?, C owned_by %(ueid)s, C type "X", X identity D, C is Division, D is Affaire)) OR (EXISTS(H concerne G?, G owned_by %(ueid)s, G type "X", X identity H, G is SubDivision, H is Affaire))) OR (EXISTS(I concerne F?, F owned_by %(ueid)s, F type "X", X identity I, F is Societe, I is Affaire))) OR (EXISTS(J concerne E?, E owned_by %(ueid)s, X identity J, E is Note, J is Affaire))), X is Affaire' % {'ueid': ueid},
[{'C': 'Division', 'E': 'Note', 'D': 'Affaire', 'G': 'SubDivision', 'F': 'Societe', 'I': 'Affaire', 'H': 'Affaire', 'J': 'Affaire', 'X': 'Affaire'}])],
[self.system], {'E': 'table1.C0'}, {'X': 'table0.C0'}, []),
('FetchStep',
- [('Any X WHERE X has_text "bla", EXISTS(X owned_by 5), X is Basket',
+ [('Any X WHERE X has_text "bla", EXISTS(X owned_by %s), X is Basket' % ueid,
[{'X': 'Basket'}]),
- ('Any X WHERE X has_text "bla", EXISTS(X owned_by 5), X is CWUser',
+ ('Any X WHERE X has_text "bla", EXISTS(X owned_by %s), X is CWUser' % ueid,
[{'X': 'CWUser'}]),
('Any X WHERE X has_text "bla", X is IN(Card, Comment, Division, Email, EmailThread, File, Folder, Note, Personne, Societe, SubDivision, Tag)',
[{'X': 'Card'}, {'X': 'Comment'},
@@ -839,22 +852,24 @@
"""a guest user trying to see another user: EXISTS(X owned_by U) is automatically inserted"""
# use a guest user
self.session = self.user_groups_session('guests')
+ ueid = self.session.user.eid
self._test('Any X WHERE X login "bla"',
[('FetchStep',
[('Any X WHERE X login "bla", X is CWUser', [{'X': 'CWUser'}])],
[self.ldap, self.system], None, {'X': 'table0.C0'}, []),
('OneFetchStep',
- [('Any X WHERE EXISTS(X owned_by 5), X is CWUser', [{'X': 'CWUser'}])],
+ [('Any X WHERE EXISTS(X owned_by %s), X is CWUser' % ueid, [{'X': 'CWUser'}])],
None, None, [self.system], {'X': 'table0.C0'}, [])])
def test_security_complex_has_text(self):
# use a guest user
self.session = self.user_groups_session('guests')
+ ueid = self.session.user.eid
self._test('Any X WHERE X has_text "bla", X firstname "bla"',
[('FetchStep', [('Any X WHERE X firstname "bla", X is CWUser', [{'X': 'CWUser'}])],
[self.ldap, self.system], None, {'X': 'table0.C0'}, []),
('UnionStep', None, None, [
- ('OneFetchStep', [('Any X WHERE X has_text "bla", EXISTS(X owned_by 5), X is CWUser', [{'X': 'CWUser'}])],
+ ('OneFetchStep', [('Any X WHERE X has_text "bla", EXISTS(X owned_by %s), X is CWUser' % ueid, [{'X': 'CWUser'}])],
None, None, [self.system], {'X': 'table0.C0'}, []),
('OneFetchStep', [('Any X WHERE X has_text "bla", X firstname "bla", X is Personne', [{'X': 'Personne'}])],
None, None, [self.system], {}, []),
@@ -864,11 +879,12 @@
def test_security_complex_has_text_limit_offset(self):
# use a guest user
self.session = self.user_groups_session('guests')
+ ueid = self.session.user.eid
self._test('Any X LIMIT 10 OFFSET 10 WHERE X has_text "bla", X firstname "bla"',
[('FetchStep', [('Any X WHERE X firstname "bla", X is CWUser', [{'X': 'CWUser'}])],
[self.ldap, self.system], None, {'X': 'table1.C0'}, []),
('UnionFetchStep', [
- ('FetchStep', [('Any X WHERE X has_text "bla", EXISTS(X owned_by 5), X is CWUser', [{'X': 'CWUser'}])],
+ ('FetchStep', [('Any X WHERE X has_text "bla", EXISTS(X owned_by %s), X is CWUser' % ueid, [{'X': 'CWUser'}])],
[self.system], {'X': 'table1.C0'}, {'X': 'table0.C0'}, []),
('FetchStep', [('Any X WHERE X has_text "bla", X firstname "bla", X is Personne', [{'X': 'Personne'}])],
[self.system], {}, {'X': 'table0.C0'}, []),
@@ -881,26 +897,30 @@
def test_security_complex_aggregat(self):
# use a guest user
self.session = self.user_groups_session('guests')
+ ueid = self.session.user.eid
+ ALL_SOLS = X_ALL_SOLS[:]
+ ALL_SOLS.remove({'X': 'CWSourceHostConfig'}) # not authorized
self._test('Any MAX(X)',
[('FetchStep', [('Any E WHERE E type "X", E is Note', [{'E': 'Note'}])],
[self.cards, self.system], None, {'E': 'table1.C0'}, []),
('FetchStep', [('Any X WHERE X is CWUser', [{'X': 'CWUser'}])],
[self.ldap, self.system], None, {'X': 'table2.C0'}, []),
('UnionFetchStep', [
- ('FetchStep', [('Any X WHERE EXISTS(X owned_by 5), X is Basket', [{'X': 'Basket'}])],
+ ('FetchStep', [('Any X WHERE EXISTS(X owned_by %s), X is Basket' % ueid, [{'X': 'Basket'}])],
[self.system], {}, {'X': 'table0.C0'}, []),
('UnionFetchStep',
[('FetchStep', [('Any X WHERE X is IN(Card, Note, State)',
[{'X': 'Card'}, {'X': 'Note'}, {'X': 'State'}])],
[self.cards, self.system], {}, {'X': 'table0.C0'}, []),
('FetchStep',
- [('Any X WHERE X is IN(BaseTransition, Bookmark, CWAttribute, CWCache, CWConstraint, CWConstraintType, CWEType, CWGroup, CWPermission, CWProperty, CWRType, CWRelation, CWUniqueTogetherConstraint, Comment, Division, Email, EmailAddress, EmailPart, EmailThread, ExternalUri, File, Folder, Personne, RQLExpression, Societe, SubDivision, SubWorkflowExitPoint, Tag, TrInfo, Transition, Workflow, WorkflowTransition)',
+ [('Any X WHERE X is IN(BaseTransition, Bookmark, CWAttribute, CWCache, CWConstraint, CWConstraintType, CWEType, CWGroup, CWPermission, CWProperty, CWRType, CWRelation, CWSource, CWUniqueTogetherConstraint, Comment, Division, Email, EmailAddress, EmailPart, EmailThread, ExternalUri, File, Folder, Personne, RQLExpression, Societe, SubDivision, SubWorkflowExitPoint, Tag, TrInfo, Transition, Workflow, WorkflowTransition)',
[{'X': 'BaseTransition'}, {'X': 'Bookmark'},
{'X': 'CWAttribute'}, {'X': 'CWCache'},
{'X': 'CWConstraint'}, {'X': 'CWConstraintType'},
{'X': 'CWEType'}, {'X': 'CWGroup'},
{'X': 'CWPermission'}, {'X': 'CWProperty'},
{'X': 'CWRType'}, {'X': 'CWRelation'},
+ {'X': 'CWSource'},
{'X': 'CWUniqueTogetherConstraint'},
{'X': 'Comment'}, {'X': 'Division'},
{'X': 'Email'}, {'X': 'EmailAddress'},
@@ -914,21 +934,24 @@
{'X': 'Workflow'}, {'X': 'WorkflowTransition'}])],
[self.system], {}, {'X': 'table0.C0'}, []),
]),
- ('FetchStep', [('Any X WHERE EXISTS(X owned_by 5), X is CWUser', [{'X': 'CWUser'}])],
+ ('FetchStep', [('Any X WHERE EXISTS(X owned_by %s), X is CWUser' % ueid, [{'X': 'CWUser'}])],
[self.system], {'X': 'table2.C0'}, {'X': 'table0.C0'}, []),
- ('FetchStep', [('Any X WHERE (EXISTS(X owned_by 5)) OR ((((EXISTS(D concerne C?, C owned_by 5, C type "X", X identity D, C is Division, D is Affaire)) OR (EXISTS(H concerne G?, G owned_by 5, G type "X", X identity H, G is SubDivision, H is Affaire))) OR (EXISTS(I concerne F?, F owned_by 5, F type "X", X identity I, F is Societe, I is Affaire))) OR (EXISTS(J concerne E?, E owned_by 5, X identity J, E is Note, J is Affaire))), X is Affaire',
+ ('FetchStep', [('Any X WHERE (EXISTS(X owned_by %(ueid)s)) OR ((((EXISTS(D concerne C?, C owned_by %(ueid)s, C type "X", X identity D, C is Division, D is Affaire)) OR (EXISTS(H concerne G?, G owned_by %(ueid)s, G type "X", X identity H, G is SubDivision, H is Affaire))) OR (EXISTS(I concerne F?, F owned_by %(ueid)s, F type "X", X identity I, F is Societe, I is Affaire))) OR (EXISTS(J concerne E?, E owned_by %(ueid)s, X identity J, E is Note, J is Affaire))), X is Affaire' % {'ueid': ueid},
[{'C': 'Division', 'E': 'Note', 'D': 'Affaire', 'G': 'SubDivision', 'F': 'Societe', 'I': 'Affaire', 'H': 'Affaire', 'J': 'Affaire', 'X': 'Affaire'}])],
[self.system], {'E': 'table1.C0'}, {'X': 'table0.C0'}, []),
]),
- ('OneFetchStep', [('Any MAX(X)', X_ALL_SOLS)],
+ ('OneFetchStep', [('Any MAX(X)', ALL_SOLS)],
None, None, [self.system], {'X': 'table0.C0'}, [])
])
def test_security_complex_aggregat2(self):
# use a guest user
self.session = self.user_groups_session('guests')
+ ueid = self.session.user.eid
X_ET_ALL_SOLS = []
for s in X_ALL_SOLS:
+ if s == {'X': 'CWSourceHostConfig'}:
+ continue # not authorized
ets = {'ET': 'CWEType'}
ets.update(s)
X_ET_ALL_SOLS.append(ets)
@@ -941,28 +964,29 @@
('FetchStep', [('Any X WHERE X is CWUser', [{'X': 'CWUser'}])],
[self.ldap, self.system], None, {'X': 'table3.C0'}, []),
('UnionFetchStep',
- [('FetchStep', [('Any ET,X WHERE X is ET, EXISTS(X owned_by 5), ET is CWEType, X is Basket',
+ [('FetchStep', [('Any ET,X WHERE X is ET, EXISTS(X owned_by %s), ET is CWEType, X is Basket' % ueid,
[{'ET': 'CWEType', 'X': 'Basket'}])],
[self.system], {}, {'ET': 'table0.C0', 'X': 'table0.C1'}, []),
- ('FetchStep', [('Any ET,X WHERE X is ET, (EXISTS(X owned_by 5)) OR ((((EXISTS(D concerne C?, C owned_by 5, C type "X", X identity D, C is Division, D is Affaire)) OR (EXISTS(H concerne G?, G owned_by 5, G type "X", X identity H, G is SubDivision, H is Affaire))) OR (EXISTS(I concerne F?, F owned_by 5, F type "X", X identity I, F is Societe, I is Affaire))) OR (EXISTS(J concerne E?, E owned_by 5, X identity J, E is Note, J is Affaire))), ET is CWEType, X is Affaire',
+ ('FetchStep', [('Any ET,X WHERE X is ET, (EXISTS(X owned_by %(ueid)s)) OR ((((EXISTS(D concerne C?, C owned_by %(ueid)s, C type "X", X identity D, C is Division, D is Affaire)) OR (EXISTS(H concerne G?, G owned_by %(ueid)s, G type "X", X identity H, G is SubDivision, H is Affaire))) OR (EXISTS(I concerne F?, F owned_by %(ueid)s, F type "X", X identity I, F is Societe, I is Affaire))) OR (EXISTS(J concerne E?, E owned_by %(ueid)s, X identity J, E is Note, J is Affaire))), ET is CWEType, X is Affaire' % {'ueid': ueid},
[{'C': 'Division', 'E': 'Note', 'D': 'Affaire',
'G': 'SubDivision', 'F': 'Societe', 'I': 'Affaire',
'H': 'Affaire', 'J': 'Affaire', 'X': 'Affaire',
'ET': 'CWEType'}])],
[self.system], {'E': 'table2.C0'}, {'ET': 'table0.C0', 'X': 'table0.C1'},
[]),
- ('FetchStep', [('Any ET,X WHERE X is ET, EXISTS(X owned_by 5), ET is CWEType, X is CWUser',
+ ('FetchStep', [('Any ET,X WHERE X is ET, EXISTS(X owned_by %s), ET is CWEType, X is CWUser' % ueid,
[{'ET': 'CWEType', 'X': 'CWUser'}])],
[self.system], {'X': 'table3.C0'}, {'ET': 'table0.C0', 'X': 'table0.C1'}, []),
# extra UnionFetchStep could be avoided but has no cost, so don't care
('UnionFetchStep',
- [('FetchStep', [('Any ET,X WHERE X is ET, ET is CWEType, X is IN(BaseTransition, Bookmark, CWAttribute, CWCache, CWConstraint, CWConstraintType, CWEType, CWGroup, CWPermission, CWProperty, CWRType, CWRelation, CWUniqueTogetherConstraint, Comment, Division, Email, EmailAddress, EmailPart, EmailThread, ExternalUri, File, Folder, Personne, RQLExpression, Societe, SubDivision, SubWorkflowExitPoint, Tag, TrInfo, Transition, Workflow, WorkflowTransition)',
+ [('FetchStep', [('Any ET,X WHERE X is ET, ET is CWEType, X is IN(BaseTransition, Bookmark, CWAttribute, CWCache, CWConstraint, CWConstraintType, CWEType, CWGroup, CWPermission, CWProperty, CWRType, CWRelation, CWSource, CWUniqueTogetherConstraint, Comment, Division, Email, EmailAddress, EmailPart, EmailThread, ExternalUri, File, Folder, Personne, RQLExpression, Societe, SubDivision, SubWorkflowExitPoint, Tag, TrInfo, Transition, Workflow, WorkflowTransition)',
[{'X': 'BaseTransition', 'ET': 'CWEType'},
{'X': 'Bookmark', 'ET': 'CWEType'}, {'X': 'CWAttribute', 'ET': 'CWEType'},
{'X': 'CWCache', 'ET': 'CWEType'}, {'X': 'CWConstraint', 'ET': 'CWEType'},
{'X': 'CWConstraintType', 'ET': 'CWEType'}, {'X': 'CWEType', 'ET': 'CWEType'},
{'X': 'CWGroup', 'ET': 'CWEType'}, {'X': 'CWPermission', 'ET': 'CWEType'},
{'X': 'CWProperty', 'ET': 'CWEType'}, {'X': 'CWRType', 'ET': 'CWEType'},
+ {'X': 'CWSource', 'ET': 'CWEType'},
{'X': 'CWRelation', 'ET': 'CWEType'},
{'X': 'CWUniqueTogetherConstraint', 'ET': 'CWEType'},
{'X': 'Comment', 'ET': 'CWEType'},
@@ -993,6 +1017,7 @@
def test_security_3sources(self):
# use a guest user
self.session = self.user_groups_session('guests')
+ ueid = self.session.user.eid
self._test('Any X, XT WHERE X is Card, X owned_by U, X title XT, U login "syt"',
[('FetchStep',
[('Any X,XT WHERE X title XT, X is Card', [{'X': 'Card', 'XT': 'String'}])],
@@ -1001,7 +1026,7 @@
[('Any U WHERE U login "syt", U is CWUser', [{'U': 'CWUser'}])],
[self.ldap, self.system], None, {'U': 'table1.C0'}, []),
('OneFetchStep',
- [('Any X,XT WHERE X owned_by U, X title XT, EXISTS(U owned_by 5), U is CWUser, X is Card',
+ [('Any X,XT WHERE X owned_by U, X title XT, EXISTS(U owned_by %s), U is CWUser, X is Card' % ueid,
[{'X': 'Card', 'U': 'CWUser', 'XT': 'String'}])],
None, None, [self.system],
{'X': 'table0.C0', 'X.title': 'table0.C1', 'XT': 'table0.C1', 'U': 'table1.C0'}, [])
@@ -1011,12 +1036,13 @@
self.restore_orig_cwuser_security()
# use a guest user
self.session = self.user_groups_session('guests')
+ ueid = self.session.user.eid
self._test('Any X, XT WHERE X is Card, X owned_by U, X title XT, U login "syt"',
[('FetchStep',
[('Any X,XT WHERE X title XT, X is Card', [{'X': 'Card', 'XT': 'String'}])],
[self.cards, self.system], None, {'X': 'table0.C0', 'X.title': 'table0.C1', 'XT': 'table0.C1'}, []),
('OneFetchStep',
- [('Any X,XT WHERE X owned_by U, X title XT, U login "syt", EXISTS(U identity 5), U is CWUser, X is Card',
+ [('Any X,XT WHERE X owned_by U, X title XT, U login "syt", EXISTS(U identity %s), U is CWUser, X is Card' % ueid,
[{'U': 'CWUser', 'X': 'Card', 'XT': 'String'}])],
None, None, [self.system], {'X': 'table0.C0', 'X.title': 'table0.C1', 'XT': 'table0.C1'}, [])
])
@@ -1025,9 +1051,10 @@
self.restore_orig_cwuser_security()
# use a guest user
self.session = self.user_groups_session('guests')
+ ueid = self.session.user.eid
self._test('Any X,XT,U WHERE X is Card, X owned_by U?, X title XT, U login L',
[('FetchStep',
- [('Any U,L WHERE U login L, EXISTS(U identity 5), U is CWUser',
+ [('Any U,L WHERE U login L, EXISTS(U identity %s), U is CWUser' % ueid,
[{'L': 'String', u'U': 'CWUser'}])],
[self.system], {}, {'L': 'table0.C1', 'U': 'table0.C0', 'U.login': 'table0.C1'}, []),
('FetchStep',
@@ -1046,6 +1073,7 @@
def test_security_3sources_limit_offset(self):
# use a guest user
self.session = self.user_groups_session('guests')
+ ueid = self.session.user.eid
self._test('Any X, XT LIMIT 10 OFFSET 10 WHERE X is Card, X owned_by U, X title XT, U login "syt"',
[('FetchStep',
[('Any X,XT WHERE X title XT, X is Card', [{'X': 'Card', 'XT': 'String'}])],
@@ -1054,7 +1082,7 @@
[('Any U WHERE U login "syt", U is CWUser', [{'U': 'CWUser'}])],
[self.ldap, self.system], None, {'U': 'table1.C0'}, []),
('OneFetchStep',
- [('Any X,XT LIMIT 10 OFFSET 10 WHERE X owned_by U, X title XT, EXISTS(U owned_by 5), U is CWUser, X is Card',
+ [('Any X,XT LIMIT 10 OFFSET 10 WHERE X owned_by U, X title XT, EXISTS(U owned_by %s), U is CWUser, X is Card' % ueid,
[{'X': 'Card', 'U': 'CWUser', 'XT': 'String'}])],
10, 10, [self.system],
{'X': 'table0.C0', 'X.title': 'table0.C1', 'XT': 'table0.C1', 'U': 'table1.C0'}, [])
@@ -1150,7 +1178,7 @@
'X.login': 'table0.C1',
'X.modification_date': 'table0.C4',
'X.surname': 'table0.C3'}, []),
- ('OneFetchStep', [('Any X,AA,AB,AC,AD ORDERBY AA WHERE X login AA, X firstname AB, X surname AC, X modification_date AD, EXISTS(((X identity 5) OR (EXISTS(X in_group C, C name IN("managers", "staff"), C is CWGroup))) OR (EXISTS(X in_group D, 5 in_group D, NOT D name "users", D is CWGroup))), X is CWUser',
+ ('OneFetchStep', [('Any X,AA,AB,AC,AD ORDERBY AA WHERE X login AA, X firstname AB, X surname AC, X modification_date AD, EXISTS(((X identity %(ueid)s) OR (EXISTS(X in_group C, C name IN("managers", "staff"), C is CWGroup))) OR (EXISTS(X in_group D, %(ueid)s in_group D, NOT D name "users", D is CWGroup))), X is CWUser' % {'ueid': ueid},
[{'AA': 'String', 'AB': 'String', 'AC': 'String', 'AD': 'Datetime',
'C': 'CWGroup', 'D': 'CWGroup', 'X': 'CWUser'}])],
None, None, [self.system],
@@ -1227,7 +1255,7 @@
# in the source where %(x)s is not coming from and will be removed during rql
# generation for the external source
self._test('Any SN WHERE NOT X in_state S, X eid %(x)s, S name SN',
- [('OneFetchStep', [('Any SN WHERE NOT EXISTS(5 in_state S), S name SN, S is State',
+ [('OneFetchStep', [('Any SN WHERE NOT EXISTS(%s in_state S), S name SN, S is State' % ueid,
[{'S': 'State', 'SN': 'String'}])],
None, None, [self.cards, self.system], {}, [])],
{'x': ueid})
@@ -1280,12 +1308,13 @@
def test_simplified_var(self):
+ ueid = self.session.user.eid
repo._type_source_cache[999999] = ('Note', 'cards', 999999)
self._test('Any U WHERE U in_group G, (G name IN ("managers", "logilab") OR (X require_permission P?, P name "bla", P require_group G)), X eid %(x)s, U eid %(u)s',
- [('OneFetchStep', [('Any 5 WHERE 5 in_group G, (G name IN("managers", "logilab")) OR (X require_permission P?, P name "bla", P require_group G), X eid 999999',
+ [('OneFetchStep', [('Any %s WHERE %s in_group G, (G name IN("managers", "logilab")) OR (X require_permission P?, P name "bla", P require_group G), X eid 999999' % (ueid, ueid),
[{'X': 'Note', 'G': 'CWGroup', 'P': 'CWPermission'}])],
None, None, [self.system], {}, [])],
- {'x': 999999, 'u': self.session.user.eid})
+ {'x': 999999, 'u': ueid})
def test_has_text(self):
self._test('Card X WHERE X has_text "toto"',
@@ -1325,13 +1354,14 @@
def test_security_has_text_orderby_rank(self):
# use a guest user
self.session = self.user_groups_session('guests')
+ ueid = self.session.user.eid
self._test('Any X ORDERBY FTIRANK(X) WHERE X has_text "bla", X firstname "bla"',
[('FetchStep', [('Any X WHERE X firstname "bla", X is CWUser', [{'X': 'CWUser'}])],
[self.ldap, self.system], None, {'X': 'table1.C0'}, []),
('UnionFetchStep',
[('FetchStep', [('Any X WHERE X firstname "bla", X is Personne', [{'X': 'Personne'}])],
[self.system], {}, {'X': 'table0.C0'}, []),
- ('FetchStep', [('Any X WHERE EXISTS(X owned_by 5), X is CWUser', [{'X': 'CWUser'}])],
+ ('FetchStep', [('Any X WHERE EXISTS(X owned_by %s), X is CWUser' % ueid, [{'X': 'CWUser'}])],
[self.system], {'X': 'table1.C0'}, {'X': 'table0.C0'}, [])]),
('OneFetchStep', [('Any X ORDERBY FTIRANK(X) WHERE X has_text "bla"',
[{'X': 'CWUser'}, {'X': 'Personne'}])],
@@ -1354,11 +1384,12 @@
def test_security_has_text_select_rank(self):
# use a guest user
self.session = self.user_groups_session('guests')
+ ueid = self.session.user.eid
self._test('Any X, FTIRANK(X) WHERE X has_text "bla", X firstname "bla"',
[('FetchStep', [('Any X,X WHERE X firstname "bla", X is CWUser', [{'X': 'CWUser'}])],
[self.ldap, self.system], None, {'X': 'table0.C1'}, []),
('UnionStep', None, None, [
- ('OneFetchStep', [('Any X,FTIRANK(X) WHERE X has_text "bla", EXISTS(X owned_by 5), X is CWUser', [{'X': 'CWUser'}])],
+ ('OneFetchStep', [('Any X,FTIRANK(X) WHERE X has_text "bla", EXISTS(X owned_by %s), X is CWUser' % ueid, [{'X': 'CWUser'}])],
None, None, [self.system], {'X': 'table0.C1'}, []),
('OneFetchStep', [('Any X,FTIRANK(X) WHERE X has_text "bla", X firstname "bla", X is Personne', [{'X': 'Personne'}])],
None, None, [self.system], {}, []),
@@ -1436,6 +1467,7 @@
])
def test_subquery_1(self):
+ ueid = self.session.user.eid
self._test('DISTINCT Any B,C ORDERBY C WHERE A created_by B, B login C, EXISTS(B owned_by D), D eid %(E)s '
'WITH A,N BEING ((Any X,N WHERE X is Tag, X name N) UNION (Any X,T WHERE X is Bookmark, X title T))',
[('FetchStep', [('Any X,N WHERE X is Tag, X name N', [{'N': 'String', 'X': 'Tag'}]),
@@ -1445,7 +1477,7 @@
('FetchStep',
[('Any B,C WHERE B login C, B is CWUser', [{'B': 'CWUser', 'C': 'String'}])],
[self.ldap, self.system], None, {'B': 'table1.C0', 'B.login': 'table1.C1', 'C': 'table1.C1'}, []),
- ('OneFetchStep', [('DISTINCT Any B,C ORDERBY C WHERE A created_by B, B login C, EXISTS(B owned_by 5), B is CWUser, A is IN(Bookmark, Tag)',
+ ('OneFetchStep', [('DISTINCT Any B,C ORDERBY C WHERE A created_by B, B login C, EXISTS(B owned_by %s), B is CWUser, A is IN(Bookmark, Tag)' % ueid,
[{'A': 'Bookmark', 'B': 'CWUser', 'C': 'String'},
{'A': 'Tag', 'B': 'CWUser', 'C': 'String'}])],
None, None, [self.system],
@@ -1454,9 +1486,10 @@
'C': 'table1.C1',
'N': 'table0.C1'},
[])],
- {'E': self.session.user.eid})
+ {'E': ueid})
def test_subquery_2(self):
+ ueid = self.session.user.eid
self._test('DISTINCT Any B,C ORDERBY C WHERE A created_by B, B login C, EXISTS(B owned_by D), D eid %(E)s '
'WITH A,N BEING ((Any X,N WHERE X is Tag, X name N) UNION (Any X,T WHERE X is Card, X title T))',
[('UnionFetchStep',
@@ -1479,7 +1512,7 @@
('FetchStep',
[('Any B,C WHERE B login C, B is CWUser', [{'B': 'CWUser', 'C': 'String'}])],
[self.ldap, self.system], None, {'B': 'table1.C0', 'B.login': 'table1.C1', 'C': 'table1.C1'}, []),
- ('OneFetchStep', [('DISTINCT Any B,C ORDERBY C WHERE A created_by B, B login C, EXISTS(B owned_by 5), B is CWUser, A is IN(Card, Tag)',
+ ('OneFetchStep', [('DISTINCT Any B,C ORDERBY C WHERE A created_by B, B login C, EXISTS(B owned_by %s), B is CWUser, A is IN(Card, Tag)' % ueid,
[{'A': 'Card', 'B': 'CWUser', 'C': 'String'},
{'A': 'Tag', 'B': 'CWUser', 'C': 'String'}])],
None, None, [self.system],
@@ -1488,7 +1521,7 @@
'C': 'table1.C1',
'N': 'table0.C1'},
[])],
- {'E': self.session.user.eid})
+ {'E': ueid})
def test_eid_dont_cross_relation_1(self):
repo._type_source_cache[999999] = ('Personne', 'system', 999999)
@@ -1578,20 +1611,84 @@
('FetchStep', [('Any Y,T WHERE Y type T, Y is Note', [{'T': 'String', 'Y': 'Note'}])],
[self.cards, self.system], None,
{'T': 'table1.C1', 'Y': 'table1.C0', 'Y.type': 'table1.C1'}, []),
- ('UnionStep', None, None,
- [('OneFetchStep', [('Any X,Y,T WHERE X multisource_crossed_rel Y, Y type T, X type T, X is Note, Y is Note',
- [{'T': 'String', 'X': 'Note', 'Y': 'Note'}])],
- None, None, [self.cards], None,
- []),
- ('OneFetchStep', [('Any X,Y,T WHERE X multisource_crossed_rel Y, Y type T, X type T, X is Note, Y is Note',
- [{'T': 'String', 'X': 'Note', 'Y': 'Note'}])],
- None, None, [self.system],
- {'T': 'table1.C1', 'X': 'table0.C0', 'X.type': 'table0.C1',
- 'Y': 'table1.C0', 'Y.type': 'table1.C1'},
- [])]
- )],
+ ('FetchStep', [('Any X,Y WHERE X multisource_crossed_rel Y, X is Note, Y is Note',
+ [{'X': 'Note', 'Y': 'Note'}])],
+ [self.cards, self.system], None,
+ {'X': 'table2.C0', 'Y': 'table2.C1'},
+ []),
+ ('OneFetchStep', [('Any X,Y,T WHERE X multisource_crossed_rel Y, Y type T, X type T, '
+ 'X is Note, Y is Note, Y identity A, X identity B, A is Note, B is Note',
+ [{u'A': 'Note', u'B': 'Note', 'T': 'String', 'X': 'Note', 'Y': 'Note'}])],
+ None, None,
+ [self.system],
+ {'A': 'table1.C0',
+ 'B': 'table0.C0',
+ 'T': 'table1.C1',
+ 'X': 'table2.C0',
+ 'X.type': 'table0.C1',
+ 'Y': 'table2.C1',
+ 'Y.type': 'table1.C1'},
+ []),
+ ],
{'x': 999999,})
+ def test_crossed_relation_noeid_needattr(self):
+ # http://www.cubicweb.org/ticket/1382452
+ self._test('DISTINCT Any DEP WHERE DEP is Note, P type "cubicweb-foo", P multisource_crossed_rel DEP, DEP type LIKE "cubicweb%"',
+ [('FetchStep', [(u'Any DEP WHERE DEP type LIKE "cubicweb%", DEP is Note',
+ [{'DEP': 'Note'}])],
+ [self.cards, self.system], None,
+ {'DEP': 'table0.C0'},
+ []),
+ ('FetchStep', [(u'Any P WHERE P type "cubicweb-foo", P is Note', [{'P': 'Note'}])],
+ [self.cards, self.system], None, {'P': 'table1.C0'},
+ []),
+ ('FetchStep', [('Any DEP,P WHERE P multisource_crossed_rel DEP, DEP is Note, P is Note',
+ [{'DEP': 'Note', 'P': 'Note'}])],
+ [self.cards, self.system], None, {'DEP': 'table2.C0', 'P': 'table2.C1'},
+ []),
+ ('OneFetchStep',
+ [('DISTINCT Any DEP WHERE P multisource_crossed_rel DEP, DEP is Note, '
+ 'P is Note, DEP identity A, P identity B, A is Note, B is Note',
+ [{u'A': 'Note', u'B': 'Note', 'DEP': 'Note', 'P': 'Note'}])],
+ None, None, [self.system],
+ {'A': 'table0.C0', 'B': 'table1.C0', 'DEP': 'table2.C0', 'P': 'table2.C1'},
+ [])])
+
+ def test_crossed_relation_noeid_invariant(self):
+ # see comment in http://www.cubicweb.org/ticket/1382452
+ self.schema.add_relation_def(
+ RelationDefinition(subject='Note', name='multisource_crossed_rel', object='Affaire'))
+ self.repo.set_schema(self.schema)
+ try:
+ self._test('DISTINCT Any P,DEP WHERE P type "cubicweb-foo", P multisource_crossed_rel DEP',
+ [('FetchStep',
+ [('Any DEP WHERE DEP is Note', [{'DEP': 'Note'}])],
+ [self.cards, self.system], None, {'DEP': 'table0.C0'}, []),
+ ('FetchStep',
+ [(u'Any P WHERE P type "cubicweb-foo", P is Note', [{'P': 'Note'}])],
+ [self.cards, self.system], None, {'P': 'table1.C0'}, []),
+ ('UnionStep', None, None,
+ [('OneFetchStep',
+ [('DISTINCT Any P,DEP WHERE P multisource_crossed_rel DEP, DEP is Note, P is Note',
+ [{'DEP': 'Note', 'P': 'Note'}])],
+ None, None, [self.cards], None, []),
+ ('OneFetchStep',
+ [('DISTINCT Any P,DEP WHERE P multisource_crossed_rel DEP, DEP is Note, P is Note',
+ [{'DEP': 'Note', 'P': 'Note'}])],
+ None, None, [self.system],
+ {'DEP': 'table0.C0', 'P': 'table1.C0'},
+ []),
+ ('OneFetchStep',
+ [('DISTINCT Any P,DEP WHERE P multisource_crossed_rel DEP, DEP is Affaire, P is Note',
+ [{'DEP': 'Affaire', 'P': 'Note'}])],
+ None, None, [self.system], {'P': 'table1.C0'},
+ [])])
+ ])
+ finally:
+ self.schema.del_relation_def('Note', 'multisource_crossed_rel', 'Affaire')
+ self.repo.set_schema(self.schema)
+
# edition queries tests ###################################################
def test_insert_simplified_var_1(self):
@@ -1662,7 +1759,7 @@
ueid = self.session.user.eid
self._test('DELETE X created_by Y WHERE X eid %(x)s, NOT Y eid %(y)s',
[('DeleteRelationsStep', [
- ('OneFetchStep', [('Any 5,Y WHERE %s created_by Y, NOT Y eid %s, Y is CWUser'%(ueid, ueid),
+ ('OneFetchStep', [('Any %s,Y WHERE %s created_by Y, NOT Y eid %s, Y is CWUser' % (ueid, ueid, ueid),
[{'Y': 'CWUser'}])],
None, None, [self.system], {}, []),
]),
@@ -1681,6 +1778,18 @@
],
{'x': ueid, 'y': ueid})
+ def test_delete_relation3(self):
+ repo._type_source_cache[999999] = ('Note', 'cards', 999999)
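+ # eid 999999 is mapped to a Note held by the external 'cards' source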
+ self._test('DELETE Y multisource_inlined_rel X WHERE X eid %(x)s, NOT (Y cw_source S, S name %(source)s)',
+ [('DeleteRelationsStep',
+ [('OneFetchStep',
+ [('Any Y,999999 WHERE Y multisource_inlined_rel 999999, NOT EXISTS(Y cw_source S, S name "cards"), S is CWSource, Y is IN(Card, Note)',
+ [{'S': 'CWSource', 'Y': 'Card'}, {'S': 'CWSource', 'Y': 'Note'}])],
+ None, None, [self.system], {},
+ [])]
+ )],
+ {'x': 999999, 'source': 'cards'})
+
def test_delete_entity1(self):
repo._type_source_cache[999999] = ('Note', 'system', 999999)
self._test('DELETE Note X WHERE X eid %(x)s, NOT Y multisource_rel X',
@@ -1805,6 +1914,156 @@
del self.cards.support_relations['see_also']
self.cards.cross_relations.remove('see_also')
+ def test_state_of_cross(self):
+ self._test('DELETE State X WHERE NOT X state_of Y',
+ [('DeleteEntitiesStep',
+ [('OneFetchStep',
+ [('Any X WHERE NOT X state_of Y, X is State, Y is Workflow',
+ [{'X': 'State', 'Y': 'Workflow'}])],
+ None, None, [self.system], {}, [])])]
+ )
+
+
+ def test_source_specified_0_0(self):
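+ # the source is selected by eid, so the plan only involves the system source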
+ self._test('Card X WHERE X cw_source S, S eid 1',
+ [('OneFetchStep', [('Any X WHERE X cw_source 1, X is Card',
+ [{'X': 'Card'}])],
+ None, None,
+ [self.system],{}, [])
+ ])
+
+ def test_source_specified_0_1(self):
+ self._test('Any X, S WHERE X is Card, X cw_source S, S eid 1',
+ [('OneFetchStep', [('Any X,1 WHERE X is Card, X cw_source 1',
+ [{'X': 'Card'}])],
+ None, None,
+ [self.system],{}, [])
+ ])
+
+ def test_source_specified_1_0(self):
+ self._test('Card X WHERE X cw_source S, S name "system"',
+ [('OneFetchStep', [('Any X WHERE X cw_source S, S name "system", X is Card',
+ [{'X': 'Card', 'S': 'CWSource'}])],
+ None, None,
+ [self.system],{}, [])
+ ])
+
+ def test_source_specified_1_1(self):
+ self._test('Any X, SN WHERE X is Card, X cw_source S, S name "system", S name SN',
+ [('OneFetchStep', [('Any X,SN WHERE X is Card, X cw_source S, S name "system", '
+ 'S name SN',
+ [{'S': 'CWSource', 'SN': 'String', 'X': 'Card'}])],
+ None, None, [self.system], {}, [])
+ ])
+
+ def test_source_specified_1_2(self):
+ sols = []
+ for sol in X_ALL_SOLS:
+ sol = sol.copy()
+ sol['S'] = 'CWSource'
+ sols.append(sol)
+ self._test('Any X WHERE X cw_source S, S name "cards"',
+ [('OneFetchStep', [('Any X WHERE X cw_source S, S name "cards"',
+ sols)],
+ None, None,
+ [self.system],{}, [])
+ ])
+
+ def test_source_specified_2_0(self):
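+ # excluding the system source leaves only the external cards source in the plan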
+ self._test('Card X WHERE X cw_source S, NOT S eid 1',
+ [('OneFetchStep', [('Any X WHERE X is Card',
+ [{'X': 'Card'}])],
+ None, None,
+ [self.cards],{}, [])
+ ])
+ self._test('Card X WHERE NOT X cw_source S, S eid 1',
+ [('OneFetchStep', [('Any X WHERE X is Card',
+ [{'X': 'Card'}])],
+ None, None,
+ [self.cards],{}, [])
+ ])
+
+ def test_source_specified_2_1(self):
+ self._test('Card X WHERE X cw_source S, NOT S name "system"',
+ [('OneFetchStep', [('Any X WHERE X is Card',
+ [{'X': 'Card'}])],
+ None, None,
+ [self.cards],{}, [])
+ ])
+ self._test('Card X WHERE NOT X cw_source S, S name "system"',
+ [('OneFetchStep', [('Any X WHERE X is Card',
+ [{'X': 'Card'}])],
+ None, None,
+ [self.cards],{}, [])
+ ])
+
+ def test_source_specified_3_1(self):
+ self._test('Any X,XT WHERE X is Card, X title XT, X cw_source S, S name "cards"',
+ [('OneFetchStep',
+ [('Any X,XT WHERE X is Card, X title XT',
+ [{'X': 'Card', 'XT': 'String'}])],
+ None, None, [self.cards], {}, [])
+ ])
+
+ def test_source_specified_3_2(self):
+ self.skipTest('oops')
+ self._test('Any STN WHERE X is Note, X type XT, X in_state ST, ST name STN, X cw_source S, S name "cards"',
+ [('OneFetchStep',
+ [('Any X,XT WHERE X is Card, X title XT',
+ [{'X': 'Card', 'XT': 'String'}])],
+ None, None, [self.cards], {}, [])
+ ])
+
+ def test_source_conflict_1(self):
+ self.repo._type_source_cache[999999] = ('Note', 'cards', 999999)
+ with self.assertRaises(BadRQLQuery) as cm:
+ self._test('Any X WHERE X cw_source S, S name "system", X eid %(x)s',
+ [], {'x': 999999})
+ self.assertEqual(str(cm.exception), 'source conflict for term %(x)s')
+
+ def test_source_conflict_2(self):
+ with self.assertRaises(BadRQLQuery) as cm:
+ self._test('Card X WHERE X cw_source S, S name "systeme"', [])
+ self.assertEqual(str(cm.exception), 'source conflict for term X')
+
+ def test_source_conflict_3(self):
+ self.skipTest('oops')
+ self._test('CWSource X WHERE X cw_source S, S name "cards"',
+ [('OneFetchStep',
+ [(u'Any X WHERE X cw_source S, S name "cards", X is CWSource',
+ [{'S': 'CWSource', 'X': 'CWSource'}])],
+ None, None,
+ [self.system],
+ {}, [])])
+
+
+ def test_ambigous_cross_relation_source_specified(self):
+ self.repo._type_source_cache[999999] = ('Note', 'cards', 999999)
+ self.cards.support_relations['see_also'] = True
+ self.cards.cross_relations.add('see_also')
+ try:
+ self._test('Any X,AA ORDERBY AA WHERE E eid %(x)s, E see_also X, X modification_date AA',
+ [('AggrStep',
+ 'SELECT table0.C0, table0.C1 FROM table0 ORDER BY table0.C1',
+ None,
+ [('FetchStep',
+ [('Any X,AA WHERE 999999 see_also X, X modification_date AA, X is Note',
+ [{'AA': 'Datetime', 'X': 'Note'}])], [self.cards, self.system], {},
+ {'AA': 'table0.C1', 'X': 'table0.C0',
+ 'X.modification_date': 'table0.C1'},
+ []),
+ ('FetchStep',
+ [('Any X,AA WHERE 999999 see_also X, X modification_date AA, X is Bookmark',
+ [{'AA': 'Datetime', 'X': 'Bookmark'}])],
+ [self.system], {},
+ {'AA': 'table0.C1', 'X': 'table0.C0',
+ 'X.modification_date': 'table0.C1'},
+ [])])],
+ {'x': 999999})
+ finally:
+ del self.cards.support_relations['see_also']
+ self.cards.cross_relations.remove('see_also')
+
# non regression tests ####################################################
def test_nonregr1(self):
@@ -1864,15 +2123,16 @@
)
def test_nonregr4(self):
+ ueid = self.session.user.eid
self._test('Any U ORDERBY D DESC WHERE WF wf_info_for X, WF creation_date D, WF from_state FS, '
'WF owned_by U?, X eid %(x)s',
[#('FetchStep', [('Any U WHERE U is CWUser', [{'U': 'CWUser'}])],
# [self.ldap, self.system], None, {'U': 'table0.C0'}, []),
- ('OneFetchStep', [('Any U ORDERBY D DESC WHERE WF wf_info_for 5, WF creation_date D, WF from_state FS, WF owned_by U?',
+ ('OneFetchStep', [('Any U ORDERBY D DESC WHERE WF wf_info_for %s, WF creation_date D, WF from_state FS, WF owned_by U?' % ueid,
[{'WF': 'TrInfo', 'FS': 'State', 'U': 'CWUser', 'D': 'Datetime'}])],
None, None,
[self.system], {}, [])],
- {'x': self.session.user.eid})
+ {'x': ueid})
def test_nonregr5(self):
# original jpl query:
@@ -1914,7 +2174,7 @@
[('FetchStep', [('Any WP WHERE 999999 multisource_rel WP, WP is Note', [{'WP': 'Note'}])],
[self.cards], None, {'WP': u'table0.C0'}, []),
('OneFetchStep', [('Any S,SUM(DUR),SUM(I),(SUM(I) - SUM(DUR)),MIN(DI),MAX(DI) GROUPBY S ORDERBY S WHERE A duration DUR, A invoiced I, A modification_date DI, A in_state S, S name SN, (EXISTS(A concerne WP, WP is Note)) OR (EXISTS(A concerne 999999)), A is Affaire, S is State',
- [{'A': 'Affaire', 'DI': 'Datetime', 'DUR': 'Int', 'I': 'Int', 'S': 'State', 'SN': 'String', 'WP': 'Note'}])],
+ [{'A': 'Affaire', 'DI': 'Datetime', 'DUR': 'Int', 'I': 'Float', 'S': 'State', 'SN': 'String', 'WP': 'Note'}])],
None, None, [self.system], {'WP': u'table0.C0'}, [])],
{'n': 999999})
@@ -1997,6 +2257,7 @@
{'x': 999999})
def test_nonregr13_1(self):
+ ueid = self.session.user.eid
# identity wrapped into exists:
# shouldn't propagate the constraint that U is in the same source as ME
self._test('Any B,U,UL GROUPBY B,U,UL WHERE B created_by U?, B is File '
@@ -2008,7 +2269,7 @@
[self.ldap, self.system], None,
{'U': 'table0.C0', 'U.login': 'table0.C1', 'UL': 'table0.C1'},
[]),
- ('FetchStep', [('Any U,UL WHERE ((EXISTS(U identity 5)) OR (EXISTS(U in_group G, G name IN("managers", "staff"), G is CWGroup))) OR (EXISTS(U in_group H, 5 in_group H, NOT H name "users", H is CWGroup)), U login UL, U is CWUser',
+ ('FetchStep', [('Any U,UL WHERE ((EXISTS(U identity %s)) OR (EXISTS(U in_group G, G name IN("managers", "staff"), G is CWGroup))) OR (EXISTS(U in_group H, %s in_group H, NOT H name "users", H is CWGroup)), U login UL, U is CWUser' % (ueid, ueid),
[{'G': 'CWGroup', 'H': 'CWGroup', 'U': 'CWUser', 'UL': 'String'}])],
[self.system],
{'U': 'table0.C0', 'U.login': 'table0.C1', 'UL': 'table0.C1'},
@@ -2019,7 +2280,7 @@
None, None, [self.system],
{'U': 'table1.C0', 'UL': 'table1.C1'},
[])],
- {'x': self.session.user.eid})
+ {'x': ueid})
def test_nonregr13_2(self):
# identity *not* wrapped into exists.
@@ -2033,6 +2294,7 @@
# explain constraint propagation rules, and so why this should be
# wrapped in exists() if used in multi-source
self.skipTest('take a look at me if you wish')
+ ueid = self.session.user.eid
self._test('Any B,U,UL GROUPBY B,U,UL WHERE B created_by U?, B is File '
'WITH U,UL BEING (Any U,UL WHERE ME eid %(x)s, (U identity ME '
'OR (EXISTS(U in_group G, G name IN("managers", "staff")))) '
@@ -2042,7 +2304,7 @@
[self.ldap, self.system], None,
{'U': 'table0.C0', 'U.login': 'table0.C1', 'UL': 'table0.C1'},
[]),
- ('FetchStep', [('Any U,UL WHERE ((U identity 5) OR (EXISTS(U in_group G, G name IN("managers", "staff"), G is CWGroup))) OR (EXISTS(U in_group H, 5 in_group H, NOT H name "users", H is CWGroup)), U login UL, U is CWUser',
+ ('FetchStep', [('Any U,UL WHERE ((U identity %s) OR (EXISTS(U in_group G, G name IN("managers", "staff"), G is CWGroup))) OR (EXISTS(U in_group H, %s in_group H, NOT H name "users", H is CWGroup)), U login UL, U is CWUser' % (ueid, ueid),
[{'G': 'CWGroup', 'H': 'CWGroup', 'U': 'CWUser', 'UL': 'String'}])],
[self.system],
{'U': 'table0.C0', 'U.login': 'table0.C1', 'UL': 'table0.C1'},
@@ -2094,23 +2356,15 @@
None, None, [self.system], {}, [])],
{'x': 999999, 'u': 999998})
- def test_state_of_cross(self):
- self._test('DELETE State X WHERE NOT X state_of Y',
- [('DeleteEntitiesStep',
- [('OneFetchStep',
- [('Any X WHERE NOT X state_of Y, X is State, Y is Workflow',
- [{'X': 'State', 'Y': 'Workflow'}])],
- None, None, [self.system], {}, [])])]
- )
class MSPlannerTwoSameExternalSourcesTC(BasePlannerTC):
"""test planner related feature on a 3-sources repository:
* 2 rql sources supporting Card
"""
- repo = repo
def setUp(self):
+ self.__class__.repo = repo
self.setup()
self.add_source(FakeCardSource, 'cards')
self.add_source(FakeCardSource, 'cards2')
@@ -2222,6 +2476,37 @@
)]
)
+ def test_version_crossed_depends_on_4(self):
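+ # crossed relation wrapped in EXISTS: candidates are fetched from every source, then joined in the system source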
+ self._test('Any X,AD,AE WHERE EXISTS(E multisource_crossed_rel X), X in_state AD, AD name AE, E is Note',
+ [('FetchStep',
+ [('Any X,AD,AE WHERE X in_state AD, AD name AE, AD is State, X is Note',
+ [{'X': 'Note', 'AD': 'State', 'AE': 'String'}])],
+ [self.cards, self.cards2, self.system], None,
+ {'X': 'table0.C0',
+ 'AD': 'table0.C1',
+ 'AD.name': 'table0.C2',
+ 'AE': 'table0.C2'},
+ []),
+ ('FetchStep',
+ [('Any A WHERE E multisource_crossed_rel A, A is Note, E is Note',
+ [{'A': 'Note', 'E': 'Note'}])],
+ [self.cards, self.cards2, self.system], None,
+ {'A': 'table1.C0'},
+ []),
+ ('OneFetchStep',
+ [('Any X,AD,AE WHERE EXISTS(X identity A), AD name AE, A is Note, AD is State, X is Note',
+ [{'A': 'Note', 'AD': 'State', 'AE': 'String', 'X': 'Note'}])],
+ None, None,
+ [self.system],
+ {'A': 'table1.C0',
+ 'AD': 'table0.C1',
+ 'AD.name': 'table0.C2',
+ 'AE': 'table0.C2',
+ 'X': 'table0.C0'},
+ []
+ )]
+ )
+
def test_nonregr_dont_cross_rel_source_filtering_1(self):
self.repo._type_source_cache[999999] = ('Note', 'cards', 999999)
self._test('Any S WHERE E eid %(x)s, E in_state S, NOT S name "moved"',
@@ -2257,7 +2542,7 @@
None, {'X': 'table0.C0'}, []),
('UnionStep', None, None,
[('OneFetchStep',
- [(u'Any X WHERE X owned_by U, U login "anon", U is CWUser, X is IN(Affaire, BaseTransition, Basket, Bookmark, CWAttribute, CWCache, CWConstraint, CWConstraintType, CWEType, CWGroup, CWPermission, CWProperty, CWRType, CWRelation, CWUniqueTogetherConstraint, CWUser, Division, Email, EmailAddress, EmailPart, EmailThread, ExternalUri, File, Folder, Personne, RQLExpression, Societe, SubDivision, SubWorkflowExitPoint, Tag, TrInfo, Transition, Workflow, WorkflowTransition)',
+ [(u'Any X WHERE X owned_by U, U login "anon", U is CWUser, X is IN(Affaire, BaseTransition, Basket, Bookmark, CWAttribute, CWCache, CWConstraint, CWConstraintType, CWEType, CWGroup, CWPermission, CWProperty, CWRType, CWRelation, CWSource, CWSourceHostConfig, CWUniqueTogetherConstraint, CWUser, Division, Email, EmailAddress, EmailPart, EmailThread, ExternalUri, File, Folder, Personne, RQLExpression, Societe, SubDivision, SubWorkflowExitPoint, Tag, TrInfo, Transition, Workflow, WorkflowTransition)',
[{'U': 'CWUser', 'X': 'Affaire'},
{'U': 'CWUser', 'X': 'BaseTransition'},
{'U': 'CWUser', 'X': 'Basket'},
@@ -2272,6 +2557,8 @@
{'U': 'CWUser', 'X': 'CWProperty'},
{'U': 'CWUser', 'X': 'CWRType'},
{'U': 'CWUser', 'X': 'CWRelation'},
+ {'U': 'CWUser', 'X': 'CWSource'},
+ {'U': 'CWUser', 'X': 'CWSourceHostConfig'},
{'U': 'CWUser', 'X': 'CWUniqueTogetherConstraint'},
{'U': 'CWUser', 'X': 'CWUser'},
{'U': 'CWUser', 'X': 'Division'},
@@ -2315,9 +2602,9 @@
return []
class MSPlannerVCSSource(BasePlannerTC):
- repo = repo
def setUp(self):
+ self.__class__.repo = repo
self.setup()
self.add_source(FakeVCSSource, 'vcs')
self.planner = MSPlanner(self.o.schema, self.repo.vreg.rqlhelper)
diff -r 48f468f33704 -r e4580e5f0703 server/test/unittest_multisources.py
--- a/server/test/unittest_multisources.py Fri Dec 10 12:17:18 2010 +0100
+++ b/server/test/unittest_multisources.py Fri Mar 11 09:46:45 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -15,29 +15,29 @@
#
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
-from os.path import dirname, join, abspath
+
from datetime import datetime, timedelta
-from logilab.common.decorators import cached
-
from cubicweb.devtools import TestServerConfiguration, init_test_database
from cubicweb.devtools.testlib import CubicWebTC, refresh_repo
from cubicweb.devtools.repotest import do_monkey_patch, undo_monkey_patch
-class TwoSourcesConfiguration(TestServerConfiguration):
- sourcefile = 'sources_multi'
-
-
class ExternalSource1Configuration(TestServerConfiguration):
sourcefile = 'sources_extern'
class ExternalSource2Configuration(TestServerConfiguration):
- sourcefile = 'sources_multi2'
+ sourcefile = 'sources_multi'
MTIME = datetime.now() - timedelta(0, 10)
-repo2, cnx2 = init_test_database(config=ExternalSource1Configuration('data'))
-repo3, cnx3 = init_test_database(config=ExternalSource2Configuration('data'))
+
+EXTERN_SOURCE_CFG = u'''
+pyro-ns-id = extern
+cubicweb-user = admin
+cubicweb-password = gingkow
+mapping-file = extern_mapping.py
+base-url=http://extern.org/
+'''
# hi-jacking
from cubicweb.server.sources.pyrorql import PyroRQLSource
@@ -46,7 +46,16 @@
PyroRQLSource_get_connection = PyroRQLSource.get_connection
Connection_close = Connection.close
-def setup_module(*args):
+def setUpModule(*args):
+ global repo2, cnx2, repo3, cnx3
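+ # build the two external repositories at module setup time, using the test case datadir as apphome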
+ cfg1 = ExternalSource1Configuration('data', apphome=TwoSourcesTC.datadir)
+ repo2, cnx2 = init_test_database(config=cfg1)
+ cfg2 = ExternalSource2Configuration('data', apphome=TwoSourcesTC.datadir)
+ repo3, cnx3 = init_test_database(config=cfg2)
+ cnx3.request().create_entity('CWSource', name=u'extern', type=u'pyrorql',
+ config=EXTERN_SOURCE_CFG)
+ cnx3.commit()
+
TestServerConfiguration.no_sqlite_wrap = True
# hi-jack PyroRQLSource.get_connection to access existing connection (no
# pyro connection)
@@ -55,7 +64,7 @@
# pool though we want to keep cnx2 valid
Connection.close = lambda x: None
-def teardown_module(*args):
+def tearDownModule(*args):
PyroRQLSource.get_connection = PyroRQLSource_get_connection
Connection.close = Connection_close
global repo2, cnx2, repo3, cnx3
@@ -67,8 +76,9 @@
TestServerConfiguration.no_sqlite_wrap = False
class TwoSourcesTC(CubicWebTC):
- config = TwoSourcesConfiguration('data')
-
+ """Main repo -> extern-multi -> extern
+ \-------------/
+ """
@classmethod
def _refresh_repo(cls):
super(TwoSourcesTC, cls)._refresh_repo()
@@ -82,6 +92,8 @@
do_monkey_patch()
def tearDown(self):
+ for source in self.repo.sources[1:]:
+ self.repo.remove_source(source.uri)
CubicWebTC.tearDown(self)
undo_monkey_patch()
@@ -91,6 +103,17 @@
cu.execute('INSERT Card X: X title "C4: Ze external card", X wikiid "zzz"')
self.aff1 = cu.execute('INSERT Affaire X: X ref "AFFREF"')[0][0]
cnx2.commit()
+ for uri, config in [('extern', EXTERN_SOURCE_CFG),
+ ('extern-multi', '''
+pyro-ns-id = extern-multi
+cubicweb-user = admin
+cubicweb-password = gingkow
+mapping-file = extern_mapping.py
+''')]:
+ self.request().create_entity('CWSource', name=unicode(uri),
+ type=u'pyrorql',
+ config=unicode(config))
+ self.commit()
# trigger discovery
self.sexecute('Card X')
self.sexecute('Affaire X')
@@ -112,11 +135,11 @@
# since they are ordered by eid, we know the first 3 are coming from the system source
# and the others from external source
self.assertEqual(rset.get_entity(0, 0).cw_metainformation(),
- {'source': {'adapter': 'native', 'uri': 'system'},
+ {'source': {'type': 'native', 'uri': 'system'},
'type': u'Card', 'extid': None})
externent = rset.get_entity(3, 0)
metainf = externent.cw_metainformation()
- self.assertEqual(metainf['source'], {'adapter': 'pyrorql', 'base-url': 'http://extern.org/', 'uri': 'extern'})
+ self.assertEqual(metainf['source'], {'type': 'pyrorql', 'base-url': 'http://extern.org/', 'uri': 'extern'})
self.assertEqual(metainf['type'], 'Card')
self.assert_(metainf['extid'])
etype = self.sexecute('Any ETN WHERE X is ET, ET name ETN, X eid %(x)s',
@@ -184,7 +207,7 @@
def test_simplifiable_var_2(self):
affeid = self.sexecute('Affaire X WHERE X ref "AFFREF"')[0][0]
rset = self.sexecute('Any E WHERE E eid %(x)s, E in_state S, NOT S name "moved"',
- {'x': affeid, 'u': self.session.user.eid})
+ {'x': affeid, 'u': self.session.user.eid})
self.assertEqual(len(rset), 1)
def test_sort_func(self):
@@ -270,7 +293,6 @@
def test_not_relation(self):
states = set(tuple(x) for x in self.sexecute('Any S,SN WHERE S is State, S name SN'))
- self.session.user.clear_all_caches()
userstate = self.session.user.in_state[0]
states.remove((userstate.eid, userstate.name))
notstates = set(tuple(x) for x in self.sexecute('Any S,SN WHERE S is State, S name SN, NOT X in_state S, X eid %(x)s',
@@ -303,6 +325,19 @@
cu.execute('DELETE Card X WHERE X eid %(x)s', {'x':ceid})
cnx3.commit()
+ def test_crossed_relation_noeid_needattr(self):
+ """http://www.cubicweb.org/ticket/1382452"""
+ aff1 = self.sexecute('INSERT Affaire X: X ref "AFFREF"')[0][0]
+ # link within extern source
+ ec1 = self.sexecute('Card X WHERE X wikiid "zzz"')[0][0]
+ self.sexecute('SET A documented_by C WHERE A eid %(a)s, C eid %(c)s',
+ {'a': aff1, 'c': ec1})
+ # link from system to extern source
+ self.sexecute('SET A documented_by C WHERE A eid %(a)s, C eid %(c)s',
+ {'a': aff1, 'c': self.ic2})
+ rset = self.sexecute('DISTINCT Any DEP WHERE P ref "AFFREF", P documented_by DEP, DEP wikiid LIKE "z%"')
+ self.assertEqual(sorted(rset.rows), [[ec1], [self.ic2]])
+
def test_nonregr1(self):
ueid = self.session.user.eid
affaire = self.sexecute('Affaire X WHERE X ref "AFFREF"').get_entity(0, 0)
diff -r 48f468f33704 -r e4580e5f0703 server/test/unittest_querier.py
--- a/server/test/unittest_querier.py Fri Dec 10 12:17:18 2010 +0100
+++ b/server/test/unittest_querier.py Fri Mar 11 09:46:45 2011 +0100
@@ -1,5 +1,5 @@
# -*- coding: iso-8859-1 -*-
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -62,9 +62,11 @@
('C0 text,C1 integer', {'A': 'table0.C0', 'B': 'table0.C1'}))
-repo, cnx = init_test_database()
+def setUpModule(*args):
+ global repo, cnx
+ repo, cnx = init_test_database(apphome=UtilsTC.datadir)
-def teardown_module(*args):
+def tearDownModule(*args):
global repo, cnx
cnx.close()
repo.shutdown()
@@ -72,7 +74,9 @@
class UtilsTC(BaseQuerierTC):
- repo = repo
+ def setUp(self):
+ self.__class__.repo = repo
+ super(UtilsTC, self).setUp()
def get_max_eid(self):
# no need for cleanup here
@@ -130,7 +134,7 @@
'X': 'Affaire',
'ET': 'CWEType', 'ETN': 'String'}])
rql, solutions = partrqls[1]
- self.assertEqual(rql, 'Any ETN,X WHERE X is ET, ET name ETN, ET is CWEType, X is IN(BaseTransition, Bookmark, CWAttribute, CWCache, CWConstraint, CWConstraintType, CWEType, CWGroup, CWPermission, CWProperty, CWRType, CWRelation, CWUniqueTogetherConstraint, CWUser, Card, Comment, Division, Email, EmailAddress, EmailPart, EmailThread, ExternalUri, File, Folder, Note, Personne, RQLExpression, Societe, State, SubDivision, SubWorkflowExitPoint, Tag, TrInfo, Transition, Workflow, WorkflowTransition)')
+ self.assertEqual(rql, 'Any ETN,X WHERE X is ET, ET name ETN, ET is CWEType, X is IN(BaseTransition, Bookmark, CWAttribute, CWCache, CWConstraint, CWConstraintType, CWEType, CWGroup, CWPermission, CWProperty, CWRType, CWRelation, CWSource, CWUniqueTogetherConstraint, CWUser, Card, Comment, Division, Email, EmailAddress, EmailPart, EmailThread, ExternalUri, File, Folder, Note, Personne, RQLExpression, Societe, State, SubDivision, SubWorkflowExitPoint, Tag, TrInfo, Transition, Workflow, WorkflowTransition)')
self.assertListEqual(sorted(solutions),
sorted([{'X': 'BaseTransition', 'ETN': 'String', 'ET': 'CWEType'},
{'X': 'Bookmark', 'ETN': 'String', 'ET': 'CWEType'},
@@ -147,6 +151,7 @@
{'X': 'CWPermission', 'ETN': 'String', 'ET': 'CWEType'},
{'X': 'CWProperty', 'ETN': 'String', 'ET': 'CWEType'},
{'X': 'CWRType', 'ETN': 'String', 'ET': 'CWEType'},
+ {'X': 'CWSource', 'ETN': 'String', 'ET': 'CWEType'},
{'X': 'CWUniqueTogetherConstraint', 'ETN': 'String', 'ET': 'CWEType'},
{'X': 'CWUser', 'ETN': 'String', 'ET': 'CWEType'},
{'X': 'Email', 'ETN': 'String', 'ET': 'CWEType'},
@@ -224,7 +229,9 @@
class QuerierTC(BaseQuerierTC):
- repo = repo
+ def setUp(self):
+ self.__class__.repo = repo
+ super(QuerierTC, self).setUp()
def test_encoding_pb(self):
self.assertRaises(RQLSyntaxError, self.execute,
@@ -251,15 +258,15 @@
def test_select_1(self):
rset = self.execute('Any X ORDERBY X WHERE X is CWGroup')
result, descr = rset.rows, rset.description
- self.assertEqual(tuplify(result), [(1,), (2,), (3,), (4,)])
+ self.assertEqual(tuplify(result), [(2,), (3,), (4,), (5,)])
self.assertEqual(descr, [('CWGroup',), ('CWGroup',), ('CWGroup',), ('CWGroup',)])
def test_select_2(self):
rset = self.execute('Any X ORDERBY N WHERE X is CWGroup, X name N')
- self.assertEqual(tuplify(rset.rows), [(1,), (2,), (3,), (4,)])
+ self.assertEqual(tuplify(rset.rows), [(2,), (3,), (4,), (5,)])
self.assertEqual(rset.description, [('CWGroup',), ('CWGroup',), ('CWGroup',), ('CWGroup',)])
rset = self.execute('Any X ORDERBY N DESC WHERE X is CWGroup, X name N')
- self.assertEqual(tuplify(rset.rows), [(4,), (3,), (2,), (1,)])
+ self.assertEqual(tuplify(rset.rows), [(5,), (4,), (3,), (2,)])
def test_select_3(self):
rset = self.execute('Any N GROUPBY N WHERE X is CWGroup, X name N')
@@ -302,7 +309,7 @@
def test_select_5(self):
rset = self.execute('Any X, TMP ORDERBY TMP WHERE X name TMP, X is CWGroup')
- self.assertEqual(tuplify(rset.rows), [(1, 'guests',), (2, 'managers',), (3, 'owners',), (4, 'users',)])
+ self.assertEqual(tuplify(rset.rows), [(2, 'guests',), (3, 'managers',), (4, 'owners',), (5, 'users',)])
self.assertEqual(rset.description, [('CWGroup', 'String',), ('CWGroup', 'String',), ('CWGroup', 'String',), ('CWGroup', 'String',)])
def test_select_6(self):
@@ -350,11 +357,11 @@
self.assertEqual(len(rset.rows), 0)
def test_select_nonregr_edition_not(self):
- groupeids = set((1, 2, 3))
- groupreadperms = set(r[0] for r in self.execute('Any Y WHERE X name "CWGroup", Y eid IN(1, 2, 3), X read_permission Y'))
- rset = self.execute('DISTINCT Any Y WHERE X is CWEType, X name "CWGroup", Y eid IN(1, 2, 3), NOT X read_permission Y')
+ groupeids = set((2, 3, 4))
+ groupreadperms = set(r[0] for r in self.execute('Any Y WHERE X name "CWGroup", Y eid IN(2, 3, 4), X read_permission Y'))
+ rset = self.execute('DISTINCT Any Y WHERE X is CWEType, X name "CWGroup", Y eid IN(2, 3, 4), NOT X read_permission Y')
self.assertEqual(sorted(r[0] for r in rset.rows), sorted(groupeids - groupreadperms))
- rset = self.execute('DISTINCT Any Y WHERE X name "CWGroup", Y eid IN(1, 2, 3), NOT X read_permission Y')
+ rset = self.execute('DISTINCT Any Y WHERE X name "CWGroup", Y eid IN(2, 3, 4), NOT X read_permission Y')
self.assertEqual(sorted(r[0] for r in rset.rows), sorted(groupeids - groupreadperms))
def test_select_outer_join(self):
@@ -493,15 +500,16 @@
self.assertListEqual(rset.rows,
[[u'description_format', 12],
[u'description', 13],
- [u'name', 14],
- [u'created_by', 38],
- [u'creation_date', 38],
- [u'cwuri', 38],
- [u'in_basket', 38],
- [u'is', 38],
- [u'is_instance_of', 38],
- [u'modification_date', 38],
- [u'owned_by', 38]])
+ [u'name', 15],
+ [u'created_by', 40],
+ [u'creation_date', 40],
+ [u'cw_source', 40],
+ [u'cwuri', 40],
+ [u'in_basket', 40],
+ [u'is', 40],
+ [u'is_instance_of', 40],
+ [u'modification_date', 40],
+ [u'owned_by', 40]])
def test_select_aggregat_having_dumb(self):
# dumb but should not raise an error
@@ -545,6 +553,26 @@
self.assertEqual(rset.rows[0][0], 'ADMIN')
self.assertEqual(rset.description, [('String',)])
+ def test_select_float_abs(self):
+ # test positive number
+ eid = self.execute('INSERT Affaire A: A invoiced %(i)s', {'i': 1.2})[0][0]
+ rset = self.execute('Any ABS(I) WHERE X eid %(x)s, X invoiced I', {'x': eid})
+ self.assertEqual(rset.rows[0][0], 1.2)
+ # test negative number
+ eid = self.execute('INSERT Affaire A: A invoiced %(i)s', {'i': -1.2})[0][0]
+ rset = self.execute('Any ABS(I) WHERE X eid %(x)s, X invoiced I', {'x': eid})
+ self.assertEqual(rset.rows[0][0], 1.2)
+
+ def test_select_int_abs(self):
+ # test positive number
+ eid = self.execute('INSERT Affaire A: A duration %(d)s', {'d': 12})[0][0]
+ rset = self.execute('Any ABS(D) WHERE X eid %(x)s, X duration D', {'x': eid})
+ self.assertEqual(rset.rows[0][0], 12)
+ # test negative number
+ eid = self.execute('INSERT Affaire A: A duration %(d)s', {'d': -12})[0][0]
+ rset = self.execute('Any ABS(D) WHERE X eid %(x)s, X duration D', {'x': eid})
+ self.assertEqual(rset.rows[0][0], 12)
+
## def test_select_simplified(self):
## ueid = self.session.user.eid
## rset = self.execute('Any L WHERE %s login L'%ueid)
@@ -597,15 +625,15 @@
def test_select_no_descr(self):
rset = self.execute('Any X WHERE X is CWGroup', build_descr=0)
rset.rows.sort()
- self.assertEqual(tuplify(rset.rows), [(1,), (2,), (3,), (4,)])
+ self.assertEqual(tuplify(rset.rows), [(2,), (3,), (4,), (5,)])
self.assertEqual(rset.description, ())
def test_select_limit_offset(self):
rset = self.execute('CWGroup X ORDERBY N LIMIT 2 WHERE X name N')
- self.assertEqual(tuplify(rset.rows), [(1,), (2,)])
+ self.assertEqual(tuplify(rset.rows), [(2,), (3,)])
self.assertEqual(rset.description, [('CWGroup',), ('CWGroup',)])
rset = self.execute('CWGroup X ORDERBY N LIMIT 2 OFFSET 2 WHERE X name N')
- self.assertEqual(tuplify(rset.rows), [(3,), (4,)])
+ self.assertEqual(tuplify(rset.rows), [(4,), (5,)])
def test_select_symmetric(self):
self.execute("INSERT Personne X: X nom 'machin'")
@@ -746,14 +774,14 @@
def test_select_constant(self):
rset = self.execute('Any X, "toto" ORDERBY X WHERE X is CWGroup')
self.assertEqual(rset.rows,
- map(list, zip((1,2,3,4), ('toto','toto','toto','toto',))))
+ map(list, zip((2,3,4,5), ('toto','toto','toto','toto',))))
self.assertIsInstance(rset[0][1], unicode)
self.assertEqual(rset.description,
zip(('CWGroup', 'CWGroup', 'CWGroup', 'CWGroup'),
('String', 'String', 'String', 'String',)))
rset = self.execute('Any X, %(value)s ORDERBY X WHERE X is CWGroup', {'value': 'toto'})
self.assertEqual(rset.rows,
- map(list, zip((1,2,3,4), ('toto','toto','toto','toto',))))
+ map(list, zip((2,3,4,5), ('toto','toto','toto','toto',))))
self.assertIsInstance(rset[0][1], unicode)
self.assertEqual(rset.description,
zip(('CWGroup', 'CWGroup', 'CWGroup', 'CWGroup'),
diff -r 48f468f33704 -r e4580e5f0703 server/test/unittest_repository.py
--- a/server/test/unittest_repository.py Fri Dec 10 12:17:18 2010 +0100
+++ b/server/test/unittest_repository.py Fri Mar 11 09:46:45 2011 +0100
@@ -95,15 +95,14 @@
self.assertItemsEqual(person._unique_together[0],
('nom', 'prenom', 'inline2'))
- def test_schema_has_owner(self):
- repo = self.repo
- cnxid = repo.connect(self.admlogin, password=self.admpassword)
- self.failIf(repo.execute(cnxid, 'CWEType X WHERE NOT X owned_by U'))
- self.failIf(repo.execute(cnxid, 'CWRType X WHERE NOT X owned_by U'))
- self.failIf(repo.execute(cnxid, 'CWAttribute X WHERE NOT X owned_by U'))
- self.failIf(repo.execute(cnxid, 'CWRelation X WHERE NOT X owned_by U'))
- self.failIf(repo.execute(cnxid, 'CWConstraint X WHERE NOT X owned_by U'))
- self.failIf(repo.execute(cnxid, 'CWConstraintType X WHERE NOT X owned_by U'))
+ def test_all_entities_have_owner(self):
+ self.failIf(self.execute('Any X WHERE NOT X owned_by U'))
+
+ def test_all_entities_have_is(self):
+ self.failIf(self.execute('Any X WHERE NOT X is ET'))
+
+ def test_all_entities_have_cw_source(self):
+ self.failIf(self.execute('Any X WHERE NOT X cw_source S'))
def test_connect(self):
self.assert_(self.repo.connect(self.admlogin, password=self.admpassword))
@@ -155,8 +154,9 @@
self.assertRaises(ValidationError,
self.execute, 'SET X name "toto" WHERE X is CWGroup, X name "guests"')
self.failUnless(self.execute('Any X WHERE X is CWGroup, X name "toto"'))
- ex = self.assertRaises(QueryError, self.commit)
- self.assertEqual(str(ex), 'transaction must be rollbacked')
+ with self.assertRaises(QueryError) as cm:
+ self.commit()
+ self.assertEqual(str(cm.exception), 'transaction must be rollbacked')
self.rollback()
self.failIf(self.execute('Any X WHERE X is CWGroup, X name "toto"'))
@@ -171,8 +171,9 @@
self.assertRaises(Unauthorized,
self.execute, 'SET X name "toto" WHERE X is CWGroup, X name "guests"')
self.failUnless(self.execute('Any X WHERE X is CWGroup, X name "toto"'))
- ex = self.assertRaises(QueryError, self.commit)
- self.assertEqual(str(ex), 'transaction must be rollbacked')
+ with self.assertRaises(QueryError) as cm:
+ self.commit()
+ self.assertEqual(str(cm.exception), 'transaction must be rollbacked')
self.rollback()
self.failIf(self.execute('Any X WHERE X is CWGroup, X name "toto"'))
@@ -211,7 +212,7 @@
def test_check_session(self):
repo = self.repo
cnxid = repo.connect(self.admlogin, password=self.admpassword)
- self.assertEqual(repo.check_session(cnxid), None)
+ self.assertIsInstance(repo.check_session(cnxid), float)
repo.close(cnxid)
self.assertRaises(BadConnectionId, repo.check_session, cnxid)
@@ -277,8 +278,9 @@
repo.execute(cnxid, 'DELETE CWUser X WHERE X login "toto"')
repo.commit(cnxid)
try:
- ex = self.assertRaises(Exception, run_transaction)
- self.assertEqual(str(ex), 'try to access pool on a closed session')
+ with self.assertRaises(Exception) as cm:
+ run_transaction()
+ self.assertEqual(str(cm.exception), 'try to access pool on a closed session')
finally:
t.join()
@@ -288,7 +290,7 @@
self.assertListEqual([r.type for r in schema.eschema('CWAttribute').ordered_relations()
if not r.type in ('eid', 'is', 'is_instance_of', 'identity',
'creation_date', 'modification_date', 'cwuri',
- 'owned_by', 'created_by',
+ 'owned_by', 'created_by', 'cw_source',
'update_permission', 'read_permission',
'in_basket')],
['relation_type',
@@ -369,25 +371,25 @@
repo = self.repo
cnxid = repo.connect(self.admlogin, password=self.admpassword)
session = repo._get_session(cnxid, setpool=True)
- self.assertEqual(repo.type_and_source_from_eid(1, session),
- ('CWGroup', 'system', None))
- self.assertEqual(repo.type_from_eid(1, session), 'CWGroup')
- self.assertEqual(repo.source_from_eid(1, session).uri, 'system')
- self.assertEqual(repo.eid2extid(repo.system_source, 1, session), None)
+ self.assertEqual(repo.type_and_source_from_eid(2, session),
+ ('CWGroup', 'system', None))
+ self.assertEqual(repo.type_from_eid(2, session), 'CWGroup')
+ self.assertEqual(repo.source_from_eid(2, session).uri, 'system')
+ self.assertEqual(repo.eid2extid(repo.system_source, 2, session), None)
class dummysource: uri = 'toto'
- self.assertRaises(UnknownEid, repo.eid2extid, dummysource, 1, session)
+ self.assertRaises(UnknownEid, repo.eid2extid, dummysource, 2, session)
def test_public_api(self):
self.assertEqual(self.repo.get_schema(), self.repo.schema)
- self.assertEqual(self.repo.source_defs(), {'system': {'adapter': 'native', 'uri': 'system'}})
+ self.assertEqual(self.repo.source_defs(), {'system': {'type': 'native', 'uri': 'system'}})
# .properties() return a result set
self.assertEqual(self.repo.properties().rql, 'Any K,V WHERE P is CWProperty,P pkey K, P value V, NOT P for_user U')
def test_session_api(self):
repo = self.repo
cnxid = repo.connect(self.admlogin, password=self.admpassword)
- self.assertEqual(repo.user_info(cnxid), (5, 'admin', set([u'managers']), {}))
- self.assertEqual(repo.describe(cnxid, 1), (u'CWGroup', u'system', None))
+ self.assertEqual(repo.user_info(cnxid), (6, 'admin', set([u'managers']), {}))
+ self.assertEqual(repo.describe(cnxid, 2), (u'CWGroup', u'system', None))
repo.close(cnxid)
self.assertRaises(BadConnectionId, repo.user_info, cnxid)
self.assertRaises(BadConnectionId, repo.describe, cnxid, 1)
@@ -480,7 +482,7 @@
'EmailAddress', address=u'a@b.fr')
def test_multiple_edit_set_attributes(self):
- """make sure edited_attributes doesn't get cluttered
+ """make sure cw_edited doesn't get cluttered
by previous entities on multiple set
"""
# local hook
@@ -491,9 +493,9 @@
events = ('before_update_entity',)
def __call__(self):
# invoiced attribute shouldn't be considered "edited" before the hook
- self._test.failIf('invoiced' in self.entity.edited_attributes,
- 'edited_attributes cluttered by previous update')
- self.entity['invoiced'] = 10
+ self._test.failIf('invoiced' in self.entity.cw_edited,
+ 'cw_edited cluttered by previous update')
+ self.entity.cw_edited['invoiced'] = 10
with self.temporary_appobjects(DummyBeforeHook):
req = self.request()
req.create_entity('Affaire', ref=u'AFF01')
@@ -518,7 +520,7 @@
def test_type_from_eid(self):
self.session.set_pool()
- self.assertEqual(self.repo.type_from_eid(1, self.session), 'CWGroup')
+ self.assertEqual(self.repo.type_from_eid(2, self.session), 'CWGroup')
def test_type_from_eid_raise(self):
self.session.set_pool()
@@ -669,8 +671,9 @@
req.cnx.commit()
req = self.request()
req.create_entity('Note', type=u'todo', inline1=a01)
- ex = self.assertRaises(ValidationError, req.cnx.commit)
- self.assertEqual(ex.errors, {'inline1-subject': u'RQLUniqueConstraint S type T, S inline1 A1, A1 todo_by C, Y type T, Y inline1 A2, A2 todo_by C failed'})
+ with self.assertRaises(ValidationError) as cm:
+ req.cnx.commit()
+ self.assertEqual(cm.exception.errors, {'inline1-subject': u'RQLUniqueConstraint S type T, S inline1 A1, A1 todo_by C, Y type T, Y inline1 A2, A2 todo_by C failed'})
if __name__ == '__main__':
unittest_main()
diff -r 48f468f33704 -r e4580e5f0703 server/test/unittest_rql2sql.py
--- a/server/test/unittest_rql2sql.py Fri Dec 10 12:17:18 2010 +0100
+++ b/server/test/unittest_rql2sql.py Fri Mar 11 09:46:45 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -38,14 +38,16 @@
pass # already registered
-config = TestServerConfiguration('data')
-config.bootstrap_cubes()
-schema = config.load_schema()
-schema['in_state'].inlined = True
-schema['state_of'].inlined = False
-schema['comments'].inlined = False
+def setUpModule():
+ global config, schema
+ config = TestServerConfiguration('data', apphome=CWRQLTC.datadir)
+ config.bootstrap_cubes()
+ schema = config.load_schema()
+ schema['in_state'].inlined = True
+ schema['state_of'].inlined = False
+ schema['comments'].inlined = False
-def teardown_module(*args):
+def tearDownModule():
global config, schema
del config, schema
@@ -178,10 +180,14 @@
FROM cw_Personne AS _X
WHERE _X.cw_prenom=lulu AND NOT (EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by0, in_group_relation AS rel_in_group1, cw_CWGroup AS _G WHERE rel_owned_by0.eid_from=_X.cw_eid AND rel_in_group1.eid_from=rel_owned_by0.eid_to AND rel_in_group1.eid_to=_G.cw_eid AND ((_G.cw_name=lulufanclub) OR (_G.cw_name=managers))))'''),
+ ('Any X WHERE X title V, NOT X wikiid V, NOT X title "parent", X is Card',
+ '''SELECT _X.cw_eid
+FROM cw_Card AS _X
+WHERE NOT (_X.cw_wikiid=_X.cw_title) AND NOT (_X.cw_title=parent)''')
]
-ADVANCED= [
+ADVANCED = [
("Societe S WHERE S nom 'Logilab' OR S nom 'Caesium'",
'''SELECT _S.cw_eid
FROM cw_Societe AS _S
@@ -571,7 +577,14 @@
'''SELECT 1
FROM in_group_relation AS rel_in_group0'''),
-
+ ('CWEType X WHERE X name CV, X description V HAVING NOT V=CV AND NOT V = "parent"',
+ '''SELECT _X.cw_eid
+FROM cw_CWEType AS _X
+WHERE NOT (EXISTS(SELECT 1 WHERE _X.cw_description=parent)) AND NOT (EXISTS(SELECT 1 WHERE _X.cw_description=_X.cw_name))'''),
+ ('CWEType X WHERE X name CV, X description V HAVING V!=CV AND V != "parent"',
+ '''SELECT _X.cw_eid
+FROM cw_CWEType AS _X
+WHERE _X.cw_description!=parent AND _X.cw_description!=_X.cw_name'''),
]
@@ -1078,8 +1091,12 @@
]
class CWRQLTC(RQLGeneratorTC):
- schema = schema
backend = 'sqlite'
+
+ def setUp(self):
+ self.__class__.schema = schema
+ super(CWRQLTC, self).setUp()
+
def test_nonregr_sol(self):
delete = self.rqlhelper.parse(
'DELETE X read_permission READ_PERMISSIONSUBJECT,X add_permission ADD_PERMISSIONSUBJECT,'
@@ -1107,9 +1124,12 @@
return '\n'.join(l.strip() for l in text.strip().splitlines())
class PostgresSQLGeneratorTC(RQLGeneratorTC):
- schema = schema
backend = 'postgres'
+ def setUp(self):
+ self.__class__.schema = schema
+ super(PostgresSQLGeneratorTC, self).setUp()
+
def _norm_sql(self, sql):
return sql.strip()
@@ -1415,6 +1435,13 @@
FROM appears AS appears0
WHERE appears0.words @@ to_tsquery('default', 'toto&tata')"""),
+
+ ('Any X WHERE NOT A tags X, X has_text "pouet"',
+ '''SELECT appears1.uid
+FROM appears AS appears1
+WHERE NOT (EXISTS(SELECT 1 FROM tags_relation AS rel_tags0 WHERE appears1.uid=rel_tags0.eid_to)) AND appears1.words @@ to_tsquery('default', 'pouet')
+'''),
+
)):
yield t
diff -r 48f468f33704 -r e4580e5f0703 server/test/unittest_rqlannotation.py
--- a/server/test/unittest_rqlannotation.py Fri Dec 10 12:17:18 2010 +0100
+++ b/server/test/unittest_rqlannotation.py Fri Mar 11 09:46:45 2011 +0100
@@ -16,21 +16,26 @@
#
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
-"""unit tests for modules cubicweb.server.rqlannotation
-"""
+"""unit tests for modules cubicweb.server.rqlannotation"""
from cubicweb.devtools import init_test_database
from cubicweb.devtools.repotest import BaseQuerierTC
-repo, cnx = init_test_database()
-def teardown_module(*args):
+def setUpModule(*args):
+ global repo, cnx
+ repo, cnx = init_test_database(apphome=SQLGenAnnotatorTC.datadir)
+
+def tearDownModule(*args):
global repo, cnx
del repo, cnx
class SQLGenAnnotatorTC(BaseQuerierTC):
- repo = repo
+
+ def setUp(self):
+ self.__class__.repo = repo
+ super(SQLGenAnnotatorTC, self).setUp()
def get_max_eid(self):
# no need for cleanup here
diff -r 48f468f33704 -r e4580e5f0703 server/test/unittest_schemaserial.py
--- a/server/test/unittest_schemaserial.py Fri Dec 10 12:17:18 2010 +0100
+++ b/server/test/unittest_schemaserial.py Fri Mar 11 09:46:45 2011 +0100
@@ -15,8 +15,7 @@
#
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
-"""unit tests for schema rql (de)serialization
-"""
+"""unit tests for schema rql (de)serialization"""
import sys
from cStringIO import StringIO
@@ -26,14 +25,16 @@
from cubicweb.schema import CubicWebSchemaLoader
from cubicweb.devtools import TestServerConfiguration
-loader = CubicWebSchemaLoader()
-config = TestServerConfiguration('data')
-config.bootstrap_cubes()
-schema = loader.load(config)
+def setUpModule(*args):
+ global schema, config
+ loader = CubicWebSchemaLoader()
+ config = TestServerConfiguration('data', apphome=Schema2RQLTC.datadir)
+ config.bootstrap_cubes()
+ schema = loader.load(config)
-def teardown_module(*args):
- global schema, config, loader
- del schema, config, loader
+def tearDownModule(*args):
+ global schema, config
+ del schema, config
from cubicweb.server.schemaserial import *
from cubicweb.server.schemaserial import _erperms2rql as erperms2rql
diff -r 48f468f33704 -r e4580e5f0703 server/test/unittest_security.py
--- a/server/test/unittest_security.py Fri Dec 10 12:17:18 2010 +0100
+++ b/server/test/unittest_security.py Fri Mar 11 09:46:45 2011 +0100
@@ -27,8 +27,8 @@
class BaseSecurityTC(CubicWebTC):
- def setUp(self):
- CubicWebTC.setUp(self)
+ def setup_database(self):
+ super(BaseSecurityTC, self).setup_database()
self.create_user('iaminusersgrouponly')
self.readoriggroups = self.schema['Personne'].permissions['read']
self.addoriggroups = self.schema['Personne'].permissions['add']
@@ -75,7 +75,7 @@
def tearDown(self):
self.repo.system_source.__dict__.pop('syntax_tree_search', None)
- BaseSecurityTC.tearDown(self)
+ super(SecurityRewritingTC, self).tearDown()
def test_not_relation_read_security(self):
cnx = self.login('iaminusersgrouponly')
@@ -86,6 +86,7 @@
self.execute('Any U WHERE NOT EXISTS(A todo_by U), A is Affaire')
self.assertEqual(self.query[0][1].as_string(),
'Any U WHERE NOT EXISTS(A todo_by U), A is Affaire')
+ cnx.close()
class SecurityTC(BaseSecurityTC):
@@ -104,6 +105,7 @@
cu.execute("INSERT Personne X: X nom 'bidule'")
self.assertRaises(Unauthorized, cnx.commit)
self.assertEqual(cu.execute('Personne X').rowcount, 1)
+ cnx.close()
def test_insert_rql_permission(self):
# test user can only add an Affaire related to a Societe he owns
@@ -120,6 +122,7 @@
cu.execute("INSERT Societe X: X nom 'chouette'")
cu.execute("SET A concerne S WHERE A sujet 'cool', S nom 'chouette'")
cnx.commit()
+ cnx.close()
def test_update_security_1(self):
cnx = self.login('anon')
@@ -147,6 +150,7 @@
cu.execute("INSERT Personne X: X nom 'biduuule'")
cu.execute("INSERT Societe X: X nom 'looogilab'")
cu.execute("SET X travaille S WHERE X nom 'biduuule', S nom 'looogilab'")
+ cnx.close()
def test_update_rql_permission(self):
self.execute("SET A concerne S WHERE A is Affaire, S is Societe")
@@ -165,6 +169,7 @@
cu.execute("SET A concerne S WHERE A sujet 'pascool', S nom 'chouette'")
cu.execute("SET X sujet 'habahsicestcool' WHERE X sujet 'pascool'")
cnx.commit()
+ cnx.close()
def test_delete_security(self):
# FIXME: sample below fails because we don't detect "owner" can't delete
@@ -177,6 +182,7 @@
cnx = self.login('iaminusersgrouponly')
cu = cnx.cursor()
self.assertRaises(Unauthorized, cu.execute, "DELETE CWGroup Y WHERE Y name 'staff'")
+ cnx.close()
def test_delete_rql_permission(self):
self.execute("SET A concerne S WHERE A is Affaire, S is Societe")
@@ -200,6 +206,7 @@
## self.assertRaises(Unauthorized, cu.execute, "DELETE Affaire X")
cu.execute("DELETE Affaire X WHERE X sujet 'pascool'")
cnx.commit()
+ cnx.close()
def test_insert_relation_rql_permission(self):
@@ -225,6 +232,7 @@
cu.execute("INSERT Societe X: X nom 'chouette'")
cu.execute("SET A concerne S WHERE A is Affaire, S nom 'chouette'")
cnx.commit()
+ cnx.close()
def test_delete_relation_rql_permission(self):
self.execute("SET A concerne S WHERE A is Affaire, S is Societe")
@@ -249,6 +257,7 @@
cu.execute("SET A concerne S WHERE A is Affaire, S nom 'chouette'")
cnx.commit()
cu.execute("DELETE A concerne S WHERE S nom 'chouette'")
+ cnx.close()
def test_user_can_change_its_upassword(self):
@@ -260,6 +269,7 @@
cnx.commit()
cnx.close()
cnx = self.login('user', password='newpwd')
+ cnx.close()
def test_user_cant_change_other_upassword(self):
ueid = self.create_user('otheruser').eid
@@ -268,6 +278,7 @@
cu.execute('SET X upassword %(passwd)s WHERE X eid %(x)s',
{'x': ueid, 'passwd': 'newpwd'})
self.assertRaises(Unauthorized, cnx.commit)
+ cnx.close()
# read security test
@@ -277,6 +288,7 @@
cu = cnx.cursor()
self.assertRaises(Unauthorized,
cu.execute, 'Personne U where U nom "managers"')
+ cnx.close()
def test_read_erqlexpr_base(self):
eid = self.execute("INSERT Affaire X: X sujet 'cool'")[0][0]
@@ -301,6 +313,7 @@
self.assertEqual(rset.rows, [])
# test can't update an attribute of an entity that can't be read
self.assertRaises(Unauthorized, cu.execute, 'SET X sujet "hacked" WHERE X eid %(x)s', {'x': eid})
+ cnx.close()
def test_entity_created_in_transaction(self):
@@ -337,6 +350,7 @@
rset = cu.execute("Any X WHERE X has_text 'cool'")
self.assertEqual(sorted(eid for eid, in rset.rows),
[card1, aff2])
+ cnx.close()
def test_read_erqlexpr_has_text2(self):
self.execute("INSERT Personne X: X nom 'bidule'")
@@ -349,6 +363,7 @@
self.assertEqual(len(rset.rows), 1, rset.rows)
rset = cu.execute('Any N WITH N BEING (Any N WHERE N has_text "bidule")')
self.assertEqual(len(rset.rows), 1, rset.rows)
+ cnx.close()
def test_read_erqlexpr_optional_rel(self):
self.execute("INSERT Personne X: X nom 'bidule'")
@@ -359,6 +374,7 @@
cu = cnx.cursor()
rset = cu.execute('Any N,U WHERE N has_text "bidule", N owned_by U?')
self.assertEqual(len(rset.rows), 1, rset.rows)
+ cnx.close()
def test_read_erqlexpr_aggregat(self):
self.execute("INSERT Affaire X: X sujet 'cool'")[0][0]
@@ -382,6 +398,7 @@
values = dict(rset)
self.assertEqual(values['Affaire'], 1)
self.assertEqual(values['Societe'], 2)
+ cnx.close()
def test_attribute_security(self):
@@ -429,6 +446,7 @@
cnx.commit()
cu.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note2.eid})
cnx.commit()
+ cnx.close()
def test_attribute_read_security(self):
# anon not allowed to see users' login, but they can see users
@@ -446,6 +464,7 @@
self.assertEqual(x.login, None)
self.failUnless(x.creation_date)
cnx.rollback()
+ cnx.close()
class BaseSchemaSecurityTC(BaseSecurityTC):
"""tests related to the base schema permission configuration"""
@@ -472,6 +491,7 @@
cu.execute('DELETE Affaire X WHERE X ref "ARCT01"')
cnx.commit()
self.failIf(cu.execute('Affaire X'))
+ cnx.close()
def test_users_and_groups_non_readable_by_guests(self):
cnx = self.login('anon')
@@ -498,6 +518,7 @@
# but can't modify it
cu.execute('SET X login "toto" WHERE X eid %(x)s', {'x': anon.eid})
self.assertRaises(Unauthorized, cnx.commit)
+ cnx.close()
def test_in_group_relation(self):
cnx = self.login('iaminusersgrouponly')
@@ -506,6 +527,7 @@
self.assertRaises(Unauthorized, cu.execute, rql)
rql = u"SET U in_group G WHERE U login 'admin', G name 'users'"
self.assertRaises(Unauthorized, cu.execute, rql)
+ cnx.close()
def test_owned_by(self):
self.execute("INSERT Personne X: X nom 'bidule'")
@@ -514,6 +536,7 @@
cu = cnx.cursor()
rql = u"SET X owned_by U WHERE U login 'iaminusersgrouponly', X is Personne"
self.assertRaises(Unauthorized, cu.execute, rql)
+ cnx.close()
def test_bookmarked_by_guests_security(self):
beid1 = self.execute('INSERT Bookmark B: B path "?vid=manage", B title "manage"')[0][0]
@@ -535,6 +558,7 @@
self.assertRaises(Unauthorized,
cu.execute, 'SET B bookmarked_by U WHERE U eid %(x)s, B eid %(b)s',
{'x': anoneid, 'b': beid1})
+ cnx.close()
def test_ambigous_ordered(self):
@@ -542,6 +566,7 @@
cu = cnx.cursor()
names = [t for t, in cu.execute('Any N ORDERBY lower(N) WHERE X name N')]
self.assertEqual(names, sorted(names, key=lambda x: x.lower()))
+ cnx.close()
def test_in_state_without_update_perm(self):
"""check a user change in_state without having update permission on the
@@ -575,6 +600,7 @@
# restore orig perms
for action, perms in affaire_perms.iteritems():
self.schema['Affaire'].set_action_permissions(action, perms)
+ cnx.close()
def test_trinfo_security(self):
aff = self.execute('INSERT Affaire X: X ref "ARCT01"').get_entity(0, 0)
diff -r 48f468f33704 -r e4580e5f0703 server/test/unittest_ssplanner.py
--- a/server/test/unittest_ssplanner.py Fri Dec 10 12:17:18 2010 +0100
+++ b/server/test/unittest_ssplanner.py Fri Mar 11 09:46:45 2011 +0100
@@ -15,25 +15,25 @@
#
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
-"""
-"""
from cubicweb.devtools import init_test_database
from cubicweb.devtools.repotest import BasePlannerTC, test_plan
from cubicweb.server.ssplanner import SSPlanner
# keep cnx so it's not garbage collected and the associated session closed
-repo, cnx = init_test_database()
+def setUpModule(*args):
+ global repo, cnx
+ repo, cnx = init_test_database(apphome=SSPlannerTC.datadir)
-def teardown_module(*args):
+def tearDownModule(*args):
global repo, cnx
del repo, cnx
class SSPlannerTC(BasePlannerTC):
- repo = repo
_test = test_plan
def setUp(self):
+ self.__class__.repo = repo
BasePlannerTC.setUp(self)
self.planner = SSPlanner(self.o.schema, self.repo.vreg.rqlhelper)
self.system = self.o._repo.system_source
diff -r 48f468f33704 -r e4580e5f0703 server/test/unittest_storage.py
--- a/server/test/unittest_storage.py Fri Dec 10 12:17:18 2010 +0100
+++ b/server/test/unittest_storage.py Fri Mar 11 09:46:45 2011 +0100
@@ -60,7 +60,7 @@
storages.set_attribute_storage(self.repo, 'File', 'data', bfs_storage)
def tearDown(self):
- super(CubicWebTC, self).tearDown()
+ super(StorageTC, self).tearDown()
storages.unset_attribute_storage(self.repo, 'File', 'data')
shutil.rmtree(self.tempdir)
@@ -75,6 +75,15 @@
{'f': entity.eid})[0][0]
return fspath.getvalue()
+ def test_bfss_wrong_fspath_usage(self):
+ f1 = self.create_file()
+ self.execute('Any fspath(D) WHERE F eid %(f)s, F data D', {'f': f1.eid})
+ with self.assertRaises(NotImplementedError) as cm:
+ self.execute('Any fspath(F) WHERE F eid %(f)s', {'f': f1.eid})
+ self.assertEqual(str(cm.exception),
+ 'This callback is only available for BytesFileSystemStorage '
+ 'managed attribute. Is FSPATH() argument BFSS managed?')
+
def test_bfss_storage(self):
f1 = self.create_file()
expected_filepath = osp.join(self.tempdir, '%s_data_%s' %
@@ -114,34 +123,34 @@
self.create_file()
def test_source_mapped_attribute_error_cases(self):
- ex = self.assertRaises(QueryError, self.execute,
- 'Any X WHERE X data ~= "hop", X is File')
- self.assertEqual(str(ex), 'can\'t use File.data (X data ILIKE "hop") in restriction')
- ex = self.assertRaises(QueryError, self.execute,
- 'Any X, Y WHERE X data D, Y data D, '
- 'NOT X identity Y, X is File, Y is File')
- self.assertEqual(str(ex), "can't use D as a restriction variable")
+ with self.assertRaises(QueryError) as cm:
+ self.execute('Any X WHERE X data ~= "hop", X is File')
+ self.assertEqual(str(cm.exception), 'can\'t use File.data (X data ILIKE "hop") in restriction')
+ with self.assertRaises(QueryError) as cm:
+ self.execute('Any X, Y WHERE X data D, Y data D, '
+ 'NOT X identity Y, X is File, Y is File')
+ self.assertEqual(str(cm.exception), "can't use D as a restriction variable")
# query returning mix of mapped / regular attributes (only file.data
# mapped, not image.data for instance)
- ex = self.assertRaises(QueryError, self.execute,
- 'Any X WITH X BEING ('
- ' (Any NULL)'
- ' UNION '
- ' (Any D WHERE X data D, X is File)'
- ')')
- self.assertEqual(str(ex), 'query fetch some source mapped attribute, some not')
- ex = self.assertRaises(QueryError, self.execute,
- '(Any D WHERE X data D, X is File)'
- ' UNION '
- '(Any D WHERE X title D, X is Bookmark)')
- self.assertEqual(str(ex), 'query fetch some source mapped attribute, some not')
+ with self.assertRaises(QueryError) as cm:
+ self.execute('Any X WITH X BEING ('
+ ' (Any NULL)'
+ ' UNION '
+ ' (Any D WHERE X data D, X is File)'
+ ')')
+ self.assertEqual(str(cm.exception), 'query fetch some source mapped attribute, some not')
+ with self.assertRaises(QueryError) as cm:
+ self.execute('(Any D WHERE X data D, X is File)'
+ ' UNION '
+ '(Any D WHERE X title D, X is Bookmark)')
+ self.assertEqual(str(cm.exception), 'query fetch some source mapped attribute, some not')
storages.set_attribute_storage(self.repo, 'State', 'name',
storages.BytesFileSystemStorage(self.tempdir))
try:
- ex = self.assertRaises(QueryError,
- self.execute, 'Any D WHERE X name D, X is IN (State, Transition)')
- self.assertEqual(str(ex), 'query fetch some source mapped attribute, some not')
+ with self.assertRaises(QueryError) as cm:
+ self.execute('Any D WHERE X name D, X is IN (State, Transition)')
+ self.assertEqual(str(cm.exception), 'query fetch some source mapped attribute, some not')
finally:
storages.unset_attribute_storage(self.repo, 'State', 'name')
@@ -172,10 +181,10 @@
self.assertEqual(rset[1][0], f1.eid)
self.assertEqual(rset[0][1], len('the-data'))
self.assertEqual(rset[1][1], len('the-data'))
- ex = self.assertRaises(QueryError, self.execute,
- 'Any X,UPPER(D) WHERE X eid %(x)s, X data D',
- {'x': f1.eid})
- self.assertEqual(str(ex), 'UPPER can not be called on mapped attribute')
+ with self.assertRaises(QueryError) as cm:
+ self.execute('Any X,UPPER(D) WHERE X eid %(x)s, X data D',
+ {'x': f1.eid})
+ self.assertEqual(str(cm.exception), 'UPPER can not be called on mapped attribute')
def test_bfss_fs_importing_transparency(self):
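
The hunks above convert logilab's return-value assertRaises idiom to the unittest2 context-manager form. A minimal illustrative sketch of that pattern follows (plain unittest; QueryError and run_query are made-up stand-ins, not code from this changeset):

import unittest

class QueryError(Exception):
    pass

def run_query(rql):
    # stand-in for self.execute(); always fails so the test has something to catch
    raise QueryError("can't use D as a restriction variable")

class AssertRaisesPatternTC(unittest.TestCase):
    def test_context_manager_style(self):
        # old style (logilab.common.testlib): ex = self.assertRaises(QueryError, run_query, rql)
        # new style (unittest2 / Python 2.7), as used by the patch:
        with self.assertRaises(QueryError) as cm:
            run_query('Any D WHERE X data D')
        self.assertEqual(str(cm.exception),
                         "can't use D as a restriction variable")

if __name__ == '__main__':
    unittest.main()
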
diff -r 48f468f33704 -r e4580e5f0703 server/test/unittest_undo.py
--- a/server/test/unittest_undo.py Fri Dec 10 12:17:18 2010 +0100
+++ b/server/test/unittest_undo.py Fri Mar 11 09:46:45 2011 +0100
@@ -212,9 +212,10 @@
self.assertEqual(errors,
[u"Can't restore relation in_group, object entity "
"%s doesn't exist anymore." % g.eid])
- ex = self.assertRaises(ValidationError, self.commit)
- self.assertEqual(ex.entity, self.toto.eid)
- self.assertEqual(ex.errors,
+ with self.assertRaises(ValidationError) as cm:
+ self.commit()
+ self.assertEqual(cm.exception.entity, self.toto.eid)
+ self.assertEqual(cm.exception.errors,
{'in_group-subject': u'at least one relation in_group is '
'required on CWUser (%s)' % self.toto.eid})
@@ -252,10 +253,10 @@
value=u'text/html')
tutu.set_relations(use_email=email, reverse_for_user=prop)
self.commit()
- ex = self.assertRaises(ValidationError,
- self.cnx.undo_transaction, txuuid)
- self.assertEqual(ex.entity, tutu.eid)
- self.assertEqual(ex.errors,
+ with self.assertRaises(ValidationError) as cm:
+ self.cnx.undo_transaction(txuuid)
+ self.assertEqual(cm.exception.entity, tutu.eid)
+ self.assertEqual(cm.exception.errors,
{None: 'some later transaction(s) touch entity, undo them first'})
def test_undo_creation_integrity_2(self):
@@ -265,17 +266,17 @@
session.execute('DELETE U in_group G WHERE U eid %(x)s', {'x': self.toto.eid})
self.toto.set_relations(in_group=g)
self.commit()
- ex = self.assertRaises(ValidationError,
- self.cnx.undo_transaction, txuuid)
- self.assertEqual(ex.entity, g.eid)
- self.assertEqual(ex.errors,
+ with self.assertRaises(ValidationError) as cm:
+ self.cnx.undo_transaction(txuuid)
+ self.assertEqual(cm.exception.entity, g.eid)
+ self.assertEqual(cm.exception.errors,
{None: 'some later transaction(s) touch entity, undo them first'})
# self.assertEqual(errors,
# [u"Can't restore relation in_group, object entity "
# "%s doesn't exist anymore." % g.eid])
- # ex = self.assertRaises(ValidationError, self.commit)
- # self.assertEqual(ex.entity, self.toto.eid)
- # self.assertEqual(ex.errors,
+ # with self.assertRaises(ValidationError) as cm: self.commit()
+ # self.assertEqual(cm.exception.entity, self.toto.eid)
+ # self.assertEqual(cm.exception.errors,
# {'in_group-subject': u'at least one relation in_group is '
# 'required on CWUser (%s)' % self.toto.eid})
diff -r 48f468f33704 -r e4580e5f0703 server/utils.py
--- a/server/utils.py Fri Dec 10 12:17:18 2010 +0100
+++ b/server/utils.py Fri Mar 11 09:46:45 2011 +0100
@@ -20,12 +20,11 @@
import sys
import string
+import logging
from threading import Timer, Thread
from getpass import getpass
from random import choice
-from logilab.common.configuration import Configuration
-
from cubicweb.server import SOURCE_TYPES
try:
@@ -111,12 +110,6 @@
return user, passwd
-def ask_source_config(sourcetype, inputlevel=0):
- sconfig = Configuration(options=SOURCE_TYPES[sourcetype].options)
- sconfig.adapter = sourcetype
- sconfig.input_config(inputlevel=inputlevel)
- return sconfig
-
_MARKER=object()
def func_name(func):
name = getattr(func, '__name__', _MARKER)
@@ -137,6 +130,10 @@
def auto_restart_func(self=self, func=func, args=args):
try:
func(*args)
+ except:
+ logger = logging.getLogger('cubicweb.repository')
+ logger.exception('Unhandled exception in LoopTask %s', self.name)
+ raise
finally:
self.start()
self.func = auto_restart_func
@@ -166,6 +163,10 @@
def auto_remove_func(self=self, func=target):
try:
func()
+ except:
+ logger = logging.getLogger('cubicweb.repository')
+ logger.exception('Unhandled exception in RepoThread %s', self._name)
+ raise
finally:
self.running_threads.remove(self)
Thread.__init__(self, target=auto_remove_func)
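
The server/utils.py change above logs otherwise-silent exceptions raised inside a looping task or repository thread before re-raising, while still re-arming the task. A rough sketch of that idea, not the CubicWeb implementation (make_auto_restart and its arguments are invented names):

import logging

def make_auto_restart(func, restart, logger_name='cubicweb.repository'):
    """Wrap `func` so unhandled exceptions are logged, then re-schedule via `restart`."""
    def auto_restart_func(*args):
        try:
            func(*args)
        except Exception:
            # log with traceback so the failure is visible in the server log
            logging.getLogger(logger_name).exception(
                'Unhandled exception in LoopTask %s',
                getattr(func, '__name__', func))
            raise
        finally:
            restart()  # re-arm the task whatever happened, as the original code does
    return auto_restart_func
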
diff -r 48f468f33704 -r e4580e5f0703 setup.py
--- a/setup.py Fri Dec 10 12:17:18 2010 +0100
+++ b/setup.py Fri Mar 11 09:46:45 2011 +0100
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-# pylint: disable-msg=W0142,W0403,W0404,W0613,W0622,W0622,W0704,R0904,C0103,E0611
+# pylint: disable=W0142,W0403,W0404,W0613,W0622,W0622,W0704,R0904,C0103,E0611
#
# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
@@ -194,9 +194,10 @@
old_ok = DS._ok
def _ok(self, path):
"""Return True if ``path`` can be written during installation."""
- out = old_ok(self, path)
+ out = old_ok(self, path) # here for side effect from setuptools
realpath = os.path.normcase(os.path.realpath(path))
- if realpath.startswith(sys.prefix):
+ allowed_path = os.path.normcase(sys.prefix)
+ if realpath.startswith(allowed_path):
out = True
return out
DS._ok = _ok
diff -r 48f468f33704 -r e4580e5f0703 skeleton/__pkginfo__.py.tmpl
--- a/skeleton/__pkginfo__.py.tmpl Fri Dec 10 12:17:18 2010 +0100
+++ b/skeleton/__pkginfo__.py.tmpl Fri Mar 11 09:46:45 2011 +0100
@@ -1,4 +1,4 @@
-# pylint: disable-msg=W0622
+# pylint: disable=W0622
"""%(distname)s application packaging information"""
modname = '%(cubename)s'
@@ -8,13 +8,13 @@
version = '.'.join(str(num) for num in numversion)
license = '%(license)s'
-
author = '%(author)s'
author_email = '%(author-email)s'
+description = '%(shortdesc)s'
+web = 'http://www.cubicweb.org/project/%%s' %% distname
-description = '%(shortdesc)s'
-
-web = 'http://www.cubicweb.org/project/%%s' %% distname
+__depends__ = %(dependencies)s
+__recommends__ = {}
from os import listdir as _listdir
@@ -40,6 +40,3 @@
# Note: here, you'll need to add subdirectories if you want
# them to be included in the debian package
-__depends__ = %(dependencies)s
-__recommends__ = {}
-
diff -r 48f468f33704 -r e4580e5f0703 skeleton/setup.py
--- a/skeleton/setup.py Fri Dec 10 12:17:18 2010 +0100
+++ b/skeleton/setup.py Fri Mar 11 09:46:45 2011 +0100
@@ -1,8 +1,25 @@
#!/usr/bin/env python
-# pylint: disable=W0404,W0622,W0704,W0613
+# pylint: disable=W0142,W0403,W0404,W0613,W0622,W0622,W0704,R0904,C0103,E0611
+#
# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-
+#
+# This file is part of CubicWeb tag cube.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+"""Generic Setup script, takes package info from __pkginfo__.py file
+"""
__docformat__ = "restructuredtext en"
import os
@@ -12,49 +29,50 @@
try:
if os.environ.get('NO_SETUPTOOLS'):
- raise ImportError()
+ raise ImportError() # do as there is no setuptools
from setuptools import setup
from setuptools.command import install_lib
- USE_SETUPTOOLS = 1
+ USE_SETUPTOOLS = True
except ImportError:
from distutils.core import setup
from distutils.command import install_lib
- USE_SETUPTOOLS = 0
+ USE_SETUPTOOLS = False
+from distutils.command import install_data
-
-sys.modules.pop('__pkginfo__', None)
# import required features
-from __pkginfo__ import modname, version, license, description, \
- web, author, author_email
-# import optional features
-import __pkginfo__
-distname = getattr(__pkginfo__, 'distname', modname)
-scripts = getattr(__pkginfo__, 'scripts', [])
-data_files = getattr(__pkginfo__, 'data_files', None)
-include_dirs = getattr(__pkginfo__, 'include_dirs', [])
-ext_modules = getattr(__pkginfo__, 'ext_modules', None)
-dependency_links = getattr(__pkginfo__, 'dependency_links', [])
-
-STD_BLACKLIST = ('CVS', '.svn', '.hg', 'debian', 'dist', 'build')
-
-IGNORED_EXTENSIONS = ('.pyc', '.pyo', '.elc', '~')
+from __pkginfo__ import modname, version, license, description, web, \
+ author, author_email
if exists('README'):
long_description = file('README').read()
else:
long_description = ''
+
+# import optional features
+import __pkginfo__
if USE_SETUPTOOLS:
- requires = {}
- for entry in ("__depends__", "__recommends__"):
- requires.update(getattr(__pkginfo__, entry, {}))
- install_requires = [("%s %s" % (d, v and v or "")).strip()
+ requires = {}
+ for entry in ("__depends__",): # "__recommends__"):
+ requires.update(getattr(__pkginfo__, entry, {}))
+ install_requires = [("%s %s" % (d, v and v or "")).strip()
for d, v in requires.iteritems()]
else:
- install_requires = []
+ install_requires = []
+
+distname = getattr(__pkginfo__, 'distname', modname)
+scripts = getattr(__pkginfo__, 'scripts', ())
+include_dirs = getattr(__pkginfo__, 'include_dirs', ())
+data_files = getattr(__pkginfo__, 'data_files', None)
+ext_modules = getattr(__pkginfo__, 'ext_modules', None)
+dependency_links = getattr(__pkginfo__, 'dependency_links', ())
+
+BASE_BLACKLIST = ('CVS', '.svn', '.hg', 'debian', 'dist', 'build')
+IGNORED_EXTENSIONS = ('.pyc', '.pyo', '.elc', '~')
def ensure_scripts(linux_scripts):
- """Creates the proper script names required for each platform
+ """
+ Creates the proper script names required for each platform
(taken from 4Suite)
"""
from distutils import util
@@ -64,23 +82,8 @@
scripts_ = linux_scripts
return scripts_
-def get_packages(directory, prefix):
- """return a list of subpackages for the given directory"""
- result = []
- for package in os.listdir(directory):
- absfile = join(directory, package)
- if isdir(absfile):
- if exists(join(absfile, '__init__.py')) or \
- package in ('test', 'tests'):
- if prefix:
- result.append('%s.%s' % (prefix, package))
- else:
- result.append(package)
- result += get_packages(absfile, result[-1])
- return result
-
def export(from_dir, to_dir,
- blacklist=STD_BLACKLIST,
+ blacklist=BASE_BLACKLIST,
ignore_ext=IGNORED_EXTENSIONS,
verbose=True):
"""make a mirror of from_dir in to_dir, omitting directories and files
@@ -134,6 +137,35 @@
dest = join(self.install_dir, base, directory)
export(directory, dest, verbose=False)
+# re-enable copying data files in sys.prefix
+old_install_data = install_data.install_data
+if USE_SETUPTOOLS:
+ # overwrite InstallData to use sys.prefix instead of the egg directory
+ class MyInstallData(old_install_data):
+ """A class that manages data files installation"""
+ def run(self):
+ _old_install_dir = self.install_dir
+ if self.install_dir.endswith('egg'):
+ self.install_dir = sys.prefix
+ old_install_data.run(self)
+ self.install_dir = _old_install_dir
+ try:
+ import setuptools.command.easy_install # only if easy_install avaible
+ # monkey patch: Crack SandboxViolation verification
+ from setuptools.sandbox import DirectorySandbox as DS
+ old_ok = DS._ok
+ def _ok(self, path):
+ """Return True if ``path`` can be written during installation."""
+ out = old_ok(self, path) # here for side effect from setuptools
+ realpath = os.path.normcase(os.path.realpath(path))
+ allowed_path = os.path.normcase(sys.prefix)
+ if realpath.startswith(allowed_path):
+ out = True
+ return out
+ DS._ok = _ok
+ except ImportError:
+ pass
+
def install(**kwargs):
"""setup entry point"""
if USE_SETUPTOOLS:
@@ -142,9 +174,13 @@
# install-layout option was introduced in 2.5.3-1~exp1
elif sys.version_info < (2, 5, 4) and '--install-layout=deb' in sys.argv:
sys.argv.remove('--install-layout=deb')
- if USE_SETUPTOOLS and install_requires:
+ cmdclass = {'install_lib': MyInstallLib}
+ if USE_SETUPTOOLS:
kwargs['install_requires'] = install_requires
kwargs['dependency_links'] = dependency_links
+ kwargs['zip_safe'] = False
+ cmdclass['install_data'] = MyInstallData
+
return setup(name = distname,
version = version,
license = license,
@@ -156,7 +192,7 @@
scripts = ensure_scripts(scripts),
data_files = data_files,
ext_modules = ext_modules,
- cmdclass = {'install_lib': MyInstallLib},
+ cmdclass = cmdclass,
**kwargs
)
diff -r 48f468f33704 -r e4580e5f0703 skeleton/test/realdb_test_CUBENAME.py
--- a/skeleton/test/realdb_test_CUBENAME.py Fri Dec 10 12:17:18 2010 +0100
+++ b/skeleton/test/realdb_test_CUBENAME.py Fri Mar 11 09:46:45 2011 +0100
@@ -21,7 +21,7 @@
from cubicweb.devtools import buildconfig, loadconfig
from cubicweb.devtools.testlib import RealDBTest
-def setup_module(options):
+def setUpModule(options):
if options.source:
configcls = loadconfig(options.source)
elif options.dbname is None:
@@ -33,7 +33,7 @@
RealDatabaseTC.configcls = configcls
class RealDatabaseTC(RealDBTest):
- configcls = None # set by setup_module()
+ configcls = None # set by setUpModule()
def test_all_primaries(self):
for rset in self.iter_individual_rsets(limit=50):
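
Several files above rename setup_module/teardown_module to setUpModule/tearDownModule, the names that unittest2 / Python 2.7 discover for module-level fixtures. A small self-contained sketch of that convention (the _resource global is only an illustration of a shared module fixture such as a test database):

import unittest

_resource = None

def setUpModule(*args):
    # called once before any test in this module runs
    global _resource
    _resource = object()   # stands in for something like init_test_database()

def tearDownModule(*args):
    # called once after the last test of the module
    global _resource
    _resource = None

class ExampleTC(unittest.TestCase):
    def test_resource_available(self):
        self.assertIsNotNone(_resource)

if __name__ == '__main__':
    unittest.main()
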
diff -r 48f468f33704 -r e4580e5f0703 skeleton/test/test_CUBENAME.py.tmpl
--- a/skeleton/test/test_CUBENAME.py.tmpl Fri Dec 10 12:17:18 2010 +0100
+++ b/skeleton/test/test_CUBENAME.py.tmpl Fri Mar 11 09:46:45 2011 +0100
@@ -29,7 +29,7 @@
class DefaultTC(testlib.CubicWebTC):
def test_something(self):
- self.skip('this cube has no test')
+ self.skipTest('this cube has no test')
if __name__ == '__main__':
diff -r 48f468f33704 -r e4580e5f0703 sobjects/notification.py
--- a/sobjects/notification.py Fri Dec 10 12:17:18 2010 +0100
+++ b/sobjects/notification.py Fri Mar 11 09:46:45 2011 +0100
@@ -27,7 +27,7 @@
from cubicweb.selectors import yes
from cubicweb.view import Component
-from cubicweb.mail import NotificationView, SkipEmail
+from cubicweb.mail import NotificationView as BaseNotificationView, SkipEmail
from cubicweb.server.hook import SendMailOp
@@ -59,7 +59,7 @@
# abstract or deactivated notification views and mixin ########################
-class NotificationView(NotificationView):
+class NotificationView(BaseNotificationView):
"""overriden to delay actual sending of mails to a commit operation by
default
"""
diff -r 48f468f33704 -r e4580e5f0703 sobjects/supervising.py
--- a/sobjects/supervising.py Fri Dec 10 12:17:18 2010 +0100
+++ b/sobjects/supervising.py Fri Mar 11 09:46:45 2011 +0100
@@ -15,11 +15,10 @@
#
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
-"""some hooks and views to handle supervising of any data changes
-
+"""some hooks and views to handle supervising of any data changes"""
-"""
__docformat__ = "restructuredtext en"
+_ = unicode
from cubicweb import UnknownEid
from cubicweb.selectors import none_rset
@@ -135,7 +134,8 @@
self.w(msg % locals())
def change_state(self, (entity, fromstate, tostate)):
- msg = self._cw._('changed state of %(etype)s #%(eid)s (%(title)s)')
+ _ = self._cw._
+ msg = _('changed state of %(etype)s #%(eid)s (%(title)s)')
self.w(u'%s\n' % (msg % self._entity_context(entity)))
self.w(_(' from state %(fromstate)s to state %(tostate)s\n' %
{'fromstate': _(fromstate.name), 'tostate': _(tostate.name)}))
@@ -185,6 +185,6 @@
msg = format_mail(uinfo, recipients, content, view.subject(), config=config)
self.to_send = [(msg, recipients)]
- def commit_event(self):
+ def postcommit_event(self):
self._prepare_email()
- SendMailOp.commit_event(self)
+ SendMailOp.postcommit_event(self)
diff -r 48f468f33704 -r e4580e5f0703 sobjects/test/unittest_notification.py
--- a/sobjects/test/unittest_notification.py Fri Dec 10 12:17:18 2010 +0100
+++ b/sobjects/test/unittest_notification.py Fri Mar 11 09:46:45 2011 +0100
@@ -58,7 +58,7 @@
def test_nonregr_empty_message_id(self):
for eid in (1, 12, 123, 1234):
msgid1 = construct_message_id('testapp', eid, 12)
- self.assertNotEquals(msgid1, '<@testapp.%s>' % gethostname())
+ self.assertNotEqual(msgid1, '<@testapp.%s>' % gethostname())
class RecipientsFinderTC(CubicWebTC):
diff -r 48f468f33704 -r e4580e5f0703 sobjects/textparsers.py
--- a/sobjects/textparsers.py Fri Dec 10 12:17:18 2010 +0100
+++ b/sobjects/textparsers.py Fri Mar 11 09:46:45 2011 +0100
@@ -15,13 +15,13 @@
#
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
-"""hooks triggered on email entities creation:
+"""Some parsers to detect action to do from text
-* look for state change instruction (XXX security)
-* set email content as a comment on an entity when comments are supported and
- linking information are found
+Currently only a parser to look for state change instruction is provided.
+Take care with security when you're using it: think about the user who
+will provide the text to analyze...
+"""
-"""
__docformat__ = "restructuredtext en"
import re
@@ -29,7 +29,6 @@
from cubicweb import UnknownEid, typed_eid
from cubicweb.view import Component
- # XXX use user session if gpg signature validated
class TextAnalyzer(Component):
"""analyze and extract information from plain text by calling registered
diff -r 48f468f33704 -r e4580e5f0703 tags.py
--- a/tags.py Fri Dec 10 12:17:18 2010 +0100
+++ b/tags.py Fri Mar 11 09:46:45 2011 +0100
@@ -15,9 +15,8 @@
#
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
-"""helper classes to generate simple (X)HTML tags
+"""helper classes to generate simple (X)HTML tags"""
-"""
__docformat__ = "restructuredtext en"
from cubicweb.uilib import simple_sgml_tag, sgml_attributes
diff -r 48f468f33704 -r e4580e5f0703 test/data/cubes/comment/__pkginfo__.py
--- a/test/data/cubes/comment/__pkginfo__.py Fri Dec 10 12:17:18 2010 +0100
+++ b/test/data/cubes/comment/__pkginfo__.py Fri Mar 11 09:46:45 2011 +0100
@@ -1,4 +1,4 @@
-# pylint: disable-msg=W0622
+# pylint: disable=W0622
# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
diff -r 48f468f33704 -r e4580e5f0703 test/data/cubes/email/__pkginfo__.py
--- a/test/data/cubes/email/__pkginfo__.py Fri Dec 10 12:17:18 2010 +0100
+++ b/test/data/cubes/email/__pkginfo__.py Fri Mar 11 09:46:45 2011 +0100
@@ -1,4 +1,4 @@
-# pylint: disable-msg=W0622
+# pylint: disable=W0622
# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
diff -r 48f468f33704 -r e4580e5f0703 test/data/cubes/file/__pkginfo__.py
--- a/test/data/cubes/file/__pkginfo__.py Fri Dec 10 12:17:18 2010 +0100
+++ b/test/data/cubes/file/__pkginfo__.py Fri Mar 11 09:46:45 2011 +0100
@@ -1,4 +1,4 @@
-# pylint: disable-msg=W0622
+# pylint: disable=W0622
# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
diff -r 48f468f33704 -r e4580e5f0703 test/data/cubes/forge/__pkginfo__.py
--- a/test/data/cubes/forge/__pkginfo__.py Fri Dec 10 12:17:18 2010 +0100
+++ b/test/data/cubes/forge/__pkginfo__.py Fri Mar 11 09:46:45 2011 +0100
@@ -1,4 +1,4 @@
-# pylint: disable-msg=W0622
+# pylint: disable=W0622
# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
diff -r 48f468f33704 -r e4580e5f0703 test/data/lowered_etype.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test/data/lowered_etype.py Fri Mar 11 09:46:45 2011 +0100
@@ -0,0 +1,5 @@
+
+from yams.buildobjs import EntityType
+
+class my_etype(EntityType):
+ pass
diff -r 48f468f33704 -r e4580e5f0703 test/data/schema.py
--- a/test/data/schema.py Fri Dec 10 12:17:18 2010 +0100
+++ b/test/data/schema.py Fri Mar 11 09:46:45 2011 +0100
@@ -19,7 +19,9 @@
"""
-from yams.buildobjs import EntityType, String, SubjectRelation, RelationDefinition
+from yams.buildobjs import (EntityType, String, SubjectRelation,
+ RelationDefinition)
+from cubicweb.schema import WorkflowableEntityType
class Personne(EntityType):
nom = String(required=True)
@@ -48,3 +50,9 @@
class evaluee(RelationDefinition):
subject = 'CWUser'
object = 'Note'
+
+
+class StateFull(WorkflowableEntityType):
+ name = String()
+
+
diff -r 48f468f33704 -r e4580e5f0703 test/data/scripts/script1.py
--- a/test/data/scripts/script1.py Fri Dec 10 12:17:18 2010 +0100
+++ b/test/data/scripts/script1.py Fri Mar 11 09:46:45 2011 +0100
@@ -1,3 +1,4 @@
-assert 'data/scripts/script1.py' == __file__
-assert '__main__' == __name__
+from os.path import join
+assert __file__.endswith(join('scripts', 'script1.py')), __file__
+assert '__main__' == __name__, __name__
assert [] == __args__, __args__
diff -r 48f468f33704 -r e4580e5f0703 test/data/scripts/script2.py
--- a/test/data/scripts/script2.py Fri Dec 10 12:17:18 2010 +0100
+++ b/test/data/scripts/script2.py Fri Mar 11 09:46:45 2011 +0100
@@ -1,3 +1,4 @@
-assert 'data/scripts/script2.py' == __file__
-assert '__main__' == __name__
+from os.path import join
+assert __file__.endswith(join('scripts', 'script2.py')), __file__
+assert '__main__' == __name__, __name__
assert ['-v'] == __args__, __args__
diff -r 48f468f33704 -r e4580e5f0703 test/data/scripts/script3.py
--- a/test/data/scripts/script3.py Fri Dec 10 12:17:18 2010 +0100
+++ b/test/data/scripts/script3.py Fri Mar 11 09:46:45 2011 +0100
@@ -1,3 +1,4 @@
-assert 'data/scripts/script3.py' == __file__
-assert '__main__' == __name__
+from os.path import join
+assert __file__.endswith(join('scripts', 'script3.py')), __file__
+assert '__main__' == __name__, __name__
assert ['-vd', '-f', 'FILE.TXT'] == __args__, __args__
diff -r 48f468f33704 -r e4580e5f0703 test/data/uppered_rtype.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test/data/uppered_rtype.py Fri Mar 11 09:46:45 2011 +0100
@@ -0,0 +1,6 @@
+
+from yams.buildobjs import RelationDefinition
+
+class ARelation(RelationDefinition):
+ subject = 'CWUser'
+ object = 'CWGroup'
diff -r 48f468f33704 -r e4580e5f0703 test/unittest_cwconfig.py
--- a/test/unittest_cwconfig.py Fri Dec 10 12:17:18 2010 +0100
+++ b/test/unittest_cwconfig.py Fri Mar 11 09:46:45 2011 +0100
@@ -43,7 +43,7 @@
class CubicWebConfigurationTC(TestCase):
def setUp(self):
cleanup_sys_modules([CUSTOM_CUBES_DIR, ApptestConfiguration.CUBES_DIR])
- self.config = ApptestConfiguration('data')
+ self.config = ApptestConfiguration('data', apphome=self.datadir)
self.config._cubes = ('email', 'file')
def tearDown(self):
diff -r 48f468f33704 -r e4580e5f0703 test/unittest_cwctl.py
--- a/test/unittest_cwctl.py Fri Dec 10 12:17:18 2010 +0100
+++ b/test/unittest_cwctl.py Fri Mar 11 09:46:45 2011 +0100
@@ -15,11 +15,9 @@
#
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
-"""
-
-"""
import sys
import os
+from os.path import join
from cStringIO import StringIO
from logilab.common.testlib import TestCase, unittest_main
@@ -54,9 +52,10 @@
'script2.py': ['-v'],
'script3.py': ['-vd', '-f', 'FILE.TXT'],
}
- mih.cmd_process_script('data/scripts/script1.py', funcname=None)
+ mih.cmd_process_script(join(self.datadir, 'scripts', 'script1.py'),
+ funcname=None)
for script, args in scripts.items():
- scriptname = os.path.join('data/scripts/', script)
+ scriptname = os.path.join(self.datadir, 'scripts', script)
self.assert_(os.path.exists(scriptname))
mih.cmd_process_script(scriptname, None, scriptargs=args)
diff -r 48f468f33704 -r e4580e5f0703 test/unittest_dbapi.py
--- a/test/unittest_dbapi.py Fri Dec 10 12:17:18 2010 +0100
+++ b/test/unittest_dbapi.py Fri Mar 11 09:46:45 2011 +0100
@@ -15,13 +15,15 @@
#
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
-"""
+"""unittest for cubicweb.dbapi"""
-"""
from __future__ import with_statement
+
from copy import copy
-from cubicweb import ConnectionError
+from logilab.common import tempattr
+
+from cubicweb import ConnectionError, cwconfig
from cubicweb.dbapi import ProgrammingError
from cubicweb.devtools.testlib import CubicWebTC
@@ -30,7 +32,7 @@
def test_public_repo_api(self):
cnx = self.login('anon')
self.assertEqual(cnx.get_schema(), self.repo.schema)
- self.assertEqual(cnx.source_defs(), {'system': {'adapter': 'native', 'uri': 'system'}})
+ self.assertEqual(cnx.source_defs(), {'system': {'type': 'native', 'uri': 'system'}})
self.restore_connection() # proper way to close cnx
self.assertRaises(ProgrammingError, cnx.get_schema)
self.assertRaises(ProgrammingError, cnx.source_defs)
@@ -48,7 +50,7 @@
def test_api(self):
cnx = self.login('anon')
self.assertEqual(cnx.user(None).login, 'anon')
- self.assertEqual(cnx.describe(1), (u'CWGroup', u'system', None))
+ self.assertEqual(cnx.describe(1), (u'CWSource', u'system', None))
self.restore_connection() # proper way to close cnx
self.assertRaises(ProgrammingError, cnx.user, None)
self.assertRaises(ProgrammingError, cnx.describe, 1)
@@ -68,6 +70,16 @@
self.assertRaises(ProgrammingError, cnx.set_shared_data, 'data', 0)
self.assertRaises(ProgrammingError, cnx.get_shared_data, 'data')
+ def test_web_compatible_request(self):
+ config = cwconfig.CubicWebNoAppConfiguration()
+ with tempattr(self.cnx.vreg, 'config', config):
+ self.cnx.use_web_compatible_requests('http://perdu.com')
+ req = self.cnx.request()
+ self.assertEqual(req.base_url(), 'http://perdu.com')
+ self.assertEqual(req.from_controller(), 'view')
+ self.assertEqual(req.relative_path(), '')
+ req.ajax_replace_url('domid') # don't crash
+ req.user.cw_adapt_to('IBreadCrumbs') # don't crash
if __name__ == '__main__':
from logilab.common.testlib import unittest_main
diff -r 48f468f33704 -r e4580e5f0703 test/unittest_entity.py
--- a/test/unittest_entity.py Fri Dec 10 12:17:18 2010 +0100
+++ b/test/unittest_entity.py Fri Mar 11 09:46:45 2011 +0100
@@ -16,9 +16,7 @@
#
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
-"""unit tests for cubicweb.web.views.entities module
-
-"""
+"""unit tests for cubicweb.web.views.entities module"""
from datetime import datetime
@@ -26,6 +24,8 @@
from cubicweb.devtools.testlib import CubicWebTC
from cubicweb.mttransforms import HAS_TAL
from cubicweb.entities import fetch_config
+from cubicweb.uilib import soup2xhtml
+
class EntityTC(CubicWebTC):
@@ -319,33 +319,33 @@
def test_printable_value_string(self):
e = self.request().create_entity('Card', title=u'rest test', content=u'du :eid:`1:*ReST*`',
- content_format=u'text/rest')
+ content_format=u'text/rest')
self.assertEqual(e.printable_value('content'),
- '
pas fermé'
self.assertEqual(tidy(e.printable_value('content')),
- u'été
été
')
- e['content'] = u'C'est un exemple sérieux'
+ u'yo !! R&D
pas fermé
')
+ e.cw_attr_cache['content'] = u'été
été'
self.assertEqual(tidy(e.printable_value('content')),
- u"C'est un exemple sérieux")
+ u'été
été
')
+ e.cw_attr_cache['content'] = u'C'est un exemple sérieux'
+ self.assertEqual(tidy(e.printable_value('content')),
+ u"C'est un exemple sérieux")
# make sure valid xhtml is left untouched
- e['content'] = u'
'
+ # Caution! current implementation of soup2xhtml strips first div element
+ content = soup2xhtml(e.printable_value('content'), 'utf-8')
+ self.assertMultiLineEqual(content, u'
ms orifice produces weird html
')
def test_fulltextindex(self):
e = self.vreg['etypes'].etype_class('File')(self.request())
- e['description'] = 'du html'
- e['description_format'] = 'text/html'
- e['data'] = Binary('some data')
- e['data_name'] = 'an html file'
- e['data_format'] = 'text/html'
- e['data_encoding'] = 'ascii'
+ e.cw_attr_cache['description'] = 'du html'
+ e.cw_attr_cache['description_format'] = 'text/html'
+ e.cw_attr_cache['data'] = Binary('some data')
+ e.cw_attr_cache['data_name'] = 'an html file'
+ e.cw_attr_cache['data_format'] = 'text/html'
+ e.cw_attr_cache['data_encoding'] = 'ascii'
e._cw.transaction_data = {} # XXX req should be a session
self.assertEqual(e.cw_adapt_to('IFTIndexable').get_words(),
- {'C': [u'du', u'html', 'an', 'html', 'file', u'some', u'data']})
+ {'C': ['an', 'html', 'file', 'du', 'html', 'some', 'data']})
def test_nonregr_relation_cache(self):
@@ -461,7 +451,7 @@
'WHERE U login "admin", S1 name "activated", S2 name "deactivated"')[0][0]
trinfo = self.execute('Any X WHERE X eid %(x)s', {'x': eid}).get_entity(0, 0)
trinfo.complete()
- self.failUnless(isinstance(trinfo['creation_date'], datetime))
+ self.failUnless(isinstance(trinfo.cw_attr_cache['creation_date'], datetime))
self.failUnless(trinfo.cw_relation_cached('from_state', 'subject'))
self.failUnless(trinfo.cw_relation_cached('to_state', 'subject'))
self.failUnless(trinfo.cw_relation_cached('wf_info_for', 'subject'))
@@ -499,7 +489,7 @@
self.assertEqual(card3.rest_path(), 'card/eid/%d' % card3.eid)
card4 = req.create_entity('Card', title=u'pod', wikiid=u'zo?bi')
self.assertEqual(card4.rest_path(), 'card/eid/%d' % card4.eid)
-
+
def test_set_attributes(self):
req = self.request()
@@ -515,7 +505,7 @@
req = self.request()
note = req.create_entity('Note', type=u'z')
metainf = note.cw_metainformation()
- self.assertEqual(metainf, {'source': {'adapter': 'native', 'uri': 'system'}, 'type': u'Note', 'extid': None})
+ self.assertEqual(metainf, {'source': {'type': 'native', 'uri': 'system'}, 'type': u'Note', 'extid': None})
self.assertEqual(note.absolute_url(), 'http://testing.fr/cubicweb/note/%s' % note.eid)
metainf['source'] = metainf['source'].copy()
metainf['source']['base-url'] = 'http://cubicweb2.com/'
diff -r 48f468f33704 -r e4580e5f0703 test/unittest_migration.py
--- a/test/unittest_migration.py Fri Dec 10 12:17:18 2010 +0100
+++ b/test/unittest_migration.py Fri Mar 11 09:46:45 2011 +0100
@@ -15,11 +15,9 @@
#
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
-"""cubicweb.migration unit tests
+"""cubicweb.migration unit tests"""
-"""
-
-from os.path import abspath
+from os.path import abspath, dirname, join
from logilab.common.testlib import TestCase, unittest_main
from cubicweb.devtools import TestServerConfiguration
@@ -32,8 +30,8 @@
def has_entity(self, e_type):
return self.has_key(e_type)
-SMIGRDIR = abspath('data/server_migration') + '/'
-TMIGRDIR = abspath('data/migration') + '/'
+SMIGRDIR = join(dirname(__file__), 'data', 'server_migration') + '/'
+TMIGRDIR = join(dirname(__file__), 'data', 'migration') + '/'
class MigrTestConfig(TestServerConfiguration):
verbosity = 0
@@ -105,7 +103,7 @@
def test_db_creation(self):
"""make sure database can be created"""
- config = ApptestConfiguration('data')
+ config = ApptestConfiguration('data', apphome=self.datadir)
source = config.sources()['system']
self.assertEqual(source['db-driver'], 'sqlite')
cleanup_sqlite(source['db-name'], removetemplate=True)
diff -r 48f468f33704 -r e4580e5f0703 test/unittest_req.py
--- a/test/unittest_req.py Fri Dec 10 12:17:18 2010 +0100
+++ b/test/unittest_req.py Fri Mar 11 09:46:45 2011 +0100
@@ -40,7 +40,7 @@
self.assertEqual(req.build_url('one'), u'http://testing.fr/cubicweb/one')
self.assertEqual(req.build_url(param='ok'), u'http://testing.fr/cubicweb/view?param=ok')
self.assertRaises(AssertionError, req.build_url, 'one', 'two not allowed')
- self.assertRaises(ValueError, req.build_url, 'view', test=None)
+ self.assertRaises(AssertionError, req.build_url, 'view', test=None)
def test_ensure_no_rql(self):
req = RequestSessionBase(None)
diff -r 48f468f33704 -r e4580e5f0703 test/unittest_rqlrewrite.py
--- a/test/unittest_rqlrewrite.py Fri Dec 10 12:17:18 2010 +0100
+++ b/test/unittest_rqlrewrite.py Fri Mar 11 09:46:45 2011 +0100
@@ -26,20 +26,23 @@
from cubicweb.rqlrewrite import RQLRewriter
from cubicweb.devtools import repotest, TestServerConfiguration
-config = TestServerConfiguration('data/rewrite')
-config.bootstrap_cubes()
-schema = config.load_schema()
-from yams.buildobjs import RelationDefinition
-schema.add_relation_def(RelationDefinition(subject='Card', name='in_state', object='State', cardinality='1*'))
-rqlhelper = RQLHelper(schema, special_relations={'eid': 'uid',
- 'has_text': 'fti'})
+def setUpModule(*args):
+ global rqlhelper, schema
+ config = TestServerConfiguration(RQLRewriteTC.datapath('rewrite'))
+ config.bootstrap_cubes()
+ schema = config.load_schema()
+ from yams.buildobjs import RelationDefinition
+ schema.add_relation_def(RelationDefinition(subject='Card', name='in_state', object='State', cardinality='1*'))
-def setup_module(*args):
+ rqlhelper = RQLHelper(schema, special_relations={'eid': 'uid',
+ 'has_text': 'fti'})
repotest.do_monkey_patch()
-def teardown_module(*args):
+def tearDownModule(*args):
repotest.undo_monkey_patch()
+ global rqlhelper, schema
+ del rqlhelper, schema
def eid_func_map(eid):
return {1: 'CWUser',
diff -r 48f468f33704 -r e4580e5f0703 test/unittest_rset.py
--- a/test/unittest_rset.py Fri Dec 10 12:17:18 2010 +0100
+++ b/test/unittest_rset.py Fri Mar 11 09:46:45 2011 +0100
@@ -16,9 +16,7 @@
#
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
-"""unit tests for module cubicweb.utils
-
-"""
+"""unit tests for module cubicweb.utils"""
from urlparse import urlsplit
import pickle
@@ -51,7 +49,7 @@
'Any C where C is Company, C employs P' : [],
}
for rql, relations in queries.items():
- result = list(attr_desc_iterator(parse(rql).children[0]))
+ result = list(attr_desc_iterator(parse(rql).children[0], 0, 0))
self.assertEqual((rql, result), (rql, relations))
def test_relations_description_indexed(self):
@@ -61,8 +59,8 @@
{0: [(2,'employs', 'subject')], 1: [(3,'login', 'subject'), (4,'mail', 'subject')]},
}
for rql, results in queries.items():
- for var_index, relations in results.items():
- result = list(attr_desc_iterator(parse(rql).children[0], var_index))
+ for idx, relations in results.items():
+ result = list(attr_desc_iterator(parse(rql).children[0], idx, idx))
self.assertEqual(result, relations)
@@ -157,13 +155,13 @@
rs.req = self.request()
rs.vreg = self.vreg
- rs2 = rs.sorted_rset(lambda e:e['login'])
+ rs2 = rs.sorted_rset(lambda e:e.cw_attr_cache['login'])
self.assertEqual(len(rs2), 3)
self.assertEqual([login for _, login in rs2], ['adim', 'nico', 'syt'])
# make sure rs is unchanged
self.assertEqual([login for _, login in rs], ['adim', 'syt', 'nico'])
- rs2 = rs.sorted_rset(lambda e:e['login'], reverse=True)
+ rs2 = rs.sorted_rset(lambda e:e.cw_attr_cache['login'], reverse=True)
self.assertEqual(len(rs2), 3)
self.assertEqual([login for _, login in rs2], ['syt', 'nico', 'adim'])
# make sure rs is unchanged
@@ -186,8 +184,7 @@
description=[['CWUser', 'String', 'String']] * 5)
rs.req = self.request()
rs.vreg = self.vreg
-
- rsets = rs.split_rset(lambda e:e['login'])
+ rsets = rs.split_rset(lambda e:e.cw_attr_cache['login'])
self.assertEqual(len(rsets), 3)
self.assertEqual([login for _, login,_ in rsets[0]], ['adim', 'adim'])
self.assertEqual([login for _, login,_ in rsets[1]], ['syt'])
@@ -195,7 +192,7 @@
# make sure rs is unchanged
self.assertEqual([login for _, login,_ in rs], ['adim', 'adim', 'syt', 'nico', 'nico'])
- rsets = rs.split_rset(lambda e:e['login'], return_dict=True)
+ rsets = rs.split_rset(lambda e:e.cw_attr_cache['login'], return_dict=True)
self.assertEqual(len(rsets), 3)
self.assertEqual([login for _, login,_ in rsets['nico']], ['nico', 'nico'])
self.assertEqual([login for _, login,_ in rsets['adim']], ['adim', 'adim'])
@@ -230,12 +227,12 @@
self.request().create_entity('CWUser', login=u'adim', upassword='adim',
surname=u'di mascio', firstname=u'adrien')
e = self.execute('Any X,T WHERE X login "adim", X surname T').get_entity(0, 0)
- self.assertEqual(e['surname'], 'di mascio')
- self.assertRaises(KeyError, e.__getitem__, 'firstname')
- self.assertRaises(KeyError, e.__getitem__, 'creation_date')
+ self.assertEqual(e.cw_attr_cache['surname'], 'di mascio')
+ self.assertRaises(KeyError, e.cw_attr_cache.__getitem__, 'firstname')
+ self.assertRaises(KeyError, e.cw_attr_cache.__getitem__, 'creation_date')
self.assertEqual(pprelcachedict(e._cw_related_cache), [])
e.complete()
- self.assertEqual(e['firstname'], 'adrien')
+ self.assertEqual(e.cw_attr_cache['firstname'], 'adrien')
self.assertEqual(pprelcachedict(e._cw_related_cache), [])
def test_get_entity_advanced(self):
@@ -246,20 +243,20 @@
e = rset.get_entity(0, 0)
self.assertEqual(e.cw_row, 0)
self.assertEqual(e.cw_col, 0)
- self.assertEqual(e['title'], 'zou')
- self.assertRaises(KeyError, e.__getitem__, 'path')
+ self.assertEqual(e.cw_attr_cache['title'], 'zou')
+ self.assertRaises(KeyError, e.cw_attr_cache.__getitem__, 'path')
self.assertEqual(e.view('text'), 'zou')
self.assertEqual(pprelcachedict(e._cw_related_cache), [])
e = rset.get_entity(0, 1)
self.assertEqual(e.cw_row, 0)
self.assertEqual(e.cw_col, 1)
- self.assertEqual(e['login'], 'anon')
- self.assertRaises(KeyError, e.__getitem__, 'firstname')
+ self.assertEqual(e.cw_attr_cache['login'], 'anon')
+ self.assertRaises(KeyError, e.cw_attr_cache.__getitem__, 'firstname')
self.assertEqual(pprelcachedict(e._cw_related_cache),
[])
e.complete()
- self.assertEqual(e['firstname'], None)
+ self.assertEqual(e.cw_attr_cache['firstname'], None)
self.assertEqual(e.view('text'), 'anon')
self.assertEqual(pprelcachedict(e._cw_related_cache),
[])
@@ -282,17 +279,17 @@
rset = self.execute('Any X,U,S,XT,UL,SN WHERE X created_by U, U in_state S, '
'X title XT, S name SN, U login UL, X eid %s' % e.eid)
e = rset.get_entity(0, 0)
- self.assertEqual(e['title'], 'zou')
+ self.assertEqual(e.cw_attr_cache['title'], 'zou')
self.assertEqual(pprelcachedict(e._cw_related_cache),
- [('created_by_subject', [5])])
+ [('created_by_subject', [self.user().eid])])
# first level of recursion
u = e.created_by[0]
- self.assertEqual(u['login'], 'admin')
- self.assertRaises(KeyError, u.__getitem__, 'firstname')
+ self.assertEqual(u.cw_attr_cache['login'], 'admin')
+ self.assertRaises(KeyError, u.cw_attr_cache.__getitem__, 'firstname')
# second level of recursion
s = u.in_state[0]
- self.assertEqual(s['name'], 'activated')
- self.assertRaises(KeyError, s.__getitem__, 'description')
+ self.assertEqual(s.cw_attr_cache['name'], 'activated')
+ self.assertRaises(KeyError, s.cw_attr_cache.__getitem__, 'description')
def test_get_entity_cache_with_left_outer_join(self):
@@ -322,7 +319,7 @@
etype, n = expected[entity.cw_row]
self.assertEqual(entity.__regid__, etype)
attr = etype == 'Bookmark' and 'title' or 'name'
- self.assertEqual(entity[attr], n)
+ self.assertEqual(entity.cw_attr_cache[attr], n)
def test_related_entity_optional(self):
e = self.request().create_entity('Bookmark', title=u'aaaa', path=u'path')
@@ -331,7 +328,7 @@
self.assertEqual(entity, None)
self.assertEqual(rtype, None)
- def test_related_entity_union_subquery(self):
+ def test_related_entity_union_subquery_1(self):
e = self.request().create_entity('Bookmark', title=u'aaaa', path=u'path')
rset = self.execute('Any X,N ORDERBY N WITH X,N BEING '
'((Any X,N WHERE X is CWGroup, X name N)'
@@ -340,10 +337,14 @@
entity, rtype = rset.related_entity(0, 1)
self.assertEqual(entity.eid, e.eid)
self.assertEqual(rtype, 'title')
+ self.assertEqual(entity.title, 'aaaa')
entity, rtype = rset.related_entity(1, 1)
self.assertEqual(entity.__regid__, 'CWGroup')
self.assertEqual(rtype, 'name')
- #
+ self.assertEqual(entity.name, 'guests')
+
+ def test_related_entity_union_subquery_2(self):
+ e = self.request().create_entity('Bookmark', title=u'aaaa', path=u'path')
rset = self.execute('Any X,N ORDERBY N WHERE X is Bookmark WITH X,N BEING '
'((Any X,N WHERE X is CWGroup, X name N)'
' UNION '
@@ -351,7 +352,10 @@
entity, rtype = rset.related_entity(0, 1)
self.assertEqual(entity.eid, e.eid)
self.assertEqual(rtype, 'title')
- #
+ self.assertEqual(entity.title, 'aaaa')
+
+ def test_related_entity_union_subquery_3(self):
+ e = self.request().create_entity('Bookmark', title=u'aaaa', path=u'path')
rset = self.execute('Any X,N ORDERBY N WITH N,X BEING '
'((Any N,X WHERE X is CWGroup, X name N)'
' UNION '
@@ -359,6 +363,18 @@
entity, rtype = rset.related_entity(0, 1)
self.assertEqual(entity.eid, e.eid)
self.assertEqual(rtype, 'title')
+ self.assertEqual(entity.title, 'aaaa')
+
+ def test_related_entity_union_subquery_4(self):
+ e = self.request().create_entity('Bookmark', title=u'aaaa', path=u'path')
+ rset = self.execute('Any X,X, N ORDERBY N WITH X,N BEING '
+ '((Any X,N WHERE X is CWGroup, X name N)'
+ ' UNION '
+ ' (Any X,N WHERE X is Bookmark, X title N))')
+ entity, rtype = rset.related_entity(0, 2)
+ self.assertEqual(entity.eid, e.eid)
+ self.assertEqual(rtype, 'title')
+ self.assertEqual(entity.title, 'aaaa')
def test_related_entity_trap_subquery(self):
req = self.request()
@@ -385,6 +401,14 @@
self.assertEqual(set(e.e_schema.type for e in rset.entities(1)),
set(['CWGroup',]))
+ def test_iter_rows_with_entities(self):
+ rset = self.execute('Any U,UN,G,GN WHERE U in_group G, U login UN, G name GN')
+ # make sure we have at least one element
+ self.failUnless(rset)
+ out = list(rset.iter_rows_with_entities())[0]
+ self.assertEqual( out[0].login, out[1] )
+ self.assertEqual( out[2].name, out[3] )
+
def test_printable_rql(self):
rset = self.execute(u'CWEType X WHERE X final FALSE')
self.assertEqual(rset.printable_rql(),
diff -r 48f468f33704 -r e4580e5f0703 test/unittest_schema.py
--- a/test/unittest_schema.py Fri Dec 10 12:17:18 2010 +0100
+++ b/test/unittest_schema.py Fri Mar 11 09:46:45 2011 +0100
@@ -17,6 +17,8 @@
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
"""unit tests for module cubicweb.schema"""
+from __future__ import with_statement
+
import sys
from os.path import join, isabs, basename, dirname
@@ -72,7 +74,6 @@
('Personne tel Int'),
('Personne fax Int'),
('Personne datenaiss Date'),
- ('Personne TEST Boolean'),
('Personne promo String'),
# real relations
('Personne travaille Societe'),
@@ -80,7 +81,7 @@
('Societe evaluee Note'),
('Personne concerne Affaire'),
('Personne concerne Societe'),
- ('Affaire Concerne Societe'),
+ ('Affaire concerne Societe'),
)
done = {}
for rel in RELS:
@@ -108,17 +109,6 @@
self.failIf(issubclass(RQLUniqueConstraint, RQLConstraint))
self.failUnless(issubclass(RQLConstraint, RQLVocabularyConstraint))
- def test_normalize(self):
- """test that entities, relations and attributes name are normalized
- """
- self.assertEqual(esociete.type, 'Societe')
- self.assertEqual(schema.has_relation('TEST'), 0)
- self.assertEqual(schema.has_relation('test'), 1)
- self.assertEqual(eperson.subjrels['test'].type, 'test')
- self.assertEqual(schema.has_relation('Concerne'), 0)
- self.assertEqual(schema.has_relation('concerne'), 1)
- self.assertEqual(schema.rschema('concerne').type, 'concerne')
-
def test_entity_perms(self):
self.assertEqual(eperson.get_groups('read'), set(('managers', 'users', 'guests')))
self.assertEqual(eperson.get_groups('update'), set(('managers', 'owners',)))
@@ -151,7 +141,7 @@
self.assertEqual(str(expr), 'Any O,U WHERE U has_update_permission O, O eid %(o)s, U eid %(u)s')
loader = CubicWebSchemaLoader()
-config = TestConfiguration('data')
+config = TestConfiguration('data', apphome=DATADIR)
config.bootstrap_cubes()
class SchemaReaderClassTest(TestCase):
@@ -167,31 +157,32 @@
schema = loader.load(config)
self.assert_(isinstance(schema, CubicWebSchema))
self.assertEqual(schema.name, 'data')
- entities = [str(e) for e in schema.entities()]
- entities.sort()
+ entities = sorted([str(e) for e in schema.entities()])
expected_entities = ['BaseTransition', 'Bookmark', 'Boolean', 'Bytes', 'Card',
'Date', 'Datetime', 'Decimal',
'CWCache', 'CWConstraint', 'CWConstraintType', 'CWEType',
'CWAttribute', 'CWGroup', 'EmailAddress', 'CWRelation',
'CWPermission', 'CWProperty', 'CWRType',
+ 'CWSource', 'CWSourceHostConfig',
'CWUniqueTogetherConstraint', 'CWUser',
'ExternalUri', 'File', 'Float', 'Int', 'Interval', 'Note',
'Password', 'Personne',
'RQLExpression',
- 'Societe', 'State', 'String', 'SubNote', 'SubWorkflowExitPoint',
+ 'Societe', 'State', 'StateFull', 'String', 'SubNote', 'SubWorkflowExitPoint',
'Tag', 'Time', 'Transition', 'TrInfo',
'Workflow', 'WorkflowTransition']
self.assertListEqual(entities, sorted(expected_entities))
- relations = [str(r) for r in schema.relations()]
- relations.sort()
+ relations = sorted([str(r) for r in schema.relations()])
expected_relations = ['add_permission', 'address', 'alias', 'allowed_transition',
'bookmarked_by', 'by_transition',
'cardinality', 'comment', 'comment_format',
- 'composite', 'condition', 'connait',
+ 'composite', 'condition', 'config', 'connait',
'constrained_by', 'constraint_of',
'content', 'content_format',
- 'created_by', 'creation_date', 'cstrtype', 'custom_workflow', 'cwuri',
+ 'created_by', 'creation_date', 'cstrtype', 'custom_workflow',
+ 'cwuri', 'cw_source', 'cw_host_config_of',
+ 'cw_support', 'cw_dont_cross', 'cw_may_cross',
'data', 'data_encoding', 'data_format', 'data_name', 'default_workflow', 'defaultval', 'delete_permission',
'description', 'description_format', 'destination_state',
@@ -207,7 +198,7 @@
'label', 'last_login_time', 'login',
- 'mainvars', 'modification_date',
+ 'mainvars', 'match_host', 'modification_date',
'name', 'nom',
@@ -225,13 +216,14 @@
'value',
- 'wf_info_for', 'wikiid', 'workflow_of']
+ 'wf_info_for', 'wikiid', 'workflow_of', 'tr_count']
- self.assertListEqual(relations, expected_relations)
+ self.assertListEqual(relations, sorted(expected_relations))
eschema = schema.eschema('CWUser')
rels = sorted(str(r) for r in eschema.subject_relations())
- self.assertListEqual(rels, ['created_by', 'creation_date', 'custom_workflow', 'cwuri', 'eid',
+ self.assertListEqual(rels, ['created_by', 'creation_date', 'custom_workflow',
+ 'cw_source', 'cwuri', 'eid',
'evaluee', 'firstname', 'has_text', 'identity',
'in_group', 'in_state', 'is',
'is_instance_of', 'last_login_time',
@@ -267,7 +259,7 @@
self.assertEqual([x.expression for x in aschema.get_rqlexprs('update')],
['U has_update_permission X'])
-class BadSchemaRQLExprTC(TestCase):
+class BadSchemaTC(TestCase):
def setUp(self):
self.loader = CubicWebSchemaLoader()
self.loader.defined = {}
@@ -277,9 +269,19 @@
def _test(self, schemafile, msg):
self.loader.handle_file(join(DATADIR, schemafile))
- ex = self.assertRaises(BadSchemaDefinition,
- self.loader._build_schema, 'toto', False)
- self.assertEqual(str(ex), msg)
+ with self.assertRaises(BadSchemaDefinition) as cm:
+ self.loader._build_schema('toto', False)
+ self.assertEqual(str(cm.exception), msg)
+
+ def test_lowered_etype(self):
+ self._test('lowered_etype.py',
+ "'my_etype' is not a valid name for an entity type. It should "
+ "start with an upper cased letter and be followed by at least "
+ "a lower cased letter")
+
+ def test_uppered_rtype(self):
+ self._test('uppered_rtype.py',
+ "'ARelation' is not a valid name for a relation type. It should be lower cased")
def test_rrqlexpr_on_etype(self):
self._test('rrqlexpr_on_eetype.py',
@@ -308,7 +310,7 @@
def test_comparison(self):
self.assertEqual(ERQLExpression('X is CWUser', 'X', 0),
ERQLExpression('X is CWUser', 'X', 0))
- self.assertNotEquals(ERQLExpression('X is CWUser', 'X', 0),
+ self.assertNotEqual(ERQLExpression('X is CWUser', 'X', 0),
ERQLExpression('X is CWGroup', 'X', 0))
class GuessRrqlExprMainVarsTC(TestCase):
diff -r 48f468f33704 -r e4580e5f0703 test/unittest_selectors.py
--- a/test/unittest_selectors.py Fri Dec 10 12:17:18 2010 +0100
+++ b/test/unittest_selectors.py Fri Mar 11 09:46:45 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -16,6 +16,7 @@
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
"""unit tests for selectors mechanism"""
+from __future__ import with_statement
from operator import eq, lt, le, gt
from logilab.common.testlib import TestCase, unittest_main
@@ -24,10 +25,11 @@
from cubicweb.devtools.testlib import CubicWebTC
from cubicweb.appobject import Selector, AndSelector, OrSelector
from cubicweb.selectors import (is_instance, adaptable, match_user_groups,
- multi_lines_rset)
-from cubicweb.interfaces import IDownloadable
+ multi_lines_rset, score_entity, is_in_state,
+ on_transition)
from cubicweb.web import action
+
class _1_(Selector):
def __call__(self, *args, **kwargs):
return 1
@@ -138,6 +140,35 @@
self.assertEqual(selector(None), 0)
+class IsInStateSelectorTC(CubicWebTC):
+ def setup_database(self):
+ wf = self.shell().add_workflow("testwf", 'StateFull', default=True)
+ initial = wf.add_state(u'initial', initial=True)
+ final = wf.add_state(u'final')
+ wf.add_transition(u'forward', (initial,), final)
+
+ def test_initial_state(self):
+ req = self.request()
+ entity = req.create_entity('StateFull')
+ selector = is_in_state(u'initial')
+ self.commit()
+ score = selector(entity.__class__, None, entity=entity)
+ self.assertEqual(score, 1)
+
+ def test_final_state(self):
+ req = self.request()
+ entity = req.create_entity('StateFull')
+ selector = is_in_state(u'initial')
+ self.commit()
+ entity.cw_adapt_to('IWorkflowable').fire_transition(u'forward')
+ self.commit()
+ score = selector(entity.__class__, None, entity=entity)
+ self.assertEqual(score, 0)
+ selector = is_in_state(u'final')
+ score = selector(entity.__class__, None, entity=entity)
+ self.assertEqual(score, 1)
+
+
class ImplementsSelectorTC(CubicWebTC):
def test_etype_priority(self):
req = self.request()
@@ -159,6 +190,131 @@
3)
+class WorkflowSelectorTC(CubicWebTC):
+ def _commit(self):
+ self.commit()
+ self.wf_entity.clear_all_caches()
+
+ def setup_database(self):
+ wf = self.shell().add_workflow("wf_test", 'StateFull', default=True)
+ created = wf.add_state('created', initial=True)
+ validated = wf.add_state('validated')
+ abandoned = wf.add_state('abandoned')
+ wf.add_transition('validate', created, validated, ('managers',))
+ wf.add_transition('forsake', (created, validated,), abandoned, ('managers',))
+
+ def setUp(self):
+ super(WorkflowSelectorTC, self).setUp()
+ self.req = self.request()
+ self.wf_entity = self.req.create_entity('StateFull', name=u'')
+ self.rset = self.wf_entity.as_rset()
+ self.adapter = self.wf_entity.cw_adapt_to('IWorkflowable')
+ self._commit()
+ self.assertEqual(self.adapter.state, 'created')
+ # enable debug mode to state/transition validation on the fly
+ self.vreg.config.debugmode = True
+
+ def tearDown(self):
+ self.vreg.config.debugmode = False
+ super(WorkflowSelectorTC, self).tearDown()
+
+ def test_is_in_state(self):
+ for state in ('created', 'validated', 'abandoned'):
+ selector = is_in_state(state)
+ self.assertEqual(selector(None, self.req, self.rset),
+ state=="created")
+
+ self.adapter.fire_transition('validate')
+ self._commit()
+ self.assertEqual(self.adapter.state, 'validated')
+
+ selector = is_in_state('created')
+ self.assertEqual(selector(None, self.req, self.rset), 0)
+ selector = is_in_state('validated')
+ self.assertEqual(selector(None, self.req, self.rset), 1)
+ selector = is_in_state('validated', 'abandoned')
+ self.assertEqual(selector(None, self.req, self.rset), 1)
+ selector = is_in_state('abandoned')
+ self.assertEqual(selector(None, self.req, self.rset), 0)
+
+ self.adapter.fire_transition('forsake')
+ self._commit()
+ self.assertEqual(self.adapter.state, 'abandoned')
+
+ selector = is_in_state('created')
+ self.assertEqual(selector(None, self.req, self.rset), 0)
+ selector = is_in_state('validated')
+ self.assertEqual(selector(None, self.req, self.rset), 0)
+ selector = is_in_state('validated', 'abandoned')
+ self.assertEqual(selector(None, self.req, self.rset), 1)
+ self.assertEqual(self.adapter.state, 'abandoned')
+ self.assertEqual(selector(None, self.req, self.rset), 1)
+
+ def test_is_in_state_unvalid_names(self):
+ selector = is_in_state("unknown")
+ with self.assertRaises(ValueError) as cm:
+ selector(None, self.req, self.rset)
+ self.assertEqual(str(cm.exception),
+ "wf_test: unknown state(s): unknown")
+ selector = is_in_state("weird", "unknown", "created", "weird")
+ with self.assertRaises(ValueError) as cm:
+ selector(None, self.req, self.rset)
+ self.assertEqual(str(cm.exception),
+ "wf_test: unknown state(s): unknown,weird")
+
+ def test_on_transition(self):
+ for transition in ('validate', 'forsake'):
+ selector = on_transition(transition)
+ self.assertEqual(selector(None, self.req, self.rset), 0)
+
+ self.adapter.fire_transition('validate')
+ self._commit()
+ self.assertEqual(self.adapter.state, 'validated')
+
+ selector = on_transition("validate")
+ self.assertEqual(selector(None, self.req, self.rset), 1)
+ selector = on_transition("validate", "forsake")
+ self.assertEqual(selector(None, self.req, self.rset), 1)
+ selector = on_transition("forsake")
+ self.assertEqual(selector(None, self.req, self.rset), 0)
+
+ self.adapter.fire_transition('forsake')
+ self._commit()
+ self.assertEqual(self.adapter.state, 'abandoned')
+
+ selector = on_transition("validate")
+ self.assertEqual(selector(None, self.req, self.rset), 0)
+ selector = on_transition("validate", "forsake")
+ self.assertEqual(selector(None, self.req, self.rset), 1)
+ selector = on_transition("forsake")
+ self.assertEqual(selector(None, self.req, self.rset), 1)
+
+ def test_on_transition_unvalid_names(self):
+ selector = on_transition("unknown")
+ with self.assertRaises(ValueError) as cm:
+ selector(None, self.req, self.rset)
+ self.assertEqual(str(cm.exception),
+ "wf_test: unknown transition(s): unknown")
+ selector = on_transition("weird", "unknown", "validate", "weird")
+ with self.assertRaises(ValueError) as cm:
+ selector(None, self.req, self.rset)
+ self.assertEqual(str(cm.exception),
+ "wf_test: unknown transition(s): unknown,weird")
+
+ def test_on_transition_with_no_effect(self):
+ """selector will not be triggered with `change_state()`"""
+ self.adapter.change_state('validated')
+ self._commit()
+ self.assertEqual(self.adapter.state, 'validated')
+
+ selector = on_transition("validate")
+ self.assertEqual(selector(None, self.req, self.rset), 0)
+ selector = on_transition("validate", "forsake")
+ self.assertEqual(selector(None, self.req, self.rset), 0)
+ selector = on_transition("forsake")
+ self.assertEqual(selector(None, self.req, self.rset), 0)
+
+
class MatchUserGroupsTC(CubicWebTC):
def test_owners_group(self):
"""tests usage of 'owners' group with match_user_group"""
@@ -245,6 +401,24 @@
yield self.assertEqual, selector(None, self.req, self.rset), assertion
+class ScoreEntitySelectorTC(CubicWebTC):
+
+ def test_intscore_entity_selector(self):
+ req = self.request()
+ rset = req.execute('Any E WHERE E eid 1')
+ selector = score_entity(lambda x: None)
+ self.assertEqual(selector(None, req, rset), 0)
+ selector = score_entity(lambda x: "something")
+ self.assertEqual(selector(None, req, rset), 1)
+ selector = score_entity(lambda x: object)
+ self.assertEqual(selector(None, req, rset), 1)
+ rset = req.execute('Any G LIMIT 2 WHERE G is CWGroup')
+ selector = score_entity(lambda x: 10)
+ self.assertEqual(selector(None, req, rset), 20)
+ selector = score_entity(lambda x: 10, once_is_enough=True)
+ self.assertEqual(selector(None, req, rset), 10)
+
+
if __name__ == '__main__':
unittest_main()
diff -r 48f468f33704 -r e4580e5f0703 test/unittest_uilib.py
--- a/test/unittest_uilib.py Fri Dec 10 12:17:18 2010 +0100
+++ b/test/unittest_uilib.py Fri Mar 11 09:46:45 2011 +0100
@@ -16,17 +16,20 @@
#
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
-"""unittests for cubicweb.uilib
-
-"""
+"""unittests for cubicweb.uilib"""
__docformat__ = "restructuredtext en"
+
+import pkg_resources
from logilab.common.testlib import TestCase, unittest_main
-from logilab.common.tree import Node
+from unittest2 import skipIf
from cubicweb import uilib
+lxml_version = pkg_resources.get_distribution('lxml').version.split('.')
+
+
class UILIBTC(TestCase):
def test_remove_tags(self):
@@ -94,7 +97,15 @@
got = uilib.text_cut(text, 30)
self.assertEqual(got, expected)
+ def test_soup2xhtml_0(self):
+ self.assertEqual(uilib.soup2xhtml('hop\r\nhop', 'ascii'),
+ 'hop\nhop')
+
def test_soup2xhtml_1_1(self):
+ self.assertEqual(uilib.soup2xhtml('hop', 'ascii'),
+ 'hop')
+ self.assertEqual(uilib.soup2xhtml('hop