# HG changeset patch # User Sylvain Thénault # Date 1272019373 -7200 # Node ID b6e250dd7a7d9240c4fd7e8807a5d8ed59302e1b # Parent cb5dfea92285ecb72d6ff60633ae604085b3e6d8# Parent b619531ddbd28707a5cfbaf65af72b63d0808a96 backport stable diff -r b619531ddbd2 -r b6e250dd7a7d .hgtags --- a/.hgtags Fri Apr 23 12:40:48 2010 +0200 +++ b/.hgtags Fri Apr 23 12:42:53 2010 +0200 @@ -119,3 +119,5 @@ 44c7bf90df71dd562e5a7be5ced3019da603d24f cubicweb-debian-version-3.7.3-1 ec23f3ebcd34a92b9898b312f44d56cca748d0d6 cubicweb-version-3.7.4 fefeda65bb83dcc2d775255fe69fdee0e793d135 cubicweb-debian-version-3.7.4-1 +3c703f3245dc7696341ae1d66525554d9fa2d11d cubicweb-version-3.8.0 +24cc65ab2eca05729d66cef3de6f69bb7f9dfa35 cubicweb-debian-version-3.8.0-1 diff -r b619531ddbd2 -r b6e250dd7a7d README --- a/README Fri Apr 23 12:40:48 2010 +0200 +++ b/README Fri Apr 23 12:42:53 2010 +0200 @@ -1,6 +1,15 @@ CubicWeb semantic web framework =============================== +CubicWeb is a entities / relations based knowledge management system +developped at Logilab. + +This package contains: +* a repository server +* a RQL command line client to the repository +* an adaptative modpython interface to the server +* a bunch of other management tools + Install ------- diff -r b619531ddbd2 -r b6e250dd7a7d __pkginfo__.py --- a/__pkginfo__.py Fri Apr 23 12:40:48 2010 +0200 +++ b/__pkginfo__.py Fri Apr 23 12:42:53 2010 +0200 @@ -1,36 +1,21 @@ # pylint: disable-msg=W0622,C0103 """cubicweb global packaging information for the cubicweb knowledge management software + :license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses """ -distname = "cubicweb" -modname = "cubicweb" +modname = distname = "cubicweb" -numversion = (3, 7, 4) +numversion = (3, 8, 0) version = '.'.join(str(num) for num in numversion) -license = 'LGPL' -copyright = '''Copyright (c) 2003-2010 LOGILAB S.A. (Paris, FRANCE). -http://www.logilab.fr/ -- mailto:contact@logilab.fr''' - +description = "a repository of entities / relations for knowledge management" author = "Logilab" author_email = "contact@logilab.fr" - -short_desc = "a repository of entities / relations for knowledge management" -long_desc = """CubicWeb is a entities / relations based knowledge management system -developped at Logilab. - -This package contains: -* a repository server -* a RQL command line client to the repository -* an adaptative modpython interface to the server -* a bunch of other management tools -""" - web = 'http://www.cubicweb.org' ftp = 'ftp://ftp.logilab.org/pub/cubicweb' -pyversions = ['2.5', '2.6'] +license = 'LGPL' classifiers = [ 'Environment :: Web Environment', @@ -39,6 +24,32 @@ 'Programming Language :: JavaScript', ] +__depends__ = { + 'logilab-common': '>= 0.50.0', + 'logilab-mtconverter': '>= 0.6.0', + 'rql': '>= 0.26.0', + 'yams': '>= 0.28.1', + 'docutils': '>= 0.6', + #gettext # for xgettext, msgcat, etc... 
+ # web dependancies + 'simplejson': '>= 2.0.9', + 'lxml': '', + 'Twisted': '', + # XXX graphviz + # server dependencies + 'logilab-database': '', + 'pysqlite': '>= 2.5.5', # XXX install pysqlite2 + } + +__recommends__ = { + 'Pyro': '>= 3.9.1', + 'PIL': '', # for captcha + 'pycrypto': '', # for crypto extensions + 'fyzz': '>= 0.1.0', # for sparql + 'vobject': '>= 0.6.0', # for ical view + #'Products.FCKeditor':'', + #'SimpleTAL':'>= 4.1.6', + } import sys from os import listdir, environ @@ -49,57 +60,53 @@ if not s.endswith('.bat')] include_dirs = [join('test', 'data'), join('server', 'test', 'data'), + join('hooks', 'test', 'data'), join('web', 'test', 'data'), join('devtools', 'test', 'data'), 'skeleton'] -entities_dir = 'entities' -schema_dir = 'schemas' -sobjects_dir = 'sobjects' -server_migration_dir = join('misc', 'migration') -data_dir = join('web', 'data') -wdoc_dir = join('web', 'wdoc') -wdocimages_dir = join(wdoc_dir, 'images') -views_dir = join('web', 'views') -i18n_dir = 'i18n' +_server_migration_dir = join('misc', 'migration') +_data_dir = join('web', 'data') +_wdoc_dir = join('web', 'wdoc') +_wdocimages_dir = join(_wdoc_dir, 'images') +_views_dir = join('web', 'views') +_i18n_dir = 'i18n' -if environ.get('APYCOT_ROOT'): +_pyversion = '.'.join(str(num) for num in sys.version_info[0:2]) +if '--home' in sys.argv: # --home install - pydir = 'python' + pydir = 'python' + _pyversion else: - python_version = '.'.join(str(num) for num in sys.version_info[0:2]) - pydir = join('python' + python_version, 'site-packages') + pydir = join('python' + _pyversion, 'site-packages') try: data_files = [ - # common data - #[join('share', 'cubicweb', 'entities'), - # [join(entities_dir, filename) for filename in listdir(entities_dir)]], # server data [join('share', 'cubicweb', 'schemas'), - [join(schema_dir, filename) for filename in listdir(schema_dir)]], - #[join('share', 'cubicweb', 'sobjects'), - # [join(sobjects_dir, filename) for filename in listdir(sobjects_dir)]], + [join('schemas', filename) for filename in listdir('schemas')]], [join('share', 'cubicweb', 'migration'), - [join(server_migration_dir, filename) - for filename in listdir(server_migration_dir)]], + [join(_server_migration_dir, filename) + for filename in listdir(_server_migration_dir)]], # web data [join('share', 'cubicweb', 'cubes', 'shared', 'data'), - [join(data_dir, fname) for fname in listdir(data_dir) if not isdir(join(data_dir, fname))]], + [join(_data_dir, fname) for fname in listdir(_data_dir) + if not isdir(join(_data_dir, fname))]], [join('share', 'cubicweb', 'cubes', 'shared', 'data', 'timeline'), - [join(data_dir, 'timeline', fname) for fname in listdir(join(data_dir, 'timeline'))]], + [join(_data_dir, 'timeline', fname) for fname in listdir(join(_data_dir, 'timeline'))]], [join('share', 'cubicweb', 'cubes', 'shared', 'data', 'images'), - [join(data_dir, 'images', fname) for fname in listdir(join(data_dir, 'images'))]], + [join(_data_dir, 'images', fname) for fname in listdir(join(_data_dir, 'images'))]], [join('share', 'cubicweb', 'cubes', 'shared', 'wdoc'), - [join(wdoc_dir, fname) for fname in listdir(wdoc_dir) if not isdir(join(wdoc_dir, fname))]], + [join(_wdoc_dir, fname) for fname in listdir(_wdoc_dir) + if not isdir(join(_wdoc_dir, fname))]], [join('share', 'cubicweb', 'cubes', 'shared', 'wdoc', 'images'), - [join(wdocimages_dir, fname) for fname in listdir(wdocimages_dir)]], - # XXX: .pt install should be handled properly in a near future version + [join(_wdocimages_dir, fname) for fname in 
listdir(_wdocimages_dir)]], + [join('share', 'cubicweb', 'cubes', 'shared', 'i18n'), + [join(_i18n_dir, fname) for fname in listdir(_i18n_dir)]], + # XXX: drop .pt files [join('lib', pydir, 'cubicweb', 'web', 'views'), - [join(views_dir, fname) for fname in listdir(views_dir) if fname.endswith('.pt')]], - [join('share', 'cubicweb', 'cubes', 'shared', 'i18n'), - [join(i18n_dir, fname) for fname in listdir(i18n_dir)]], + [join(_views_dir, fname) for fname in listdir(_views_dir) + if fname.endswith('.pt')]], # skeleton ] except OSError: diff -r b619531ddbd2 -r b6e250dd7a7d _exceptions.py --- a/_exceptions.py Fri Apr 23 12:40:48 2010 +0200 +++ b/_exceptions.py Fri Apr 23 12:42:53 2010 +0200 @@ -52,9 +52,6 @@ """raised when when an attempt to establish a connection failed do to wrong connection information (login / password or other authentication token) """ - def __init__(self, *args, **kwargs): - super(AuthenticationError, self).__init__(*args) - self.__dict__.update(kwargs) class BadConnectionId(ConnectionError): """raised when a bad connection id is given""" diff -r b619531ddbd2 -r b6e250dd7a7d appobject.py --- a/appobject.py Fri Apr 23 12:40:48 2010 +0200 +++ b/appobject.py Fri Apr 23 12:42:53 2010 +0200 @@ -98,14 +98,13 @@ def __rand__(self, other): return AndSelector(other, self) def __iand__(self, other): - raise NotImplementedError('cant use inplace & (binary and)') - + return AndSelector(self, other) def __or__(self, other): return OrSelector(self, other) def __ror__(self, other): return OrSelector(other, self) def __ior__(self, other): - raise NotImplementedError('cant use inplace | (binary or)') + return OrSelector(self, other) def __invert__(self): return NotSelector(self) diff -r b619531ddbd2 -r b6e250dd7a7d cwconfig.py --- a/cwconfig.py Fri Apr 23 12:40:48 2010 +0200 +++ b/cwconfig.py Fri Apr 23 12:42:53 2010 +0200 @@ -124,12 +124,11 @@ import sys import os import logging -import tempfile from smtplib import SMTP from threading import Lock -from os.path import exists, join, expanduser, abspath, normpath, basename, isdir +from os.path import (exists, join, expanduser, abspath, normpath, + basename, isdir, dirname) from warnings import warn - from logilab.common.decorators import cached, classproperty from logilab.common.deprecation import deprecated from logilab.common.logging_ext import set_log_methods, init_log @@ -177,6 +176,23 @@ % (directory, modes)) return modes[0] +def _find_prefix(start_path=CW_SOFTWARE_ROOT): + """Runs along the parent directories of *start_path* (default to cubicweb source directory) + looking for one containing a 'share/cubicweb' directory. + The first matching directory is assumed as the prefix installation of cubicweb + + Returns the matching prefix or None. 
+ """ + prefix = start_path + old_prefix = None + if not isdir(start_path): + prefix = dirname(start_path) + while not isdir(join(prefix, 'share', 'cubicweb')) and prefix != old_prefix: + old_prefix = prefix + prefix = dirname(prefix) + if isdir(join(prefix, 'share', 'cubicweb')): + return prefix + return sys.prefix # persistent options definition PERSISTENT_OPTIONS = ( @@ -249,6 +265,11 @@ CWDEV = exists(join(CW_SOFTWARE_ROOT, '.hg')) +try: + _INSTALL_PREFIX = os.environ['CW_INSTALL_PREFIX'] +except KeyError: + _INSTALL_PREFIX = _find_prefix() + class CubicWebNoAppConfiguration(ConfigurationMixIn): """base class for cubicweb configuration without a specific instance directory """ @@ -262,53 +283,44 @@ # debug mode debugmode = False - if os.environ.get('APYCOT_ROOT'): - mode = 'test' - # allow to test cubes within apycot using cubicweb not installed by - # apycot - if __file__.startswith(os.environ['APYCOT_ROOT']): - CUBES_DIR = '%(APYCOT_ROOT)s/local/share/cubicweb/cubes/' % os.environ - # create __init__ file - file(join(CUBES_DIR, '__init__.py'), 'w').close() - else: - CUBES_DIR = '/usr/share/cubicweb/cubes/' - elif (CWDEV and _forced_mode != 'system'): + + if (CWDEV and _forced_mode != 'system'): mode = 'user' - CUBES_DIR = abspath(normpath(join(CW_SOFTWARE_ROOT, '../cubes'))) + _CUBES_DIR = join(CW_SOFTWARE_ROOT, '../cubes') else: - if _forced_mode == 'user': - mode = 'user' - else: - mode = 'system' - CUBES_DIR = '/usr/share/cubicweb/cubes/' + mode = _forced_mode or 'system' + _CUBES_DIR = join(_INSTALL_PREFIX, 'share', 'cubicweb', 'cubes') + + CUBES_DIR = env_path('CW_CUBES_DIR', _CUBES_DIR, 'cubes', checkexists=False) + CUBES_PATH = os.environ.get('CW_CUBES_PATH', '').split(os.pathsep) options = ( ('log-threshold', {'type' : 'string', # XXX use a dedicated type? 'default': 'WARNING', 'help': 'server\'s log level', - 'group': 'main', 'inputlevel': 1, + 'group': 'main', 'level': 1, }), # pyro options ('pyro-instance-id', {'type' : 'string', 'default': Method('default_instance_id'), 'help': 'identifier of the CubicWeb instance in the Pyro name server', - 'group': 'pyro', 'inputlevel': 1, + 'group': 'pyro', 'level': 1, }), ('pyro-ns-host', {'type' : 'string', 'default': '', 'help': 'Pyro name server\'s host. If not set, will be detected by a \ broadcast query. 
It may contains port information using : notation.', - 'group': 'pyro', 'inputlevel': 1, + 'group': 'pyro', 'level': 1, }), ('pyro-ns-group', {'type' : 'string', 'default': 'cubicweb', 'help': 'Pyro name server\'s group where the repository will be \ registered.', - 'group': 'pyro', 'inputlevel': 1, + 'group': 'pyro', 'level': 1, }), # common configuration options which are potentially required as soon as # you're using "base" application objects (ie to really server/web @@ -317,13 +329,13 @@ {'type' : 'string', 'default': None, 'help': 'web server root url', - 'group': 'main', 'inputlevel': 1, + 'group': 'main', 'level': 1, }), ('allow-email-login', {'type' : 'yn', 'default': False, 'help': 'allow users to login with their primary email if set', - 'group': 'main', 'inputlevel': 2, + 'group': 'main', 'level': 2, }), ('use-request-subdomain', {'type' : 'yn', @@ -331,18 +343,17 @@ 'help': ('if set, base-url subdomain is replaced by the request\'s ' 'host, to help managing sites with several subdomains in a ' 'single cubicweb instance'), - 'group': 'main', 'inputlevel': 1, + 'group': 'main', 'level': 1, }), ('mangle-emails', {'type' : 'yn', 'default': False, 'help': "don't display actual email addresses but mangle them if \ this option is set to yes", - 'group': 'email', 'inputlevel': 2, + 'group': 'email', 'level': 2, }), ) # static and class methods used to get instance independant resources ## - @staticmethod def cubicweb_version(): """return installed cubicweb version""" @@ -374,28 +385,28 @@ @classmethod def available_cubes(cls): + import re cubes = set() for directory in cls.cubes_search_path(): if not exists(directory): cls.error('unexistant directory in cubes search path: %s' - % directory) + % directory) continue for cube in os.listdir(directory): - if isdir(join(directory, cube)) and not cube == 'shared': + if cube == 'shared': + continue + if not re.match('[_A-Za-z][_A-Za-z0-9]*$', cube): + continue # skip invalid python package name + cubedir = join(directory, cube) + if isdir(cubedir) and exists(join(cubedir, '__init__.py')): cubes.add(cube) return sorted(cubes) @classmethod def cubes_search_path(cls): """return the path of directories where cubes should be searched""" - path = [] - try: - for directory in os.environ['CW_CUBES_PATH'].split(os.pathsep): - directory = abspath(normpath(directory)) - if exists(directory) and not directory in path: - path.append(directory) - except KeyError: - pass + path = [abspath(normpath(directory)) for directory in cls.CUBES_PATH + if directory.strip() and exists(directory.strip())] if not cls.CUBES_DIR in path and exists(cls.CUBES_DIR): path.append(cls.CUBES_DIR) return path @@ -411,7 +422,7 @@ @classmethod def cube_dir(cls, cube): """return the cube directory for the given cube id, - raise ConfigurationError if it doesn't exists + raise `ConfigurationError` if it doesn't exists """ for directory in cls.cubes_search_path(): cubedir = join(directory, cube) @@ -429,10 +440,12 @@ """return the information module for the given cube""" cube = CW_MIGRATION_MAP.get(cube, cube) try: - return getattr(__import__('cubes.%s.__pkginfo__' % cube), cube).__pkginfo__ + parent = __import__('cubes.%s.__pkginfo__' % cube) + return getattr(parent, cube).__pkginfo__ except Exception, ex: - raise ConfigurationError('unable to find packaging information for ' - 'cube %s (%s: %s)' % (cube, ex.__class__.__name__, ex)) + raise ConfigurationError( + 'unable to find packaging information for cube %s (%s: %s)' + % (cube, ex.__class__.__name__, ex)) @classmethod def 
cube_version(cls, cube): @@ -444,14 +457,43 @@ return Version(version) @classmethod + def _cube_deps(cls, cube, key, oldkey): + """return cubicweb cubes used by the given cube""" + pkginfo = cls.cube_pkginfo(cube) + try: + # explicit __xxx_cubes__ attribute + deps = getattr(pkginfo, key) + except AttributeError: + # deduce cubes from generic __xxx__ attribute + try: + gendeps = getattr(pkginfo, key.replace('_cubes', '')) + except AttributeError: + # bw compat + if hasattr(pkginfo, oldkey): + warn('[3.8] cube %s: %s is deprecated, use %s dict' + % (cube, oldkey, key), DeprecationWarning) + deps = getattr(pkginfo, oldkey) + else: + deps = {} + else: + deps = dict( (x[len('cubicweb-'):], v) + for x, v in gendeps.iteritems() + if x.startswith('cubicweb-')) + if not isinstance(deps, dict): + deps = dict((key, None) for key in deps) + warn('[3.8] cube %s should define %s as a dict' % (cube, key), + DeprecationWarning) + return deps + + @classmethod def cube_dependencies(cls, cube): """return cubicweb cubes used by the given cube""" - return getattr(cls.cube_pkginfo(cube), '__use__', ()) + return cls._cube_deps(cube, '__depends_cubes__', '__use__') @classmethod def cube_recommends(cls, cube): """return cubicweb cubes recommended by the given cube""" - return getattr(cls.cube_pkginfo(cube), '__recommend__', ()) + return cls._cube_deps(cube, '__recommends_cubes__', '__recommend__') @classmethod def expand_cubes(cls, cubes, with_recommends=False): @@ -480,31 +522,19 @@ """reorder cubes from the top level cubes to inner dependencies cubes """ - from logilab.common.graph import get_cycles + from logilab.common.graph import ordered_nodes, UnorderableGraph graph = {} for cube in cubes: cube = CW_MIGRATION_MAP.get(cube, cube) - deps = cls.cube_dependencies(cube) + \ - cls.cube_recommends(cube) - graph[cube] = set(dep for dep in deps if dep in cubes) - cycles = get_cycles(graph) - if cycles: - cycles = '\n'.join(' -> '.join(cycle) for cycle in cycles) + graph[cube] = set(dep for dep in cls.cube_dependencies(cube) + if dep in cubes) + graph[cube] |= set(dep for dep in cls.cube_recommends(cube) + if dep in cubes) + try: + return ordered_nodes(graph) + except UnorderableGraph, ex: raise ConfigurationError('cycles in cubes dependencies: %s' - % cycles) - cubes = [] - while graph: - # sorted to get predictable results - for cube, deps in sorted(graph.items()): - if not deps: - cubes.append(cube) - del graph[cube] - for deps in graph.itervalues(): - try: - deps.remove(cube) - except KeyError: - continue - return tuple(reversed(cubes)) + % ex.cycles) @classmethod def cls_adjust_sys_path(cls): @@ -634,6 +664,7 @@ cw_rest_init() def adjust_sys_path(self): + # overriden in CubicWebConfiguration self.cls_adjust_sys_path() def init_log(self, logthreshold=None, debug=False, @@ -683,35 +714,24 @@ """ return None + class CubicWebConfiguration(CubicWebNoAppConfiguration): """base class for cubicweb server and web configurations""" - INSTANCES_DATA_DIR = None - if os.environ.get('APYCOT_ROOT'): - root = os.environ['APYCOT_ROOT'] - REGISTRY_DIR = '%s/etc/cubicweb.d/' % root - if not exists(REGISTRY_DIR): - os.makedirs(REGISTRY_DIR) - RUNTIME_DIR = tempfile.gettempdir() - # allow to test cubes within apycot using cubicweb not installed by - # apycot - if __file__.startswith(os.environ['APYCOT_ROOT']): - MIGRATION_DIR = '%s/local/share/cubicweb/migration/' % root + if CubicWebNoAppConfiguration.mode == 'user': + _INSTANCES_DIR = expanduser('~/etc/cubicweb.d/') + else: #mode = 'system' + if _INSTALL_PREFIX == '/usr': + 
_INSTANCES_DIR = '/etc/cubicweb.d/' else: - MIGRATION_DIR = '/usr/share/cubicweb/migration/' - else: - if CubicWebNoAppConfiguration.mode == 'user': - REGISTRY_DIR = expanduser('~/etc/cubicweb.d/') - RUNTIME_DIR = tempfile.gettempdir() - INSTANCES_DATA_DIR = REGISTRY_DIR - else: #mode = 'system' - REGISTRY_DIR = '/etc/cubicweb.d/' - RUNTIME_DIR = '/var/run/cubicweb/' - INSTANCES_DATA_DIR = '/var/lib/cubicweb/instances/' - if CWDEV: - MIGRATION_DIR = join(CW_SOFTWARE_ROOT, 'misc', 'migration') - else: - MIGRATION_DIR = '/usr/share/cubicweb/migration/' + _INSTANCES_DIR = join(_INSTALL_PREFIX, 'etc', 'cubicweb.d') + + if os.environ.get('APYCOT_ROOT'): + _cubes_init = join(CubicWebNoAppConfiguration.CUBES_DIR, '__init__.py') + if not exists(_cubes_init): + file(join(_cubes_init), 'w').close() + if not exists(_INSTANCES_DIR): + os.makedirs(_INSTANCES_DIR) # for some commands (creation...) we don't want to initialize gettext set_language = True @@ -723,57 +743,51 @@ {'type' : 'string', 'default': Method('default_log_file'), 'help': 'file where output logs should be written', - 'group': 'main', 'inputlevel': 2, + 'group': 'main', 'level': 2, }), # email configuration ('smtp-host', {'type' : 'string', 'default': 'mail', 'help': 'hostname of the SMTP mail server', - 'group': 'email', 'inputlevel': 1, + 'group': 'email', 'level': 1, }), ('smtp-port', {'type' : 'int', 'default': 25, 'help': 'listening port of the SMTP mail server', - 'group': 'email', 'inputlevel': 1, + 'group': 'email', 'level': 1, }), ('sender-name', {'type' : 'string', 'default': Method('default_instance_id'), 'help': 'name used as HELO name for outgoing emails from the \ repository.', - 'group': 'email', 'inputlevel': 2, + 'group': 'email', 'level': 2, }), ('sender-addr', {'type' : 'string', 'default': 'cubicweb@mydomain.com', 'help': 'email address used as HELO address for outgoing emails from \ the repository', - 'group': 'email', 'inputlevel': 1, + 'group': 'email', 'level': 1, }), ) @classmethod - def runtime_dir(cls): - """run time directory for pid file...""" - return env_path('CW_RUNTIME_DIR', cls.RUNTIME_DIR, 'run time') - - @classmethod - def registry_dir(cls): + def instances_dir(cls): """return the control directory""" - return env_path('CW_INSTANCES_DIR', cls.REGISTRY_DIR, 'registry') - - @classmethod - def instance_data_dir(cls): - """return the instance data directory""" - return env_path('CW_INSTANCES_DATA_DIR', cls.INSTANCES_DATA_DIR, - 'additional data') + return env_path('CW_INSTANCES_DIR', cls._INSTANCES_DIR, 'registry') @classmethod def migration_scripts_dir(cls): """cubicweb migration scripts directory""" - return env_path('CW_MIGRATION_DIR', cls.MIGRATION_DIR, 'migration') + if CWDEV: + return join(CW_SOFTWARE_ROOT, 'misc', 'migration') + mdir = join(_INSTALL_PREFIX, 'share', 'cubicweb', 'migration') + if not exists(mdir): + raise ConfigurationError('migration path %s doesn\'t exist' % mdir) + return mdir @classmethod def config_for(cls, appid, config=None): @@ -796,9 +810,10 @@ """return the home directory of the instance with the given instance id """ - home = join(cls.registry_dir(), appid) + home = join(cls.instances_dir(), appid) if not exists(home): - raise ConfigurationError('no such instance %s (check it exists with "cubicweb-ctl list")' % appid) + raise ConfigurationError('no such instance %s (check it exists with' + ' "cubicweb-ctl list")' % appid) return home MODES = ('common', 'repository', 'Any', 'web') @@ -821,7 +836,9 @@ def default_log_file(self): """return default path to the log file of the 
instance'server""" if self.mode == 'user': - basepath = join(tempfile.gettempdir(), '%s-%s' % (basename(self.appid), self.name)) + import tempfile + basepath = join(tempfile.gettempdir(), '%s-%s' % ( + basename(self.appid), self.name)) path = basepath + '.log' i = 1 while exists(path) and i < 100: # arbitrary limit to avoid infinite loop @@ -836,7 +853,13 @@ def default_pid_file(self): """return default path to the pid file of the instance'server""" - return join(self.runtime_dir(), '%s-%s.pid' % (self.appid, self.name)) + if self.mode == 'system': + # XXX not under _INSTALL_PREFIX, right? + rtdir = env_path('CW_RUNTIME_DIR', '/var/run/cubicweb/', 'run time') + else: + import tempfile + rtdir = env_path('CW_RUNTIME_DIR', tempfile.gettempdir(), 'run time') + return join(rtdir, '%s-%s.pid' % (self.appid, self.name)) # instance methods used to get instance specific resources ############# @@ -856,11 +879,17 @@ @property def apphome(self): - return join(self.registry_dir(), self.appid) + return join(self.instances_dir(), self.appid) @property def appdatahome(self): - return join(self.instance_data_dir(), self.appid) + if self.mode == 'system': + # XXX not under _INSTALL_PREFIX, right? + iddir = '/var/lib/cubicweb/instances/' + else: + iddir = self.instances_dir() + iddir = env_path('CW_INSTANCES_DATA_DIR', iddir, 'additional data') + return join(iddir, self.appid) def init_cubes(self, cubes): assert self._cubes is None, self._cubes @@ -925,7 +954,8 @@ if exists(sitefile) and not sitefile in self._site_loaded: self._load_site_cubicweb(sitefile) self._site_loaded.add(sitefile) - self.warning('[3.5] site_erudi.py is deprecated, should be renamed to site_cubicweb.py') + self.warning('[3.5] site_erudi.py is deprecated, should be ' + 'renamed to site_cubicweb.py') def _load_site_cubicweb(self, sitefile): # XXX extrapath argument to load_module_from_file only in lgc > 0.46 diff -r b619531ddbd2 -r b6e250dd7a7d cwctl.py --- a/cwctl.py Fri Apr 23 12:40:48 2010 +0200 +++ b/cwctl.py Fri Apr 23 12:42:53 2010 +0200 @@ -13,6 +13,7 @@ # possible (for cubicweb-ctl reactivity, necessary for instance for usable bash # completion). So import locally in command helpers. import sys +from warnings import warn from os import remove, listdir, system, pathsep try: from os import kill, getpgid @@ -85,7 +86,7 @@ Instance used by another one should appears first in the file (one instance per line) """ - regdir = cwcfg.registry_dir() + regdir = cwcfg.instances_dir() _allinstances = list_instances(regdir) if isfile(join(regdir, 'startorder')): allinstances = [] @@ -119,29 +120,33 @@ self.run_args(args, askconfirm) def run_args(self, args, askconfirm): + status = 0 for appid in args: if askconfirm: print '*'*72 if not ASK.confirm('%s instance %r ?' 
% (self.name, appid)): continue - self.run_arg(appid) + status = max(status, self.run_arg(appid)) + sys.exit(status) def run_arg(self, appid): cmdmeth = getattr(self, '%s_instance' % self.name) try: - cmdmeth(appid) + status = cmdmeth(appid) except (KeyboardInterrupt, SystemExit): print >> sys.stderr, '%s aborted' % self.name - sys.exit(2) # specific error code + return 2 # specific error code except (ExecutionError, ConfigurationError), ex: print >> sys.stderr, 'instance %s not %s: %s' % ( appid, self.actionverb, ex) + status = 4 except Exception, ex: import traceback traceback.print_exc() print >> sys.stderr, 'instance %s not %s: %s' % ( appid, self.actionverb, ex) - + status = 8 + return status class InstanceCommandFork(InstanceCommand): """Same as `InstanceCommand`, but command is forked in a new environment @@ -168,86 +173,6 @@ # base commands ############################################################### -def version_strictly_lower(a, b): - from logilab.common.changelog import Version - if a: - a = Version(a) - if b: - b = Version(b) - return a < b - -def max_version(a, b): - from logilab.common.changelog import Version - return str(max(Version(a), Version(b))) - -class ConfigurationProblem(object): - """Each cube has its own list of dependencies on other cubes/versions. - - The ConfigurationProblem is used to record the loaded cubes, then to detect - inconsistencies in their dependencies. - - See configuration management on wikipedia for litterature. - """ - - def __init__(self): - self.cubes = {} - - def add_cube(self, name, info): - self.cubes[name] = info - - def solve(self): - self.warnings = [] - self.errors = [] - self.read_constraints() - for cube, versions in sorted(self.constraints.items()): - oper, version = None, None - # simplify constraints - if versions: - for constraint in versions: - op, ver = constraint - if oper is None: - oper = op - version = ver - elif op == '>=' and oper == '>=': - version = max_version(ver, version) - else: - print 'unable to handle this case', oper, version, op, ver - # "solve" constraint satisfaction problem - if cube not in self.cubes: - self.errors.append( ('add', cube, version) ) - elif versions: - lower_strict = version_strictly_lower(self.cubes[cube].version, version) - if oper in ('>=','='): - if lower_strict: - self.errors.append( ('update', cube, version) ) - else: - print 'unknown operator', oper - - def read_constraints(self): - self.constraints = {} - self.reverse_constraints = {} - for cube, info in self.cubes.items(): - if hasattr(info,'__depends_cubes__'): - use = info.__depends_cubes__ - if not isinstance(use, dict): - use = dict((key, None) for key in use) - self.warnings.append('cube %s should define __depends_cubes__ as a dict not a list') - elif hasattr(info, '__use__'): - self.warnings.append('cube %s should define __depends_cubes__' % cube) - use = dict((key, None) for key in info.__use__) - else: - continue - for name, constraint in use.items(): - self.constraints.setdefault(name,set()) - if constraint: - try: - oper, version = constraint.split() - self.constraints[name].add( (oper, version) ) - except: - self.warnings.append('cube %s depends on %s but constraint badly formatted: %s' - % (cube, name, constraint)) - self.reverse_constraints.setdefault(name, set()).add(cube) - class ListCommand(Command): """List configurations, cubes and instances. 
@@ -264,6 +189,7 @@ """run the command with its specific arguments""" if args: raise BadCommandUsage('Too much arguments') + from cubicweb.migration import ConfigurationProblem print 'CubicWeb %s (%s mode)' % (cwcfg.cubicweb_version(), cwcfg.mode) print print 'Available configurations:' @@ -275,7 +201,7 @@ continue print ' ', line print - cfgpb = ConfigurationProblem() + cfgpb = ConfigurationProblem(cwcfg) try: cubesdir = pathsep.join(cwcfg.cubes_search_path()) namesize = max(len(x) for x in cwcfg.available_cubes()) @@ -286,26 +212,31 @@ else: print 'Available cubes (%s):' % cubesdir for cube in cwcfg.available_cubes(): - if cube in ('CVS', '.svn', 'shared', '.hg'): - continue try: tinfo = cwcfg.cube_pkginfo(cube) tversion = tinfo.version - cfgpb.add_cube(cube, tinfo) + cfgpb.add_cube(cube, tversion) except ConfigurationError: tinfo = None tversion = '[missing cube information]' print '* %s %s' % (cube.ljust(namesize), tversion) if self.config.verbose: - shortdesc = tinfo and (getattr(tinfo, 'short_desc', '') - or tinfo.__doc__) - if shortdesc: - print ' '+ ' \n'.join(shortdesc.splitlines()) + if tinfo: + descr = getattr(tinfo, 'description', '') + if not descr: + descr = getattr(tinfo, 'short_desc', '') + if descr: + warn('[3.8] short_desc is deprecated, update %s' + ' pkginfo' % cube, DeprecationWarning) + else: + descr = tinfo.__doc__ + if descr: + print ' '+ ' \n'.join(descr.splitlines()) modes = detect_available_modes(cwcfg.cube_dir(cube)) print ' available modes: %s' % ', '.join(modes) print try: - regdir = cwcfg.registry_dir() + regdir = cwcfg.instances_dir() except ConfigurationError, ex: print 'No instance available:', ex print @@ -410,7 +341,7 @@ helper.bootstrap(cubes, self.config.config_level) # input for cubes specific options for section in set(sect.lower() for sect, opt, optdict in config.all_options() - if optdict.get('inputlevel') <= self.config.config_level): + if optdict.get('level') <= self.config.config_level): if section not in ('main', 'email', 'pyro'): print '\n' + underline_title('%s options' % section) config.input_config(section, self.config.config_level) @@ -613,7 +544,7 @@ actionverb = 'restarted' def run_args(self, args, askconfirm): - regdir = cwcfg.registry_dir() + regdir = cwcfg.instances_dir() if not isfile(join(regdir, 'startorder')) or len(args) <= 1: # no specific startorder super(RestartInstanceCommand, self).run_args(args, askconfirm) @@ -667,6 +598,7 @@ @staticmethod def status_instance(appid): """print running status information for an instance""" + status = 0 for mode in cwcfg.possible_configurations(appid): config = cwcfg.config_for(appid, mode) print '[%s-%s]' % (appid, mode), @@ -677,6 +609,7 @@ continue if not exists(pidf): print "doesn't seem to be running" + status = 1 continue pid = int(open(pidf).read().strip()) # trick to guess whether or not the process is running @@ -684,9 +617,10 @@ getpgid(pid) except OSError: print "should be running with pid %s but the process can not be found" % pid + status = 1 continue print "running with pid %s" % (pid) - + return status class UpgradeInstanceCommand(InstanceCommandFork): """Upgrade an instance after cubicweb and/or component(s) upgrade. 
@@ -955,7 +889,7 @@ def run(self, args): """run the command with its specific arguments""" - regdir = cwcfg.registry_dir() + regdir = cwcfg.instances_dir() for appid in sorted(listdir(regdir)): print appid diff -r b619531ddbd2 -r b6e250dd7a7d cwvreg.py --- a/cwvreg.py Fri Apr 23 12:40:48 2010 +0200 +++ b/cwvreg.py Fri Apr 23 12:42:53 2010 +0200 @@ -388,6 +388,10 @@ class ActionsRegistry(CWRegistry): + def poss_visible_objects(self, *args, **kwargs): + """return an ordered list of possible actions""" + return sorted(self.possible_objects(*args, **kwargs), + key=lambda x: x.order) def possible_actions(self, req, rset=None, **kwargs): if rset is None: @@ -603,7 +607,7 @@ def solutions(self, req, rqlst, args): def type_from_eid(eid, req=req): return req.describe(eid)[0] - self.rqlhelper.compute_solutions(rqlst, {'eid': type_from_eid}, args) + return self.rqlhelper.compute_solutions(rqlst, {'eid': type_from_eid}, args) def parse(self, req, rql, args=None): rqlst = self.rqlhelper.parse(rql) diff -r b619531ddbd2 -r b6e250dd7a7d dbapi.py --- a/dbapi.py Fri Apr 23 12:40:48 2010 +0200 +++ b/dbapi.py Fri Apr 23 12:42:53 2010 +0200 @@ -14,12 +14,14 @@ from logging import getLogger from time import time, clock from itertools import count +from warnings import warn from logilab.common.logging_ext import set_log_methods from logilab.common.decorators import monkeypatch from logilab.common.deprecation import deprecated -from cubicweb import ETYPE_NAME_MAP, ConnectionError, cwvreg, cwconfig +from cubicweb import ETYPE_NAME_MAP, ConnectionError, AuthenticationError,\ + cwvreg, cwconfig from cubicweb.req import RequestSessionBase @@ -193,10 +195,34 @@ cnx = repo_connect(repo, login, cnxprops=cnxprops, **kwargs) return repo, cnx +class _NeedAuthAccessMock(object): + def __getattribute__(self, attr): + raise AuthenticationError() + def __nonzero__(self): + return False + +class DBAPISession(object): + def __init__(self, cnx, login=None, authinfo=None): + self.cnx = cnx + self.data = {} + self.login = login + self.authinfo = authinfo + # dbapi session identifier is the same as the first connection + # identifier, but may later differ in case of auto-reconnection as done + # by the web authentication manager (in cw.web.views.authentication) + if cnx is not None: + self.sessionid = cnx.sessionid + else: + self.sessionid = None + + @property + def anonymous_session(self): + return not self.cnx or self.cnx.anonymous_connection + class DBAPIRequest(RequestSessionBase): - def __init__(self, vreg, cnx=None): + def __init__(self, vreg, session=None): super(DBAPIRequest, self).__init__(vreg) try: # no vreg or config which doesn't handle translations @@ -206,12 +232,13 @@ self.set_default_language(vreg) # cache entities built during the request self._eid_cache = {} - # these args are initialized after a connection is - # established - self.cnx = None # connection associated to the request - self._user = None # request's user, set at authentication - if cnx is not None: - self.set_connection(cnx) + if session is not None: + self.set_session(session) + else: + # these args are initialized after a connection is + # established + self.session = None + self.cnx = self.user = _NeedAuthAccessMock() def base_url(self): return self.vreg.config['base-url'] @@ -219,13 +246,25 @@ def from_controller(self): return 'view' - def set_connection(self, cnx, user=None): + def set_session(self, session, user=None): """method called by the session handler when the user is authenticated or an anonymous connection is open """ - self.cnx = cnx 
- self.cursor = cnx.cursor(self) - self.set_user(user) + self.session = session + if session.cnx: + self.cnx = session.cnx + self.execute = session.cnx.cursor(self).execute + if user is None: + user = self.cnx.user(self, {'lang': self.lang}) + if user is not None: + self.user = user + self.set_entity_cache(user) + + def execute(self, *args, **kwargs): + """overriden when session is set. By default raise authentication error + so authentication is requested. + """ + raise AuthenticationError() def set_default_language(self, vreg): try: @@ -243,14 +282,6 @@ self.pgettext = lambda x, y: y self.debug('request default language: %s', self.lang) - def describe(self, eid): - """return a tuple (type, sourceuri, extid) for the entity with id """ - return self.cnx.describe(eid) - - def source_defs(self): - """return the definition of sources used by the repository.""" - return self.cnx.source_defs() - # entities cache management ############################################### def entity_cache(self, eid): @@ -270,24 +301,6 @@ # low level session data management ####################################### - def session_data(self): - """return a dictionnary containing session data""" - return self.cnx.session_data() - - def get_session_data(self, key, default=None, pop=False): - """return value associated to `key` in session data""" - if self.cnx is None: - return default # before the connection has been established - return self.cnx.get_session_data(key, default, pop) - - def set_session_data(self, key, value): - """set value associated to `key` in session data""" - return self.cnx.set_session_data(key, value) - - def del_session_data(self, key): - """remove value associated to `key` in session data""" - return self.cnx.del_session_data(key) - def get_shared_data(self, key, default=None, pop=False): """return value associated to `key` in shared data""" return self.cnx.get_shared_data(key, default, pop) @@ -304,26 +317,39 @@ # server session compat layer ############################################# + def describe(self, eid): + """return a tuple (type, sourceuri, extid) for the entity with id """ + return self.cnx.describe(eid) + + def source_defs(self): + """return the definition of sources used by the repository.""" + return self.cnx.source_defs() + def hijack_user(self, user): """return a fake request/session using specified user""" req = DBAPIRequest(self.vreg) - req.set_connection(self.cnx, user) + req.set_session(self.session, user) return req - @property - def user(self): - if self._user is None and self.cnx: - self.set_user(self.cnx.user(self, {'lang': self.lang})) - return self._user + @deprecated('[3.8] use direct access to req.session.data dictionary') + def session_data(self): + """return a dictionnary containing session data""" + return self.session.data - def set_user(self, user): - self._user = user - if user: - self.set_entity_cache(user) + @deprecated('[3.8] use direct access to req.session.data dictionary') + def get_session_data(self, key, default=None, pop=False): + if pop: + return self.session.data.pop(key, default) + return self.session.data.get(key, default) - def execute(self, *args, **kwargs): - """Session interface compatibility""" - return self.cursor.execute(*args, **kwargs) + @deprecated('[3.8] use direct access to req.session.data dictionary') + def set_session_data(self, key, value): + self.session.data[key] = value + + @deprecated('[3.8] use direct access to req.session.data dictionary') + def del_session_data(self, key): + self.session.data.pop(key, None) + 
set_log_methods(DBAPIRequest, getLogger('cubicweb.dbapi')) @@ -338,68 +364,105 @@ etc. """ -# module level objects ######################################################## + +# cursor / connection objects ################################################## + +class Cursor(object): + """These objects represent a database cursor, which is used to manage the + context of a fetch operation. Cursors created from the same connection are + not isolated, i.e., any changes done to the database by a cursor are + immediately visible by the other cursors. Cursors created from different + connections are isolated. + """ + + def __init__(self, connection, repo, req=None): + """This read-only attribute return a reference to the Connection + object on which the cursor was created. + """ + self.connection = connection + """optionnal issuing request instance""" + self.req = req + self._repo = repo + self._sessid = connection.sessionid + + def close(self): + """no effect""" + pass + + def execute(self, rql, args=None, eid_key=None, build_descr=True): + """execute a rql query, return resulting rows and their description in + a :class:`~cubicweb.rset.ResultSet` object + + * `rql` should be an Unicode string or a plain ASCII string, containing + the rql query + + * `args` the optional args dictionary associated to the query, with key + matching named substitution in `rql` + + * `build_descr` is a boolean flag indicating if the description should + be built on select queries (if false, the description will be en empty + list) + + on INSERT queries, there will be one row for each inserted entity, + containing its eid + + on SET queries, XXX describe + + DELETE queries returns no result. + + .. Note:: + to maximize the rql parsing/analyzing cache performance, you should + always use substitute arguments in queries, i.e. avoid query such as:: + + execute('Any X WHERE X eid 123') + + use:: + + execute('Any X WHERE X eid %(x)s', {'x': 123}) + """ + if eid_key is not None: + warn('[3.8] eid_key is deprecated, you can safely remove this argument', + DeprecationWarning, stacklevel=2) + # XXX use named argument for build_descr in case repo is < 3.8 + rset = self._repo.execute(self._sessid, rql, args, build_descr=build_descr) + rset.req = self.req + return rset -apilevel = '2.0' - -"""Integer constant stating the level of thread safety the interface supports. -Possible values are: - - 0 Threads may not share the module. - 1 Threads may share the module, but not connections. - 2 Threads may share the module and connections. - 3 Threads may share the module, connections and - cursors. - -Sharing in the above context means that two threads may use a resource without -wrapping it using a mutex semaphore to implement resource locking. Note that -you cannot always make external resources thread safe by managing access using -a mutex: the resource may rely on global variables or other external sources -that are beyond your control. -""" -threadsafety = 1 +class LogCursor(Cursor): + """override the standard cursor to log executed queries""" -"""String constant stating the type of parameter marker formatting expected by -the interface. 
Possible values are : + def execute(self, operation, parameters=None, eid_key=None, build_descr=True): + """override the standard cursor to log executed queries""" + if eid_key is not None: + warn('[3.8] eid_key is deprecated, you can safely remove this argument', + DeprecationWarning, stacklevel=2) + tstart, cstart = time(), clock() + rset = Cursor.execute(self, operation, parameters, build_descr=build_descr) + self.connection.executed_queries.append((operation, parameters, + time() - tstart, clock() - cstart)) + return rset - 'qmark' Question mark style, - e.g. '...WHERE name=?' - 'numeric' Numeric, positional style, - e.g. '...WHERE name=:1' - 'named' Named style, - e.g. '...WHERE name=:name' - 'format' ANSI C printf format codes, - e.g. '...WHERE name=%s' - 'pyformat' Python extended format codes, - e.g. '...WHERE name=%(name)s' -""" -paramstyle = 'pyformat' - - -# connection object ########################################################### class Connection(object): """DB-API 2.0 compatible Connection object for CubicWeb """ # make exceptions available through the connection object ProgrammingError = ProgrammingError + # attributes that may be overriden per connection instance + anonymous_connection = False + cursor_class = Cursor + vreg = None + _closed = None def __init__(self, repo, cnxid, cnxprops=None): self._repo = repo self.sessionid = cnxid self._close_on_del = getattr(cnxprops, 'close_on_del', True) self._cnxtype = getattr(cnxprops, 'cnxtype', 'pyro') - self._closed = None if cnxprops and cnxprops.log_queries: self.executed_queries = [] self.cursor_class = LogCursor - else: - self.cursor_class = Cursor - self.anonymous_connection = False - self.vreg = None - # session's data - self.data = {} def __repr__(self): if self.anonymous_connection: @@ -417,29 +480,7 @@ return False #propagate the exception def request(self): - return DBAPIRequest(self.vreg, self) - - def session_data(self): - """return a dictionnary containing session data""" - return self.data - - def get_session_data(self, key, default=None, pop=False): - """return value associated to `key` in session data""" - if pop: - return self.data.pop(key, default) - else: - return self.data.get(key, default) - - def set_session_data(self, key, value): - """set value associated to `key` in session data""" - self.data[key] = value - - def del_session_data(self, key): - """remove value associated to `key` in session data""" - try: - del self.data[key] - except KeyError: - pass + return DBAPIRequest(self.vreg, DBAPISession(self)) def check(self): """raise `BadConnectionId` if the connection is no more valid""" @@ -513,8 +554,6 @@ if self._repo.config.instance_hooks: hm.register_hooks(config.load_hooks(self.vreg)) - load_vobjects = deprecated()(load_appobjects) - def use_web_compatible_requests(self, baseurl, sitetitle=None): """monkey patch DBAPIRequest to fake a cw.web.request, so you should able to call html views using rset from a simple dbapi connection. @@ -561,9 +600,13 @@ if req is None: req = self.request() rset = req.eid_rset(eid, 'CWUser') - user = self.vreg['etypes'].etype_class('CWUser')(req, rset, row=0, - groups=groups, - properties=properties) + if self.vreg is not None and 'etypes' in self.vreg: + user = self.vreg['etypes'].etype_class('CWUser')(req, rset, row=0, + groups=groups, + properties=properties) + else: + from cubicweb.entity import Entity + user = Entity(req, rset, row=0) user['login'] = login # cache login return user @@ -698,207 +741,3 @@ him). 
""" return self._repo.undo_transaction(self.sessionid, txuuid) - - -# cursor object ############################################################### - -class Cursor(object): - """This represents a database cursor, which is used to manage the - context of a fetch operation. Cursors created from the same connection are - not isolated, i.e., any changes done to the database by a cursor are - immediately visible by the other cursors. Cursors created from different - connections can or can not be isolated, depending on how the transaction - support is implemented (see also the connection's rollback() and commit() - methods.) - """ - - def __init__(self, connection, repo, req=None): - # This read-only attribute returns a reference to the Connection - # object on which the cursor was created. - self.connection = connection - # optionnal issuing request instance - self.req = req - - # This read/write attribute specifies the number of rows to fetch at a - # time with fetchmany(). It defaults to 1 meaning to fetch a single row - # at a time. - # Implementations must observe this value with respect to the fetchmany() - # method, but are free to interact with the database a single row at a - # time. It may also be used in the implementation of executemany(). - self.arraysize = 1 - - self._repo = repo - self._sessid = connection.sessionid - self._res = None - self._closed = None - self._index = 0 - - def close(self): - """Close the cursor now (rather than whenever __del__ is called). The - cursor will be unusable from this point forward; an Error (or subclass) - exception will be raised if any operation is attempted with the cursor. - """ - self._closed = True - - - def execute(self, operation, parameters=None, eid_key=None, build_descr=True): - """Prepare and execute a database operation (query or command). - Parameters may be provided as sequence or mapping and will be bound to - variables in the operation. Variables are specified in a - database-specific notation (see the module's paramstyle attribute for - details). - - A reference to the operation will be retained by the cursor. If the - same operation object is passed in again, then the cursor can optimize - its behavior. This is most effective for algorithms where the same - operation is used, but different parameters are bound to it (many - times). - - For maximum efficiency when reusing an operation, it is best to use the - setinputsizes() method to specify the parameter types and sizes ahead - of time. It is legal for a parameter to not match the predefined - information; the implementation should compensate, possibly with a loss - of efficiency. - - The parameters may also be specified as list of tuples to e.g. insert - multiple rows in a single operation, but this kind of usage is - depreciated: executemany() should be used instead. - - Return values are not defined by the DB-API, but this here it returns a - ResultSet object. - """ - self._res = rset = self._repo.execute(self._sessid, operation, - parameters, eid_key, build_descr) - rset.req = self.req - self._index = 0 - return rset - - - def executemany(self, operation, seq_of_parameters): - """Prepare a database operation (query or command) and then execute it - against all parameter sequences or mappings found in the sequence - seq_of_parameters. - - Modules are free to implement this method using multiple calls to the - execute() method or by using array operations to have the database - process the sequence as a whole in one call. 
- - Use of this method for an operation which produces one or more result - sets constitutes undefined behavior, and the implementation is - permitted (but not required) to raise an exception when it detects that - a result set has been created by an invocation of the operation. - - The same comments as for execute() also apply accordingly to this - method. - - Return values are not defined. - """ - for parameters in seq_of_parameters: - self.execute(operation, parameters) - if self._res.rows is not None: - self._res = None - raise ProgrammingError('Operation returned a result set') - - - def fetchone(self): - """Fetch the next row of a query result set, returning a single - sequence, or None when no more data is available. - - An Error (or subclass) exception is raised if the previous call to - execute*() did not produce any result set or no call was issued yet. - """ - if self._res is None: - raise ProgrammingError('No result set') - row = self._res.rows[self._index] - self._index += 1 - return row - - - def fetchmany(self, size=None): - """Fetch the next set of rows of a query result, returning a sequence - of sequences (e.g. a list of tuples). An empty sequence is returned - when no more rows are available. - - The number of rows to fetch per call is specified by the parameter. If - it is not given, the cursor's arraysize determines the number of rows - to be fetched. The method should try to fetch as many rows as indicated - by the size parameter. If this is not possible due to the specified - number of rows not being available, fewer rows may be returned. - - An Error (or subclass) exception is raised if the previous call to - execute*() did not produce any result set or no call was issued yet. - - Note there are performance considerations involved with the size - parameter. For optimal performance, it is usually best to use the - arraysize attribute. If the size parameter is used, then it is best - for it to retain the same value from one fetchmany() call to the next. - """ - if self._res is None: - raise ProgrammingError('No result set') - if size is None: - size = self.arraysize - rows = self._res.rows[self._index:self._index + size] - self._index += size - return rows - - - def fetchall(self): - """Fetch all (remaining) rows of a query result, returning them as a - sequence of sequences (e.g. a list of tuples). Note that the cursor's - arraysize attribute can affect the performance of this operation. - - An Error (or subclass) exception is raised if the previous call to - execute*() did not produce any result set or no call was issued yet. - """ - if self._res is None: - raise ProgrammingError('No result set') - if not self._res.rows: - return [] - rows = self._res.rows[self._index:] - self._index = len(self._res) - return rows - - - def setinputsizes(self, sizes): - """This can be used before a call to execute*() to predefine memory - areas for the operation's parameters. - - sizes is specified as a sequence -- one item for each input parameter. - The item should be a Type Object that corresponds to the input that - will be used, or it should be an integer specifying the maximum length - of a string parameter. If the item is None, then no predefined memory - area will be reserved for that column (this is useful to avoid - predefined areas for large inputs). - - This method would be used before the execute*() method is invoked. - - Implementations are free to have this method do nothing and users are - free to not use it. 
- """ - pass - - - def setoutputsize(self, size, column=None): - """Set a column buffer size for fetches of large columns (e.g. LONGs, - BLOBs, etc.). The column is specified as an index into the result - sequence. Not specifying the column will set the default size for all - large columns in the cursor. - - This method would be used before the execute*() method is invoked. - - Implementations are free to have this method do nothing and users are - free to not use it. - """ - pass - - -class LogCursor(Cursor): - """override the standard cursor to log executed queries""" - - def execute(self, operation, parameters=None, eid_key=None, build_descr=True): - """override the standard cursor to log executed queries""" - tstart, cstart = time(), clock() - rset = Cursor.execute(self, operation, parameters, eid_key, build_descr) - self.connection.executed_queries.append((operation, parameters, - time() - tstart, clock() - cstart)) - return rset diff -r b619531ddbd2 -r b6e250dd7a7d debian.hardy/rules --- a/debian.hardy/rules Fri Apr 23 12:40:48 2010 +0200 +++ b/debian.hardy/rules Fri Apr 23 12:42:53 2010 +0200 @@ -14,7 +14,7 @@ # and I can't get pbuilder find them in its chroot :( # cd doc && make # FIXME cleanup and use sphinx-build as build-depends ? - python setup.py build + NO_SETUPTOOLS=1 python setup.py build touch build-stamp clean: @@ -33,7 +33,7 @@ dh_clean dh_installdirs - python setup.py -q install --no-compile --prefix=debian/tmp/usr + NO_SETUPTOOLS=1 python setup.py -q install --no-compile --prefix=debian/tmp/usr # Put all the python library and data in cubicweb-common # and scripts in cubicweb-server diff -r b619531ddbd2 -r b6e250dd7a7d debian/changelog --- a/debian/changelog Fri Apr 23 12:40:48 2010 +0200 +++ b/debian/changelog Fri Apr 23 12:42:53 2010 +0200 @@ -1,3 +1,9 @@ +cubicweb (3.8.0-1) unstable; urgency=low + + * new upstream release + + -- Sylvain Thénault Tue, 20 Apr 2010 16:31:44 +0200 + cubicweb (3.7.4-1) unstable; urgency=low * new upstream release diff -r b619531ddbd2 -r b6e250dd7a7d debian/control --- a/debian/control Fri Apr 23 12:40:48 2010 +0200 +++ b/debian/control Fri Apr 23 12:42:53 2010 +0200 @@ -68,7 +68,7 @@ Architecture: all XB-Python-Version: ${python:Versions} Provides: cubicweb-web-frontend -Depends: ${python:Depends}, cubicweb-web (= ${source:Version}), cubicweb-ctl (= ${source:Version}), python-twisted-web2 +Depends: ${python:Depends}, cubicweb-web (= ${source:Version}), cubicweb-ctl (= ${source:Version}), python-twisted-web Recommends: pyro, cubicweb-documentation (= ${source:Version}) Description: twisted-based web interface for the CubicWeb framework CubicWeb is a semantic web application framework. @@ -83,7 +83,7 @@ Architecture: all XB-Python-Version: ${python:Versions} Depends: ${python:Depends}, cubicweb-common (= ${source:Version}), python-simplejson (>= 1.3) -Recommends: python-docutils, python-vobject, fckeditor, python-fyzz, python-pysixt, fop, python-imaging +Recommends: python-docutils, python-vobject, fckeditor, python-fyzz, python-imaging Description: web interface library for the CubicWeb framework CubicWeb is a semantic web application framework. . 
@@ -97,7 +97,7 @@ Package: cubicweb-common Architecture: all XB-Python-Version: ${python:Versions} -Depends: ${python:Depends}, graphviz, gettext, python-logilab-mtconverter (>= 0.6.0), python-logilab-common (>= 0.49.0), python-yams (>= 0.28.1), python-rql (>= 0.25.0), python-lxml +Depends: ${python:Depends}, graphviz, gettext, python-logilab-mtconverter (>= 0.6.0), python-logilab-common (>= 0.50.0), python-yams (>= 0.29.0), python-rql (>= 0.26.0), python-lxml Recommends: python-simpletal (>= 4.0), python-crypto Conflicts: cubicweb-core Replaces: cubicweb-core diff -r b619531ddbd2 -r b6e250dd7a7d debian/rules --- a/debian/rules Fri Apr 23 12:40:48 2010 +0200 +++ b/debian/rules Fri Apr 23 12:42:53 2010 +0200 @@ -14,7 +14,7 @@ # and I can't get pbuilder find them in its chroot :( # cd doc && make # FIXME cleanup and use sphinx-build as build-depends ? - python setup.py build + NO_SETUPTOOLS=1 python setup.py build touch build-stamp clean: @@ -34,7 +34,7 @@ dh_installdirs #python setup.py install_lib --no-compile --install-dir=debian/cubicweb-common/usr/lib/python2.4/site-packages/ - python setup.py -q install --no-compile --prefix=debian/tmp/usr + NO_SETUPTOOLS=1 python setup.py -q install --no-compile --prefix=debian/tmp/usr # Put all the python library and data in cubicweb-common # and scripts in cubicweb-server diff -r b619531ddbd2 -r b6e250dd7a7d devtools/__init__.py --- a/devtools/__init__.py Fri Apr 23 12:40:48 2010 +0200 +++ b/devtools/__init__.py Fri Apr 23 12:42:53 2010 +0200 @@ -87,19 +87,16 @@ {'type' : 'string', 'default': None, 'help': 'login of the CubicWeb user account to use for anonymous user (if you want to allow anonymous)', - 'group': 'main', 'inputlevel': 1, + 'group': 'main', 'level': 1, }), ('anonymous-password', {'type' : 'string', 'default': None, 'help': 'password of the CubicWeb user account matching login', - 'group': 'main', 'inputlevel': 1, + 'group': 'main', 'level': 1, }), )) - if not os.environ.get('APYCOT_ROOT'): - REGISTRY_DIR = normpath(join(CW_SOFTWARE_ROOT, '../cubes')) - def __init__(self, appid, log_threshold=logging.CRITICAL+10): ServerConfiguration.__init__(self, appid) self.init_log(log_threshold, force=True) diff -r b619531ddbd2 -r b6e250dd7a7d devtools/devctl.py --- a/devtools/devctl.py Fri Apr 23 12:40:48 2010 +0200 +++ b/devtools/devctl.py Fri Apr 23 12:42:53 2010 +0200 @@ -22,7 +22,8 @@ from cubicweb.__pkginfo__ import version as cubicwebversion from cubicweb import CW_SOFTWARE_ROOT as BASEDIR, BadCommandUsage -from cubicweb.toolsutils import Command, copy_skeleton, underline_title +from cubicweb.toolsutils import (SKEL_EXCLUDE, Command, + copy_skeleton, underline_title) from cubicweb.web.webconfig import WebConfiguration from cubicweb.server.serverconfig import ServerConfiguration @@ -440,12 +441,19 @@ """Create a new cube. - the name of the new cube + the name of the new cube. It should be a valid python module name. """ name = 'newcube' arguments = '' options = ( + ("layout", + {'short': 'L', 'type' : 'choice', 'metavar': '', + 'default': 'simple', 'choices': ('simple', 'full'), + 'help': 'cube layout. 
You\'ll get a minimal cube with the "simple" \ +layout, and a full featured cube with "full" layout.', + } + ), ("directory", {'short': 'd', 'type' : 'string', 'metavar': '', 'help': 'directory where the new cube should be created', @@ -475,14 +483,28 @@ 'help': 'cube author\'s web site', } ), + ("license", + {'short': 'l', 'type' : 'choice', 'metavar': '', + 'default': 'LGPL', 'choices': ('GPL', 'LGPL', ''), + 'help': 'cube license', + } + ), ) + LICENSES = { + 'LGPL': 'GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses', + 'GPL': 'GNU General Public License, v2.1 - http://www.gnu.org/licenses', + '': 'closed source' + } def run(self, args): + import re from logilab.common.shellutils import ASK if len(args) != 1: raise BadCommandUsage("exactly one argument (cube name) is expected") - cubename, = args + cubename = args[0] + if not re.match('[_A-Za-z][_A-Za-z0-9]*$', cubename): + raise BadCommandUsage("cube name should be a valid python module name") verbose = self.get('verbose') cubesdir = self.get('directory') if not cubesdir: @@ -501,7 +523,7 @@ if exists(cubedir): self.fail("%s already exists !" % (cubedir)) skeldir = join(BASEDIR, 'skeleton') - default_name = 'cubicweb-%s' % cubename.lower() + default_name = 'cubicweb-%s' % cubename.lower().replace('_', '-') if verbose: distname = raw_input('Debian name for your cube ? [%s]): ' % default_name).strip() if not distname: @@ -511,41 +533,49 @@ distname = 'cubicweb-' + distname else: distname = default_name - + if not re.match('[a-z][-a-z0-9]*$', distname): + raise BadCommandUsage("cube distname should be a valid debian package name") longdesc = shortdesc = raw_input('Enter a short description for your cube: ') if verbose: longdesc = raw_input('Enter a long description (leave empty to reuse the short one): ') - dependencies = {} + dependencies = {'cubicweb': '>= %s' % cubicwebversion} if verbose: - dependencies = self._ask_for_dependencies() + dependencies.update(self._ask_for_dependencies()) context = {'cubename' : cubename, 'distname' : distname, 'shortdesc' : shortdesc, 'longdesc' : longdesc or shortdesc, - 'dependencies' : dict((dep, None) for dep in dependencies), + 'dependencies' : dependencies, 'version' : cubicwebversion, 'year' : str(datetime.now().year), 'author': self['author'], 'author-email': self['author-email'], 'author-web-site': self['author-web-site'], + 'license': self['license'], + 'long-license': self.LICENSES[self['license']], } - copy_skeleton(skeldir, cubedir, context) + exclude = SKEL_EXCLUDE + if self['layout'] == 'simple': + exclude += ('sobjects.py*', 'precreate.py*', 'realdb_test*', + 'cubes.*', 'external_resources*') + copy_skeleton(skeldir, cubedir, context, exclude=exclude) def _ask_for_dependencies(self): from logilab.common.shellutils import ASK from logilab.common.textutils import splitstrip - includes = [] - for stdtype in ServerConfiguration.available_cubes(): - answer = ASK.ask("Depends on cube %s? " % stdtype, + depcubes = [] + for cube in ServerConfiguration.available_cubes(): + answer = ASK.ask("Depends on cube %s? 
" % cube, ('N','y','skip','type'), 'N') if answer == 'y': - includes.append(stdtype) + depcubes.append(cube) if answer == 'type': - includes = splitstrip(raw_input('type dependencies: ')) + depcubes = splitstrip(raw_input('type dependencies: ')) break elif answer == 'skip': break - return includes + return dict(('cubicweb-' + cube, ServerConfiguration.cube_version(cube)) + for cube in depcubes) class ExamineLogCommand(Command): diff -r b619531ddbd2 -r b6e250dd7a7d devtools/repotest.py --- a/devtools/repotest.py Fri Apr 23 12:40:48 2010 +0200 +++ b/devtools/repotest.py Fri Apr 23 12:42:53 2010 +0200 @@ -233,8 +233,8 @@ self._dumb_sessions.append(s) return s - def execute(self, rql, args=None, eid_key=None, build_descr=True): - return self.o.execute(self.session, rql, args, eid_key, build_descr) + def execute(self, rql, args=None, build_descr=True): + return self.o.execute(self.session, rql, args, build_descr) def commit(self): self.session.commit() diff -r b619531ddbd2 -r b6e250dd7a7d devtools/testlib.py --- a/devtools/testlib.py Fri Apr 23 12:40:48 2010 +0200 +++ b/devtools/testlib.py Fri Apr 23 12:42:53 2010 +0200 @@ -5,6 +5,8 @@ :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr :license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses """ +from __future__ import with_statement + __docformat__ = "restructuredtext en" import os @@ -13,6 +15,7 @@ from urllib import unquote from math import log from contextlib import contextmanager +from warnings import warn try: import json @@ -30,9 +33,10 @@ from cubicweb import ValidationError, NoSelectableObject, AuthenticationError from cubicweb import cwconfig, devtools, web, server -from cubicweb.dbapi import repo_connect, ConnectionProperties, ProgrammingError +from cubicweb.dbapi import ProgrammingError, DBAPISession, repo_connect from cubicweb.sobjects import notification from cubicweb.web import Redirect, application +from cubicweb.server.session import security_enabled from cubicweb.devtools import SYSTEM_ENTITIES, SYSTEM_RELATIONS, VIEW_VALIDATORS from cubicweb.devtools import fake, htmlparser @@ -212,11 +216,10 @@ cls.init_config(cls.config) cls.repo.hm.call_hooks('server_startup', repo=cls.repo) cls.vreg = cls.repo.vreg - cls._orig_cnx = cls.cnx + cls.websession = DBAPISession(cls.cnx, cls.admlogin, + {'password': cls.admpassword}) + cls._orig_cnx = (cls.cnx, cls.websession) cls.config.repository = lambda x=None: cls.repo - # necessary for authentication tests - cls.cnx.login = cls.admlogin - cls.cnx.authinfo = {'password': cls.admpassword} @classmethod def _refresh_repo(cls): @@ -239,7 +242,7 @@ @property def adminsession(self): """return current server side session (using default manager account)""" - return self.repo._sessions[self._orig_cnx.sessionid] + return self.repo._sessions[self._orig_cnx[0].sessionid] def set_option(self, optname, value): self.config.global_set_option(optname, value) @@ -289,12 +292,12 @@ if password is None: password = login.encode('utf8') if req is None: - req = self._orig_cnx.request() + req = self._orig_cnx[0].request() user = req.create_entity('CWUser', login=unicode(login), upassword=password, **kwargs) req.execute('SET X in_group G WHERE X eid %%(x)s, G name IN(%s)' % ','.join(repr(g) for g in groups), - {'x': user.eid}, 'x') + {'x': user.eid}) user.clear_related_cache('in_group', 'subject') if commit: req.cnx.commit() @@ -307,22 +310,21 @@ else: if not kwargs: kwargs['password'] = str(login) - self.cnx = repo_connect(self.repo, unicode(login), - 
cnxprops=ConnectionProperties('inmemory'), - **kwargs) + self.cnx = repo_connect(self.repo, unicode(login), **kwargs) + self.websession = DBAPISession(self.cnx) self._cnxs.append(self.cnx) if login == self.vreg.config.anonymous_user()[0]: self.cnx.anonymous_connection = True return self.cnx def restore_connection(self): - if not self.cnx is self._orig_cnx: + if not self.cnx is self._orig_cnx[0]: try: self.cnx.close() self._cnxs.remove(self.cnx) except ProgrammingError: pass # already closed - self.cnx = self._orig_cnx + self.cnx, self.websession = self._orig_cnx # db api ################################################################## @@ -335,8 +337,11 @@ """executes , builds a resultset, and returns a couple (rset, req) where req is a FakeRequest """ + if eidkey is not None: + warn('[3.8] eidkey is deprecated, you can safely remove this argument', + DeprecationWarning, stacklevel=2) req = req or self.request(rql=rql) - return self.cnx.cursor(req).execute(unicode(rql), args, eidkey) + return req.execute(unicode(rql), args) @nocoverage def commit(self): @@ -357,14 +362,14 @@ # # server side db api ####################################################### def sexecute(self, rql, args=None, eid_key=None): + if eid_key is not None: + warn('[3.8] eid_key is deprecated, you can safely remove this argument', + DeprecationWarning, stacklevel=2) self.session.set_pool() - return self.session.execute(rql, args, eid_key) + return self.session.execute(rql, args) # other utilities ######################################################### - def entity(self, rql, args=None, eidkey=None, req=None): - return self.execute(rql, args, eidkey, req=req).get_entity(0, 0) - @contextmanager def temporary_appobjects(self, *appobjects): self.vreg._loadedmods.setdefault(self.__module__, {}) @@ -481,7 +486,7 @@ def request(self, *args, **kwargs): """return a web ui request""" req = self.requestcls(self.vreg, form=kwargs) - req.set_connection(self.cnx) + req.set_session(self.websession) return req def remote_call(self, fname, *args): @@ -537,27 +542,31 @@ self.set_option('auth-mode', authmode) self.set_option('anonymous-user', anonuser) req = self.request() - origcnx = req.cnx - req.cnx = None + origsession = req.session + req.session = req.cnx = None + del req.execute # get back to class implementation sh = self.app.session_handler authm = sh.session_manager.authmanager authm.anoninfo = self.vreg.config.anonymous_user() + authm.anoninfo = authm.anoninfo[0], {'password': authm.anoninfo[1]} # not properly cleaned between tests self.open_sessions = sh.session_manager._sessions = {} - return req, origcnx + return req, origsession - def assertAuthSuccess(self, req, origcnx, nbsessions=1): + def assertAuthSuccess(self, req, origsession, nbsessions=1): sh = self.app.session_handler path, params = self.expect_redirect(lambda x: self.app.connect(x), req) - cnx = req.cnx + session = req.session self.assertEquals(len(self.open_sessions), nbsessions, self.open_sessions) - self.assertEquals(cnx.login, origcnx.login) - self.assertEquals(cnx.anonymous_connection, False) + self.assertEquals(session.login, origsession.login) + self.assertEquals(session.anonymous_session, False) self.assertEquals(path, 'view') - self.assertEquals(params, {'__message': 'welcome %s !' % cnx.user().login}) + self.assertEquals(params, {'__message': 'welcome %s !' 
% req.user.login}) def assertAuthFailure(self, req, nbsessions=0): - self.assertRaises(AuthenticationError, self.app.connect, req) + self.app.connect(req) + self.assertIsInstance(req.session, DBAPISession) + self.assertEquals(req.session.cnx, None) self.assertEquals(req.cnx, None) self.assertEquals(len(self.open_sessions), nbsessions) clear_cache(req, 'get_authorization') @@ -688,29 +697,19 @@ # deprecated ############################################################### + @deprecated('[3.8] use self.execute(...).get_entity(0, 0)') + def entity(self, rql, args=None, eidkey=None, req=None): + if eidkey is not None: + warn('[3.8] eidkey is deprecated, you can safely remove this argument', + DeprecationWarning, stacklevel=2) + return self.execute(rql, args, req=req).get_entity(0, 0) + @deprecated('[3.6] use self.request().create_entity(...)') def add_entity(self, etype, req=None, **kwargs): if req is None: req = self.request() return req.create_entity(etype, **kwargs) - @deprecated('[3.4] use self.vreg["etypes"].etype_class(etype)(self.request())') - def etype_instance(self, etype, req=None): - req = req or self.request() - e = self.vreg['etypes'].etype_class(etype)(req) - e.eid = None - return e - - @nocoverage - @deprecated('[3.4] use req = self.request(); rset = req.execute()', - stacklevel=3) - def rset_and_req(self, rql, optional_args=None, args=None, eidkey=None): - """executes , builds a resultset, and returns a - couple (rset, req) where req is a FakeRequest - """ - return (self.execute(rql, args, eidkey), - self.request(rql=rql, **optional_args or {})) - # auto-populating test classes and utilities ################################### @@ -794,6 +793,10 @@ """this method populates the database with `how_many` entities of each possible type. It also inserts random relations between them """ + with security_enabled(self.session, read=False, write=False): + self._auto_populate(how_many) + + def _auto_populate(self, how_many): cu = self.cursor() self.custom_populate(how_many, cu) vreg = self.vreg diff -r b619531ddbd2 -r b6e250dd7a7d doc/book/en/development/devweb/views/index.rst diff -r b619531ddbd2 -r b6e250dd7a7d entities/authobjs.py --- a/entities/authobjs.py Fri Apr 23 12:40:48 2010 +0200 +++ b/entities/authobjs.py Fri Apr 23 12:42:53 2010 +0200 @@ -96,7 +96,7 @@ try: return self._cw.execute( 'Any X WHERE X eid %(x)s, X owned_by U, U eid %(u)s', - {'x': eid, 'u': self.eid}, 'x') + {'x': eid, 'u': self.eid}) except Unauthorized: return False owns = cached(owns, keyarg=1) @@ -105,13 +105,11 @@ rql = 'Any P WHERE P is CWPermission, U eid %(u)s, U in_group G, '\ 'P name %(pname)s, P require_group G' kwargs = {'pname': pname, 'u': self.eid} - cachekey = None if contexteid is not None: rql += ', X require_permission P, X eid %(x)s' kwargs['x'] = contexteid - cachekey = 'x' try: - return self._cw.execute(rql, kwargs, cachekey) + return self._cw.execute(rql, kwargs) except Unauthorized: return False diff -r b619531ddbd2 -r b6e250dd7a7d entities/lib.py --- a/entities/lib.py Fri Apr 23 12:40:48 2010 +0200 +++ b/entities/lib.py Fri Apr 23 12:42:53 2010 +0200 @@ -23,6 +23,7 @@ return address return '%s at %s' % (name, host.replace('.', ' dot ')) + class EmailAddress(AnyEntity): __regid__ = 'EmailAddress' fetch_attrs, fetch_order = fetch_config(['address', 'alias']) @@ -50,8 +51,10 @@ subjrels = self.e_schema.object_relations() if not ('sender' in subjrels and 'recipients' in subjrels): return - rql = 'DISTINCT Any X, S, D ORDERBY D DESC WHERE X sender Y or X recipients Y, X subject S, X date D, Y 
eid %(y)s' - rset = self._cw.execute(rql, {'y': self.eid}, 'y') + rset = self._cw.execute('DISTINCT Any X, S, D ORDERBY D DESC ' + 'WHERE X sender Y or X recipients Y, ' + 'X subject S, X date D, Y eid %(y)s', + {'y': self.eid}) if skipeids is None: skipeids = set() for i in xrange(len(rset)): @@ -131,7 +134,7 @@ def touch(self): self._cw.execute('SET X timestamp %(t)s WHERE X eid %(x)s', - {'t': datetime.now(), 'x': self.eid}, 'x') + {'t': datetime.now(), 'x': self.eid}) def valid(self, date): if date: diff -r b619531ddbd2 -r b6e250dd7a7d entities/schemaobjs.py --- a/entities/schemaobjs.py Fri Apr 23 12:40:48 2010 +0200 +++ b/entities/schemaobjs.py Fri Apr 23 12:42:53 2010 +0200 @@ -122,6 +122,9 @@ def otype(self): return self.to_entity[0] + def yams_schema(self): + rschema = self._cw.vreg.schema.rschema(self.rtype.name) + return rschema.rdefs[(self.stype.name, self.otype.name)] class CWAttribute(CWRelation): __regid__ = 'CWAttribute' @@ -162,6 +165,9 @@ fetch_attrs, fetch_order = fetch_config(['exprtype', 'mainvars', 'expression']) def dc_title(self): + return self.expression or u'' + + def dc_long_title(self): return '%s(%s)' % (self.exprtype, self.expression or u'') @property diff -r b619531ddbd2 -r b6e250dd7a7d entities/test/unittest_base.py --- a/entities/test/unittest_base.py Fri Apr 23 12:40:48 2010 +0200 +++ b/entities/test/unittest_base.py Fri Apr 23 12:42:53 2010 +0200 @@ -69,18 +69,18 @@ class CWUserTC(BaseEntityTC): def test_complete(self): - e = self.entity('CWUser X WHERE X login "admin"') + e = self.execute('CWUser X WHERE X login "admin"').get_entity(0, 0) e.complete() def test_matching_groups(self): - e = self.entity('CWUser X WHERE X login "admin"') + e = self.execute('CWUser X WHERE X login "admin"').get_entity(0, 0) self.failUnless(e.matching_groups('managers')) self.failIf(e.matching_groups('xyz')) self.failUnless(e.matching_groups(('xyz', 'managers'))) self.failIf(e.matching_groups(('xyz', 'abcd'))) def test_dc_title_and_name(self): - e = self.entity('CWUser U WHERE U login "member"') + e = self.execute('CWUser U WHERE U login "member"').get_entity(0, 0) self.assertEquals(e.dc_title(), 'member') self.assertEquals(e.name(), 'member') e.set_attributes(firstname=u'bouah') @@ -91,7 +91,7 @@ self.assertEquals(e.name(), u'bouah lôt') def test_allowed_massmail_keys(self): - e = self.entity('CWUser U WHERE U login "member"') + e = self.execute('CWUser U WHERE U login "member"').get_entity(0, 0) # Bytes/Password attributes should be omited self.assertEquals(e.allowed_massmail_keys(), set(('surname', 'firstname', 'login', 'last_login_time', diff -r b619531ddbd2 -r b6e250dd7a7d entities/test/unittest_wfobjs.py --- a/entities/test/unittest_wfobjs.py Fri Apr 23 12:40:48 2010 +0200 +++ b/entities/test/unittest_wfobjs.py Fri Apr 23 12:42:53 2010 +0200 @@ -96,7 +96,7 @@ self.assertEquals(e.latest_trinfo().comment, 'deactivate 2') def test_possible_transitions(self): - user = self.entity('CWUser X') + user = self.execute('CWUser X').get_entity(0, 0) trs = list(user.possible_transitions()) self.assertEquals(len(trs), 1) self.assertEquals(trs[0].name, u'deactivate') @@ -131,7 +131,7 @@ with security_enabled(self.session, write=False): ex = self.assertRaises(ValidationError, self.session.execute, 'SET X in_state S WHERE X eid %(x)s, S eid %(s)s', - {'x': self.user().eid, 's': s.eid}, 'x') + {'x': self.user().eid, 's': s.eid}) self.assertEquals(ex.errors, {'in_state-subject': "state doesn't belong to entity's workflow. 
" "You may want to set a custom workflow for this entity first."}) @@ -413,7 +413,7 @@ wf = add_wf(self, 'Company') wf.add_state('asleep', initial=True) self.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', - {'wf': wf.eid, 'x': self.member.eid}, 'x') + {'wf': wf.eid, 'x': self.member.eid}) ex = self.assertRaises(ValidationError, self.commit) self.assertEquals(ex.errors, {'custom_workflow-subject': 'workflow isn\'t a workflow for this type'}) diff -r b619531ddbd2 -r b6e250dd7a7d entities/wfobjs.py --- a/entities/wfobjs.py Fri Apr 23 12:40:48 2010 +0200 +++ b/entities/wfobjs.py Fri Apr 23 12:42:53 2010 +0200 @@ -65,7 +65,7 @@ def state_by_name(self, statename): rset = self._cw.execute('Any S, SN WHERE S name SN, S name %(n)s, ' 'S state_of WF, WF eid %(wf)s', - {'n': statename, 'wf': self.eid}, 'wf') + {'n': statename, 'wf': self.eid}) if rset: return rset.get_entity(0, 0) return None @@ -73,7 +73,7 @@ def state_by_eid(self, eid): rset = self._cw.execute('Any S, SN WHERE S name SN, S eid %(s)s, ' 'S state_of WF, WF eid %(wf)s', - {'s': eid, 'wf': self.eid}, ('wf', 's')) + {'s': eid, 'wf': self.eid}) if rset: return rset.get_entity(0, 0) return None @@ -81,7 +81,7 @@ def transition_by_name(self, trname): rset = self._cw.execute('Any T, TN WHERE T name TN, T name %(n)s, ' 'T transition_of WF, WF eid %(wf)s', - {'n': trname, 'wf': self.eid}, 'wf') + {'n': trname, 'wf': self.eid}) if rset: return rset.get_entity(0, 0) return None @@ -89,7 +89,7 @@ def transition_by_eid(self, eid): rset = self._cw.execute('Any T, TN WHERE T name TN, T eid %(t)s, ' 'T transition_of WF, WF eid %(wf)s', - {'t': eid, 'wf': self.eid}, ('wf', 't')) + {'t': eid, 'wf': self.eid}) if rset: return rset.get_entity(0, 0) return None @@ -100,12 +100,12 @@ """add a state to this workflow""" state = self._cw.create_entity('State', name=unicode(name), **kwargs) self._cw.execute('SET S state_of WF WHERE S eid %(s)s, WF eid %(wf)s', - {'s': state.eid, 'wf': self.eid}, ('s', 'wf')) + {'s': state.eid, 'wf': self.eid}) if initial: assert not self.initial, "Initial state already defined as %s" % self.initial self._cw.execute('SET WF initial_state S ' 'WHERE S eid %(s)s, WF eid %(wf)s', - {'s': state.eid, 'wf': self.eid}, ('s', 'wf')) + {'s': state.eid, 'wf': self.eid}) return state def _add_transition(self, trtype, name, fromstates, @@ -113,7 +113,7 @@ tr = self._cw.create_entity(trtype, name=unicode(name), **kwargs) self._cw.execute('SET T transition_of WF ' 'WHERE T eid %(t)s, WF eid %(wf)s', - {'t': tr.eid, 'wf': self.eid}, ('t', 'wf')) + {'t': tr.eid, 'wf': self.eid}) assert fromstates, fromstates if not isinstance(fromstates, (tuple, list)): fromstates = (fromstates,) @@ -122,7 +122,7 @@ state = state.eid self._cw.execute('SET S allowed_transition T ' 'WHERE S eid %(s)s, T eid %(t)s', - {'s': state, 't': tr.eid}, ('s', 't')) + {'s': state, 't': tr.eid}) tr.set_permissions(requiredgroups, conditions, reset=False) return tr @@ -136,7 +136,7 @@ tostate = tostate.eid self._cw.execute('SET T destination_state S ' 'WHERE S eid %(s)s, T eid %(t)s', - {'t': tr.eid, 's': tostate}, ('s', 't')) + {'t': tr.eid, 's': tostate}) return tr def add_wftransition(self, name, subworkflow, fromstates, exitpoints=(), @@ -147,7 +147,7 @@ if hasattr(subworkflow, 'eid'): subworkflow = subworkflow.eid assert self._cw.execute('SET T subworkflow WF WHERE WF eid %(wf)s,T eid %(t)s', - {'t': tr.eid, 'wf': subworkflow}, ('wf', 't')) + {'t': tr.eid, 'wf': subworkflow}) for fromstate, tostate in exitpoints: tr.add_exit_point(fromstate, 
tostate) return tr @@ -159,11 +159,11 @@ if not hasattr(replacement, 'eid'): replacement = self.state_by_name(replacement) execute = self._cw.execute - execute('SET X in_state S WHERE S eid %(s)s', {'s': todelstate.eid}, 's') + execute('SET X in_state S WHERE S eid %(s)s', {'s': todelstate.eid}) execute('SET X from_state NS WHERE X to_state OS, OS eid %(os)s, NS eid %(ns)s', - {'os': todelstate.eid, 'ns': replacement.eid}, 's') + {'os': todelstate.eid, 'ns': replacement.eid}) execute('SET X to_state NS WHERE X to_state OS, OS eid %(os)s, NS eid %(ns)s', - {'os': todelstate.eid, 'ns': replacement.eid}, 's') + {'os': todelstate.eid, 'ns': replacement.eid}) todelstate.delete() @@ -174,7 +174,7 @@ fired by the logged user """ __regid__ = 'BaseTransition' - fetch_attrs, fetch_order = fetch_config(['name']) + fetch_attrs, fetch_order = fetch_config(['name', 'type']) def __init__(self, *args, **kwargs): if self.__regid__ == 'BaseTransition': @@ -227,13 +227,13 @@ """ if reset: self._cw.execute('DELETE T require_group G WHERE T eid %(x)s', - {'x': self.eid}, 'x') + {'x': self.eid}) self._cw.execute('DELETE T condition R WHERE T eid %(x)s', - {'x': self.eid}, 'x') + {'x': self.eid}) for gname in requiredgroups: rset = self._cw.execute('SET T require_group G ' 'WHERE T eid %(x)s, G name %(gn)s', - {'x': self.eid, 'gn': gname}, 'x') + {'x': self.eid, 'gn': gname}) assert rset, '%s is not a known group' % gname if isinstance(conditions, basestring): conditions = (conditions,) @@ -247,7 +247,7 @@ kwargs.setdefault('mainvars', u'X') self._cw.execute('INSERT RQLExpression X: X exprtype "ERQLExpression", ' 'X expression %(expr)s, X mainvars %(mainvars)s, ' - 'T condition X WHERE T eid %(x)s',kwargs, 'x') + 'T condition X WHERE T eid %(x)s', kwargs) # XXX clear caches? 
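A change that recurs in this hunk and throughout the patch: execute() no longer takes a trailing cache-key argument ('x', ('s', 'wf'), ...); only the RQL string and the argument dict remain. A sketch of the call-site migration, where cnx stands for whatever object carries execute() (session, request or connection) and the eids are illustrative::

    def link_state_to_workflow(cnx, state_eid, wf_eid):
        # before 3.8, the eid keys had to be repeated as a cache key:
        #   cnx.execute('SET S state_of WF WHERE S eid %(s)s, WF eid %(wf)s',
        #               {'s': state_eid, 'wf': wf_eid}, ('s', 'wf'))
        # from 3.8 on, the argument dict alone is enough:
        return cnx.execute('SET S state_of WF WHERE S eid %(s)s, WF eid %(wf)s',
                           {'s': state_eid, 'wf': wf_eid})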
@deprecated('[3.6.1] use set_permission') @@ -299,15 +299,14 @@ if tostate is None: self._cw.execute('INSERT SubWorkflowExitPoint X: T subworkflow_exit X, ' 'X subworkflow_state FS WHERE T eid %(t)s, FS eid %(fs)s', - {'t': self.eid, 'fs': fromstate}, ('t', 'fs')) + {'t': self.eid, 'fs': fromstate}) else: if hasattr(tostate, 'eid'): tostate = tostate.eid self._cw.execute('INSERT SubWorkflowExitPoint X: T subworkflow_exit X, ' 'X subworkflow_state FS, X destination_state TS ' 'WHERE T eid %(t)s, FS eid %(fs)s, TS eid %(ts)s', - {'t': self.eid, 'fs': fromstate, 'ts': tostate}, - ('t', 'fs', 'ts')) + {'t': self.eid, 'fs': fromstate, 'ts': tostate}) def get_exit_point(self, entity, stateeid): """if state is an exit point, return its associated destination state""" @@ -469,7 +468,7 @@ 'T type TT, T type %(type)s, ' 'T name TN, T transition_of WF, WF eid %(wfeid)s', {'x': self.current_state.eid, 'type': type, - 'wfeid': self.current_workflow.eid}, 'x') + 'wfeid': self.current_workflow.eid}) for tr in rset.entities(): if tr.may_be_fired(self.eid): yield tr diff -r b619531ddbd2 -r b6e250dd7a7d entity.py --- a/entity.py Fri Apr 23 12:40:48 2010 +0200 +++ b/entity.py Fri Apr 23 12:42:53 2010 +0200 @@ -561,7 +561,7 @@ continue rql = 'SET X %s V WHERE X eid %%(x)s, Y eid %%(y)s, Y %s V' % ( rschema.type, rschema.type) - execute(rql, {'x': self.eid, 'y': ceid}, ('x', 'y')) + execute(rql, {'x': self.eid, 'y': ceid}) self.clear_related_cache(rschema.type, 'subject') for rschema in self.e_schema.object_relations(): if rschema.meta: @@ -579,7 +579,7 @@ continue rql = 'SET V %s X WHERE X eid %%(x)s, Y eid %%(y)s, V %s Y' % ( rschema.type, rschema.type) - execute(rql, {'x': self.eid, 'y': ceid}, ('x', 'y')) + execute(rql, {'x': self.eid, 'y': ceid}) self.clear_related_cache(rschema.type, 'object') # data fetching methods ################################################### @@ -681,8 +681,7 @@ # if some outer join are included to fetch inlined relations rql = 'Any %s,%s %s' % (V, ','.join(var for attr, var in selected), ','.join(rql)) - rset = self._cw.execute(rql, {'x': self.eid}, 'x', - build_descr=False)[0] + rset = self._cw.execute(rql, {'x': self.eid}, build_descr=False)[0] # handle attributes for i in xrange(1, lastattr): self[str(selected[i-1][0])] = rset[i] @@ -711,7 +710,7 @@ return None rql = "Any A WHERE X eid %%(x)s, X %s A" % name try: - rset = self._cw.execute(rql, {'x': self.eid}, 'x') + rset = self._cw.execute(rql, {'x': self.eid}) except Unauthorized: self[name] = value = None else: @@ -742,7 +741,7 @@ pass assert self.has_eid() rql = self.related_rql(rtype, role) - rset = self._cw.execute(rql, {'x': self.eid}, 'x') + rset = self._cw.execute(rql, {'x': self.eid}) self.set_related_cache(rtype, role, rset) return self.related(rtype, role, limit, entities) @@ -868,7 +867,7 @@ if limit is not None: before, after = rql.split(' WHERE ', 1) rql = '%s LIMIT %s WHERE %s' % (before, limit, after) - return self._cw.execute(rql, args, tuple(args)) + return self._cw.execute(rql, args) # relations cache handling ################################################ @@ -951,7 +950,7 @@ # and now update the database kwargs['x'] = self.eid self._cw.execute('SET %s WHERE X eid %%(x)s' % ','.join(relations), - kwargs, 'x') + kwargs) kwargs.pop('x') # update current local object _after_ the rql query to avoid # interferences between the query execution itself and the @@ -974,13 +973,13 @@ restr = 'X %s Y' % attr if values is None: self._cw.execute('DELETE %s WHERE X eid %%(x)s' % restr, - {'x': self.eid}, 'x') + {'x': 
self.eid}) continue if not isinstance(values, (tuple, list, set, frozenset)): values = (values,) self._cw.execute('SET %s WHERE X eid %%(x)s, Y eid IN (%s)' % ( restr, ','.join(str(r.eid) for r in values)), - {'x': self.eid}, 'x') + {'x': self.eid}) def delete(self, **kwargs): assert self.has_eid(), self.eid diff -r b619531ddbd2 -r b6e250dd7a7d etwist/http.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/etwist/http.py Fri Apr 23 12:42:53 2010 +0200 @@ -0,0 +1,71 @@ +"""twisted server for CubicWeb web instances + +:organization: Logilab +:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. +:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr +:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses +""" + +__docformat__ = "restructuredtext en" + +from cubicweb.web.http_headers import Headers + +class HTTPResponse(object): + """An object representing an HTTP Response to be sent to the client. + """ + def __init__(self, twisted_request, code=None, headers=None, stream=None): + self._headers_out = headers + self._twreq = twisted_request + self._stream = stream + self._code = code + + self._init_headers() + self._finalize() + + def _init_headers(self): + if self._headers_out is None: + return + + # initialize cookies + cookies = self._headers_out.getHeader('set-cookie') or [] + for cookie in cookies: + self._twreq.addCookie(cookie.name, cookie.value, cookie.expires, + cookie.domain, cookie.path, #TODO max-age + comment = cookie.comment, secure=cookie.secure) + self._headers_out.removeHeader('set-cookie') + + # initialize other headers + for k, v in self._headers_out.getAllRawHeaders(): + self._twreq.setHeader(k, v[0]) + + # add content-length if not present + if (self._headers_out.getHeader('content-length') is None + and self._stream is not None): + self._twreq.setHeader('content-length', len(self._stream)) + + + def _finalize(self): + if self._stream is not None: + self._twreq.write(str(self._stream)) + if self._code is not None: + self._twreq.setResponseCode(self._code) + self._twreq.finish() + + def __repr__(self): + return "<%s.%s code=%d>" % (self.__module__, self.__class__.__name__, self._code) + + +def not_modified_response(twisted_request, headers_in): + headers_out = Headers() + + for header in ( + # Required from sec 10.3.5: + 'date', 'etag', 'content-location', 'expires', + 'cache-control', 'vary', + # Others: + 'server', 'proxy-authenticate', 'www-authenticate', 'warning'): + value = headers_in.getRawHeaders(header) + if value is not None: + headers_out.setRawHeaders(header, value) + return HTTPResponse(twisted_request=twisted_request, + headers=headers_out) diff -r b619531ddbd2 -r b6e250dd7a7d etwist/request.py --- a/etwist/request.py Fri Apr 23 12:40:48 2010 +0200 +++ b/etwist/request.py Fri Apr 23 12:42:53 2010 +0200 @@ -9,22 +9,13 @@ from datetime import datetime -from twisted.web2 import http, http_headers +from twisted.web import http from cubicweb.web import DirectResponse from cubicweb.web.request import CubicWebRequestBase from cubicweb.web.httpcache import GMTOFFSET - -def cleanup_files(dct, encoding): - d = {} - for k, infos in dct.items(): - for (filename, mt, stream) in infos: - if filename: - # XXX: suppose that no file submitted <-> no filename - filename = unicode(filename, encoding) - mt = u'%s/%s' % (mt.mediaType, mt.mediaSubtype) - d[k] = (filename, mt, stream) - return d +from cubicweb.web.http_headers import Headers +from cubicweb.etwist.http import not_modified_response class 
CubicWebTwistedRequestAdapter(CubicWebRequestBase): @@ -32,10 +23,15 @@ self._twreq = req self._base_url = base_url super(CubicWebTwistedRequestAdapter, self).__init__(vreg, https, req.args) - self.form.update(cleanup_files(req.files, self.encoding)) - # prepare output headers - self.headers_out = http_headers.Headers() - self._headers = req.headers + for key, (name, stream) in req.files.iteritems(): + if name is None: + self.form[key] = (name, stream) + else: + self.form[key] = (unicode(name, self.encoding), stream) + # XXX can't we keep received_headers? + self._headers_in = Headers() + for k, v in req.received_headers.iteritems(): + self._headers_in.addRawHeader(k, v) def base_url(self): """return the root url of the instance""" @@ -63,29 +59,8 @@ raise KeyError if the header is not set """ if raw: - return self._twreq.headers.getRawHeaders(header, [default])[0] - return self._twreq.headers.getHeader(header, default) - - def set_header(self, header, value, raw=True): - """set an output HTTP header""" - if raw: - # adding encoded header is important, else page content - # will be reconverted back to unicode and apart unefficiency, this - # may cause decoding problem (e.g. when downloading a file) - self.headers_out.setRawHeaders(header, [str(value)]) - else: - self.headers_out.setHeader(header, value) - - def add_header(self, header, value): - """add an output HTTP header""" - # adding encoded header is important, else page content - # will be reconverted back to unicode and apart unefficiency, this - # may cause decoding problem (e.g. when downloading a file) - self.headers_out.addRawHeader(header, str(value)) - - def remove_header(self, header): - """remove an output HTTP header""" - self.headers_out.removeHeader(header) + return self._headers_in.getRawHeaders(header, [default])[0] + return self._headers_in.getHeader(header, default) def _validate_cache(self): """raise a `DirectResponse` exception if a cached page along the way @@ -95,11 +70,32 @@ # Expires header seems to be required by IE7 self.add_header('Expires', 'Sat, 01 Jan 2000 00:00:00 GMT') return - try: - http.checkPreconditions(self._twreq, _PreResponse(self)) - except http.HTTPError, ex: - self.info('valid http cache, no actual rendering') - raise DirectResponse(ex.response) + + # when using both 'Last-Modified' and 'ETag' response headers + # (i.e. 
using respectively If-Modified-Since and If-None-Match request + # headers, see + # http://www.w3.org/Protocols/rfc2616/rfc2616-sec13.html#sec13.3.4 for + # reference + + cached_because_not_modified_since = False + + last_modified = self.headers_out.getHeader('last-modified') + if last_modified is not None: + cached_because_not_modified_since = (self._twreq.setLastModified(last_modified) + == http.CACHED) + + if not cached_because_not_modified_since: + return + + cached_because_etag_is_same = False + etag = self.headers_out.getRawHeaders('etag') + if etag is not None: + cached_because_etag_is_same = self._twreq.setETag(etag[0]) == http.CACHED + + if cached_because_etag_is_same: + response = not_modified_response(self._twreq, self._headers_in) + raise DirectResponse(response) + # Expires header seems to be required by IE7 self.add_header('Expires', 'Sat, 01 Jan 2000 00:00:00 GMT') @@ -120,9 +116,3 @@ # :/ twisted is returned a localized time stamp return datetime.fromtimestamp(mtime) + GMTOFFSET return None - - -class _PreResponse(object): - def __init__(self, request): - self.headers = request.headers_out - self.code = 200 diff -r b619531ddbd2 -r b6e250dd7a7d etwist/server.py --- a/etwist/server.py Fri Apr 23 12:40:48 2010 +0200 +++ b/etwist/server.py Fri Apr 23 12:42:53 2010 +0200 @@ -11,22 +11,30 @@ import os import select import errno +import traceback +from os.path import join from time import mktime from datetime import date, timedelta from urlparse import urlsplit, urlunsplit +from cgi import FieldStorage, parse_header +from cStringIO import StringIO from twisted.internet import reactor, task, threads from twisted.internet.defer import maybeDeferred -from twisted.web2 import channel, http, server, iweb -from twisted.web2 import static, resource, responsecode +from twisted.web import http, server +from twisted.web import static, resource +from twisted.web.server import NOT_DONE_YET + +from simplejson import dumps -from cubicweb import ConfigurationError, CW_EVENT_MANAGER -from cubicweb.web import (AuthenticationError, NotFound, Redirect, - RemoteCallFailed, DirectResponse, StatusResponse, - ExplicitLogin) +from logilab.common.decorators import monkeypatch + +from cubicweb import AuthenticationError, ConfigurationError, CW_EVENT_MANAGER +from cubicweb.web import Redirect, DirectResponse, StatusResponse, LogOut from cubicweb.web.application import CubicWebPublisher - +from cubicweb.web.http_headers import generateDateTime from cubicweb.etwist.request import CubicWebTwistedRequestAdapter +from cubicweb.etwist.http import HTTPResponse def daemonize(): # XXX unix specific @@ -67,8 +75,20 @@ return baseurl -class LongTimeExpiringFile(static.File): - """overrides static.File and sets a far futre ``Expires`` date +class ForbiddenDirectoryLister(resource.Resource): + def render(self, request): + return HTTPResponse(twisted_request=request, + code=http.FORBIDDEN, + stream='Access forbidden') + +class File(static.File): + """Prevent from listing directories""" + def directoryListing(self): + return ForbiddenDirectoryLister() + + +class LongTimeExpiringFile(File): + """overrides static.File and sets a far future ``Expires`` date on the resouce. versions handling is done by serving static files by different @@ -79,22 +99,16 @@ etc. 
""" - def renderHTTP(self, request): - def setExpireHeader(response): - response = iweb.IResponse(response) - # Don't provide additional resource information to error responses - if response.code < 400: - # the HTTP RFC recommands not going further than 1 year ahead - expires = date.today() + timedelta(days=6*30) - response.headers.setHeader('Expires', mktime(expires.timetuple())) - return response - d = maybeDeferred(super(LongTimeExpiringFile, self).renderHTTP, request) - return d.addCallback(setExpireHeader) + def render(self, request): + # XXX: Don't provide additional resource information to error responses + # + # the HTTP RFC recommands not going further than 1 year ahead + expires = date.today() + timedelta(days=6*30) + request.setHeader('Expires', generateDateTime(mktime(expires.timetuple()))) + return File.render(self, request) -class CubicWebRootResource(resource.PostableResource): - addSlash = False - +class CubicWebRootResource(resource.Resource): def __init__(self, config, debug=None): self.debugmode = debug self.config = config @@ -103,7 +117,11 @@ self.appli = CubicWebPublisher(config, debug=self.debugmode) self.base_url = config['base-url'] self.https_url = config['https-url'] - self.versioned_datadir = 'data%s' % config.instance_md5_version() + self.children = {} + self.static_directories = set(('data%s' % config.instance_md5_version(), + 'data', 'static', 'fckeditor')) + global MAX_POST_LENGTH + MAX_POST_LENGTH = config['max-post-length'] def init_publisher(self): config = self.config @@ -143,35 +161,38 @@ except select.error: return - def locateChild(self, request, segments): + def getChild(self, path, request): """Indicate which resource to use to process down the URL's path""" - if segments: - if segments[0] == 'https': - segments = segments[1:] - if len(segments) >= 2: - if segments[0] in (self.versioned_datadir, 'data', 'static'): - # Anything in data/, static/ is treated as static files - if segments[0] == 'static': - # instance static directory - datadir = self.config.static_directory - elif segments[1] == 'fckeditor': - fckeditordir = self.config.ext_resources['FCKEDITOR_PATH'] - return static.File(fckeditordir), segments[2:] - else: - # cube static data file - datadir = self.config.locate_resource(segments[1]) - if datadir is None: - return None, [] - self.debug('static file %s from %s', segments[-1], datadir) - if segments[0] == 'data': - return static.File(str(datadir)), segments[1:] - else: - return LongTimeExpiringFile(datadir), segments[1:] - elif segments[0] == 'fckeditor': - fckeditordir = self.config.ext_resources['FCKEDITOR_PATH'] - return static.File(fckeditordir), segments[1:] + pre_path = request.path.split('/')[1:] + if pre_path[0] == 'https': + pre_path.pop(0) + directory = pre_path[0] + # Anything in data/, static/, fckeditor/ and the generated versioned + # data directory is treated as static files + if directory in self.static_directories: + # take care fckeditor may appears as root directory or as a data + # subdirectory + if directory == 'static': + return File(self.config.static_directory) + if directory == 'fckeditor': + return File(self.config.ext_resources['FCKEDITOR_PATH']) + if directory != 'data': + # versioned directory, use specific file with http cache + # headers so their are cached for a very long time + cls = LongTimeExpiringFile + else: + cls = File + if path == 'fckeditor': + return cls(self.config.ext_resources['FCKEDITOR_PATH']) + if path == directory: # recurse + return self + datadir = self.config.locate_resource(path) + if 
datadir is None: + return self # recurse + self.debug('static file %s from %s', path, datadir) + return cls(join(datadir, path)) # Otherwise we use this single resource - return self, () + return self def render(self, request): """Render a page from the root resource""" @@ -181,9 +202,19 @@ if self.config['profile']: # default profiler don't trace threads return self.render_request(request) else: - return threads.deferToThread(self.render_request, request) + deferred = threads.deferToThread(self.render_request, request) + return NOT_DONE_YET def render_request(self, request): + try: + return self._render_request(request) + except: + errorstream = StringIO() + traceback.print_exc(file=errorstream) + return HTTPResponse(stream='
<pre>%s</pre>
' % errorstream.getvalue(), + code=500, twisted_request=request) + + def _render_request(self, request): origpath = request.path host = request.host # dual http/https access handling: expect a rewrite rule to prepend @@ -206,13 +237,11 @@ req.set_header('WWW-Authenticate', [('Basic', {'realm' : realm })], raw=False) try: self.appli.connect(req) - except AuthenticationError: - return self.request_auth(req) except Redirect, ex: - return self.redirect(req, ex.location) - if https and req.cnx.anonymous_connection: + return self.redirect(request=req, location=ex.location) + if https and req.session.anonymous_session: # don't allow anonymous on https connection - return self.request_auth(req) + return self.request_auth(request=req) if self.url_rewriter is not None: # XXX should occur before authentication? try: @@ -229,234 +258,139 @@ except DirectResponse, ex: return ex.response except StatusResponse, ex: - return http.Response(stream=ex.content, code=ex.status, - headers=req.headers_out or None) - except RemoteCallFailed, ex: - req.set_header('content-type', 'application/json') - return http.Response(stream=ex.dumps(), - code=responsecode.INTERNAL_SERVER_ERROR) - except NotFound: - result = self.appli.notfound_content(req) - return http.Response(stream=result, code=responsecode.NOT_FOUND, - headers=req.headers_out or None) - except ExplicitLogin: # must be before AuthenticationError - return self.request_auth(req) - except AuthenticationError, ex: - if self.config['auth-mode'] == 'cookie' and getattr(ex, 'url', None): - return self.redirect(req, ex.url) + return HTTPResponse(stream=ex.content, code=ex.status, + twisted_request=req._twreq, + headers=req.headers_out) + except AuthenticationError: + return self.request_auth(request=req) + except LogOut, ex: + if self.config['auth-mode'] == 'cookie' and ex.url: + return self.redirect(request=req, location=ex.url) # in http we have to request auth to flush current http auth # information - return self.request_auth(req, loggedout=True) + return self.request_auth(request=req, loggedout=True) except Redirect, ex: - return self.redirect(req, ex.location) + return self.redirect(request=req, location=ex.location) # request may be referenced by "onetime callback", so clear its entity # cache to avoid memory usage req.drop_entity_cache() - return http.Response(stream=result, code=responsecode.OK, - headers=req.headers_out or None) - - def redirect(self, req, location): - req.headers_out.setHeader('location', str(location)) - self.debug('redirecting to %s', location) - # 303 See other - return http.Response(code=303, headers=req.headers_out) + return HTTPResponse(twisted_request=req._twreq, code=http.OK, + stream=result, headers=req.headers_out) - def request_auth(self, req, loggedout=False): - if self.https_url and req.base_url() != self.https_url: - req.headers_out.setHeader('location', self.https_url + 'login') - return http.Response(code=303, headers=req.headers_out) + def redirect(self, request, location): + self.debug('redirecting to %s', str(location)) + request.headers_out.setHeader('location', str(location)) + # 303 See other + return HTTPResponse(twisted_request=request._twreq, code=303, + headers=request.headers_out) + + def request_auth(self, request, loggedout=False): + if self.https_url and request.base_url() != self.https_url: + return self.redirect(request, self.https_url + 'login') if self.config['auth-mode'] == 'http': - code = responsecode.UNAUTHORIZED + code = http.UNAUTHORIZED else: - code = responsecode.FORBIDDEN + code = 
http.FORBIDDEN if loggedout: - if req.https: - req._base_url = self.base_url - req.https = False - content = self.appli.loggedout_content(req) + if request.https: + request._base_url = self.base_url + request.https = False + content = self.appli.loggedout_content(request) else: - content = self.appli.need_login_content(req) - return http.Response(code, req.headers_out, content) + content = self.appli.need_login_content(request) + return HTTPResponse(twisted_request=request._twreq, + stream=content, code=code, + headers=request.headers_out) -from twisted.internet import defer -from twisted.web2 import fileupload -# XXX set max file size to 200MB: put max upload size in the configuration -# line below for twisted >= 8.0, default param value for earlier version -resource.PostableResource.maxSize = 200*1024*1024 -def parsePOSTData(request, maxMem=100*1024, maxFields=1024, - maxSize=200*1024*1024): - if request.stream.length == 0: - return defer.succeed(None) +JSON_PATHS = set(('json',)) +FRAME_POST_PATHS = set(('validateform',)) - ctype = request.headers.getHeader('content-type') - - if ctype is None: - return defer.succeed(None) - - def updateArgs(data): - args = data - request.args.update(args) +orig_gotLength = http.Request.gotLength +@monkeypatch(http.Request) +def gotLength(self, length): + orig_gotLength(self, length) + if length > MAX_POST_LENGTH: # length is 0 on GET + path = self.channel._path.split('?', 1)[0].rstrip('/').rsplit('/', 1)[-1] + self.clientproto = 'HTTP/1.1' # not yet initialized + self.channel.persistent = 0 # force connection close on cleanup + self.setResponseCode(http.BAD_REQUEST) + if path in JSON_PATHS: # XXX better json path detection + self.setHeader('content-type',"application/json") + body = dumps({'reason': 'request max size exceeded'}) + elif path in FRAME_POST_PATHS: # XXX better frame post path detection + self.setHeader('content-type',"text/html") + body = ('' % dumps( (False, 'request max size exceeded', None) )) + else: + self.setHeader('content-type',"text/html") + body = ("Processing Failed" + "request max size exceeded") + self.setHeader('content-length', str(len(body))) + self.write(body) + # see request.finish(). Done here since we get error due to not full + # initialized request + self.finished = 1 + if not self.queued: + self._cleanup() + for d in self.notifications: + d.callback(None) + self.notifications = [] - def updateArgsAndFiles(data): - args, files = data - request.args.update(args) - request.files.update(files) - def error(f): - f.trap(fileupload.MimeFormatError) - raise http.HTTPError(responsecode.BAD_REQUEST) +@monkeypatch(http.Request) +def requestReceived(self, command, path, version): + """Called by channel when all data has been received. - if ctype.mediaType == 'application' and ctype.mediaSubtype == 'x-www-form-urlencoded': - d = fileupload.parse_urlencoded(request.stream, keep_blank_values=True) - d.addCallbacks(updateArgs, error) - return d - elif ctype.mediaType == 'multipart' and ctype.mediaSubtype == 'form-data': - boundary = ctype.params.get('boundary') - if boundary is None: - return defer.fail(http.HTTPError( - http.StatusResponse(responsecode.BAD_REQUEST, - "Boundary not specified in Content-Type."))) - d = fileupload.parseMultipartFormData(request.stream, boundary, - maxMem, maxFields, maxSize) - d.addCallbacks(updateArgsAndFiles, error) - return d + This method is not intended for users. 
+ """ + self.content.seek(0, 0) + self.args = {} + self.files = {} + self.stack = [] + self.method, self.uri = command, path + self.clientproto = version + x = self.uri.split('?', 1) + if len(x) == 1: + self.path = self.uri else: - raise http.HTTPError(responsecode.BAD_REQUEST) - -server.parsePOSTData = parsePOSTData + self.path, argstring = x + self.args = http.parse_qs(argstring, 1) + # cache the client and server information, we'll need this later to be + # serialized and sent with the request so CGIs will work remotely + self.client = self.channel.transport.getPeer() + self.host = self.channel.transport.getHost() + # Argument processing + ctype = self.getHeader('content-type') + if self.method == "POST" and ctype: + key, pdict = parse_header(ctype) + if key == 'application/x-www-form-urlencoded': + self.args.update(http.parse_qs(self.content.read(), 1)) + elif key == 'multipart/form-data': + self.content.seek(0, 0) + form = FieldStorage(self.content, self.received_headers, + environ={'REQUEST_METHOD': 'POST'}, + keep_blank_values=1, + strict_parsing=1) + for key in form: + value = form[key] + if isinstance(value, list): + self.args[key] = [v.value for v in value] + elif value.filename: + if value.done != -1: # -1 is transfer has been interrupted + self.files[key] = (value.filename, value.file) + else: + self.files[key] = (None, None) + else: + self.args[key] = value.value + self.process() from logging import getLogger from cubicweb import set_log_methods -set_log_methods(CubicWebRootResource, getLogger('cubicweb.twisted')) - - -listiterator = type(iter([])) - -def _gc_debug(all=True): - import gc - from pprint import pprint - from cubicweb.appobject import AppObject - gc.collect() - count = 0 - acount = 0 - fcount = 0 - rcount = 0 - ccount = 0 - scount = 0 - ocount = {} - from rql.stmts import Union - from cubicweb.schema import CubicWebSchema - from cubicweb.rset import ResultSet - from cubicweb.dbapi import Connection, Cursor - from cubicweb.req import RequestSessionBase - from cubicweb.server.repository import Repository - from cubicweb.server.sources.native import NativeSQLSource - from cubicweb.server.session import Session - from cubicweb.devtools.testlib import CubicWebTC - from logilab.common.testlib import TestSuite - from optparse import Values - import types, weakref - for obj in gc.get_objects(): - if isinstance(obj, RequestSessionBase): - count += 1 - if isinstance(obj, Session): - print ' session', obj, referrers(obj, True) - elif isinstance(obj, AppObject): - acount += 1 - elif isinstance(obj, ResultSet): - rcount += 1 - #print ' rset', obj, referrers(obj) - elif isinstance(obj, Repository): - print ' REPO', obj, referrers(obj, True) - #elif isinstance(obj, NativeSQLSource): - # print ' SOURCe', obj, referrers(obj) - elif isinstance(obj, CubicWebTC): - print ' TC', obj, referrers(obj) - elif isinstance(obj, TestSuite): - print ' SUITE', obj, referrers(obj) - #elif isinstance(obj, Values): - # print ' values', '%#x' % id(obj), referrers(obj, True) - elif isinstance(obj, Connection): - ccount += 1 - #print ' cnx', obj, referrers(obj) - #elif isinstance(obj, Cursor): - # ccount += 1 - # print ' cursor', obj, referrers(obj) - elif isinstance(obj, file): - fcount += 1 - # print ' open file', file.name, file.fileno - elif isinstance(obj, CubicWebSchema): - scount += 1 - print ' schema', obj, referrers(obj) - elif not isinstance(obj, (type, tuple, dict, list, set, frozenset, - weakref.ref, weakref.WeakKeyDictionary, - listiterator, - property, classmethod, - types.ModuleType, 
types.MemberDescriptorType, - types.FunctionType, types.MethodType)): - try: - ocount[obj.__class__] += 1 - except KeyError: - ocount[obj.__class__] = 1 - except AttributeError: - pass - if count: - print ' NB REQUESTS/SESSIONS', count - if acount: - print ' NB APPOBJECTS', acount - if ccount: - print ' NB CONNECTIONS', ccount - if rcount: - print ' NB RSETS', rcount - if scount: - print ' NB SCHEMAS', scount - if fcount: - print ' NB FILES', fcount - if all: - ocount = sorted(ocount.items(), key=lambda x: x[1], reverse=True)[:20] - pprint(ocount) - if gc.garbage: - print 'UNREACHABLE', gc.garbage - -def referrers(obj, showobj=False): - try: - return sorted(set((type(x), showobj and x or getattr(x, '__name__', '%#x' % id(x))) - for x in _referrers(obj))) - except TypeError: - s = set() - unhashable = [] - for x in _referrers(obj): - try: - s.add(x) - except TypeError: - unhashable.append(x) - return sorted(s) + unhashable - -def _referrers(obj, seen=None, level=0): - import gc, types - from cubicweb.schema import CubicWebRelationSchema, CubicWebEntitySchema - interesting = [] - if seen is None: - seen = set() - for x in gc.get_referrers(obj): - if id(x) in seen: - continue - seen.add(id(x)) - if isinstance(x, types.FrameType): - continue - if isinstance(x, (CubicWebRelationSchema, CubicWebEntitySchema)): - continue - if isinstance(x, (list, tuple, set, dict, listiterator)): - if level >= 5: - pass - #interesting.append(x) - else: - interesting += _referrers(x, seen, level+1) - else: - interesting.append(x) - return interesting +LOGGER = getLogger('cubicweb.twisted') +set_log_methods(CubicWebRootResource, LOGGER) def run(config, debug): # create the site @@ -464,7 +398,7 @@ website = server.Site(root_resource) # serve it via standard HTTP on port set in the configuration port = config['port'] or 8080 - reactor.listenTCP(port, channel.HTTPFactory(website)) + reactor.listenTCP(port, website) logger = getLogger('cubicweb.twisted') if not debug: if sys.platform == 'win32': diff -r b619531ddbd2 -r b6e250dd7a7d etwist/twconfig.py --- a/etwist/twconfig.py Fri Apr 23 12:40:48 2010 +0200 +++ b/etwist/twconfig.py Fri Apr 23 12:42:53 2010 +0200 @@ -30,45 +30,51 @@ {'type' : 'string', 'default': None, 'help': 'host name if not correctly detectable through gethostname', - 'group': 'main', 'inputlevel': 1, + 'group': 'main', 'level': 1, }), ('port', {'type' : 'int', 'default': None, 'help': 'http server port number (default to 8080)', - 'group': 'main', 'inputlevel': 0, + 'group': 'main', 'level': 0, }), ('pid-file', {'type' : 'string', 'default': Method('default_pid_file'), 'help': 'repository\'s pid file', - 'group': 'main', 'inputlevel': 2, + 'group': 'main', 'level': 2, }), ('uid', {'type' : 'string', 'default': None, 'help': 'if this option is set, use the specified user to start \ the repository rather than the user running the command', - 'group': 'main', 'inputlevel': WebConfiguration.mode == 'system' + 'group': 'main', 'level': WebConfiguration.mode == 'system' + }), + ('max-post-length', + {'type' : 'bytes', + 'default': '100MB', + 'help': 'maximum length of HTTP request. 
Default to 100 MB.', + 'group': 'main', 'level': 1, }), ('session-time', {'type' : 'time', 'default': '30min', 'help': 'session expiration time, default to 30 minutes', - 'group': 'main', 'inputlevel': 1, + 'group': 'main', 'level': 1, }), ('profile', {'type' : 'string', 'default': None, 'help': 'profile code and use the specified file to store stats if this option is set', - 'group': 'main', 'inputlevel': 2, + 'group': 'main', 'level': 2, }), ('pyro-server', {'type' : 'yn', # pyro is only a recommends by default, so don't activate it here 'default': False, 'help': 'run a pyro server', - 'group': 'main', 'inputlevel': 1, + 'group': 'main', 'level': 1, }), ) + WebConfiguration.options) diff -r b619531ddbd2 -r b6e250dd7a7d ext/xhtml2fo.py --- a/ext/xhtml2fo.py Fri Apr 23 12:40:48 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,137 +0,0 @@ -from xml.etree.ElementTree import QName -from pysixt.standard.xhtml_xslfo.transformer import XHTML2FOTransformer -from pysixt.utils.xslfo.standard import cm -from pysixt.utils.xslfo import SimplePageMaster -from pysixt.standard.xhtml_xslfo.default_styling import default_styles -from pysixt.standard.xhtml_xslfo import XHTML_NS - - -class ReportTransformer(XHTML2FOTransformer): - """ - Class transforming an XHTML input tree into a FO document - displaying reports (one report for each
- element in the input tree. - """ - - def __init__(self, section, - page_width=21.0, page_height=29.7, - margin_top=1.0, margin_bottom=1.0, - margin_left=1.0, margin_right=1.0, - header_footer_height=0.75, - standard_font_size=11.0, default_lang=u"fr" ): - """ - Initializes a transformer turning an XHTML input tree - containing
elements representing - main content sections into a FO output tree displaying the - reports. - - page_width: float - width of the page (in cm) - page_height: float - height of the page (in cm) - margin_top: float - top margin of the page (in cm) - margin_bottom: float - bottom margin of the page (in cm) - margin_left: float - left margin of the page (in cm) - margin_right: float - right margin of the page (in cm) - header_footer_height: float - height of the header or the footer of the - page that the page number (if any) will be - inserted in. - standard_font_size: float - standard size of the font (in pt) - default_lang: u"" - default language (used for hyphenation) - """ - self.section = section - self.page_width = page_width - self.page_height = page_height - - self.page_tmargin = margin_top - self.page_bmargin = margin_bottom - self.page_lmargin = margin_left - self.page_rmargin = margin_right - - self.hf_height = header_footer_height - - self.font_size = standard_font_size - self.lang = default_lang - - XHTML2FOTransformer.__init__(self) - - - def define_pagemasters(self): - """ - Defines the page masters for the FO output document. - """ - pm = SimplePageMaster(u"page-report") - pm.set_page_dims( self.page_width*cm, self.page_height*cm ) - pm.set_page_margins({u'top' : self.page_tmargin*cm, - u'bottom': self.page_bmargin*cm, - u'left' : self.page_lmargin*cm, - u'right' : self.page_rmargin*cm }) - pm.add_peripheral_region(u"end", self.hf_height) - dims = {} - dims[u"bottom"] = self.hf_height + 0.25 - pm.set_main_region_margins(dims) - return [pm] - - def _visit_report(self, in_elt, _out_elt, params): - """ - Specific visit function for the input
elements whose class is - "report". The _root_visit method of this class selects these input - elements and asks the process of these elements with this specific - visit function. - """ - - ps = self.create_pagesequence(u"page-report") - props = { u"force-page-count": u"no-force", - u"initial-page-number": u"1", - u"format": u"1", } - self._output_properties(ps, props) - - sc = self.create_staticcontent(ps, u"end") - sc_bl = self.create_block(sc) - attrs = { u"hyphenate": u"false", } - attrs[u"font-size"] = u"%.1fpt" % (self.font_size * 0.7) - attrs[u"language"] = self.lang - attrs[u"text-align"] = u"center" - self._output_properties(sc_bl, attrs) - sc_bl.text = u"Page" + u" " # ### Should be localised! - pn = self.create_pagenumber(sc_bl) - pn.tail = u"/" - self.create_pagenumbercitation( - sc_bl, u"last-block-of-report-%d" % params[u"context_pos"]) - - fl = self.create_flow(ps, u"body") - bl = self.create_block(fl) - - # Sets on the highest block element the properties of the XHTML body - # element. These properties (at the least the inheritable ones) will - # be inherited by all the future FO elements. - bodies = list(self.in_tree.getiterator(QName(XHTML_NS, u"body"))) - if len(bodies) > 0: - attrs = self._extract_properties([bodies[0]]) - else: - attrs = default_styles[u"body"].copy() - attrs[u"font-size"] = u"%.1fpt" % self.font_size - attrs[u"language"] = self.lang - self._output_properties(bl,attrs) - - # Processes the report content - self._copy_text(in_elt, bl) - self._process_nodes(in_elt.getchildren(), bl) - - # Inserts an empty block at the end of the report in order to be able - # to compute the last page number of this report. - last_bl = self.create_block(bl) - props = { u"keep-with-previous": u"always", } - props[u"id"] = u"last-block-of-report-%d" % params[u"context_pos"] - self._output_properties(last_bl,props) - - - def _root_visit(self): - """ - Visit function called when starting the process of the input tree. 
- """ - content = [ d for d in self.in_tree.getiterator(QName(XHTML_NS, u"div")) - if d.get(u"id") == self.section ] - # Asks the process of the report elements with a specific visit - # function - self._process_nodes(content, self.fo_root, - with_function=self._visit_report) - diff -r b619531ddbd2 -r b6e250dd7a7d hooks/email.py --- a/hooks/email.py Fri Apr 23 12:40:48 2010 +0200 +++ b/hooks/email.py Fri Apr 23 12:42:53 2010 +0200 @@ -28,7 +28,7 @@ if self.condition(): self.session.execute( 'SET X %s Y WHERE X eid %%(x)s, Y eid %%(y)s' % self.rtype, - {'x': self.entity.eid, 'y': self.email.eid}, 'x') + {'x': self.entity.eid, 'y': self.email.eid}) class SetPrimaryEmailRelationOp(SetUseEmailRelationOp): diff -r b619531ddbd2 -r b6e250dd7a7d hooks/integrity.py --- a/hooks/integrity.py Fri Apr 23 12:40:48 2010 +0200 +++ b/hooks/integrity.py Fri Apr 23 12:42:53 2010 +0200 @@ -77,7 +77,7 @@ continue if rtype in pendingrtypes: continue - if not session.execute(self.base_rql % rtype, {'x': eid}, 'x'): + if not session.execute(self.base_rql % rtype, {'x': eid}): etype = session.describe(eid)[0] _ = session._ msg = _('at least one relation %(rtype)s is required on ' @@ -304,7 +304,7 @@ # don't do anything if the entity is being created or deleted if not (eid in pendingeids or eid in neweids): etype = session.describe(eid)[0] - session.execute(self.base_rql % (etype, rtype), {'x': eid}, 'x') + session.execute(self.base_rql % (etype, rtype), {'x': eid}) class _DelayedDeleteSEntityOp(_DelayedDeleteOp): """delete orphan subject entity of a composite relation""" diff -r b619531ddbd2 -r b6e250dd7a7d hooks/metadata.py --- a/hooks/metadata.py Fri Apr 23 12:40:48 2010 +0200 +++ b/hooks/metadata.py Fri Apr 23 12:42:53 2010 +0200 @@ -102,8 +102,7 @@ def precommit_event(self): self.session.execute('SET X owned_by U WHERE C owned_by U, C eid %(c)s,' 'NOT EXISTS(X owned_by U, X eid %(x)s)', - {'c': self.compositeeid, 'x': self.composedeid}, - ('c', 'x')) + {'c': self.compositeeid, 'x': self.composedeid}) class SyncCompositeOwner(MetaDataHook): diff -r b619531ddbd2 -r b6e250dd7a7d hooks/notification.py --- a/hooks/notification.py Fri Apr 23 12:40:48 2010 +0200 +++ b/hooks/notification.py Fri Apr 23 12:42:53 2010 +0200 @@ -124,7 +124,7 @@ rqlsel.append(var) rqlrestr.append('X %s %s' % (attr, var)) rql = 'Any %s WHERE %s' % (','.join(rqlsel), ','.join(rqlrestr)) - rset = session.execute(rql, {'x': self.entity.eid}, 'x') + rset = session.execute(rql, {'x': self.entity.eid}) for i, attr in enumerate(attrs): oldvalue = rset[0][i] newvalue = self.entity[attr] diff -r b619531ddbd2 -r b6e250dd7a7d hooks/syncschema.py --- a/hooks/syncschema.py Fri Apr 23 12:40:48 2010 +0200 +++ b/hooks/syncschema.py Fri Apr 23 12:42:53 2010 +0200 @@ -1004,7 +1004,7 @@ DropRelationTable(session, rschema.type) # if this is the last instance, drop associated relation type if lastrel and not self.eidto in pendings: - execute('DELETE CWRType X WHERE X eid %(x)s', {'x': self.eidto}, 'x') + execute('DELETE CWRType X WHERE X eid %(x)s', {'x': self.eidto}) MemSchemaRDefDel(session, (subjschema, rschema, objschema)) diff -r b619531ddbd2 -r b6e250dd7a7d hooks/syncsession.py --- a/hooks/syncsession.py Fri Apr 23 12:40:48 2010 +0200 +++ b/hooks/syncsession.py Fri Apr 23 12:42:53 2010 +0200 @@ -36,7 +36,7 @@ no query should be emitted while comitting """ rql = 'Any N WHERE G eid %(x)s, G name N' - result = session.execute(rql, {'x': kwargs['geid']}, 'x', build_descr=False) + result = session.execute(rql, {'x': kwargs['geid']}, build_descr=False) 
hook.Operation.__init__(self, session, *args, **kwargs) self.group = result[0][0] @@ -216,7 +216,7 @@ if not session.describe(eidfrom)[0] == 'CWProperty': return key, value = session.execute('Any K,V WHERE P eid %(x)s,P pkey K,P value V', - {'x': eidfrom}, 'x')[0] + {'x': eidfrom})[0] if session.vreg.property_info(key)['sitewide']: qname = role_name('for_user', 'subject') msg = session._("site-wide property can't be set for user") @@ -234,7 +234,7 @@ def __call__(self): session = self._cw key = session.execute('Any K WHERE P eid %(x)s, P pkey K', - {'x': self.eidfrom}, 'x')[0][0] + {'x': self.eidfrom})[0][0] session.transaction_data.setdefault('pendingrelations', []).append( (self.eidfrom, self.rtype, self.eidto)) for session_ in get_user_sessions(session.repo, self.eidto): diff -r b619531ddbd2 -r b6e250dd7a7d hooks/test/unittest_bookmarks.py --- a/hooks/test/unittest_bookmarks.py Fri Apr 23 12:40:48 2010 +0200 +++ b/hooks/test/unittest_bookmarks.py Fri Apr 23 12:42:53 2010 +0200 @@ -1,7 +1,7 @@ """ :organization: Logilab -:copyright: 2001-2009 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. +:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr :license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses """ @@ -18,10 +18,10 @@ self.commit() self.execute('DELETE X bookmarked_by U WHERE U login "admin"') self.commit() - self.failUnless(self.execute('Any X WHERE X eid %(x)s', {'x': beid}, 'x')) + self.failUnless(self.execute('Any X WHERE X eid %(x)s', {'x': beid})) self.execute('DELETE X bookmarked_by U WHERE U login "anon"') self.commit() - self.failIf(self.execute('Any X WHERE X eid %(x)s', {'x': beid}, 'x')) + self.failIf(self.execute('Any X WHERE X eid %(x)s', {'x': beid})) if __name__ == '__main__': unittest_main() diff -r b619531ddbd2 -r b6e250dd7a7d hooks/test/unittest_syncschema.py --- a/hooks/test/unittest_syncschema.py Fri Apr 23 12:40:48 2010 +0200 +++ b/hooks/test/unittest_syncschema.py Fri Apr 23 12:42:53 2010 +0200 @@ -32,17 +32,17 @@ def _set_perms(self, eid): self.execute('SET X read_permission G WHERE X eid %(x)s, G is CWGroup', - {'x': eid}, 'x') + {'x': eid}) self.execute('SET X add_permission G WHERE X eid %(x)s, G is CWGroup, G name "managers"', - {'x': eid}, 'x') + {'x': eid}) self.execute('SET X delete_permission G WHERE X eid %(x)s, G is CWGroup, G name "owners"', - {'x': eid}, 'x') + {'x': eid}) def _set_attr_perms(self, eid): self.execute('SET X read_permission G WHERE X eid %(x)s, G is CWGroup', - {'x': eid}, 'x') + {'x': eid}) self.execute('SET X update_permission G WHERE X eid %(x)s, G is CWGroup, G name "managers"', - {'x': eid}, 'x') + {'x': eid}) def test_base(self): schema = self.repo.schema @@ -88,7 +88,7 @@ 'WHERE RT name "concerne2", E name "CWUser"')[0][0] self._set_perms(rdefeid) self.commit() - self.execute('DELETE CWRelation X WHERE X eid %(x)s', {'x': concerne2_rdef_eid}, 'x') + self.execute('DELETE CWRelation X WHERE X eid %(x)s', {'x': concerne2_rdef_eid}) self.commit() self.failUnless('concerne2' in schema['CWUser'].subject_relations()) self.failIf('concerne2' in schema['Societe2'].subject_relations()) @@ -248,7 +248,7 @@ attreid = self.execute('INSERT CWAttribute X: X cardinality "11", X defaultval "noname", X indexed TRUE, X relation_type RT, X from_entity E, X to_entity F ' 'WHERE RT name "messageid", E name "BaseTransition", F name "String"')[0][0] assert self.execute('SET X read_permission Y WHERE X eid %(x)s, Y name "managers"', - {'x': 
attreid}, 'x') + {'x': attreid}) self.commit() self.schema.rebuild_infered_relations() self.failUnless('Transition' in self.schema['messageid'].subjects()) @@ -299,10 +299,10 @@ if not getattr(cstr, 'eid', None): self.skip('start me alone') # bug in schema reloading, constraint's eid not restored self.execute('SET X value %(v)s WHERE X eid %(x)s', - {'x': cstr.eid, 'v': u"u'normal', u'auto', u'new'"}, 'x') + {'x': cstr.eid, 'v': u"u'normal', u'auto', u'new'"}) self.execute('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X ' 'WHERE CT name %(ct)s, EDEF eid %(x)s', - {'ct': 'SizeConstraint', 'value': u'max=10', 'x': rdef.eid}, 'x') + {'ct': 'SizeConstraint', 'value': u'max=10', 'x': rdef.eid}) self.commit() cstr = rdef.constraint_by_type('StaticVocabularyConstraint') self.assertEquals(cstr.values, (u'normal', u'auto', u'new')) diff -r b619531ddbd2 -r b6e250dd7a7d i18n/en.po --- a/i18n/en.po Fri Apr 23 12:40:48 2010 +0200 +++ b/i18n/en.po Fri Apr 23 12:42:53 2010 +0200 @@ -30,6 +30,9 @@ msgid " from state %(fromstate)s to state %(tostate)s\n" msgstr "" +msgid " :" +msgstr "" + #, python-format msgid "%(attr)s set to %(newvalue)s" msgstr "" @@ -51,6 +54,22 @@ msgstr "" #, python-format +msgid "%(value)r doesn't match the %(regexp)r regular expression" +msgstr "" + +#, python-format +msgid "%(value)r must be %(op)s %(boundary)s" +msgstr "" + +#, python-format +msgid "%(value)r must be <= %(boundary)s" +msgstr "" + +#, python-format +msgid "%(value)r must be >= %(boundary)s" +msgstr "" + +#, python-format msgid "%d days" msgstr "" @@ -188,13 +207,13 @@ msgid "AND" msgstr "" -msgid "Add permissions" -msgstr "" - msgid "Any" msgstr "" -msgid "Attributes" +msgid "Attributes permissions:" +msgstr "" + +msgid "Attributes with non default permissions:" msgstr "" # schema pot file, generated on 2009-09-16 16:46:55 @@ -221,6 +240,9 @@ msgid "BoundConstraint" msgstr "bound constraint" +msgid "BoundaryConstraint" +msgstr "" + msgid "Browse by category" msgstr "" @@ -362,13 +384,10 @@ msgid "Decimal_plural" msgstr "Decimal numbers" -msgid "Delete permissions" -msgstr "" - msgid "Do you want to delete the following element(s) ?" 
msgstr "" -msgid "Download page as pdf" +msgid "Download schema as OWL" msgstr "" msgctxt "inlined:CWUser.use_email.subject" @@ -384,6 +403,9 @@ msgid "Entities" msgstr "" +msgid "Entity types" +msgstr "" + msgid "ExternalUri" msgstr "External Uri" @@ -411,6 +433,9 @@ msgid "Help" msgstr "" +msgid "Index" +msgstr "" + msgid "Instance" msgstr "" @@ -504,15 +529,27 @@ msgid "No result matching query" msgstr "" +msgid "Non exhaustive list of views that may apply to entities of this type" +msgstr "" + msgid "OR" msgstr "" +msgid "Parent classes:" +msgstr "" + msgid "Password" msgstr "Password" msgid "Password_plural" msgstr "Passwords" +msgid "Permissions for entity types" +msgstr "" + +msgid "Permissions for relations" +msgstr "" + msgid "Please note that this is only a shallow copy" msgstr "" @@ -531,9 +568,6 @@ msgid "RQLVocabularyConstraint" msgstr "RQL vocabulary constraint" -msgid "Read permissions" -msgstr "" - msgid "Recipients:" msgstr "" @@ -543,6 +577,9 @@ msgid "Registry's content" msgstr "" +msgid "Relation types" +msgstr "" + msgid "Relations" msgstr "" @@ -580,6 +617,9 @@ msgid "String_plural" msgstr "Strings" +msgid "Sub-classes:" +msgstr "" + msgid "SubWorkflowExitPoint" msgstr "Subworkflow exit-point" @@ -606,6 +646,9 @@ msgid "The view %s could not be found" msgstr "" +msgid "There is no default workflow" +msgstr "" + msgid "This BaseTransition" msgstr "This abstract transition" @@ -672,6 +715,9 @@ msgid "This WorkflowTransition" msgstr "This workflow-transition" +msgid "This entity type permissions:" +msgstr "" + msgid "Time" msgstr "Time" @@ -696,9 +742,6 @@ msgid "Unreachable objects" msgstr "" -msgid "Update permissions" -msgstr "" - msgid "Used by:" msgstr "" @@ -776,183 +819,12 @@ msgid "abstract base class for transitions" msgstr "" -msgid "access type" -msgstr "" - msgid "action(s) on this selection" msgstr "" msgid "actions" msgstr "" -msgid "actions_about" -msgstr "" - -msgid "actions_about_description" -msgstr "" - -msgid "actions_addentity" -msgstr "add an entity of this type" - -msgid "actions_addentity_description" -msgstr "" - -msgid "actions_addrelated" -msgstr "" - -msgid "actions_addrelated_description" -msgstr "" - -msgid "actions_cancel" -msgstr "cancel the selection" - -msgid "actions_cancel_description" -msgstr "" - -msgid "actions_changelog" -msgstr "" - -msgid "actions_changelog_description" -msgstr "" - -msgid "actions_copy" -msgstr "copy" - -msgid "actions_copy_description" -msgstr "" - -msgid "actions_delete" -msgstr "delete" - -msgid "actions_delete_description" -msgstr "" - -msgid "actions_download_as_owl" -msgstr "download as owl" - -msgid "actions_download_as_owl_description" -msgstr "" - -msgid "actions_edit" -msgstr "modify" - -msgid "actions_edit_description" -msgstr "" - -msgid "actions_embed" -msgstr "embed" - -msgid "actions_embed_description" -msgstr "" - -msgid "actions_entitiesoftype" -msgstr "" - -msgid "actions_entitiesoftype_description" -msgstr "" - -msgid "actions_follow" -msgstr "follow" - -msgid "actions_follow_description" -msgstr "" - -msgid "actions_help" -msgstr "" - -msgid "actions_help_description" -msgstr "" - -msgid "actions_logout" -msgstr "logout" - -msgid "actions_logout_description" -msgstr "" - -msgid "actions_manage" -msgstr "manage" - -msgid "actions_manage_description" -msgstr "" - -msgid "actions_managepermission" -msgstr "manage permissions" - -msgid "actions_managepermission_description" -msgstr "" - -msgid "actions_muledit" -msgstr "modify all" - -msgid "actions_muledit_description" -msgstr "" - -msgid 
"actions_myinfos" -msgstr "my profile" - -msgid "actions_myinfos_description" -msgstr "" - -msgid "actions_myprefs" -msgstr "my preferences" - -msgid "actions_myprefs_description" -msgstr "" - -msgid "actions_poweredby" -msgstr "" - -msgid "actions_poweredby_description" -msgstr "" - -msgid "actions_prefs" -msgstr "preferences" - -msgid "actions_prefs_description" -msgstr "" - -msgid "actions_schema" -msgstr "schema" - -msgid "actions_schema_description" -msgstr "" - -msgid "actions_select" -msgstr "select" - -msgid "actions_select_description" -msgstr "" - -msgid "actions_sendemail" -msgstr "send email" - -msgid "actions_sendemail_description" -msgstr "" - -msgid "actions_siteconfig" -msgstr "site configuration" - -msgid "actions_siteconfig_description" -msgstr "" - -msgid "actions_siteinfo" -msgstr "site information" - -msgid "actions_siteinfo_description" -msgstr "" - -msgid "actions_view" -msgstr "view" - -msgid "actions_view_description" -msgstr "" - -msgid "actions_workflow" -msgstr "see workflow" - -msgid "actions_workflow_description" -msgstr "" - msgid "activate" msgstr "" @@ -1198,9 +1070,6 @@ msgid "attribute" msgstr "" -msgid "attributes with modified permissions:" -msgstr "" - msgid "august" msgstr "" @@ -1517,9 +1386,6 @@ msgid "condition" msgstr "condition" -msgid "condition:" -msgstr "condtion:" - msgctxt "RQLExpression" msgid "condition_object" msgstr "condition of" @@ -1527,6 +1393,9 @@ msgid "condition_object" msgstr "condition of" +msgid "conditions" +msgstr "" + msgid "config mode" msgstr "" @@ -1563,6 +1432,9 @@ msgid "constraints applying on this relation" msgstr "" +msgid "content type" +msgstr "" + msgid "contentnavigation" msgstr "contextual components" @@ -1578,12 +1450,6 @@ msgid "contentnavigation_metadata_description" msgstr "" -msgid "contentnavigation_pdfview" -msgstr "view page as pdf icon" - -msgid "contentnavigation_pdfview_description" -msgstr "" - msgid "contentnavigation_prevnext" msgstr "previous / next entity" @@ -1768,6 +1634,9 @@ msgid "creation" msgstr "" +msgid "creation date" +msgstr "" + msgid "creation time of an entity" msgstr "" @@ -1803,18 +1672,33 @@ msgid "custom_workflow_object" msgstr "custom workflow of" -msgid "cwetype-schema-image" -msgstr "schema" - -msgid "cwetype-schema-permissions" +msgid "cwetype-box" +msgstr "\"box\" view" + +msgid "cwetype-description" +msgstr "description" + +msgid "cwetype-permissions" msgstr "permissions" -msgid "cwetype-schema-text" -msgstr "description" +msgid "cwetype-views" +msgstr "views" msgid "cwetype-workflow" msgstr "workflow" +msgid "cwgroup-main" +msgstr "description" + +msgid "cwgroup-permissions" +msgstr "permissions" + +msgid "cwrtype-description" +msgstr "description" + +msgid "cwrtype-permissions" +msgstr "permissions" + msgid "cwuri" msgstr "internal uri" @@ -1842,6 +1726,9 @@ msgid "default user workflow" msgstr "" +msgid "default value" +msgstr "" + msgid "default workflow for an entity type" msgstr "" @@ -2068,18 +1955,12 @@ msgid "detach attached file" msgstr "" -msgid "display order of the action" -msgstr "" - msgid "display order of the box" msgstr "" msgid "display order of the component" msgstr "" -msgid "display the action or not" -msgstr "" - msgid "display the box or not" msgstr "" @@ -2285,6 +2166,9 @@ msgid "final" msgstr "" +msgid "first name" +msgstr "" + msgid "firstname" msgstr "" @@ -2398,10 +2282,6 @@ msgid "granted to groups" msgstr "" -#, python-format -msgid "graphical representation of %s" -msgstr "" - msgid "graphical representation of the instance'schema" msgstr "" 
@@ -2425,9 +2305,6 @@ msgid "groups to which the permission is granted" msgstr "" -msgid "groups:" -msgstr "" - msgid "guests" msgstr "" @@ -2466,6 +2343,18 @@ msgid "i18n_login_popup" msgstr "login" +msgid "i18ncard_*" +msgstr "0..n" + +msgid "i18ncard_+" +msgstr "1..n" + +msgid "i18ncard_1" +msgstr "1" + +msgid "i18ncard_?" +msgstr "0..1" + msgid "i18nprevnext_next" msgstr "next" @@ -2501,12 +2390,6 @@ msgid "image" msgstr "" -msgid "in memory entity schema" -msgstr "" - -msgid "in memory relation schema" -msgstr "" - msgid "in_group" msgstr "in group" @@ -2537,9 +2420,6 @@ msgid "incorrect value (%(value)s) for type \"%(type)s\"" msgstr "" -msgid "index" -msgstr "" - msgid "index this attribute's value in the plain text index" msgstr "" @@ -2609,9 +2489,19 @@ msgid "invalid action %r" msgstr "" +#, python-format +msgid "invalid value %(value)s, it must be one of %(choices)s" +msgstr "" + msgid "is" msgstr "" +msgid "is object of:" +msgstr "" + +msgid "is subject of:" +msgstr "" + msgid "" "is the subject/object entity of the relation composed of the other ? This " "implies that when the composite is deleted, composants are also deleted." @@ -2659,6 +2549,12 @@ msgid "last connection date" msgstr "" +msgid "last login time" +msgstr "" + +msgid "last name" +msgstr "" + msgid "last usage" msgstr "" @@ -2803,6 +2699,9 @@ msgid "more actions" msgstr "" +msgid "more info about this workflow" +msgstr "" + msgid "multiple edit" msgstr "" @@ -2944,8 +2843,8 @@ msgid "object" msgstr "" -msgid "object_plural:" -msgstr "objects:" +msgid "object type" +msgstr "" msgid "october" msgstr "" @@ -2971,6 +2870,9 @@ msgid "opened web sessions" msgstr "" +msgid "options" +msgstr "" + msgid "order" msgstr "" @@ -3026,10 +2928,7 @@ msgid "permission" msgstr "" -msgid "permissions for entities" -msgstr "" - -msgid "permissions for relations" +msgid "permissions" msgstr "" msgid "permissions for this entity" @@ -3106,11 +3005,17 @@ msgid "project" msgstr "" +msgid "rdef-description" +msgstr "description" + +msgid "rdef-permissions" +msgstr "permissions" + msgid "read" msgstr "" msgid "read_perm" -msgstr "read perm" +msgstr "read permission" msgid "read_permission" msgstr "can be read by" @@ -3147,6 +3052,9 @@ msgid "related entity has no workflow set" msgstr "" +msgid "relation" +msgstr "" + #, python-format msgid "relation %(relname)s of %(ent)s" msgstr "" @@ -3175,9 +3083,6 @@ msgid "relation_type_object" msgstr "relation definitions" -msgid "relations" -msgstr "" - msgid "relations deleted" msgstr "" @@ -3219,6 +3124,9 @@ msgid "require_permission_object" msgstr "required by" +msgid "required" +msgstr "" + msgid "required attribute" msgstr "" @@ -3262,11 +3170,17 @@ msgid "schema's permissions definitions" msgstr "" +msgid "schema-entity-types" +msgstr "" + msgid "schema-image" -msgstr "schema" - -msgid "schema-text" -msgstr "description" +msgstr "image" + +msgid "schema-relation-types" +msgstr "" + +msgid "schema-security" +msgstr "permissions" msgid "search" msgstr "" @@ -3453,12 +3367,12 @@ msgid "subject" msgstr "" +msgid "subject type" +msgstr "" + msgid "subject/object cardinality" msgstr "" -msgid "subject_plural:" -msgstr "subjects:" - msgid "subworkflow" msgstr "" @@ -3857,6 +3771,9 @@ msgid "used to grant a permission to a group" msgstr "" +msgid "user" +msgstr "" + #, python-format msgid "" "user %s has made the following change(s):\n" @@ -3895,6 +3812,14 @@ msgid "value associated to this key is not editable manually" msgstr "" +#, python-format +msgid "value should have maximum size of %s" 
+msgstr "" + +#, python-format +msgid "value should have minimum size of %s" +msgstr "" + msgid "vcard" msgstr "" @@ -3913,15 +3838,18 @@ msgid "view history" msgstr "" +msgid "view identifier" +msgstr "" + +msgid "view title" +msgstr "" + msgid "view workflow" msgstr "" msgid "view_index" msgstr "index" -msgid "views" -msgstr "" - msgid "visible" msgstr "" @@ -3944,6 +3872,12 @@ msgid "wf_info_for_object" msgstr "workflow history" +msgid "wf_tab_info" +msgstr "" + +msgid "wfgraph" +msgstr "" + msgid "" "when multiple addresses are equivalent (such as python-projects@logilab.org " "and python-projects@lists.logilab.org), set this to indicate which is the " diff -r b619531ddbd2 -r b6e250dd7a7d i18n/es.po --- a/i18n/es.po Fri Apr 23 12:40:48 2010 +0200 +++ b/i18n/es.po Fri Apr 23 12:42:53 2010 +0200 @@ -35,6 +35,9 @@ msgid " from state %(fromstate)s to state %(tostate)s\n" msgstr " del estado %(fromstate)s hacia el estado %(tostate)s\n" +msgid " :" +msgstr "" + #, python-format msgid "%(attr)s set to %(newvalue)s" msgstr "" @@ -56,6 +59,22 @@ msgstr "%(subject)s %(etype)s #%(eid)s (%(login)s)" #, python-format +msgid "%(value)r doesn't match the %(regexp)r regular expression" +msgstr "" + +#, python-format +msgid "%(value)r must be %(op)s %(boundary)s" +msgstr "" + +#, python-format +msgid "%(value)r must be <= %(boundary)s" +msgstr "" + +#, python-format +msgid "%(value)r must be >= %(boundary)s" +msgstr "" + +#, python-format msgid "%d days" msgstr "%d días" @@ -196,14 +215,14 @@ msgid "AND" msgstr "Y" -msgid "Add permissions" -msgstr "Añadir autorizaciónes" - msgid "Any" msgstr "Cualquiera" -msgid "Attributes" -msgstr "Atributos" +msgid "Attributes permissions:" +msgstr "" + +msgid "Attributes with non default permissions:" +msgstr "" # schema pot file, generated on 2009-09-16 16:46:55 # @@ -229,6 +248,9 @@ msgid "BoundConstraint" msgstr "" +msgid "BoundaryConstraint" +msgstr "" + msgid "Browse by category" msgstr "Busca por categoría" @@ -370,13 +392,10 @@ msgid "Decimal_plural" msgstr "Decimales" -msgid "Delete permissions" -msgstr "Autorización de suprimir" - msgid "Do you want to delete the following element(s) ?" 
msgstr "Desea suprimir el(los) elemento(s) siguiente(s)" -msgid "Download page as pdf" +msgid "Download schema as OWL" msgstr "" msgctxt "inlined:CWUser.use_email.subject" @@ -392,6 +411,9 @@ msgid "Entities" msgstr "Entidades" +msgid "Entity types" +msgstr "" + msgid "ExternalUri" msgstr "" @@ -419,6 +441,9 @@ msgid "Help" msgstr "" +msgid "Index" +msgstr "" + msgid "Instance" msgstr "" @@ -512,15 +537,27 @@ msgid "No result matching query" msgstr "Ningún resultado corresponde a su búsqueda" +msgid "Non exhaustive list of views that may apply to entities of this type" +msgstr "" + msgid "OR" msgstr "O" +msgid "Parent classes:" +msgstr "" + msgid "Password" msgstr "Contraseña" msgid "Password_plural" msgstr "Contraseñas" +msgid "Permissions for entity types" +msgstr "" + +msgid "Permissions for relations" +msgstr "" + msgid "Please note that this is only a shallow copy" msgstr "Recuerde que no es más que una copia superficial" @@ -539,9 +576,6 @@ msgid "RQLVocabularyConstraint" msgstr "" -msgid "Read permissions" -msgstr "Autorización de leer" - msgid "Recipients:" msgstr "Destinatarios" @@ -551,6 +585,9 @@ msgid "Registry's content" msgstr "" +msgid "Relation types" +msgstr "" + msgid "Relations" msgstr "Relaciones" @@ -588,6 +625,9 @@ msgid "String_plural" msgstr "Cadenas de caracteres" +msgid "Sub-classes:" +msgstr "" + msgid "SubWorkflowExitPoint" msgstr "" @@ -614,6 +654,9 @@ msgid "The view %s could not be found" msgstr "La vista %s no ha podido ser encontrada" +msgid "There is no default workflow" +msgstr "" + msgid "This BaseTransition" msgstr "" @@ -680,6 +723,9 @@ msgid "This WorkflowTransition" msgstr "" +msgid "This entity type permissions:" +msgstr "" + msgid "Time" msgstr "Hora" @@ -704,9 +750,6 @@ msgid "Unreachable objects" msgstr "" -msgid "Update permissions" -msgstr "Autorización de modificar" - msgid "Used by:" msgstr "Utilizado por :" @@ -799,183 +842,12 @@ msgid "abstract base class for transitions" msgstr "" -msgid "access type" -msgstr "Tipo de Acceso" - msgid "action(s) on this selection" msgstr "acción(es) en esta selección" msgid "actions" msgstr "acciones" -msgid "actions_about" -msgstr "" - -msgid "actions_about_description" -msgstr "" - -msgid "actions_addentity" -msgstr "agregar una entidad de este tipo" - -msgid "actions_addentity_description" -msgstr "" - -msgid "actions_addrelated" -msgstr "" - -msgid "actions_addrelated_description" -msgstr "" - -msgid "actions_cancel" -msgstr "Anular" - -msgid "actions_cancel_description" -msgstr "" - -msgid "actions_changelog" -msgstr "" - -msgid "actions_changelog_description" -msgstr "" - -msgid "actions_copy" -msgstr "Copiar" - -msgid "actions_copy_description" -msgstr "" - -msgid "actions_delete" -msgstr "Eliminar" - -msgid "actions_delete_description" -msgstr "" - -msgid "actions_download_as_owl" -msgstr "Download como OWL" - -msgid "actions_download_as_owl_description" -msgstr "" - -msgid "actions_edit" -msgstr "Modificar" - -msgid "actions_edit_description" -msgstr "" - -msgid "actions_embed" -msgstr "Embarcar" - -msgid "actions_embed_description" -msgstr "" - -msgid "actions_entitiesoftype" -msgstr "" - -msgid "actions_entitiesoftype_description" -msgstr "" - -msgid "actions_follow" -msgstr "Seguir" - -msgid "actions_follow_description" -msgstr "" - -msgid "actions_help" -msgstr "" - -msgid "actions_help_description" -msgstr "" - -msgid "actions_logout" -msgstr "Desconectarse" - -msgid "actions_logout_description" -msgstr "" - -msgid "actions_manage" -msgstr "Administración del sitio" - -msgid 
"actions_manage_description" -msgstr "" - -msgid "actions_managepermission" -msgstr "Administración de autorizaciónes" - -msgid "actions_managepermission_description" -msgstr "" - -msgid "actions_muledit" -msgstr "Edición múltiple" - -msgid "actions_muledit_description" -msgstr "" - -msgid "actions_myinfos" -msgstr "Información personal" - -msgid "actions_myinfos_description" -msgstr "" - -msgid "actions_myprefs" -msgstr "Preferencias del usuario" - -msgid "actions_myprefs_description" -msgstr "" - -msgid "actions_poweredby" -msgstr "" - -msgid "actions_poweredby_description" -msgstr "" - -msgid "actions_prefs" -msgstr "Preferencias" - -msgid "actions_prefs_description" -msgstr "" - -msgid "actions_schema" -msgstr "Ver el esquema" - -msgid "actions_schema_description" -msgstr "" - -msgid "actions_select" -msgstr "Seleccionar" - -msgid "actions_select_description" -msgstr "" - -msgid "actions_sendemail" -msgstr "Enviar un email" - -msgid "actions_sendemail_description" -msgstr "" - -msgid "actions_siteconfig" -msgstr "Configuración del sitio" - -msgid "actions_siteconfig_description" -msgstr "" - -msgid "actions_siteinfo" -msgstr "" - -msgid "actions_siteinfo_description" -msgstr "" - -msgid "actions_view" -msgstr "Ver" - -msgid "actions_view_description" -msgstr "" - -msgid "actions_workflow" -msgstr "Ver el workflow" - -msgid "actions_workflow_description" -msgstr "" - msgid "activate" msgstr "Activar" @@ -1223,9 +1095,6 @@ msgid "attribute" msgstr "Atributo" -msgid "attributes with modified permissions:" -msgstr "atributos con autorizaciónes modificadas:" - msgid "august" msgstr "Agosto" @@ -1548,9 +1417,6 @@ msgid "condition" msgstr "" -msgid "condition:" -msgstr "condición:" - msgctxt "RQLExpression" msgid "condition_object" msgstr "" @@ -1558,6 +1424,9 @@ msgid "condition_object" msgstr "condición de" +msgid "conditions" +msgstr "" + msgid "config mode" msgstr "" @@ -1594,6 +1463,9 @@ msgid "constraints applying on this relation" msgstr "Restricciones que se aplican a esta relación" +msgid "content type" +msgstr "" + msgid "contentnavigation" msgstr "Componentes contextuales" @@ -1609,12 +1481,6 @@ msgid "contentnavigation_metadata_description" msgstr "" -msgid "contentnavigation_pdfview" -msgstr "" - -msgid "contentnavigation_pdfview_description" -msgstr "" - msgid "contentnavigation_prevnext" msgstr "Elemento anterior / siguiente" @@ -1809,6 +1675,9 @@ msgid "creation" msgstr "Creación" +msgid "creation date" +msgstr "" + msgid "creation time of an entity" msgstr "Fecha de creación de una entidad" @@ -1844,17 +1713,32 @@ msgid "custom_workflow_object" msgstr "" -msgid "cwetype-schema-image" -msgstr "Esquema" - -msgid "cwetype-schema-permissions" -msgstr "Autorizaciónes" - -msgid "cwetype-schema-text" -msgstr "Modelo de datos" +msgid "cwetype-box" +msgstr "" + +msgid "cwetype-description" +msgstr "" + +msgid "cwetype-permissions" +msgstr "" + +msgid "cwetype-views" +msgstr "" msgid "cwetype-workflow" -msgstr "Workflow" +msgstr "" + +msgid "cwgroup-main" +msgstr "" + +msgid "cwgroup-permissions" +msgstr "" + +msgid "cwrtype-description" +msgstr "" + +msgid "cwrtype-permissions" +msgstr "" msgid "cwuri" msgstr "" @@ -1883,6 +1767,9 @@ msgid "default user workflow" msgstr "" +msgid "default value" +msgstr "" + msgid "default workflow for an entity type" msgstr "" @@ -2111,18 +1998,12 @@ msgid "detach attached file" msgstr "soltar el archivo existente" -msgid "display order of the action" -msgstr "Orden de aparición de la acción" - msgid "display order of the box" msgstr "Orden de 
aparición de la caja" msgid "display order of the component" msgstr "Orden de aparición del componente" -msgid "display the action or not" -msgstr "Mostrar la acción o no" - msgid "display the box or not" msgstr "Mostrar la caja o no" @@ -2335,6 +2216,9 @@ msgid "final" msgstr "" +msgid "first name" +msgstr "" + msgid "firstname" msgstr "Nombre" @@ -2448,10 +2332,6 @@ msgid "granted to groups" msgstr "Otorgado a los grupos" -#, python-format -msgid "graphical representation of %s" -msgstr "" - msgid "graphical representation of the instance'schema" msgstr "" @@ -2475,9 +2355,6 @@ msgid "groups to which the permission is granted" msgstr "Grupos quienes tienen otorgada esta autorización" -msgid "groups:" -msgstr "Grupos :" - msgid "guests" msgstr "Invitados" @@ -2522,6 +2399,18 @@ msgid "i18n_login_popup" msgstr "Identificarse" +msgid "i18ncard_*" +msgstr "" + +msgid "i18ncard_+" +msgstr "" + +msgid "i18ncard_1" +msgstr "" + +msgid "i18ncard_?" +msgstr "" + msgid "i18nprevnext_next" msgstr "Siguiente" @@ -2559,12 +2448,6 @@ msgid "image" msgstr "Imagen" -msgid "in memory entity schema" -msgstr "Esquema de la entidad en memoria" - -msgid "in memory relation schema" -msgstr "Esquema de la relación en memoria" - msgid "in_group" msgstr "En el grupo" @@ -2595,9 +2478,6 @@ msgid "incorrect value (%(value)s) for type \"%(type)s\"" msgstr "valor %(value)s incorrecto para el tipo \"%(type)s\"" -msgid "index" -msgstr "Indice" - msgid "index this attribute's value in the plain text index" msgstr "Indexar el valor de este atributo en el índice de texto simple" @@ -2668,9 +2548,19 @@ msgid "invalid action %r" msgstr "Acción %r invalida" +#, python-format +msgid "invalid value %(value)s, it must be one of %(choices)s" +msgstr "" + msgid "is" msgstr "es" +msgid "is object of:" +msgstr "es objeto de" + +msgid "is subject of:" +msgstr "es sujeto de" + msgid "" "is the subject/object entity of the relation composed of the other ? This " "implies that when the composite is deleted, composants are also deleted." 
@@ -2722,6 +2612,12 @@ msgid "last connection date" msgstr "Ultima fecha de conexión" +msgid "last login time" +msgstr "" + +msgid "last name" +msgstr "" + msgid "last usage" msgstr "" @@ -2871,6 +2767,9 @@ msgid "more actions" msgstr "mas acciones" +msgid "more info about this workflow" +msgstr "" + msgid "multiple edit" msgstr "Edicion multiple" @@ -3018,8 +2917,8 @@ msgid "object" msgstr "objeto" -msgid "object_plural:" -msgstr "objetos:" +msgid "object type" +msgstr "" msgid "october" msgstr "octubre" @@ -3045,6 +2944,9 @@ msgid "opened web sessions" msgstr "" +msgid "options" +msgstr "" + msgid "order" msgstr "orden" @@ -3099,11 +3001,8 @@ msgid "permission" msgstr "Permiso" -msgid "permissions for entities" -msgstr "autorizaciónes para entidades" - -msgid "permissions for relations" -msgstr "autorizaciónes para relaciones" +msgid "permissions" +msgstr "" msgid "permissions for this entity" msgstr "Permisos para esta entidad" @@ -3179,6 +3078,12 @@ msgid "project" msgstr "Proyecto" +msgid "rdef-description" +msgstr "" + +msgid "rdef-permissions" +msgstr "" + msgid "read" msgstr "Lectura" @@ -3220,6 +3125,9 @@ msgid "related entity has no workflow set" msgstr "" +msgid "relation" +msgstr "" + #, python-format msgid "relation %(relname)s of %(ent)s" msgstr "relación %(relname)s de %(ent)s" @@ -3248,9 +3156,6 @@ msgid "relation_type_object" msgstr "Definición" -msgid "relations" -msgstr "relaciones" - msgid "relations deleted" msgstr "Relaciones eliminadas" @@ -3292,6 +3197,9 @@ msgid "require_permission_object" msgstr "Requerido por autorización" +msgid "required" +msgstr "" + msgid "required attribute" msgstr "Atributo requerido" @@ -3339,11 +3247,17 @@ msgid "schema's permissions definitions" msgstr "definiciones de permisos del esquema" +msgid "schema-entity-types" +msgstr "" + msgid "schema-image" msgstr "esquema imagen" -msgid "schema-text" -msgstr "esquema text" +msgid "schema-relation-types" +msgstr "" + +msgid "schema-security" +msgstr "" msgid "search" msgstr "buscar" @@ -3534,12 +3448,12 @@ msgid "subject" msgstr "sujeto" +msgid "subject type" +msgstr "" + msgid "subject/object cardinality" msgstr "cardinalidad sujeto/objeto" -msgid "subject_plural:" -msgstr "sujetos:" - msgid "subworkflow" msgstr "" @@ -3942,6 +3856,9 @@ msgid "used to grant a permission to a group" msgstr "utilizado para otorgar permisos a un grupo" +msgid "user" +msgstr "" + #, python-format msgid "" "user %s has made the following change(s):\n" @@ -3982,6 +3899,14 @@ msgid "value associated to this key is not editable manually" msgstr "el valor asociado a este elemento no es editable manualmente" +#, python-format +msgid "value should have maximum size of %s" +msgstr "" + +#, python-format +msgid "value should have minimum size of %s" +msgstr "" + msgid "vcard" msgstr "vcard" @@ -4000,15 +3925,18 @@ msgid "view history" msgstr "" +msgid "view identifier" +msgstr "" + +msgid "view title" +msgstr "" + msgid "view workflow" msgstr "ver workflow" msgid "view_index" msgstr "" -msgid "views" -msgstr "vistas" - msgid "visible" msgstr "visible" @@ -4031,6 +3959,12 @@ msgid "wf_info_for_object" msgstr "historial de transiciones" +msgid "wf_tab_info" +msgstr "" + +msgid "wfgraph" +msgstr "" + msgid "" "when multiple addresses are equivalent (such as python-projects@logilab.org " "and python-projects@lists.logilab.org), set this to indicate which is the " @@ -4100,346 +4034,3 @@ msgid "you should probably delete that property" msgstr "deberia probablamente suprimir esta propriedad" - -#~ msgid "%(fmt1)s, or without 
time: %(fmt2)s" -#~ msgstr "%(fmt1)s, o bien sin especificar horario: %(fmt2)s" - -#~ msgid "%s results matching query" -#~ msgstr "%s resultados de la demanda" - -#~ msgid "Application" -#~ msgstr "Aplicación" - -#~ msgid "Debug level set to %s" -#~ msgstr "Nivel de debug puesto a %s" - -#~ msgid "Environment" -#~ msgstr "Ambiente" - -#~ msgid "No query has been executed" -#~ msgstr "Ninguna búsqueda ha sido ejecutada" - -#~ msgid "Request" -#~ msgstr "Petición" - -#~ msgid "Server" -#~ msgstr "Servidor" - -#~ msgid "There is no workflow defined for this entity." -#~ msgstr "No hay workflow para este entidad" - -#~ msgid "Unable to find anything named \"%s\" in the schema !" -#~ msgstr "No encontramos el nombre \"%s\" en el esquema" - -#~ msgid "You are now connected to %s" -#~ msgstr "Usted esta conectado a %s" - -#~ msgid "" -#~ "You have no access to this view or it's not applyable to current data" -#~ msgstr "No tiene acceso a esta vista o No es aplicable a los datos actuales" - -#~ msgid "__msg state changed" -#~ msgstr "El estado a cambiado" - -#~ msgid "account state" -#~ msgstr "Estado de la Cuenta" - -#~ msgid "add CWRType add_permission RQLExpression subject" -#~ msgstr "Expresión RQL de agregación" - -#~ msgid "add CWRType delete_permission RQLExpression subject" -#~ msgstr "Expresión RQL de eliminación" - -#~ msgid "add CWRType read_permission RQLExpression subject" -#~ msgstr "Expresión RQL de lectura" - -#~ msgid "add State state_of CWEType object" -#~ msgstr "Estado" - -#~ msgid "add Transition transition_of CWEType object" -#~ msgstr "Transición" - -#~ msgid "add a Bookmark" -#~ msgstr "Agregar un Favorito" - -#~ msgid "add a CWAttribute" -#~ msgstr "Agregar un tipo de relación" - -#~ msgid "add a CWCache" -#~ msgstr "Agregar un cache" - -#~ msgid "add a CWConstraint" -#~ msgstr "Agregar una Restricción" - -#~ msgid "add a CWConstraintType" -#~ msgstr "Agregar un tipo de Restricción" - -#~ msgid "add a CWEType" -#~ msgstr "Agregar un tipo de entidad" - -#~ msgid "add a CWGroup" -#~ msgstr "Agregar un grupo de usuarios" - -#~ msgid "add a CWPermission" -#~ msgstr "Agregar una autorización" - -#~ msgid "add a CWProperty" -#~ msgstr "Agregar una propiedad" - -#~ msgid "add a CWRType" -#~ msgstr "Agregar un tipo de relación" - -#~ msgid "add a CWRelation" -#~ msgstr "Agregar una relación" - -#~ msgid "add a CWUser" -#~ msgstr "Agregar un usuario" - -#~ msgid "add a EmailAddress" -#~ msgstr "Agregar un email" - -#~ msgid "add a RQLExpression" -#~ msgstr "Agregar una expresión rql" - -#~ msgid "add a State" -#~ msgstr "Agregar un estado" - -#~ msgid "add a TrInfo" -#~ msgstr "Agregar una información de transición" - -#~ msgid "add a Transition" -#~ msgstr "Agregar una transición" - -#~ msgid "add relation" -#~ msgstr "Agregar una relación" - -#~ msgid "" -#~ "added relation %(rtype)s from %(frometype)s #%(fromeid)s to %(toetype)s #%" -#~ "(toeid)s" -#~ msgstr "" -#~ "Relación agregada %(rtype)s de %(frometype)s #%(fromeid)s hacia %(toetype)" -#~ "s #%(toeid)s" - -#~ msgid "allowed transition from this state" -#~ msgstr "transición autorizada desde este estado" - -#~ msgid "button_reset" -#~ msgstr "Cancelar los cambios" - -#~ msgid "canonical" -#~ msgstr "canónico" - -#~ msgid "comment:" -#~ msgstr "Comentario:" - -#~ msgid "copy edition" -#~ msgstr "Edición de una copia" - -#~ msgid "" -#~ "core relation giving to a group the permission to add an entity or " -#~ "relation type" -#~ msgstr "" -#~ "Relación sistema que otorga a un grupo la autorización de agregar una " -#~ 
"entidad o una relación" - -#~ msgid "" -#~ "core relation giving to a group the permission to delete an entity or " -#~ "relation type" -#~ msgstr "" -#~ "Relación sistema que otorga a un grupo la autorización de eliminar una " -#~ "entidad o relación" - -#~ msgid "" -#~ "core relation giving to a group the permission to read an entity or " -#~ "relation type" -#~ msgstr "" -#~ "Relación sistema que otorga a un grupo la autorización de leer una " -#~ "entidad o una relación " - -#~ msgid "" -#~ "core relation giving to a group the permission to update an entity type" -#~ msgstr "" -#~ "Relación sistema que otorga a un grupo la autorización de actualizar una " -#~ "entidad" - -#~ msgid "" -#~ "creating RQLExpression (CWRType %(linkto)s add_permission RQLExpression)" -#~ msgstr "" -#~ "Creación de una expresión RQL para la autorización de agregar relaciones %" -#~ "(linkto)s" - -#~ msgid "" -#~ "creating RQLExpression (CWRType %(linkto)s delete_permission " -#~ "RQLExpression)" -#~ msgstr "" -#~ "creación de una expresión RQL para autorizar la eliminación de relaciones " -#~ "%(linkto)s" - -#~ msgid "" -#~ "creating RQLExpression (CWRType %(linkto)s read_permission RQLExpression)" -#~ msgstr "" -#~ "Creación de una expresión RQL para autorizar la lectura de relaciones %" -#~ "(linkto)s" - -#~ msgid "creating State (State state_of CWEType %(linkto)s)" -#~ msgstr "Creación de un estado por el tipo %(linkto)s" - -#~ msgid "creating Transition (Transition transition_of CWEType %(linkto)s)" -#~ msgstr "Creación de una transición para el tipo %(linkto)s" - -#~ msgid "currently attached file: %s" -#~ msgstr "archivo adjunto: %s" - -#~ msgid "" -#~ "deleted relation %(rtype)s from %(frometype)s #%(fromeid)s to %(toetype)s " -#~ "#%(toeid)s" -#~ msgstr "" -#~ "Eliminación de la relación %(rtype)s de %(frometype)s #%(fromeid)s hacia %" -#~ "(toetype)s #%(toeid)s" - -#~ msgid "detach attached file %s" -#~ msgstr "Quitar archivo adjunto %s" - -#~ msgid "element copied" -#~ msgstr "Elemento copiado" - -#~ msgid "element created" -#~ msgstr "Elemento creado" - -#~ msgid "element edited" -#~ msgstr "Elemento editado" - -#~ msgid "entity types which may use this state" -#~ msgstr "Tipo de entidades que pueden utilizar este estado" - -#~ msgid "entity types which may use this transition" -#~ msgstr "Entidades que pueden utilizar esta transición" - -#~ msgid "groups allowed to add entities/relations of this type" -#~ msgstr "Grupos autorizados a agregar entidades/relaciones de este tipo" - -#~ msgid "groups allowed to delete entities/relations of this type" -#~ msgstr "Grupos autorizados a eliminar entidades/relaciones de este tipo" - -#~ msgid "groups allowed to read entities/relations of this type" -#~ msgstr "Grupos autorizados a leer entidades/relaciones de este tipo" - -#~ msgid "groups allowed to update entities of this type" -#~ msgstr "Grupos autorizados a actualizar entidades de este tipo" - -#~ msgid "home" -#~ msgstr "Inicio" - -#~ msgid "initial state for entities of this type" -#~ msgstr "Estado inicial para las entidades de este tipo" - -#~ msgid "invalid date" -#~ msgstr "Esta fecha no es válida" - -#~ msgid "link a state to one or more entity type" -#~ msgstr "liga un estado a una o mas entidades" - -#~ msgid "link a transition to one or more entity type" -#~ msgstr "liga una transición a una o mas tipos de entidad" - -#~ msgid "link to each item in" -#~ msgstr "ligar hacia cada elemento en" - -#~ msgid "loading" -#~ msgstr "Cargando" - -#~ msgid "nothing to edit" -#~ msgstr "nada que editar" 
- -#~ msgid "remove this Bookmark" -#~ msgstr "Eliminar este Favorito" - -#~ msgid "remove this CWAttribute" -#~ msgstr "Eliminar este atributo" - -#~ msgid "remove this CWCache" -#~ msgstr "Eliminar esta cache de aplicación" - -#~ msgid "remove this CWConstraint" -#~ msgstr "Eliminar esta restricción" - -#~ msgid "remove this CWConstraintType" -#~ msgstr "Eliminar este tipo de restricción" - -#~ msgid "remove this CWEType" -#~ msgstr "Eliminar este tipo de entidad" - -#~ msgid "remove this CWGroup" -#~ msgstr "Eliminar este grupo" - -#~ msgid "remove this CWPermission" -#~ msgstr "Eliminar este permiso" - -#~ msgid "remove this CWProperty" -#~ msgstr "Eliminar esta propiedad" - -#~ msgid "remove this CWRType" -#~ msgstr "Eliminar esta definición de relación" - -#~ msgid "remove this CWRelation" -#~ msgstr "Eliminar esta relación" - -#~ msgid "remove this CWUser" -#~ msgstr "Eliminar este usuario" - -#~ msgid "remove this EmailAddress" -#~ msgstr "Eliminar este correo electronico" - -#~ msgid "remove this RQLExpression" -#~ msgstr "Eliminar esta expresión RQL" - -#~ msgid "remove this State" -#~ msgstr "Eliminar este estado" - -#~ msgid "remove this TrInfo" -#~ msgstr "Eliminar información de esta transición" - -#~ msgid "remove this Transition" -#~ msgstr "Eliminar esta transición" - -#~ msgid "rql expression allowing to add entities/relations of this type" -#~ msgstr "expresion RQL permitiendo agregar entidades/relaciones de este tipo" - -#~ msgid "rql expression allowing to delete entities/relations of this type" -#~ msgstr "" -#~ "expresion RQL permitiendo eliminar entidades/relaciones de este tipo" - -#~ msgid "rql expression allowing to read entities/relations of this type" -#~ msgstr "expresion RQL permitiendo leer entidades/relaciones de este tipo" - -#~ msgid "rql expression allowing to update entities of this type" -#~ msgstr "expresion RQL permitiendo actualizar entidades de este tipo" - -#~ msgid "server debug information" -#~ msgstr "server debug information" - -#~ msgid "" -#~ "use to define a transition from one or multiple states to a destination " -#~ "states in workflow's definitions." -#~ msgstr "" -#~ "utilizado para definir una transición desde uno o multiples estados hacia " -#~ "uno o varios estados destino en las definiciones del workflow" - -#~ msgid "" -#~ "user for which this property is applying. If this relation is not set, " -#~ "the property is considered as a global property" -#~ msgstr "" -#~ "usuario para el cual aplica esta propiedad. Si no se establece esta " -#~ "relación, la propiedad es considerada como una propiedad global." - -#~ msgid "" -#~ "when multiple addresses are equivalent (such as python-projects@logilab." -#~ "org and python-projects@lists.logilab.org), set this to true on one of " -#~ "them which is the preferred form." 
-#~ msgstr "" -#~ "cuando multiples direcciones de correo son equivalentes (como python-" -#~ "projects@logilab.org y python-projects@lists.logilab.org), establecer " -#~ "esto como verdadero en una de ellas es la forma preferida " - -#~ msgid "workflow for %s" -#~ msgstr "workflow para %s" diff -r b619531ddbd2 -r b6e250dd7a7d i18n/fr.po --- a/i18n/fr.po Fri Apr 23 12:40:48 2010 +0200 +++ b/i18n/fr.po Fri Apr 23 12:42:53 2010 +0200 @@ -35,6 +35,9 @@ msgid " from state %(fromstate)s to state %(tostate)s\n" msgstr " de l'état %(fromstate)s vers l'état %(tostate)s\n" +msgid " :" +msgstr "" + #, python-format msgid "%(attr)s set to %(newvalue)s" msgstr "%(attr)s modifié à %(newvalue)s" @@ -56,6 +59,22 @@ msgstr "%(subject)s %(etype)s #%(eid)s (%(login)s)" #, python-format +msgid "%(value)r doesn't match the %(regexp)r regular expression" +msgstr "%(value)r ne correspond pas à l'expression régulière %(regexp)r" + +#, python-format +msgid "%(value)r must be %(op)s %(boundary)s" +msgstr "%(value)r doit être %(op)s %(boundary)s" + +#, python-format +msgid "%(value)r must be <= %(boundary)s" +msgstr "%(value)r doit être <= %(boundary)s" + +#, python-format +msgid "%(value)r must be >= %(boundary)s" +msgstr "%(value)r doit être >= %(boundary)s" + +#, python-format msgid "%d days" msgstr "%d jours" @@ -195,14 +214,14 @@ msgid "AND" msgstr "ET" -msgid "Add permissions" -msgstr "Permissions d'ajouter" - msgid "Any" msgstr "N'importe" -msgid "Attributes" -msgstr "Attributs" +msgid "Attributes permissions:" +msgstr "Permissions des attributs" + +msgid "Attributes with non default permissions:" +msgstr "Attributs ayant des permissions non-standard" # schema pot file, generated on 2009-09-16 16:46:55 # @@ -228,6 +247,9 @@ msgid "BoundConstraint" msgstr "contrainte de bornes" +msgid "BoundaryConstraint" +msgstr "contrainte de bornes" + msgid "Browse by category" msgstr "Naviguer par catégorie" @@ -381,14 +403,11 @@ msgid "Decimal_plural" msgstr "Nombres décimaux" -msgid "Delete permissions" -msgstr "Permissions de supprimer" - msgid "Do you want to delete the following element(s) ?" msgstr "Voulez-vous supprimer le(s) élément(s) suivant(s) ?" 
-msgid "Download page as pdf" -msgstr "télécharger la page au format PDF" +msgid "Download schema as OWL" +msgstr "Télécharger le schéma au format OWL" msgctxt "inlined:CWUser.use_email.subject" msgid "EmailAddress" @@ -403,6 +422,9 @@ msgid "Entities" msgstr "entités" +msgid "Entity types" +msgstr "Types d'entités" + msgid "ExternalUri" msgstr "Uri externe" @@ -430,6 +452,9 @@ msgid "Help" msgstr "Aide" +msgid "Index" +msgstr "Index" + msgid "Instance" msgstr "Instance" @@ -523,15 +548,27 @@ msgid "No result matching query" msgstr "aucun résultat" +msgid "Non exhaustive list of views that may apply to entities of this type" +msgstr "Liste non exhausite des vues s'appliquant à ce type d'entité" + msgid "OR" msgstr "OU" +msgid "Parent classes:" +msgstr "Classes parentes :" + msgid "Password" msgstr "Mot de passe" msgid "Password_plural" msgstr "Mots de passe" +msgid "Permissions for entity types" +msgstr "Permissions pour les types d'entités" + +msgid "Permissions for relations" +msgstr "Permissions pour les relations" + msgid "Please note that this is only a shallow copy" msgstr "Attention, cela n'effectue qu'une copie de surface" @@ -550,9 +587,6 @@ msgid "RQLVocabularyConstraint" msgstr "contrainte rql de vocabulaire" -msgid "Read permissions" -msgstr "Permissions de lire" - msgid "Recipients:" msgstr "Destinataires :" @@ -562,6 +596,9 @@ msgid "Registry's content" msgstr "Contenu du registre" +msgid "Relation types" +msgstr "Types de relation" + msgid "Relations" msgstr "Relations" @@ -599,6 +636,9 @@ msgid "String_plural" msgstr "Chaînes de caractères" +msgid "Sub-classes:" +msgstr "Classes filles :" + msgid "SubWorkflowExitPoint" msgstr "Sortie de sous-workflow" @@ -625,6 +665,9 @@ msgid "The view %s could not be found" msgstr "La vue %s est introuvable" +msgid "There is no default workflow" +msgstr "Ce type d'entité n'a pas de workflow par défault" + msgid "This BaseTransition" msgstr "Cette transition abstraite" @@ -691,6 +734,9 @@ msgid "This WorkflowTransition" msgstr "Cette transition workflow" +msgid "This entity type permissions:" +msgstr "Permissions pour ce type d'endité" + msgid "Time" msgstr "Heure" @@ -715,9 +761,6 @@ msgid "Unreachable objects" msgstr "Objets inacessible" -msgid "Update permissions" -msgstr "Permissions de modifier" - msgid "Used by:" msgstr "Utilisé par :" @@ -816,183 +859,12 @@ msgid "abstract base class for transitions" msgstr "classe de base abstraite pour les transitions" -msgid "access type" -msgstr "type d'accès" - msgid "action(s) on this selection" msgstr "action(s) sur cette sélection" msgid "actions" msgstr "actions" -msgid "actions_about" -msgstr "à propos" - -msgid "actions_about_description" -msgstr "" - -msgid "actions_addentity" -msgstr "ajouter une entité de ce type" - -msgid "actions_addentity_description" -msgstr "" - -msgid "actions_addrelated" -msgstr "menu ajouter" - -msgid "actions_addrelated_description" -msgstr "" - -msgid "actions_cancel" -msgstr "annuler la sélection" - -msgid "actions_cancel_description" -msgstr "" - -msgid "actions_changelog" -msgstr "changements récents" - -msgid "actions_changelog_description" -msgstr "" - -msgid "actions_copy" -msgstr "copier" - -msgid "actions_copy_description" -msgstr "" - -msgid "actions_delete" -msgstr "supprimer" - -msgid "actions_delete_description" -msgstr "" - -msgid "actions_download_as_owl" -msgstr "télécharger en owl" - -msgid "actions_download_as_owl_description" -msgstr "" - -msgid "actions_edit" -msgstr "modifier" - -msgid "actions_edit_description" -msgstr "" - -msgid 
"actions_embed" -msgstr "embarquer" - -msgid "actions_embed_description" -msgstr "" - -msgid "actions_entitiesoftype" -msgstr "voir les entités de ce type" - -msgid "actions_entitiesoftype_description" -msgstr "" - -msgid "actions_follow" -msgstr "suivre" - -msgid "actions_follow_description" -msgstr "" - -msgid "actions_help" -msgstr "aide" - -msgid "actions_help_description" -msgstr "" - -msgid "actions_logout" -msgstr "se déconnecter" - -msgid "actions_logout_description" -msgstr "" - -msgid "actions_manage" -msgstr "gestion du site" - -msgid "actions_manage_description" -msgstr "" - -msgid "actions_managepermission" -msgstr "gestion des permissions" - -msgid "actions_managepermission_description" -msgstr "" - -msgid "actions_muledit" -msgstr "édition multiple" - -msgid "actions_muledit_description" -msgstr "" - -msgid "actions_myinfos" -msgstr "informations personnelles" - -msgid "actions_myinfos_description" -msgstr "" - -msgid "actions_myprefs" -msgstr "préférences utilisateur" - -msgid "actions_myprefs_description" -msgstr "" - -msgid "actions_poweredby" -msgstr "powered by" - -msgid "actions_poweredby_description" -msgstr "" - -msgid "actions_prefs" -msgstr "préférences" - -msgid "actions_prefs_description" -msgstr "" - -msgid "actions_schema" -msgstr "voir le schéma" - -msgid "actions_schema_description" -msgstr "" - -msgid "actions_select" -msgstr "sélectionner" - -msgid "actions_select_description" -msgstr "" - -msgid "actions_sendemail" -msgstr "envoyer un email" - -msgid "actions_sendemail_description" -msgstr "" - -msgid "actions_siteconfig" -msgstr "configuration du site" - -msgid "actions_siteconfig_description" -msgstr "" - -msgid "actions_siteinfo" -msgstr "information sur ce site" - -msgid "actions_siteinfo_description" -msgstr "" - -msgid "actions_view" -msgstr "voir" - -msgid "actions_view_description" -msgstr "" - -msgid "actions_workflow" -msgstr "voir le workflow" - -msgid "actions_workflow_description" -msgstr "" - msgid "activate" msgstr "activer" @@ -1242,9 +1114,6 @@ msgid "attribute" msgstr "attribut" -msgid "attributes with modified permissions:" -msgstr "attributs ayant des permissions modifiées :" - msgid "august" msgstr "août" @@ -1568,9 +1437,6 @@ msgid "condition" msgstr "condition" -msgid "condition:" -msgstr "condition :" - msgctxt "RQLExpression" msgid "condition_object" msgstr "condition de" @@ -1578,6 +1444,9 @@ msgid "condition_object" msgstr "condition de" +msgid "conditions" +msgstr "conditions" + msgid "config mode" msgstr "mode de configuration" @@ -1614,6 +1483,9 @@ msgid "constraints applying on this relation" msgstr "contraintes s'appliquant à cette relation" +msgid "content type" +msgstr "type MIME" + msgid "contentnavigation" msgstr "composants contextuels" @@ -1630,12 +1502,6 @@ msgid "contentnavigation_metadata_description" msgstr "" -msgid "contentnavigation_pdfview" -msgstr "icône pdf" - -msgid "contentnavigation_pdfview_description" -msgstr "" - msgid "contentnavigation_prevnext" msgstr "élément précedent / suivant" @@ -1832,6 +1698,9 @@ msgid "creation" msgstr "création" +msgid "creation date" +msgstr "date de création" + msgid "creation time of an entity" msgstr "date de création d'une entité" @@ -1867,18 +1736,33 @@ msgid "custom_workflow_object" msgstr "workflow de" -msgid "cwetype-schema-image" -msgstr "schéma" - -msgid "cwetype-schema-permissions" +msgid "cwetype-box" +msgstr "vue \"boîte\"" + +msgid "cwetype-description" +msgstr "description" + +msgid "cwetype-permissions" msgstr "permissions" -msgid "cwetype-schema-text" 
-msgstr "description" +msgid "cwetype-views" +msgstr "vues" msgid "cwetype-workflow" msgstr "workflow" +msgid "cwgroup-main" +msgstr "description" + +msgid "cwgroup-permissions" +msgstr "permissions" + +msgid "cwrtype-description" +msgstr "description" + +msgid "cwrtype-permissions" +msgstr "permissions" + msgid "cwuri" msgstr "uri interne" @@ -1906,6 +1790,9 @@ msgid "default user workflow" msgstr "workflow par défaut des utilisateurs" +msgid "default value" +msgstr "valeur par défaut" + msgid "default workflow for an entity type" msgstr "workflow par défaut pour un type d'entité" @@ -2146,18 +2033,12 @@ msgid "detach attached file" msgstr "détacher le fichier existant" -msgid "display order of the action" -msgstr "ordre d'affichage de l'action" - msgid "display order of the box" msgstr "ordre d'affichage de la boîte" msgid "display order of the component" msgstr "ordre d'affichage du composant" -msgid "display the action or not" -msgstr "afficher l'action ou non" - msgid "display the box or not" msgstr "afficher la boîte ou non" @@ -2369,6 +2250,9 @@ msgid "final" msgstr "final" +msgid "first name" +msgstr "prénom" + msgid "firstname" msgstr "prénom" @@ -2387,7 +2271,7 @@ msgstr "suivez ce lien pour plus d'information sur ce %s" msgid "follow this link if javascript is deactivated" -msgstr "" +msgstr "suivez ce lien si javascript est désactivé" msgid "for_user" msgstr "pour l'utilisateur" @@ -2484,10 +2368,6 @@ msgid "granted to groups" msgstr "accordée aux groupes" -#, python-format -msgid "graphical representation of %s" -msgstr "représentation graphique de %s" - msgid "graphical representation of the instance'schema" msgstr "représentation graphique du schéma de l'instance" @@ -2512,9 +2392,6 @@ msgid "groups to which the permission is granted" msgstr "groupes auquels cette permission est donnée" -msgid "groups:" -msgstr "groupes :" - msgid "guests" msgstr "invités" @@ -2559,6 +2436,18 @@ msgid "i18n_login_popup" msgstr "s'authentifier" +msgid "i18ncard_*" +msgstr "0..n" + +msgid "i18ncard_+" +msgstr "1..n" + +msgid "i18ncard_1" +msgstr "1" + +msgid "i18ncard_?" +msgstr "0..1" + msgid "i18nprevnext_next" msgstr "suivant" @@ -2596,12 +2485,6 @@ msgid "image" msgstr "image" -msgid "in memory entity schema" -msgstr "schéma de l'entité en mémoire" - -msgid "in memory relation schema" -msgstr "schéma de la relation en mémoire" - msgid "in_group" msgstr "dans le groupe" @@ -2632,9 +2515,6 @@ msgid "incorrect value (%(value)s) for type \"%(type)s\"" msgstr "valeur %(value)s incorrecte pour le type \"%(type)s\"" -msgid "index" -msgstr "index" - msgid "index this attribute's value in the plain text index" msgstr "indexer la valeur de cet attribut dans l'index plein texte" @@ -2705,9 +2585,19 @@ msgid "invalid action %r" msgstr "action %r invalide" +#, python-format +msgid "invalid value %(value)s, it must be one of %(choices)s" +msgstr "valeur %(value)s incorrect, doit être parmi %(choices)s" + msgid "is" msgstr "de type" +msgid "is object of:" +msgstr "est object de" + +msgid "is subject of:" +msgstr "est sujet de" + msgid "" "is the subject/object entity of the relation composed of the other ? This " "implies that when the composite is deleted, composants are also deleted." 
@@ -2760,6 +2650,12 @@ msgid "last connection date" msgstr "dernière date de connexion" +msgid "last login time" +msgstr "dernière date de connexion" + +msgid "last name" +msgstr "nom" + msgid "last usage" msgstr "dernier usage" @@ -2909,6 +2805,9 @@ msgid "more actions" msgstr "plus d'actions" +msgid "more info about this workflow" +msgstr "plus d'information sur ce workflow" + msgid "multiple edit" msgstr "édition multiple" @@ -3052,8 +2951,8 @@ msgid "object" msgstr "objet" -msgid "object_plural:" -msgstr "objets :" +msgid "object type" +msgstr "type de l'objet" msgid "october" msgstr "octobre" @@ -3079,6 +2978,9 @@ msgid "opened web sessions" msgstr "sessions web ouvertes" +msgid "options" +msgstr "options" + msgid "order" msgstr "ordre" @@ -3135,11 +3037,8 @@ msgid "permission" msgstr "permission" -msgid "permissions for entities" -msgstr "permissions pour les entités" - -msgid "permissions for relations" -msgstr "permissions pour les relations" +msgid "permissions" +msgstr "permissions" msgid "permissions for this entity" msgstr "permissions pour cette entité" @@ -3215,6 +3114,12 @@ msgid "project" msgstr "projet" +msgid "rdef-description" +msgstr "description" + +msgid "rdef-permissions" +msgstr "permissions" + msgid "read" msgstr "lecture" @@ -3256,6 +3161,9 @@ msgid "related entity has no workflow set" msgstr "l'entité lié n'a pas de workflow" +msgid "relation" +msgstr "relation" + #, python-format msgid "relation %(relname)s of %(ent)s" msgstr "relation %(relname)s de %(ent)s" @@ -3284,9 +3192,6 @@ msgid "relation_type_object" msgstr "définition" -msgid "relations" -msgstr "relations" - msgid "relations deleted" msgstr "relations supprimées" @@ -3328,6 +3233,9 @@ msgid "require_permission_object" msgstr "permission of" +msgid "required" +msgstr "requis" + msgid "required attribute" msgstr "attribut requis" @@ -3376,11 +3284,17 @@ msgid "schema's permissions definitions" msgstr "permissions définies dans le schéma" +msgid "schema-entity-types" +msgstr "types d'entités" + msgid "schema-image" -msgstr "schéma" - -msgid "schema-text" -msgstr "description" +msgstr "image" + +msgid "schema-relation-types" +msgstr "types de relations" + +msgid "schema-security" +msgstr "permissions" msgid "search" msgstr "rechercher" @@ -3573,12 +3487,12 @@ msgid "subject" msgstr "sujet" +msgid "subject type" +msgstr "type du sujet" + msgid "subject/object cardinality" msgstr "cardinalité sujet/objet" -msgid "subject_plural:" -msgstr "sujets :" - msgid "subworkflow" msgstr "sous-workflow" @@ -3985,6 +3899,9 @@ msgid "used to grant a permission to a group" msgstr "utiliser pour donner une permission à un groupe" +msgid "user" +msgstr "utilisateur" + #, python-format msgid "" "user %s has made the following change(s):\n" @@ -4025,6 +3942,14 @@ msgid "value associated to this key is not editable manually" msgstr "la valeur associée à cette clé n'est pas éditable manuellement" +#, python-format +msgid "value should have maximum size of %s" +msgstr "la valeur doit être de taille %s au maximum" + +#, python-format +msgid "value should have minimum size of %s" +msgstr "la valeur doit être de taille %s au minimum" + msgid "vcard" msgstr "vcard" @@ -4043,15 +3968,18 @@ msgid "view history" msgstr "voir l'historique" +msgid "view identifier" +msgstr "identifiant" + +msgid "view title" +msgstr "titre" + msgid "view workflow" msgstr "voir les états possibles" msgid "view_index" msgstr "accueil" -msgid "views" -msgstr "vues" - msgid "visible" msgstr "visible" @@ -4075,6 +4003,12 @@ msgid "wf_info_for_object" msgstr 
"historique des transitions" +msgid "wf_tab_info" +msgstr "description" + +msgid "wfgraph" +msgstr "image du workflow" + msgid "" "when multiple addresses are equivalent (such as python-projects@logilab.org " "and python-projects@lists.logilab.org), set this to indicate which is the " diff -r b619531ddbd2 -r b6e250dd7a7d migration.py --- a/migration.py Fri Apr 23 12:40:48 2010 +0200 +++ b/migration.py Fri Apr 23 12:42:53 2010 +0200 @@ -16,6 +16,7 @@ from logilab.common.decorators import cached from logilab.common.configuration import REQUIRED, read_old_config from logilab.common.shellutils import ASK +from logilab.common.changelog import Version from cubicweb import ConfigurationError @@ -374,3 +375,75 @@ from logging import getLogger from cubicweb import set_log_methods set_log_methods(MigrationHelper, getLogger('cubicweb.migration')) + + +def version_strictly_lower(a, b): + if a: + a = Version(a) + if b: + b = Version(b) + return a < b + +def max_version(a, b): + return str(max(Version(a), Version(b))) + +class ConfigurationProblem(object): + """Each cube has its own list of dependencies on other cubes/versions. + + The ConfigurationProblem is used to record the loaded cubes, then to detect + inconsistencies in their dependencies. + + See configuration management on wikipedia for litterature. + """ + + def __init__(self, config): + self.cubes = {} + self.config = config + + def add_cube(self, name, version): + self.cubes[name] = version + + def solve(self): + self.warnings = [] + self.errors = [] + self.read_constraints() + for cube, versions in sorted(self.constraints.items()): + oper, version = None, None + # simplify constraints + if versions: + for constraint in versions: + op, ver = constraint + if oper is None: + oper = op + version = ver + elif op == '>=' and oper == '>=': + version = max_version(ver, version) + else: + print 'unable to handle this case', oper, version, op, ver + # "solve" constraint satisfaction problem + if cube not in self.cubes: + self.errors.append( ('add', cube, version) ) + elif versions: + lower_strict = version_strictly_lower(self.cubes[cube], version) + if oper in ('>=','='): + if lower_strict: + self.errors.append( ('update', cube, version) ) + else: + print 'unknown operator', oper + + def read_constraints(self): + self.constraints = {} + self.reverse_constraints = {} + for cube in self.cubes: + use = self.config.cube_dependencies(cube) + for name, constraint in use.iteritems(): + self.constraints.setdefault(name,set()) + if constraint: + try: + oper, version = constraint.split() + self.constraints[name].add( (oper, version) ) + except: + self.warnings.append( + 'cube %s depends on %s but constraint badly ' + 'formatted: %s' % (cube, name, constraint)) + self.reverse_constraints.setdefault(name, set()).add(cube) diff -r b619531ddbd2 -r b6e250dd7a7d misc/cmp_schema.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/misc/cmp_schema.py Fri Apr 23 12:42:53 2010 +0200 @@ -0,0 +1,24 @@ +"""This module compare the Schema on the file system to the one in the database""" + +from cStringIO import StringIO +from cubicweb.web.schemaviewer import SchemaViewer +from logilab.common.ureports import TextWriter +import difflib + +viewer = SchemaViewer() +layout_db = viewer.visit_schema(schema, display_relations=True) +layout_fs = viewer.visit_schema(fsschema, display_relations=True) +writer = TextWriter() +stream_db = StringIO() +stream_fs = StringIO() +writer.format(layout_db, stream=stream_db) +writer.format(layout_fs, stream=stream_fs) + +stream_db.seek(0) 
+stream_fs.seek(0) +db = stream_db.getvalue().splitlines() +fs = stream_fs.getvalue().splitlines() +open('db_schema.txt', 'w').write(stream_db.getvalue()) +open('fs_schema.txt', 'w').write(stream_fs.getvalue()) +#for diff in difflib.ndiff(fs, db): +# print diff diff -r b619531ddbd2 -r b6e250dd7a7d misc/migration/3.8.1_Any.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/misc/migration/3.8.1_Any.py Fri Apr 23 12:42:53 2010 +0200 @@ -0,0 +1,2 @@ +rql('SET X name "BoundaryConstraint" ' + 'WHERE X is CWConstraintType, X name "BoundConstraint"') diff -r b619531ddbd2 -r b6e250dd7a7d misc/migration/postcreate.py --- a/misc/migration/postcreate.py Fri Apr 23 12:40:48 2010 +0200 +++ b/misc/migration/postcreate.py Fri Apr 23 12:42:53 2010 +0200 @@ -43,7 +43,7 @@ # need this since we already have at least one user in the database (the default admin) for user in rql('Any X WHERE X is CWUser').entities(): rql('SET X in_state S WHERE X eid %(x)s, S eid %(s)s', - {'x': user.eid, 's': activated.eid}, 'x') + {'x': user.eid, 's': activated.eid}) # on interactive mode, ask for level 0 persistent options if interactive_mode: @@ -55,11 +55,12 @@ default = cfg.option_default(optname, optdict) # only record values differing from default if value != default: - rql('INSERT CWProperty X: X pkey %(k)s, X value %(v)s', {'k': key, 'v': value}) + rql('INSERT CWProperty X: X pkey %(k)s, X value %(v)s', + {'k': key, 'v': value}) # add PERM_USE_TEMPLATE_FORMAT permission from cubicweb.schema import PERM_USE_TEMPLATE_FORMAT usetmplperm = create_entity('CWPermission', name=PERM_USE_TEMPLATE_FORMAT, label=_('use template languages')) rql('SET X require_group G WHERE G name "managers", X eid %(x)s', - {'x': usetmplperm.eid}, 'x') + {'x': usetmplperm.eid}) diff -r b619531ddbd2 -r b6e250dd7a7d pytestconf.py --- a/pytestconf.py Fri Apr 23 12:40:48 2010 +0200 +++ b/pytestconf.py Fri Apr 23 12:42:53 2010 +0200 @@ -5,8 +5,6 @@ from os.path import split, splitext from logilab.common.pytest import PyTester -from cubicweb.etwist.server import _gc_debug - class CustomPyTester(PyTester): def testfile(self, filename, batchmode=False): try: @@ -22,7 +20,6 @@ if getattr(cls, '__module__', None) != modname: continue clean_repo_test_cls(cls) - #_gc_debug() def clean_repo_test_cls(cls): if 'repo' in cls.__dict__: diff -r b619531ddbd2 -r b6e250dd7a7d req.py diff -r b619531ddbd2 -r b6e250dd7a7d rqlrewrite.py --- a/rqlrewrite.py Fri Apr 23 12:40:48 2010 +0200 +++ b/rqlrewrite.py Fri Apr 23 12:42:53 2010 +0200 @@ -41,15 +41,15 @@ except KeyError: continue stinfo = var.stinfo - if stinfo.get('uidrels'): + if stinfo.get('uidrel') is not None: continue # eid specified, no need for additional type specification try: - typerels = rqlst.defined_vars[varname].stinfo.get('typerels') + typerel = rqlst.defined_vars[varname].stinfo.get('typerel') except KeyError: assert varname in rqlst.aliases continue - if newroot is rqlst and typerels: - mytyperel = iter(typerels).next() + if newroot is rqlst and typerel is not None: + mytyperel = typerel else: for vref in newroot.defined_vars[varname].references(): rel = vref.relation() @@ -80,7 +80,7 @@ # tree is not annotated yet, no scope set so add the restriction # to the root rel = newroot.add_type_restriction(var, possibletypes) - stinfo['typerels'] = frozenset((rel,)) + stinfo['typerel'] = rel stinfo['possibletypes'] = possibletypes diff -r b619531ddbd2 -r b6e250dd7a7d rset.py --- a/rset.py Fri Apr 23 12:40:48 2010 +0200 +++ b/rset.py Fri Apr 23 12:42:53 2010 +0200 @@ -32,14 +32,12 @@ :type rql: str or 
unicode :param rql: the original RQL query string """ - def __init__(self, results, rql, args=None, description=(), cachekey=None, - rqlst=None): + def __init__(self, results, rql, args=None, description=(), rqlst=None): self.rows = results self.rowcount = results and len(results) or 0 # original query and arguments self.rql = rql self.args = args - self.cachekey = cachekey # entity types for each cell (same shape as rows) # maybe discarded if specified when the query has been executed self.description = description diff -r b619531ddbd2 -r b6e250dd7a7d schema.py --- a/schema.py Fri Apr 23 12:40:48 2010 +0200 +++ b/schema.py Fri Apr 23 12:42:53 2010 +0200 @@ -161,7 +161,7 @@ mainvars.append('U') if not mainvars: raise Exception('unable to guess selection variables') - return ','.join(mainvars) + return ','.join(sorted(mainvars)) def split_expression(rqlstring): for expr in rqlstring.split(','): @@ -615,13 +615,13 @@ # start with a comma for bw compat, see below return ';' + self.mainvars + ';' + self.restriction + @classmethod def deserialize(cls, value): # XXX < 3.5.10 bw compat if not value.startswith(';'): return cls(value) _, mainvars, restriction = value.split(';', 2) return cls(restriction, mainvars) - deserialize = classmethod(deserialize) def check(self, entity, rtype, value): """return true if the value satisfy the constraint, else false""" @@ -705,14 +705,14 @@ if eidto is None: # checking constraint for an attribute relation restriction = 'S eid %(s)s, ' + self.restriction - args, ck = {'s': eidfrom}, 's' + args = {'s': eidfrom} else: restriction = 'S eid %(s)s, O eid %(o)s, ' + self.restriction - args, ck = {'s': eidfrom, 'o': eidto}, ('s', 'o') + args = {'s': eidfrom, 'o': eidto} rql = 'Any %s WHERE %s' % (self.mainvars, restriction) if self.distinct_query: rql = 'DISTINCT ' + rql - return session.execute(rql, args, ck, build_descr=False) + return session.execute(rql, args, build_descr=False) class RQLConstraint(RepoEnforcedRQLConstraintMixIn, RQLVocabularyConstraint): @@ -839,9 +839,8 @@ return False if keyarg is None: kwargs.setdefault('u', session.user.eid) - cachekey = kwargs.keys() try: - rset = session.execute(rql, kwargs, cachekey, build_descr=True) + rset = session.execute(rql, kwargs, build_descr=True) except NotImplementedError: self.critical('cant check rql expression, unsupported rql %s', rql) if self.eid is not None: @@ -972,8 +971,8 @@ class workflowable_definition(ybo.metadefinition): """extends default EntityType's metaclass to add workflow relations - (i.e. in_state and wf_info_for). - This is the default metaclass for WorkflowableEntityType + (i.e. in_state, wf_info_for and custom_workflow). This is the default + metaclass for WorkflowableEntityType. """ def __new__(mcs, name, bases, classdict): abstract = classdict.pop('__abstract__', False) @@ -983,23 +982,33 @@ make_workflowable(cls) return cls +class WorkflowableEntityType(ybo.EntityType): + """Use this base class instead of :class:`EntityType` to have workflow + relations (i.e. `in_state`, `wf_info_for` and `custom_workflow`) on your + entity type. + """ + __metaclass__ = workflowable_definition + __abstract__ = True + + def make_workflowable(cls, in_state_descr=None): + """Adds workflow relations as :class:`WorkflowableEntityType`, but usable on + existing classes which are not using that base class. 
+ """ existing_rels = set(rdef.name for rdef in cls.__relations__) # let relation types defined in cw.schemas.workflow carrying # cardinality, constraints and other relation definition properties + etype = getattr(cls, 'name', cls.__name__) if 'custom_workflow' not in existing_rels: - rdef = ybo.SubjectRelation('Workflow') - yams_add_relation(cls.__relations__, rdef, 'custom_workflow') + rdef = ybo.RelationDefinition(etype, 'custom_workflow', 'Workflow') + yams_add_relation(cls.__relations__, rdef) if 'in_state' not in existing_rels: - rdef = ybo.SubjectRelation('State', description=in_state_descr) - yams_add_relation(cls.__relations__, rdef, 'in_state') + rdef = ybo.RelationDefinition(etype, 'in_state', 'State', + description=in_state_descr) + yams_add_relation(cls.__relations__, rdef) if 'wf_info_for' not in existing_rels: - rdef = ybo.ObjectRelation('TrInfo') - yams_add_relation(cls.__relations__, rdef, 'wf_info_for') - -class WorkflowableEntityType(ybo.EntityType): - __metaclass__ = workflowable_definition - __abstract__ = True + rdef = ybo.RelationDefinition('TrInfo', 'wf_info_for', etype) + yams_add_relation(cls.__relations__, rdef) # schema loading ############################################################## diff -r b619531ddbd2 -r b6e250dd7a7d schemaviewer.py --- a/schemaviewer.py Fri Apr 23 12:40:48 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,229 +0,0 @@ -"""an helper class to display CubicWeb schema using ureports - -:organization: Logilab -:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. -:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr -:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses -""" -__docformat__ = "restructuredtext en" -_ = unicode - -from logilab.common.ureports import Section, Title, Table, Link, Span, Text - -from yams.schema2dot import CARD_MAP -from yams.schema import RelationDefinitionSchema - -I18NSTRINGS = [_('read'), _('add'), _('delete'), _('update'), _('order')] - - -class SchemaViewer(object): - """return an ureport layout for some part of a schema""" - def __init__(self, req=None, encoding=None): - self.req = req - if req is not None: - self.req.add_css('cubicweb.schema.css') - self._possible_views = req.vreg['views'].possible_views - if not encoding: - encoding = req.encoding - else: - self._possible_views = lambda x: () - self.encoding = encoding - - def format_acls(self, schema, access_types): - """return a layout displaying access control lists""" - data = [self.req._('access type'), self.req._('groups')] - for access_type in access_types: - data.append(self.req._(access_type)) - acls = [Link(self.req.build_url('cwgroup/%s' % group), self.req._(group)) - for group in schema.get_groups(access_type)] - acls += (Text(rqlexp.expression) for rqlexp in schema.get_rqlexprs(access_type)) - acls = [n for _n in acls for n in (_n, Text(', '))][:-1] - data.append(Span(children=acls)) - return Section(children=(Table(cols=2, cheaders=1, rheaders=1, children=data),), - klass='acl') - - - def visit_schema(self, schema, display_relations=0, skiptypes=()): - """get a layout for a whole schema""" - title = Title(self.req._('Schema %s') % schema.name, - klass='titleUnderline') - layout = Section(children=(title,)) - esection = Section(children=(Title(self.req._('Entities'), - klass='titleUnderline'),)) - layout.append(esection) - eschemas = [eschema for eschema in schema.entities() - if not (eschema.final or eschema in skiptypes)] - for eschema in sorted(eschemas): - 
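The schema.py hunk above makes WorkflowableEntityType a documented base class and rewrites make_workflowable() in terms of RelationDefinition. A minimal sketch of a cube schema relying on it; the Ticket entity type and its title attribute are invented for the example:

    # hypothetical cube schema using the WorkflowableEntityType base class
    from yams.buildobjs import String
    from cubicweb.schema import WorkflowableEntityType, make_workflowable

    class Ticket(WorkflowableEntityType):
        """automatically gets in_state, wf_info_for and custom_workflow"""
        title = String(required=True, maxsize=128)

    # for a type that cannot inherit from that base class, make_workflowable()
    # adds the same relations after the fact, e.g. make_workflowable(SomeType)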
esection.append(self.visit_entityschema(eschema, skiptypes)) - if display_relations: - title = Title(self.req._('Relations'), klass='titleUnderline') - rsection = Section(children=(title,)) - layout.append(rsection) - relations = [rschema for rschema in schema.relations() - if not (rschema.final or rschema.type in skiptypes)] - keys = [(rschema.type, rschema) for rschema in relations] - for key, rschema in sorted(keys): - relstr = self.visit_relationschema(rschema) - rsection.append(relstr) - return layout - - def _entity_attributes_data(self, eschema): - _ = self.req._ - data = [_('attribute'), _('type'), _('default'), _('constraints')] - for rschema, aschema in eschema.attribute_definitions(): - rdef = eschema.rdef(rschema) - if not rdef.may_have_permission('read', self.req): - continue - aname = rschema.type - if aname == 'eid': - continue - data.append('%s (%s)' % (aname, _(aname))) - data.append(_(aschema.type)) - defaultval = eschema.default(aname) - if defaultval is not None: - default = self.to_string(defaultval) - elif rdef.cardinality[0] == '1': - default = _('required field') - else: - default = '' - data.append(default) - constraints = rschema.rproperty(eschema.type, aschema.type, - 'constraints') - data.append(', '.join(str(constr) for constr in constraints)) - return data - - def eschema_link_url(self, eschema): - return self.req.build_url('cwetype/%s' % eschema) - - def rschema_link_url(self, rschema): - return self.req.build_url('cwrtype/%s' % rschema) - - def possible_views(self, etype): - rset = self.req.etype_rset(etype) - return [v for v in self._possible_views(self.req, rset) - if v.category != 'startupview'] - - def stereotype(self, name): - return Span((' <<%s>>' % name,), klass='stereotype') - - def visit_entityschema(self, eschema, skiptypes=()): - """get a layout for an entity schema""" - etype = eschema.type - layout = Section(children=' ', klass='clear') - layout.append(Link(etype,' ' , id=etype)) # anchor - title = Link(self.eschema_link_url(eschema), etype) - boxchild = [Section(children=(title, ' (%s)'% eschema.display_name(self.req)), klass='title')] - table = Table(cols=4, rheaders=1, klass='listing', - children=self._entity_attributes_data(eschema)) - boxchild.append(Section(children=(table,), klass='body')) - data = [] - data.append(Section(children=boxchild, klass='box')) - data.append(Section(children='', klass='vl')) - data.append(Section(children='', klass='hl')) - t_vars = [] - rels = [] - first = True - for rschema, targetschemas, role in eschema.relation_definitions(): - if rschema.type in skiptypes: - continue - rschemaurl = self.rschema_link_url(rschema) - for oeschema in targetschemas: - rdef = rschema.role_rdef(eschema, oeschema, role) - if not rdef.may_have_permission('read', self.req): - continue - label = rschema.type - if role == 'subject': - cards = rschema.rproperty(eschema, oeschema, 'cardinality') - else: - cards = rschema.rproperty(oeschema, eschema, 'cardinality') - cards = cards[::-1] - label = '%s %s (%s) %s' % (CARD_MAP[cards[1]], label, - display_name(self.req, label, role), - CARD_MAP[cards[0]]) - rlink = Link(rschemaurl, label) - elink = Link(self.eschema_link_url(oeschema), oeschema.type) - if first: - t_vars.append(Section(children=(elink,), klass='firstvar')) - rels.append(Section(children=(rlink,), klass='firstrel')) - first = False - else: - t_vars.append(Section(children=(elink,), klass='var')) - rels.append(Section(children=(rlink,), klass='rel')) - data.append(Section(children=rels, klass='rels')) - 
data.append(Section(children=t_vars, klass='vars')) - layout.append(Section(children=data, klass='entityAttributes')) - if eschema.final: # stop here for final entities - return layout - _ = self.req._ - if self.req.user.matching_groups('managers'): - # layout.append(self.format_acls(eschema, ('read', 'add', 'delete', 'update'))) - # possible views for this entity type - views = [_(view.title) for view in self.possible_views(etype)] - layout.append(Section(children=(Table(cols=1, rheaders=1, - children=[_('views')]+views),), - klass='views')) - return layout - - def visit_relationschema(self, rschema, title=True): - """get a layout for a relation schema""" - _ = self.req._ - if title: - title = Link(self.rschema_link_url(rschema), rschema.type) - stereotypes = [] - if rschema.meta: - stereotypes.append('meta') - if rschema.symmetric: - stereotypes.append('symmetric') - if rschema.inlined: - stereotypes.append('inlined') - title = Section(children=(title, ' (%s)'%rschema.display_name(self.req)), klass='title') - if stereotypes: - title.append(self.stereotype(','.join(stereotypes))) - layout = Section(children=(title,), klass='schema') - else: - layout = Section(klass='schema') - data = [_('from'), _('to')] - schema = rschema.schema - rschema_objects = rschema.objects() - if rschema_objects: - # might be empty - properties = [p for p in RelationDefinitionSchema.rproperty_defs(rschema_objects[0]) - if not p in ('cardinality', 'composite', 'eid')] - else: - properties = [] - data += [_(prop) for prop in properties] - cols = len(data) - done = set() - for subjtype, objtypes in rschema.associations(): - for objtype in objtypes: - if (subjtype, objtype) in done: - continue - done.add((subjtype, objtype)) - if rschema.symmetric: - done.add((objtype, subjtype)) - data.append(Link(self.eschema_link_url(schema[subjtype]), subjtype)) - data.append(Link(self.eschema_link_url(schema[objtype]), objtype)) - rdef = rschema.rdef(subjtype, objtype) - for prop in properties: - val = getattr(rdef, prop) - if val is None: - val = '' - elif isinstance(val, (list, tuple)): - val = ', '.join(str(v) for v in val) - elif val and isinstance(val, basestring): - val = _(val) - else: - val = str(val) - data.append(Text(val)) - table = Table(cols=cols, rheaders=1, children=data, klass='listing') - layout.append(Section(children=(table,), klass='relationDefinition')) - #if self.req.user.matching_groups('managers'): - # layout.append(self.format_acls(rschema, ('read', 'add', 'delete'))) - layout.append(Section(children='', klass='clear')) - return layout - - def to_string(self, value): - """used to converte arbitrary values to encoded string""" - if isinstance(value, unicode): - return value.encode(self.encoding, 'replace') - return str(value) diff -r b619531ddbd2 -r b6e250dd7a7d selectors.py --- a/selectors.py Fri Apr 23 12:40:48 2010 +0200 +++ b/selectors.py Fri Apr 23 12:42:53 2010 +0200 @@ -103,7 +103,7 @@ __regid__ = 'loggeduserlink' def call(self): - if self._cw.cnx.anonymous_connection: + if self._cw.session.anonymous_session: # display login link ... else: @@ -1031,7 +1031,7 @@ def score(self, req, rset, row, col): try: return len(req.execute(self.rql, {'x': rset[row][col], - 'u': req.user.eid}, 'x')) + 'u': req.user.eid})) except Unauthorized: return 0 @@ -1039,12 +1039,24 @@ @objectify_selector @lltrace +def no_cnx(cls, req, rset, *args, **kwargs): + """Return 1 if the web session has no connection set. This occurs when + anonymous access is not allowed and user isn't authenticated. 
+ + May only be used on the web side, not on the data repository side. + """ + if not req.cnx: + return 1 + return 0 + +@objectify_selector +@lltrace def authenticated_user(cls, req, **kwargs): """Return 1 if the user is authenticated (e.g. not the anonymous user). May only be used on the web side, not on the data repository side. """ - if req.cnx.anonymous_connection: + if req.session.anonymous_session: return 0 return 1 diff -r b619531ddbd2 -r b6e250dd7a7d server/hook.py diff -r b619531ddbd2 -r b6e250dd7a7d server/migractions.py --- a/server/migractions.py Fri Apr 23 12:40:48 2010 +0200 +++ b/server/migractions.py Fri Apr 23 12:42:53 2010 +0200 @@ -268,9 +268,9 @@ if self.session: self.session.set_pool() - def rqlexecall(self, rqliter, cachekey=None, ask_confirm=True): + def rqlexecall(self, rqliter, ask_confirm=True): for rql, kwargs in rqliter: - self.rqlexec(rql, kwargs, cachekey, ask_confirm=ask_confirm) + self.rqlexec(rql, kwargs, ask_confirm=ask_confirm) @cached def _create_context(self): @@ -361,14 +361,14 @@ # handle groups newgroups = list(erschema.get_groups(action)) for geid, gname in self.rqlexec('Any G, GN WHERE T %s G, G name GN, ' - 'T eid %%(x)s' % perm, {'x': teid}, 'x', + 'T eid %%(x)s' % perm, {'x': teid}, ask_confirm=False): if not gname in newgroups: if not confirm or self.confirm('Remove %s permission of %s to %s?' % (action, erschema, gname)): self.rqlexec('DELETE T %s G WHERE G eid %%(x)s, T eid %s' % (perm, teid), - {'x': geid}, 'x', ask_confirm=False) + {'x': geid}, ask_confirm=False) else: newgroups.remove(gname) for gname in newgroups: @@ -376,7 +376,7 @@ % (action, erschema, gname)): self.rqlexec('SET T %s G WHERE G eid %%(x)s, T eid %s' % (perm, teid), - {'x': gm[gname]}, 'x', ask_confirm=False) + {'x': gm[gname]}, ask_confirm=False) # handle rql expressions newexprs = dict((expr.expression, expr) for expr in erschema.get_rqlexprs(action)) for expreid, expression in self.rqlexec('Any E, EX WHERE T %s E, E expression EX, ' @@ -388,7 +388,7 @@ # deleting the relation will delete the expression entity self.rqlexec('DELETE T %s E WHERE E eid %%(x)s, T eid %s' % (perm, teid), - {'x': expreid}, 'x', ask_confirm=False) + {'x': expreid}, ask_confirm=False) else: newexprs.pop(expression) for expression in newexprs.values(): @@ -399,7 +399,7 @@ 'X expression %%(expr)s, X mainvars %%(vars)s, T %s X ' 'WHERE T eid %%(x)s' % perm, {'expr': expr, 'exprtype': exprtype, - 'vars': expression.mainvars, 'x': teid}, 'x', + 'vars': expression.mainvars, 'x': teid}, ask_confirm=False) def _synchronize_rschema(self, rtype, syncrdefs=True, syncperms=True, syncprops=True): @@ -524,14 +524,13 @@ newcstr = None if newcstr is None: self.rqlexec('DELETE X constrained_by C WHERE C eid %(x)s', - {'x': cstr.eid}, 'x', - ask_confirm=confirm) + {'x': cstr.eid}, ask_confirm=confirm) else: newconstraints.remove(newcstr) value = unicode(newcstr.serialize()) if value != unicode(cstr.serialize()): self.rqlexec('SET X value %(v)s WHERE X eid %(x)s', - {'x': cstr.eid, 'v': value}, 'x', + {'x': cstr.eid, 'v': value}, ask_confirm=confirm) # 2. 
add new constraints cstrtype_map = self.cstrtype_mapping() @@ -644,10 +643,10 @@ self.cmd_drop_relation_definition( str(fromtype), rschema.type, str(totype)) # execute post-remove files - for pack in reversed(removedcubes): - self.exec_event_script('postremove', self.config.cube_dir(pack)) + for cube in reversed(removedcubes): + self.exec_event_script('postremove', self.config.cube_dir(cube)) self.rqlexec('DELETE CWProperty X WHERE X pkey %(pk)s', - {'pk': u'system.version.'+pack}, ask_confirm=False) + {'pk': u'system.version.'+cube}, ask_confirm=False) self.commit() # schema migration actions ################################################ @@ -743,8 +742,8 @@ continue if instspschema.specializes() != eschema: self.rqlexec('SET D specializes P WHERE D eid %(d)s, P name %(pn)s', - {'d': instspschema.eid, - 'pn': eschema.type}, ask_confirm=confirm) + {'d': instspschema.eid, 'pn': eschema.type}, + ask_confirm=confirm) for rschema, tschemas, role in spschema.relation_definitions(True): for tschema in tschemas: if not tschema in instschema: @@ -1086,12 +1085,12 @@ for etype in wfof: rset = self.rqlexec( 'SET X workflow_of ET WHERE X eid %(x)s, ET name %(et)s', - {'x': wf.eid, 'et': etype}, 'x', ask_confirm=False) + {'x': wf.eid, 'et': etype}, ask_confirm=False) assert rset, 'unexistant entity type %s' % etype if default: self.rqlexec( 'SET ET default_workflow X WHERE X eid %(x)s, ET name %(et)s', - {'x': wf.eid, 'et': etype}, 'x', ask_confirm=False) + {'x': wf.eid, 'et': etype}, ask_confirm=False) if commit: self.commit() return wf @@ -1215,6 +1214,9 @@ def rqlexec(self, rql, kwargs=None, cachekey=None, build_descr=True, ask_confirm=True): """rql action""" + if cachekey is not None: + warn('[3.8] cachekey is deprecated, you can safely remove this argument', + DeprecationWarning, stacklevel=2) if not isinstance(rql, (tuple, list)): rql = ( (rql, kwargs), ) res = None @@ -1226,7 +1228,7 @@ msg = rql if not ask_confirm or self.confirm('Execute rql: %s ?' % msg): try: - res = execute(rql, kwargs, cachekey, build_descr=build_descr) + res = execute(rql, kwargs, build_descr=build_descr) except Exception, ex: if self.confirm('Error: %s\nabort?' 
% ex): raise diff -r b619531ddbd2 -r b6e250dd7a7d server/msplanner.py --- a/server/msplanner.py Fri Apr 23 12:40:48 2010 +0200 +++ b/server/msplanner.py Fri Apr 23 12:42:53 2010 +0200 @@ -309,21 +309,24 @@ # find for each source which variable/solution are supported for varname, varobj in self.rqlst.defined_vars.items(): # if variable has an eid specified, we can get its source directly - # NOTE: use uidrels and not constnode to deal with "X eid IN(1,2,3,4)" - if varobj.stinfo['uidrels']: - vrels = varobj.stinfo['relations'] - varobj.stinfo['uidrels'] - for rel in varobj.stinfo['uidrels']: - for const in rel.children[1].get_nodes(Constant): - eid = const.eval(self.plan.args) - source = self._session.source_from_eid(eid) - if vrels and not any(source.support_relation(r.r_type) - for r in vrels): - self._set_source_for_term(self.system_source, varobj) - else: - self._set_source_for_term(source, varobj) + # NOTE: use uidrel and not constnode to deal with "X eid IN(1,2,3,4)" + if varobj.stinfo['uidrel'] is not None: + rel = varobj.stinfo['uidrel'] + hasrel = len(varobj.stinfo['relations']) > 1 + for const in rel.children[1].get_nodes(Constant): + eid = const.eval(self.plan.args) + source = self._session.source_from_eid(eid) + if (source is self.system_source + or (hasrel and + not any(source.support_relation(r.r_type) + for r in varobj.stinfo['relations'] + if not r is rel))): + self._set_source_for_term(self.system_source, varobj) + else: + self._set_source_for_term(source, varobj) continue rels = varobj.stinfo['relations'] - if not rels and not varobj.stinfo['typerels']: + if not rels and varobj.stinfo['typerel'] is None: # (rare) case where the variable has no type specified nor # relation accessed ex. "Any MAX(X)" self._set_source_for_term(self.system_source, varobj) @@ -700,7 +703,7 @@ for var in select.defined_vars.itervalues(): if not var in terms: stinfo = var.stinfo - for ovar, rtype in stinfo['attrvars']: + for ovar, rtype in stinfo.get('attrvars', ()): if ovar in terms: needsel.add(var.name) terms.append(var) @@ -778,20 +781,19 @@ # variable is refed by an outer scope and should be substituted # using an 'identity' relation (else we'll get a conflict of # temporary tables) - if rhsvar in terms and not lhsvar in terms: + if rhsvar in terms and not lhsvar in terms and lhsvar.scope is lhsvar.stmt: self._identity_substitute(rel, lhsvar, terms, needsel) - elif lhsvar in terms and not rhsvar in terms: + elif lhsvar in terms and not rhsvar in terms and rhsvar.scope is rhsvar.stmt: self._identity_substitute(rel, rhsvar, terms, needsel) def _identity_substitute(self, relation, var, terms, needsel): newvar = self._insert_identity_variable(relation.scope, var) - if newvar is not None: - # ensure relation is using '=' operator, else we rely on a - # sqlgenerator side effect (it won't insert an inequality operator - # in this case) - relation.children[1].operator = '=' - terms.append(newvar) - needsel.add(newvar.name) + # ensure relation is using '=' operator, else we rely on a + # sqlgenerator side effect (it won't insert an inequality operator + # in this case) + relation.children[1].operator = '=' + terms.append(newvar) + needsel.add(newvar.name) def _choose_term(self, sourceterms): """pick one term among terms supported by a source, which will be used @@ -1419,7 +1421,7 @@ return False if not var in terms or used_in_outer_scope(var, self.current_scope): return False - if any(v for v, _ in var.stinfo['attrvars'] if not v in terms): + if any(v for v, _ in var.stinfo.get('attrvars', ()) if not 
v in terms): return False return True diff -r b619531ddbd2 -r b6e250dd7a7d server/mssteps.py --- a/server/mssteps.py Fri Apr 23 12:40:48 2010 +0200 +++ b/server/mssteps.py Fri Apr 23 12:42:53 2010 +0200 @@ -61,7 +61,7 @@ if not isinstance(vref, VariableRef): continue var = vref.variable - if var.stinfo['attrvars']: + if var.stinfo.get('attrvars'): for lhsvar, rtype in var.stinfo['attrvars']: if lhsvar.name in srqlst.defined_vars: key = '%s.%s' % (lhsvar.name, rtype) diff -r b619531ddbd2 -r b6e250dd7a7d server/querier.py --- a/server/querier.py Fri Apr 23 12:40:48 2010 +0200 +++ b/server/querier.py Fri Apr 23 12:42:53 2010 +0200 @@ -326,16 +326,9 @@ varkwargs = {} if not session.transaction_data.get('security-rqlst-cache'): for var in rqlst.defined_vars.itervalues(): - for rel in var.stinfo['uidrels']: - const = rel.children[1].children[0] - try: - varkwargs[var.name] = typed_eid(const.eval(self.args)) - break - except AttributeError: - #from rql.nodes import Function - #assert isinstance(const, Function) - # X eid IN(...) - pass + if var.stinfo['constnode'] is not None: + eid = var.stinfo['constnode'].eval(self.args) + varkwargs[var.name] = typed_eid(eid) # dictionnary of variables restricted for security reason localchecks = {} restricted_vars = set() @@ -529,16 +522,22 @@ def set_schema(self, schema): self.schema = schema repo = self._repo + # rql st and solution cache. Don't bother using a Cache instance: we + # should have a limited number of queries in there, since there are no + # entries in this cache for user queries (which have no args) + self._rql_cache = {} + # rql cache key cache + self._rql_ck_cache = Cache(repo.config['rql-cache-size']) + # some cache usage stats + self.cache_hit, self.cache_miss = 0, 0 # rql parsing / analysing helper self.solutions = repo.vreg.solutions - self._rql_cache = Cache(repo.config['rql-cache-size']) - self.cache_hit, self.cache_miss = 0, 0 + rqlhelper = repo.vreg.rqlhelper + self._parse = rqlhelper.parse + self._annotate = rqlhelper.annotate # rql planner # note: don't use repo.sources, may not be built yet, and also "admin" # isn't an actual source - rqlhelper = repo.vreg.rqlhelper - self._parse = rqlhelper.parse - self._annotate = rqlhelper.annotate if len([uri for uri in repo.config.sources() if uri != 'admin']) < 2: from cubicweb.server.ssplanner import SSPlanner self._planner = SSPlanner(schema, rqlhelper) @@ -561,7 +560,7 @@ return InsertPlan(self, rqlst, args, session) return ExecutionPlan(self, rqlst, args, session) - def execute(self, session, rql, args=None, eid_key=None, build_descr=True): + def execute(self, session, rql, args=None, build_descr=True): """execute a rql query, return resulting rows and their description in a `ResultSet` object @@ -570,12 +569,6 @@ * `build_descr` is a boolean flag indicating if the description should be built on select queries (if false, the description will be en empty list) - * `eid_key` must be both a key in args and a substitution in the rql - query. It should be used to enhance cacheability of rql queries. - It may be a tuple for keys in args. - `eid_key` must be provided in cases where a eid substitution is provided - and resolves ambiguities in the possible solutions inferred for each - variable in the query. 
on INSERT queries, there will be one row with the eid of each inserted entity @@ -591,40 +584,33 @@ print '*'*80 print 'querier input', rql, args # parse the query and binds variables - if eid_key is not None: - if not isinstance(eid_key, (tuple, list)): - eid_key = (eid_key,) - cachekey = [rql] - for key in eid_key: - try: - etype = self._repo.type_from_eid(args[key], session) - except KeyError: - raise QueryError('bad cache key %s (no value)' % key) - except TypeError: - raise QueryError('bad cache key %s (value: %r)' % ( - key, args[key])) - except UnknownEid: - # we want queries such as "Any X WHERE X eid 9999" - # return an empty result instead of raising UnknownEid - return empty_rset(rql, args) - cachekey.append(etype) - # ensure eid is correctly typed in args - args[key] = typed_eid(args[key]) - cachekey = tuple(cachekey) - else: + try: cachekey = rql - try: + if args: + eidkeys = self._rql_ck_cache[rql] + if eidkeys: + try: + cachekey = self._repo.querier_cache_key(session, rql, + args, eidkeys) + except UnknownEid: + # we want queries such as "Any X WHERE X eid 9999" + # return an empty result instead of raising UnknownEid + return empty_rset(rql, args) rqlst = self._rql_cache[cachekey] self.cache_hit += 1 except KeyError: self.cache_miss += 1 rqlst = self.parse(rql) try: - self.solutions(session, rqlst, args) + eidkeys = self.solutions(session, rqlst, args) except UnknownEid: # we want queries such as "Any X WHERE X eid 9999" return an # empty result instead of raising UnknownEid return empty_rset(rql, args, rqlst) + self._rql_ck_cache[rql] = eidkeys + if eidkeys: + cachekey = self._repo.querier_cache_key(session, rql, args, + eidkeys) self._rql_cache[cachekey] = rqlst orig_rqlst = rqlst if rqlst.TYPE != 'select': @@ -684,7 +670,7 @@ # FIXME: get number of affected entities / relations on non # selection queries ? 
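A change repeated throughout this changeset, in querier.py and migractions.py above and in repository.py, session.py and the sources below, is that execute() no longer takes an eid_key/cachekey argument: the querier now derives the cache key itself through querier_cache_key(). A sketch of the corresponding call-site update; the wrapper function and the query are illustrative only:

    # illustrative call-site update for the execute() signature change;
    # 'session' is any object exposing the db-api like execute() method
    def fetch_entity_rset(session, eid):
        # before 3.8, callers passed the eid substitution key explicitly:
        #     session.execute('Any X WHERE X eid %(x)s', {'x': eid}, 'x')
        # from 3.8 on, the extra argument is deprecated and simply dropped:
        return session.execute('Any X WHERE X eid %(x)s', {'x': eid})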
# return a result set object - return ResultSet(results, rql, args, descr, eid_key, orig_rqlst) + return ResultSet(results, rql, args, descr, orig_rqlst) from logging import getLogger from cubicweb import set_log_methods diff -r b619531ddbd2 -r b6e250dd7a7d server/repository.py --- a/server/repository.py Fri Apr 23 12:40:48 2010 +0200 +++ b/server/repository.py Fri Apr 23 12:42:53 2010 +0200 @@ -33,7 +33,7 @@ from yams.schema import role_name from rql import RQLSyntaxError -from cubicweb import (CW_SOFTWARE_ROOT, CW_MIGRATION_MAP, +from cubicweb import (CW_SOFTWARE_ROOT, CW_MIGRATION_MAP, QueryError, UnknownEid, AuthenticationError, ExecutionError, ETypeNotSupportedBySources, MultiSourcesError, BadConnectionId, Unauthorized, ValidationError, @@ -76,12 +76,12 @@ with security_enabled(session, read=False): session.execute('DELETE X %s Y WHERE X eid %%(x)s, ' 'NOT Y eid %%(y)s' % rtype, - {'x': eidfrom, 'y': eidto}, 'x') + {'x': eidfrom, 'y': eidto}) if card[1] in '1?': with security_enabled(session, read=False): session.execute('DELETE X %sY WHERE Y eid %%(y)s, ' 'NOT X eid %%(x)s' % rtype, - {'x': eidfrom, 'y': eidto}, 'y') + {'x': eidfrom, 'y': eidto}) class Repository(object): @@ -408,7 +408,7 @@ """return a CWUser entity for user with the given eid""" cls = self.vreg['etypes'].etype_class('CWUser') rql = cls.fetch_rql(session.user, ['X eid %(x)s']) - rset = session.execute(rql, {'x': eid}, 'x') + rset = session.execute(rql, {'x': eid}) assert len(rset) == 1, rset cwuser = rset.get_entity(0, 0) # pylint: disable-msg=W0104 @@ -567,7 +567,7 @@ session.commit() return session.id - def execute(self, sessionid, rqlstring, args=None, eid_key=None, build_descr=True): + def execute(self, sessionid, rqlstring, args=None, build_descr=True): """execute a RQL query * rqlstring should be an unicode string or a plain ascii string @@ -578,7 +578,7 @@ session = self._get_session(sessionid, setpool=True) try: try: - return self.querier.execute(session, rqlstring, args, eid_key, + return self.querier.execute(session, rqlstring, args, build_descr) except (Unauthorized, RQLSyntaxError): raise @@ -836,6 +836,21 @@ """return the source for the given entity's eid""" return self.sources_by_uri[self.type_and_source_from_eid(eid, session)[1]] + def querier_cache_key(self, session, rql, args, eidkeys): + cachekey = [rql] + for key in sorted(eidkeys): + try: + etype = self.type_from_eid(args[key], session) + except KeyError: + raise QueryError('bad cache key %s (no value)' % key) + except TypeError: + raise QueryError('bad cache key %s (value: %r)' % ( + key, args[key])) + cachekey.append(etype) + # ensure eid is correctly typed in args + args[key] = typed_eid(args[key]) + return tuple(cachekey) + def eid2extid(self, source, eid, session=None): """get local id from an eid""" etype, uri, extid = self.type_and_source_from_eid(eid, session) @@ -901,7 +916,7 @@ else: # minimal meta-data session.execute('SET X is E WHERE X eid %(x)s, E name %(name)s', - {'x': entity.eid, 'name': entity.__regid__}, 'x') + {'x': entity.eid, 'name': entity.__regid__}) session.commit(reset_pool) return eid except: @@ -949,7 +964,7 @@ rql = 'DELETE X %s Y WHERE X eid %%(x)s' % rtype else: rql = 'DELETE Y %s X WHERE X eid %%(x)s' % rtype - session.execute(rql, {'x': eid}, 'x', build_descr=False) + session.execute(rql, {'x': eid}, build_descr=False) self.system_source.delete_info(session, entity, sourceuri, extid) def locate_relation_source(self, session, subject, rtype, object): diff -r b619531ddbd2 -r b6e250dd7a7d server/rqlannotation.py --- 
a/server/rqlannotation.py Fri Apr 23 12:40:48 2010 +0200 +++ b/server/rqlannotation.py Fri Apr 23 12:42:53 2010 +0200 @@ -38,7 +38,7 @@ stinfo['invariant'] = False stinfo['principal'] = _select_main_var(stinfo['rhsrelations']) continue - if not stinfo['relations'] and not stinfo['typerels']: + if not stinfo['relations'] and stinfo['typerel'] is None: # Any X, Any MAX(X)... # those particular queries should be executed using the system # entities table unless there is some type restriction @@ -80,7 +80,7 @@ continue rschema = getrschema(rel.r_type) if rel.optional: - if rel in stinfo['optrelations']: + if rel in stinfo.get('optrelations', ()): # optional variable can't be invariant if this is the lhs # variable of an inlined relation if not rel in stinfo['rhsrelations'] and rschema.inlined: @@ -296,7 +296,7 @@ def compute(self, rqlst): # set domains for each variable for varname, var in rqlst.defined_vars.iteritems(): - if var.stinfo['uidrels'] or \ + if var.stinfo['uidrel'] is not None or \ self.eschema(rqlst.solutions[0][varname]).final: ptypes = var.stinfo['possibletypes'] else: @@ -339,7 +339,7 @@ def set_rel_constraint(self, term, rel, etypes_func): if isinstance(term, VariableRef) and self.is_ambiguous(term.variable): var = term.variable - if len(var.stinfo['relations'] - var.stinfo['typerels']) == 1 \ + if len(var.stinfo['relations']) == 1 \ or rel.sqlscope is var.sqlscope or rel.r_type == 'identity': self.restrict(var, frozenset(etypes_func())) try: @@ -356,7 +356,7 @@ if isinstance(other, VariableRef) and isinstance(other.variable, Variable): deambiguifier = other.variable if not var is self.deambification_map.get(deambiguifier): - if not var.stinfo['typerels']: + if var.stinfo['typerel'] is None: otheretypes = deambiguifier.stinfo['possibletypes'] elif not self.is_ambiguous(deambiguifier): otheretypes = self.varsols[deambiguifier] @@ -364,7 +364,7 @@ # we know variable won't be invariant, try to use # it to deambguify the current variable otheretypes = self.varsols[deambiguifier] - if not deambiguifier.stinfo['typerels']: + if deambiguifier.stinfo['typerel'] is None: # if deambiguifier has no type restriction using 'is', # don't record it deambiguifier = None diff -r b619531ddbd2 -r b6e250dd7a7d server/schemaserial.py --- a/server/schemaserial.py Fri Apr 23 12:40:48 2010 +0200 +++ b/server/schemaserial.py Fri Apr 23 12:42:53 2010 +0200 @@ -52,7 +52,10 @@ def cstrtype_mapping(cursor): """cached constraint types mapping""" - return dict(cursor.execute('Any T, X WHERE X is CWConstraintType, X name T')) + map = dict(cursor.execute('Any T, X WHERE X is CWConstraintType, X name T')) + if not 'BoundConstraint' in map: + map['BoundConstraint'] = map['BoundaryConstraint'] + return map # schema / perms deserialization ############################################## @@ -240,10 +243,13 @@ cstrtypemap = {} rql = 'INSERT CWConstraintType X: X name %(ct)s' for cstrtype in CONSTRAINTS: + if cstrtype == 'BoundConstraint': + continue # XXX deprecated in yams 0.29 / cw 3.8.1 cstrtypemap[cstrtype] = execute(rql, {'ct': unicode(cstrtype)}, build_descr=False)[0][0] if pb is not None: pb.update() + cstrtypemap['BoundConstraint'] = cstrtypemap['BoundaryConstraint'] # serialize relations for rschema in schema.relations(): # skip virtual relations such as eid, has_text and identity diff -r b619531ddbd2 -r b6e250dd7a7d server/serverconfig.py --- a/server/serverconfig.py Fri Apr 23 12:40:48 2010 +0200 +++ b/server/serverconfig.py Fri Apr 23 12:42:53 2010 +0200 @@ -23,12 +23,12 @@ 'default': 'admin', 'help': 
"cubicweb manager account's login " '(this user will be created)', - 'inputlevel': 0, + 'level': 0, }), ('password', {'type' : 'password', 'default': REQUIRED, 'help': "cubicweb manager account's password", - 'inputlevel': 0, + 'level': 0, }), ) @@ -93,39 +93,39 @@ {'type' : 'string', 'default': None, 'help': 'host name if not correctly detectable through gethostname', - 'group': 'main', 'inputlevel': 1, + 'group': 'main', 'level': 1, }), ('pid-file', {'type' : 'string', 'default': Method('default_pid_file'), 'help': 'repository\'s pid file', - 'group': 'main', 'inputlevel': 2, + 'group': 'main', 'level': 2, }), ('uid', {'type' : 'string', 'default': None, 'help': 'if this option is set, use the specified user to start \ the repository rather than the user running the command', - 'group': 'main', 'inputlevel': (CubicWebConfiguration.mode == 'installed') and 0 or 1, + 'group': 'main', 'level': (CubicWebConfiguration.mode == 'installed') and 0 or 1, }), ('session-time', {'type' : 'time', 'default': '30min', 'help': 'session expiration time, default to 30 minutes', - 'group': 'main', 'inputlevel': 1, + 'group': 'main', 'level': 1, }), ('connections-pool-size', {'type' : 'int', 'default': 4, 'help': 'size of the connections pools. Each source supporting multiple \ connections will have this number of opened connections.', - 'group': 'main', 'inputlevel': 1, + 'group': 'main', 'level': 1, }), ('rql-cache-size', {'type' : 'int', 'default': 300, 'help': 'size of the parsed rql cache size.', - 'group': 'main', 'inputlevel': 1, + 'group': 'main', 'level': 1, }), ('undo-support', {'type' : 'string', 'default': '', @@ -133,20 +133,20 @@ [C]reate [U]pdate [D]elete entities / [A]dd [R]emove relation. Leave it empty \ for no undo support, set it to CUDAR for full undo support, or to DR for \ support undoing of deletion only.', - 'group': 'main', 'inputlevel': 1, + 'group': 'main', 'level': 1, }), ('keep-transaction-lifetime', {'type' : 'int', 'default': 7, 'help': 'number of days during which transaction records should be \ kept (hence undoable).', - 'group': 'main', 'inputlevel': 1, + 'group': 'main', 'level': 1, }), ('multi-sources-etypes', {'type' : 'csv', 'default': (), 'help': 'defines which entity types from this repository are used \ by some other instances. 
You should set this properly so those instances to \ detect updates / deletions.', - 'group': 'main', 'inputlevel': 1, + 'group': 'main', 'level': 1, }), ('delay-full-text-indexation', @@ -155,7 +155,7 @@ ' to be done when entity are added/modified by users, activate this ' 'option and setup a job using cubicweb-ctl db-rebuild-fti on your ' 'system (using cron for instance).', - 'group': 'main', 'inputlevel': 1, + 'group': 'main', 'level': 1, }), # email configuration @@ -168,7 +168,7 @@ modes are "default-dest-addrs" (emails specified in the configuration \ variable with the same name), "users" (every users which has activated \ account with an email set), "none" (no notification).', - 'group': 'email', 'inputlevel': 1, + 'group': 'email', 'level': 1, }), ('default-dest-addrs', {'type' : 'csv', @@ -176,14 +176,14 @@ 'help': 'comma separated list of email addresses that will be used \ as default recipient when an email is sent and the notification has no \ specific recipient rules.', - 'group': 'email', 'inputlevel': 1, + 'group': 'email', 'level': 1, }), ('supervising-addrs', {'type' : 'csv', 'default': (), 'help': 'comma separated list of email addresses that will be \ notified of every changes.', - 'group': 'email', 'inputlevel': 2, + 'group': 'email', 'level': 2, }), # pyro server.serverconfig ('pyro-host', @@ -192,7 +192,7 @@ 'help': 'Pyro server host, if not detectable correctly through \ gethostname(). It may contains port information using : notation, \ and if not set, it will be choosen randomly', - 'group': 'pyro', 'inputlevel': 2, + 'group': 'pyro', 'level': 2, }), ) + CubicWebConfiguration.options) diff -r b619531ddbd2 -r b6e250dd7a7d server/session.py --- a/server/session.py Fri Apr 23 12:40:48 2010 +0200 +++ b/server/session.py Fri Apr 23 12:42:53 2010 +0200 @@ -13,6 +13,7 @@ import threading from time import time from uuid import uuid4 +from warnings import warn from logilab.common.deprecation import deprecated from rql.nodes import VariableRef, Function, ETYPE_PYOBJ_MAP, etype_from_pyobj @@ -642,8 +643,14 @@ return self.repo.source_from_eid(eid, self) def execute(self, rql, kwargs=None, eid_key=None, build_descr=True): - """db-api like method directly linked to the querier execute method""" - rset = self._execute(self, rql, kwargs, eid_key, build_descr) + """db-api like method directly linked to the querier execute method. + + See :meth:`cubicweb.dbapi.Cursor.execute` documentation. + """ + if eid_key is not None: + warn('[3.8] eid_key is deprecated, you can safely remove this argument', + DeprecationWarning, stacklevel=2) + rset = self._execute(self, rql, kwargs, build_descr) rset.req = self return rset diff -r b619531ddbd2 -r b6e250dd7a7d server/sources/extlite.py --- a/server/sources/extlite.py Fri Apr 23 12:40:48 2010 +0200 +++ b/server/sources/extlite.py Fri Apr 23 12:42:53 2010 +0200 @@ -66,7 +66,7 @@ 'default': None, 'help': 'path to the sqlite database file used to do queries on the \ repository.', - 'inputlevel': 2, + 'level': 2, }), ) diff -r b619531ddbd2 -r b6e250dd7a7d server/sources/ldapuser.py --- a/server/sources/ldapuser.py Fri Apr 23 12:40:48 2010 +0200 +++ b/server/sources/ldapuser.py Fri Apr 23 12:42:53 2010 +0200 @@ -58,14 +58,14 @@ 'default': 'ldap', 'help': 'ldap host. 
It may contains port information using \ : notation.', - 'group': 'ldap-source', 'inputlevel': 1, + 'group': 'ldap-source', 'level': 1, }), ('protocol', {'type' : 'choice', 'default': 'ldap', 'choices': ('ldap', 'ldaps', 'ldapi'), 'help': 'ldap protocol (allowed values: ldap, ldaps, ldapi)', - 'group': 'ldap-source', 'inputlevel': 1, + 'group': 'ldap-source', 'level': 1, }), ('auth-mode', @@ -73,13 +73,13 @@ 'default': 'simple', 'choices': ('simple', 'cram_md5', 'digest_md5', 'gssapi'), 'help': 'authentication mode used to authenticate user to the ldap.', - 'group': 'ldap-source', 'inputlevel': 1, + 'group': 'ldap-source', 'level': 1, }), ('auth-realm', {'type' : 'string', 'default': None, 'help': 'realm to use when using gssapi/kerberos authentication.', - 'group': 'ldap-source', 'inputlevel': 1, + 'group': 'ldap-source', 'level': 1, }), ('data-cnx-dn', @@ -87,52 +87,52 @@ 'default': '', 'help': 'user dn to use to open data connection to the ldap (eg used \ to respond to rql queries).', - 'group': 'ldap-source', 'inputlevel': 1, + 'group': 'ldap-source', 'level': 1, }), ('data-cnx-password', {'type' : 'string', 'default': '', 'help': 'password to use to open data connection to the ldap (eg used to respond to rql queries).', - 'group': 'ldap-source', 'inputlevel': 1, + 'group': 'ldap-source', 'level': 1, }), ('user-base-dn', {'type' : 'string', 'default': 'ou=People,dc=logilab,dc=fr', 'help': 'base DN to lookup for users', - 'group': 'ldap-source', 'inputlevel': 0, + 'group': 'ldap-source', 'level': 0, }), ('user-scope', {'type' : 'choice', 'default': 'ONELEVEL', 'choices': ('BASE', 'ONELEVEL', 'SUBTREE'), 'help': 'user search scope', - 'group': 'ldap-source', 'inputlevel': 1, + 'group': 'ldap-source', 'level': 1, }), ('user-classes', {'type' : 'csv', 'default': ('top', 'posixAccount'), 'help': 'classes of user', - 'group': 'ldap-source', 'inputlevel': 1, + 'group': 'ldap-source', 'level': 1, }), ('user-login-attr', {'type' : 'string', 'default': 'uid', 'help': 'attribute used as login on authentication', - 'group': 'ldap-source', 'inputlevel': 1, + 'group': 'ldap-source', 'level': 1, }), ('user-default-group', {'type' : 'csv', 'default': ('users',), 'help': 'name of a group in which ldap users will be by default. 
\ You can set multiple groups by separating them by a comma.', - 'group': 'ldap-source', 'inputlevel': 1, + 'group': 'ldap-source', 'level': 1, }), ('user-attrs-map', {'type' : 'named', 'default': {'uid': 'login', 'gecos': 'email'}, 'help': 'map from ldap user attributes to cubicweb attributes', - 'group': 'ldap-source', 'inputlevel': 1, + 'group': 'ldap-source', 'level': 1, }), ('synchronization-interval', @@ -140,13 +140,13 @@ 'default': '1d', 'help': 'interval between synchronization with the ldap \ directory (default to once a day).', - 'group': 'ldap-source', 'inputlevel': 2, + 'group': 'ldap-source', 'level': 2, }), ('cache-life-time', {'type' : 'time', 'default': '2h', 'help': 'life time of query cache in minutes (default to two hours).', - 'group': 'ldap-source', 'inputlevel': 2, + 'group': 'ldap-source', 'level': 2, }), ) @@ -230,10 +230,10 @@ elif rset: if not execute('SET X address %(addr)s WHERE ' 'U primary_email X, U eid %(u)s', - {'addr': ldapemailaddr, 'u': eid}, 'u'): + {'addr': ldapemailaddr, 'u': eid}): execute('SET X address %(addr)s WHERE ' 'X eid %(x)s', - {'addr': ldapemailaddr, 'x': rset[0][0]}, 'x') + {'addr': ldapemailaddr, 'x': rset[0][0]}) else: # no email found, create it _insert_email(session, ldapemailaddr, eid) @@ -546,7 +546,7 @@ super(LDAPUserSource, self).after_entity_insertion(session, dn, entity) for group in self.user_default_groups: session.execute('SET X in_group G WHERE X eid %(x)s, G name %(group)s', - {'x': entity.eid, 'group': group}, 'x') + {'x': entity.eid, 'group': group}) # search for existant email first try: emailaddr = self._cache[dn][self.user_rev_attrs['email']] @@ -556,7 +556,7 @@ {'addr': emailaddr}) if rset: session.execute('SET U primary_email X WHERE U eid %(u)s, X eid %(x)s', - {'x': rset[0][0], 'u': entity.eid}, 'u') + {'x': rset[0][0], 'u': entity.eid}) else: # not found, create it _insert_email(session, emailaddr, entity.eid) @@ -571,7 +571,7 @@ def _insert_email(session, emailaddr, ueid): session.execute('INSERT EmailAddress X: X address %(addr)s, U primary_email X ' - 'WHERE U eid %(x)s', {'addr': emailaddr, 'x': ueid}, 'x') + 'WHERE U eid %(x)s', {'addr': emailaddr, 'x': ueid}) class GotDN(Exception): """exception used when a dn localizing the searched user has been found""" diff -r b619531ddbd2 -r b6e250dd7a7d server/sources/native.py --- a/server/sources/native.py Fri Apr 23 12:40:48 2010 +0200 +++ b/server/sources/native.py Fri Apr 23 12:42:53 2010 +0200 @@ -178,43 +178,43 @@ {'type' : 'string', 'default': 'postgres', 'help': 'database driver (postgres or sqlite)', - 'group': 'native-source', 'inputlevel': 1, + 'group': 'native-source', 'level': 1, }), ('db-host', {'type' : 'string', 'default': '', 'help': 'database host', - 'group': 'native-source', 'inputlevel': 1, + 'group': 'native-source', 'level': 1, }), ('db-port', {'type' : 'string', 'default': '', 'help': 'database port', - 'group': 'native-source', 'inputlevel': 1, + 'group': 'native-source', 'level': 1, }), ('db-name', {'type' : 'string', 'default': Method('default_instance_id'), 'help': 'database name', - 'group': 'native-source', 'inputlevel': 0, + 'group': 'native-source', 'level': 0, }), ('db-user', {'type' : 'string', 'default': CubicWebNoAppConfiguration.mode == 'user' and getlogin() or 'cubicweb', 'help': 'database user', - 'group': 'native-source', 'inputlevel': 0, + 'group': 'native-source', 'level': 0, }), ('db-password', {'type' : 'password', 'default': '', 'help': 'database password', - 'group': 'native-source', 'inputlevel': 0, + 'group': 
'native-source', 'level': 0, }), ('db-encoding', {'type' : 'string', 'default': 'utf8', 'help': 'database encoding', - 'group': 'native-source', 'inputlevel': 1, + 'group': 'native-source', 'level': 1, }), ) diff -r b619531ddbd2 -r b6e250dd7a7d server/sources/pyrorql.py --- a/server/sources/pyrorql.py Fri Apr 23 12:40:48 2010 +0200 +++ b/server/sources/pyrorql.py Fri Apr 23 12:42:53 2010 +0200 @@ -54,53 +54,53 @@ {'type' : 'string', 'default': REQUIRED, 'help': 'identifier of the repository in the pyro name server', - 'group': 'pyro-source', 'inputlevel': 0, + 'group': 'pyro-source', 'level': 0, }), ('mapping-file', {'type' : 'string', 'default': REQUIRED, 'help': 'path to a python file with the schema mapping definition', - 'group': 'pyro-source', 'inputlevel': 1, + 'group': 'pyro-source', 'level': 1, }), ('cubicweb-user', {'type' : 'string', 'default': REQUIRED, 'help': 'user to use for connection on the distant repository', - 'group': 'pyro-source', 'inputlevel': 0, + 'group': 'pyro-source', 'level': 0, }), ('cubicweb-password', {'type' : 'password', 'default': '', 'help': 'user to use for connection on the distant repository', - 'group': 'pyro-source', 'inputlevel': 0, + 'group': 'pyro-source', 'level': 0, }), ('base-url', {'type' : 'string', 'default': '', 'help': 'url of the web site for the distant repository, if you want ' 'to generate external link to entities from this repository', - 'group': 'pyro-source', 'inputlevel': 1, + 'group': 'pyro-source', 'level': 1, }), ('pyro-ns-host', {'type' : 'string', 'default': None, 'help': 'Pyro name server\'s host. If not set, default to the value \ from all_in_one.conf. It may contains port information using : notation.', - 'group': 'pyro-source', 'inputlevel': 1, + 'group': 'pyro-source', 'level': 1, }), ('pyro-ns-group', {'type' : 'string', 'default': None, 'help': 'Pyro name server\'s group where the repository will be \ registered. 
If not set, default to the value from all_in_one.conf.', - 'group': 'pyro-source', 'inputlevel': 1, + 'group': 'pyro-source', 'level': 1, }), ('synchronization-interval', {'type' : 'int', 'default': 5*60, 'help': 'interval between synchronization with the external \ repository (default to 5 minutes).', - 'group': 'pyro-source', 'inputlevel': 2, + 'group': 'pyro-source', 'level': 2, }), ) @@ -286,7 +286,7 @@ session.set_shared_data('sources_error', msg % self.uri) return [] try: - rql, cachekey = RQL2RQL(self).generate(session, union, args) + rql = RQL2RQL(self).generate(session, union, args) except UnknownEid, ex: if server.DEBUG: print ' unknown eid', ex, 'no results' @@ -294,7 +294,7 @@ if server.DEBUG & server.DBG_RQL: print ' translated rql', rql try: - rset = cu.execute(rql, args, cachekey) + rset = cu.execute(rql, args) except Exception, ex: self.exception(str(ex)) msg = session._("error while querying source %s, some data may be missing") @@ -346,8 +346,7 @@ """update an entity in the source""" relations, kwargs = self._entity_relations_and_kwargs(session, entity) cu = session.pool[self.uri] - cu.execute('SET %s WHERE X eid %%(x)s' % ','.join(relations), - kwargs, 'x') + cu.execute('SET %s WHERE X eid %%(x)s' % ','.join(relations), kwargs) self._query_cache.clear() entity.clear_all_caches() @@ -355,7 +354,7 @@ """delete an entity from the source""" cu = session.pool[self.uri] cu.execute('DELETE %s X WHERE X eid %%(x)s' % entity.__regid__, - {'x': self.eid2extid(entity.eid, session)}, 'x') + {'x': self.eid2extid(entity.eid, session)}) self._query_cache.clear() def add_relation(self, session, subject, rtype, object): @@ -363,7 +362,7 @@ cu = session.pool[self.uri] cu.execute('SET X %s Y WHERE X eid %%(x)s, Y eid %%(y)s' % rtype, {'x': self.eid2extid(subject, session), - 'y': self.eid2extid(object, session)}, ('x', 'y')) + 'y': self.eid2extid(object, session)}) self._query_cache.clear() session.entity_from_eid(subject).clear_all_caches() session.entity_from_eid(object).clear_all_caches() @@ -373,7 +372,7 @@ cu = session.pool[self.uri] cu.execute('DELETE X %s Y WHERE X eid %%(x)s, Y eid %%(y)s' % rtype, {'x': self.eid2extid(subject, session), - 'y': self.eid2extid(object, session)}, ('x', 'y')) + 'y': self.eid2extid(object, session)}) self._query_cache.clear() session.entity_from_eid(subject).clear_all_caches() session.entity_from_eid(object).clear_all_caches() @@ -396,9 +395,8 @@ def generate(self, session, rqlst, args): self._session = session self.kwargs = args - self.cachekey = [] self.need_translation = False - return self.visit_union(rqlst), self.cachekey + return self.visit_union(rqlst) def visit_union(self, node): s = self._accept_children(node) @@ -547,7 +545,6 @@ # ensure we have not yet translated the value... 
if not key in self._const_var: self.kwargs[key] = self.eid2extid(self.kwargs[key]) - self.cachekey.append(key) self._const_var[key] = None return node.as_string() diff -r b619531ddbd2 -r b6e250dd7a7d server/sources/rql2sql.py --- a/server/sources/rql2sql.py Fri Apr 23 12:40:48 2010 +0200 +++ b/server/sources/rql2sql.py Fri Apr 23 12:42:53 2010 +0200 @@ -93,7 +93,7 @@ modified = False for varname in tuple(unstable): var = select.defined_vars[varname] - if not var.stinfo['optrelations']: + if not var.stinfo.get('optrelations'): continue modified = True unstable.remove(varname) @@ -120,13 +120,13 @@ var.stinfo['relations'].remove(rel) newvar.stinfo['relations'].add(newrel) if rel.optional in ('left', 'both'): - newvar.stinfo['optrelations'].add(newrel) + newvar.add_optional_relation(newrel) for vref in newrel.children[1].iget_nodes(VariableRef): var = vref.variable var.stinfo['relations'].add(newrel) var.stinfo['rhsrelations'].add(newrel) if rel.optional in ('right', 'both'): - var.stinfo['optrelations'].add(newrel) + var.add_optional_relation(newrel) # extract subquery solutions mysolutions = [sol.copy() for sol in solutions] cleanup_solutions(newselect, mysolutions) @@ -894,7 +894,7 @@ condition = '%s=%s' % (lhssql, rhsconst.accept(self)) if relation.r_type != 'identity': condition = '(%s OR %s IS NULL)' % (condition, lhssql) - if not lhsvar.stinfo['optrelations']: + if not lhsvar.stinfo.get('optrelations'): return condition self.add_outer_join_condition(lhsvar, t1, condition) return @@ -993,7 +993,7 @@ sql = '%s%s' % (lhssql, rhssql) except AttributeError: sql = '%s%s' % (lhssql, rhssql) - if lhs.variable.stinfo['optrelations']: + if lhs.variable.stinfo.get('optrelations'): self.add_outer_join_condition(lhs.variable, table, sql) else: return sql @@ -1008,7 +1008,7 @@ lhsvar = lhs.variable me_is_principal = lhsvar.stinfo.get('principal') is rel if me_is_principal: - if not lhsvar.stinfo['typerels']: + if lhsvar.stinfo['typerel'] is None: # the variable is using the fti table, no join needed jointo = None elif not lhsvar.name in self._varmap: @@ -1141,7 +1141,7 @@ vtablename = '_' + variable.name self.add_table('entities AS %s' % vtablename, vtablename) sql = '%s.eid' % vtablename - if variable.stinfo['typerels']: + if variable.stinfo['typerel'] is not None: # add additional restriction on entities.type column pts = variable.stinfo['possibletypes'] if len(pts) == 1: @@ -1303,7 +1303,7 @@ tablealias = self._state.outer_tables[table] actualtables = self._state.actual_tables[-1] except KeyError: - for rel in var.stinfo['optrelations']: + for rel in var.stinfo.get('optrelations'): self.visit_relation(rel) assert self._state.outer_tables self.add_outer_join_condition(var, table, condition) diff -r b619531ddbd2 -r b6e250dd7a7d server/test/unittest_ldapuser.py --- a/server/test/unittest_ldapuser.py Fri Apr 23 12:40:48 2010 +0200 +++ b/server/test/unittest_ldapuser.py Fri Apr 23 12:42:53 2010 +0200 @@ -176,7 +176,7 @@ rset = self.sexecute('Any U ORDERBY D DESC WHERE WF wf_info_for X,' 'WF creation_date D, WF from_state FS,' 'WF owned_by U?, X eid %(x)s', - {'x': adim.eid}, 'x') + {'x': adim.eid}) self.assertEquals(rset.rows, [[syt.eid]]) finally: # restore db state diff -r b619531ddbd2 -r b6e250dd7a7d server/test/unittest_migractions.py --- a/server/test/unittest_migractions.py Fri Apr 23 12:40:48 2010 +0200 +++ b/server/test/unittest_migractions.py Fri Apr 23 12:42:53 2010 +0200 @@ -108,8 +108,8 @@ testdate = date(2005, 12, 13) eid1 = self.mh.rqlexec('INSERT Note N')[0][0] eid2 = 
self.mh.rqlexec('INSERT Note N: N mydate %(mydate)s', {'mydate' : testdate})[0][0] - d1 = self.mh.rqlexec('Any D WHERE X eid %(x)s, X mydate D', {'x': eid1}, 'x')[0][0] - d2 = self.mh.rqlexec('Any D WHERE X eid %(x)s, X mydate D', {'x': eid2}, 'x')[0][0] + d1 = self.mh.rqlexec('Any D WHERE X eid %(x)s, X mydate D', {'x': eid1})[0][0] + d2 = self.mh.rqlexec('Any D WHERE X eid %(x)s, X mydate D', {'x': eid2})[0][0] self.assertEquals(d1, date.today()) self.assertEquals(d2, testdate) self.mh.rollback() @@ -503,13 +503,13 @@ note = self.execute('INSERT Note X: X para "hip", X shortpara "hop", X newattr "momo"').get_entity(0, 0) aff = self.execute('INSERT Affaire X').get_entity(0, 0) self.failUnless(self.execute('SET X newnotinlined Y WHERE X eid %(x)s, Y eid %(y)s', - {'x': text.eid, 'y': aff.eid}, 'x')) + {'x': text.eid, 'y': aff.eid})) self.failUnless(self.execute('SET X newnotinlined Y WHERE X eid %(x)s, Y eid %(y)s', - {'x': note.eid, 'y': aff.eid}, 'x')) + {'x': note.eid, 'y': aff.eid})) self.failUnless(self.execute('SET X newinlined Y WHERE X eid %(x)s, Y eid %(y)s', - {'x': text.eid, 'y': aff.eid}, 'x')) + {'x': text.eid, 'y': aff.eid})) self.failUnless(self.execute('SET X newinlined Y WHERE X eid %(x)s, Y eid %(y)s', - {'x': note.eid, 'y': aff.eid}, 'x')) + {'x': note.eid, 'y': aff.eid})) # XXX remove specializes by ourselves, else tearDown fails when removing # Para because of Note inheritance. This could be fixed by putting the # MemSchemaCWETypeDel(session, name) operation in the diff -r b619531ddbd2 -r b6e250dd7a7d server/test/unittest_multisources.py --- a/server/test/unittest_multisources.py Fri Apr 23 12:40:48 2010 +0200 +++ b/server/test/unittest_multisources.py Fri Apr 23 12:42:53 2010 +0200 @@ -109,7 +109,7 @@ self.assertEquals(metainf['type'], 'Card') self.assert_(metainf['extid']) etype = self.sexecute('Any ETN WHERE X is ET, ET name ETN, X eid %(x)s', - {'x': externent.eid}, 'x')[0][0] + {'x': externent.eid})[0][0] self.assertEquals(etype, 'Card') def test_order_limit_offset(self): @@ -129,7 +129,7 @@ self.sexecute('INSERT Affaire X: X ref "no readable card"')[0][0] aff1 = self.sexecute('INSERT Affaire X: X ref "card"')[0][0] # grant read access - self.sexecute('SET X owned_by U WHERE X eid %(x)s, U login "anon"', {'x': aff1}, 'x') + self.sexecute('SET X owned_by U WHERE X eid %(x)s, U login "anon"', {'x': aff1}) self.commit() cnx = self.login('anon') cu = cnx.cursor() @@ -139,8 +139,8 @@ def test_synchronization(self): cu = cnx2.cursor() - assert cu.execute('Any X WHERE X eid %(x)s', {'x': self.aff1}, 'x') - cu.execute('SET X ref "BLAH" WHERE X eid %(x)s', {'x': self.aff1}, 'x') + assert cu.execute('Any X WHERE X eid %(x)s', {'x': self.aff1}) + cu.execute('SET X ref "BLAH" WHERE X eid %(x)s', {'x': self.aff1}) aff2 = cu.execute('INSERT Affaire X: X ref "AFFREUX"')[0][0] cnx2.commit() try: @@ -155,20 +155,20 @@ self.failIf(rset) finally: # restore state - cu.execute('SET X ref "AFFREF" WHERE X eid %(x)s', {'x': self.aff1}, 'x') + cu.execute('SET X ref "AFFREF" WHERE X eid %(x)s', {'x': self.aff1}) cnx2.commit() def test_simplifiable_var(self): affeid = self.sexecute('Affaire X WHERE X ref "AFFREF"')[0][0] rset = self.sexecute('Any X,AA,AB WHERE E eid %(x)s, E in_state X, X name AA, X modification_date AB', - {'x': affeid}, 'x') + {'x': affeid}) self.assertEquals(len(rset), 1) self.assertEquals(rset[0][1], "pitetre") def test_simplifiable_var_2(self): affeid = self.sexecute('Affaire X WHERE X ref "AFFREF"')[0][0] rset = self.sexecute('Any E WHERE E eid %(x)s, E in_state S, 
NOT S name "moved"', - {'x': affeid, 'u': self.session.user.eid}, 'x') + {'x': affeid, 'u': self.session.user.eid}) self.assertEquals(len(rset), 1) def test_sort_func(self): @@ -216,7 +216,7 @@ rset = self.sexecute('Any X,Y WHERE X is Card, Y is Affaire, X title T, Y ref T') self.assertEquals(len(rset), 2, rset.rows) finally: - cu.execute('DELETE Card X WHERE X eid %(x)s', {'x': ec2}, 'x') + cu.execute('DELETE Card X WHERE X eid %(x)s', {'x': ec2}) cnx2.commit() def test_attr_unification_neq_1(self): @@ -258,15 +258,15 @@ userstate = self.session.user.in_state[0] states.remove((userstate.eid, userstate.name)) notstates = set(tuple(x) for x in self.sexecute('Any S,SN WHERE S is State, S name SN, NOT X in_state S, X eid %(x)s', - {'x': self.session.user.eid}, 'x')) + {'x': self.session.user.eid})) self.assertSetEquals(notstates, states) aff1 = self.sexecute('Any X WHERE X is Affaire, X ref "AFFREF"')[0][0] - aff1stateeid, aff1statename = self.sexecute('Any S,SN WHERE X eid %(x)s, X in_state S, S name SN', {'x': aff1}, 'x')[0] + aff1stateeid, aff1statename = self.sexecute('Any S,SN WHERE X eid %(x)s, X in_state S, S name SN', {'x': aff1})[0] self.assertEquals(aff1statename, 'pitetre') states.add((userstate.eid, userstate.name)) states.remove((aff1stateeid, aff1statename)) notstates = set(tuple(x) for x in self.sexecute('Any S,SN WHERE S is State, S name SN, NOT X in_state S, X eid %(x)s', - {'x': aff1}, 'x')) + {'x': aff1})) self.assertSetEquals(notstates, states) def test_absolute_url_base_url(self): diff -r b619531ddbd2 -r b6e250dd7a7d server/test/unittest_querier.py --- a/server/test/unittest_querier.py Fri Apr 23 12:40:48 2010 +0200 +++ b/server/test/unittest_querier.py Fri Apr 23 12:42:53 2010 +0200 @@ -220,13 +220,13 @@ def test_typed_eid(self): # should return an empty result set - rset = self.execute('Any X WHERE X eid %(x)s', {'x': '1'}, 'x') + rset = self.execute('Any X WHERE X eid %(x)s', {'x': '1'}) self.assertIsInstance(rset[0][0], (int, long)) def test_bytes_storage(self): feid = self.execute('INSERT File X: X data_name "foo.pdf", X data_format "text/plain", X data %(data)s', {'data': Binary("xxx")})[0][0] - fdata = self.execute('Any D WHERE X data D, X eid %(x)s', {'x': feid}, 'x')[0][0] + fdata = self.execute('Any D WHERE X data D, X eid %(x)s', {'x': feid})[0][0] self.assertIsInstance(fdata, Binary) self.assertEquals(fdata.getvalue(), 'xxx') @@ -356,17 +356,17 @@ def test_select_outer_join_optimized(self): peid1 = self.execute("INSERT Personne X: X nom 'bidule'")[0][0] - rset = self.execute('Any X WHERE X eid %(x)s, P? connait X', {'x':peid1}, 'x') + rset = self.execute('Any X WHERE X eid %(x)s, P? connait X', {'x':peid1}) self.assertEquals(rset.rows, [[peid1]]) rset = self.execute('Any X WHERE X eid %(x)s, X require_permission P?', - {'x':peid1}, 'x') + {'x':peid1}) self.assertEquals(rset.rows, [[peid1]]) def test_select_left_outer_join(self): rset = self.execute('DISTINCT Any G WHERE U? in_group G') self.assertEquals(len(rset), 4) rset = self.execute('DISTINCT Any G WHERE U? in_group G, U eid %(x)s', - {'x': self.session.user.eid}, 'x') + {'x': self.session.user.eid}) self.assertEquals(len(rset), 4) def test_select_ambigous_outer_join(self): @@ -374,7 +374,7 @@ self.execute("INSERT Tag X: X name 'tagbis'")[0][0] geid = self.execute("CWGroup G WHERE G name 'users'")[0][0] self.execute("SET X tags Y WHERE X eid %(t)s, Y eid %(g)s", - {'g': geid, 't': teid}, 'g') + {'g': geid, 't': teid}) rset = self.execute("Any GN,TN ORDERBY GN WHERE T? 
tags G, T name TN, G name GN") self.failUnless(['users', 'tag'] in rset.rows) self.failUnless(['activated', None] in rset.rows) @@ -882,7 +882,7 @@ def test_insert_5bis(self): peid = self.execute("INSERT Personne X: X nom 'bidule'")[0][0] self.execute("INSERT Societe Y: Y nom 'toto', X travaille Y WHERE X eid %(x)s", - {'x': peid}, 'x') + {'x': peid}) rset = self.execute('Any X, Y WHERE X nom "bidule", Y nom "toto", X travaille Y') self.assert_(rset.rows) self.assertEquals(rset.description, [('Personne', 'Societe',)]) @@ -1000,17 +1000,17 @@ eid = self.execute("INSERT Folder T: T name 'toto'")[0][0] self.commit() # fill the cache - self.execute("Any X WHERE X eid %(x)s", {'x': eid}, 'x') + self.execute("Any X WHERE X eid %(x)s", {'x': eid}) self.execute("Any X WHERE X eid %s" %eid) - self.execute("Folder X WHERE X eid %(x)s", {'x': eid}, 'x') + self.execute("Folder X WHERE X eid %(x)s", {'x': eid}) self.execute("Folder X WHERE X eid %s" %eid) self.execute("DELETE Folder T WHERE T eid %s"%eid) self.commit() - rset = self.execute("Any X WHERE X eid %(x)s", {'x': eid}, 'x') + rset = self.execute("Any X WHERE X eid %(x)s", {'x': eid}) self.assertEquals(rset.rows, []) rset = self.execute("Any X WHERE X eid %s" %eid) self.assertEquals(rset.rows, []) - rset = self.execute("Folder X WHERE X eid %(x)s", {'x': eid}, 'x') + rset = self.execute("Folder X WHERE X eid %(x)s", {'x': eid}) self.assertEquals(rset.rows, []) rset = self.execute("Folder X WHERE X eid %s" %eid) self.assertEquals(rset.rows, []) @@ -1086,7 +1086,7 @@ def test_update_string_concat(self): beid = self.execute("INSERT Bookmark Y: Y title 'toto', Y path '/view'")[0][0] self.execute('SET X title XN + %(suffix)s WHERE X is Bookmark, X title XN', {'suffix': u'-moved'}) - newname = self.execute('Any XN WHERE X eid %(x)s, X title XN', {'x': beid}, 'x')[0][0] + newname = self.execute('Any XN WHERE X eid %(x)s, X title XN', {'x': beid})[0][0] self.assertEquals(newname, 'toto-moved') def test_update_query_error(self): @@ -1203,7 +1203,7 @@ 'creation_date': '2000/07/03 11:00'}) rset = self.execute('Any lower(N) ORDERBY LOWER(N) WHERE X is Tag, X name N,' 'X owned_by U, U eid %(x)s', - {'x':self.session.user.eid}, 'x') + {'x':self.session.user.eid}) self.assertEquals(rset.rows, [[u'\xe9name0']]) @@ -1286,7 +1286,7 @@ ueid = self.execute("INSERT CWUser X: X login 'bob', X upassword 'toto'")[0][0] self.execute("SET E in_group G, E firstname %(firstname)s, E surname %(surname)s " "WHERE E eid %(x)s, G name 'users'", - {'x':ueid, 'firstname': u'jean', 'surname': u'paul'}, 'x') + {'x':ueid, 'firstname': u'jean', 'surname': u'paul'}) def test_nonregr_u_owned_by_u(self): ueid = self.execute("INSERT CWUser X: X login 'bob', X upassword 'toto', X in_group G " diff -r b619531ddbd2 -r b6e250dd7a7d server/test/unittest_repository.py --- a/server/test/unittest_repository.py Fri Apr 23 12:40:48 2010 +0200 +++ b/server/test/unittest_repository.py Fri Apr 23 12:42:53 2010 +0200 @@ -478,7 +478,7 @@ # our sqlite datetime adapter is ignore seconds fraction, so we have to # ensure update is done the next seconds time.sleep(1 - (ts.second - int(ts.second))) - self.execute('SET X nom "tata" WHERE X eid %(x)s', {'x': eidp}, 'x') + self.execute('SET X nom "tata" WHERE X eid %(x)s', {'x': eidp}) self.commit() self.assertEquals(len(self.execute('Personne X WHERE X has_text "tutu"')), 1) self.session.set_pool() diff -r b619531ddbd2 -r b6e250dd7a7d server/test/unittest_rql2sql.py --- a/server/test/unittest_rql2sql.py Fri Apr 23 12:40:48 2010 +0200 +++ 
b/server/test/unittest_rql2sql.py Fri Apr 23 12:42:53 2010 +0200 @@ -1209,6 +1209,10 @@ '''SELECT CAST(EXTRACT(MONTH from _P.cw_creation_date) AS INTEGER) FROM cw_Personne AS _P''') + def test_substring(self): + self._check("Any SUBSTRING(N, 1, 1) WHERE P nom N, P is Personne", + '''SELECT SUBSTR(_P.cw_nom, 1, 1) +FROM cw_Personne AS _P''') def test_parser_parse(self): for t in self._parse(PARSER): @@ -1601,12 +1605,16 @@ WHERE rel_concerne0.eid_from=_A.cw_eid AND rel_concerne0.eid_to=_N.cw_eid GROUP BY _A.cw_eid,rel_todo_by1.eid_to,rel_todo_by3.eid_to''') + def test_substring(self): + self._check("Any SUBSTRING(N, 1, 1) WHERE P nom N, P is Personne", + '''SELECT SUBSTRING(_P.cw_nom, 1, 1) +FROM cw_Personne AS _P''') class removeUnsusedSolutionsTC(TestCase): def test_invariant_not_varying(self): rqlst = mock_object(defined_vars={}) - rqlst.defined_vars['A'] = mock_object(scope=rqlst, stinfo={'optrelations':False}, _q_invariant=True) - rqlst.defined_vars['B'] = mock_object(scope=rqlst, stinfo={'optrelations':False}, _q_invariant=False) + rqlst.defined_vars['A'] = mock_object(scope=rqlst, stinfo={}, _q_invariant=True) + rqlst.defined_vars['B'] = mock_object(scope=rqlst, stinfo={}, _q_invariant=False) self.assertEquals(remove_unused_solutions(rqlst, [{'A': 'RugbyGroup', 'B': 'RugbyTeam'}, {'A': 'FootGroup', 'B': 'FootTeam'}], {}, None), ([{'A': 'RugbyGroup', 'B': 'RugbyTeam'}, @@ -1616,8 +1624,8 @@ def test_invariant_varying(self): rqlst = mock_object(defined_vars={}) - rqlst.defined_vars['A'] = mock_object(scope=rqlst, stinfo={'optrelations':False}, _q_invariant=True) - rqlst.defined_vars['B'] = mock_object(scope=rqlst, stinfo={'optrelations':False}, _q_invariant=False) + rqlst.defined_vars['A'] = mock_object(scope=rqlst, stinfo={}, _q_invariant=True) + rqlst.defined_vars['B'] = mock_object(scope=rqlst, stinfo={}, _q_invariant=False) self.assertEquals(remove_unused_solutions(rqlst, [{'A': 'RugbyGroup', 'B': 'RugbyTeam'}, {'A': 'FootGroup', 'B': 'RugbyTeam'}], {}, None), ([{'A': 'RugbyGroup', 'B': 'RugbyTeam'}], {}, set()) diff -r b619531ddbd2 -r b6e250dd7a7d server/test/unittest_rqlannotation.py --- a/server/test/unittest_rqlannotation.py Fri Apr 23 12:40:48 2010 +0200 +++ b/server/test/unittest_rqlannotation.py Fri Apr 23 12:42:53 2010 +0200 @@ -100,6 +100,12 @@ self.assertEquals(rqlst.defined_vars['X']._q_invariant, False) self.assertEquals(rqlst.defined_vars['Y']._q_invariant, False) + def test_8(self): + # DISTINCT Any P WHERE P require_group %(g)s, NOT %(u)s has_group_permission P, P is CWPermission + rqlst = self._prepare('DISTINCT Any X WHERE A concerne X, NOT N migrated_from X, ' + 'X is Note, N eid 1') + self.assertEquals(rqlst.defined_vars['X']._q_invariant, False) + def test_diff_scope_identity_deamb(self): rqlst = self._prepare('Any X WHERE X concerne Y, Y is Note, EXISTS(Y identity Z, Z migrated_from N)') self.assertEquals(rqlst.defined_vars['Z']._q_invariant, True) diff -r b619531ddbd2 -r b6e250dd7a7d server/test/unittest_security.py --- a/server/test/unittest_security.py Fri Apr 23 12:40:48 2010 +0200 +++ b/server/test/unittest_security.py Fri Apr 23 12:42:53 2010 +0200 @@ -197,7 +197,7 @@ # to actually get Unauthorized exception, try to delete a relation we can read self.restore_connection() eid = self.execute("INSERT Affaire X: X sujet 'pascool'")[0][0] - self.execute('SET X owned_by U WHERE X eid %(x)s, U login "iaminusersgrouponly"', {'x': eid}, 'x') + self.execute('SET X owned_by U WHERE X eid %(x)s, U login "iaminusersgrouponly"', {'x': eid}) self.execute("SET A concerne 
S WHERE A sujet 'pascool', S is Societe") self.commit() cnx = self.login('iaminusersgrouponly') @@ -214,7 +214,7 @@ cnx = self.login('user') cu = cnx.cursor() cu.execute('SET X upassword %(passwd)s WHERE X eid %(x)s', - {'x': ueid, 'passwd': 'newpwd'}, 'x') + {'x': ueid, 'passwd': 'newpwd'}) cnx.commit() cnx.close() cnx = self.login('user', password='newpwd') @@ -224,7 +224,7 @@ cnx = self.login('iaminusersgrouponly') cu = cnx.cursor() cu.execute('SET X upassword %(passwd)s WHERE X eid %(x)s', - {'x': ueid, 'passwd': 'newpwd'}, 'x') + {'x': ueid, 'passwd': 'newpwd'}) self.assertRaises(Unauthorized, cnx.commit) # read security test @@ -243,22 +243,22 @@ cu = cnx.cursor() rset = cu.execute('Affaire X') self.assertEquals(rset.rows, []) - self.assertRaises(Unauthorized, cu.execute, 'Any X WHERE X eid %(x)s', {'x': eid}, 'x') + self.assertRaises(Unauthorized, cu.execute, 'Any X WHERE X eid %(x)s', {'x': eid}) # cache test - self.assertRaises(Unauthorized, cu.execute, 'Any X WHERE X eid %(x)s', {'x': eid}, 'x') + self.assertRaises(Unauthorized, cu.execute, 'Any X WHERE X eid %(x)s', {'x': eid}) aff2 = cu.execute("INSERT Affaire X: X sujet 'cool'")[0][0] soc1 = cu.execute("INSERT Societe X: X nom 'chouette'")[0][0] cu.execute("SET A concerne S WHERE A is Affaire, S is Societe") cnx.commit() - rset = cu.execute('Any X WHERE X eid %(x)s', {'x': aff2}, 'x') + rset = cu.execute('Any X WHERE X eid %(x)s', {'x': aff2}) self.assertEquals(rset.rows, [[aff2]]) # more cache test w/ NOT eid - rset = cu.execute('Affaire X WHERE NOT X eid %(x)s', {'x': eid}, 'x') + rset = cu.execute('Affaire X WHERE NOT X eid %(x)s', {'x': eid}) self.assertEquals(rset.rows, [[aff2]]) - rset = cu.execute('Affaire X WHERE NOT X eid %(x)s', {'x': aff2}, 'x') + rset = cu.execute('Affaire X WHERE NOT X eid %(x)s', {'x': aff2}) self.assertEquals(rset.rows, []) # test can't update an attribute of an entity that can't be readen - self.assertRaises(Unauthorized, cu.execute, 'SET X sujet "hacked" WHERE X eid %(x)s', {'x': eid}, 'x') + self.assertRaises(Unauthorized, cu.execute, 'SET X sujet "hacked" WHERE X eid %(x)s', {'x': eid}) def test_entity_created_in_transaction(self): @@ -270,7 +270,7 @@ cu = cnx.cursor() aff2 = cu.execute("INSERT Affaire X: X sujet 'cool'")[0][0] # entity created in transaction are readable *by eid* - self.failUnless(cu.execute('Any X WHERE X eid %(x)s', {'x':aff2}, 'x')) + self.failUnless(cu.execute('Any X WHERE X eid %(x)s', {'x':aff2})) # XXX would be nice if it worked rset = cu.execute("Affaire X WHERE X sujet 'cool'") self.assertEquals(len(rset), 0) @@ -281,18 +281,17 @@ def test_read_erqlexpr_has_text1(self): aff1 = self.execute("INSERT Affaire X: X sujet 'cool'")[0][0] card1 = self.execute("INSERT Card X: X title 'cool'")[0][0] - self.execute('SET X owned_by U WHERE X eid %(x)s, U login "iaminusersgrouponly"', {'x': card1}, 'x') + self.execute('SET X owned_by U WHERE X eid %(x)s, U login "iaminusersgrouponly"', {'x': card1}) self.commit() cnx = self.login('iaminusersgrouponly') cu = cnx.cursor() aff2 = cu.execute("INSERT Affaire X: X sujet 'cool'")[0][0] soc1 = cu.execute("INSERT Societe X: X nom 'chouette'")[0][0] - cu.execute("SET A concerne S WHERE A eid %(a)s, S eid %(s)s", {'a': aff2, 's': soc1}, - ('a', 's')) + cu.execute("SET A concerne S WHERE A eid %(a)s, S eid %(s)s", {'a': aff2, 's': soc1}) cnx.commit() - self.assertRaises(Unauthorized, cu.execute, 'Any X WHERE X eid %(x)s', {'x':aff1}, 'x') - self.failUnless(cu.execute('Any X WHERE X eid %(x)s', {'x':aff2}, 'x')) - 
self.failUnless(cu.execute('Any X WHERE X eid %(x)s', {'x':card1}, 'x')) + self.assertRaises(Unauthorized, cu.execute, 'Any X WHERE X eid %(x)s', {'x':aff1}) + self.failUnless(cu.execute('Any X WHERE X eid %(x)s', {'x':aff2})) + self.failUnless(cu.execute('Any X WHERE X eid %(x)s', {'x':card1})) rset = cu.execute("Any X WHERE X has_text 'cool'") self.assertEquals(sorted(eid for eid, in rset.rows), [card1, aff2]) @@ -347,7 +346,7 @@ # only managers should be able to edit the 'test' attribute of Personne entities eid = self.execute("INSERT Personne X: X nom 'bidule', X web 'http://www.debian.org', X test TRUE")[0][0] self.commit() - self.execute('SET X test FALSE WHERE X eid %(x)s', {'x': eid}, 'x') + self.execute('SET X test FALSE WHERE X eid %(x)s', {'x': eid}) self.commit() cnx = self.login('iaminusersgrouponly') cu = cnx.cursor() @@ -357,11 +356,11 @@ self.assertRaises(Unauthorized, cnx.commit) eid = cu.execute("INSERT Personne X: X nom 'bidule', X web 'http://www.debian.org'")[0][0] cnx.commit() - cu.execute('SET X test FALSE WHERE X eid %(x)s', {'x': eid}, 'x') + cu.execute('SET X test FALSE WHERE X eid %(x)s', {'x': eid}) self.assertRaises(Unauthorized, cnx.commit) - cu.execute('SET X test TRUE WHERE X eid %(x)s', {'x': eid}, 'x') + cu.execute('SET X test TRUE WHERE X eid %(x)s', {'x': eid}) self.assertRaises(Unauthorized, cnx.commit) - cu.execute('SET X web "http://www.logilab.org" WHERE X eid %(x)s', {'x': eid}, 'x') + cu.execute('SET X web "http://www.logilab.org" WHERE X eid %(x)s', {'x': eid}) cnx.commit() cnx.close() @@ -370,23 +369,23 @@ note = self.execute("INSERT Note X: X para 'bidule'").get_entity(0, 0) self.commit() note.fire_transition('markasdone') - self.execute('SET X para "truc" WHERE X eid %(x)s', {'x': note.eid}, 'x') + self.execute('SET X para "truc" WHERE X eid %(x)s', {'x': note.eid}) self.commit() cnx = self.login('iaminusersgrouponly') cu = cnx.cursor() - cu.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note.eid}, 'x') + cu.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note.eid}) self.assertRaises(Unauthorized, cnx.commit) note2 = cu.execute("INSERT Note X: X para 'bidule'").get_entity(0, 0) cnx.commit() note2.fire_transition('markasdone') cnx.commit() - self.assertEquals(len(cu.execute('Any X WHERE X in_state S, S name "todo", X eid %(x)s', {'x': note2.eid}, 'x')), + self.assertEquals(len(cu.execute('Any X WHERE X in_state S, S name "todo", X eid %(x)s', {'x': note2.eid})), 0) - cu.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note2.eid}, 'x') + cu.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note2.eid}) self.assertRaises(Unauthorized, cnx.commit) note2.fire_transition('redoit') cnx.commit() - cu.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note2.eid}, 'x') + cu.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note2.eid}) cnx.commit() def test_attribute_read_security(self): @@ -447,13 +446,13 @@ # should only be able to read the anonymous user, not another one origuser = self.adminsession.user self.assertRaises(Unauthorized, - cu.execute, 'CWUser X WHERE X eid %(x)s', {'x': origuser.eid}, 'x') + cu.execute, 'CWUser X WHERE X eid %(x)s', {'x': origuser.eid}) # nothing selected, nothing updated, no exception raised #self.assertRaises(Unauthorized, # cu.execute, 'SET X login "toto" WHERE X eid %(x)s', # {'x': self.user.eid}) - rset = cu.execute('CWUser X WHERE X eid %(x)s', {'x': anon.eid}, 'x') + rset = cu.execute('CWUser X WHERE X eid %(x)s', {'x': anon.eid}) self.assertEquals(rset.rows, 
[[anon.eid]]) # but can't modify it cu.execute('SET X login "toto" WHERE X eid %(x)s', {'x': anon.eid}) @@ -494,7 +493,7 @@ self.assertRaises(Unauthorized, cu.execute,'DELETE B bookmarked_by U') self.assertRaises(Unauthorized, cu.execute, 'SET B bookmarked_by U WHERE U eid %(x)s, B eid %(b)s', - {'x': anoneid, 'b': beid1}, 'x') + {'x': anoneid, 'b': beid1}) def test_ambigous_ordered(self): @@ -551,10 +550,10 @@ aff.clear_related_cache('wf_info_for', role='object') self.assertRaises(Unauthorized, self.execute, 'SET TI from_state S WHERE TI eid %(ti)s, S name "ben non"', - {'ti': trinfo.eid}, 'ti') + {'ti': trinfo.eid}) self.assertRaises(Unauthorized, self.execute, 'SET TI to_state S WHERE TI eid %(ti)s, S name "pitetre"', - {'ti': trinfo.eid}, 'ti') + {'ti': trinfo.eid}) if __name__ == '__main__': unittest_main() diff -r b619531ddbd2 -r b6e250dd7a7d server/test/unittest_storage.py --- a/server/test/unittest_storage.py Fri Apr 23 12:40:48 2010 +0200 +++ b/server/test/unittest_storage.py Fri Apr 23 12:42:53 2010 +0200 @@ -132,14 +132,14 @@ ' (Any D, X WHERE X eid %(x)s, X data D)' ' UNION ' ' (Any D, X WHERE X eid %(x)s, X data D)' - ')', {'x': f1.eid}, 'x') + ')', {'x': f1.eid}) self.assertEquals(len(rset), 2) self.assertEquals(rset[0][0], f1.eid) self.assertEquals(rset[1][0], f1.eid) self.assertEquals(rset[0][1].getvalue(), 'the-data') self.assertEquals(rset[1][1].getvalue(), 'the-data') rset = self.execute('Any X,LENGTH(D) WHERE X eid %(x)s, X data D', - {'x': f1.eid}, 'x') + {'x': f1.eid}) self.assertEquals(len(rset), 1) self.assertEquals(rset[0][0], f1.eid) self.assertEquals(rset[0][1], len('the-data')) @@ -147,7 +147,7 @@ ' (Any D, X WHERE X eid %(x)s, X data D)' ' UNION ' ' (Any D, X WHERE X eid %(x)s, X data D)' - ')', {'x': f1.eid}, 'x') + ')', {'x': f1.eid}) self.assertEquals(len(rset), 2) self.assertEquals(rset[0][0], f1.eid) self.assertEquals(rset[1][0], f1.eid) @@ -155,7 +155,7 @@ self.assertEquals(rset[1][1], len('the-data')) ex = self.assertRaises(QueryError, self.execute, 'Any X,UPPER(D) WHERE X eid %(x)s, X data D', - {'x': f1.eid}, 'x') + {'x': f1.eid}) self.assertEquals(str(ex), 'UPPER can not be called on mapped attribute') @@ -178,7 +178,7 @@ {'d': Binary('some other data'), 'f': f1.eid}) self.assertEquals(f1.data.getvalue(), 'some other data') self.commit() - f2 = self.entity('Any F WHERE F eid %(f)s, F is File', {'f': f1.eid}) + f2 = self.execute('Any F WHERE F eid %(f)s, F is File', {'f': f1.eid}).get_entity(0, 0) self.assertEquals(f2.data.getvalue(), 'some other data') diff -r b619531ddbd2 -r b6e250dd7a7d server/test/unittest_undo.py --- a/server/test/unittest_undo.py Fri Apr 23 12:40:48 2010 +0200 +++ b/server/test/unittest_undo.py Fri Apr 23 12:42:53 2010 +0200 @@ -144,8 +144,8 @@ undotxuuid = self.commit() self.assertEquals(undotxuuid, None) # undo not undoable self.assertEquals(errors, []) - self.failUnless(self.execute('Any X WHERE X eid %(x)s', {'x': toto.eid}, 'x')) - self.failUnless(self.execute('Any X WHERE X eid %(x)s', {'x': e.eid}, 'x')) + self.failUnless(self.execute('Any X WHERE X eid %(x)s', {'x': toto.eid})) + self.failUnless(self.execute('Any X WHERE X eid %(x)s', {'x': e.eid})) self.failUnless(self.execute('Any X WHERE X has_text "toto@logilab"')) self.assertEquals(toto.state, 'activated') self.assertEquals(toto.get_email(), 'toto@logilab.org') @@ -216,8 +216,8 @@ errors = self.cnx.undo_transaction(txuuid) self.commit() self.failIf(errors) - self.failIf(self.execute('Any X WHERE X eid %(x)s', {'x': c.eid}, 'x')) - self.failIf(self.execute('Any X 
WHERE X eid %(x)s', {'x': p.eid}, 'x')) + self.failIf(self.execute('Any X WHERE X eid %(x)s', {'x': c.eid})) + self.failIf(self.execute('Any X WHERE X eid %(x)s', {'x': p.eid})) self.failIf(self.execute('Any X,Y WHERE X fiche Y')) self.session.set_pool() for eid in (p.eid, c.eid): diff -r b619531ddbd2 -r b6e250dd7a7d setup.py --- a/setup.py Fri Apr 23 12:40:48 2010 +0200 +++ b/setup.py Fri Apr 23 12:42:53 2010 +0200 @@ -24,38 +24,43 @@ import os import sys import shutil -from distutils.core import setup -from distutils.command import install_lib from os.path import isdir, exists, join, walk +try: + if os.environ.get('NO_SETUPTOOLS'): + raise ImportError() # do as there is no setuptools + from setuptools import setup + from setuptools.command import install_lib + USE_SETUPTOOLS = True +except ImportError: + from distutils.core import setup + from distutils.command import install_lib + USE_SETUPTOOLS = False + # import required features -from __pkginfo__ import modname, version, license, short_desc, long_desc, \ - web, author, author_email +from __pkginfo__ import modname, version, license, description, web, \ + author, author_email + +long_description = file('README').read() + # import optional features -try: - from __pkginfo__ import distname -except ImportError: - distname = modname -try: - from __pkginfo__ import scripts -except ImportError: - scripts = [] -try: - from __pkginfo__ import data_files -except ImportError: - data_files = None -try: - from __pkginfo__ import subpackage_of -except ImportError: - subpackage_of = None -try: - from __pkginfo__ import include_dirs -except ImportError: - include_dirs = [] -try: - from __pkginfo__ import ext_modules -except ImportError: - ext_modules = None +import __pkginfo__ +if USE_SETUPTOOLS: + requires = {} + for entry in ("__depends__", "__recommends__"): + requires.update(getattr(__pkginfo__, entry, {})) + install_requires = [("%s %s" % (d, v and v or "")).strip() + for d, v in requires.iteritems()] +else: + install_requires = [] + +distname = getattr(__pkginfo__, 'distname', modname) +scripts = getattr(__pkginfo__, 'scripts', ()) +include_dirs = getattr(__pkginfo__, 'include_dirs', ()) +data_files = getattr(__pkginfo__, 'data_files', None) +subpackage_of = getattr(__pkginfo__, 'subpackage_of', None) +ext_modules = getattr(__pkginfo__, 'ext_modules', None) + BASE_BLACKLIST = ('CVS', 'debian', 'dist', 'build', '__buildlog') IGNORED_EXTENSIONS = ('.pyc', '.pyo', '.elc') @@ -92,7 +97,8 @@ def export(from_dir, to_dir, blacklist=BASE_BLACKLIST, - ignore_ext=IGNORED_EXTENSIONS): + ignore_ext=IGNORED_EXTENSIONS, + verbose=True): """make a mirror of from_dir in to_dir, omitting directories and files listed in the black list """ @@ -111,7 +117,8 @@ continue src = '%s/%s' % (directory, filename) dest = to_dir + src[len(from_dir):] - print >> sys.stderr, src, '->', dest + if verbose: + print >> sys.stderr, src, '->', dest if os.path.isdir(src): if not exists(dest): os.mkdir(dest) @@ -154,28 +161,32 @@ base = modname for directory in include_dirs: dest = join(self.install_dir, base, directory) - export(directory, dest) + export(directory, dest, verbose=False) def install(**kwargs): """setup entry point""" + if USE_SETUPTOOLS: + if '--force-manifest' in sys.argv: + sys.argv.remove('--force-manifest') + # install-layout option was introduced in 2.5.3-1~exp1 + elif sys.version_info < (2, 5, 4) and '--install-layout=deb' in sys.argv: + sys.argv.remove('--install-layout=deb') if subpackage_of: package = subpackage_of + '.' 
+ modname kwargs['package_dir'] = {package : '.'} packages = [package] + get_packages(os.getcwd(), package) + if USE_SETUPTOOLS: + kwargs['namespace_packages'] = [subpackage_of] else: kwargs['package_dir'] = {modname : '.'} packages = [modname] + get_packages(os.getcwd(), modname) + if USE_SETUPTOOLS: + kwargs['install_requires'] = install_requires kwargs['packages'] = packages - return setup(name = distname, - version = version, - license =license, - description = short_desc, - long_description = long_desc, - author = author, - author_email = author_email, - url = web, - scripts = ensure_scripts(scripts), - data_files=data_files, + return setup(name=distname, version=version, license=license, url=web, + description=description, long_description=long_description, + author=author, author_email=author_email, + scripts=ensure_scripts(scripts), data_files=data_files, ext_modules=ext_modules, cmdclass={'install_lib': MyInstallLib}, **kwargs diff -r b619531ddbd2 -r b6e250dd7a7d skeleton/README.tmpl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/skeleton/README.tmpl Fri Apr 23 12:42:53 2010 +0200 @@ -0,0 +1,3 @@ +Summary +------- +%(longdesc)s diff -r b619531ddbd2 -r b6e250dd7a7d skeleton/__pkginfo__.py.tmpl --- a/skeleton/__pkginfo__.py.tmpl Fri Apr 23 12:40:48 2010 +0200 +++ b/skeleton/__pkginfo__.py.tmpl Fri Apr 23 12:42:53 2010 +0200 @@ -7,15 +7,12 @@ numversion = (0, 1, 0) version = '.'.join(str(num) for num in numversion) -license = 'LCL' -copyright = '''Copyright (c) %(year)s %(author)s. -%(author-web-site)s -- mailto:%(author-email)s''' +license = '%(license)s' author = '%(author)s' author_email = '%(author-email)s' -short_desc = '%(shortdesc)s' -long_desc = '''%(longdesc)s''' +description = '%(shortdesc)s' web = 'http://www.cubicweb.org/project/%%s' %% distname @@ -43,12 +40,6 @@ # Note: here, you'll need to add subdirectories if you want # them to be included in the debian package -# a dict; you might want to provide a version specification -# of the form '>= x.y.z' -__depends__ = {'cubicweb': '>= 3.7.0'} -__depends_cubes__ = %(dependencies)s -__recommends_cubes__ = {} -# obsolete (will be gone in cw 3.8.0) -__use__ = tuple(__depends_cubes__) -__recommend__ = tuple(__recommends_cubes__) +__depends__ = %(dependencies)s +__recommends__ = {} diff -r b619531ddbd2 -r b6e250dd7a7d skeleton/data/cubes.CUBENAME.css --- a/skeleton/data/cubes.CUBENAME.css Fri Apr 23 12:40:48 2010 +0200 +++ b/skeleton/data/cubes.CUBENAME.css Fri Apr 23 12:42:53 2010 +0200 @@ -1,1 +1,1 @@ -/* template specific CSS */ +/* cube-specific CSS */ diff -r b619531ddbd2 -r b6e250dd7a7d skeleton/data/cubes.CUBENAME.js --- a/skeleton/data/cubes.CUBENAME.js Fri Apr 23 12:40:48 2010 +0200 +++ b/skeleton/data/cubes.CUBENAME.js Fri Apr 23 12:42:53 2010 +0200 @@ -1,1 +1,1 @@ -// This contains template-specific javascript \ No newline at end of file +// This contains cube-specific javascript \ No newline at end of file diff -r b619531ddbd2 -r b6e250dd7a7d skeleton/debian/rules.tmpl --- a/skeleton/debian/rules.tmpl Fri Apr 23 12:40:48 2010 +0200 +++ b/skeleton/debian/rules.tmpl Fri Apr 23 12:42:53 2010 +0200 @@ -7,7 +7,7 @@ build: build-stamp build-stamp: dh_testdir - python setup.py -q build + NO_SETUPTOOLS=1 python setup.py -q build touch build-stamp clean: @@ -23,7 +23,7 @@ dh_testroot dh_clean -k dh_installdirs -i - python setup.py -q install --no-compile --prefix=debian/%(distname)s/usr/ + NO_SETUPTOOLS=1 python setup.py -q install --no-compile --prefix=debian/%(distname)s/usr/ # remove generated .egg-info file rm -rf 
debian/%(distname)s/usr/lib/python* diff -r b619531ddbd2 -r b6e250dd7a7d skeleton/entities.py --- a/skeleton/entities.py Fri Apr 23 12:40:48 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,7 +0,0 @@ -"""this contains the cube-specific entities' classes - -:organization: Logilab -:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. -:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr -:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses -""" diff -r b619531ddbd2 -r b6e250dd7a7d skeleton/entities.py.tmpl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/skeleton/entities.py.tmpl Fri Apr 23 12:42:53 2010 +0200 @@ -0,0 +1,7 @@ +"""%(distname)s entity's classes + +:organization: %(author)s +:copyright: %(year)s %(author)s +:contact: %(author-web-site)s -- mailto:%(author-email)s +:license: %(long-license)s +""" diff -r b619531ddbd2 -r b6e250dd7a7d skeleton/hooks.py.tmpl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/skeleton/hooks.py.tmpl Fri Apr 23 12:42:53 2010 +0200 @@ -0,0 +1,7 @@ +"""%(distname)s specific hooks and operations + +:organization: %(author)s +:copyright: %(year)s %(author)s +:contact: %(author-web-site)s -- mailto:%(author-email)s +:license: %(long-license)s +""" diff -r b619531ddbd2 -r b6e250dd7a7d skeleton/migration/postcreate.py --- a/skeleton/migration/postcreate.py Fri Apr 23 12:40:48 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,12 +0,0 @@ -# postcreate script. You could setup site properties or a workflow here for example -""" - -:organization: Logilab -:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. -:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr -:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses -""" - -# Example of site property change -#set_property('ui.site-title', "") - diff -r b619531ddbd2 -r b6e250dd7a7d skeleton/migration/postcreate.py.tmpl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/skeleton/migration/postcreate.py.tmpl Fri Apr 23 12:42:53 2010 +0200 @@ -0,0 +1,14 @@ +"""%(distname)s postcreate script, executed at instance creation time or when +the cube is added to an existing instance. + +You could setup site properties or a workflow here for example. + +:organization: %(author)s +:copyright: %(year)s %(author)s +:contact: %(author-web-site)s -- mailto:%(author-email)s +:license: %(long-license)s +""" + +# Example of site property change +#set_property('ui.site-title', "") + diff -r b619531ddbd2 -r b6e250dd7a7d skeleton/migration/precreate.py --- a/skeleton/migration/precreate.py Fri Apr 23 12:40:48 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,10 +0,0 @@ -# Instructions here will be read before reading the schema -""" - -:organization: Logilab -:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. -:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr -:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses -""" -# You could create your own groups here, like in : -# create_entity('CWGroup', name=u'mygroup') diff -r b619531ddbd2 -r b6e250dd7a7d skeleton/migration/precreate.py.tmpl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/skeleton/migration/precreate.py.tmpl Fri Apr 23 12:42:53 2010 +0200 @@ -0,0 +1,10 @@ +"""%(distname)s precreate script, executed at instance creation time or when +the cube is added to an existing instance, before the schema is serialized. 
+ +This is typically to create groups referenced by the cube'schema. + +:organization: %(author)s +:copyright: %(year)s %(author)s +:contact: %(author-web-site)s -- mailto:%(author-email)s +:license: %(long-license)s +""" diff -r b619531ddbd2 -r b6e250dd7a7d skeleton/schema.py --- a/skeleton/schema.py Fri Apr 23 12:40:48 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,8 +0,0 @@ -# cube's specific schema -""" - -:organization: Logilab -:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. -:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr -:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses -""" diff -r b619531ddbd2 -r b6e250dd7a7d skeleton/schema.py.tmpl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/skeleton/schema.py.tmpl Fri Apr 23 12:42:53 2010 +0200 @@ -0,0 +1,7 @@ +"""%(distname)s schema + +:organization: %(author)s +:copyright: %(year)s %(author)s +:contact: %(author-web-site)s -- mailto:%(author-email)s +:license: %(long-license)s +""" diff -r b619531ddbd2 -r b6e250dd7a7d skeleton/setup.py --- a/skeleton/setup.py Fri Apr 23 12:40:48 2010 +0200 +++ b/skeleton/setup.py Fri Apr 23 12:42:53 2010 +0200 @@ -1,57 +1,170 @@ #!/usr/bin/env python -""" +# pylint: disable-msg=W0404,W0622,W0704,W0613,W0152 +"""Generic Setup script, takes package info from __pkginfo__.py file. -:organization: Logilab -:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. +:copyright: 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr :license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses """ -# pylint: disable-msg=W0404,W0622,W0704,W0613,W0152 -# Copyright (c) 2003-2010 LOGILAB S.A. (Paris, FRANCE). -# http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This program is free software; you can redistribute it and/or modify it under -# the terms of the GNU General Public License as published by the Free Software -# Foundation; either version 2 of the License, or (at your option) any later -# version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along with -# this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
-""" Generic Setup script, takes package info from __pkginfo__.py file """ +__docformat__ = "restructuredtext en" + +import os +import sys +import shutil +from os.path import isdir, exists, join, walk -from distutils.core import setup +try: + if os.environ.get('NO_SETUPTOOLS'): + raise ImportError() + from setuptools import setup + from setuptools.command import install_lib + USE_SETUPTOOLS = 1 +except ImportError: + from distutils.core import setup + from distutils.command import install_lib + USE_SETUPTOOLS = 0 + +sys.modules.pop('__pkginfo__', None) # import required features -from __pkginfo__ import distname, version, license, short_desc, long_desc, \ +from __pkginfo__ import modname, version, license, description, \ web, author, author_email # import optional features -try: - from __pkginfo__ import data_files -except ImportError: - data_files = None -try: - from __pkginfo__ import include_dirs -except ImportError: - include_dirs = [] +import __pkginfo__ +distname = getattr(__pkginfo__, 'distname', modname) +scripts = getattr(__pkginfo__, 'scripts', []) +data_files = getattr(__pkginfo__, 'data_files', None) +include_dirs = getattr(__pkginfo__, 'include_dirs', []) +ext_modules = getattr(__pkginfo__, 'ext_modules', None) +dependency_links = getattr(__pkginfo__, 'dependency_links', []) + +STD_BLACKLIST = ('CVS', '.svn', '.hg', 'debian', 'dist', 'build') + +IGNORED_EXTENSIONS = ('.pyc', '.pyo', '.elc', '~') + +if exists('README'): + long_description = file('README').read() +else: + long_description = '' +if USE_SETUPTOOLS: + requires = {} + for entry in ("__depends__", "__recommends__"): + requires.update(getattr(__pkginfo__, entry, {})) + install_requires = [("%s %s" % (d, v and v or "")).strip() + for d, v in requires.iteritems()] +else: + install_requires = [] + + +def ensure_scripts(linux_scripts): + """Creates the proper script names required for each platform + (taken from 4Suite) + """ + from distutils import util + if util.get_platform()[:3] == 'win': + scripts_ = [script + '.bat' for script in linux_scripts] + else: + scripts_ = linux_scripts + return scripts_ + +def get_packages(directory, prefix): + """return a list of subpackages for the given directory""" + result = [] + for package in os.listdir(directory): + absfile = join(directory, package) + if isdir(absfile): + if exists(join(absfile, '__init__.py')) or \ + package in ('test', 'tests'): + if prefix: + result.append('%s.%s' % (prefix, package)) + else: + result.append(package) + result += get_packages(absfile, result[-1]) + return result + +def export(from_dir, to_dir, + blacklist=STD_BLACKLIST, + ignore_ext=IGNORED_EXTENSIONS, + verbose=True): + """make a mirror of from_dir in to_dir, omitting directories and files + listed in the black list + """ + def make_mirror(arg, directory, fnames): + """walk handler""" + for norecurs in blacklist: + try: + fnames.remove(norecurs) + except ValueError: + pass + for filename in fnames: + # don't include binary files + if filename[-4:] in ignore_ext: + continue + if filename[-1] == '~': + continue + src = join(directory, filename) + dest = to_dir + src[len(from_dir):] + if verbose: + print >> sys.stderr, src, '->', dest + if os.path.isdir(src): + if not exists(dest): + os.mkdir(dest) + else: + if exists(dest): + os.remove(dest) + shutil.copy2(src, dest) + try: + os.mkdir(to_dir) + except OSError, ex: + # file exists ? 
+ import errno + if ex.errno != errno.EEXIST: + raise + walk(from_dir, make_mirror, None) + + +class MyInstallLib(install_lib.install_lib): + """extend install_lib command to handle package __init__.py and + include_dirs variable if necessary + """ + def run(self): + """overridden from install_lib class""" + install_lib.install_lib.run(self) + # manually install included directories if any + if include_dirs: + base = modname + for directory in include_dirs: + dest = join(self.install_dir, base, directory) + export(directory, dest, verbose=False) def install(**kwargs): """setup entry point""" - #kwargs['distname'] = modname - return setup(name=distname, - version=version, - license=license, - description=short_desc, - long_description=long_desc, - author=author, - author_email=author_email, - url=web, - data_files=data_files, - **kwargs) + if USE_SETUPTOOLS: + if '--force-manifest' in sys.argv: + sys.argv.remove('--force-manifest') + # install-layout option was introduced in 2.5.3-1~exp1 + elif sys.version_info < (2, 5, 4) and '--install-layout=deb' in sys.argv: + sys.argv.remove('--install-layout=deb') + kwargs['package_dir'] = {modname : '.'} + packages = [modname] + get_packages(os.getcwd(), modname) + if USE_SETUPTOOLS and install_requires: + kwargs['install_requires'] = install_requires + kwargs['dependency_links'] = dependency_links + kwargs['packages'] = packages + return setup(name = distname, + version = version, + license = license, + description = description, + long_description = long_description, + author = author, + author_email = author_email, + url = web, + scripts = ensure_scripts(scripts), + data_files = data_files, + ext_modules = ext_modules, + cmdclass = {'install_lib': MyInstallLib}, + **kwargs + ) if __name__ == '__main__' : install() diff -r b619531ddbd2 -r b6e250dd7a7d skeleton/sobjects.py.tmpl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/skeleton/sobjects.py.tmpl Fri Apr 23 12:42:53 2010 +0200 @@ -0,0 +1,7 @@ +"""%(distname)s repository side views, usually for notification + +:organization: %(author)s +:copyright: %(year)s %(author)s +:contact: %(author-web-site)s -- mailto:%(author-email)s +:license: %(long-license)s +""" diff -r b619531ddbd2 -r b6e250dd7a7d skeleton/views.py --- a/skeleton/views.py Fri Apr 23 12:40:48 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,7 +0,0 @@ -"""cube-specific forms/views/actions/components - -:organization: Logilab -:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. 
-:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr -:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses -""" diff -r b619531ddbd2 -r b6e250dd7a7d skeleton/views.py.tmpl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/skeleton/views.py.tmpl Fri Apr 23 12:42:53 2010 +0200 @@ -0,0 +1,7 @@ +"""%(distname)s views/forms/actions/components for web ui + +:organization: %(author)s +:copyright: %(year)s %(author)s +:contact: %(author-web-site)s -- mailto:%(author-email)s +:license: %(long-license)s +""" diff -r b619531ddbd2 -r b6e250dd7a7d sobjects/test/unittest_supervising.py --- a/sobjects/test/unittest_supervising.py Fri Apr 23 12:40:48 2010 +0200 +++ b/sobjects/test/unittest_supervising.py Fri Apr 23 12:42:53 2010 +0200 @@ -30,9 +30,9 @@ # do some modification user = self.execute('INSERT CWUser X: X login "toto", X upassword "sosafe", X in_group G ' 'WHERE G name "users"').get_entity(0, 0) - self.execute('SET X last_login_time NOW WHERE X eid %(x)s', {'x': user.eid}, 'x') + self.execute('SET X last_login_time NOW WHERE X eid %(x)s', {'x': user.eid}) self.execute('DELETE Card B WHERE B title "une news !"') - self.execute('SET X bookmarked_by U WHERE X is Bookmark, U eid %(x)s', {'x': user.eid}, 'x') + self.execute('SET X bookmarked_by U WHERE X is Bookmark, U eid %(x)s', {'x': user.eid}) self.execute('SET X content "duh?" WHERE X is Comment') self.execute('DELETE X comments Y WHERE Y is Card, Y title "une autre news !"') # check only one supervision email operation @@ -91,7 +91,7 @@ def test_nonregr1(self): session = self.session # do some unlogged modification - self.execute('SET X last_login_time NOW WHERE X eid %(x)s', {'x': session.user.eid}, 'x') + self.execute('SET X last_login_time NOW WHERE X eid %(x)s', {'x': session.user.eid}) self.commit() # no crash diff -r b619531ddbd2 -r b6e250dd7a7d test/data/cubes/file/__pkginfo__.py --- a/test/data/cubes/file/__pkginfo__.py Fri Apr 23 12:40:48 2010 +0200 +++ b/test/data/cubes/file/__pkginfo__.py Fri Apr 23 12:42:53 2010 +0200 @@ -13,48 +13,3 @@ numversion = (1, 4, 3) version = '.'.join(str(num) for num in numversion) -license = 'LGPL' -copyright = '''Copyright (c) 2003-2010 LOGILAB S.A. (Paris, FRANCE). -http://www.logilab.fr/ -- mailto:contact@logilab.fr''' - -author = "Logilab" -author_email = "contact@logilab.fr" -web = '' - -short_desc = "Raw file support for the CubicWeb framework" -long_desc = """CubicWeb is a entities / relations bases knowledge management system -developped at Logilab. -. -This package provides schema and views to store files and images in cubicweb -applications. -. 
-""" - -from os import listdir -from os.path import join - -CUBES_DIR = join('share', 'cubicweb', 'cubes') -try: - data_files = [ - [join(CUBES_DIR, 'file'), - [fname for fname in listdir('.') - if fname.endswith('.py') and fname != 'setup.py']], - [join(CUBES_DIR, 'file', 'data'), - [join('data', fname) for fname in listdir('data')]], - [join(CUBES_DIR, 'file', 'wdoc'), - [join('wdoc', fname) for fname in listdir('wdoc')]], - [join(CUBES_DIR, 'file', 'views'), - [join('views', fname) for fname in listdir('views') if fname.endswith('.py')]], - [join(CUBES_DIR, 'file', 'i18n'), - [join('i18n', fname) for fname in listdir('i18n')]], - [join(CUBES_DIR, 'file', 'migration'), - [join('migration', fname) for fname in listdir('migration')]], - ] -except OSError: - # we are in an installed directory - pass - - -cube_eid = 20320 -# used packages -__use__ = () diff -r b619531ddbd2 -r b6e250dd7a7d test/unittest_cwconfig.py --- a/test/unittest_cwconfig.py Fri Apr 23 12:40:48 2010 +0200 +++ b/test/unittest_cwconfig.py Fri Apr 23 12:42:53 2010 +0200 @@ -7,13 +7,16 @@ """ import sys import os +import tempfile from os.path import dirname, join, abspath from logilab.common.modutils import cleanup_sys_modules -from logilab.common.testlib import TestCase, unittest_main +from logilab.common.testlib import (TestCase, unittest_main, + with_tempdir) from logilab.common.changelog import Version from cubicweb.devtools import ApptestConfiguration +from cubicweb.cwconfig import _find_prefix def unabsolutize(path): parts = path.split(os.sep) @@ -32,7 +35,7 @@ self.config._cubes = ('email', 'file') def tearDown(self): - os.environ.pop('CW_CUBES_PATH', None) + ApptestConfiguration.CUBES_PATH = [] def test_reorder_cubes(self): # jpl depends on email and file and comment @@ -52,7 +55,7 @@ def test_reorder_cubes_recommends(self): from cubes.comment import __pkginfo__ as comment_pkginfo - comment_pkginfo.__recommend__ = ('file',) + comment_pkginfo.__recommends_cubes__ = {'file': None} try: # email recommends comment # comment recommends file @@ -65,7 +68,7 @@ self.assertEquals(self.config.reorder_cubes(('comment', 'forge', 'email', 'file')), ('forge', 'email', 'comment', 'file')) finally: - comment_pkginfo.__use__ = () + comment_pkginfo.__recommends_cubes__ = {} # def test_vc_config(self): @@ -91,11 +94,11 @@ # make sure we don't import the email cube, but the stdlib email package import email self.assertNotEquals(dirname(email.__file__), self.config.CUBES_DIR) - os.environ['CW_CUBES_PATH'] = CUSTOM_CUBES_DIR + self.config.__class__.CUBES_PATH = [CUSTOM_CUBES_DIR] self.assertEquals(self.config.cubes_search_path(), [CUSTOM_CUBES_DIR, self.config.CUBES_DIR]) - os.environ['CW_CUBES_PATH'] = os.pathsep.join([ - CUSTOM_CUBES_DIR, self.config.CUBES_DIR, 'unexistant']) + self.config.__class__.CUBES_PATH = [CUSTOM_CUBES_DIR, + self.config.CUBES_DIR, 'unexistant'] # filter out unexistant and duplicates self.assertEquals(self.config.cubes_search_path(), [CUSTOM_CUBES_DIR, @@ -114,6 +117,91 @@ from cubes import file self.assertEquals(file.__path__, [join(CUSTOM_CUBES_DIR, 'file')]) +class FindPrefixTC(TestCase): + def make_dirs(self, *args): + path = join(tempfile.tempdir, *args) + if not os.path.exists(path): + os.makedirs(path) + return path + + def make_file(self, *args): + self.make_dirs(*args[: -1]) + file_path = join(tempfile.tempdir, *args) + file_obj = open(file_path, 'w') + file_obj.write('""" None """') + file_obj.close() + return file_path + + @with_tempdir + def test_samedir(self): + prefix = tempfile.tempdir + 
self.make_dirs('share', 'cubicweb') + self.assertEquals(_find_prefix(prefix), prefix) + + @with_tempdir + def test_samedir_filepath(self): + prefix = tempfile.tempdir + self.make_dirs('share', 'cubicweb') + file_path = self.make_file('bob.py') + self.assertEquals(_find_prefix(file_path), prefix) + + @with_tempdir + def test_dir_inside_prefix(self): + prefix = tempfile.tempdir + self.make_dirs('share', 'cubicweb') + dir_path = self.make_dirs('bob') + self.assertEquals(_find_prefix(dir_path), prefix) + + @with_tempdir + def test_file_in_dir_inside_prefix(self): + prefix = tempfile.tempdir + self.make_dirs('share', 'cubicweb') + file_path = self.make_file('bob', 'toto.py') + self.assertEquals(_find_prefix(file_path), prefix) + + @with_tempdir + def test_file_in_deeper_dir_inside_prefix(self): + prefix = tempfile.tempdir + self.make_dirs('share', 'cubicweb') + file_path = self.make_file('bob', 'pyves', 'alain', 'adim', 'syt', 'toto.py') + self.assertEquals(_find_prefix(file_path), prefix) + + @with_tempdir + def test_multiple_candidate_prefix(self): + self.make_dirs('share', 'cubicweb') + prefix = self.make_dirs('bob') + self.make_dirs('bob', 'share', 'cubicweb') + file_path = self.make_file('bob', 'pyves', 'alain', 'adim', 'syt', 'toto.py') + self.assertEquals(_find_prefix(file_path), prefix) + + @with_tempdir + def test_sister_candidate_prefix(self): + prefix = tempfile.tempdir + self.make_dirs('share', 'cubicweb') + self.make_dirs('bob', 'share', 'cubicweb') + file_path = self.make_file('bell', 'toto.py') + self.assertEquals(_find_prefix(file_path), prefix) + + @with_tempdir + def test_multiple_parent_candidate_prefix(self): + self.make_dirs('share', 'cubicweb') + prefix = self.make_dirs('share', 'cubicweb', 'bob') + self.make_dirs('share', 'cubicweb', 'bob', 'share', 'cubicweb') + file_path = self.make_file('share', 'cubicweb', 'bob', 'pyves', 'alain', 'adim', 'syt', 'toto.py') + self.assertEquals(_find_prefix(file_path), prefix) + + @with_tempdir + def test_upper_candidate_prefix(self): + prefix = tempfile.tempdir + self.make_dirs('share', 'cubicweb') + self.make_dirs('bell','bob', 'share', 'cubicweb') + file_path = self.make_file('bell', 'toto.py') + self.assertEquals(_find_prefix(file_path), prefix) + + @with_tempdir + def test_no_prefix(self): + prefix = tempfile.tempdir + self.assertEquals(_find_prefix(prefix), sys.prefix) if __name__ == '__main__': unittest_main() diff -r b619531ddbd2 -r b6e250dd7a7d test/unittest_cwctl.py --- a/test/unittest_cwctl.py Fri Apr 23 12:40:48 2010 +0200 +++ b/test/unittest_cwctl.py Fri Apr 23 12:42:53 2010 +0200 @@ -10,15 +10,8 @@ from cStringIO import StringIO from logilab.common.testlib import TestCase, unittest_main -if os.environ.get('APYCOT_ROOT'): - root = os.environ['APYCOT_ROOT'] - CUBES_DIR = '%s/local/share/cubicweb/cubes/' % root - os.environ['CW_CUBES_PATH'] = CUBES_DIR - REGISTRY_DIR = '%s/etc/cubicweb.d/' % root - os.environ['CW_INSTANCES_DIR'] = REGISTRY_DIR - from cubicweb.cwconfig import CubicWebConfiguration -CubicWebConfiguration.load_cwctl_plugins() +CubicWebConfiguration.load_cwctl_plugins() # XXX necessary? 
class CubicWebCtlTC(TestCase): def setUp(self): diff -r b619531ddbd2 -r b6e250dd7a7d test/unittest_dbapi.py --- a/test/unittest_dbapi.py Fri Apr 23 12:40:48 2010 +0200 +++ b/test/unittest_dbapi.py Fri Apr 23 12:42:53 2010 +0200 @@ -40,21 +40,6 @@ self.assertRaises(ProgrammingError, cnx.user, None) self.assertRaises(ProgrammingError, cnx.describe, 1) - def test_session_data_api(self): - cnx = self.login('anon') - self.assertEquals(cnx.get_session_data('data'), None) - self.assertEquals(cnx.session_data(), {}) - cnx.set_session_data('data', 4) - self.assertEquals(cnx.get_session_data('data'), 4) - self.assertEquals(cnx.session_data(), {'data': 4}) - cnx.del_session_data('data') - cnx.del_session_data('whatever') - self.assertEquals(cnx.get_session_data('data'), None) - self.assertEquals(cnx.session_data(), {}) - cnx.session_data()['data'] = 4 - self.assertEquals(cnx.get_session_data('data'), 4) - self.assertEquals(cnx.session_data(), {'data': 4}) - def test_shared_data_api(self): cnx = self.login('anon') self.assertEquals(cnx.get_shared_data('data'), None) diff -r b619531ddbd2 -r b6e250dd7a7d test/unittest_entity.py --- a/test/unittest_entity.py Fri Apr 23 12:40:48 2010 +0200 +++ b/test/unittest_entity.py Fri Apr 23 12:42:53 2010 +0200 @@ -44,8 +44,8 @@ p = req.create_entity('Personne', nom=u'toto') oe = req.create_entity('Note', type=u'x') self.execute('SET T ecrit_par U WHERE T eid %(t)s, U eid %(u)s', - {'t': oe.eid, 'u': p.eid}, ('t','u')) - self.execute('SET TAG tags X WHERE X eid %(x)s', {'x': oe.eid}, 'x') + {'t': oe.eid, 'u': p.eid}) + self.execute('SET TAG tags X WHERE X eid %(x)s', {'x': oe.eid}) e = req.create_entity('Note', type=u'z') e.copy_relations(oe.eid) self.assertEquals(len(e.ecrit_par), 1) @@ -60,7 +60,7 @@ oe = req.create_entity('Note', type=u'x') self.schema['ecrit_par'].rdef('Note', 'Personne').composite = 'subject' self.execute('SET T ecrit_par U WHERE T eid %(t)s, U eid %(u)s', - {'t': oe.eid, 'u': p.eid}, ('t','u')) + {'t': oe.eid, 'u': p.eid}) e = req.create_entity('Note', type=u'z') e.copy_relations(oe.eid) self.failIf(e.ecrit_par) @@ -69,12 +69,12 @@ def test_copy_with_composite(self): user = self.user() adeleid = self.execute('INSERT EmailAddress X: X address "toto@logilab.org", U use_email X WHERE U login "admin"')[0][0] - e = self.entity('Any X WHERE X eid %(x)s', {'x':user.eid}, 'x') + e = self.execute('Any X WHERE X eid %(x)s', {'x': user.eid}).get_entity(0, 0) self.assertEquals(e.use_email[0].address, "toto@logilab.org") self.assertEquals(e.use_email[0].eid, adeleid) usereid = self.execute('INSERT CWUser X: X login "toto", X upassword "toto", X in_group G ' 'WHERE G name "users"')[0][0] - e = self.entity('Any X WHERE X eid %(x)s', {'x':usereid}, 'x') + e = self.execute('Any X WHERE X eid %(x)s', {'x': usereid}).get_entity(0, 0) e.copy_relations(user.eid) self.failIf(e.use_email) self.failIf(e.primary_email) @@ -87,14 +87,14 @@ user.fire_transition('deactivate') self.commit() eid2 = self.execute('INSERT CWUser X: X login "tutu", X upassword %(pwd)s', {'pwd': 'toto'})[0][0] - e = self.entity('Any X WHERE X eid %(x)s', {'x': eid2}, 'x') + e = self.execute('Any X WHERE X eid %(x)s', {'x': eid2}).get_entity(0, 0) e.copy_relations(user.eid) self.commit() e.clear_related_cache('in_state', 'subject') self.assertEquals(e.state, 'activated') def test_related_cache_both(self): - user = self.entity('Any X WHERE X eid %(x)s', {'x':self.user().eid}, 'x') + user = self.execute('Any X WHERE X eid %(x)s', {'x':self.user().eid}).get_entity(0, 0) adeleid = 
self.execute('INSERT EmailAddress X: X address "toto@logilab.org", U use_email X WHERE U login "admin"')[0][0] self.commit() self.assertEquals(user._related_cache, {}) @@ -235,7 +235,7 @@ #rql = email.unrelated_rql('use_email', 'Person', 'object')[0] #self.assertEquals(rql, '') self.login('anon') - email = self.execute('Any X WHERE X eid %(x)s', {'x': email.eid}, 'x').get_entity(0, 0) + email = self.execute('Any X WHERE X eid %(x)s', {'x': email.eid}).get_entity(0, 0) rql = email.unrelated_rql('use_email', 'CWUser', 'object')[0] self.assertEquals(rql, 'Any S,AA,AB,AC,AD ORDERBY AA ' 'WHERE NOT S use_email O, O eid %(x)s, S is CWUser, S login AA, S firstname AB, S surname AC, S modification_date AD, ' @@ -260,7 +260,7 @@ unrelated = [r[0] for r in e.unrelated('tags', 'Personne', 'subject')] self.failUnless(p.eid in unrelated) self.execute('SET X tags Y WHERE X is Tag, Y is Personne') - e = self.entity('Any X WHERE X is Tag') + e = self.execute('Any X WHERE X is Tag').get_entity(0, 0) unrelated = [r[0] for r in e.unrelated('tags', 'Personne', 'subject')] self.failIf(p.eid in unrelated) @@ -281,7 +281,7 @@ self.assertEquals([x.address for x in rset.entities()], [u'hop']) self.create_user('toto') self.login('toto') - email = self.execute('Any X WHERE X eid %(x)s', {'x': email.eid}, 'x').get_entity(0, 0) + email = self.execute('Any X WHERE X eid %(x)s', {'x': email.eid}).get_entity(0, 0) rset = email.unrelated('use_email', 'CWUser', 'object') self.assertEquals([x.login for x in rset.entities()], ['toto']) user = self.request().user @@ -291,7 +291,7 @@ rset = user.unrelated('use_email', 'EmailAddress', 'subject') self.assertEquals([x.address for x in rset.entities()], []) self.login('anon') - email = self.execute('Any X WHERE X eid %(x)s', {'x': email.eid}, 'x').get_entity(0, 0) + email = self.execute('Any X WHERE X eid %(x)s', {'x': email.eid}).get_entity(0, 0) rset = email.unrelated('use_email', 'CWUser', 'object') self.assertEquals([x.login for x in rset.entities()], []) user = self.request().user @@ -439,7 +439,7 @@ eid = session.execute( 'INSERT TrInfo X: X comment "zou", X wf_info_for U, X from_state S1, X to_state S2 ' 'WHERE U login "admin", S1 name "activated", S2 name "deactivated"')[0][0] - trinfo = self.entity('Any X WHERE X eid %(x)s', {'x': eid}, 'x') + trinfo = self.execute('Any X WHERE X eid %(x)s', {'x': eid}).get_entity(0, 0) trinfo.complete() self.failUnless(isinstance(trinfo['creation_date'], datetime)) self.failUnless(trinfo.relation_cached('from_state', 'subject')) @@ -449,9 +449,9 @@ def test_request_cache(self): req = self.request() - user = self.entity('CWUser X WHERE X login "admin"', req=req) + user = self.execute('CWUser X WHERE X login "admin"', req=req).get_entity(0, 0) state = user.in_state[0] - samestate = self.entity('State X WHERE X name "activated"', req=req) + samestate = self.execute('State X WHERE X name "activated"', req=req).get_entity(0, 0) self.failUnless(state is samestate) def test_rest_path(self): @@ -481,7 +481,7 @@ self.assertEquals(person.prenom, u'adrien') self.assertEquals(person.nom, u'di mascio') person.set_attributes(prenom=u'sylvain', nom=u'thénault') - person = self.entity('Personne P') # XXX retreival needed ? + person = self.execute('Personne P').get_entity(0, 0) # XXX retreival needed ? 
self.assertEquals(person.prenom, u'sylvain') self.assertEquals(person.nom, u'thénault') diff -r b619531ddbd2 -r b6e250dd7a7d test/unittest_rset.py --- a/test/unittest_rset.py Fri Apr 23 12:40:48 2010 +0200 +++ b/test/unittest_rset.py Fri Apr 23 12:42:53 2010 +0200 @@ -212,7 +212,7 @@ def test_get_entity_simple(self): self.request().create_entity('CWUser', login=u'adim', upassword='adim', surname=u'di mascio', firstname=u'adrien') - e = self.entity('Any X,T WHERE X login "adim", X surname T') + e = self.execute('Any X,T WHERE X login "adim", X surname T').get_entity(0, 0) self.assertEquals(e['surname'], 'di mascio') self.assertRaises(KeyError, e.__getitem__, 'firstname') self.assertRaises(KeyError, e.__getitem__, 'creation_date') diff -r b619531ddbd2 -r b6e250dd7a7d test/unittest_selectors.py --- a/test/unittest_selectors.py Fri Apr 23 12:40:48 2010 +0200 +++ b/test/unittest_selectors.py Fri Apr 23 12:42:53 2010 +0200 @@ -87,6 +87,42 @@ csel = AndSelector(Selector(), sel) self.assertIs(csel.search_selector(implements), sel) + def test_inplace_and(self): + selector = _1_() + selector &= _1_() + selector &= _1_() + self.assertEquals(selector(None), 3) + selector = _1_() + selector &= _0_() + selector &= _1_() + self.assertEquals(selector(None), 0) + selector = _0_() + selector &= _1_() + selector &= _1_() + self.assertEquals(selector(None), 0) + selector = _0_() + selector &= _0_() + selector &= _0_() + self.assertEquals(selector(None), 0) + + def test_inplace_or(self): + selector = _1_() + selector |= _1_() + selector |= _1_() + self.assertEquals(selector(None), 1) + selector = _1_() + selector |= _0_() + selector |= _1_() + self.assertEquals(selector(None), 1) + selector = _0_() + selector |= _1_() + selector |= _1_() + self.assertEquals(selector(None), 1) + selector = _0_() + selector |= _0_() + selector |= _0_() + self.assertEquals(selector(None), 0) + class ImplementsSelectorTC(CubicWebTC): def test_etype_priority(self): diff -r b619531ddbd2 -r b6e250dd7a7d toolsutils.py --- a/toolsutils.py Fri Apr 23 12:40:48 2010 +0200 +++ b/toolsutils.py Fri Apr 23 12:42:53 2010 +0200 @@ -103,10 +103,9 @@ else: print 'no diff between %s and %s' % (appl_file, ref_file) - +SKEL_EXCLUDE = ('*.py[co]', '*.orig', '*~', '*_flymake.py') def copy_skeleton(skeldir, targetdir, context, - exclude=('*.py[co]', '*.orig', '*~', '*_flymake.py'), - askconfirm=False): + exclude=SKEL_EXCLUDE, askconfirm=False): import shutil from fnmatch import fnmatch skeldir = normpath(skeldir) @@ -184,7 +183,7 @@ config_file, ex) return config -def env_path(env_var, default, name): +def env_path(env_var, default, name, checkexists=True): """get a path specified in a variable or using the default value and return it. @@ -203,8 +202,8 @@ :raise `ConfigurationError`: if the returned path does not exist """ path = environ.get(env_var, default) - if not exists(path): - raise ConfigurationError('%s path %s doesn\'t exist' % (name, path)) + if checkexists and not exists(path): + raise ConfigurationError('%s directory %s doesn\'t exist' % (name, path)) return abspath(path) diff -r b619531ddbd2 -r b6e250dd7a7d transaction.py --- a/transaction.py Fri Apr 23 12:40:48 2010 +0200 +++ b/transaction.py Fri Apr 23 12:42:53 2010 +0200 @@ -48,7 +48,7 @@ none if not found. 
""" return self.req.execute('Any X WHERE X eid %(x)s', - {'x': self.user_eid}, 'x').get_entity(0, 0) + {'x': self.user_eid}).get_entity(0, 0) def actions_list(self, public=True): """return an ordered list of action effectued during that transaction diff -r b619531ddbd2 -r b6e250dd7a7d utils.py --- a/utils.py Fri Apr 23 12:40:48 2010 +0200 +++ b/utils.py Fri Apr 23 12:42:53 2010 +0200 @@ -309,35 +309,6 @@ self.body.getvalue()) -def _pdf_conversion_availability(): - try: - import pysixt - except ImportError: - return False - from subprocess import Popen, STDOUT - if not os.path.isfile('/usr/bin/fop'): - return False - try: - Popen(['/usr/bin/fop', '-q'], - stdout=open(os.devnull, 'w'), - stderr=STDOUT) - except OSError, e: - getLogger('cubicweb').info('fop not usable (%s)', e) - return False - return True - -def can_do_pdf_conversion(__answer_cache=[]): - """pdf conversion depends on - * pysixt (python package) - * fop 0.9x - - NOTE: actual check is done by _pdf_conversion_availability and - result is cached - """ - if not __answer_cache: # first time, not in cache - __answer_cache.append(_pdf_conversion_availability()) - return __answer_cache[0] - try: try: # may not be there if cubicweb-web not installed diff -r b619531ddbd2 -r b6e250dd7a7d web/_exceptions.py --- a/web/_exceptions.py Fri Apr 23 12:40:48 2010 +0200 +++ b/web/_exceptions.py Fri Apr 23 12:42:53 2010 +0200 @@ -40,10 +40,6 @@ self.status = int(status) self.content = content -class ExplicitLogin(AuthenticationError): - """raised when a bad connection id is given or when an attempt to establish - a connection failed""" - class InvalidSession(CubicWebException): """raised when a session id is found but associated session is not found or invalid @@ -62,3 +58,9 @@ except ImportError: from simplejson import dumps return dumps({'reason': self.reason}) + +class LogOut(PublishException): + """raised to ask for deauthentication of a logged in user""" + def __init__(self, url): + super(LogOut, self).__init__() + self.url = url diff -r b619531ddbd2 -r b6e250dd7a7d web/action.py --- a/web/action.py Fri Apr 23 12:40:48 2010 +0200 +++ b/web/action.py Fri Apr 23 12:42:53 2010 +0200 @@ -20,18 +20,7 @@ """ __registry__ = 'actions' __select__ = match_search_state('normal') - - cw_property_defs = { - 'visible': dict(type='Boolean', default=True, - help=_('display the action or not')), - 'order': dict(type='Int', default=99, - help=_('display order of the action')), - 'category': dict(type='String', default='moreactions', - vocabulary=('mainactions', 'moreactions', 'addrelated', - 'useractions', 'siteactions', 'hidden'), - help=_('context where this component should be displayed')), - } - site_wide = True # don't want user to configurate actions + order = 99 category = 'moreactions' # actions in category 'moreactions' can specify a sub-menu in which they should be filed submenu = None diff -r b619531ddbd2 -r b6e250dd7a7d web/application.py --- a/web/application.py Fri Apr 23 12:40:48 2010 +0200 +++ b/web/application.py Fri Apr 23 12:42:53 2010 +0200 @@ -5,6 +5,8 @@ :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr :license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses """ +from __future__ import with_statement + __docformat__ = "restructuredtext en" import sys @@ -18,10 +20,11 @@ from cubicweb import ( ValidationError, Unauthorized, AuthenticationError, NoSelectableObject, RepositoryError, CW_EVENT_MANAGER) +from cubicweb.dbapi import DBAPISession from cubicweb.web import LOGGER, component from cubicweb.web 
import ( - StatusResponse, DirectResponse, Redirect, NotFound, - RemoteCallFailed, ExplicitLogin, InvalidSession, RequestError) + StatusResponse, DirectResponse, Redirect, NotFound, LogOut, + RemoteCallFailed, InvalidSession, RequestError) # make session manager available through a global variable so the debug view can # print information about web session @@ -61,7 +64,7 @@ for session in self.current_sessions(): no_use_time = (time() - session.last_usage_time) total += 1 - if session.anonymous_connection: + if session.anonymous_session: if no_use_time >= self.cleanup_anon_session_time: self.close_session(session) closed += 1 @@ -85,9 +88,11 @@ raise NotImplementedError() def open_session(self, req): - """open and return a new session for the given request + """open and return a new session for the given request. The session is + also bound to the request. - :raise ExplicitLogin: if authentication is required + raise :exc:`cubicweb.AuthenticationError` if authentication failed + (no authentication info found or wrong user/password) """ raise NotImplementedError() @@ -106,11 +111,24 @@ def __init__(self, vreg): self.vreg = vreg - def authenticate(self, req): - """authenticate user and return corresponding user object + def validate_session(self, req, session): + """check session validity, reconnecting it to the repository if the + associated connection expired in the repository side (hence the + necessity for this method). - :raise ExplicitLogin: if authentication is required (no authentication - info found or wrong user/password) + raise :exc:`InvalidSession` if session is corrupted for a reason or + another and should be closed + """ + raise NotImplementedError() + + def authenticate(self, req): + """authenticate user using connection information found in the request, + and return corresponding a :class:`~cubicweb.dbapi.Connection` instance, + as well as login and authentication information dictionary used to open + the connection. + + raise :exc:`cubicweb.AuthenticationError` if authentication failed + (no authentication info found or wrong user/password) """ raise NotImplementedError() @@ -168,7 +186,6 @@ :raise Redirect: if authentication has occured and succeed """ - assert req.cnx is None # at this point no cnx should be set on the request cookie = req.get_cookie() try: sessionid = str(cookie[self.SESSION_VAR].value) @@ -178,9 +195,11 @@ try: session = self.get_session(req, sessionid) except InvalidSession: + # try to open a new session, so we get an anonymous session if + # allowed try: session = self.open_session(req) - except ExplicitLogin: + except AuthenticationError: req.remove_cookie(cookie, self.SESSION_VAR) raise # remember last usage time for web session tracking @@ -196,14 +215,14 @@ req.set_cookie(cookie, self.SESSION_VAR, maxage=None) # remember last usage time for web session tracking session.last_usage_time = time() - if not session.anonymous_connection: + if not session.anonymous_session: self._postlogin(req) return session def _update_last_login_time(self, req): try: req.execute('SET X last_login_time NOW WHERE X eid %(x)s', - {'x' : req.user.eid}, 'x') + {'x' : req.user.eid}) req.cnx.commit() except (RepositoryError, Unauthorized): # ldap user are not writeable for instance @@ -240,7 +259,7 @@ """ self.session_manager.close_session(req.cnx) req.remove_cookie(req.get_cookie(), self.SESSION_VAR) - raise AuthenticationError(url=goto_url) + raise LogOut(url=goto_url) class CubicWebPublisher(object): @@ -284,7 +303,10 @@ sessions (i.e. 
a new connection may be created or an already existing one may be reused """ - self.session_handler.set_session(req) + try: + self.session_handler.set_session(req) + except AuthenticationError: + req.set_session(DBAPISession(None)) # publish methods ######################################################### @@ -296,19 +318,18 @@ return self.main_publish(path, req) finally: cnx = req.cnx - self._logfile_lock.acquire() - try: - try: - result = ['\n'+'*'*80] - result.append(req.url()) - result += ['%s %s -- (%.3f sec, %.3f CPU sec)' % q for q in cnx.executed_queries] - cnx.executed_queries = [] - self._query_log.write('\n'.join(result).encode(req.encoding)) - self._query_log.flush() - except Exception: - self.exception('error while logging queries') - finally: - self._logfile_lock.release() + if cnx: + with self._logfile_lock: + try: + result = ['\n'+'*'*80] + result.append(req.url()) + result += ['%s %s -- (%.3f sec, %.3f CPU sec)' % q + for q in cnx.executed_queries] + cnx.executed_queries = [] + self._query_log.write('\n'.join(result).encode(req.encoding)) + self._query_log.flush() + except Exception: + self.exception('error while logging queries') @deprecated("[3.4] use vreg['controllers'].select(...)") def select_controller(self, oid, req): @@ -348,12 +369,16 @@ raise Unauthorized(req._('not authorized')) req.update_search_state() result = controller.publish(rset=rset) - if req.cnx is not None: - # req.cnx is None if anonymous aren't allowed and we are - # displaying the cookie authentication form + if req.cnx: + # no req.cnx if anonymous aren't allowed and we are + # displaying some anonymous enabled view such as the cookie + # authentication form req.cnx.commit() except (StatusResponse, DirectResponse): - req.cnx.commit() + if req.cnx: + req.cnx.commit() + raise + except (AuthenticationError, LogOut): raise except Redirect: # redirect is raised by edit controller when everything went fine, @@ -375,10 +400,13 @@ else: # delete validation errors which may have been previously set if '__errorurl' in req.form: - req.del_session_data(req.form['__errorurl']) + req.session.data.pop(req.form['__errorurl'], None) raise - except (AuthenticationError, NotFound, RemoteCallFailed): - raise + except RemoteCallFailed, ex: + req.set_header('content-type', 'application/json') + raise StatusResponse(500, ex.dumps()) + except NotFound: + raise StatusResponse(404, self.notfound_content(req)) except ValidationError, ex: self.validation_error_handler(req, ex) except (Unauthorized, BadRQLQuery, RequestError), ex: @@ -389,7 +417,7 @@ self.critical('Catch all triggered!!!') self.exception('this is what happened') finally: - if req.cnx is not None: + if req.cnx: try: req.cnx.rollback() except: @@ -404,7 +432,7 @@ 'values': req.form, 'eidmap': req.data.get('eidmap', {}) } - req.set_session_data(req.form['__errorurl'], forminfo) + req.session.data[req.form['__errorurl']] = forminfo # XXX form session key / __error_url should be differentiated: # session key is 'url + #
' % self.div_class()) diff -r b619531ddbd2 -r b6e250dd7a7d web/data/cubicweb.acl.css --- a/web/data/cubicweb.acl.css Fri Apr 23 12:40:48 2010 +0200 +++ b/web/data/cubicweb.acl.css Fri Apr 23 12:42:53 2010 +0200 @@ -17,13 +17,13 @@ } -h3.schema{ +h3.schema{ font-weight: bold; } h4 a, h4 a:link, -h4 a:visited{ +h4 a:visited{ color:#000; } @@ -39,11 +39,11 @@ table.schemaInfo td { padding: .3em .5em; border: 1px solid grey; - width:33%; + width:33%; } -table.schemaInfo tr th { +table.schemaInfo tr th { padding: 0.2em 0px 0.2em 5px; background-image:none; background-color:#dfdfdf; @@ -51,47 +51,46 @@ table.schemaInfo thead tr { border: 1px solid #dfdfdf; -} +} table.schemaInfo td { - padding: 3px 10px 3px 5px; + padding: 3px 10px 3px 5px; } -.users{ +a.users{ color : #00CC33; font-weight: bold } -.guests{ +a.guests{ color : #ff7700; font-weight: bold; } -.staff{ - color : #0083ab; - font-weight: bold; -} - -.owners{ +a.owners{ color : #8b0000; font-weight: bold; } +a.managers{ + color: #000000; +} + .discret, -a.grey{ +a.grey{ color:#666; } -a.grey:hover{ +a.grey:hover{ color:#000; } -.red{ +.red{ color : #ff7700; } -div#schema_security{ - width:780px; +div#schema_security{ + width:100%; } /******************************************************************************/ /* user groups edition form (views/euser.py) */ diff -r b619531ddbd2 -r b6e250dd7a7d web/data/cubicweb.css --- a/web/data/cubicweb.css Fri Apr 23 12:40:48 2010 +0200 +++ b/web/data/cubicweb.css Fri Apr 23 12:42:53 2010 +0200 @@ -63,7 +63,7 @@ text-decoration: underline; } -a img { +a img, img { border: none; text-align: center; } diff -r b619531ddbd2 -r b6e250dd7a7d web/data/cubicweb.edition.js --- a/web/data/cubicweb.edition.js Fri Apr 23 12:40:48 2010 +0200 +++ b/web/data/cubicweb.edition.js Fri Apr 23 12:42:53 2010 +0200 @@ -321,9 +321,15 @@ } function _clearPreviousErrors(formid) { - jQuery('#' + formid + 'ErrorMessage').remove(); - jQuery('#' + formid + ' span.errorMsg').remove(); - jQuery('#' + formid + ' .error').removeClass('error'); + // on some case (eg max request size exceeded, we don't know the formid + if (formid) { + jQuery('#' + formid + 'ErrorMessage').remove(); + jQuery('#' + formid + ' span.errorMsg').remove(); + jQuery('#' + formid + ' .error').removeClass('error'); + } else { + jQuery('span.errorMsg').remove(); + jQuery('.error').removeClass('error'); + } } function _displayValidationerrors(formid, eid, errors) { @@ -391,7 +397,6 @@ var descr = result[1]; // Unknown structure if ( !isArrayLike(descr) || descr.length != 2 ) { - log('got strange error :', descr); updateMessage(descr); return false; } @@ -405,7 +410,12 @@ /* unfreeze form buttons when the validation process is over*/ function unfreezeFormButtons(formid) { jQuery('#progress').hide(); - jQuery('#' + formid + ' .validateButton').removeAttr('disabled'); + // on some case (eg max request size exceeded, we don't know the formid + if (formid) { + jQuery('#' + formid + ' .validateButton').removeAttr('disabled'); + } else { + jQuery('.validateButton').removeAttr('disabled'); + } return true; } diff -r b619531ddbd2 -r b6e250dd7a7d web/data/cubicweb.schema.css --- a/web/data/cubicweb.schema.css Fri Apr 23 12:40:48 2010 +0200 +++ b/web/data/cubicweb.schema.css Fri Apr 23 12:42:53 2010 +0200 @@ -25,11 +25,14 @@ div.acl{ position: relative; /* right: 20%;*/ - float: right; - width: 10%; + width: 25%; padding:0px 0px 0px 2em; } +div.acl table tr,td{ + padding: 2px 2px 2px 2px; +} + div.schema table { width : 100%; } @@ -40,7 +43,6 @@ } div.box div.title{ - 
border-bottom:1px solid black; padding:0.2em 0.2em; margin: 0 auto; } @@ -67,7 +69,6 @@ div.box{ float:left; border:1px solid black; - width:50%; } div.vl{ diff -r b619531ddbd2 -r b6e250dd7a7d web/data/pdf_icon.gif Binary file web/data/pdf_icon.gif has changed diff -r b619531ddbd2 -r b6e250dd7a7d web/facet.py --- a/web/facet.py Fri Apr 23 12:40:48 2010 +0200 +++ b/web/facet.py Fri Apr 23 12:42:53 2010 +0200 @@ -8,7 +8,6 @@ """ __docformat__ = "restructuredtext en" -from itertools import chain from copy import deepcopy from datetime import date, datetime, timedelta @@ -199,7 +198,7 @@ # add attribute variable to selection rqlst.add_selected(attrvar) # add is restriction if necessary - if not mainvar.stinfo['typerels']: + if mainvar.stinfo['typerel'] is None: etypes = frozenset(sol[mainvar.name] for sol in rqlst.solutions) rqlst.add_type_restriction(mainvar, etypes) return var @@ -228,12 +227,16 @@ for ovarname in linkedvars: vargraph[ovarname].remove(trvarname) # remove relation using this variable - for rel in chain(trvar.stinfo['relations'], trvar.stinfo['typerels']): + for rel in trvar.stinfo['relations']: if rel in removed: # already removed continue rqlst.remove_node(rel) removed.add(rel) + rel = trvar.stinfo['typerel'] + if rel is not None and not rel in removed: + rqlst.remove_node(rel) + removed.add(rel) # cleanup groupby clause if rqlst.groupby: for vref in rqlst.groupby[:]: @@ -329,9 +332,9 @@ def support_and(self): return False - def rqlexec(self, rql, args=None, cachekey=None): + def rqlexec(self, rql, args=None): try: - return self._cw.execute(rql, args, cachekey) + return self._cw.execute(rql, args) except Unauthorized: return [] @@ -372,7 +375,7 @@ if self.target_type is not None: rqlst.add_type_restriction(var, self.target_type) try: - rset = self.rqlexec(rqlst.as_string(), self.cw_rset.args, self.cw_rset.cachekey) + rset = self.rqlexec(rqlst.as_string(), self.cw_rset.args) except: self.exception('error while getting vocabulary for %s, rql: %s', self, rqlst.as_string()) @@ -463,7 +466,7 @@ newvar = _prepare_vocabulary_rqlst(rqlst, mainvar, self.rtype, self.role) _set_orderby(rqlst, newvar, self.sortasc, self.sortfunc) try: - rset = self.rqlexec(rqlst.as_string(), self.cw_rset.args, self.cw_rset.cachekey) + rset = self.rqlexec(rqlst.as_string(), self.cw_rset.args) except: self.exception('error while getting vocabulary for %s, rql: %s', self, rqlst.as_string()) diff -r b619531ddbd2 -r b6e250dd7a7d web/form.py --- a/web/form.py Fri Apr 23 12:40:48 2010 +0200 +++ b/web/form.py Fri Apr 23 12:42:53 2010 +0200 @@ -116,14 +116,16 @@ def form_valerror(self): """the validation error exception if any""" if self.parent_form is None: - return self._form_valerror + # unset if restore_previous_post has not be called + return getattr(self, '_form_valerror', None) return self.parent_form.form_valerror @property def form_previous_values(self): """previously posted values (on validation error)""" if self.parent_form is None: - return self._form_previous_values + # unset if restore_previous_post has not be called + return getattr(self, '_form_previous_values', {}) return self.parent_form.form_previous_values @iclassmethod @@ -209,7 +211,7 @@ warn('[3.6.1] restore_previous_post already called, remove this call', DeprecationWarning, stacklevel=2) return - forminfo = self._cw.get_session_data(sessionkey, pop=True) + forminfo = self._cw.session.data.pop(sessionkey, None) if forminfo: self._form_previous_values = forminfo['values'] self._form_valerror = forminfo['error'] diff -r b619531ddbd2 -r 
b6e250dd7a7d web/formfields.py --- a/web/formfields.py Fri Apr 23 12:40:48 2010 +0200 +++ b/web/formfields.py Fri Apr 23 12:42:53 2010 +0200 @@ -710,9 +710,9 @@ # raise UnmodifiedField instead of returning None, since the later # will try to remove already attached file if any raise UnmodifiedField() - # skip browser submitted mime type - filename, _, stream = value - # value is a 3-uple (filename, mimetype, stream) + # value is a 2-uple (filename, stream) + filename, stream = value + # XXX avoid in memory loading of posted files. Requires Binary handling changes... value = Binary(stream.read()) if not value.getvalue(): # usually an unexistant file value = None diff -r b619531ddbd2 -r b6e250dd7a7d web/http_headers.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/web/http_headers.py Fri Apr 23 12:42:53 2010 +0200 @@ -0,0 +1,1542 @@ +# This file has been extracted from the abandoned TwistedWeb2 project +# http://twistedmatrix.com/trac/wiki/TwistedWeb2 + + +from __future__ import generators + +import types, time +from calendar import timegm +import base64 +import re + +def dashCapitalize(s): + ''' Capitalize a string, making sure to treat - as a word seperator ''' + return '-'.join([ x.capitalize() for x in s.split('-')]) + +# datetime parsing and formatting +weekdayname = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'] +weekdayname_lower = [name.lower() for name in weekdayname] +monthname = [None, + 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', + 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'] +monthname_lower = [name and name.lower() for name in monthname] + +# HTTP Header parsing API + +header_case_mapping = {} + +def casemappingify(d): + global header_case_mapping + newd = dict([(key.lower(),key) for key in d.keys()]) + header_case_mapping.update(newd) + +def lowerify(d): + return dict([(key.lower(),value) for key,value in d.items()]) + + +class HeaderHandler(object): + """HeaderHandler manages header generating and parsing functions. + """ + HTTPParsers = {} + HTTPGenerators = {} + + def __init__(self, parsers=None, generators=None): + """ + @param parsers: A map of header names to parsing functions. + @type parsers: L{dict} + + @param generators: A map of header names to generating functions. + @type generators: L{dict} + """ + + if parsers: + self.HTTPParsers.update(parsers) + if generators: + self.HTTPGenerators.update(generators) + + def parse(self, name, header): + """ + Parse the given header based on its given name. + + @param name: The header name to parse. + @type name: C{str} + + @param header: A list of unparsed headers. + @type header: C{list} of C{str} + + @return: The return value is the parsed header representation, + it is dependent on the header. See the HTTP Headers document. + """ + parser = self.HTTPParsers.get(name, None) + if parser is None: + raise ValueError("No header parser for header '%s', either add one or use getHeaderRaw." % (name,)) + + try: + for p in parser: + # print "Parsing %s: %s(%s)" % (name, repr(p), repr(h)) + header = p(header) + # if isinstance(h, types.GeneratorType): + # h=list(h) + except ValueError,v: + # print v + header=None + + return header + + def generate(self, name, header): + """ + Generate the given header based on its given name. + + @param name: The header name to generate. + @type name: C{str} + + @param header: A parsed header, such as the output of + L{HeaderHandler}.parse. + + @return: C{list} of C{str} each representing a generated HTTP header. 
+ """ + generator = self.HTTPGenerators.get(name, None) + + if generator is None: + # print self.generators + raise ValueError("No header generator for header '%s', either add one or use setHeaderRaw." % (name,)) + + for g in generator: + header = g(header) + + #self._raw_headers[name] = h + return header + + def updateParsers(self, parsers): + """Update en masse the parser maps. + + @param parsers: Map of header names to parser chains. + @type parsers: C{dict} + """ + casemappingify(parsers) + self.HTTPParsers.update(lowerify(parsers)) + + def addParser(self, name, value): + """Add an individual parser chain for the given header. + + @param name: Name of the header to add + @type name: C{str} + + @param value: The parser chain + @type value: C{str} + """ + self.updateParsers({name: value}) + + def updateGenerators(self, generators): + """Update en masse the generator maps. + + @param parsers: Map of header names to generator chains. + @type parsers: C{dict} + """ + casemappingify(generators) + self.HTTPGenerators.update(lowerify(generators)) + + def addGenerators(self, name, value): + """Add an individual generator chain for the given header. + + @param name: Name of the header to add + @type name: C{str} + + @param value: The generator chain + @type value: C{str} + """ + self.updateGenerators({name: value}) + + def update(self, parsers, generators): + """Conveniently update parsers and generators all at once. + """ + self.updateParsers(parsers) + self.updateGenerators(generators) + + +DefaultHTTPHandler = HeaderHandler() + + +## HTTP DateTime parser +def parseDateTime(dateString): + """Convert an HTTP date string (one of three formats) to seconds since epoch.""" + parts = dateString.split() + + if not parts[0][0:3].lower() in weekdayname_lower: + # Weekday is stupid. Might have been omitted. + try: + return parseDateTime("Sun, "+dateString) + except ValueError: + # Guess not. + pass + + partlen = len(parts) + if (partlen == 5 or partlen == 6) and parts[1].isdigit(): + # 1st date format: Sun, 06 Nov 1994 08:49:37 GMT + # (Note: "GMT" is literal, not a variable timezone) + # (also handles without "GMT") + # This is the normal format + day = parts[1] + month = parts[2] + year = parts[3] + time = parts[4] + elif (partlen == 3 or partlen == 4) and parts[1].find('-') != -1: + # 2nd date format: Sunday, 06-Nov-94 08:49:37 GMT + # (Note: "GMT" is literal, not a variable timezone) + # (also handles without without "GMT") + # Two digit year, yucko. + day, month, year = parts[1].split('-') + time = parts[2] + year=int(year) + if year < 69: + year = year + 2000 + elif year < 100: + year = year + 1900 + elif len(parts) == 5: + # 3rd date format: Sun Nov 6 08:49:37 1994 + # ANSI C asctime() format. 
+ day = parts[2] + month = parts[1] + year = parts[4] + time = parts[3] + else: + raise ValueError("Unknown datetime format %r" % dateString) + + day = int(day) + month = int(monthname_lower.index(month.lower())) + year = int(year) + hour, min, sec = map(int, time.split(':')) + return int(timegm((year, month, day, hour, min, sec))) + + +##### HTTP tokenizer +class Token(str): + __slots__=[] + tokens = {} + def __new__(self, char): + token = Token.tokens.get(char) + if token is None: + Token.tokens[char] = token = str.__new__(self, char) + return token + + def __repr__(self): + return "Token(%s)" % str.__repr__(self) + + +http_tokens = " \t\"()<>@,;:\\/[]?={}" +http_ctls = "\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x7f" + +def tokenize(header, foldCase=True): + """Tokenize a string according to normal HTTP header parsing rules. + + In particular: + - Whitespace is irrelevant and eaten next to special separator tokens. + Its existance (but not amount) is important between character strings. + - Quoted string support including embedded backslashes. + - Case is insignificant (and thus lowercased), except in quoted strings. + (unless foldCase=False) + - Multiple headers are concatenated with ',' + + NOTE: not all headers can be parsed with this function. + + Takes a raw header value (list of strings), and + Returns a generator of strings and Token class instances. + """ + tokens=http_tokens + ctls=http_ctls + + string = ",".join(header) + list = [] + start = 0 + cur = 0 + quoted = False + qpair = False + inSpaces = -1 + qstring = None + + for x in string: + if quoted: + if qpair: + qpair = False + qstring = qstring+string[start:cur-1]+x + start = cur+1 + elif x == '\\': + qpair = True + elif x == '"': + quoted = False + yield qstring+string[start:cur] + qstring=None + start = cur+1 + elif x in tokens: + if start != cur: + if foldCase: + yield string[start:cur].lower() + else: + yield string[start:cur] + + start = cur+1 + if x == '"': + quoted = True + qstring = "" + inSpaces = False + elif x in " \t": + if inSpaces is False: + inSpaces = True + else: + inSpaces = -1 + yield Token(x) + elif x in ctls: + raise ValueError("Invalid control character: %d in header" % ord(x)) + else: + if inSpaces is True: + yield Token(' ') + inSpaces = False + + inSpaces = False + cur = cur+1 + + if qpair: + raise ValueError, "Missing character after '\\'" + if quoted: + raise ValueError, "Missing end quote" + + if start != cur: + if foldCase: + yield string[start:cur].lower() + else: + yield string[start:cur] + +def split(seq, delim): + """The same as str.split but works on arbitrary sequences. + Too bad it's not builtin to python!""" + + cur = [] + for item in seq: + if item == delim: + yield cur + cur = [] + else: + cur.append(item) + yield cur + +# def find(seq, *args): +# """The same as seq.index but returns -1 if not found, instead +# Too bad it's not builtin to python!""" +# try: +# return seq.index(value, *args) +# except ValueError: +# return -1 + + +def filterTokens(seq): + """Filter out instances of Token, leaving only a list of strings. + + Used instead of a more specific parsing method (e.g. splitting on commas) + when only strings are expected, so as to be a little lenient. + + Apache does it this way and has some comments about broken clients which + forget commas (?), so I'm doing it the same way. It shouldn't + hurt anything, in any case. 
+ """ + + l=[] + for x in seq: + if not isinstance(x, Token): + l.append(x) + return l + +##### parser utilities: +def checkSingleToken(tokens): + if len(tokens) != 1: + raise ValueError, "Expected single token, not %s." % (tokens,) + return tokens[0] + +def parseKeyValue(val): + if len(val) == 1: + return val[0],None + elif len(val) == 3 and val[1] == Token('='): + return val[0],val[2] + raise ValueError, "Expected key or key=value, but got %s." % (val,) + +def parseArgs(field): + args=split(field, Token(';')) + val = args.next() + args = [parseKeyValue(arg) for arg in args] + return val,args + +def listParser(fun): + """Return a function which applies 'fun' to every element in the + comma-separated list""" + def listParserHelper(tokens): + fields = split(tokens, Token(',')) + for field in fields: + if len(field) != 0: + yield fun(field) + + return listParserHelper + +def last(seq): + """Return seq[-1]""" + + return seq[-1] + +##### Generation utilities +def quoteString(s): + return '"%s"' % s.replace('\\', '\\\\').replace('"', '\\"') + +def listGenerator(fun): + """Return a function which applies 'fun' to every element in + the given list, then joins the result with generateList""" + def listGeneratorHelper(l): + return generateList([fun(e) for e in l]) + + return listGeneratorHelper + +def generateList(seq): + return ", ".join(seq) + +def singleHeader(item): + return [item] + +def generateKeyValues(kvs): + l = [] + # print kvs + for k,v in kvs: + if v is None: + l.append('%s' % k) + else: + l.append('%s=%s' % (k,v)) + return ";".join(l) + + +class MimeType(object): + def fromString(klass, mimeTypeString): + """Generate a MimeType object from the given string. + + @param mimeTypeString: The mimetype to parse + + @return: L{MimeType} + """ + return DefaultHTTPHandler.parse('content-type', [mimeTypeString]) + + fromString = classmethod(fromString) + + def __init__(self, mediaType, mediaSubtype, params={}, **kwargs): + """ + @type mediaType: C{str} + + @type mediaSubtype: C{str} + + @type params: C{dict} + """ + self.mediaType = mediaType + self.mediaSubtype = mediaSubtype + self.params = dict(params) + + if kwargs: + self.params.update(kwargs) + + def __eq__(self, other): + if not isinstance(other, MimeType): return NotImplemented + return (self.mediaType == other.mediaType and + self.mediaSubtype == other.mediaSubtype and + self.params == other.params) + + def __ne__(self, other): + return not self.__eq__(other) + + def __repr__(self): + return "MimeType(%r, %r, %r)" % (self.mediaType, self.mediaSubtype, self.params) + + def __hash__(self): + return hash(self.mediaType)^hash(self.mediaSubtype)^hash(tuple(self.params.iteritems())) + +##### Specific header parsers. +def parseAccept(field): + type,args = parseArgs(field) + + if len(type) != 3 or type[1] != Token('/'): + raise ValueError, "MIME Type "+str(type)+" invalid." + + # okay, this spec is screwy. A 'q' parameter is used as the separator + # between MIME parameters and (as yet undefined) additional HTTP + # parameters. + + num = 0 + for arg in args: + if arg[0] == 'q': + mimeparams=tuple(args[0:num]) + params=args[num:] + break + num = num + 1 + else: + mimeparams=tuple(args) + params=[] + + # Default values for parameters: + qval = 1.0 + + # Parse accept parameters: + for param in params: + if param[0] =='q': + qval = float(param[1]) + else: + # Warn? ignored parameter. 
+ pass + + ret = MimeType(type[0],type[2],mimeparams),qval + return ret + +def parseAcceptQvalue(field): + type,args=parseArgs(field) + + type = checkSingleToken(type) + + qvalue = 1.0 # Default qvalue is 1 + for arg in args: + if arg[0] == 'q': + qvalue = float(arg[1]) + return type,qvalue + +def addDefaultCharset(charsets): + if charsets.get('*') is None and charsets.get('iso-8859-1') is None: + charsets['iso-8859-1'] = 1.0 + return charsets + +def addDefaultEncoding(encodings): + if encodings.get('*') is None and encodings.get('identity') is None: + # RFC doesn't specify a default value for identity, only that it + # "is acceptable" if not mentioned. Thus, give it a very low qvalue. + encodings['identity'] = .0001 + return encodings + + +def parseContentType(header): + # Case folding is disabled for this header, because of use of + # Content-Type: multipart/form-data; boundary=CaSeFuLsTuFf + # So, we need to explicitly .lower() the type/subtype and arg keys. + + type,args = parseArgs(header) + + if len(type) != 3 or type[1] != Token('/'): + raise ValueError, "MIME Type "+str(type)+" invalid." + + args = [(kv[0].lower(), kv[1]) for kv in args] + + return MimeType(type[0].lower(), type[2].lower(), tuple(args)) + +def parseContentMD5(header): + try: + return base64.decodestring(header) + except Exception,e: + raise ValueError(e) + +def parseContentRange(header): + """Parse a content-range header into (kind, start, end, realLength). + + realLength might be None if real length is not known ('*'). + start and end might be None if start,end unspecified (for response code 416) + """ + kind, other = header.strip().split() + if kind.lower() != "bytes": + raise ValueError("a range of type %r is not supported") + startend, realLength = other.split("/") + if startend.strip() == '*': + start,end=None,None + else: + start, end = map(int, startend.split("-")) + if realLength == "*": + realLength = None + else: + realLength = int(realLength) + return (kind, start, end, realLength) + +def parseExpect(field): + type,args=parseArgs(field) + + type=parseKeyValue(type) + return (type[0], (lambda *args:args)(type[1], *args)) + +def parseExpires(header): + # """HTTP/1.1 clients and caches MUST treat other invalid date formats, + # especially including the value 0, as in the past (i.e., "already expired").""" + + try: + return parseDateTime(header) + except ValueError: + return 0 + +def parseIfModifiedSince(header): + # Ancient versions of netscape and *current* versions of MSIE send + # If-Modified-Since: Thu, 05 Aug 2004 12:57:27 GMT; length=123 + # which is blantantly RFC-violating and not documented anywhere + # except bug-trackers for web frameworks. + + # So, we'll just strip off everything after a ';'. + return parseDateTime(header.split(';', 1)[0]) + +def parseIfRange(headers): + try: + return ETag.parse(tokenize(headers)) + except ValueError: + return parseDateTime(last(headers)) + +def parseRange(range): + range = list(range) + if len(range) < 3 or range[1] != Token('='): + raise ValueError("Invalid range header format: %s" %(range,)) + + type=range[0] + if type != 'bytes': + raise ValueError("Unknown range unit: %s." 
% (type,)) + rangeset=split(range[2:], Token(',')) + ranges = [] + + for byterangespec in rangeset: + if len(byterangespec) != 1: + raise ValueError("Invalid range header format: %s" % (range,)) + start,end=byterangespec[0].split('-') + + if not start and not end: + raise ValueError("Invalid range header format: %s" % (range,)) + + if start: + start = int(start) + else: + start = None + + if end: + end = int(end) + else: + end = None + + if start and end and start > end: + raise ValueError("Invalid range header, start > end: %s" % (range,)) + ranges.append((start,end)) + return type,ranges + +def parseRetryAfter(header): + try: + # delta seconds + return time.time() + int(header) + except ValueError: + # or datetime + return parseDateTime(header) + +# WWW-Authenticate and Authorization + +def parseWWWAuthenticate(tokenized): + headers = [] + + tokenList = list(tokenized) + + while tokenList: + scheme = tokenList.pop(0) + challenge = {} + last = None + kvChallenge = False + + while tokenList: + token = tokenList.pop(0) + if token == Token('='): + kvChallenge = True + challenge[last] = tokenList.pop(0) + last = None + + elif token == Token(','): + if kvChallenge: + if len(tokenList) > 1 and tokenList[1] != Token('='): + break + + else: + break + + else: + last = token + + if last and scheme and not challenge and not kvChallenge: + challenge = last + last = None + + headers.append((scheme, challenge)) + + if last and last not in (Token('='), Token(',')): + if headers[-1] == (scheme, challenge): + scheme = last + challenge = {} + headers.append((scheme, challenge)) + + return headers + +def parseAuthorization(header): + scheme, rest = header.split(' ', 1) + # this header isn't tokenized because it may eat characters + # in the unquoted base64 encoded credentials + return scheme.lower(), rest + +#### Header generators +def generateAccept(accept): + mimeType,q = accept + + out="%s/%s"%(mimeType.mediaType, mimeType.mediaSubtype) + if mimeType.params: + out+=';'+generateKeyValues(mimeType.params.iteritems()) + + if q != 1.0: + out+=(';q=%.3f' % (q,)).rstrip('0').rstrip('.') + + return out + +def removeDefaultEncoding(seq): + for item in seq: + if item[0] != 'identity' or item[1] != .0001: + yield item + +def generateAcceptQvalue(keyvalue): + if keyvalue[1] == 1.0: + return "%s" % keyvalue[0:1] + else: + return ("%s;q=%.3f" % keyvalue).rstrip('0').rstrip('.') + +def parseCacheControl(kv): + k, v = parseKeyValue(kv) + if k == 'max-age' or k == 'min-fresh' or k == 's-maxage': + # Required integer argument + if v is None: + v = 0 + else: + v = int(v) + elif k == 'max-stale': + # Optional integer argument + if v is not None: + v = int(v) + elif k == 'private' or k == 'no-cache': + # Optional list argument + if v is not None: + v = [field.strip().lower() for field in v.split(',')] + return k, v + +def generateCacheControl((k, v)): + if v is None: + return str(k) + else: + if k == 'no-cache' or k == 'private': + # quoted list of values + v = quoteString(generateList( + [header_case_mapping.get(name) or dashCapitalize(name) for name in v])) + return '%s=%s' % (k,v) + +def generateContentRange(tup): + """tup is (type, start, end, len) + len can be None. 
+ """ + type, start, end, len = tup + if len == None: + len = '*' + else: + len = int(len) + if start == None and end == None: + startend = '*' + else: + startend = '%d-%d' % (start, end) + + return '%s %s/%s' % (type, startend, len) + +def generateDateTime(secSinceEpoch): + """Convert seconds since epoch to HTTP datetime string.""" + year, month, day, hh, mm, ss, wd, y, z = time.gmtime(secSinceEpoch) + s = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % ( + weekdayname[wd], + day, monthname[month], year, + hh, mm, ss) + return s + +def generateExpect(item): + if item[1][0] is None: + out = '%s' % (item[0],) + else: + out = '%s=%s' % (item[0], item[1][0]) + if len(item[1]) > 1: + out += ';'+generateKeyValues(item[1][1:]) + return out + +def generateRange(range): + def noneOr(s): + if s is None: + return '' + return s + + type,ranges=range + + if type != 'bytes': + raise ValueError("Unknown range unit: "+type+".") + + return (type+'='+ + ','.join(['%s-%s' % (noneOr(startend[0]), noneOr(startend[1])) + for startend in ranges])) + +def generateRetryAfter(when): + # always generate delta seconds format + return str(int(when - time.time())) + +def generateContentType(mimeType): + out="%s/%s"%(mimeType.mediaType, mimeType.mediaSubtype) + if mimeType.params: + out+=';'+generateKeyValues(mimeType.params.iteritems()) + return out + +def generateIfRange(dateOrETag): + if isinstance(dateOrETag, ETag): + return dateOrETag.generate() + else: + return generateDateTime(dateOrETag) + +# WWW-Authenticate and Authorization + +def generateWWWAuthenticate(headers): + _generated = [] + for seq in headers: + scheme, challenge = seq[0], seq[1] + + # If we're going to parse out to something other than a dict + # we need to be able to generate from something other than a dict + + try: + l = [] + for k,v in dict(challenge).iteritems(): + l.append("%s=%s" % (k, quoteString(v))) + + _generated.append("%s %s" % (scheme, ", ".join(l))) + except ValueError: + _generated.append("%s %s" % (scheme, challenge)) + + return _generated + +def generateAuthorization(seq): + return [' '.join(seq)] + + +#### +class ETag(object): + def __init__(self, tag, weak=False): + self.tag = str(tag) + self.weak = weak + + def match(self, other, strongCompare): + # Sec 13.3. + # The strong comparison function: in order to be considered equal, both + # validators MUST be identical in every way, and both MUST NOT be weak. + # + # The weak comparison function: in order to be considered equal, both + # validators MUST be identical in every way, but either or both of + # them MAY be tagged as "weak" without affecting the result. 
+ + if not isinstance(other, ETag) or other.tag != self.tag: + return False + + if strongCompare and (other.weak or self.weak): + return False + return True + + def __eq__(self, other): + return isinstance(other, ETag) and other.tag == self.tag and other.weak == self.weak + + def __ne__(self, other): + return not self.__eq__(other) + + def __repr__(self): + return "Etag(%r, weak=%r)" % (self.tag, self.weak) + + def parse(tokens): + tokens=tuple(tokens) + if len(tokens) == 1 and not isinstance(tokens[0], Token): + return ETag(tokens[0]) + + if(len(tokens) == 3 and tokens[0] == "w" + and tokens[1] == Token('/')): + return ETag(tokens[2], weak=True) + + raise ValueError("Invalid ETag.") + + parse=staticmethod(parse) + + def generate(self): + if self.weak: + return 'W/'+quoteString(self.tag) + else: + return quoteString(self.tag) + +def parseStarOrETag(tokens): + tokens=tuple(tokens) + if tokens == ('*',): + return '*' + else: + return ETag.parse(tokens) + +def generateStarOrETag(etag): + if etag=='*': + return etag + else: + return etag.generate() + +#### Cookies. Blech! +class Cookie(object): + # __slots__ = ['name', 'value', 'path', 'domain', 'ports', 'expires', 'discard', 'secure', 'comment', 'commenturl', 'version'] + + def __init__(self, name, value, path=None, domain=None, ports=None, expires=None, discard=False, secure=False, comment=None, commenturl=None, version=0): + self.name=name + self.value=value + self.path=path + self.domain=domain + self.ports=ports + self.expires=expires + self.discard=discard + self.secure=secure + self.comment=comment + self.commenturl=commenturl + self.version=version + + def __repr__(self): + s="Cookie(%r=%r" % (self.name, self.value) + if self.path is not None: s+=", path=%r" % (self.path,) + if self.domain is not None: s+=", domain=%r" % (self.domain,) + if self.ports is not None: s+=", ports=%r" % (self.ports,) + if self.expires is not None: s+=", expires=%r" % (self.expires,) + if self.secure is not False: s+=", secure=%r" % (self.secure,) + if self.comment is not None: s+=", comment=%r" % (self.comment,) + if self.commenturl is not None: s+=", commenturl=%r" % (self.commenturl,) + if self.version != 0: s+=", version=%r" % (self.version,) + s+=")" + return s + + def __eq__(self, other): + return (isinstance(other, Cookie) and + other.path == self.path and + other.domain == self.domain and + other.ports == self.ports and + other.expires == self.expires and + other.secure == self.secure and + other.comment == self.comment and + other.commenturl == self.commenturl and + other.version == self.version) + + def __ne__(self, other): + return not self.__eq__(other) + + +def parseCookie(headers): + """Bleargh, the cookie spec sucks. + This surely needs interoperability testing. + There are two specs that are supported: + Version 0) http://wp.netscape.com/newsref/std/cookie_spec.html + Version 1) http://www.faqs.org/rfcs/rfc2965.html + """ + + cookies = [] + # There can't really be multiple cookie headers according to RFC, because + # if multiple headers are allowed, they must be joinable with ",". + # Neither new RFC2965 cookies nor old netscape cookies are. 
+ + header = ';'.join(headers) + if header[0:8].lower() == "$version": + # RFC2965 cookie + h=tokenize([header], foldCase=False) + r_cookies = split(h, Token(',')) + for r_cookie in r_cookies: + last_cookie = None + rr_cookies = split(r_cookie, Token(';')) + for cookie in rr_cookies: + nameval = tuple(split(cookie, Token('='))) + if len(nameval) == 2: + (name,), (value,) = nameval + else: + (name,), = nameval + value = None + + name=name.lower() + if name == '$version': + continue + if name[0] == '$': + if last_cookie is not None: + if name == '$path': + last_cookie.path=value + elif name == '$domain': + last_cookie.domain=value + elif name == '$port': + if value is None: + last_cookie.ports = () + else: + last_cookie.ports=tuple([int(s) for s in value.split(',')]) + else: + last_cookie = Cookie(name, value, version=1) + cookies.append(last_cookie) + else: + # Oldstyle cookies don't do quoted strings or anything sensible. + # All characters are valid for names except ';' and '=', and all + # characters are valid for values except ';'. Spaces are stripped, + # however. + r_cookies = header.split(';') + for r_cookie in r_cookies: + name,value = r_cookie.split('=', 1) + name=name.strip(' \t') + value=value.strip(' \t') + + cookies.append(Cookie(name, value)) + + return cookies + +cookie_validname = "[^"+re.escape(http_tokens+http_ctls)+"]*$" +cookie_validname_re = re.compile(cookie_validname) +cookie_validvalue = cookie_validname+'|"([^"]|\\\\")*"$' +cookie_validvalue_re = re.compile(cookie_validvalue) + +def generateCookie(cookies): + # There's a fundamental problem with the two cookie specifications. + # They both use the "Cookie" header, and the RFC Cookie header only allows + # one version to be specified. Thus, when you have a collection of V0 and + # V1 cookies, you have to either send them all as V0 or send them all as + # V1. + + # I choose to send them all as V1. + + # You might think converting a V0 cookie to a V1 cookie would be lossless, + # but you'd be wrong. If you do the conversion, and a V0 parser tries to + # read the cookie, it will see a modified form of the cookie, in cases + # where quotes must be added to conform to proper V1 syntax. + # (as a real example: "Cookie: cartcontents=oid:94680,qty:1,auto:0,esp:y") + + # However, that is what we will do, anyways. It has a high probability of + # breaking applications that only handle oldstyle cookies, where some other + # application set a newstyle cookie that is applicable over for site + # (or host), AND where the oldstyle cookie uses a value which is invalid + # syntax in a newstyle cookie. + + # Also, the cookie name *cannot* be quoted in V1, so some cookies just + # cannot be converted at all. (e.g. "Cookie: phpAds_capAd[32]=2"). These + # are just dicarded during conversion. + + # As this is an unsolvable problem, I will pretend I can just say + # OH WELL, don't do that, or else upgrade your old applications to have + # newstyle cookie parsers. + + # I will note offhandedly that there are *many* sites which send V0 cookies + # that are not valid V1 cookie syntax. About 20% for my cookies file. + # However, they do not generally mix them with V1 cookies, so this isn't + # an issue, at least right now. I have not tested to see how many of those + # webapps support RFC2965 V1 cookies. I suspect not many. + + max_version = max([cookie.version for cookie in cookies]) + + if max_version == 0: + # no quoting or anything. 
+ return ';'.join(["%s=%s" % (cookie.name, cookie.value) for cookie in cookies]) + else: + str_cookies = ['$Version="1"'] + for cookie in cookies: + if cookie.version == 0: + # Version 0 cookie: we make sure the name and value are valid + # V1 syntax. + + # If they are, we use them as is. This means in *most* cases, + # the cookie will look literally the same on output as it did + # on input. + # If it isn't a valid name, ignore the cookie. + # If it isn't a valid value, quote it and hope for the best on + # the other side. + + if cookie_validname_re.match(cookie.name) is None: + continue + + value=cookie.value + if cookie_validvalue_re.match(cookie.value) is None: + value = quoteString(value) + + str_cookies.append("%s=%s" % (cookie.name, value)) + else: + # V1 cookie, nice and easy + str_cookies.append("%s=%s" % (cookie.name, quoteString(cookie.value))) + + if cookie.path: + str_cookies.append("$Path=%s" % quoteString(cookie.path)) + if cookie.domain: + str_cookies.append("$Domain=%s" % quoteString(cookie.domain)) + if cookie.ports is not None: + if len(cookie.ports) == 0: + str_cookies.append("$Port") + else: + str_cookies.append("$Port=%s" % quoteString(",".join([str(x) for x in cookie.ports]))) + return ';'.join(str_cookies) + +def parseSetCookie(headers): + setCookies = [] + for header in headers: + try: + parts = header.split(';') + l = [] + + for part in parts: + namevalue = part.split('=',1) + if len(namevalue) == 1: + name=namevalue[0] + value=None + else: + name,value=namevalue + value=value.strip(' \t') + + name=name.strip(' \t') + + l.append((name, value)) + + setCookies.append(makeCookieFromList(l, True)) + except ValueError: + # If we can't parse one Set-Cookie, ignore it, + # but not the rest of Set-Cookies. + pass + return setCookies + +def parseSetCookie2(toks): + outCookies = [] + for cookie in [[parseKeyValue(x) for x in split(y, Token(';'))] + for y in split(toks, Token(','))]: + try: + outCookies.append(makeCookieFromList(cookie, False)) + except ValueError: + # Again, if we can't handle one cookie -- ignore it. + pass + return outCookies + +def makeCookieFromList(tup, netscapeFormat): + name, value = tup[0] + if name is None or value is None: + raise ValueError("Cookie has missing name or value") + if name.startswith("$"): + raise ValueError("Invalid cookie name: %r, starts with '$'." 
% name) + cookie = Cookie(name, value) + hadMaxAge = False + + for name,value in tup[1:]: + name = name.lower() + + if value is None: + if name in ("discard", "secure"): + # Boolean attrs + value = True + elif name != "port": + # Can be either boolean or explicit + continue + + if name in ("comment", "commenturl", "discard", "domain", "path", "secure"): + # simple cases + setattr(cookie, name, value) + elif name == "expires" and not hadMaxAge: + if netscapeFormat and value[0] == '"' and value[-1] == '"': + value = value[1:-1] + cookie.expires = parseDateTime(value) + elif name == "max-age": + hadMaxAge = True + cookie.expires = int(value) + time.time() + elif name == "port": + if value is None: + cookie.ports = () + else: + if netscapeFormat and value[0] == '"' and value[-1] == '"': + value = value[1:-1] + cookie.ports = tuple([int(s) for s in value.split(',')]) + elif name == "version": + cookie.version = int(value) + + return cookie + + +def generateSetCookie(cookies): + setCookies = [] + for cookie in cookies: + out = ["%s=%s" % (cookie.name, cookie.value)] + if cookie.expires: + out.append("expires=%s" % generateDateTime(cookie.expires)) + if cookie.path: + out.append("path=%s" % cookie.path) + if cookie.domain: + out.append("domain=%s" % cookie.domain) + if cookie.secure: + out.append("secure") + + setCookies.append('; '.join(out)) + return setCookies + +def generateSetCookie2(cookies): + setCookies = [] + for cookie in cookies: + out = ["%s=%s" % (cookie.name, quoteString(cookie.value))] + if cookie.comment: + out.append("Comment=%s" % quoteString(cookie.comment)) + if cookie.commenturl: + out.append("CommentURL=%s" % quoteString(cookie.commenturl)) + if cookie.discard: + out.append("Discard") + if cookie.domain: + out.append("Domain=%s" % quoteString(cookie.domain)) + if cookie.expires: + out.append("Max-Age=%s" % (cookie.expires - time.time())) + if cookie.path: + out.append("Path=%s" % quoteString(cookie.path)) + if cookie.ports is not None: + if len(cookie.ports) == 0: + out.append("Port") + else: + out.append("Port=%s" % quoteString(",".join([str(x) for x in cookie.ports]))) + if cookie.secure: + out.append("Secure") + out.append('Version="1"') + setCookies.append('; '.join(out)) + return setCookies + +def parseDepth(depth): + if depth not in ("0", "1", "infinity"): + raise ValueError("Invalid depth header value: %s" % (depth,)) + return depth + +def parseOverWrite(overwrite): + if overwrite == "F": + return False + elif overwrite == "T": + return True + raise ValueError("Invalid overwrite header value: %s" % (overwrite,)) + +def generateOverWrite(overwrite): + if overwrite: + return "T" + else: + return "F" + +##### Random stuff that looks useful. +# def sortMimeQuality(s): +# def sorter(item1, item2): +# if item1[0] == '*': +# if item2[0] == '*': +# return 0 + + +# def sortQuality(s): +# def sorter(item1, item2): +# if item1[1] < item2[1]: +# return -1 +# if item1[1] < item2[1]: +# return 1 +# if item1[0] == item2[0]: +# return 0 + + +# def getMimeQuality(mimeType, accepts): +# type,args = parseArgs(mimeType) +# type=type.split(Token('/')) +# if len(type) != 2: +# raise ValueError, "MIME Type "+s+" invalid." 
+ +# for accept in accepts: +# accept,acceptQual=accept +# acceptType=accept[0:1] +# acceptArgs=accept[2] + +# if ((acceptType == type or acceptType == (type[0],'*') or acceptType==('*','*')) and +# (args == acceptArgs or len(acceptArgs) == 0)): +# return acceptQual + +# def getQuality(type, accepts): +# qual = accepts.get(type) +# if qual is not None: +# return qual + +# return accepts.get('*') + +# Headers object +class __RecalcNeeded(object): + def __repr__(self): + return "" + +_RecalcNeeded = __RecalcNeeded() + +class Headers(object): + """This class stores the HTTP headers as both a parsed representation and + the raw string representation. It converts between the two on demand.""" + + def __init__(self, headers=None, rawHeaders=None, handler=DefaultHTTPHandler): + self._raw_headers = {} + self._headers = {} + self.handler = handler + if headers is not None: + for key, value in headers.iteritems(): + self.setHeader(key, value) + if rawHeaders is not None: + for key, value in rawHeaders.iteritems(): + self.setRawHeaders(key, value) + + def _setRawHeaders(self, headers): + self._raw_headers = headers + self._headers = {} + + def _toParsed(self, name): + r = self._raw_headers.get(name, None) + h = self.handler.parse(name, r) + if h is not None: + self._headers[name] = h + return h + + def _toRaw(self, name): + h = self._headers.get(name, None) + r = self.handler.generate(name, h) + if r is not None: + self._raw_headers[name] = r + return r + + def hasHeader(self, name): + """Does a header with the given name exist?""" + name=name.lower() + return self._raw_headers.has_key(name) + + def getRawHeaders(self, name, default=None): + """Returns a list of headers matching the given name as the raw string given.""" + + name=name.lower() + raw_header = self._raw_headers.get(name, default) + if raw_header is not _RecalcNeeded: + return raw_header + + return self._toRaw(name) + + def getHeader(self, name, default=None): + """Ret9urns the parsed representation of the given header. + The exact form of the return value depends on the header in question. + + If no parser for the header exists, raise ValueError. + + If the header doesn't exist, return default (or None if not specified) + """ + name=name.lower() + parsed = self._headers.get(name, default) + if parsed is not _RecalcNeeded: + return parsed + return self._toParsed(name) + + def setRawHeaders(self, name, value): + """Sets the raw representation of the given header. + Value should be a list of strings, each being one header of the + given name. + """ + name=name.lower() + self._raw_headers[name] = value + self._headers[name] = _RecalcNeeded + + def setHeader(self, name, value): + """Sets the parsed representation of the given header. + Value should be a list of objects whose exact form depends + on the header in question. + """ + name=name.lower() + self._raw_headers[name] = _RecalcNeeded + self._headers[name] = value + + def addRawHeader(self, name, value): + """ + Add a raw value to a header that may or may not already exist. + If it exists, add it as a separate header to output; do not + replace anything. 
+ """ + name=name.lower() + raw_header = self._raw_headers.get(name) + if raw_header is None: + # No header yet + raw_header = [] + self._raw_headers[name] = raw_header + elif raw_header is _RecalcNeeded: + raw_header = self._toRaw(name) + + raw_header.append(value) + self._headers[name] = _RecalcNeeded + + def removeHeader(self, name): + """Removes the header named.""" + + name=name.lower() + if self._raw_headers.has_key(name): + del self._raw_headers[name] + del self._headers[name] + + def __repr__(self): + return ''% (self._raw_headers, self._headers) + + def canonicalNameCaps(self, name): + """Return the name with the canonical capitalization, if known, + otherwise, Caps-After-Dashes""" + return header_case_mapping.get(name) or dashCapitalize(name) + + def getAllRawHeaders(self): + """Return an iterator of key,value pairs of all headers + contained in this object, as strings. The keys are capitalized + in canonical capitalization.""" + for k,v in self._raw_headers.iteritems(): + if v is _RecalcNeeded: + v = self._toRaw(k) + yield self.canonicalNameCaps(k), v + + def makeImmutable(self): + """Make this header set immutable. All mutating operations will + raise an exception.""" + self.setHeader = self.setRawHeaders = self.removeHeader = self._mutateRaise + + def _mutateRaise(self, *args): + raise AttributeError("This header object is immutable as the headers have already been sent.") + + +"""The following dicts are all mappings of header to list of operations + to perform. The first operation should generally be 'tokenize' if the + header can be parsed according to the normal tokenization rules. If + it cannot, generally the first thing you want to do is take only the + last instance of the header (in case it was sent multiple times, which + is strictly an error, but we're nice.). + """ + +iteritems = lambda x: x.iteritems() + + +parser_general_headers = { + 'Cache-Control':(tokenize, listParser(parseCacheControl), dict), + 'Connection':(tokenize,filterTokens), + 'Date':(last,parseDateTime), +# 'Pragma':tokenize +# 'Trailer':tokenize + 'Transfer-Encoding':(tokenize,filterTokens), +# 'Upgrade':tokenize +# 'Via':tokenize,stripComment +# 'Warning':tokenize +} + +generator_general_headers = { + 'Cache-Control':(iteritems, listGenerator(generateCacheControl), singleHeader), + 'Connection':(generateList,singleHeader), + 'Date':(generateDateTime,singleHeader), +# 'Pragma': +# 'Trailer': + 'Transfer-Encoding':(generateList,singleHeader), +# 'Upgrade': +# 'Via': +# 'Warning': +} + +parser_request_headers = { + 'Accept': (tokenize, listParser(parseAccept), dict), + 'Accept-Charset': (tokenize, listParser(parseAcceptQvalue), dict, addDefaultCharset), + 'Accept-Encoding':(tokenize, listParser(parseAcceptQvalue), dict, addDefaultEncoding), + 'Accept-Language':(tokenize, listParser(parseAcceptQvalue), dict), + 'Authorization': (last, parseAuthorization), + 'Cookie':(parseCookie,), + 'Expect':(tokenize, listParser(parseExpect), dict), + 'From':(last,), + 'Host':(last,), + 'If-Match':(tokenize, listParser(parseStarOrETag), list), + 'If-Modified-Since':(last, parseIfModifiedSince), + 'If-None-Match':(tokenize, listParser(parseStarOrETag), list), + 'If-Range':(parseIfRange,), + 'If-Unmodified-Since':(last,parseDateTime), + 'Max-Forwards':(last,int), +# 'Proxy-Authorization':str, # what is "credentials" + 'Range':(tokenize, parseRange), + 'Referer':(last,str), # TODO: URI object? 
+ 'TE':(tokenize, listParser(parseAcceptQvalue), dict), + 'User-Agent':(last,str), +} + +generator_request_headers = { + 'Accept': (iteritems,listGenerator(generateAccept),singleHeader), + 'Accept-Charset': (iteritems, listGenerator(generateAcceptQvalue),singleHeader), + 'Accept-Encoding': (iteritems, removeDefaultEncoding, listGenerator(generateAcceptQvalue),singleHeader), + 'Accept-Language': (iteritems, listGenerator(generateAcceptQvalue),singleHeader), + 'Authorization': (generateAuthorization,), # what is "credentials" + 'Cookie':(generateCookie,singleHeader), + 'Expect':(iteritems, listGenerator(generateExpect), singleHeader), + 'From':(str,singleHeader), + 'Host':(str,singleHeader), + 'If-Match':(listGenerator(generateStarOrETag), singleHeader), + 'If-Modified-Since':(generateDateTime,singleHeader), + 'If-None-Match':(listGenerator(generateStarOrETag), singleHeader), + 'If-Range':(generateIfRange, singleHeader), + 'If-Unmodified-Since':(generateDateTime,singleHeader), + 'Max-Forwards':(str, singleHeader), +# 'Proxy-Authorization':str, # what is "credentials" + 'Range':(generateRange,singleHeader), + 'Referer':(str,singleHeader), + 'TE': (iteritems, listGenerator(generateAcceptQvalue),singleHeader), + 'User-Agent':(str,singleHeader), +} + +parser_response_headers = { + 'Accept-Ranges':(tokenize, filterTokens), + 'Age':(last,int), + 'ETag':(tokenize, ETag.parse), + 'Location':(last,), # TODO: URI object? +# 'Proxy-Authenticate' + 'Retry-After':(last, parseRetryAfter), + 'Server':(last,), + 'Set-Cookie':(parseSetCookie,), + 'Set-Cookie2':(tokenize, parseSetCookie2), + 'Vary':(tokenize, filterTokens), + 'WWW-Authenticate': (lambda h: tokenize(h, foldCase=False), + parseWWWAuthenticate,) +} + +generator_response_headers = { + 'Accept-Ranges':(generateList, singleHeader), + 'Age':(str, singleHeader), + 'ETag':(ETag.generate, singleHeader), + 'Location':(str, singleHeader), +# 'Proxy-Authenticate' + 'Retry-After':(generateRetryAfter, singleHeader), + 'Server':(str, singleHeader), + 'Set-Cookie':(generateSetCookie,), + 'Set-Cookie2':(generateSetCookie2,), + 'Vary':(generateList, singleHeader), + 'WWW-Authenticate':(generateWWWAuthenticate,) +} + +parser_entity_headers = { + 'Allow':(lambda str:tokenize(str, foldCase=False), filterTokens), + 'Content-Encoding':(tokenize, filterTokens), + 'Content-Language':(tokenize, filterTokens), + 'Content-Length':(last, int), + 'Content-Location':(last,), # TODO: URI object? 
+ 'Content-MD5':(last, parseContentMD5), + 'Content-Range':(last, parseContentRange), + 'Content-Type':(lambda str:tokenize(str, foldCase=False), parseContentType), + 'Expires':(last, parseExpires), + 'Last-Modified':(last, parseDateTime), + } + +generator_entity_headers = { + 'Allow':(generateList, singleHeader), + 'Content-Encoding':(generateList, singleHeader), + 'Content-Language':(generateList, singleHeader), + 'Content-Length':(str, singleHeader), + 'Content-Location':(str, singleHeader), + 'Content-MD5':(base64.encodestring, lambda x: x.strip("\n"), singleHeader), + 'Content-Range':(generateContentRange, singleHeader), + 'Content-Type':(generateContentType, singleHeader), + 'Expires':(generateDateTime, singleHeader), + 'Last-Modified':(generateDateTime, singleHeader), + } + +DefaultHTTPHandler.updateParsers(parser_general_headers) +DefaultHTTPHandler.updateParsers(parser_request_headers) +DefaultHTTPHandler.updateParsers(parser_response_headers) +DefaultHTTPHandler.updateParsers(parser_entity_headers) + +DefaultHTTPHandler.updateGenerators(generator_general_headers) +DefaultHTTPHandler.updateGenerators(generator_request_headers) +DefaultHTTPHandler.updateGenerators(generator_response_headers) +DefaultHTTPHandler.updateGenerators(generator_entity_headers) + + +# casemappingify(DefaultHTTPParsers) +# casemappingify(DefaultHTTPGenerators) + +# lowerify(DefaultHTTPParsers) +# lowerify(DefaultHTTPGenerators) diff -r b619531ddbd2 -r b6e250dd7a7d web/httpcache.py --- a/web/httpcache.py Fri Apr 23 12:40:48 2010 +0200 +++ b/web/httpcache.py Fri Apr 23 12:42:53 2010 +0200 @@ -43,6 +43,8 @@ """ def etag(self): + if not self.req.cnx: # session without established connection to the repo + return self.view.__regid__ return self.view.__regid__ + '/' + ','.join(sorted(self.req.user.groups)) def max_age(self): @@ -131,8 +133,5 @@ # max-age=0 to actually force revalidation when needed viewmod.View.cache_max_age = 0 - -viewmod.EntityView.http_cache_manager = EntityHTTPCacheManager - viewmod.StartupView.http_cache_manager = MaxAgeHTTPCacheManager viewmod.StartupView.cache_max_age = 60*60*2 # stay in http cache for 2 hours by default diff -r b619531ddbd2 -r b6e250dd7a7d web/request.py --- a/web/request.py Fri Apr 23 12:40:48 2010 +0200 +++ b/web/request.py Fri Apr 23 12:42:53 2010 +0200 @@ -34,6 +34,7 @@ from cubicweb.view import STRICT_DOCTYPE, TRANSITIONAL_DOCTYPE_NOEXT from cubicweb.web import (INTERNAL_FIELD_VALUE, LOGGER, NothingToEdit, RequestError, StatusResponse) +from cubicweb.web.http_headers import Headers _MARKER = object() @@ -91,6 +92,8 @@ self.pageid = None self.datadir_url = self._datadir_url() self._set_pageid() + # prepare output header + self.headers_out = Headers() def _set_pageid(self): """initialize self.pageid @@ -122,11 +125,11 @@ self.set_page_data('rql_varmaker', varmaker) return varmaker - def set_connection(self, cnx, user=None): + def set_session(self, session, user=None): """method called by the session handler when the user is authenticated or an anonymous connection is open """ - super(CubicWebRequestBase, self).set_connection(cnx, user) + super(CubicWebRequestBase, self).set_session(session, user) # set request language vreg = self.vreg if self.user: @@ -151,8 +154,9 @@ gettext, self.pgettext = self.translations[lang] self._ = self.__ = gettext self.lang = lang - self.cnx.set_session_props(lang=lang) self.debug('request language: %s', lang) + if self.cnx: + self.cnx.set_session_props(lang=lang) # input form parameters management ######################################## 
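The request object above now keeps its outgoing headers in one of these Headers instances (self.headers_out). A minimal usage sketch, based on the class added earlier in this changeset; the header names and values are invented for illustration:

from cubicweb.web.http_headers import Headers

out = Headers()
# raw values are stored as lists of strings, one item per header line
out.setRawHeaders('content-type', ['text/html; charset=UTF-8'])
out.addRawHeader('x-custom', 'first')
out.addRawHeader('x-custom', 'second')   # appended as a second line, nothing is overwritten

# lookups are case-insensitive
assert out.getRawHeaders('Content-Type') == ['text/html; charset=UTF-8']
assert out.getRawHeaders('X-Custom') == ['first', 'second']

# iteration restores the canonical capitalization for output
for name, values in out.getAllRawHeaders():
    print '%s: %s' % (name, ', '.join(values))

Parsed and raw representations are converted lazily through the parser/generator tables registered on DefaultHTTPHandler, so a header is only parsed (or regenerated) when the other form is actually requested; the set_header(), add_header() and remove_header() request methods further down simply delegate to this object.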
@@ -236,7 +240,7 @@ @property def message(self): try: - return self.get_session_data(self._msgid, default=u'', pop=True) + return self.session.data.pop(self._msgid, '') except AttributeError: try: return self._msg @@ -257,17 +261,17 @@ def set_redirect_message(self, msg): assert isinstance(msg, unicode) msgid = self.redirect_message_id() - self.set_session_data(msgid, msg) + self.session.data[msgid] = msg return msgid def append_to_redirect_message(self, msg): msgid = self.redirect_message_id() - currentmsg = self.get_session_data(msgid) + currentmsg = self.session.data.get(msgid) if currentmsg is not None: currentmsg = '%s %s' % (currentmsg, msg) else: currentmsg = msg - self.set_session_data(msgid, currentmsg) + self.session.data[msgid] = currentmsg return msgid def reset_message(self): @@ -279,8 +283,8 @@ def update_search_state(self): """update the current search state""" searchstate = self.form.get('__mode') - if not searchstate and self.cnx is not None: - searchstate = self.get_session_data('search_state', 'normal') + if not searchstate and self.cnx: + searchstate = self.session.data.get('search_state', 'normal') self.set_search_state(searchstate) def set_search_state(self, searchstate): @@ -290,8 +294,8 @@ else: self.search_state = ('linksearch', searchstate.split(':')) assert len(self.search_state[-1]) == 4 - if self.cnx is not None: - self.set_session_data('search_state', searchstate) + if self.cnx: + self.session.data['search_state'] = searchstate def match_search_state(self, rset): """when searching an entity to create a relation, return True if entities in @@ -308,12 +312,12 @@ def update_breadcrumbs(self): """stores the last visisted page in session data""" - searchstate = self.get_session_data('search_state') + searchstate = self.session.data.get('search_state') if searchstate == 'normal': - breadcrumbs = self.get_session_data('breadcrumbs', None) + breadcrumbs = self.session.data.get('breadcrumbs') if breadcrumbs is None: breadcrumbs = SizeConstrainedList(10) - self.set_session_data('breadcrumbs', breadcrumbs) + self.session.data['breadcrumbs'] = breadcrumbs breadcrumbs.append(self.url()) else: url = self.url() @@ -321,7 +325,7 @@ breadcrumbs.append(url) def last_visited_page(self): - breadcrumbs = self.get_session_data('breadcrumbs', None) + breadcrumbs = self.session.data.get('breadcrumbs') if breadcrumbs: return breadcrumbs.pop() return self.base_url() @@ -368,11 +372,10 @@ self.del_page_data(cbname) def clear_user_callbacks(self): - if self.cnx is not None: - sessdata = self.session_data() - callbacks = [key for key in sessdata if key.startswith('cb_')] - for callback in callbacks: - self.del_session_data(callback) + if self.session is not None: # XXX + for key in self.session.data.keys(): + if key.startswith('cb_'): + del self.session.data[key] # web edition helpers ##################################################### @@ -438,13 +441,13 @@ This is needed when the edition is completed (whether it's validated or cancelled) """ - self.del_session_data('pending_insert') - self.del_session_data('pending_delete') + self.session.data.pop('pending_insert', None) + self.session.data.pop('pending_delete', None) def cancel_edition(self, errorurl): """remove pending operations and `errorurl`'s specific stored data """ - self.del_session_data(errorurl) + self.session.data.pop(errorurl, None) self.remove_pending_operations() # high level methods for HTTP headers management ########################## @@ -663,17 +666,26 @@ """ raise NotImplementedError() - def set_header(self, header, 
value): + """set an output HTTP header""" - raise NotImplementedError() + if raw: + # adding the encoded header is important, else page content + # will be reconverted back to unicode and, apart from being inefficient, this + # may cause decoding problems (e.g. when downloading a file) + self.headers_out.setRawHeaders(header, [str(value)]) + else: + self.headers_out.setHeader(header, value) def add_header(self, header, value): """add an output HTTP header""" - raise NotImplementedError() + # adding the encoded header is important, else page content + # will be reconverted back to unicode and, apart from being inefficient, this + # may cause decoding problems (e.g. when downloading a file) + self.headers_out.addRawHeader(header, str(value)) def remove_header(self, header): """remove an output HTTP header""" - raise NotImplementedError() + self.headers_out.removeHeader(header) def header_authorization(self): """returns a couple (auth-type, auth-value)""" @@ -739,26 +751,29 @@ def get_page_data(self, key, default=None): """return value associated to `key` in current page data""" - page_data = self.cnx.get_session_data(self.pageid, {}) + page_data = self.session.data.get(self.pageid) + if page_data is None: + return default return page_data.get(key, default) def set_page_data(self, key, value): """set value associated to `key` in current page data""" self.html_headers.add_unload_pagedata() - page_data = self.cnx.get_session_data(self.pageid, {}) + page_data = self.session.data.setdefault(self.pageid, {}) page_data[key] = value - return self.cnx.set_session_data(self.pageid, page_data) + self.session.data[self.pageid] = page_data def del_page_data(self, key=None): """remove value associated to `key` in current page data if `key` is None, all page data will be cleared """ if key is None: - self.cnx.del_session_data(self.pageid) + self.session.data.pop(self.pageid, None) else: - page_data = self.cnx.get_session_data(self.pageid, {}) - page_data.pop(key, None) - self.cnx.set_session_data(self.pageid, page_data) + try: + del self.session.data[self.pageid][key] + except KeyError: + pass # user-agent detection #################################################### diff -r b619531ddbd2 -r b6e250dd7a7d web/schemaviewer.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/web/schemaviewer.py Fri Apr 23 12:42:53 2010 +0200 @@ -0,0 +1,231 @@ +"""a helper class to display a CubicWeb schema using ureports + +:organization: Logilab +:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. +:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr +:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses +""" +__docformat__ = "restructuredtext en" +_ = unicode + +from logilab.common.ureports import Section, Title, Table, Link, Span, Text + +from yams.schema2dot import CARD_MAP +from yams.schema import RelationDefinitionSchema +from operator import attrgetter + +TYPE_GETTER = attrgetter('type') + +I18NSTRINGS = [_('read'), _('add'), _('delete'), _('update'), _('order')] + + +class SchemaViewer(object): + """return a ureport layout for some part of a schema""" + def __init__(self, req=None, encoding=None): + self.req = req + if req is not None: + req.add_css('cubicweb.schema.css') + if encoding is None: + encoding = req.encoding + self._ = req._ + else: + encoding = 'ascii' + self._ = unicode + self.encoding = encoding + + # no self.req managements + + def may_read(self, rdef, action): + """Return True if the request's user may read the given schema.
+ Always return True when no request is provided. + """ + if self.req is None: + return True + return sch.may_have_permission('read', self.req) + + def format_eschema(self, eschema): + text = eschema.type + if self.req is None: + return Text(text) + return Link(self.req.build_url('cwetype/%s' % eschema), text) + + def format_rschema(self, rschema, label=None): + if label is None: + label = rschema.type + if self.req is None: + return Text(label) + return Link(self.req.build_url('cwrtype/%s' % rschema), label) + + # end of no self.req managements + + def visit_schema(self, schema, display_relations=0, skiptypes=()): + """get a layout for a whole schema""" + title = Title(self._('Schema %s') % schema.name, + klass='titleUnderline') + layout = Section(children=(title,)) + esection = Section(children=(Title(self._('Entities'), + klass='titleUnderline'),)) + layout.append(esection) + eschemas = [eschema for eschema in schema.entities() + if not (eschema.final or eschema in skiptypes)] + for eschema in sorted(eschemas, key=TYPE_GETTER): + esection.append(self.visit_entityschema(eschema, skiptypes)) + if display_relations: + title = Title(self._('Relations'), klass='titleUnderline') + rsection = Section(children=(title,)) + layout.append(rsection) + relations = [rschema for rschema in sorted(schema.relations(), key=TYPE_GETTER) + if not (rschema.final or rschema.type in skiptypes)] + keys = [(rschema.type, rschema) for rschema in relations] + for key, rschema in sorted(keys, cmp=(lambda x, y: cmp(x[1], y[1]))): + relstr = self.visit_relationschema(rschema) + rsection.append(relstr) + return layout + + def _entity_attributes_data(self, eschema): + _ = self._ + data = [_('attribute'), _('type'), _('default'), _('constraints')] + attributes = sorted(eschema.attribute_definitions(), cmp=(lambda x, y: cmp(x[0].type, y[0].type))) + for rschema, aschema in attributes: + rdef = eschema.rdef(rschema) + if not self.may_read(rdef): + continue + aname = rschema.type + if aname == 'eid': + continue + data.append('%s (%s)' % (aname, _(aname))) + data.append(_(aschema.type)) + defaultval = eschema.default(aname) + if defaultval is not None: + default = self.to_string(defaultval) + elif rdef.cardinality[0] == '1': + default = _('required field') + else: + default = '' + data.append(default) + constraints = rschema.rproperty(eschema.type, aschema.type, + 'constraints') + data.append(', '.join(str(constr) for constr in constraints)) + return data + + + def stereotype(self, name): + return Span((' <<%s>>' % name,), klass='stereotype') + + def visit_entityschema(self, eschema, skiptypes=()): + """get a layout for an entity schema""" + etype = eschema.type + layout = Section(children=' ', klass='clear') + layout.append(Link(etype,' ' , id=etype)) # anchor + title = self.format_eschema(eschema) + boxchild = [Section(children=(title,), klass='title')] + data = [] + data.append(Section(children=boxchild, klass='box')) + data.append(Section(children='', klass='vl')) + data.append(Section(children='', klass='hl')) + t_vars = [] + rels = [] + first = True + + rel_defs = sorted(eschema.relation_definitions(), + cmp=(lambda x, y: cmp((x[0].type, x[0].cardinality), + (y[0].type, y[0].cardinality)))) + for rschema, targetschemas, role in rel_defs: + if rschema.type in skiptypes: + continue + for oeschema in sorted(targetschemas, key=TYPE_GETTER): + rdef = rschema.role_rdef(eschema, oeschema, role) + if not self.may_read(rdef): + continue + label = rschema.type + if role == 'subject': + cards = rschema.rproperty(eschema, 
oeschema, 'cardinality') + else: + cards = rschema.rproperty(oeschema, eschema, 'cardinality') + cards = cards[::-1] + label = '%s %s %s' % (CARD_MAP[cards[1]], label, + CARD_MAP[cards[0]]) + rlink = self.format_rschema(rschema, label) + elink = self.format_eschema(oeschema) + if first: + t_vars.append(Section(children=(elink,), klass='firstvar')) + rels.append(Section(children=(rlink,), klass='firstrel')) + first = False + else: + t_vars.append(Section(children=(elink,), klass='var')) + rels.append(Section(children=(rlink,), klass='rel')) + data.append(Section(children=rels, klass='rels')) + data.append(Section(children=t_vars, klass='vars')) + layout.append(Section(children=data, klass='entityAttributes')) + return layout + + def visit_relationschema(self, rschema, title=True): + """get a layout for a relation schema""" + _ = self._ + if title: + title = self.format_rschema(rschema) + stereotypes = [] + if rschema.meta: + stereotypes.append('meta') + if rschema.symmetric: + stereotypes.append('symmetric') + if rschema.inlined: + stereotypes.append('inlined') + title = Section(children=(title,), klass='title') + if stereotypes: + title.append(self.stereotype(','.join(stereotypes))) + layout = Section(children=(title,), klass='schema') + else: + layout = Section(klass='schema') + data = [_('from'), _('to')] + schema = rschema.schema + rschema_objects = rschema.objects() + if rschema_objects: + # might be empty + properties = [p for p in RelationDefinitionSchema.rproperty_defs(rschema_objects[0]) + if not p in ('cardinality', 'composite', 'eid')] + else: + properties = [] + data += [_(prop) for prop in properties] + cols = len(data) + done = set() + for subjtype, objtypes in sorted(rschema.associations()): + for objtype in objtypes: + if (subjtype, objtype) in done: + continue + done.add((subjtype, objtype)) + if rschema.symmetric: + done.add((objtype, subjtype)) + data.append(self.format_eschema(schema[subjtype])) + data.append(self.format_eschema(schema[objtype])) + rdef = rschema.rdef(subjtype, objtype) + for prop in properties: + val = getattr(rdef, prop) + if val is None: + val = '' + elif prop == 'constraints': + val = ', '.join([c.restriction for c in val]) + elif isinstance(val, dict): + for key, value in val.iteritems(): + if isinstance(value, (list, tuple)): + val[key] = ', '.join(sorted( str(v) for v in value)) + val = str(val) + + elif isinstance(val, (list, tuple)): + val = sorted(val) + val = ', '.join(str(v) for v in val) + elif val and isinstance(val, basestring): + val = _(val) + else: + val = str(val) + data.append(Text(val)) + table = Table(cols=cols, rheaders=1, children=data, klass='listing') + layout.append(Section(children=(table,), klass='relationDefinition')) + layout.append(Section(children='', klass='clear')) + return layout + + def to_string(self, value): + """used to converte arbitrary values to encoded string""" + if isinstance(value, unicode): + return value.encode(self.encoding, 'replace') + return str(value) diff -r b619531ddbd2 -r b6e250dd7a7d web/test/data/sample1.pdf Binary file web/test/data/sample1.pdf has changed diff -r b619531ddbd2 -r b6e250dd7a7d web/test/data/sample1.xml --- a/web/test/data/sample1.xml Fri Apr 23 12:40:48 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,138 +0,0 @@ - - - - - ] > - - - - - - - -Comet 0.2.0 (unset title) - - - - - - - - - - - - - - - - - - - - - - -
-
- - - \ No newline at end of file diff -r b619531ddbd2 -r b6e250dd7a7d web/test/data/views.py --- a/web/test/data/views.py Fri Apr 23 12:40:48 2010 +0200 +++ b/web/test/data/views.py Fri Apr 23 12:42:53 2010 +0200 @@ -14,7 +14,7 @@ # user # NOTE: this require "cookie" authentication mode def auto_login_publish(self, path, req): - if (req.cnx is None or req.cnx.anonymous_connection) and req.form.get('__fblogin'): + if (not req.cnx or req.cnx.anonymous_connection) and req.form.get('__fblogin'): login = password = req.form.pop('__fblogin') self.repo.register_user(login, password) req.form['__login'] = login diff -r b619531ddbd2 -r b6e250dd7a7d web/test/unittest_application.py --- a/web/test/unittest_application.py Fri Apr 23 12:40:48 2010 +0200 +++ b/web/test/unittest_application.py Fri Apr 23 12:42:53 2010 +0200 @@ -14,9 +14,10 @@ from logilab.common.testlib import TestCase, unittest_main from logilab.common.decorators import clear_cache +from cubicweb import AuthenticationError from cubicweb.devtools.testlib import CubicWebTC from cubicweb.devtools.fake import FakeRequest -from cubicweb.web import Redirect, AuthenticationError, ExplicitLogin, INTERNAL_FIELD_VALUE +from cubicweb.web import LogOut, Redirect, INTERNAL_FIELD_VALUE from cubicweb.web.views.basecontrollers import ViewController class FakeMapping: @@ -29,7 +30,7 @@ class MockCursor: def __init__(self): self.executed = [] - def execute(self, rql, args=None, cachekey=None): + def execute(self, rql, args=None, build_descr=False): args = args or {} self.executed.append(rql % args) @@ -39,10 +40,12 @@ def __init__(self, form=None): self._cw = FakeRequest() self._cw.form = form or {} - self._cursor = self._cw.cursor = MockCursor() + self._cursor = MockCursor() + self._cw.execute = self._cursor.execute def new_cursor(self): - self._cursor = self._cw.cursor = MockCursor() + self._cursor = MockCursor() + self._cw.execute = self._cursor.execute def set_form(self, form): self._cw.form = form @@ -178,7 +181,7 @@ '__errorurl': 'view?vid=edition...' 
} path, params = self.expect_redirect(lambda x: self.app_publish(x, 'edit'), req) - forminfo = req.get_session_data('view?vid=edition...') + forminfo = req.session.data['view?vid=edition...'] eidmap = forminfo['eidmap'] self.assertEquals(eidmap, {}) values = forminfo['values'] @@ -208,7 +211,7 @@ '__errorurl': 'view?vid=edition...', } path, params = self.expect_redirect(lambda x: self.app_publish(x, 'edit'), req) - forminfo = req.get_session_data('view?vid=edition...') + forminfo = req.session.data['view?vid=edition...'] self.assertEquals(set(forminfo['eidmap']), set('XY')) self.assertEquals(forminfo['eidmap']['X'], None) self.assertIsInstance(forminfo['eidmap']['Y'], int) @@ -237,7 +240,7 @@ '__errorurl': 'view?vid=edition...', } path, params = self.expect_redirect(lambda x: self.app_publish(x, 'edit'), req) - forminfo = req.get_session_data('view?vid=edition...') + forminfo = req.session.data['view?vid=edition...'] self.assertEquals(set(forminfo['eidmap']), set('XY')) self.assertIsInstance(forminfo['eidmap']['X'], int) self.assertIsInstance(forminfo['eidmap']['Y'], int) @@ -299,29 +302,29 @@ # authentication tests #################################################### def test_http_auth_no_anon(self): - req, origcnx = self.init_authentication('http') + req, origsession = self.init_authentication('http') self.assertAuthFailure(req) - self.assertRaises(ExplicitLogin, self.app_publish, req, 'login') + self.assertRaises(AuthenticationError, self.app_publish, req, 'login') self.assertEquals(req.cnx, None) - authstr = base64.encodestring('%s:%s' % (origcnx.login, origcnx.authinfo['password'])) + authstr = base64.encodestring('%s:%s' % (origsession.login, origsession.authinfo['password'])) req._headers['Authorization'] = 'basic %s' % authstr - self.assertAuthSuccess(req, origcnx) - self.assertEquals(req.cnx.authinfo, {'password': origcnx.authinfo['password']}) - self.assertRaises(AuthenticationError, self.app_publish, req, 'logout') + self.assertAuthSuccess(req, origsession) + self.assertEquals(req.session.authinfo, {'password': origsession.authinfo['password']}) + self.assertRaises(LogOut, self.app_publish, req, 'logout') self.assertEquals(len(self.open_sessions), 0) def test_cookie_auth_no_anon(self): - req, origcnx = self.init_authentication('cookie') + req, origsession = self.init_authentication('cookie') self.assertAuthFailure(req) form = self.app_publish(req, 'login') self.failUnless('__login' in form) self.failUnless('__password' in form) self.assertEquals(req.cnx, None) - req.form['__login'] = origcnx.login - req.form['__password'] = origcnx.authinfo['password'] - self.assertAuthSuccess(req, origcnx) - self.assertEquals(req.cnx.authinfo, {'password': origcnx.authinfo['password']}) - self.assertRaises(AuthenticationError, self.app_publish, req, 'logout') + req.form['__login'] = origsession.login + req.form['__password'] = origsession.authinfo['password'] + self.assertAuthSuccess(req, origsession) + self.assertEquals(req.session.authinfo, {'password': origsession.authinfo['password']}) + self.assertRaises(LogOut, self.app_publish, req, 'logout') self.assertEquals(len(self.open_sessions), 0) def test_login_by_email(self): @@ -331,71 +334,72 @@ 'WHERE U login %(login)s', {'address': address, 'login': login}) self.commit() # option allow-email-login not set - req, origcnx = self.init_authentication('cookie') + req, origsession = self.init_authentication('cookie') req.form['__login'] = address - req.form['__password'] = origcnx.authinfo['password'] + req.form['__password'] = 
origsession.authinfo['password'] self.assertAuthFailure(req) # option allow-email-login set - origcnx.login = address + origsession.login = address self.set_option('allow-email-login', True) req.form['__login'] = address - req.form['__password'] = origcnx.authinfo['password'] - self.assertAuthSuccess(req, origcnx) - self.assertEquals(req.cnx.authinfo, {'password': origcnx.authinfo['password']}) - self.assertRaises(AuthenticationError, self.app_publish, req, 'logout') + req.form['__password'] = origsession.authinfo['password'] + self.assertAuthSuccess(req, origsession) + self.assertEquals(req.session.authinfo, {'password': origsession.authinfo['password']}) + self.assertRaises(LogOut, self.app_publish, req, 'logout') self.assertEquals(len(self.open_sessions), 0) def _reset_cookie(self, req): # preparing the suite of the test # set session id in cookie cookie = Cookie.SimpleCookie() - cookie['__session'] = req.cnx.sessionid + cookie['__session'] = req.session.sessionid req._headers['Cookie'] = cookie['__session'].OutputString() clear_cache(req, 'get_authorization') - # reset cnx as if it was a new incoming request - req.cnx = None + # reset session as if it was a new incoming request + req.session = req.cnx = None def _test_auth_anon(self, req): self.app.connect(req) - acnx = req.cnx + asession = req.session self.assertEquals(len(self.open_sessions), 1) - self.assertEquals(acnx.login, 'anon') - self.assertEquals(acnx.authinfo['password'], 'anon') - self.failUnless(acnx.anonymous_connection) + self.assertEquals(asession.login, 'anon') + self.assertEquals(asession.authinfo['password'], 'anon') + self.failUnless(asession.anonymous_session) self._reset_cookie(req) def _test_anon_auth_fail(self, req): self.assertEquals(len(self.open_sessions), 1) self.app.connect(req) self.assertEquals(req.message, 'authentication failure') - self.assertEquals(req.cnx.anonymous_connection, True) + self.assertEquals(req.session.anonymous_session, True) self.assertEquals(len(self.open_sessions), 1) self._reset_cookie(req) def test_http_auth_anon_allowed(self): - req, origcnx = self.init_authentication('http', 'anon') + req, origsession = self.init_authentication('http', 'anon') self._test_auth_anon(req) authstr = base64.encodestring('toto:pouet') req._headers['Authorization'] = 'basic %s' % authstr self._test_anon_auth_fail(req) - authstr = base64.encodestring('%s:%s' % (origcnx.login, origcnx.authinfo['password'])) + authstr = base64.encodestring('%s:%s' % (origsession.login, origsession.authinfo['password'])) req._headers['Authorization'] = 'basic %s' % authstr - self.assertAuthSuccess(req, origcnx) - self.assertEquals(req.cnx.authinfo, {'password': origcnx.authinfo['password']}) - self.assertRaises(AuthenticationError, self.app_publish, req, 'logout') + self.assertAuthSuccess(req, origsession) + self.assertEquals(req.session.authinfo, {'password': origsession.authinfo['password']}) + self.assertRaises(LogOut, self.app_publish, req, 'logout') self.assertEquals(len(self.open_sessions), 0) def test_cookie_auth_anon_allowed(self): - req, origcnx = self.init_authentication('cookie', 'anon') + req, origsession = self.init_authentication('cookie', 'anon') self._test_auth_anon(req) req.form['__login'] = 'toto' req.form['__password'] = 'pouet' self._test_anon_auth_fail(req) - req.form['__login'] = origcnx.login - req.form['__password'] = origcnx.authinfo['password'] - self.assertAuthSuccess(req, origcnx) - self.assertEquals(req.cnx.authinfo, {'password': origcnx.authinfo['password']}) - 
self.assertRaises(AuthenticationError, self.app_publish, req, 'logout') + req.form['__login'] = origsession.login + req.form['__password'] = origsession.authinfo['password'] + self.assertAuthSuccess(req, origsession) + self.assertEquals(req.session.authinfo, + {'password': origsession.authinfo['password']}) + self.assertRaises(LogOut, self.app_publish, req, 'logout') self.assertEquals(len(self.open_sessions), 0) def test_non_regr_optional_first_var(self): diff -r b619531ddbd2 -r b6e250dd7a7d web/test/unittest_pdf.py --- a/web/test/unittest_pdf.py Fri Apr 23 12:40:48 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,41 +0,0 @@ -import os.path as osp -from tempfile import NamedTemporaryFile -from subprocess import Popen as sub -from xml.etree.cElementTree import ElementTree, fromstring, tostring, dump - -from logilab.common.testlib import TestCase, unittest_main - -from cubicweb.utils import can_do_pdf_conversion -from cubicweb.ext.xhtml2fo import ReportTransformer - -DATADIR = osp.join(osp.dirname(__file__), 'data') - -class PDFTC(TestCase): - - def test_xhtml_to_fop_to_pdf(self): - if not can_do_pdf_conversion(): - self.skip('dependencies not available : check pysixt and fop') - xmltree = ElementTree() - xmltree.parse(osp.join(DATADIR, 'sample1.xml')) - foptree = ReportTransformer(u'contentmain').transform(xmltree) - # next - foptmp = NamedTemporaryFile() - foptree.write(foptmp) - foptmp.flush() - pdftmp = NamedTemporaryFile() - fopproc = sub(['/usr/bin/fop', foptmp.name, pdftmp.name]) - fopproc.wait() - del foptmp - if fopproc.returncode: - self.skip('fop returned status %s' % fopproc.returncode) - pdftmp.seek(0) # a bit superstitious - reference = open(osp.join(DATADIR, 'sample1.pdf'), 'r').read() - output = pdftmp.read() - # XXX almost equals due to ID, creation date, so it seems to fail - self.assertEquals( len(output), len(reference) ) - # cut begin & end 'cause they contain variyng data - self.assertTextEquals(output[150:1500], reference[150:1500]) - -if __name__ == '__main__': - unittest_main() - diff -r b619531ddbd2 -r b6e250dd7a7d web/test/unittest_session.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/web/test/unittest_session.py Fri Apr 23 12:42:53 2010 +0200 @@ -0,0 +1,33 @@ +# -*- coding: iso-8859-1 -*- +"""unit tests for cubicweb.web.application + +:organization: Logilab +:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. 
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr +:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses +""" +from cubicweb.devtools.testlib import CubicWebTC + +class SessionTC(CubicWebTC): + + def test_auto_reconnection(self): + sm = self.app.session_handler.session_manager + # make is if the web session has been opened by the session manager + sm._sessions[self.cnx.sessionid] = self.websession + sessionid = self.websession.sessionid + self.assertEquals(len(sm._sessions), 1) + self.assertEquals(self.websession.sessionid, self.websession.cnx.sessionid) + # fake the repo session is expiring + self.repo.close(sessionid) + # fake an incoming http query with sessionid in session cookie + # don't use self.request() which try to call req.set_session + req = self.requestcls(self.vreg) + websession = sm.get_session(req, sessionid) + self.assertEquals(len(sm._sessions), 1) + self.assertIs(websession, self.websession) + self.assertEquals(websession.sessionid, sessionid) + self.assertNotEquals(websession.sessionid, websession.cnx.sessionid) + +if __name__ == '__main__': + from logilab.common.testlib import unittest_main + unittest_main() diff -r b619531ddbd2 -r b6e250dd7a7d web/test/unittest_urlpublisher.py --- a/web/test/unittest_urlpublisher.py Fri Apr 23 12:40:48 2010 +0200 +++ b/web/test/unittest_urlpublisher.py Fri Apr 23 12:42:53 2010 +0200 @@ -26,7 +26,7 @@ req = self.request() b = req.create_entity('BlogEntry', title=u'hell\'o', content=u'blabla') c = req.create_entity('Tag', name=u'yo') # take care: Tag's name normalized to lower case - self.execute('SET C tags B WHERE C eid %(c)s, B eid %(b)s', {'c':c.eid, 'b':b.eid}, 'b') + self.execute('SET C tags B WHERE C eid %(c)s, B eid %(b)s', {'c':c.eid, 'b':b.eid}) def process(self, url): req = self.req = self.request() diff -r b619531ddbd2 -r b6e250dd7a7d web/test/unittest_views_basecontrollers.py --- a/web/test/unittest_views_basecontrollers.py Fri Apr 23 12:40:48 2010 +0200 +++ b/web/test/unittest_views_basecontrollers.py Fri Apr 23 12:42:53 2010 +0200 @@ -74,7 +74,7 @@ 'in_group-subject:'+eid: groups, } path, params = self.expect_redirect_publish(req, 'edit') - e = self.execute('Any X WHERE X eid %(x)s', {'x': user.eid}, 'x').get_entity(0, 0) + e = self.execute('Any X WHERE X eid %(x)s', {'x': user.eid}).get_entity(0, 0) self.assertEquals(e.firstname, u'Sylvain') self.assertEquals(e.surname, u'Th\xe9nault') self.assertEquals(e.login, user.login) @@ -114,7 +114,7 @@ 'surname-subject:'+eid: u'Sylvain', } path, params = self.expect_redirect_publish(req, 'edit') - e = self.execute('Any X WHERE X eid %(x)s', {'x': user.eid}, 'x').get_entity(0, 0) + e = self.execute('Any X WHERE X eid %(x)s', {'x': user.eid}).get_entity(0, 0) self.assertEquals(e.login, user.login) self.assertEquals(e.firstname, u'Th\xe9nault') self.assertEquals(e.surname, u'Sylvain') @@ -250,7 +250,7 @@ tmpgroup = self.request().create_entity('CWGroup', name=u"test") user = self.user() req = self.request(**req_form(user)) - req.set_session_data('pending_insert', set([(user.eid, 'in_group', tmpgroup.eid)])) + req.session.data['pending_insert'] = set([(user.eid, 'in_group', tmpgroup.eid)]) path, params = self.expect_redirect_publish(req, 'edit') usergroups = [gname for gname, in self.execute('Any N WHERE G name N, U in_group G, U eid %(u)s', {'u': user.eid})] @@ -269,7 +269,7 @@ self.assertUnorderedIterableEquals(usergroups, ['managers', 'test']) # now try to delete the relation req = self.request(**req_form(user)) - 
req.set_session_data('pending_delete', set([(user.eid, 'in_group', groupeid)])) + req.session.data['pending_delete'] = set([(user.eid, 'in_group', groupeid)]) path, params = self.expect_redirect_publish(req, 'edit') usergroups = [gname for gname, in self.execute('Any N WHERE G name N, U in_group G, U eid %(u)s', {'u': user.eid})] @@ -349,7 +349,7 @@ self.assertIn('_cwmsgid', params) eid = req.create_entity('EmailAddress', address=u'hop@logilab.fr').eid self.execute('SET X use_email E WHERE E eid %(e)s, X eid %(x)s', - {'x': self.session.user.eid, 'e': eid}, 'x') + {'x': self.session.user.eid, 'e': eid}) self.commit() req = req req.form = {'eid': u(eid), '__type:%s'%eid: 'EmailAddress', @@ -388,7 +388,7 @@ } try: path, params = self.expect_redirect_publish(req, 'edit') - e = self.execute('Any X WHERE X eid %(x)s', {'x': cwetypeeid}, 'x').get_entity(0, 0) + e = self.execute('Any X WHERE X eid %(x)s', {'x': cwetypeeid}).get_entity(0, 0) self.assertEquals(e.name, 'CWEType') self.assertEquals(sorted(g.eid for g in e.read_permission), groupeids) finally: @@ -410,7 +410,7 @@ path, params = self.expect_redirect_publish(req, 'edit') self.failUnless(path.startswith('blogentry/')) eid = path.split('/')[1] - e = self.execute('Any C, T WHERE C eid %(x)s, C content T', {'x': eid}, 'x').get_entity(0, 0) + e = self.execute('Any C, T WHERE C eid %(x)s, C content T', {'x': eid}).get_entity(0, 0) self.assertEquals(e.title, '"13:03:40"') self.assertEquals(e.content, '"13:03:43"') @@ -557,17 +557,21 @@ def test_remote_add_existing_tag(self): self.remote_call('tag_entity', self.john.eid, ['python']) - self.assertUnorderedIterableEquals([tname for tname, in self.execute('Any N WHERE T is Tag, T name N')], - ['python', 'cubicweb']) - self.assertEquals(self.execute('Any N WHERE T tags P, P is CWUser, T name N').rows, - [['python']]) + self.assertUnorderedIterableEquals( + [tname for tname, in self.execute('Any N WHERE T is Tag, T name N')], + ['python', 'cubicweb']) + self.assertEquals( + self.execute('Any N WHERE T tags P, P is CWUser, T name N').rows, + [['python']]) def test_remote_add_new_tag(self): self.remote_call('tag_entity', self.john.eid, ['javascript']) - self.assertUnorderedIterableEquals([tname for tname, in self.execute('Any N WHERE T is Tag, T name N')], - ['python', 'cubicweb', 'javascript']) - self.assertEquals(self.execute('Any N WHERE T tags P, P is CWUser, T name N').rows, - [['javascript']]) + self.assertUnorderedIterableEquals( + [tname for tname, in self.execute('Any N WHERE T is Tag, T name N')], + ['python', 'cubicweb', 'javascript']) + self.assertEquals( + self.execute('Any N WHERE T tags P, P is CWUser, T name N').rows, + [['javascript']]) def test_pending_insertion(self): res, req = self.remote_call('add_pending_inserts', [['12', 'tags', '13']]) diff -r b619531ddbd2 -r b6e250dd7a7d web/test/unittest_views_baseviews.py --- a/web/test/unittest_views_baseviews.py Fri Apr 23 12:40:48 2010 +0200 +++ b/web/test/unittest_views_baseviews.py Fri Apr 23 12:42:53 2010 +0200 @@ -93,7 +93,7 @@ req = self.request() e = req.create_entity("State", name=u'', description=u'loo"ong blabla') rset = req.execute('Any X, D, CD, NOW - CD WHERE X is State, X description D, X creation_date CD, X eid %(x)s', - {'x': e.eid}, 'x') + {'x': e.eid}) view = self.vreg['views'].select('table', req, rset=rset) return e, rset, view diff -r b619531ddbd2 -r b6e250dd7a7d web/test/unittest_viewselector.py --- a/web/test/unittest_viewselector.py Fri Apr 23 12:40:48 2010 +0200 +++ b/web/test/unittest_viewselector.py Fri Apr 23 
12:42:53 2010 +0200 @@ -96,7 +96,7 @@ ('list', baseviews.ListView), ('oneline', baseviews.OneLineView), ('owlabox', owl.OWLABOXView), - ('primary', primary.PrimaryView), + ('primary', cwuser.CWGroupPrimaryView), ('rsetxml', xmlrss.XMLRsetView), ('rss', xmlrss.RSSView), ('sameetypelist', baseviews.SameETypeListView), @@ -120,7 +120,7 @@ ('list', baseviews.ListView), ('oneline', baseviews.OneLineView), ('owlabox', owl.OWLABOXView), - ('primary', primary.PrimaryView), + ('primary', cwuser.CWGroupPrimaryView), ('rsetxml', xmlrss.XMLRsetView), ('rss', xmlrss.RSSView), ('sameetypelist', baseviews.SameETypeListView), diff -r b619531ddbd2 -r b6e250dd7a7d web/views/actions.py --- a/web/views/actions.py Fri Apr 23 12:40:48 2010 +0200 +++ b/web/views/actions.py Fri Apr 23 12:42:53 2010 +0200 @@ -239,7 +239,7 @@ __select__ = action.Action.__select__ & one_line_rset() & non_final_entity() submenu = _('addrelated') - order = 20 + order = 17 def fill_menu(self, box, menu): # when there is only one item in the sub-menu, replace the sub-menu by diff -r b619531ddbd2 -r b6e250dd7a7d web/views/authentication.py --- a/web/views/authentication.py Fri Apr 23 12:40:48 2010 +0200 +++ b/web/views/authentication.py Fri Apr 23 12:42:53 2010 +0200 @@ -5,14 +5,18 @@ :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr :license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses """ +from __future__ import with_statement + __docformat__ = "restructuredtext en" +from threading import Lock + from logilab.common.decorators import clear_cache from cubicweb import AuthenticationError, BadConnectionId from cubicweb.view import Component from cubicweb.dbapi import repo_connect, ConnectionProperties -from cubicweb.web import ExplicitLogin, InvalidSession +from cubicweb.web import InvalidSession from cubicweb.web.application import AbstractAuthenticationManager class NoAuthInfo(Exception): pass @@ -28,9 +32,10 @@ """ raise NotImplementedError() - def authenticated(self, req, cnx, retreiver): + def authenticated(self, retreiver, req, cnx, login, authinfo): """callback when return authentication information have opened a - repository connection successfully + repository connection successfully. Take care req has no session + attached yet, hence req.execute isn't available. """ pass @@ -59,50 +64,58 @@ self.authinforetreivers = sorted(vreg['webauth'].possible_objects(vreg), key=lambda x: x.order) assert self.authinforetreivers + # 2-uple login / password, login is None when no anonymous access + # configured self.anoninfo = vreg.config.anonymous_user() + if self.anoninfo[0]: + self.anoninfo = (self.anoninfo[0], {'password': self.anoninfo[1]}) def validate_session(self, req, session): - """check session validity, and return eventually hijacked session + """check session validity, reconnecting it to the repository if the + associated connection expired in the repository side (hence the + necessity for this method). Return the connected user on success. 
- :raise InvalidSession: - if session is corrupted for a reason or another and should be closed + raise :exc:`InvalidSession` if the session is corrupted for one reason or + another and should be closed """ # with this authentication manager, session is actually a dbapi # connection - cnx = session login = req.get_authorization()[0] + # check session.login and not user.login, since in case of login by + # email, login and cnx.login are the email while user.login is the + # actual user login + if login and session.login != login: + raise InvalidSession('login mismatch') try: - # calling cnx.user() check connection validity, raise - # BadConnectionId on failure - user = cnx.user(req) - # check cnx.login and not user.login, since in case of login by - # email, login and cnx.login are the email while user.login is the - # actual user login - if login and cnx.login != login: - cnx.close() - raise InvalidSession('login mismatch') - except BadConnectionId: - # check if a connection should be automatically restablished - if (login is None or login == cnx.login): - cnx = self._authenticate(req, cnx.login, cnx.authinfo) + lock = session.reconnection_lock + except AttributeError: + lock = session.reconnection_lock = Lock() + # need to be locked to avoid duplicated reconnections on concurrent + # requests + with lock: + cnx = session.cnx + try: + # calling cnx.user() checks connection validity, raise + # BadConnectionId on failure user = cnx.user(req) - # backport session's data - cnx.data = session.data - else: - raise InvalidSession('bad connection id') - # associate the connection to the current request - req.set_connection(cnx, user) - return cnx + except BadConnectionId: + # check if a connection should be automatically re-established + if (login is None or login == session.login): + cnx = self._authenticate(session.login, session.authinfo) + user = cnx.user(req) + session.cnx = cnx + else: + raise InvalidSession('bad connection id') + return user def authenticate(self, req): - """authenticate user and return corresponding user object + """authenticate user using connection information found in the request, + and return a corresponding :class:`~cubicweb.dbapi.Connection` instance, + as well as the login and authentication information dictionary used to open + the connection. - :raise ExplicitLogin: if authentication is required (no authentication - info found or wrong user/password) - - Note: this method is violating AuthenticationManager interface by - returning a session instance instead of the user. This is expected by - the InMemoryRepositorySessionManager.
+ raise :exc:`cubicweb.AuthenticationError` if authentication failed + (no authentication info found or wrong user/password) """ for retreiver in self.authinforetreivers: try: @@ -110,44 +123,28 @@ except NoAuthInfo: continue try: - cnx = self._authenticate(req, login, authinfo) - except ExplicitLogin: + cnx = self._authenticate(login, authinfo) + except AuthenticationError: continue # the next one may succeed for retreiver_ in self.authinforetreivers: - retreiver_.authenticated(req, cnx, retreiver) - break - else: - # false if no authentication info found, eg this is not an - # authentication failure - if 'login' in locals(): - req.set_message(req._('authentication failure')) - cnx = self._open_anonymous_connection(req) - return cnx + retreiver_.authenticated(retreiver, req, cnx, login, authinfo) + return cnx, login, authinfo + # false if no authentication info found, eg this is not an + # authentication failure + if 'login' in locals(): + req.set_message(req._('authentication failure')) + login, authinfo = self.anoninfo + if login: + cnx = self._authenticate(login, authinfo) + cnx.anonymous_connection = True + return cnx, login, authinfo + raise AuthenticationError() - def _authenticate(self, req, login, authinfo): + def _authenticate(self, login, authinfo): cnxprops = ConnectionProperties(self.vreg.config.repo_method, close=False, log=self.log_queries) - try: - cnx = repo_connect(self.repo, login, cnxprops=cnxprops, **authinfo) - except AuthenticationError: - raise ExplicitLogin() - self._init_cnx(cnx, login, authinfo) - # associate the connection to the current request - req.set_connection(cnx) + cnx = repo_connect(self.repo, login, cnxprops=cnxprops, **authinfo) + # decorate connection + cnx.vreg = self.vreg return cnx - def _open_anonymous_connection(self, req): - # restore an anonymous connection if possible - login, password = self.anoninfo - if login: - cnx = self._authenticate(req, login, {'password': password}) - cnx.anonymous_connection = True - return cnx - raise ExplicitLogin() - - def _init_cnx(self, cnx, login, authinfo): - # decorate connection - cnx.vreg = self.vreg - cnx.login = login - cnx.authinfo = authinfo - diff -r b619531ddbd2 -r b6e250dd7a7d web/views/autoform.py --- a/web/views/autoform.py Fri Apr 23 12:40:48 2010 +0200 +++ b/web/views/autoform.py Fri Apr 23 12:42:53 2010 +0200 @@ -357,7 +357,7 @@ This is where are stored relations being added while editing an entity. This used to be stored in a temporary cookie. """ - pending = req.get_session_data('pending_insert') or () + pending = req.session.data.get('pending_insert', ()) return ['%s:%s:%s' % (subj, rel, obj) for subj, rel, obj in pending if eid is None or eid in (subj, obj)] @@ -367,7 +367,7 @@ This is where are stored relations being removed while editing an entity. This used to be stored in a temporary cookie. 
""" - pending = req.get_session_data('pending_delete') or () + pending = req.session.data.get('pending_delete', ()) return ['%s:%s:%s' % (subj, rel, obj) for subj, rel, obj in pending if eid is None or eid in (subj, obj)] @@ -390,7 +390,7 @@ execute = req.execute for subj, rtype, obj in parse_relations_descr(rdefs): rql = 'DELETE X %s Y where X eid %%(x)s, Y eid %%(y)s' % rtype - execute(rql, {'x': subj, 'y': obj}, ('x', 'y')) + execute(rql, {'x': subj, 'y': obj}) req.set_message(req._('relations deleted')) def insert_relations(req, rdefs): @@ -398,7 +398,7 @@ execute = req.execute for subj, rtype, obj in parse_relations_descr(rdefs): rql = 'SET X %s Y where X eid %%(x)s, Y eid %%(y)s' % rtype - execute(rql, {'x': subj, 'y': obj}, ('x', 'y')) + execute(rql, {'x': subj, 'y': obj}) class GenericRelationsWidget(fw.FieldWidget): diff -r b619531ddbd2 -r b6e250dd7a7d web/views/basecomponents.py --- a/web/views/basecomponents.py Fri Apr 23 12:40:48 2010 +0200 +++ b/web/views/basecomponents.py Fri Apr 23 12:42:53 2010 +0200 @@ -2,7 +2,6 @@ * the rql input form * the logged user link -* pdf view link :organization: Logilab :copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. diff -r b619531ddbd2 -r b6e250dd7a7d web/views/basecontrollers.py --- a/web/views/basecontrollers.py Fri Apr 23 12:40:48 2010 +0200 +++ b/web/views/basecontrollers.py Fri Apr 23 12:42:53 2010 +0200 @@ -10,8 +10,6 @@ """ __docformat__ = "restructuredtext en" -from smtplib import SMTP - try: import json except ImportError: @@ -20,15 +18,15 @@ from logilab.common.decorators import cached from logilab.common.date import strptime -from cubicweb import (NoSelectableObject, ValidationError, ObjectNotFound, - typed_eid) +from cubicweb import (NoSelectableObject, ObjectNotFound, ValidationError, + AuthenticationError, typed_eid) from cubicweb.utils import CubicWebJsonEncoder from cubicweb.selectors import authenticated_user, match_form_params from cubicweb.mail import format_mail -from cubicweb.web import ExplicitLogin, Redirect, RemoteCallFailed, json_dumps +from cubicweb.web import Redirect, RemoteCallFailed, DirectResponse, json_dumps from cubicweb.web.controller import Controller -from cubicweb.web.views import vid_from_rset -from cubicweb.web.views.formrenderers import FormRenderer +from cubicweb.web.views import vid_from_rset, formrenderers + try: from cubicweb.web.facet import (FilterRQLBuilder, get_facet, prepare_facets_rqlst) @@ -61,7 +59,7 @@ user's session data """ def wrapper(self, *args, **kwargs): - data = self._cw.get_session_data(self._cw.pageid) + data = self._cw.session.data.get(self._cw.pageid) if data is None: raise RemoteCallFailed(self._cw._('pageid-not-found')) return func(self, *args, **kwargs) @@ -75,7 +73,7 @@ """log in the instance""" if self._cw.vreg.config['auth-mode'] == 'http': # HTTP authentication - raise ExplicitLogin() + raise AuthenticationError() else: # Cookie authentication return self.appli.need_login_content(self._cw) @@ -121,7 +119,10 @@ req = self._cw if rset is None and not hasattr(req, '_rql_processed'): req._rql_processed = True - rset = self.process_rql(req.form.get('rql')) + if req.cnx: + rset = self.process_rql(req.form.get('rql')) + else: + rset = None if rset and rset.rowcount == 1 and '__method' in req.form: entity = rset.get_entity(0, 0) try: @@ -182,14 +183,14 @@ else: rql = 'SET Y %s X WHERE X eid %%(x)s, Y eid %%(y)s' % rtype for teid in eids: - req.execute(rql, {'x': eid, 'y': typed_eid(teid)}, ('x', 'y')) + req.execute(rql, {'x': eid, 'y': typed_eid(teid)}) def 
_validation_error(req, ex): req.cnx.rollback() # XXX necessary to remove existant validation error? # imo (syt), it's not necessary - req.get_session_data(req.form.get('__errorurl'), pop=True) + req.session.data.pop(req.form.get('__errorurl'), None) foreid = ex.entity eidmap = req.data.get('eidmap', {}) for var, eid in eidmap.items(): @@ -241,7 +242,6 @@ self._cw.set_content_type('text/html') jsargs = json.dumps((status, args, entity), cls=CubicWebJsonEncoder) return """""" % (domid, callback, errback, jsargs, cbargs) @@ -286,7 +286,7 @@ raise RemoteCallFailed(repr(exc)) try: result = func(*args) - except RemoteCallFailed: + except (RemoteCallFailed, DirectResponse): raise except Exception, ex: self.exception('an exception occured while calling js_%s(%s): %s', @@ -319,12 +319,12 @@ form['__action_%s' % action] = u'whatever' return form - def _exec(self, rql, args=None, eidkey=None, rocheck=True): + def _exec(self, rql, args=None, rocheck=True): """json mode: execute RQL and return resultset as json""" if rocheck: self._cw.ensure_ro_rql(rql) try: - return self._cw.execute(rql, args, eidkey) + return self._cw.execute(rql, args) except Exception, ex: self.exception("error in _exec(rql=%s): %s", rql, ex) return None @@ -382,7 +382,7 @@ form = self._cw.vreg['forms'].select('edition', self._cw, entity=entity) form.build_context() vfield = form.field_by_name('value') - renderer = FormRenderer(self._cw) + renderer = formrenderers.FormRenderer(self._cw) return vfield.render(form, renderer, tabindex=tabindex) \ + renderer.render_help(form, vfield) @@ -476,7 +476,7 @@ @check_pageid @jsonize def js_user_callback(self, cbname): - page_data = self._cw.get_session_data(self._cw.pageid, {}) + page_data = self._cw.session.data.get(self._cw.pageid, {}) try: cb = page_data[cbname] except KeyError: @@ -505,7 +505,7 @@ self._cw.unregister_callback(self._cw.pageid, cbname) def js_unload_page_data(self): - self._cw.del_session_data(self._cw.pageid) + self._cw.session.data.pop(self._cw.pageid, None) def js_cancel_edition(self, errorurl): """cancelling edition from javascript @@ -550,15 +550,13 @@ def _add_pending(self, eidfrom, rel, eidto, kind): key = 'pending_%s' % kind - pendings = self._cw.get_session_data(key, set()) + pendings = self._cw.session.data.setdefault(key, set()) pendings.add( (typed_eid(eidfrom), rel, typed_eid(eidto)) ) - self._cw.set_session_data(key, pendings) def _remove_pending(self, eidfrom, rel, eidto, kind): key = 'pending_%s' % kind - pendings = self._cw.get_session_data(key) + pendings = self._cw.session.data[key] pendings.remove( (typed_eid(eidfrom), rel, typed_eid(eidto)) ) - self._cw.set_session_data(key, pendings) def js_remove_pending_insert(self, (eidfrom, rel, eidto)): self._remove_pending(eidfrom, rel, eidto, 'insert') @@ -644,7 +642,7 @@ def redirect(self): req = self._cw - breadcrumbs = req.get_session_data('breadcrumbs', None) + breadcrumbs = req.session.data.get('breadcrumbs', None) if breadcrumbs is not None and len(breadcrumbs) > 1: url = req.rebuild_url(breadcrumbs[-2], __message=req._('transaction undoed')) diff -r b619531ddbd2 -r b6e250dd7a7d web/views/basetemplates.py --- a/web/views/basetemplates.py Fri Apr 23 12:40:48 2010 +0200 +++ b/web/views/basetemplates.py Fri Apr 23 12:42:53 2010 +0200 @@ -12,9 +12,9 @@ from logilab.common.deprecation import class_renamed from cubicweb.appobject import objectify_selector -from cubicweb.selectors import match_kwargs +from cubicweb.selectors import match_kwargs, no_cnx from cubicweb.view import View, MainTemplate, NOINDEX, 
NOFOLLOW -from cubicweb.utils import UStringIO, can_do_pdf_conversion +from cubicweb.utils import UStringIO from cubicweb.schema import display_name from cubicweb.web import component, formfields as ff, formwidgets as fw from cubicweb.web.views import forms @@ -78,7 +78,6 @@ return 0 return view.templatable - class NonTemplatableViewTemplate(MainTemplate): """main template for any non templatable views (xml, binaries, etc.)""" __regid__ = 'main-template' @@ -192,9 +191,9 @@ class ErrorTemplate(TheMainTemplate): - """fallback template if an internal error occured during displaying the - main template. This template may be called for authentication error, - which means that req.cnx and req.user may not be set. + """fallback template if an internal error occured during displaying the main + template. This template may be called for authentication error, which means + that req.cnx and req.user may not be set. """ __regid__ = 'error-template' @@ -268,61 +267,6 @@ self.w(u'\n') self.w(u'\n') -if can_do_pdf_conversion(): - try: - from xml.etree.cElementTree import ElementTree - except ImportError: #python2.4 - from elementtree import ElementTree - from subprocess import Popen as sub - from StringIO import StringIO - from tempfile import NamedTemporaryFile - from cubicweb.ext.xhtml2fo import ReportTransformer - - - class PdfViewComponent(component.EntityVComponent): - __regid__ = 'pdfview' - - context = 'ctxtoolbar' - - def cell_call(self, row, col, view): - entity = self.cw_rset.get_entity(row, col) - url = entity.absolute_url(vid=view.__regid__, __template='pdf-main-template') - iconurl = self._cw.build_url('data/pdf_icon.gif') - label = self._cw._('Download page as pdf') - self.w(u'%s' % - (xml_escape(url), label, xml_escape(iconurl), label)) - - class PdfMainTemplate(TheMainTemplate): - __regid__ = 'pdf-main-template' - - def call(self, view): - """build the standard view, then when it's all done, convert xhtml to pdf - """ - super(PdfMainTemplate, self).call(view) - section = self._cw.form.pop('section', 'contentmain') - pdf = self.to_pdf(self._stream, section) - self._cw.set_content_type('application/pdf', filename='report.pdf') - self.binary = True - self.w = None - self.set_stream() - # pylint needs help - self.w(pdf) - - def to_pdf(self, stream, section): - # XXX see ticket/345282 - stream = stream.getvalue().replace(' ', ' ').encode('utf-8') - xmltree = ElementTree() - xmltree.parse(StringIO(stream)) - foptree = ReportTransformer(section).transform(xmltree) - foptmp = NamedTemporaryFile() - pdftmp = NamedTemporaryFile() - foptree.write(foptmp) - foptmp.flush() - fopproc = sub(['/usr/bin/fop', foptmp.name, pdftmp.name]) - fopproc.wait() - pdftmp.seek(0) - pdf = pdftmp.read() - return pdf # page parts templates ######################################################## @@ -405,7 +349,7 @@ self.w(u'') self.w(u'\n') self.w(u'\n') - if self._cw.cnx.anonymous_connection: + if self._cw.session.anonymous_session: self.wview('logform', rset=self.cw_rset, id='popupLoginBox', klass='hidden', title=False, showmessage=False) diff -r b619531ddbd2 -r b6e250dd7a7d web/views/bookmark.py --- a/web/views/bookmark.py Fri Apr 23 12:40:48 2010 +0200 +++ b/web/views/bookmark.py Fri Apr 23 12:42:53 2010 +0200 @@ -112,7 +112,7 @@ else: # we can't edit shared bookmarks we don't own bookmarksrql = 'Bookmark B WHERE B bookmarked_by U, B owned_by U, U eid %(x)s' - erset = req.execute(bookmarksrql, {'x': ueid}, 'x', + erset = req.execute(bookmarksrql, {'x': ueid}, build_descr=False) bookmarksrql %= {'x': ueid} if 
erset: diff -r b619531ddbd2 -r b6e250dd7a7d web/views/cwproperties.py --- a/web/views/cwproperties.py Fri Apr 23 12:40:48 2010 +0200 +++ b/web/views/cwproperties.py Fri Apr 23 12:42:53 2010 +0200 @@ -28,7 +28,6 @@ # groups _('navigation') _('ui') -_('actions') _('boxes') _('components') _('contentnavigation') diff -r b619531ddbd2 -r b6e250dd7a7d web/views/cwuser.py --- a/web/views/cwuser.py Fri Apr 23 12:40:48 2010 +0200 +++ b/web/views/cwuser.py Fri Apr 23 12:42:53 2010 +0200 @@ -1,4 +1,4 @@ -"""Specific views for users +"""Specific views for users and groups :organization: Logilab :copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. @@ -12,6 +12,7 @@ from cubicweb.selectors import one_line_rset, implements, match_user_groups from cubicweb.view import EntityView from cubicweb.web import action, uicfg +from cubicweb.web.views import tabs _pvs = uicfg.primaryview_section _pvs.tag_attribute(('CWUser', 'login'), 'hidden') @@ -71,3 +72,65 @@ if emailaddr: self.w(u'%s\n' % xml_escape(emailaddr)) self.w(u'\n') + + +# group views ################################################################## + +_pvs.tag_object_of(('CWUser', 'in_group', 'CWGroup'), 'hidden') +_pvs.tag_object_of(('*', 'require_group', 'CWGroup'), 'hidden') + + +class CWGroupPrimaryView(tabs.TabbedPrimaryView): + __select__ = implements('CWGroup') + tabs = [_('cwgroup-main'), _('cwgroup-permissions')] + default_tab = 'cwgroup-main' + + +class CWGroupMainTab(tabs.PrimaryTab): + __regid__ = 'cwgroup-main' + __select__ = tabs.PrimaryTab.__select__ & implements('CWGroup') + + def render_entity_attributes(self, entity): + rql = 'Any U, FN, LN, CD, LL ORDERBY L WHERE U in_group G, ' \ + 'U login L, U firstname FN, U surname LN, U creation_date CD, ' \ + 'U last_login_time LL, G eid %(x)s' + rset = self._cw.execute(rql, {'x': entity.eid}) + headers = (_(u'user'), _(u'first name'), _(u'last name'), + _(u'creation date'), _(u'last login time')) + self.wview('editable-table', rset, 'null', displayfilter=True, + displaycols=range(5), mainindex=0, headers=headers) + +class CWGroupPermTab(EntityView): + __regid__ = 'cwgroup-permissions' + __select__ = implements('CWGroup') + + def cell_call(self, row, col): + self._cw.add_css(('cubicweb.schema.css','cubicweb.acl.css')) + access_types = ('read', 'delete', 'add', 'update') + w = self.w + entity = self.cw_rset.get_entity(row, col) + objtype_access = {'CWEType': ('read', 'delete', 'add', 'update'), + 'CWRelation': ('add', 'delete')} + rql_cwetype = 'DISTINCT Any X WHERE X %s_permission CWG, X is CWEType, ' \ + 'CWG eid %%(e)s' + rql_cwrelation = 'DISTINCT Any RT WHERE X %s_permission CWG, X is CWRelation, ' \ + 'X relation_type RT, CWG eid %%(e)s' + self.render_objtype_access(entity, 'CWEType', objtype_access, rql_cwetype) + self.render_objtype_access(entity, 'CWRelation', objtype_access, rql_cwrelation) + + def render_objtype_access(self, entity, objtype, objtype_access, rql): + self.w(u'

%s

' % self._cw._(objtype)) + for access_type in objtype_access[objtype]: + rset = self._cw.execute(rql % access_type, {'e': entity.eid}) + if rset: + self.w(u'
%s:
' % self._cw.__(access_type + '_permission')) + self.w(u'
%s

' % self._cw.view('csv', rset, 'null')) + +class CWGroupInContextView(EntityView): + __regid__ = 'incontext' + __select__ = implements('CWGroup') + + def cell_call(self, row, col): + entity = self.cw_rset.complete_entity(row, col) + self.w(u'%s' % ( + entity.absolute_url(), entity.name, entity.printable_value('name'))) diff -r b619531ddbd2 -r b6e250dd7a7d web/views/editcontroller.py --- a/web/views/editcontroller.py Fri Apr 23 12:40:48 2010 +0200 +++ b/web/views/editcontroller.py Fri Apr 23 12:42:53 2010 +0200 @@ -249,13 +249,13 @@ rql = 'DELETE %s %s %s WHERE X eid %%(x)s, Y eid %%(y)s' % ( subjvar, rschema, objvar) for reid in origvalues.difference(values): - self.relations_rql.append((rql, {'x': eid, 'y': reid}, ('x', 'y'))) + self.relations_rql.append((rql, {'x': eid, 'y': reid})) seteids = values.difference(origvalues) if seteids: rql = 'SET %s %s %s WHERE X eid %%(x)s, Y eid %%(y)s' % ( subjvar, rschema, objvar) for reid in seteids: - self.relations_rql.append((rql, {'x': eid, 'y': reid}, ('x', 'y'))) + self.relations_rql.append((rql, {'x': eid, 'y': reid})) def delete_entities(self, eidtypes): """delete entities from the repository""" diff -r b619531ddbd2 -r b6e250dd7a7d web/views/editviews.py --- a/web/views/editviews.py Fri Apr 23 12:40:48 2010 +0200 +++ b/web/views/editviews.py Fri Apr 23 12:42:53 2010 +0200 @@ -113,5 +113,5 @@ text, data = captcha.captcha(self._cw.vreg.config['captcha-font-file'], self._cw.vreg.config['captcha-font-size']) key = self._cw.form.get('captchakey', 'captcha') - self._cw.set_session_data(key, text) + self._cw.session.data[key] = text self.w(data.read()) diff -r b619531ddbd2 -r b6e250dd7a7d web/views/emailaddress.py --- a/web/views/emailaddress.py Fri Apr 23 12:40:48 2010 +0200 +++ b/web/views/emailaddress.py Fri Apr 23 12:42:53 2010 +0200 @@ -12,8 +12,13 @@ from cubicweb.schema import display_name from cubicweb.selectors import implements from cubicweb import Unauthorized +from cubicweb.web import uicfg from cubicweb.web.views import baseviews, primary +_pvs = uicfg.primaryview_section +_pvs.tag_subject_of(('*', 'use_email', '*'), 'attributes') +_pvs.tag_subject_of(('*', 'primary_email', '*'), 'hidden') + class EmailAddressPrimaryView(primary.PrimaryView): __select__ = implements('EmailAddress') diff -r b619531ddbd2 -r b6e250dd7a7d web/views/iprogress.py --- a/web/views/iprogress.py Fri Apr 23 12:40:48 2010 +0200 +++ b/web/views/iprogress.py Fri Apr 23 12:42:53 2010 +0200 @@ -17,9 +17,9 @@ from cubicweb.interfaces import IProgress, IMileStone from cubicweb.schema import display_name from cubicweb.view import EntityView - +from cubicweb.web.views.tableview import EntityAttributesTableView -class ProgressTableView(EntityView): +class ProgressTableView(EntityAttributesTableView): """The progress table view is able to display progress information of any object implement IMileStone. 
@@ -39,26 +39,13 @@ __regid__ = 'progress_table_view' title = _('task progression') __select__ = implements(IMileStone) + table_css = "progress" + css_files = ('cubicweb.iprogress.css',) # default columns of the table columns = (_('project'), _('milestone'), _('state'), _('eta_date'), _('cost'), _('progress'), _('todo_by')) - - def call(self, columns=None): - """displays all versions in a table""" - self._cw.add_css('cubicweb.iprogress.css') - _ = self._cw._ - self.columns = columns or self.columns - ecls = self._cw.vreg['etypes'].etype_class(self.cw_rset.description[0][0]) - self.w(u'') - self.table_header(ecls) - self.w(u'') - for row in xrange(self.cw_rset.rowcount): - self.cell_call(row=row, col=0) - self.w(u'') - self.w(u'
') - def cell_call(self, row, col): _ = self._cw._ entity = self.cw_rset.get_entity(row, col) @@ -91,20 +78,6 @@ """use entity's type as label""" return display_name(self._cw, ecls.__regid__) - def table_header(self, ecls): - """builds the table's header""" - self.w(u'') - _ = self._cw._ - for column in self.columns: - meth = getattr(self, 'header_for_%s' % column, None) - if meth: - colname = meth(ecls) - else: - colname = _(column) - self.w(u'%s' % xml_escape(colname)) - self.w(u'\n') - - ## cell management ######################################################## def build_project_cell(self, entity): """``project`` column cell renderer""" diff -r b619531ddbd2 -r b6e250dd7a7d web/views/management.py --- a/web/views/management.py Fri Apr 23 12:40:48 2010 +0200 +++ b/web/views/management.py Fri Apr 23 12:42:53 2010 +0200 @@ -16,56 +16,14 @@ from cubicweb.uilib import html_traceback, rest_traceback from cubicweb.web import formwidgets as wdgs from cubicweb.web.formfields import guess_field +from cubicweb.web.views.schema import SecurityViewMixIn + +from yams.buildobjs import EntityType SUBMIT_MSGID = _('Submit bug report') MAIL_SUBMIT_MSGID = _('Submit bug report by mail') - -class SecurityViewMixIn(object): - """display security information for a given schema """ - - def schema_definition(self, eschema, link=True, access_types=None): - w = self.w - _ = self._cw._ - if not access_types: - access_types = eschema.ACTIONS - w(u'') - w(u'' % ( - _("permission"), _('granted to groups'), _('rql expressions'))) - for access_type in access_types: - w(u'') - w(u'' % self._cw.__('%s_perm' % access_type)) - groups = eschema.get_groups(access_type) - l = [] - groups = [(_(group), group) for group in groups] - for trad, group in sorted(groups): - if link: - # XXX we should get a group entity and call its absolute_url - # method - l.append(u'%s
' % ( - self._cw.build_url('cwgroup/%s' % group), group, trad)) - else: - l.append(u'
%s
' % (group, trad)) - w(u'' % u''.join(l)) - rqlexprs = eschema.get_rqlexprs(access_type) - w(u'' % u'

'.join(expr.expression for expr in rqlexprs)) - w(u'\n') - w(u'
%s%s%s
%s%s%s
') - - def has_schema_modified_permissions(self, eschema, access_types): - """ return True if eschema's actual permissions are diffrents - from the default ones - """ - for access_type in access_types: - if eschema.get_rqlexprs(access_type): - return True - if eschema.get_groups(access_type) != \ - frozenset(eschema.get_default_groups()[access_type]): - return True - return False - - -class SecurityManagementView(EntityView, SecurityViewMixIn): +class SecurityManagementView(SecurityViewMixIn, EntityView): """display security information for a given entity""" __regid__ = 'security' __select__ = EntityView.__select__ & authenticated_user() @@ -88,7 +46,7 @@ xml_escape(entity.dc_title()))) # first show permissions defined by the schema self.w('

%s

' % _('schema\'s permissions definitions')) - self.schema_definition(entity.e_schema) + self.permissions_table(entity.e_schema) self.w('

%s

' % _('manage security')) # ownership information if self._cw.vreg.schema.rschema('owned_by').has_perm(self._cw, 'add', @@ -240,7 +198,7 @@ # creates a bug submission link if submit-mail is set if self._cw.vreg.config['submit-mail']: form = self._cw.vreg['forms'].select('base', self._cw, rset=None, - mainform=False) + mainform=False) binfo = text_error_description(ex, excinfo, req, eversion, cversions) form.add_hidden('description', binfo, # we must use a text area to keep line breaks diff -r b619531ddbd2 -r b6e250dd7a7d web/views/primary.py --- a/web/views/primary.py Fri Apr 23 12:40:48 2010 +0200 +++ b/web/views/primary.py Fri Apr 23 12:42:53 2010 +0200 @@ -113,7 +113,7 @@ """default implementation return an empty string""" return u'' - def render_entity_attributes(self, entity, siderelations=None): + def render_entity_attributes(self, entity): display_attributes = [] for rschema, _, role, dispctrl in self._section_def(entity, 'attributes'): vid = dispctrl.get('vid', 'reledit') @@ -139,7 +139,7 @@ self._render_attribute(rschema, value, role=role, table=True) self.w(u'') - def render_entity_relations(self, entity, siderelations=None): + def render_entity_relations(self, entity): for rschema, tschemas, role, dispctrl in self._section_def(entity, 'relations'): rset = self._relation_rset(entity, rschema, role, dispctrl) if rset: @@ -297,18 +297,7 @@ _pvs = uicfg.primaryview_section for rtype in ('eid', 'creation_date', 'modification_date', 'cwuri', - 'is', 'is_instance_of', 'identity', - 'owned_by', 'created_by', 'in_state', - 'wf_info_for', 'by_transition', 'from_state', 'to_state', - 'require_permission', 'from_entity', 'to_entity', - 'see_also'): + 'is', 'is_instance_of', 'identity', 'owned_by', 'created_by', + 'require_permission', 'see_also'): _pvs.tag_subject_of(('*', rtype, '*'), 'hidden') _pvs.tag_object_of(('*', rtype, '*'), 'hidden') - -_pvs.tag_subject_of(('*', 'use_email', '*'), 'attributes') -_pvs.tag_subject_of(('*', 'primary_email', '*'), 'hidden') - -for attr in ('name', 'final'): - _pvs.tag_attribute(('CWEType', attr), 'hidden') -for attr in ('name', 'final', 'symmetric', 'inlined'): - _pvs.tag_attribute(('CWRType', attr), 'hidden') diff -r b619531ddbd2 -r b6e250dd7a7d web/views/schema.py --- a/web/views/schema.py Fri Apr 23 12:40:48 2010 +0200 +++ b/web/views/schema.py Fri Apr 23 12:42:53 2010 +0200 @@ -9,19 +9,20 @@ from itertools import cycle +from logilab.common.ureports import Section, Table from logilab.mtconverter import xml_escape from yams import BASE_TYPES, schema2dot as s2d +from yams.buildobjs import DEFAULT_ATTRPERMS from cubicweb.selectors import (implements, yes, match_user_groups, - has_related_entities) + has_related_entities, authenticated_user) from cubicweb.schema import (META_RTYPES, SCHEMA_TYPES, SYSTEM_RTYPES, WORKFLOW_TYPES, INTERNAL_TYPES) -from cubicweb.schemaviewer import SchemaViewer from cubicweb.view import EntityView, StartupView from cubicweb import tags, uilib -from cubicweb.web import action, facet, uicfg +from cubicweb.web import action, facet, uicfg, schemaviewer from cubicweb.web.views import TmpFileViewMixin -from cubicweb.web.views import primary, baseviews, tabs, management +from cubicweb.web.views import primary, baseviews, tabs, tableview, iprogress ALWAYS_SKIP_TYPES = BASE_TYPES | SCHEMA_TYPES SKIP_TYPES = (ALWAYS_SKIP_TYPES | META_RTYPES | SYSTEM_RTYPES | WORKFLOW_TYPES @@ -34,27 +35,106 @@ return ALWAYS_SKIP_TYPES _pvs = uicfg.primaryview_section +_pvdc = uicfg.primaryview_display_ctrl + for _action in ('read', 'add', 'update', 
'delete'): _pvs.tag_subject_of(('*', '%s_permission' % _action, '*'), 'hidden') _pvs.tag_object_of(('*', '%s_permission' % _action, '*'), 'hidden') +for _etype in ('CWEType', 'CWRType', 'CWAttribute', 'CWRelation'): + _pvdc.tag_attribute((_etype, 'description'), {'showlabel': False}) + +_pvs.tag_attribute(('CWEType', 'name'), 'hidden') +_pvs.tag_attribute(('CWEType', 'final'), 'hidden') +_pvs.tag_object_of(('*', 'workflow_of', 'CWEType'), 'hidden') +_pvs.tag_subject_of(('CWEType', 'default_workflow', '*'), 'hidden') +_pvs.tag_object_of(('*', 'specializes', 'CWEType'), 'hidden') +_pvs.tag_subject_of(('CWEType', 'specializes', '*'), 'hidden') +_pvs.tag_object_of(('*', 'from_entity', 'CWEType'), 'hidden') +_pvs.tag_object_of(('*', 'to_entity', 'CWEType'), 'hidden') + +_pvs.tag_attribute(('CWRType', 'name'), 'hidden') +_pvs.tag_attribute(('CWRType', 'final'), 'hidden') +_pvs.tag_object_of(('*', 'relation_type', 'CWRType'), 'hidden') + +_pvs.tag_subject_of(('CWAttribute', 'constrained_by', '*'), 'hidden') +_pvs.tag_subject_of(('CWRelation', 'constrained_by', '*'), 'hidden') + + +class SecurityViewMixIn(object): + """mixin providing methods to display security information for a entity, + relation or relation definition schema + """ + + def permissions_table(self, erschema, permissions=None): + self._cw.add_css('cubicweb.acl.css') + w = self.w + _ = self._cw._ + w(u'') + w(u'' % ( + _("permission"), _('granted to groups'), _('rql expressions'))) + for action in erschema.ACTIONS: + w(u'\n') + w(u'
%s%s%s
%s' % _(action)) + if permissions is None: + groups = erschema.get_groups(action) + rqlexprs = sorted(e.expression for e in erschema.get_rqlexprs(action)) + else: + groups = permissions[action][0] + rqlexprs = permissions[action][1] + # XXX get group entity and call its incontext view + groups = [u'%s' % ( + group, self._cw.build_url('cwgroup/%s' % group), label) + for group, label in sorted((_(g), g) for g in groups)] + w(u'
'.join(groups)) + w(u'
') + w(u'
'.join(rqlexprs)) + w(u'
') + + def grouped_permissions_table(self, rschema): + # group relation definitions with identical permissions + perms = {} + for rdef in rschema.rdefs.itervalues(): + rdef_perms = [] + for action in ('read', 'add', 'delete'): + groups = sorted(rdef.get_groups(action)) + exprs = sorted(e.expression for e in rdef.get_rqlexprs(action)) + rdef_perms.append( (action, (tuple(groups), tuple(exprs))) ) + rdef_perms = tuple(rdef_perms) + if rdef_perms in perms: + perms[rdef_perms].append( (rdef.subject, rdef.object) ) + else: + perms[rdef_perms] = [(rdef.subject, rdef.object)] + # set layout permissions in a table for each group of relation + # definition + w = self.w + w(u'
') + tmpl = u'%s %s %s' + for perm, rdefs in perms.iteritems(): + w(u'
%s
' % u', '.join( + tmpl % (_(s.type), _(rschema.type), _(o.type)) for s, o in rdefs)) + # accessing rdef from previous loop by design: only used to get + # ACTIONS + self.permissions_table(rdef, dict(perm)) + w(u'
') + + # global schema view ########################################################### class SchemaView(tabs.TabsMixin, StartupView): + """display schema information (graphically, listing tables...) in tabs""" __regid__ = 'schema' title = _('instance schema') - tabs = [_('schema-text'), _('schema-image')] - default_tab = 'schema-text' + tabs = [_('schema-image'), _('schema-entity-types'), + _('schema-relation-types'), _('schema-security')] + default_tab = 'schema-image' def call(self): - """display schema information""" - self._cw.add_js('cubicweb.ajax.js') - self._cw.add_css(('cubicweb.schema.css','cubicweb.acl.css')) self.w(u'

%s

' % _('Schema of the data model')) self.render_tabs(self.tabs, self.default_tab) -class SchemaTabImageView(StartupView): +class SchemaImageTab(StartupView): __regid__ = 'schema-image' def call(self): @@ -62,31 +142,36 @@ u'meta-data, but you can also display a complete ' u'schema with meta-data.
') % xml_escape(self._cw.build_url('view', vid='schemagraph', skipmeta=0))) + self.w(u'' % + (self._cw.build_url('view', vid='owl'), + self._cw._(u'Download schema as OWL'))) self.w(u'%s\n' % ( xml_escape(self._cw.build_url('view', vid='schemagraph', skipmeta=1)), self._cw._("graphical representation of the instance'schema"))) -class SchemaTabTextView(StartupView): - __regid__ = 'schema-text' +class SchemaETypeTab(StartupView): + __regid__ = 'schema-entity-types' def call(self): - rset = self._cw.execute('Any X ORDERBY N WHERE X is CWEType, X name N, ' - 'X final FALSE') - self.wview('table', rset, displayfilter=True) + self.wview('table', self._cw.execute( + 'Any X ORDERBY N WHERE X is CWEType, X name N, X final FALSE')) -class ManagerSchemaPermissionsView(StartupView, management.SecurityViewMixIn): +class SchemaRTypeTab(StartupView): + __regid__ = 'schema-relation-types' + + def call(self): + self.wview('table', self._cw.execute( + 'Any X ORDERBY N WHERE X is CWRType, X name N, X final FALSE')) + + +class SchemaPermissionsTab(SecurityViewMixIn, StartupView): __regid__ = 'schema-security' __select__ = StartupView.__select__ & match_user_groups('managers') def call(self, display_relations=True): - self._cw.add_css('cubicweb.acl.css') skiptypes = skip_types(self._cw) - formparams = {} - formparams['sec'] = self.__regid__ - if not skiptypes: - formparams['skipmeta'] = u'0' schema = self._cw.vreg.schema # compute entities entities = sorted(eschema for eschema in schema.entities() @@ -101,249 +186,422 @@ relations = [] # index _ = self._cw._ - self.w(u'
') - self.w(u'

%s

' % _('index').capitalize()) - self.w(u'

%s

' % _('Entities').capitalize()) + url = xml_escape(self._cw.build_url('schema')) + self.w(u'
') + self.w(u'

%s

' % _('Index')) + self.w(u'

%s

' % _('Entity types')) ents = [] for eschema in sorted(entities): - url = xml_escape(self._cw.build_url('schema', **formparams)) - ents.append(u'
%s (%s)' % ( - url, eschema.type, eschema.type, _(eschema.type))) + ents.append(u'%s' % ( + url, eschema.type, eschema.type)) self.w(u', '.join(ents)) - self.w(u'

%s

' % (_('relations').capitalize())) + self.w(u'

%s

' % _('Relation types')) rels = [] for rschema in sorted(relations): - url = xml_escape(self._cw.build_url('schema', **formparams)) - rels.append(u'%s (%s), ' % ( - url , rschema.type, rschema.type, _(rschema.type))) - self.w(u', '.join(ents)) - # entities - self.display_entities(entities, formparams) - # relations + rels.append(u'%s' % ( + url , rschema.type, rschema.type)) + self.w(u', '.join(rels)) + # permissions tables + self.display_entities(entities) if relations: - self.display_relations(relations, formparams) + self.display_relations(relations) self.w(u'
') - def display_entities(self, entities, formparams): + def has_non_default_perms(self, rdef): + """return true if the given *attribute* relation definition has custom + permission + """ + for action in rdef.ACTIONS: + def_rqlexprs = [] + def_groups = [] + for perm in DEFAULT_ATTRPERMS[action]: + if not isinstance(perm, basestring): + def_rqlexprs.append(perm.expression) + else: + def_groups.append(perm) + rqlexprs = [rql.expression for rql in rdef.get_rqlexprs(action)] + groups = rdef.get_groups(action) + if groups != frozenset(def_groups) or \ + frozenset(rqlexprs) != frozenset(def_rqlexprs): + return True + return False + + def display_entities(self, entities): _ = self._cw._ - self.w(u'') - self.w(u'

%s

' % _('permissions for entities').capitalize()) + url = xml_escape(self._cw.build_url('schema')) + self.w(u'

%s

' % _('Permissions for entity types')) for eschema in entities: - self.w(u'
' % (eschema.type, eschema.type)) - self.w(u'

%s (%s) ' % (eschema.type, _(eschema.type))) - url = xml_escape(self._cw.build_url('schema', **formparams) + '#index') - self.w(u'%s' % ( + self.w(u'

%s (%s) ' % ( + eschema.type, self._cw.build_url('cwetype/%s' % eschema.type), + eschema.type, _(eschema.type))) + self.w(u'%s' % ( url, self._cw.external_resource('UP_ICON'), _('up'))) self.w(u'

') self.w(u'
') - self._cw.vreg.schema_definition(eschema, link=False) + self.permissions_table(eschema) # display entity attributes only if they have some permissions modified modified_attrs = [] for attr, etype in eschema.attribute_definitions(): - if self.has_schema_modified_permissions(attr, attr.ACTIONS): - modified_attrs.append(attr) - if modified_attrs: - self.w(u'

%s

' % _('attributes with modified permissions:').capitalize()) + rdef = eschema.rdef(attr) + if attr not in META_RTYPES and self.has_non_default_perms(rdef): + modified_attrs.append(rdef) + if modified_attrs: + self.w(u'

%s

' % _('Attributes with non default permissions:')) self.w(u'
') self.w(u'
') - for attr in modified_attrs: - self.w(u'

%s (%s)

' % (attr.type, _(attr.type))) - self._cw.vreg.schema_definition(attr, link=False) + for rdef in modified_attrs: + attrtype = str(rdef.rtype) + self.w(u'

%s (%s)

' % (attrtype, _(attrtype))) + self.permissions_table(rdef) self.w(u'
') - def display_relations(self, relations, formparams): + def display_relations(self, relations): _ = self._cw._ - self.w(u'') - self.w(u'

%s

' % _('permissions for relations').capitalize()) + url = xml_escape(self._cw.build_url('schema')) + self.w(u'

%s

' % _('Permissions for relations')) for rschema in relations: - self.w(u'
' % (rschema.type, rschema.type)) - self.w(u'

%s (%s) ' % (rschema.type, _(rschema.type))) - url = xml_escape(self._cw.build_url('schema', **formparams) + '#index') - self.w(u'%s' % ( + self.w(u'

%s (%s) ' % ( + rschema.type, self._cw.build_url('cwrtype/%s' % rschema.type), + rschema.type, _(rschema.type))) + self.w(u'%s' % ( url, self._cw.external_resource('UP_ICON'), _('up'))) self.w(u'

') - self.w(u'
') - subjects = [str(subj) for subj in rschema.subjects()] - self.w(u'
%s %s (%s)
' % ( - _('subject_plural:'), - ', '.join(str(subj) for subj in rschema.subjects()), - ', '.join(_(str(subj)) for subj in rschema.subjects()))) - self.w(u'
%s %s (%s)
' % ( - _('object_plural:'), - ', '.join(str(obj) for obj in rschema.objects()), - ', '.join(_(str(obj)) for obj in rschema.objects()))) - self._cw.vreg.schema_definition(rschema, link=False) - self.w(u'
') - - -class SchemaUreportsView(StartupView): - __regid__ = 'schema-block' - - def call(self): - viewer = SchemaViewer(self._cw) - layout = viewer.visit_schema(self._cw.vreg.schema, display_relations=True, - skiptypes=skip_types(self._cw)) - self.w(uilib.ureport_as_html(layout)) - - -# CWAttribute / CWRelation ##################################################### - -class CWRDEFPrimaryView(primary.PrimaryView): - __select__ = implements('CWAttribute', 'CWRelation') - cache_max_age = 60*60*2 # stay in http cache for 2 hours by default - - def render_entity_title(self, entity): - self.w(u'

%s %s

' - % (entity.dc_type().capitalize(), - xml_escape(entity.dc_long_title()))) + self.grouped_permissions_table(rschema) # CWEType ###################################################################### +# register msgid generated in entity relations tables +_('i18ncard_1'), _('i18ncard_?'), _('i18ncard_+'), _('i18ncard_*') + +class CWETypePrimaryView(tabs.TabbedPrimaryView): + __select__ = implements('CWEType') + tabs = [_('cwetype-description'), _('cwetype-box'), _('cwetype-workflow'), + _('cwetype-views'), _('cwetype-permissions')] + default_tab = 'cwetype-description' + + +class CWETypeDescriptionTab(tabs.PrimaryTab): + __regid__ = 'cwetype-description' + __select__ = tabs.PrimaryTab.__select__ & implements('CWEType') + + def render_entity_attributes(self, entity): + super(CWETypeDescriptionTab, self).render_entity_attributes(entity) + _ = self._cw._ + # inheritance + if entity.specializes: + self.w(u'
%s' % _('Parent classes:')) + self.wview('csv', entity.related('specializes', 'subject')) + self.w(u'
') + if entity.reverse_specializes: + self.w(u'
%s' % _('Sub-classes:')) + self.wview('csv', entity.related('specializes', 'object')) + self.w(u'
') + # entity schema image + self.w(u'%s' % ( + xml_escape(entity.absolute_url(vid='schemagraph')), + xml_escape(_('graphical schema for %s') % entity.name))) + # entity schema attributes + self.w(u'

%s

' % _('CWAttribute_plural')) + rset = self._cw.execute( + 'Any A,ON,D,C,A,DE,A, IDX,FTI,I18N,R,O,RN,S ORDERBY AA ' + 'WHERE A is CWAttribute, A from_entity S, S eid %(x)s, ' + 'A ordernum AA, A defaultval D, A description DE, A cardinality C, ' + 'A fulltextindexed FTI, A internationalizable I18N, A indexed IDX, ' + 'A relation_type R, R name RN, A to_entity O, O name ON', + {'x': entity.eid}) + self.wview('table', rset, 'null', + cellvids={0: 'rdef-name-cell', + 3: 'etype-attr-cardinality-cell', + 4: 'rdef-constraints-cell', + 6: 'rdef-options-cell'}, + headers=(_(u'name'), _(u'type'), + _(u'default value'), _(u'required'), + _(u'constraints'), _(u'description'), _('options'))) + # entity schema relations + self.w(u'

%s

' % _('CWRelation_plural')) + cellvids = {0: 'rdef-name-cell', + 2: 'etype-rel-cardinality-cell', + 3: 'rdef-constraints-cell', + 4: 'rdef-options-cell'} + headers= [_(u'name'), _(u'object type'), _(u'cardinality'), + _(u'constraints'), _(u'options')] + rset = self._cw.execute( + 'Any A,TT,"i18ncard_"+SUBSTRING(C,1,1),A,A, K,TTN,R,RN ORDERBY RN ' + 'WHERE A is CWRelation, A from_entity S, S eid %(x)s, ' + 'A composite K, A cardinality C, ' + 'A relation_type R, R name RN, A to_entity TT, TT name TTN', + {'x': entity.eid}) + if rset: + self.w(u'
%s %s
' % (entity.name, _('is subject of:'))) + self.wview('table', rset, cellvids=cellvids, headers=headers) + rset = self._cw.execute( + 'Any A,TT,"i18ncard_"+SUBSTRING(C,1,1),A,A, K,TTN,R,RN ORDERBY RN ' + 'WHERE A is CWRelation, A to_entity O, O eid %(x)s, ' + 'A composite K, A cardinality C, ' + 'A relation_type R, R name RN, A from_entity TT, TT name TTN', + {'x': entity.eid}) + if rset: + cellvids[0] = 'rdef-object-name-cell' + headers[1] = _(u'subject type') + self.w(u'
%s %s
' % (entity.name, _('is object of:'))) + self.wview('table', rset, cellvids=cellvids, headers=headers) + + +class CWETypeAttributeCardinalityCell(baseviews.FinalView): + __regid__ = 'etype-attr-cardinality-cell' + + def cell_call(self, row, col): + if self.cw_rset.rows[row][col][0] == '1': + self.w(self._cw._(u'yes')) + else: + self.w(self._cw._(u'no')) + + +class CWETypeRelationCardinalityCell(baseviews.FinalView): + __regid__ = 'etype-rel-cardinality-cell' + + def cell_call(self, row, col): + self.w(self._cw._(self.cw_rset.rows[row][col])) + + +class CWETypeBoxTab(EntityView): + __regid__ = 'cwetype-box' + __select__ = implements('CWEType') + + def cell_call(self, row, col): + viewer = schemaviewer.SchemaViewer(self._cw) + entity = self.cw_rset.get_entity(row, col) + eschema = self._cw.vreg.schema.eschema(entity.name) + layout = viewer.visit_entityschema(eschema) + self.w(uilib.ureport_as_html(layout)) + self.w(u'
') + + +class CWETypePermTab(SecurityViewMixIn, EntityView): + __regid__ = 'cwetype-permissions' + __select__ = implements('CWEType') & authenticated_user() + + def cell_call(self, row, col): + entity = self.cw_rset.get_entity(row, col) + eschema = self._cw.vreg.schema.eschema(entity.name) + self.w(u'

%s

' % _('This entity type permissions:').capitalize()) + self.permissions_table(eschema) + self.w(u'
') + self.w(u'

%s

' % _('Attributes permissions:').capitalize()) + for attr, etype in eschema.attribute_definitions(): + if attr not in META_RTYPES: + rdef = eschema.rdef(attr) + attrtype = str(rdef.rtype) + self.w(u'

%s (%s)

' % (attrtype, _(attrtype))) + self.permissions_table(rdef) + self.w(u'
') + + +class CWETypeWorkflowTab(EntityView): + __regid__ = 'cwetype-workflow' + __select__ = (implements('CWEType') + & has_related_entities('workflow_of', 'object')) + + def cell_call(self, row, col): + entity = self.cw_rset.get_entity(row, col) + if entity.default_workflow: + wf = entity.default_workflow[0] + if len(entity.reverse_workflow_of) > 1: + self.w(u'

%s (%s)

' + % (wf.name, self._cw._('default_workflow'))) + self.display_workflow(wf) + defaultwfeid = wf.eid + else: + self.w(u'
%s
' + % self._cw._('There is no default workflow')) + defaultwfeid = None + for altwf in entity.reverse_workflow_of: + if altwf.eid == defaultwfeid: + continue + self.w(u'

%s

' % altwf.name) + self.display_workflow(altwf) + + def display_workflow(self, wf): + self.w(wf.view('wfgraph')) + self.w('%s' % ( + wf.absolute_url(), self._cw._('more info about this workflow'))) + + +class CWETypeViewsTab(EntityView): + """possible views for this entity type""" + __regid__ = 'cwetype-views' + __select__ = EntityView.__select__ & implements('CWEType') + + def cell_call(self, row, col): + entity = self.cw_rset.get_entity(row, col) + _ = self._cw._ + self.w('
%s
' % _('Non exhaustive list of views that may ' + 'apply to entities of this type')) + views = [(view.content_type, view.__regid__, _(view.title)) + for view in self.possible_views(entity.name)] + self.wview('pyvaltable', pyvalue=sorted(views), + headers=(_(u'content type'), _(u'view identifier'), + _(u'view title'))) + + def possible_views(self, etype): + rset = self._cw.etype_rset(etype) + return [v for v in self._cw.vreg['views'].possible_views(self._cw, rset) + if v.category != 'startupview'] + + class CWETypeOneLineView(baseviews.OneLineView): __select__ = implements('CWEType') def cell_call(self, row, col, **kwargs): entity = self.cw_rset.get_entity(row, col) - final = entity.final - if final: + if entity.final: self.w(u'') super(CWETypeOneLineView, self).cell_call(row, col, **kwargs) - if final: + if entity.final: self.w(u'') -class CWETypePrimaryView(tabs.TabsMixin, primary.PrimaryView): - __select__ = implements('CWEType') - title = _('in memory entity schema') - main_related_section = False - tabs = [_('cwetype-schema-text'), _('cwetype-schema-image'), - _('cwetype-schema-permissions'), _('cwetype-workflow')] - default_tab = 'cwetype-schema-text' - - def render_entity(self, entity): - self.render_entity_title(entity) - self.w(u'
%s
' % entity.description) - self.render_tabs(self.tabs, self.default_tab, entity) - - -class CWETypeSTextView(EntityView): - __regid__ = 'cwetype-schema-text' - __select__ = EntityView.__select__ & implements('CWEType') - - def cell_call(self, row, col): - entity = self.cw_rset.get_entity(row, col) - self.w(u'

%s

' % _('Attributes')) - rset = self._cw.execute('Any N,F,D,I,J,DE,A ' - 'ORDERBY AA WHERE A is CWAttribute, ' - 'A ordernum AA, A defaultval D, ' - 'A description DE, ' - 'A fulltextindexed I, A internationalizable J, ' - 'A relation_type R, R name N, ' - 'A to_entity O, O name F, ' - 'A from_entity S, S eid %(x)s', - {'x': entity.eid}) - self.wview('editable-table', rset, 'null', displayfilter=True) - self.w(u'

%s

' % _('Relations')) - rset = self._cw.execute( - 'Any R,C,TT,K,D,A,RN,TTN ORDERBY RN ' - 'WHERE A is CWRelation, A description D, A composite K, ' - 'A relation_type R, R name RN, A to_entity TT, TT name TTN, ' - 'A cardinality C, A from_entity S, S eid %(x)s', - {'x': entity.eid}) - self.wview('editable-table', rset, 'null', displayfilter=True, - displaycols=range(6), mainindex=5) - rset = self._cw.execute( - 'Any R,C,TT,K,D,A,RN,TTN ORDERBY RN ' - 'WHERE A is CWRelation, A description D, A composite K, ' - 'A relation_type R, R name RN, A from_entity TT, TT name TTN, ' - 'A cardinality C, A to_entity O, O eid %(x)s', - {'x': entity.eid}) - self.wview('editable-table', rset, 'null', displayfilter=True, - displaycols=range(6), mainindex=5) - - -class CWETypeSImageView(EntityView): - __regid__ = 'cwetype-schema-image' - __select__ = EntityView.__select__ & implements('CWEType') - - def cell_call(self, row, col): - entity = self.cw_rset.get_entity(row, col) - url = entity.absolute_url(vid='schemagraph') - self.w(u'%s' % ( - xml_escape(url), - xml_escape(self._cw._('graphical schema for %s') % entity.name))) - - -class CWETypeSPermView(EntityView): - __regid__ = 'cwetype-schema-permissions' - __select__ = EntityView.__select__ & implements('CWEType') - - def cell_call(self, row, col): - entity = self.cw_rset.get_entity(row, col) - _ = self._cw._ - self.w(u'

%s

' % _('Add permissions')) - rset = self._cw.execute('Any P WHERE X add_permission P, ' - 'X eid %(x)s', - {'x': entity.eid}) - self.wview('outofcontext', rset, 'null') - self.w(u'

%s

' % _('Read permissions')) - rset = self._cw.execute('Any P WHERE X read_permission P, ' - 'X eid %(x)s', - {'x': entity.eid}) - self.wview('outofcontext', rset, 'null') - self.w(u'

%s

' % _('Update permissions')) - rset = self._cw.execute('Any P WHERE X update_permission P, ' - 'X eid %(x)s', - {'x': entity.eid}) - self.wview('outofcontext', rset, 'null') - self.w(u'

%s

' % _('Delete permissions')) - rset = self._cw.execute('Any P WHERE X delete_permission P, ' - 'X eid %(x)s', - {'x': entity.eid}) - self.wview('outofcontext', rset, 'null') - - -class CWETypeSWorkflowView(EntityView): - __regid__ = 'cwetype-workflow' - __select__ = (EntityView.__select__ & implements('CWEType') & - has_related_entities('workflow_of', 'object')) - - def cell_call(self, row, col): - entity = self.cw_rset.get_entity(row, col) - if entity.default_workflow: - wf = entity.default_workflow[0] - self.w(u'

%s (%s)

' % (wf.name, self._cw._('default'))) - self.wf_image(wf) - for altwf in entity.reverse_workflow_of: - if altwf.eid == wf.eid: - continue - self.w(u'

%s

' % altwf.name) - self.wf_image(altwf) - - def wf_image(self, wf): - self.w(u'%s' % ( - xml_escape(wf.absolute_url(vid='wfgraph')), - xml_escape(self._cw._('graphical representation of %s') % wf.name))) - - # CWRType ###################################################################### -class CWRTypeSchemaView(primary.PrimaryView): +class CWRTypePrimaryView(tabs.TabbedPrimaryView): + __select__ = implements('CWRType') + tabs = [_('cwrtype-description'), _('cwrtype-permissions')] + default_tab = 'cwrtype-description' + + +class CWRTypeDescriptionTab(tabs.PrimaryTab): + __regid__ = 'cwrtype-description' __select__ = implements('CWRType') - title = _('in memory relation schema') - main_related_section = False + + def render_entity_attributes(self, entity): + super(CWRTypeDescriptionTab, self).render_entity_attributes(entity) + _ = self._cw._ + if not entity.final: + msg = _('graphical schema for %s') % entity.name + self.w(tags.img(src=entity.absolute_url(vid='schemagraph'), + alt=msg)) + rset = self._cw.execute('Any R,C,R,R, RT WHERE ' + 'R relation_type RT, RT eid %(x)s, ' + 'R cardinality C', {'x': entity.eid}) + self.wview('table', rset, 'null', + headers=(_(u'relation'), _(u'cardinality'), _(u'constraints'), + _(u'options')), + cellvids={2: 'rdef-constraints-cell', + 3: 'rdef-options-cell'}) + + +class CWRTypePermTab(SecurityViewMixIn, EntityView): + __regid__ = 'cwrtype-permissions' + __select__ = implements('CWRType') & authenticated_user() + + def cell_call(self, row, col): + entity = self.cw_rset.get_entity(row, col) + rschema = self._cw.vreg.schema.rschema(entity.name) + self.grouped_permissions_table(rschema) + + +# CWAttribute / CWRelation ##################################################### + +class RDEFPrimaryView(tabs.TabbedPrimaryView): + __select__ = implements('CWRelation', 'CWAttribute') + tabs = [_('rdef-description'), _('rdef-permissions')] + default_tab = 'rdef-description' + + +class RDEFDescriptionTab(tabs.PrimaryTab): + __regid__ = 'rdef-description' + __select__ = implements('CWRelation', 'CWAttribute') def render_entity_attributes(self, entity): - super(CWRTypeSchemaView, self).render_entity_attributes(entity) - rschema = self._cw.vreg.schema.rschema(entity.name) - viewer = SchemaViewer(self._cw) - layout = viewer.visit_relationschema(rschema, title=False) - self.w(uilib.ureport_as_html(layout)) - if not rschema.final: - msg = self._cw._('graphical schema for %s') % entity.name - self.w(tags.img(src=entity.absolute_url(vid='schemagraph'), - alt=msg)) + super(RDEFDescriptionTab, self).render_entity_attributes(entity) + rdef = entity.yams_schema() + if rdef.constraints: + self.w(u'

%s

' % self._cw._('constrained_by')) + self.w(entity.view('rdef-constraints-cell')) + + +class RDEFPermTab(SecurityViewMixIn, EntityView): + __regid__ = 'rdef-permissions' + __select__ = implements('CWRelation', 'CWAttribute') & authenticated_user() + + def cell_call(self, row, col): + self.permissions_table(self.cw_rset.get_entity(row, col).yams_schema()) + + +class RDEFNameView(tableview.CellView): + """display relation name and its translation only in a cell view, link to + relation definition's primary view (for use in entity type relations table + for instance) + """ + __regid__ = 'rdef-name-cell' + __select__ = implements('CWRelation', 'CWAttribute') + + def cell_call(self, row, col): + entity = self.cw_rset.get_entity(row, col) + rtype = entity.relation_type[0].name + # XXX use context entity + pgettext + self.w(u'%s (%s)' % ( + entity.absolute_url(), rtype, self._cw._(rtype))) + +class RDEFObjectNameView(tableview.CellView): + """same as RDEFNameView but when the context is the object entity + """ + __regid__ = 'rdef-object-name-cell' + __select__ = implements('CWRelation', 'CWAttribute') + + def cell_call(self, row, col): + entity = self.cw_rset.get_entity(row, col) + rtype = entity.relation_type[0].name + # XXX use context entity + pgettext + self.w(u'%s (%s)' % ( + entity.absolute_url(), rtype, self._cw.__(rtype + '_object'))) + +class RDEFConstraintsCell(EntityView): + __regid__ = 'rdef-constraints-cell' + __select__ = implements('CWAttribute', 'CWRelation') + + def cell_call(self, row, col): + entity = self.cw_rset.get_entity(row, col) + rschema = self._cw.vreg.schema.rschema(entity.rtype.name) + rdef = rschema.rdefs[(entity.stype.name, entity.otype.name)] + constraints = [xml_escape(str(c)) for c in getattr(rdef, 'constraints')] + self.w(u'
'.join(constraints)) + +class CWAttributeOptionsCell(EntityView): + __regid__ = 'rdef-options-cell' + __select__ = implements('CWAttribute') + + def cell_call(self, row, col): + entity = self.cw_rset.get_entity(row, col) + options = [] + if entity.indexed: + options.append(self._cw._('indexed')) + if entity.fulltextindexed: + options.append(self._cw._('fulltextindexed')) + if entity.internationalizable: + options.append(self._cw._('internationalizable')) + self.w(u','.join(options)) + +class CWRelationOptionsCell(EntityView): + __regid__ = 'rdef-options-cell' + __select__ = implements('CWRelation',) + + def cell_call(self, row, col): + entity = self.cw_rset.get_entity(row, col) + rtype = entity.rtype + options = [] + if rtype.symmetric: + options.append(self._cw._('symmetric')) + if rtype.inlined: + options.append(self._cw._('inlined')) + if rtype.fulltext_container: + options.append('%s=%s' % (self._cw._('fulltext_container'), + self._cw._(rtype.fulltext_container))) + if entity.composite: + options.append('%s=%s' % (self._cw._('composite'), + self._cw._(entity.composite))) + self.w(u','.join(options)) # schema images ############################################################### diff -r b619531ddbd2 -r b6e250dd7a7d web/views/sessions.py --- a/web/views/sessions.py Fri Apr 23 12:40:48 2010 +0200 +++ b/web/views/sessions.py Fri Apr 23 12:42:53 2010 +0200 @@ -10,6 +10,7 @@ from cubicweb.web import InvalidSession from cubicweb.web.application import AbstractSessionManager +from cubicweb.dbapi import DBAPISession class InMemoryRepositorySessionManager(AbstractSessionManager): @@ -40,26 +41,28 @@ if self.has_expired(session): self.close_session(session) raise InvalidSession() - # give an opportunity to auth manager to hijack the session (necessary - # with the RepositoryAuthenticationManager in case the connection to the - # repository has expired) try: - session = self.authmanager.validate_session(req, session) - # necessary in case session has been hijacked - self._sessions[session.sessionid] = session + user = self.authmanager.validate_session(req, session) except InvalidSession: # invalid session - del self._sessions[sessionid] + self.close_session(session) raise + # associate the connection to the current request + req.set_session(session, user) return session def open_session(self, req): - """open and return a new session for the given request + """open and return a new session for the given request. The session is + also bound to the request. 
- :raise ExplicitLogin: if authentication is required + raise :exc:`cubicweb.AuthenticationError` if authentication failed + (no authentication info found or wrong user/password) """ - session = self.authmanager.authenticate(req) + cnx, login, authinfo = self.authmanager.authenticate(req) + session = DBAPISession(cnx, login, authinfo) self._sessions[session.sessionid] = session + # associate the connection to the current request + req.set_session(session) return session def close_session(self, session): @@ -69,8 +72,9 @@ self.info('closing http session %s' % session) del self._sessions[session.sessionid] try: - session.close() + session.cnx.close() except: # already closed, may occurs if the repository session expired but # not the web session pass + session.cnx = None diff -r b619531ddbd2 -r b6e250dd7a7d web/views/tableview.py --- a/web/views/tableview.py Fri Apr 23 12:40:48 2010 +0200 +++ b/web/views/tableview.py Fri Apr 23 12:42:53 2010 +0200 @@ -96,10 +96,12 @@ continue return None - def displaycols(self, displaycols): + def displaycols(self, displaycols, headers): if displaycols is None: if 'displaycols' in self._cw.form: displaycols = [int(idx) for idx in self._cw.form['displaycols']] + elif headers is not None: + displaycols = range(len(headers)) else: displaycols = range(len(self.cw_rset.syntax_tree().children[0].selection)) return displaycols @@ -136,7 +138,7 @@ hidden = False if displayactions is None and 'displayactions' in req.form: displayactions = True - displaycols = self.displaycols(displaycols) + displaycols = self.displaycols(displaycols, headers) fromformfilter = 'fromformfilter' in req.form # if fromformfilter is true, this is an ajax call and we only want to # replace the inner div, so don't regenerate everything under the if @@ -306,7 +308,7 @@ """Dumps a table displaying a composite query""" actrql = self._cw.form['actualrql'] self._cw.ensure_ro_rql(actrql) - displaycols = self.displaycols(displaycols) + displaycols = self.displaycols(displaycols, headers) if displayactions is None and 'displayactions' in self._cw.form: displayactions = True if divid is None and 'divid' in self._cw.form: @@ -335,3 +337,67 @@ class EditableInitialTableTableView(InitialTableView): __regid__ = 'editable-initialtable' finalview = 'editable-final' + + +class EntityAttributesTableView(EntityView): + """This table displays entity attributes in a table and allow to set a + specific method to help building cell content for each attribute as well as + column header. + + Table will render entity cell by using the appropriate build_COLNAME_cell + methods if defined otherwise cell content will be entity.COLNAME. + + Table will render column header using the method header_for_COLNAME if + defined otherwise COLNAME will be used. + """ + __abstract__ = True + columns = () + table_css = "listing" + css_files = () + + def call(self, columns=None): + if self.css_files: + self._cw.add_css(self.css_files) + _ = self._cw._ + self.columns = columns or self.columns + ecls = self._cw.vreg['etypes'].etype_class(self.cw_rset.description[0][0]) + self.w(u'' % self.table_css) + self.table_header(ecls) + self.w(u'') + for row in xrange(self.cw_rset.rowcount): + self.cell_call(row=row, col=0) + self.w(u'') + self.w(u'
') + + def cell_call(self, row, col): + _ = self._cw._ + entity = self.cw_rset.get_entity(row, col) + infos = {} + for col in self.columns: + meth = getattr(self, 'build_%s_cell' % col, None) + # find the build method or try to find matching attribute + if meth: + content = meth(entity) + else: + content = entity.printable_value(col) + infos[col] = content + self.w(u"""""") + line = u''.join(u'%%(%s)s' % col for col in self.columns) + self.w(line % infos) + self.w(u'\n') + + def table_header(self, ecls): + """builds the table's header""" + self.w(u'') + _ = self._cw._ + for column in self.columns: + meth = getattr(self, 'header_for_%s' % column, None) + if meth: + colname = meth(ecls) + else: + colname = _(column) + self.w(u'%s' % xml_escape(colname)) + self.w(u'\n') + + diff -r b619531ddbd2 -r b6e250dd7a7d web/views/urlpublishing.py --- a/web/views/urlpublishing.py Fri Apr 23 12:40:48 2010 +0200 +++ b/web/views/urlpublishing.py Fri Apr 23 12:42:53 2010 +0200 @@ -129,8 +129,7 @@ if len(parts) != 1: raise PathDontMatch() try: - rset = req.execute('Any X WHERE X eid %(x)s', - {'x': typed_eid(parts[0])}, 'x') + rset = req.execute('Any X WHERE X eid %(x)s', {'x': typed_eid(parts[0])}) except ValueError: raise PathDontMatch() if rset.rowcount == 0: @@ -177,7 +176,7 @@ rql = u'Any X WHERE X is %s, X %s %%(x)s' % (etype, attrname) if attrname == 'eid': try: - rset = req.execute(rql, {'x': typed_eid(value)}, 'x') + rset = req.execute(rql, {'x': typed_eid(value)}) except (ValueError, TypeResolverException): # conflicting eid/type raise PathDontMatch() diff -r b619531ddbd2 -r b6e250dd7a7d web/views/workflow.py --- a/web/views/workflow.py Fri Apr 23 12:40:48 2010 +0200 +++ b/web/views/workflow.py Fri Apr 23 12:42:53 2010 +0200 @@ -11,6 +11,9 @@ __docformat__ = "restructuredtext en" _ = unicode +import tempfile +import os + from logilab.mtconverter import xml_escape from logilab.common.graph import escape, GraphGenerator, DotBackend @@ -24,11 +27,17 @@ from cubicweb.web import uicfg, stdmsgs, action, component, form, action from cubicweb.web import formfields as ff, formwidgets as fwdgs from cubicweb.web.views import TmpFileViewMixin, forms, primary, autoform +from cubicweb.web.views.tabs import TabbedPrimaryView, PrimaryTab _pvs = uicfg.primaryview_section _pvs.tag_subject_of(('Workflow', 'initial_state', '*'), 'hidden') _pvs.tag_object_of(('*', 'state_of', 'Workflow'), 'hidden') _pvs.tag_object_of(('*', 'transition_of', 'Workflow'), 'hidden') +_pvs.tag_object_of(('*', 'wf_info_for', '*'), 'hidden') +for rtype in ('in_state', 'by_transition', 'from_state', 'to_state'): + _pvs.tag_subject_of(('*', rtype, '*'), 'hidden') + _pvs.tag_object_of(('*', rtype, '*'), 'hidden') +_pvs.tag_object_of(('*', 'wf_info_for', '*'), 'hidden') _abaa = uicfg.actionbox_appearsin_addmenu _abaa.tag_subject_of(('BaseTransition', 'condition', 'RQLExpression'), False) @@ -129,7 +138,7 @@ headers = (_('from_state'), _('to_state'), _('comment'), _('date')) rql = '%s %s, X eid %%(x)s' % (sel, rql) try: - rset = self._cw.execute(rql, {'x': eid}, 'x') + rset = self._cw.execute(rql, {'x': eid}) except Unauthorized: return if rset: @@ -187,6 +196,7 @@ _pvs.tag_subject_of(('Workflow', 'initial_state', '*'), 'hidden') _pvs.tag_object_of(('*', 'state_of', 'Workflow'), 'hidden') _pvs.tag_object_of(('*', 'transition_of', 'Workflow'), 'hidden') +_pvs.tag_object_of(('*', 'default_workflow', 'Workflow'), 'hidden') _abaa = uicfg.actionbox_appearsin_addmenu _abaa.tag_subject_of(('BaseTransition', 'condition', 'RQLExpression'), False) @@ 
diff -r b619531ddbd2 -r b6e250dd7a7d web/views/urlpublishing.py
--- a/web/views/urlpublishing.py Fri Apr 23 12:40:48 2010 +0200
+++ b/web/views/urlpublishing.py Fri Apr 23 12:42:53 2010 +0200
@@ -129,8 +129,7 @@
         if len(parts) != 1:
             raise PathDontMatch()
         try:
-            rset = req.execute('Any X WHERE X eid %(x)s',
-                               {'x': typed_eid(parts[0])}, 'x')
+            rset = req.execute('Any X WHERE X eid %(x)s', {'x': typed_eid(parts[0])})
         except ValueError:
             raise PathDontMatch()
         if rset.rowcount == 0:
@@ -177,7 +176,7 @@
         rql = u'Any X WHERE X is %s, X %s %%(x)s' % (etype, attrname)
         if attrname == 'eid':
             try:
-                rset = req.execute(rql, {'x': typed_eid(value)}, 'x')
+                rset = req.execute(rql, {'x': typed_eid(value)})
             except (ValueError, TypeResolverException):
                 # conflicting eid/type
                 raise PathDontMatch()
diff -r b619531ddbd2 -r b6e250dd7a7d web/views/workflow.py
--- a/web/views/workflow.py Fri Apr 23 12:40:48 2010 +0200
+++ b/web/views/workflow.py Fri Apr 23 12:42:53 2010 +0200
@@ -11,6 +11,9 @@
 __docformat__ = "restructuredtext en"
 _ = unicode
 
+import tempfile
+import os
+
 from logilab.mtconverter import xml_escape
 from logilab.common.graph import escape, GraphGenerator, DotBackend
 
@@ -24,11 +27,17 @@
 from cubicweb.web import uicfg, stdmsgs, action, component, form, action
 from cubicweb.web import formfields as ff, formwidgets as fwdgs
 from cubicweb.web.views import TmpFileViewMixin, forms, primary, autoform
+from cubicweb.web.views.tabs import TabbedPrimaryView, PrimaryTab
 
 _pvs = uicfg.primaryview_section
 _pvs.tag_subject_of(('Workflow', 'initial_state', '*'), 'hidden')
 _pvs.tag_object_of(('*', 'state_of', 'Workflow'), 'hidden')
 _pvs.tag_object_of(('*', 'transition_of', 'Workflow'), 'hidden')
+_pvs.tag_object_of(('*', 'wf_info_for', '*'), 'hidden')
+for rtype in ('in_state', 'by_transition', 'from_state', 'to_state'):
+    _pvs.tag_subject_of(('*', rtype, '*'), 'hidden')
+    _pvs.tag_object_of(('*', rtype, '*'), 'hidden')
+_pvs.tag_object_of(('*', 'wf_info_for', '*'), 'hidden')
 
 _abaa = uicfg.actionbox_appearsin_addmenu
 _abaa.tag_subject_of(('BaseTransition', 'condition', 'RQLExpression'), False)
@@ -129,7 +138,7 @@
         headers = (_('from_state'), _('to_state'), _('comment'), _('date'))
         rql = '%s %s, X eid %%(x)s' % (sel, rql)
         try:
-            rset = self._cw.execute(rql, {'x': eid}, 'x')
+            rset = self._cw.execute(rql, {'x': eid})
         except Unauthorized:
             return
         if rset:
@@ -187,6 +196,7 @@
 _pvs.tag_subject_of(('Workflow', 'initial_state', '*'), 'hidden')
 _pvs.tag_object_of(('*', 'state_of', 'Workflow'), 'hidden')
 _pvs.tag_object_of(('*', 'transition_of', 'Workflow'), 'hidden')
+_pvs.tag_object_of(('*', 'default_workflow', 'Workflow'), 'hidden')
 
 _abaa = uicfg.actionbox_appearsin_addmenu
 _abaa.tag_subject_of(('BaseTransition', 'condition', 'RQLExpression'), False)
@@ -198,14 +208,10 @@
 _abaa.tag_object_of(('Transition', 'transition_of', 'Workflow'), True)
 _abaa.tag_object_of(('WorkflowTransition', 'transition_of', 'Workflow'), True)
 
-class WorkflowPrimaryView(primary.PrimaryView):
+class WorkflowPrimaryView(TabbedPrimaryView):
     __select__ = implements('Workflow')
-
-    def render_entity_attributes(self, entity):
-        self.w(entity.view('reledit', rtype='description'))
-        self.w(u'<img src="%s" alt="%s"/>' % (
-            xml_escape(entity.absolute_url(vid='wfgraph')),
-            xml_escape(self._cw._('graphical workflow for %s') % entity.name)))
+    tabs = [ _('wf_tab_info'), _('wfgraph'),]
+    default_tab = 'wf_tab_info'
 
 
 class CellView(view.EntityView):
@@ -225,6 +231,59 @@
         self.w(xml_escape(self._cw.view('textincontext', self.cw_rset,
                                         row=row, col=col)))
 
+
+class WorkflowTabTextView(PrimaryTab):
+    __regid__ = 'wf_tab_info'
+    __select__ = PrimaryTab.__select__ & one_line_rset() & implements('Workflow')
+
+    def render_entity_attributes(self, entity):
+        _ = self._cw._
+        self.w(u'<div>%s</div>' % (entity.printable_value('description')))
+        self.w(u'%s%s' % (_("workflow_of").capitalize(), _(" :")))
+        html = []
+        for e in entity.workflow_of:
+            view = e.view('outofcontext')
+            if entity.eid == e.default_workflow[0].eid:
+                view += u' [%s]' % _('default_workflow')
+            html.append(view)
+        self.w(', '.join(v for v in html))
+        self.w(u'<h2>%s</h2>' % _("Transition_plural"))
+        rset = self._cw.execute(
+            'Any T,T,DS,T,TT ORDERBY TN WHERE T transition_of WF, WF eid %(x)s,'
+            'T type TT, T name TN, T destination_state DS?', {'x': entity.eid})
+        self.wview('editable-table', rset, 'null',
+                   cellvids={ 1: 'trfromstates', 2: 'outofcontext', 3:'trsecurity',},
+                   headers = (_('Transition'), _('from_state'),
+                              _('to_state'), _('permissions'), _('type') ),
+                   )
+
+
+class TransitionSecurityTextView(view.EntityView):
+    __regid__ = 'trsecurity'
+    __select__ = implements('Transition')
+
+    def cell_call(self, row, col):
+        _ = self._cw._
+        entity = self.cw_rset.get_entity(self.cw_row, self.cw_col)
+        if entity.require_group:
+            self.w(u'<div>%s%s %s</div>' %
+                   (_('groups'), _(" :"),
+                    u', '.join((g.view('incontext') for g
+                                in entity.require_group))))
+        if entity.condition:
+            self.w(u'<div>%s%s %s</div>' %
+                   ( _('conditions'), _(" :"),
+                    u'<br/>'.join((e.dc_title() for e
+                                   in entity.condition))))
+
+
+class TransitionAllowedTextView(view.EntityView):
+    __regid__ = 'trfromstates'
+    __select__ = implements('Transition')
+
+    def cell_call(self, row, col):
+        entity = self.cw_rset.get_entity(self.cw_row, self.cw_col)
+        self.w(u', '.join((e.view('outofcontext') for e
+                           in entity.reverse_allowed_transition)))
+
 
 # workflow entity types edition ################################################
 
@@ -284,24 +343,18 @@
     def node_properties(self, stateortransition):
        """return default DOT drawing options for a state or transition"""
        props = {'label': stateortransition.printable_value('name'),
-                'fontname': 'Courier'}
+                'fontname': 'Courier', 'fontsize':10,
+                'href': stateortransition.absolute_url(),
+                }
        if hasattr(stateortransition, 'state_of'):
            props['shape'] = 'box'
            props['style'] = 'filled'
            if stateortransition.reverse_initial_state:
-                props['color'] = '#88CC88'
+                props['fillcolor'] = '#88CC88'
        else:
            props['shape'] = 'ellipse'
            descr = []
            tr = stateortransition
-            if tr.require_group:
-                descr.append('%s %s'% (
-                    self._('groups:'),
-                    ','.join(g.printable_value('name') for g in tr.require_group)))
-            if tr.condition:
-                descr.append('%s %s'% (
-                    self._('condition:'),
-                    ' | '.join(e.expression for e in tr.condition)))
            if descr:
                props['label'] += escape('\n'.join(descr))
        return props
@@ -331,17 +384,39 @@
             yield transition.eid, outgoingstate.eid, transition
 
 
+class WorkflowGraphView(view.EntityView):
+    __regid__ = 'wfgraph'
+    __select__ = EntityView.__select__ & one_line_rset() & implements('Workflow')
+
+    def cell_call(self, row, col):
+        entity = self.cw_rset.get_entity(row, col)
+        visitor = WorkflowVisitor(entity)
+        prophdlr = WorkflowDotPropsHandler(self._cw)
+        wfname = 'workflow%s' % str(entity.eid)
+        generator = GraphGenerator(DotBackend(wfname, None,
+                                              ratio='compress', size='30,10'))
+        # map file
+        pmap, mapfile = tempfile.mkstemp(".map", wfname)
+        os.close(pmap)
+        # image file
+        fd, tmpfile = tempfile.mkstemp('.png')
+        os.close(fd)
+        generator.generate(visitor, prophdlr, tmpfile, mapfile)
+        self.w(u'<img src="%s" alt="%s" usemap="#%s"/>' % (
+            xml_escape(entity.absolute_url(vid='wfimage', tmpfile=tmpfile)),
+            xml_escape(self._cw._('graphical workflow for %s') % entity.name),
+            wfname))
+        stream = open(mapfile, 'r').read()
+        stream = stream.decode(self._cw.encoding)
+        self.w(stream)
+        os.unlink(mapfile)
+
 class WorkflowImageView(TmpFileViewMixin, view.EntityView):
-    __regid__ = 'wfgraph'
+    __regid__ = 'wfimage'
     __select__ = implements('Workflow')
     content_type = 'image/png'
 
-    def _generate(self, tmpfile):
-        """display schema information for an entity"""
-        entity = self.cw_rset.get_entity(self.cw_row, self.cw_col)
-        visitor = WorkflowVisitor(entity)
-        prophdlr = WorkflowDotPropsHandler(self._cw)
-        generator = GraphGenerator(DotBackend('workflow', 'LR',
-                                              ratio='compress', size='30,12'))
-        return generator.generate(visitor, prophdlr, tmpfile)
-
+    def cell_call(self, row=0, col=0):
+        tmpfile = self._cw.form.get('tmpfile', None)
+        self.w(open(tmpfile, 'rb').read())
+        os.unlink(tmpfile)
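For illustration only, not part of this changeset: the WorkflowPrimaryView rewrite above is an instance of the tabbed primary view pattern, and a cube could apply the same pattern to its own entity types roughly as sketched below. The 'Project' entity type, the 'project_main' tab id and the 'documentation' tab id are hypothetical.

# Hypothetical sketch of a tabbed primary view, mirroring WorkflowPrimaryView.
from cubicweb.selectors import implements, one_line_rset
from cubicweb.web.views.tabs import TabbedPrimaryView, PrimaryTab

_ = unicode  # i18n marker, same convention as in the patched modules


class ProjectPrimaryView(TabbedPrimaryView):
    __select__ = implements('Project')
    # each tab entry is the __regid__ of another view selectable for the entity
    tabs = [_('project_main'), _('documentation')]
    default_tab = 'project_main'


class ProjectMainTab(PrimaryTab):
    __regid__ = 'project_main'
    __select__ = PrimaryTab.__select__ & one_line_rset() & implements('Project')

    def render_entity_attributes(self, entity):
        # only this section differs from the standard primary view rendering
        self.w(entity.printable_value('description'))

The _('...') calls only mark the tab ids for message extraction; view lookup still uses the plain strings.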
diff -r b619531ddbd2 -r b6e250dd7a7d web/webconfig.py
--- a/web/webconfig.py Fri Apr 23 12:40:48 2010 +0200
+++ b/web/webconfig.py Fri Apr 23 12:42:53 2010 +0200
@@ -70,20 +70,20 @@
          {'type' : 'string',
           'default': None,
           'help': 'login of the CubicWeb user account to use for anonymous user (if you want to allow anonymous)',
-          'group': 'main', 'inputlevel': 1,
+          'group': 'main', 'level': 1,
           }),
         ('anonymous-password',
          {'type' : 'string',
           'default': None,
           'help': 'password of the CubicWeb user account to use for anonymous user, '
           'if anonymous-user is set',
-          'group': 'main', 'inputlevel': 1,
+          'group': 'main', 'level': 1,
           }),
         ('query-log-file',
          {'type' : 'string',
           'default': None,
           'help': 'web instance query log file',
-          'group': 'main', 'inputlevel': 2,
+          'group': 'main', 'level': 2,
           }),
         # web configuration
         ('https-url',
@@ -97,20 +97,20 @@
           'differentiate between http vs https access. For instance: \n'\
           'RewriteRule ^/demo/(.*) http://127.0.0.1:8080/https/$1 [L,P]\n'\
           'where the cubicweb web server is listening on port 8080.',
-          'group': 'main', 'inputlevel': 2,
+          'group': 'main', 'level': 2,
           }),
         ('auth-mode',
          {'type' : 'choice',
           'choices' : ('cookie', 'http'),
           'default': 'cookie',
           'help': 'authentication mode (cookie / http)',
-          'group': 'web', 'inputlevel': 1,
+          'group': 'web', 'level': 1,
           }),
         ('realm',
          {'type' : 'string',
           'default': 'cubicweb',
           'help': 'realm to use on HTTP authentication mode',
-          'group': 'web', 'inputlevel': 2,
+          'group': 'web', 'level': 2,
           }),
         ('http-session-time',
          {'type' : 'time',
@@ -129,7 +129,7 @@
           'So even if http-session-time is 0 and the user don\'t close his '
           'browser, he will have to reauthenticate after this time of '
           'inactivity. Default to 24h.',
-          'group': 'web', 'inputlevel': 2,
+          'group': 'web', 'level': 2,
           }),
         ('cleanup-anonymous-session-time',
          {'type' : 'time',
@@ -137,14 +137,14 @@
           'help': 'Same as cleanup-session-time but specific to anonymous '
           'sessions. You can have a much smaller timeout here since it will be '
           'transparent to the user. Default to 5min.',
-          'group': 'web', 'inputlevel': 2,
+          'group': 'web', 'level': 2,
           }),
         ('force-html-content-type',
          {'type' : 'yn',
           'default': False,
           'help': 'force text/html content type for your html pages instead of cubicweb user-agent based'\
           'deduction of an appropriate content type',
-          'group': 'web', 'inputlevel': 2,
+          'group': 'web', 'level': 2,
           }),
         ('embed-allowed',
          {'type' : 'regexp',
@@ -152,14 +152,14 @@
           'help': 'regular expression matching URLs that may be embeded. \
leave it blank if you don\'t want the embedding feature, or set it to ".*" \
if you want to allow everything',
-          'group': 'web', 'inputlevel': 1,
+          'group': 'web', 'level': 1,
           }),
         ('submit-mail',
          {'type' : 'string',
           'default': None,
           'help': ('Mail used as recipient to report bug in this instance, '
                    'if you want this feature on'),
-          'group': 'web', 'inputlevel': 2,
+          'group': 'web', 'level': 2,
           }),
 
         ('language-negociation',
@@ -167,14 +167,14 @@
           'default': True,
           'help': 'use Accept-Language http header to try to set user '\
           'interface\'s language according to browser defined preferences',
-          'group': 'web', 'inputlevel': 2,
+          'group': 'web', 'level': 2,
           }),
 
         ('print-traceback',
          {'type' : 'yn',
           'default': CubicWebConfiguration.mode != 'system',
           'help': 'print the traceback on the error page when an error occured',
-          'group': 'web', 'inputlevel': 2,
+          'group': 'web', 'level': 2,
           }),
 
         ('captcha-font-file',
@@ -182,14 +182,14 @@
           'default': join(CubicWebConfiguration.shared_dir(), 'data', 'porkys.ttf'),
           'help': 'True type font to use for captcha image generation (you \
must have the python imaging library installed to use captcha)',
-          'group': 'web', 'inputlevel': 2,
+          'group': 'web', 'level': 2,
           }),
         ('captcha-font-size',
          {'type' : 'int',
           'default': 25,
           'help': 'Font size to use for captcha image generation (you must \
have the python imaging library installed to use captcha)',
-          'group': 'web', 'inputlevel': 2,
+          'group': 'web', 'level': 2,
           }),
         ))
diff -r b619531ddbd2 -r b6e250dd7a7d wsgi/handler.py
--- a/wsgi/handler.py Fri Apr 23 12:40:48 2010 +0200
+++ b/wsgi/handler.py Fri Apr 23 12:42:53 2010 +0200
@@ -9,8 +9,7 @@
 __docformat__ = "restructuredtext en"
 
 from cubicweb import AuthenticationError
-from cubicweb.web import (NotFound, Redirect, DirectResponse, StatusResponse,
-                          ExplicitLogin)
+from cubicweb.web import Redirect, DirectResponse, StatusResponse, LogOut
 from cubicweb.web.application import CubicWebPublisher
 from cubicweb.wsgi.request import CubicWebWsgiRequest
 
@@ -113,8 +112,6 @@
             req.set_header('WWW-Authenticate', [('Basic', {'realm' : realm })], raw=False)
         try:
             self.appli.connect(req)
-        except AuthenticationError:
-            return self.request_auth(req)
         except Redirect, ex:
             return self.redirect(req, ex.location)
         path = req.path
@@ -126,12 +123,9 @@
             return WSGIResponse(200, req, ex.response)
         except StatusResponse, ex:
             return WSGIResponse(ex.status, req, ex.content)
-        except NotFound:
-            result = self.appli.notfound_content(req)
-            return WSGIResponse(404, req, result)
-        except ExplicitLogin: # must be before AuthenticationError
+        except AuthenticationError: # must be before AuthenticationError
             return self.request_auth(req)
-        except AuthenticationError:
+        except LogOut:
             if self.config['auth-mode'] == 'cookie':
                 # in cookie mode redirecting to the index view is enough :
                 # either anonymous connection is allowed and the page will
diff -r b619531ddbd2 -r b6e250dd7a7d wsgi/request.py
--- a/wsgi/request.py Fri Apr 23 12:40:48 2010 +0200
+++ b/wsgi/request.py Fri Apr 23 12:42:53 2010 +0200
@@ -38,9 +38,9 @@
         post, files = self.get_posted_data()
         super(CubicWebWsgiRequest, self).__init__(vreg, https, post)
         if files is not None:
-            for fdef in files.itervalues():
-                fdef[0] = unicode(fdef[0], self.encoding)
-            self.form.update(files)
+            for key, (name, _, stream) in files.iteritems():
+                name = unicode(name, self.encoding)
+                self.form[key] = (name, stream)
         # prepare output headers
         self.headers_out = {}
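For illustration only, not part of this changeset: with the wsgi/request.py change above, an uploaded file reaches form processing as a (filename, stream) pair keyed by the field name, instead of the raw posted file definition. Code running behind the WSGI front end might consume it roughly as below; the 'photo' field name and the helper function are hypothetical.

# Hypothetical helper reading an upload under the new form layout, where
# req.form['photo'] is a (unicode filename, file-like stream) pair.
def read_uploaded_photo(req):
    name, stream = req.form['photo']
    data = stream.read()
    return name, data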