--- a/README Tue Apr 13 19:22:46 2010 +0200
+++ b/README Tue Apr 13 19:43:51 2010 +0200
@@ -1,6 +1,15 @@
CubicWeb semantic web framework
===============================
+CubicWeb is an entities / relations based knowledge management system
+developed at Logilab.
+
+This package contains:
+* a repository server
+* an RQL command line client to the repository
+* an adaptive modpython interface to the server
+* a bunch of other management tools
+
Install
-------
--- a/__pkginfo__.py Tue Apr 13 19:22:46 2010 +0200
+++ b/__pkginfo__.py Tue Apr 13 19:43:51 2010 +0200
@@ -1,36 +1,21 @@
# pylint: disable-msg=W0622,C0103
"""cubicweb global packaging information for the cubicweb knowledge management
software
+
:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
"""
-distname = "cubicweb"
-modname = "cubicweb"
+modname = distname = "cubicweb"
numversion = (3, 7, 3)
version = '.'.join(str(num) for num in numversion)
-license = 'LGPL'
-copyright = '''Copyright (c) 2003-2010 LOGILAB S.A. (Paris, FRANCE).
-http://www.logilab.fr/ -- mailto:contact@logilab.fr'''
-
+description = "a repository of entities / relations for knowledge management"
author = "Logilab"
author_email = "contact@logilab.fr"
-
-short_desc = "a repository of entities / relations for knowledge management"
-long_desc = """CubicWeb is a entities / relations based knowledge management system
-developped at Logilab.
-
-This package contains:
-* a repository server
-* a RQL command line client to the repository
-* an adaptative modpython interface to the server
-* a bunch of other management tools
-"""
-
web = 'http://www.cubicweb.org'
ftp = 'ftp://ftp.logilab.org/pub/cubicweb'
-pyversions = ['2.5', '2.6']
+license = 'LGPL'
classifiers = [
'Environment :: Web Environment',
@@ -39,6 +24,32 @@
'Programming Language :: JavaScript',
]
+__depends__ = {
+ 'logilab-common': '>= 0.49.0',
+ 'logilab-mtconverter': '>= 0.6.0',
+ 'rql': '>= 0.26.0',
+ 'yams': '>= 0.28.1',
+ 'docutils': '>= 0.6',
+ #gettext # for xgettext, msgcat, etc...
+ # web dependencies
+ 'simplejson': '>= 2.0.9',
+ 'lxml': '',
+ 'Twisted': '',
+ # XXX graphviz
+ # server dependencies
+ 'logilab-database': '',
+ 'pysqlite': '>= 2.5.5', # XXX install pysqlite2
+ }
+
+__recommends__ = {
+ 'Pyro': '>= 3.9.1',
+ 'PIL': '', # for captcha
+ 'pycrypto': '', # for crypto extensions
+ 'fyzz': '>= 0.1.0', # for sparql
+ 'vobject': '>= 0.6.0', # for ical view
+ #'Products.FCKeditor':'',
+ #'SimpleTAL':'>= 4.1.6',
+ }
import sys
from os import listdir, environ
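The new __depends__ / __recommends__ mappings pair a distribution name with an optional version constraint (an empty string meaning "any version"). As a minimal sketch of how such a mapping can be consumed, assuming a hypothetical packaging helper that is not part of this changeset:

    def depends_to_requires(depends):
        # turn {'rql': '>= 0.26.0', 'lxml': ''} into ['lxml', 'rql >= 0.26.0']
        return [('%s %s' % (project, constraint)).strip()
                for project, constraint in sorted(depends.items())]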
@@ -49,57 +60,53 @@
if not s.endswith('.bat')]
include_dirs = [join('test', 'data'),
join('server', 'test', 'data'),
+ join('hooks', 'test', 'data'),
join('web', 'test', 'data'),
join('devtools', 'test', 'data'),
'skeleton']
-entities_dir = 'entities'
-schema_dir = 'schemas'
-sobjects_dir = 'sobjects'
-server_migration_dir = join('misc', 'migration')
-data_dir = join('web', 'data')
-wdoc_dir = join('web', 'wdoc')
-wdocimages_dir = join(wdoc_dir, 'images')
-views_dir = join('web', 'views')
-i18n_dir = 'i18n'
+_server_migration_dir = join('misc', 'migration')
+_data_dir = join('web', 'data')
+_wdoc_dir = join('web', 'wdoc')
+_wdocimages_dir = join(_wdoc_dir, 'images')
+_views_dir = join('web', 'views')
+_i18n_dir = 'i18n'
-if environ.get('APYCOT_ROOT'):
+_pyversion = '.'.join(str(num) for num in sys.version_info[0:2])
+if '--home' in sys.argv:
# --home install
- pydir = 'python'
+ pydir = 'python' + _pyversion
else:
- python_version = '.'.join(str(num) for num in sys.version_info[0:2])
- pydir = join('python' + python_version, 'site-packages')
+ pydir = join('python' + _pyversion, 'site-packages')
try:
data_files = [
- # common data
- #[join('share', 'cubicweb', 'entities'),
- # [join(entities_dir, filename) for filename in listdir(entities_dir)]],
# server data
[join('share', 'cubicweb', 'schemas'),
- [join(schema_dir, filename) for filename in listdir(schema_dir)]],
- #[join('share', 'cubicweb', 'sobjects'),
- # [join(sobjects_dir, filename) for filename in listdir(sobjects_dir)]],
+ [join('schemas', filename) for filename in listdir('schemas')]],
[join('share', 'cubicweb', 'migration'),
- [join(server_migration_dir, filename)
- for filename in listdir(server_migration_dir)]],
+ [join(_server_migration_dir, filename)
+ for filename in listdir(_server_migration_dir)]],
# web data
[join('share', 'cubicweb', 'cubes', 'shared', 'data'),
- [join(data_dir, fname) for fname in listdir(data_dir) if not isdir(join(data_dir, fname))]],
+ [join(_data_dir, fname) for fname in listdir(_data_dir)
+ if not isdir(join(_data_dir, fname))]],
[join('share', 'cubicweb', 'cubes', 'shared', 'data', 'timeline'),
- [join(data_dir, 'timeline', fname) for fname in listdir(join(data_dir, 'timeline'))]],
+ [join(_data_dir, 'timeline', fname) for fname in listdir(join(_data_dir, 'timeline'))]],
[join('share', 'cubicweb', 'cubes', 'shared', 'data', 'images'),
- [join(data_dir, 'images', fname) for fname in listdir(join(data_dir, 'images'))]],
+ [join(_data_dir, 'images', fname) for fname in listdir(join(_data_dir, 'images'))]],
[join('share', 'cubicweb', 'cubes', 'shared', 'wdoc'),
- [join(wdoc_dir, fname) for fname in listdir(wdoc_dir) if not isdir(join(wdoc_dir, fname))]],
+ [join(_wdoc_dir, fname) for fname in listdir(_wdoc_dir)
+ if not isdir(join(_wdoc_dir, fname))]],
[join('share', 'cubicweb', 'cubes', 'shared', 'wdoc', 'images'),
- [join(wdocimages_dir, fname) for fname in listdir(wdocimages_dir)]],
- # XXX: .pt install should be handled properly in a near future version
+ [join(_wdocimages_dir, fname) for fname in listdir(_wdocimages_dir)]],
+ [join('share', 'cubicweb', 'cubes', 'shared', 'i18n'),
+ [join(_i18n_dir, fname) for fname in listdir(_i18n_dir)]],
+ # XXX: drop .pt files
[join('lib', pydir, 'cubicweb', 'web', 'views'),
- [join(views_dir, fname) for fname in listdir(views_dir) if fname.endswith('.pt')]],
- [join('share', 'cubicweb', 'cubes', 'shared', 'i18n'),
- [join(i18n_dir, fname) for fname in listdir(i18n_dir)]],
+ [join(_views_dir, fname) for fname in listdir(_views_dir)
+ if fname.endswith('.pt')]],
# skeleton
]
except OSError:
--- a/_exceptions.py Tue Apr 13 19:22:46 2010 +0200
+++ b/_exceptions.py Tue Apr 13 19:43:51 2010 +0200
@@ -52,9 +52,6 @@
"""raised when when an attempt to establish a connection failed do to wrong
connection information (login / password or other authentication token)
"""
- def __init__(self, *args, **kwargs):
- super(AuthenticationError, self).__init__(*args)
- self.__dict__.update(kwargs)
class BadConnectionId(ConnectionError):
"""raised when a bad connection id is given"""
--- a/cwconfig.py Tue Apr 13 19:22:46 2010 +0200
+++ b/cwconfig.py Tue Apr 13 19:43:51 2010 +0200
@@ -126,12 +126,11 @@
import sys
import os
import logging
-import tempfile
from smtplib import SMTP
from threading import Lock
-from os.path import exists, join, expanduser, abspath, normpath, basename, isdir
+from os.path import (exists, join, expanduser, abspath, normpath,
+ basename, isdir, dirname)
from warnings import warn
-
from logilab.common.decorators import cached, classproperty
from logilab.common.deprecation import deprecated
from logilab.common.logging_ext import set_log_methods, init_log
@@ -179,6 +178,23 @@
% (directory, modes))
return modes[0]
+def _find_prefix(start_path=CW_SOFTWARE_ROOT):
+ """Runs along the parent directories of *start_path* (default to cubicweb source directory)
+ looking for one containing a 'share/cubicweb' directory.
+ The first matching directory is assumed as the prefix installation of cubicweb
+
+ Returns the matching prefix or None.
+ """
+ prefix = start_path
+ old_prefix = None
+ if not isdir(start_path):
+ prefix = dirname(start_path)
+ while not isdir(join(prefix, 'share', 'cubicweb')) and prefix != old_prefix:
+ old_prefix = prefix
+ prefix = dirname(prefix)
+ if isdir(join(prefix, 'share', 'cubicweb')):
+ return prefix
+ return sys.prefix
# persistent options definition
PERSISTENT_OPTIONS = (
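To make the prefix walk concrete, an illustration with made-up paths (assuming a --prefix style install):

    # sources under /opt/cw/lib/python2.5/site-packages/cubicweb and shared
    # resources under /opt/cw/share/cubicweb; the walk checks successively
    #   /opt/cw/lib/python2.5/site-packages/cubicweb   -> no share/cubicweb
    #   /opt/cw/lib/python2.5/site-packages            -> no share/cubicweb
    #   /opt/cw/lib/python2.5                          -> no share/cubicweb
    #   /opt/cw/lib                                    -> no share/cubicweb
    #   /opt/cw                                        -> match, prefix is /opt/cw
    # and falls back to sys.prefix when nothing matches.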
@@ -251,6 +267,11 @@
CWDEV = exists(join(CW_SOFTWARE_ROOT, '.hg'))
+try:
+ _INSTALL_PREFIX = os.environ['CW_INSTALL_PREFIX']
+except KeyError:
+ _INSTALL_PREFIX = _find_prefix()
+
class CubicWebNoAppConfiguration(ConfigurationMixIn):
"""base class for cubicweb configuration without a specific instance directory
"""
@@ -264,25 +285,16 @@
# debug mode
debugmode = False
- if os.environ.get('APYCOT_ROOT'):
- mode = 'test'
- # allow to test cubes within apycot using cubicweb not installed by
- # apycot
- if __file__.startswith(os.environ['APYCOT_ROOT']):
- CUBES_DIR = '%(APYCOT_ROOT)s/local/share/cubicweb/cubes/' % os.environ
- # create __init__ file
- file(join(CUBES_DIR, '__init__.py'), 'w').close()
- else:
- CUBES_DIR = '/usr/share/cubicweb/cubes/'
- elif (CWDEV and _forced_mode != 'system'):
+
+ if (CWDEV and _forced_mode != 'system'):
mode = 'user'
- CUBES_DIR = abspath(normpath(join(CW_SOFTWARE_ROOT, '../cubes')))
+ _CUBES_DIR = join(CW_SOFTWARE_ROOT, '../cubes')
else:
- if _forced_mode == 'user':
- mode = 'user'
- else:
- mode = 'system'
- CUBES_DIR = '/usr/share/cubicweb/cubes/'
+ mode = _forced_mode or 'system'
+ _CUBES_DIR = join(_INSTALL_PREFIX, 'share', 'cubicweb', 'cubes')
+
+ CUBES_DIR = env_path('CW_CUBES_DIR', _CUBES_DIR, 'cubes', checkexists=False)
+ CUBES_PATH = os.environ.get('CW_CUBES_PATH', '').split(os.pathsep)
options = (
('log-threshold',
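Cube lookup is now driven by two environment variables, read when cwconfig is imported: CW_CUBES_DIR (the single system-wide cubes directory) and CW_CUBES_PATH (extra directories, searched before CW_CUBES_DIR). A sketch with invented paths:

    import os
    # must be set before cubicweb.cwconfig is imported
    os.environ['CW_CUBES_DIR'] = '/opt/cw/share/cubicweb/cubes'
    os.environ['CW_CUBES_PATH'] = os.pathsep.join(['/srv/cubes', '/home/me/cubes'])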
@@ -344,7 +356,6 @@
}),
)
# static and class methods used to get instance independant resources ##
-
@staticmethod
def cubicweb_version():
"""return installed cubicweb version"""
@@ -383,21 +394,17 @@
% directory)
continue
for cube in os.listdir(directory):
- if isdir(join(directory, cube)) and not cube == 'shared':
+ if cube in ('CVS', '.svn', 'shared', '.hg'):
+ continue
+ if isdir(join(directory, cube)):
cubes.add(cube)
return sorted(cubes)
@classmethod
def cubes_search_path(cls):
"""return the path of directories where cubes should be searched"""
- path = []
- try:
- for directory in os.environ['CW_CUBES_PATH'].split(os.pathsep):
- directory = abspath(normpath(directory))
- if exists(directory) and not directory in path:
- path.append(directory)
- except KeyError:
- pass
+ path = [abspath(normpath(directory)) for directory in cls.CUBES_PATH
+ if directory.strip() and exists(directory.strip())]
if not cls.CUBES_DIR in path and exists(cls.CUBES_DIR):
path.append(cls.CUBES_DIR)
return path
@@ -413,7 +420,7 @@
@classmethod
def cube_dir(cls, cube):
"""return the cube directory for the given cube id,
- raise ConfigurationError if it doesn't exists
+ raise `ConfigurationError` if it doesn't exist
"""
for directory in cls.cubes_search_path():
cubedir = join(directory, cube)
@@ -431,10 +438,12 @@
"""return the information module for the given cube"""
cube = CW_MIGRATION_MAP.get(cube, cube)
try:
- return getattr(__import__('cubes.%s.__pkginfo__' % cube), cube).__pkginfo__
+ parent = __import__('cubes.%s.__pkginfo__' % cube)
+ return getattr(parent, cube).__pkginfo__
except Exception, ex:
- raise ConfigurationError('unable to find packaging information for '
- 'cube %s (%s: %s)' % (cube, ex.__class__.__name__, ex))
+ raise ConfigurationError(
+ 'unable to find packaging information for cube %s (%s: %s)'
+ % (cube, ex.__class__.__name__, ex))
@classmethod
def cube_version(cls, cube):
@@ -446,14 +455,43 @@
return Version(version)
@classmethod
+ def _cube_deps(cls, cube, key, oldkey):
+ """return cubicweb cubes used by the given cube"""
+ pkginfo = cls.cube_pkginfo(cube)
+ try:
+ # explicit __xxx_cubes__ attribute
+ deps = getattr(pkginfo, key)
+ except AttributeError:
+ # deduce cubes from generic __xxx__ attribute
+ try:
+ gendeps = getattr(pkginfo, key.replace('_cubes', ''))
+ except AttributeError:
+ # bw compat
+ if hasattr(pkginfo, oldkey):
+ warn('[3.8] %s is deprecated, use %s dict' % (oldkey, key),
+ DeprecationWarning)
+ deps = getattr(pkginfo, oldkey)
+ else:
+ deps = {}
+ else:
+ deps = dict( (x[len('cubicweb-'):], v)
+ for x, v in gendeps.iteritems()
+ if x.startswith('cubicweb-'))
+ if not isinstance(deps, dict):
+ deps = dict((key, None) for key in deps)
+ warn('[3.8] cube %s should define %s as a dict' % (cube, key),
+ DeprecationWarning)
+ return deps
+
+ @classmethod
def cube_dependencies(cls, cube):
"""return cubicweb cubes used by the given cube"""
- return getattr(cls.cube_pkginfo(cube), '__use__', ())
+ return cls._cube_deps(cube, '__depends_cubes__', '__use__')
@classmethod
def cube_recommends(cls, cube):
"""return cubicweb cubes recommended by the given cube"""
- return getattr(cls.cube_pkginfo(cube), '__recommend__', ())
+ return cls._cube_deps(cube, '__recommends_cubes__', '__recommend__')
@classmethod
def expand_cubes(cls, cubes, with_recommends=False):
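For cube authors, dependency declaration now looks roughly as follows (hypothetical cube __pkginfo__ shown for illustration): cube dependencies are deduced from __depends__ entries whose key starts with 'cubicweb-', unless an explicit __depends_cubes__ mapping is given; the old list-style __use__ attribute still works but triggers a [3.8] deprecation warning.

    # cubes/mycube/__pkginfo__.py (hypothetical)
    __depends__ = {'cubicweb': '>= 3.8.0',
                   'cubicweb-comment': None,  # dependency on the 'comment' cube
                   'lxml': ''}                # plain python dependency, ignored here
    # equivalent explicit form:
    __depends_cubes__ = {'comment': None}
    # deprecated since 3.8:
    # __use__ = ('comment',)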
@@ -486,9 +524,10 @@
graph = {}
for cube in cubes:
cube = CW_MIGRATION_MAP.get(cube, cube)
- deps = cls.cube_dependencies(cube) + \
- cls.cube_recommends(cube)
- graph[cube] = set(dep for dep in deps if dep in cubes)
+ graph[cube] = set(dep for dep in cls.cube_dependencies(cube)
+ if dep in cubes)
+ graph[cube] |= set(dep for dep in cls.cube_recommends(cube)
+ if dep in cubes)
cycles = get_cycles(graph)
if cycles:
cycles = '\n'.join(' -> '.join(cycle) for cycle in cycles)
@@ -636,6 +675,7 @@
cw_rest_init()
def adjust_sys_path(self):
+ # overridden in CubicWebConfiguration
self.cls_adjust_sys_path()
def init_log(self, logthreshold=None, debug=False,
@@ -685,35 +725,24 @@
"""
return None
+
class CubicWebConfiguration(CubicWebNoAppConfiguration):
"""base class for cubicweb server and web configurations"""
- INSTANCES_DATA_DIR = None
- if os.environ.get('APYCOT_ROOT'):
- root = os.environ['APYCOT_ROOT']
- REGISTRY_DIR = '%s/etc/cubicweb.d/' % root
- if not exists(REGISTRY_DIR):
- os.makedirs(REGISTRY_DIR)
- RUNTIME_DIR = tempfile.gettempdir()
- # allow to test cubes within apycot using cubicweb not installed by
- # apycot
- if __file__.startswith(os.environ['APYCOT_ROOT']):
- MIGRATION_DIR = '%s/local/share/cubicweb/migration/' % root
+ if CubicWebNoAppConfiguration.mode == 'user':
+ _INSTANCES_DIR = expanduser('~/etc/cubicweb.d/')
+ else: #mode = 'system'
+ if _INSTALL_PREFIX == '/usr':
+ _INSTANCES_DIR = '/etc/cubicweb.d/'
else:
- MIGRATION_DIR = '/usr/share/cubicweb/migration/'
- else:
- if CubicWebNoAppConfiguration.mode == 'user':
- REGISTRY_DIR = expanduser('~/etc/cubicweb.d/')
- RUNTIME_DIR = tempfile.gettempdir()
- INSTANCES_DATA_DIR = REGISTRY_DIR
- else: #mode = 'system'
- REGISTRY_DIR = '/etc/cubicweb.d/'
- RUNTIME_DIR = '/var/run/cubicweb/'
- INSTANCES_DATA_DIR = '/var/lib/cubicweb/instances/'
- if CWDEV:
- MIGRATION_DIR = join(CW_SOFTWARE_ROOT, 'misc', 'migration')
- else:
- MIGRATION_DIR = '/usr/share/cubicweb/migration/'
+ _INSTANCES_DIR = join(_INSTALL_PREFIX, 'etc', 'cubicweb.d')
+
+ if os.environ.get('APYCOT_ROOT'):
+ _cubes_init = join(CubicWebNoAppConfiguration.CUBES_DIR, '__init__.py')
+ if not exists(_cubes_init):
+ file(_cubes_init, 'w').close()
+ if not exists(_INSTANCES_DIR):
+ os.makedirs(_INSTANCES_DIR)
# for some commands (creation...) we don't want to initialize gettext
set_language = True
@@ -757,25 +786,19 @@
)
@classmethod
- def runtime_dir(cls):
- """run time directory for pid file..."""
- return env_path('CW_RUNTIME_DIR', cls.RUNTIME_DIR, 'run time')
-
- @classmethod
- def registry_dir(cls):
+ def instances_dir(cls):
"""return the control directory"""
- return env_path('CW_INSTANCES_DIR', cls.REGISTRY_DIR, 'registry')
-
- @classmethod
- def instance_data_dir(cls):
- """return the instance data directory"""
- return env_path('CW_INSTANCES_DATA_DIR', cls.INSTANCES_DATA_DIR,
- 'additional data')
+ return env_path('CW_INSTANCES_DIR', cls._INSTANCES_DIR, 'registry')
@classmethod
def migration_scripts_dir(cls):
"""cubicweb migration scripts directory"""
- return env_path('CW_MIGRATION_DIR', cls.MIGRATION_DIR, 'migration')
+ if CWDEV:
+ return join(CW_SOFTWARE_ROOT, 'misc', 'migration')
+ mdir = join(_INSTALL_PREFIX, 'share', 'cubicweb', 'migration')
+ if not exists(mdir):
+ raise ConfigurationError('migration path %s doesn\'t exist' % mdir)
+ return mdir
@classmethod
def config_for(cls, appid, config=None):
@@ -798,9 +821,10 @@
"""return the home directory of the instance with the given
instance id
"""
- home = join(cls.registry_dir(), appid)
+ home = join(cls.instances_dir(), appid)
if not exists(home):
- raise ConfigurationError('no such instance %s (check it exists with "cubicweb-ctl list")' % appid)
+ raise ConfigurationError('no such instance %s (check it exists with'
+ ' "cubicweb-ctl list")' % appid)
return home
MODES = ('common', 'repository', 'Any', 'web')
@@ -823,7 +847,9 @@
def default_log_file(self):
"""return default path to the log file of the instance'server"""
if self.mode == 'user':
- basepath = join(tempfile.gettempdir(), '%s-%s' % (basename(self.appid), self.name))
+ import tempfile
+ basepath = join(tempfile.gettempdir(), '%s-%s' % (
+ basename(self.appid), self.name))
path = basepath + '.log'
i = 1
while exists(path) and i < 100: # arbitrary limit to avoid infinite loop
@@ -838,7 +864,13 @@
def default_pid_file(self):
"""return default path to the pid file of the instance'server"""
- return join(self.runtime_dir(), '%s-%s.pid' % (self.appid, self.name))
+ if self.mode == 'system':
+ # XXX not under _INSTALL_PREFIX, right?
+ rtdir = env_path('CW_RUNTIME_DIR', '/var/run/cubicweb/', 'run time')
+ else:
+ import tempfile
+ rtdir = env_path('CW_RUNTIME_DIR', tempfile.gettempdir(), 'run time')
+ return join(rtdir, '%s-%s.pid' % (self.appid, self.name))
# instance methods used to get instance specific resources #############
@@ -858,11 +890,17 @@
@property
def apphome(self):
- return join(self.registry_dir(), self.appid)
+ return join(self.instances_dir(), self.appid)
@property
def appdatahome(self):
- return join(self.instance_data_dir(), self.appid)
+ if self.mode == 'system':
+ # XXX not under _INSTALL_PREFIX, right?
+ iddir = '/var/lib/cubicweb/instances/'
+ else:
+ iddir = self.instances_dir()
+ iddir = env_path('CW_INSTANCES_DATA_DIR', iddir, 'additional data')
+ return join(iddir, self.appid)
def init_cubes(self, cubes):
assert self._cubes is None, self._cubes
@@ -927,7 +965,8 @@
if exists(sitefile) and not sitefile in self._site_loaded:
self._load_site_cubicweb(sitefile)
self._site_loaded.add(sitefile)
- self.warning('[3.5] site_erudi.py is deprecated, should be renamed to site_cubicweb.py')
+ self.warning('[3.5] site_erudi.py is deprecated, should be '
+ 'renamed to site_cubicweb.py')
def _load_site_cubicweb(self, sitefile):
# XXX extrapath argument to load_module_from_file only in lgc > 0.46
--- a/cwctl.py Tue Apr 13 19:22:46 2010 +0200
+++ b/cwctl.py Tue Apr 13 19:43:51 2010 +0200
@@ -13,6 +13,7 @@
# possible (for cubicweb-ctl reactivity, necessary for instance for usable bash
# completion). So import locally in command helpers.
import sys
+from warnings import warn
from os import remove, listdir, system, pathsep
try:
from os import kill, getpgid
@@ -85,7 +86,7 @@
Instance used by another one should appears first in the file (one
instance per line)
"""
- regdir = cwcfg.registry_dir()
+ regdir = cwcfg.instances_dir()
_allinstances = list_instances(regdir)
if isfile(join(regdir, 'startorder')):
allinstances = []
@@ -168,84 +169,6 @@
# base commands ###############################################################
-def version_strictly_lower(a, b):
- from logilab.common.changelog import Version
- if a:
- a = Version(a)
- if b:
- b = Version(b)
- return a < b
-
-def max_version(a, b):
- from logilab.common.changelog import Version
- return str(max(Version(a), Version(b)))
-
-class ConfigurationProblem(object):
- """Each cube has its own list of dependencies on other cubes/versions.
-
- The ConfigurationProblem is used to record the loaded cubes, then to detect
- inconsistencies in their dependencies.
-
- See configuration management on wikipedia for litterature.
- """
-
- def __init__(self):
- self.cubes = {}
-
- def add_cube(self, name, info):
- self.cubes[name] = info
-
- def solve(self):
- self.warnings = []
- self.errors = []
- self.read_constraints()
- for cube, versions in sorted(self.constraints.items()):
- oper, version = None, None
- # simplify constraints
- if versions:
- for constraint in versions:
- op, ver = constraint
- if oper is None:
- oper = op
- version = ver
- elif op == '>=' and oper == '>=':
- version = max_version(ver, version)
- else:
- print 'unable to handle this case', oper, version, op, ver
- # "solve" constraint satisfaction problem
- if cube not in self.cubes:
- self.errors.append( ('add', cube, version) )
- elif versions:
- lower_strict = version_strictly_lower(self.cubes[cube].version, version)
- if oper in ('>=','='):
- if lower_strict:
- self.errors.append( ('update', cube, version) )
- else:
- print 'unknown operator', oper
-
- def read_constraints(self):
- self.constraints = {}
- self.reverse_constraints = {}
- for cube, info in self.cubes.items():
- if hasattr(info,'__depends_cubes__'):
- use = info.__depends_cubes__
- if not isinstance(use, dict):
- use = dict((key, None) for key in use)
- self.warnings.append('cube %s should define __depends_cubes__ as a dict not a list')
- else:
- self.warnings.append('cube %s should define __depends_cubes__' % cube)
- use = dict((key, None) for key in info.__use__)
- for name, constraint in use.items():
- self.constraints.setdefault(name,set())
- if constraint:
- try:
- oper, version = constraint.split()
- self.constraints[name].add( (oper, version) )
- except:
- self.warnings.append('cube %s depends on %s but constraint badly formatted: %s'
- % (cube, name, constraint))
- self.reverse_constraints.setdefault(name, set()).add(cube)
-
class ListCommand(Command):
"""List configurations, cubes and instances.
@@ -262,6 +185,7 @@
"""run the command with its specific arguments"""
if args:
raise BadCommandUsage('Too much arguments')
+ from cubicweb.migration import ConfigurationProblem
print 'CubicWeb %s (%s mode)' % (cwcfg.cubicweb_version(), cwcfg.mode)
print
print 'Available configurations:'
@@ -273,7 +197,7 @@
continue
print ' ', line
print
- cfgpb = ConfigurationProblem()
+ cfgpb = ConfigurationProblem(cwcfg)
try:
cubesdir = pathsep.join(cwcfg.cubes_search_path())
namesize = max(len(x) for x in cwcfg.available_cubes())
@@ -284,26 +208,31 @@
else:
print 'Available cubes (%s):' % cubesdir
for cube in cwcfg.available_cubes():
- if cube in ('CVS', '.svn', 'shared', '.hg'):
- continue
try:
tinfo = cwcfg.cube_pkginfo(cube)
tversion = tinfo.version
- cfgpb.add_cube(cube, tinfo)
+ cfgpb.add_cube(cube, tversion)
except ConfigurationError:
tinfo = None
tversion = '[missing cube information]'
print '* %s %s' % (cube.ljust(namesize), tversion)
if self.config.verbose:
- shortdesc = tinfo and (getattr(tinfo, 'short_desc', '')
- or tinfo.__doc__)
- if shortdesc:
- print ' '+ ' \n'.join(shortdesc.splitlines())
+ if tinfo:
+ descr = getattr(tinfo, 'description', '')
+ if not descr:
+ descr = getattr(tinfo, 'short_desc', '')
+ if descr:
+ warn('[3.8] short_desc is deprecated, update %s'
+ ' pkginfo' % cube, DeprecationWarning)
+ else:
+ descr = tinfo.__doc__
+ if descr:
+ print ' '+ ' \n'.join(descr.splitlines())
modes = detect_available_modes(cwcfg.cube_dir(cube))
print ' available modes: %s' % ', '.join(modes)
print
try:
- regdir = cwcfg.registry_dir()
+ regdir = cwcfg.instances_dir()
except ConfigurationError, ex:
print 'No instance available:', ex
print
@@ -611,7 +540,7 @@
actionverb = 'restarted'
def run_args(self, args, askconfirm):
- regdir = cwcfg.registry_dir()
+ regdir = cwcfg.instances_dir()
if not isfile(join(regdir, 'startorder')) or len(args) <= 1:
# no specific startorder
super(RestartInstanceCommand, self).run_args(args, askconfirm)
@@ -953,7 +882,7 @@
def run(self, args):
"""run the command with its specific arguments"""
- regdir = cwcfg.registry_dir()
+ regdir = cwcfg.instances_dir()
for appid in sorted(listdir(regdir)):
print appid
--- a/cwvreg.py Tue Apr 13 19:22:46 2010 +0200
+++ b/cwvreg.py Tue Apr 13 19:43:51 2010 +0200
@@ -602,7 +602,7 @@
def solutions(self, req, rqlst, args):
def type_from_eid(eid, req=req):
return req.describe(eid)[0]
- self.rqlhelper.compute_solutions(rqlst, {'eid': type_from_eid}, args)
+ return self.rqlhelper.compute_solutions(rqlst, {'eid': type_from_eid}, args)
def parse(self, req, rql, args=None):
rqlst = self.rqlhelper.parse(rql)
--- a/dbapi.py Tue Apr 13 19:22:46 2010 +0200
+++ b/dbapi.py Tue Apr 13 19:43:51 2010 +0200
@@ -14,12 +14,14 @@
from logging import getLogger
from time import time, clock
from itertools import count
+from warnings import warn
from logilab.common.logging_ext import set_log_methods
from logilab.common.decorators import monkeypatch
from logilab.common.deprecation import deprecated
-from cubicweb import ETYPE_NAME_MAP, ConnectionError, cwvreg, cwconfig
+from cubicweb import ETYPE_NAME_MAP, ConnectionError, AuthenticationError,\
+ cwvreg, cwconfig
from cubicweb.req import RequestSessionBase
@@ -155,9 +157,25 @@
return repo, cnx
+class DBAPISession(object):
+ def __init__(self, cnx, login=None, authinfo=None):
+ self.cnx = cnx
+ self.data = {}
+ self.login = login
+ self.authinfo = authinfo
+
+ @property
+ def anonymous_session(self):
+ return self.cnx is None or self.cnx.anonymous_connection
+
+ @property
+ def sessionid(self):
+ return self.cnx.sessionid
+
+
class DBAPIRequest(RequestSessionBase):
- def __init__(self, vreg, cnx=None):
+ def __init__(self, vreg, session=None):
super(DBAPIRequest, self).__init__(vreg)
try:
# no vreg or config which doesn't handle translations
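A DBAPISession simply bundles an established connection with the login, authentication info and a per-session data dictionary. A minimal sketch of building a request on top of it, assuming an already opened Connection `cnx` whose `vreg` is set:

    session = DBAPISession(cnx, login=u'admin', authinfo={'password': 'secret'})
    req = DBAPIRequest(cnx.vreg, session)
    assert req.session is session and req.cnx is cnx
    # a session is anonymous when it has no connection or an anonymous one
    print session.anonymous_session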
@@ -167,12 +185,12 @@
self.set_default_language(vreg)
# cache entities built during the request
self._eid_cache = {}
- # these args are initialized after a connection is
- # established
- self.cnx = None # connection associated to the request
- self._user = None # request's user, set at authentication
- if cnx is not None:
- self.set_connection(cnx)
+ if session is not None:
+ self.set_session(session)
+ else:
+ # these args are initialized after a connection is
+ # established
+ self.session = self.cnx = self._user = None
def base_url(self):
return self.vreg.config['base-url']
@@ -180,14 +198,22 @@
def from_controller(self):
return 'view'
- def set_connection(self, cnx, user=None):
+ def set_session(self, session, user=None):
"""method called by the session handler when the user is authenticated
or an anonymous connection is open
"""
- self.cnx = cnx
- self.cursor = cnx.cursor(self)
+ self.session = session
+ if session.cnx is not None:
+ self.cnx = session.cnx
+ self.execute = session.cnx.cursor(self).execute
self.set_user(user)
+ def execute(self, *args, **kwargs):
+ """overriden when session is set. By default raise authentication error
+ so authentication is requested.
+ """
+ raise AuthenticationError()
+
def set_default_language(self, vreg):
try:
self.lang = vreg.property_value('ui.language')
@@ -204,14 +230,6 @@
self.pgettext = lambda x, y: y
self.debug('request default language: %s', self.lang)
- def describe(self, eid):
- """return a tuple (type, sourceuri, extid) for the entity with id <eid>"""
- return self.cnx.describe(eid)
-
- def source_defs(self):
- """return the definition of sources used by the repository."""
- return self.cnx.source_defs()
-
# entities cache management ###############################################
def entity_cache(self, eid):
@@ -231,26 +249,10 @@
# low level session data management #######################################
- def session_data(self):
- """return a dictionnary containing session data"""
- return self.cnx.session_data()
-
- def get_session_data(self, key, default=None, pop=False):
- """return value associated to `key` in session data"""
+ def get_shared_data(self, key, default=None, pop=False):
+ """return value associated to `key` in shared data"""
if self.cnx is None:
return default # before the connection has been established
- return self.cnx.get_session_data(key, default, pop)
-
- def set_session_data(self, key, value):
- """set value associated to `key` in session data"""
- return self.cnx.set_session_data(key, value)
-
- def del_session_data(self, key):
- """remove value associated to `key` in session data"""
- return self.cnx.del_session_data(key)
-
- def get_shared_data(self, key, default=None, pop=False):
- """return value associated to `key` in shared data"""
return self.cnx.get_shared_data(key, default, pop)
def set_shared_data(self, key, value, querydata=False):
@@ -265,10 +267,18 @@
# server session compat layer #############################################
+ def describe(self, eid):
+ """return a tuple (type, sourceuri, extid) for the entity with id <eid>"""
+ return self.cnx.describe(eid)
+
+ def source_defs(self):
+ """return the definition of sources used by the repository."""
+ return self.cnx.source_defs()
+
def hijack_user(self, user):
"""return a fake request/session using specified user"""
req = DBAPIRequest(self.vreg)
- req.set_connection(self.cnx, user)
+ req.set_session(self.session, user)
return req
@property
@@ -282,9 +292,25 @@
if user:
self.set_entity_cache(user)
- def execute(self, *args, **kwargs):
- """Session interface compatibility"""
- return self.cursor.execute(*args, **kwargs)
+ @deprecated('[3.8] use direct access to req.session.data dictionary')
+ def session_data(self):
+ """return a dictionnary containing session data"""
+ return self.session.data
+
+ @deprecated('[3.8] use direct access to req.session.data dictionary')
+ def get_session_data(self, key, default=None, pop=False):
+ if pop:
+ return self.session.data.pop(key, default)
+ return self.session.data.get(key, default)
+
+ @deprecated('[3.8] use direct access to req.session.data dictionary')
+ def set_session_data(self, key, value):
+ self.session.data[key] = value
+
+ @deprecated('[3.8] use direct access to req.session.data dictionary')
+ def del_session_data(self, key):
+ self.session.data.pop(key, None)
+
set_log_methods(DBAPIRequest, getLogger('cubicweb.dbapi'))
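Call sites using the old per-request session data helpers can be migrated mechanically to the plain req.session.data dictionary; a sketch, assuming a DBAPIRequest `req` bound to a session:

    # deprecated since 3.8
    req.set_session_data('lastsearch', 'Any X WHERE X is CWUser')
    last = req.get_session_data('lastsearch', pop=True)
    # preferred form
    req.session.data['lastsearch'] = 'Any X WHERE X is CWUser'
    last = req.session.data.pop('lastsearch', None)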
@@ -299,68 +325,104 @@
etc.
"""
-# module level objects ########################################################
+
+# cursor / connection objects ##################################################
+
+class Cursor(object):
+ """These objects represent a database cursor, which is used to manage the
+ context of a fetch operation. Cursors created from the same connection are
+ not isolated, i.e., any changes done to the database by a cursor are
+ immediately visible to the other cursors. Cursors created from different
+ connections are isolated.
+ """
+
+ def __init__(self, connection, repo, req=None):
+ """This read-only attribute return a reference to the Connection
+ object on which the cursor was created.
+ """
+ self.connection = connection
+ """optionnal issuing request instance"""
+ self.req = req
+ self._repo = repo
+ self._sessid = connection.sessionid
+
+ def close(self):
+ """no effect"""
+ pass
+
+ def execute(self, rql, args=None, eid_key=None, build_descr=True):
+ """execute a rql query, return resulting rows and their description in
+ a :class:`~cubicweb.rset.ResultSet` object
+
+ * `rql` should be a Unicode string or a plain ASCII string, containing
+ the rql query
+
+ * `args` is the optional arguments dictionary associated with the query,
+ with keys matching named substitutions in `rql`
+
+ * `build_descr` is a boolean flag indicating if the description should
+ be built on select queries (if false, the description will be an empty
+ list)
+
+ on INSERT queries, there will be one row for each inserted entity,
+ containing its eid
+
+ on SET queries, XXX describe
+
+ DELETE queries return no result.
+
+ .. Note::
+ to maximize the rql parsing/analyzing cache performance, you should
+ always use substitution arguments in queries, i.e. avoid queries such as::
+
+ execute('Any X WHERE X eid 123')
+
+ use::
+
+ execute('Any X WHERE X eid %(x)s', {'x': 123})
+ """
+ if eid_key is not None:
+ warn('[3.8] eid_key is deprecated, you can safely remove this argument',
+ DeprecationWarning, stacklevel=2)
+ rset = self._repo.execute(self._sessid, rql, args, build_descr)
+ rset.req = self.req
+ return rset
-apilevel = '2.0'
-
-"""Integer constant stating the level of thread safety the interface supports.
-Possible values are:
-
- 0 Threads may not share the module.
- 1 Threads may share the module, but not connections.
- 2 Threads may share the module and connections.
- 3 Threads may share the module, connections and
- cursors.
-
-Sharing in the above context means that two threads may use a resource without
-wrapping it using a mutex semaphore to implement resource locking. Note that
-you cannot always make external resources thread safe by managing access using
-a mutex: the resource may rely on global variables or other external sources
-that are beyond your control.
-"""
-threadsafety = 1
+class LogCursor(Cursor):
+ """override the standard cursor to log executed queries"""
-"""String constant stating the type of parameter marker formatting expected by
-the interface. Possible values are :
+ def execute(self, operation, parameters=None, eid_key=None, build_descr=True):
+ """override the standard cursor to log executed queries"""
+ if eid_key is not None:
+ warn('[3.8] eid_key is deprecated, you can safely remove this argument',
+ DeprecationWarning, stacklevel=2)
+ tstart, cstart = time(), clock()
+ rset = Cursor.execute(self, operation, parameters, build_descr=build_descr)
+ self.connection.executed_queries.append((operation, parameters,
+ time() - tstart, clock() - cstart))
+ return rset
- 'qmark' Question mark style,
- e.g. '...WHERE name=?'
- 'numeric' Numeric, positional style,
- e.g. '...WHERE name=:1'
- 'named' Named style,
- e.g. '...WHERE name=:name'
- 'format' ANSI C printf format codes,
- e.g. '...WHERE name=%s'
- 'pyformat' Python extended format codes,
- e.g. '...WHERE name=%(name)s'
-"""
-paramstyle = 'pyformat'
-
-
-# connection object ###########################################################
class Connection(object):
"""DB-API 2.0 compatible Connection object for CubicWeb
"""
# make exceptions available through the connection object
ProgrammingError = ProgrammingError
+ # attributes that may be overridden per connection instance
+ anonymous_connection = False
+ cursor_class = Cursor
+ vreg = None
+ _closed = None
def __init__(self, repo, cnxid, cnxprops=None):
self._repo = repo
self.sessionid = cnxid
self._close_on_del = getattr(cnxprops, 'close_on_del', True)
self._cnxtype = getattr(cnxprops, 'cnxtype', 'pyro')
- self._closed = None
if cnxprops and cnxprops.log_queries:
self.executed_queries = []
self.cursor_class = LogCursor
- else:
- self.cursor_class = Cursor
- self.anonymous_connection = False
- self.vreg = None
- # session's data
- self.data = {}
def __repr__(self):
if self.anonymous_connection:
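For callers, cursor usage is unchanged; a short sketch assuming an open Connection `cnx` and an optional request `req`:

    cu = cnx.cursor(req)
    # prefer substitution arguments so the rql parsing cache stays effective
    rset = cu.execute('Any X WHERE X eid %(x)s', {'x': 123})
    # when cnxprops.log_queries is set, LogCursor records
    # (rql, args, wall clock time, cpu time) tuples in cnx.executed_queries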
@@ -378,29 +440,7 @@
return False #propagate the exception
def request(self):
- return DBAPIRequest(self.vreg, self)
-
- def session_data(self):
- """return a dictionnary containing session data"""
- return self.data
-
- def get_session_data(self, key, default=None, pop=False):
- """return value associated to `key` in session data"""
- if pop:
- return self.data.pop(key, default)
- else:
- return self.data.get(key, default)
-
- def set_session_data(self, key, value):
- """set value associated to `key` in session data"""
- self.data[key] = value
-
- def del_session_data(self, key):
- """remove value associated to `key` in session data"""
- try:
- del self.data[key]
- except KeyError:
- pass
+ return DBAPIRequest(self.vreg, DBAPISession(self))
def check(self):
"""raise `BadConnectionId` if the connection is no more valid"""
@@ -474,8 +514,6 @@
if self._repo.config.instance_hooks:
hm.register_hooks(config.load_hooks(self.vreg))
- load_vobjects = deprecated()(load_appobjects)
-
def use_web_compatible_requests(self, baseurl, sitetitle=None):
"""monkey patch DBAPIRequest to fake a cw.web.request, so you should
able to call html views using rset from a simple dbapi connection.
@@ -659,212 +697,3 @@
him).
"""
return self._repo.undo_transaction(self.sessionid, txuuid)
-
-
-# cursor object ###############################################################
-
-class Cursor(object):
- """These objects represent a database cursor, which is used to manage the
- context of a fetch operation. Cursors created from the same connection are
- not isolated, i.e., any changes done to the database by a cursor are
- immediately visible by the other cursors. Cursors created from different
- connections can or can not be isolated, depending on how the transaction
- support is implemented (see also the connection's rollback() and commit()
- methods.)
- """
-
- def __init__(self, connection, repo, req=None):
- """This read-only attribute return a reference to the Connection
- object on which the cursor was created.
- """
- self.connection = connection
- """optionnal issuing request instance"""
- self.req = req
-
- """This read/write attribute specifies the number of rows to fetch at a
- time with fetchmany(). It defaults to 1 meaning to fetch a single row
- at a time.
-
- Implementations must observe this value with respect to the fetchmany()
- method, but are free to interact with the database a single row at a
- time. It may also be used in the implementation of executemany().
- """
- self.arraysize = 1
-
- self._repo = repo
- self._sessid = connection.sessionid
- self._res = None
- self._closed = None
- self._index = 0
-
-
- def close(self):
- """Close the cursor now (rather than whenever __del__ is called). The
- cursor will be unusable from this point forward; an Error (or subclass)
- exception will be raised if any operation is attempted with the cursor.
- """
- self._closed = True
-
-
- def execute(self, operation, parameters=None, eid_key=None, build_descr=True):
- """Prepare and execute a database operation (query or command).
- Parameters may be provided as sequence or mapping and will be bound to
- variables in the operation. Variables are specified in a
- database-specific notation (see the module's paramstyle attribute for
- details).
-
- A reference to the operation will be retained by the cursor. If the
- same operation object is passed in again, then the cursor can optimize
- its behavior. This is most effective for algorithms where the same
- operation is used, but different parameters are bound to it (many
- times).
-
- For maximum efficiency when reusing an operation, it is best to use the
- setinputsizes() method to specify the parameter types and sizes ahead
- of time. It is legal for a parameter to not match the predefined
- information; the implementation should compensate, possibly with a loss
- of efficiency.
-
- The parameters may also be specified as list of tuples to e.g. insert
- multiple rows in a single operation, but this kind of usage is
- depreciated: executemany() should be used instead.
-
- Return values are not defined by the DB-API, but this here it returns a
- ResultSet object.
- """
- self._res = rset = self._repo.execute(self._sessid, operation,
- parameters, eid_key, build_descr)
- rset.req = self.req
- self._index = 0
- return rset
-
-
- def executemany(self, operation, seq_of_parameters):
- """Prepare a database operation (query or command) and then execute it
- against all parameter sequences or mappings found in the sequence
- seq_of_parameters.
-
- Modules are free to implement this method using multiple calls to the
- execute() method or by using array operations to have the database
- process the sequence as a whole in one call.
-
- Use of this method for an operation which produces one or more result
- sets constitutes undefined behavior, and the implementation is
- permitted (but not required) to raise an exception when it detects that
- a result set has been created by an invocation of the operation.
-
- The same comments as for execute() also apply accordingly to this
- method.
-
- Return values are not defined.
- """
- for parameters in seq_of_parameters:
- self.execute(operation, parameters)
- if self._res.rows is not None:
- self._res = None
- raise ProgrammingError('Operation returned a result set')
-
-
- def fetchone(self):
- """Fetch the next row of a query result set, returning a single
- sequence, or None when no more data is available.
-
- An Error (or subclass) exception is raised if the previous call to
- execute*() did not produce any result set or no call was issued yet.
- """
- if self._res is None:
- raise ProgrammingError('No result set')
- row = self._res.rows[self._index]
- self._index += 1
- return row
-
-
- def fetchmany(self, size=None):
- """Fetch the next set of rows of a query result, returning a sequence
- of sequences (e.g. a list of tuples). An empty sequence is returned
- when no more rows are available.
-
- The number of rows to fetch per call is specified by the parameter. If
- it is not given, the cursor's arraysize determines the number of rows
- to be fetched. The method should try to fetch as many rows as indicated
- by the size parameter. If this is not possible due to the specified
- number of rows not being available, fewer rows may be returned.
-
- An Error (or subclass) exception is raised if the previous call to
- execute*() did not produce any result set or no call was issued yet.
-
- Note there are performance considerations involved with the size
- parameter. For optimal performance, it is usually best to use the
- arraysize attribute. If the size parameter is used, then it is best
- for it to retain the same value from one fetchmany() call to the next.
- """
- if self._res is None:
- raise ProgrammingError('No result set')
- if size is None:
- size = self.arraysize
- rows = self._res.rows[self._index:self._index + size]
- self._index += size
- return rows
-
-
- def fetchall(self):
- """Fetch all (remaining) rows of a query result, returning them as a
- sequence of sequences (e.g. a list of tuples). Note that the cursor's
- arraysize attribute can affect the performance of this operation.
-
- An Error (or subclass) exception is raised if the previous call to
- execute*() did not produce any result set or no call was issued yet.
- """
- if self._res is None:
- raise ProgrammingError('No result set')
- if not self._res.rows:
- return []
- rows = self._res.rows[self._index:]
- self._index = len(self._res)
- return rows
-
-
- def setinputsizes(self, sizes):
- """This can be used before a call to execute*() to predefine memory
- areas for the operation's parameters.
-
- sizes is specified as a sequence -- one item for each input parameter.
- The item should be a Type Object that corresponds to the input that
- will be used, or it should be an integer specifying the maximum length
- of a string parameter. If the item is None, then no predefined memory
- area will be reserved for that column (this is useful to avoid
- predefined areas for large inputs).
-
- This method would be used before the execute*() method is invoked.
-
- Implementations are free to have this method do nothing and users are
- free to not use it.
- """
- pass
-
-
- def setoutputsize(self, size, column=None):
- """Set a column buffer size for fetches of large columns (e.g. LONGs,
- BLOBs, etc.). The column is specified as an index into the result
- sequence. Not specifying the column will set the default size for all
- large columns in the cursor.
-
- This method would be used before the execute*() method is invoked.
-
- Implementations are free to have this method do nothing and users are
- free to not use it.
- """
- pass
-
-
-class LogCursor(Cursor):
- """override the standard cursor to log executed queries"""
-
- def execute(self, operation, parameters=None, eid_key=None, build_descr=True):
- """override the standard cursor to log executed queries"""
- tstart, cstart = time(), clock()
- rset = Cursor.execute(self, operation, parameters, eid_key, build_descr)
- self.connection.executed_queries.append((operation, parameters,
- time() - tstart, clock() - cstart))
- return rset
-
--- a/debian/control Tue Apr 13 19:22:46 2010 +0200
+++ b/debian/control Tue Apr 13 19:43:51 2010 +0200
@@ -83,7 +83,7 @@
Architecture: all
XB-Python-Version: ${python:Versions}
Depends: ${python:Depends}, cubicweb-common (= ${source:Version}), python-simplejson (>= 1.3)
-Recommends: python-docutils, python-vobject, fckeditor, python-fyzz, python-pysixt, fop, python-imaging
+Recommends: python-docutils, python-vobject, fckeditor, python-fyzz, python-imaging
Description: web interface library for the CubicWeb framework
CubicWeb is a semantic web application framework.
.
@@ -97,7 +97,7 @@
Package: cubicweb-common
Architecture: all
XB-Python-Version: ${python:Versions}
-Depends: ${python:Depends}, graphviz, gettext, python-logilab-mtconverter (>= 0.6.0), python-logilab-common (>= 0.49.0), python-yams (>= 0.28.1), python-rql (>= 0.25.0), python-lxml
+Depends: ${python:Depends}, graphviz, gettext, python-logilab-mtconverter (>= 0.6.0), python-logilab-common (>= 0.49.0), python-yams (>= 0.28.1), python-rql (>= 0.26.0), python-lxml
Recommends: python-simpletal (>= 4.0), python-crypto
Conflicts: cubicweb-core
Replaces: cubicweb-core
--- a/devtools/__init__.py Tue Apr 13 19:22:46 2010 +0200
+++ b/devtools/__init__.py Tue Apr 13 19:43:51 2010 +0200
@@ -97,9 +97,6 @@
}),
))
- if not os.environ.get('APYCOT_ROOT'):
- REGISTRY_DIR = normpath(join(CW_SOFTWARE_ROOT, '../cubes'))
-
def __init__(self, appid, log_threshold=logging.CRITICAL+10):
ServerConfiguration.__init__(self, appid)
self.init_log(log_threshold, force=True)
--- a/devtools/devctl.py Tue Apr 13 19:22:46 2010 +0200
+++ b/devtools/devctl.py Tue Apr 13 19:43:51 2010 +0200
@@ -22,7 +22,8 @@
from cubicweb.__pkginfo__ import version as cubicwebversion
from cubicweb import CW_SOFTWARE_ROOT as BASEDIR, BadCommandUsage
-from cubicweb.toolsutils import Command, copy_skeleton, underline_title
+from cubicweb.toolsutils import (SKEL_EXCLUDE, Command,
+ copy_skeleton, underline_title)
from cubicweb.web.webconfig import WebConfiguration
from cubicweb.server.serverconfig import ServerConfiguration
@@ -440,12 +441,19 @@
"""Create a new cube.
<cubename>
- the name of the new cube
+ the name of the new cube. It should be a valid Python module name.
"""
name = 'newcube'
arguments = '<cubename>'
options = (
+ ("layout",
+ {'short': 'L', 'type' : 'choice', 'metavar': '<cube layout>',
+ 'default': 'simple', 'choices': ('simple', 'full'),
+ 'help': 'cube layout. You\'ll get a minimal cube with the "simple" \
+layout, and a full-featured cube with the "full" layout.',
+ }
+ ),
("directory",
{'short': 'd', 'type' : 'string', 'metavar': '<cubes directory>',
'help': 'directory where the new cube should be created',
@@ -475,14 +483,28 @@
'help': 'cube author\'s web site',
}
),
+ ("license",
+ {'short': 'l', 'type' : 'choice', 'metavar': '<license>',
+ 'default': 'LGPL', 'choices': ('GPL', 'LGPL', ''),
+ 'help': 'cube license',
+ }
+ ),
)
+ LICENSES = {
+ 'LGPL': 'GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses',
+ 'GPL': 'GNU General Public License, v2 - http://www.gnu.org/licenses',
+ '': 'closed source'
+ }
def run(self, args):
+ import re
from logilab.common.shellutils import ASK
if len(args) != 1:
raise BadCommandUsage("exactly one argument (cube name) is expected")
- cubename, = args
+ cubename = args[0]
+ if not re.match('[_A-Za-z][_A-Za-z0-9]*$', cubename):
+ raise BadCommandUsage("cube name should be a valid python module name")
verbose = self.get('verbose')
cubesdir = self.get('directory')
if not cubesdir:
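With the new options, typical invocations would look like this (shown for illustration, assuming the usual cubicweb-ctl entry point):

    # cubicweb-ctl newcube --layout=simple --license=LGPL mycube
    # cubicweb-ctl newcube -L full -l GPL mycube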
@@ -533,8 +555,14 @@
'author': self['author'],
'author-email': self['author-email'],
'author-web-site': self['author-web-site'],
+ 'license': self['license'],
+ 'long-license': self.LICENSES[self['license']],
}
- copy_skeleton(skeldir, cubedir, context)
+ exclude = SKEL_EXCLUDE
+ if self['layout'] == 'simple':
+ exclude += ('sobjects.py*', 'precreate.py*', 'realdb_test*',
+ 'cubes.*', 'external_resources*')
+ copy_skeleton(skeldir, cubedir, context, exclude=exclude)
def _ask_for_dependancies(self):
from logilab.common.shellutils import ASK
--- a/devtools/repotest.py Tue Apr 13 19:22:46 2010 +0200
+++ b/devtools/repotest.py Tue Apr 13 19:43:51 2010 +0200
@@ -233,8 +233,8 @@
self._dumb_sessions.append(s)
return s
- def execute(self, rql, args=None, eid_key=None, build_descr=True):
- return self.o.execute(self.session, rql, args, eid_key, build_descr)
+ def execute(self, rql, args=None, build_descr=True):
+ return self.o.execute(self.session, rql, args, build_descr)
def commit(self):
self.session.commit()
--- a/devtools/testlib.py Tue Apr 13 19:22:46 2010 +0200
+++ b/devtools/testlib.py Tue Apr 13 19:43:51 2010 +0200
@@ -5,6 +5,8 @@
:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
"""
+from __future__ import with_statement
+
__docformat__ = "restructuredtext en"
import os
@@ -13,6 +15,7 @@
from urllib import unquote
from math import log
from contextlib import contextmanager
+from warnings import warn
import simplejson
@@ -27,9 +30,10 @@
from cubicweb import ValidationError, NoSelectableObject, AuthenticationError
from cubicweb import cwconfig, devtools, web, server
-from cubicweb.dbapi import repo_connect, ConnectionProperties, ProgrammingError
+from cubicweb.dbapi import ProgrammingError, DBAPISession, repo_connect
from cubicweb.sobjects import notification
from cubicweb.web import Redirect, application
+from cubicweb.server.session import security_enabled
from cubicweb.devtools import SYSTEM_ENTITIES, SYSTEM_RELATIONS, VIEW_VALIDATORS
from cubicweb.devtools import fake, htmlparser
@@ -209,11 +213,10 @@
cls.init_config(cls.config)
cls.repo.hm.call_hooks('server_startup', repo=cls.repo)
cls.vreg = cls.repo.vreg
- cls._orig_cnx = cls.cnx
+ cls.websession = DBAPISession(cls.cnx, cls.admlogin,
+ {'password': cls.admpassword})
+ cls._orig_cnx = (cls.cnx, cls.websession)
cls.config.repository = lambda x=None: cls.repo
- # necessary for authentication tests
- cls.cnx.login = cls.admlogin
- cls.cnx.authinfo = {'password': cls.admpassword}
@classmethod
def _refresh_repo(cls):
@@ -236,7 +239,7 @@
@property
def adminsession(self):
"""return current server side session (using default manager account)"""
- return self.repo._sessions[self._orig_cnx.sessionid]
+ return self.repo._sessions[self._orig_cnx[0].sessionid]
def set_option(self, optname, value):
self.config.global_set_option(optname, value)
@@ -286,12 +289,12 @@
if password is None:
password = login.encode('utf8')
if req is None:
- req = self._orig_cnx.request()
+ req = self._orig_cnx[0].request()
user = req.create_entity('CWUser', login=unicode(login),
upassword=password, **kwargs)
req.execute('SET X in_group G WHERE X eid %%(x)s, G name IN(%s)'
% ','.join(repr(g) for g in groups),
- {'x': user.eid}, 'x')
+ {'x': user.eid})
user.clear_related_cache('in_group', 'subject')
if commit:
req.cnx.commit()
@@ -304,22 +307,21 @@
else:
if not kwargs:
kwargs['password'] = str(login)
- self.cnx = repo_connect(self.repo, unicode(login),
- cnxprops=ConnectionProperties('inmemory'),
- **kwargs)
+ self.cnx = repo_connect(self.repo, unicode(login), **kwargs)
+ self.websession = DBAPISession(self.cnx)
self._cnxs.append(self.cnx)
if login == self.vreg.config.anonymous_user()[0]:
self.cnx.anonymous_connection = True
return self.cnx
def restore_connection(self):
- if not self.cnx is self._orig_cnx:
+ if not self.cnx is self._orig_cnx[0]:
try:
self.cnx.close()
self._cnxs.remove(self.cnx)
except ProgrammingError:
pass # already closed
- self.cnx = self._orig_cnx
+ self.cnx, self.websession = self._orig_cnx
# db api ##################################################################
@@ -332,8 +334,11 @@
"""executes <rql>, builds a resultset, and returns a couple (rset, req)
where req is a FakeRequest
"""
+ if eidkey is not None:
+ warn('[3.8] eidkey is deprecated, you can safely remove this argument',
+ DeprecationWarning, stacklevel=2)
req = req or self.request(rql=rql)
- return self.cnx.cursor(req).execute(unicode(rql), args, eidkey)
+ return req.execute(unicode(rql), args)
@nocoverage
def commit(self):
@@ -354,14 +359,14 @@
# # server side db api #######################################################
def sexecute(self, rql, args=None, eid_key=None):
+ if eid_key is not None:
+ warn('[3.8] eid_key is deprecated, you can safely remove this argument',
+ DeprecationWarning, stacklevel=2)
self.session.set_pool()
- return self.session.execute(rql, args, eid_key)
+ return self.session.execute(rql, args)
# other utilities #########################################################
- def entity(self, rql, args=None, eidkey=None, req=None):
- return self.execute(rql, args, eidkey, req=req).get_entity(0, 0)
-
@contextmanager
def temporary_appobjects(self, *appobjects):
self.vreg._loadedmods.setdefault(self.__module__, {})
@@ -478,7 +483,7 @@
def request(self, *args, **kwargs):
"""return a web ui request"""
req = self.requestcls(self.vreg, form=kwargs)
- req.set_connection(self.cnx)
+ req.set_session(self.websession)
return req
def remote_call(self, fname, *args):
@@ -534,27 +539,31 @@
self.set_option('auth-mode', authmode)
self.set_option('anonymous-user', anonuser)
req = self.request()
- origcnx = req.cnx
- req.cnx = None
+ origsession = req.session
+ req.session = req.cnx = None
+ del req.execute # get back to class implementation
sh = self.app.session_handler
authm = sh.session_manager.authmanager
authm.anoninfo = self.vreg.config.anonymous_user()
+ authm.anoninfo = authm.anoninfo[0], {'password': authm.anoninfo[1]}
# not properly cleaned between tests
self.open_sessions = sh.session_manager._sessions = {}
- return req, origcnx
+ return req, origsession
- def assertAuthSuccess(self, req, origcnx, nbsessions=1):
+ def assertAuthSuccess(self, req, origsession, nbsessions=1):
sh = self.app.session_handler
path, params = self.expect_redirect(lambda x: self.app.connect(x), req)
- cnx = req.cnx
+ session = req.session
self.assertEquals(len(self.open_sessions), nbsessions, self.open_sessions)
- self.assertEquals(cnx.login, origcnx.login)
- self.assertEquals(cnx.anonymous_connection, False)
+ self.assertEquals(session.login, origsession.login)
+ self.assertEquals(session.anonymous_session, False)
self.assertEquals(path, 'view')
- self.assertEquals(params, {'__message': 'welcome %s !' % cnx.user().login})
+ self.assertEquals(params, {'__message': 'welcome %s !' % req.user.login})
def assertAuthFailure(self, req, nbsessions=0):
- self.assertRaises(AuthenticationError, self.app.connect, req)
+ self.app.connect(req)
+ self.assertIsInstance(req.session, DBAPISession)
+ self.assertEquals(req.session.cnx, None)
self.assertEquals(req.cnx, None)
self.assertEquals(len(self.open_sessions), nbsessions)
clear_cache(req, 'get_authorization')
@@ -685,29 +694,19 @@
# deprecated ###############################################################
+ @deprecated('[3.8] use self.execute(...).get_entity(0, 0)')
+ def entity(self, rql, args=None, eidkey=None, req=None):
+ if eidkey is not None:
+ warn('[3.8] eidkey is deprecated, you can safely remove this argument',
+ DeprecationWarning, stacklevel=2)
+ return self.execute(rql, args, req=req).get_entity(0, 0)
+
@deprecated('[3.6] use self.request().create_entity(...)')
def add_entity(self, etype, req=None, **kwargs):
if req is None:
req = self.request()
return req.create_entity(etype, **kwargs)
- @deprecated('[3.4] use self.vreg["etypes"].etype_class(etype)(self.request())')
- def etype_instance(self, etype, req=None):
- req = req or self.request()
- e = self.vreg['etypes'].etype_class(etype)(req)
- e.eid = None
- return e
-
- @nocoverage
- @deprecated('[3.4] use req = self.request(); rset = req.execute()',
- stacklevel=3)
- def rset_and_req(self, rql, optional_args=None, args=None, eidkey=None):
- """executes <rql>, builds a resultset, and returns a
- couple (rset, req) where req is a FakeRequest
- """
- return (self.execute(rql, args, eidkey),
- self.request(rql=rql, **optional_args or {}))
-
# auto-populating test classes and utilities ###################################
@@ -791,6 +790,10 @@
"""this method populates the database with `how_many` entities
of each possible type. It also inserts random relations between them
"""
+ with security_enabled(self.session, read=False, write=False):
+ self._auto_populate(how_many)
+
+ def _auto_populate(self, how_many):
cu = self.cursor()
self.custom_populate(how_many, cu)
vreg = self.vreg
--- a/entities/authobjs.py Tue Apr 13 19:22:46 2010 +0200
+++ b/entities/authobjs.py Tue Apr 13 19:43:51 2010 +0200
@@ -96,7 +96,7 @@
try:
return self._cw.execute(
'Any X WHERE X eid %(x)s, X owned_by U, U eid %(u)s',
- {'x': eid, 'u': self.eid}, 'x')
+ {'x': eid, 'u': self.eid})
except Unauthorized:
return False
owns = cached(owns, keyarg=1)
@@ -105,13 +105,11 @@
rql = 'Any P WHERE P is CWPermission, U eid %(u)s, U in_group G, '\
'P name %(pname)s, P require_group G'
kwargs = {'pname': pname, 'u': self.eid}
- cachekey = None
if contexteid is not None:
rql += ', X require_permission P, X eid %(x)s'
kwargs['x'] = contexteid
- cachekey = 'x'
try:
- return self._cw.execute(rql, kwargs, cachekey)
+ return self._cw.execute(rql, kwargs)
except Unauthorized:
return False
--- a/entities/lib.py Tue Apr 13 19:22:46 2010 +0200
+++ b/entities/lib.py Tue Apr 13 19:43:51 2010 +0200
@@ -23,6 +23,7 @@
return address
return '%s at %s' % (name, host.replace('.', ' dot '))
+
class EmailAddress(AnyEntity):
__regid__ = 'EmailAddress'
fetch_attrs, fetch_order = fetch_config(['address', 'alias'])
@@ -50,8 +51,10 @@
subjrels = self.e_schema.object_relations()
if not ('sender' in subjrels and 'recipients' in subjrels):
return
- rql = 'DISTINCT Any X, S, D ORDERBY D DESC WHERE X sender Y or X recipients Y, X subject S, X date D, Y eid %(y)s'
- rset = self._cw.execute(rql, {'y': self.eid}, 'y')
+ rset = self._cw.execute('DISTINCT Any X, S, D ORDERBY D DESC '
+ 'WHERE X sender Y or X recipients Y, '
+ 'X subject S, X date D, Y eid %(y)s',
+ {'y': self.eid})
if skipeids is None:
skipeids = set()
for i in xrange(len(rset)):
@@ -131,7 +134,7 @@
def touch(self):
self._cw.execute('SET X timestamp %(t)s WHERE X eid %(x)s',
- {'t': datetime.now(), 'x': self.eid}, 'x')
+ {'t': datetime.now(), 'x': self.eid})
def valid(self, date):
if date:
--- a/entities/schemaobjs.py Tue Apr 13 19:22:46 2010 +0200
+++ b/entities/schemaobjs.py Tue Apr 13 19:43:51 2010 +0200
@@ -162,6 +162,9 @@
fetch_attrs, fetch_order = fetch_config(['exprtype', 'mainvars', 'expression'])
def dc_title(self):
+ return self.expression or u''
+
+ def dc_long_title(self):
return '%s(%s)' % (self.exprtype, self.expression or u'')
@property
--- a/entities/test/unittest_base.py Tue Apr 13 19:22:46 2010 +0200
+++ b/entities/test/unittest_base.py Tue Apr 13 19:43:51 2010 +0200
@@ -69,18 +69,18 @@
class CWUserTC(BaseEntityTC):
def test_complete(self):
- e = self.entity('CWUser X WHERE X login "admin"')
+ e = self.execute('CWUser X WHERE X login "admin"').get_entity(0, 0)
e.complete()
def test_matching_groups(self):
- e = self.entity('CWUser X WHERE X login "admin"')
+ e = self.execute('CWUser X WHERE X login "admin"').get_entity(0, 0)
self.failUnless(e.matching_groups('managers'))
self.failIf(e.matching_groups('xyz'))
self.failUnless(e.matching_groups(('xyz', 'managers')))
self.failIf(e.matching_groups(('xyz', 'abcd')))
def test_dc_title_and_name(self):
- e = self.entity('CWUser U WHERE U login "member"')
+ e = self.execute('CWUser U WHERE U login "member"').get_entity(0, 0)
self.assertEquals(e.dc_title(), 'member')
self.assertEquals(e.name(), 'member')
e.set_attributes(firstname=u'bouah')
@@ -91,7 +91,7 @@
self.assertEquals(e.name(), u'bouah lôt')
def test_allowed_massmail_keys(self):
- e = self.entity('CWUser U WHERE U login "member"')
+ e = self.execute('CWUser U WHERE U login "member"').get_entity(0, 0)
# Bytes/Password attributes should be omitted
self.assertEquals(e.allowed_massmail_keys(),
set(('surname', 'firstname', 'login', 'last_login_time',
--- a/entities/test/unittest_wfobjs.py Tue Apr 13 19:22:46 2010 +0200
+++ b/entities/test/unittest_wfobjs.py Tue Apr 13 19:43:51 2010 +0200
@@ -96,7 +96,7 @@
self.assertEquals(e.latest_trinfo().comment, 'deactivate 2')
def test_possible_transitions(self):
- user = self.entity('CWUser X')
+ user = self.execute('CWUser X').get_entity(0, 0)
trs = list(user.possible_transitions())
self.assertEquals(len(trs), 1)
self.assertEquals(trs[0].name, u'deactivate')
@@ -131,7 +131,7 @@
with security_enabled(self.session, write=False):
ex = self.assertRaises(ValidationError, self.session.execute,
'SET X in_state S WHERE X eid %(x)s, S eid %(s)s',
- {'x': self.user().eid, 's': s.eid}, 'x')
+ {'x': self.user().eid, 's': s.eid})
self.assertEquals(ex.errors, {'in_state-subject': "state doesn't belong to entity's workflow. "
"You may want to set a custom workflow for this entity first."})
@@ -413,7 +413,7 @@
wf = add_wf(self, 'Company')
wf.add_state('asleep', initial=True)
self.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s',
- {'wf': wf.eid, 'x': self.member.eid}, 'x')
+ {'wf': wf.eid, 'x': self.member.eid})
ex = self.assertRaises(ValidationError, self.commit)
self.assertEquals(ex.errors, {'custom_workflow-subject': 'workflow isn\'t a workflow for this type'})
--- a/entities/wfobjs.py Tue Apr 13 19:22:46 2010 +0200
+++ b/entities/wfobjs.py Tue Apr 13 19:43:51 2010 +0200
@@ -65,7 +65,7 @@
def state_by_name(self, statename):
rset = self._cw.execute('Any S, SN WHERE S name SN, S name %(n)s, '
'S state_of WF, WF eid %(wf)s',
- {'n': statename, 'wf': self.eid}, 'wf')
+ {'n': statename, 'wf': self.eid})
if rset:
return rset.get_entity(0, 0)
return None
@@ -73,7 +73,7 @@
def state_by_eid(self, eid):
rset = self._cw.execute('Any S, SN WHERE S name SN, S eid %(s)s, '
'S state_of WF, WF eid %(wf)s',
- {'s': eid, 'wf': self.eid}, ('wf', 's'))
+ {'s': eid, 'wf': self.eid})
if rset:
return rset.get_entity(0, 0)
return None
@@ -81,7 +81,7 @@
def transition_by_name(self, trname):
rset = self._cw.execute('Any T, TN WHERE T name TN, T name %(n)s, '
'T transition_of WF, WF eid %(wf)s',
- {'n': trname, 'wf': self.eid}, 'wf')
+ {'n': trname, 'wf': self.eid})
if rset:
return rset.get_entity(0, 0)
return None
@@ -89,7 +89,7 @@
def transition_by_eid(self, eid):
rset = self._cw.execute('Any T, TN WHERE T name TN, T eid %(t)s, '
'T transition_of WF, WF eid %(wf)s',
- {'t': eid, 'wf': self.eid}, ('wf', 't'))
+ {'t': eid, 'wf': self.eid})
if rset:
return rset.get_entity(0, 0)
return None
@@ -100,12 +100,12 @@
"""add a state to this workflow"""
state = self._cw.create_entity('State', name=unicode(name), **kwargs)
self._cw.execute('SET S state_of WF WHERE S eid %(s)s, WF eid %(wf)s',
- {'s': state.eid, 'wf': self.eid}, ('s', 'wf'))
+ {'s': state.eid, 'wf': self.eid})
if initial:
assert not self.initial, "Initial state already defined as %s" % self.initial
self._cw.execute('SET WF initial_state S '
'WHERE S eid %(s)s, WF eid %(wf)s',
- {'s': state.eid, 'wf': self.eid}, ('s', 'wf'))
+ {'s': state.eid, 'wf': self.eid})
return state
def _add_transition(self, trtype, name, fromstates,
@@ -113,7 +113,7 @@
tr = self._cw.create_entity(trtype, name=unicode(name), **kwargs)
self._cw.execute('SET T transition_of WF '
'WHERE T eid %(t)s, WF eid %(wf)s',
- {'t': tr.eid, 'wf': self.eid}, ('t', 'wf'))
+ {'t': tr.eid, 'wf': self.eid})
assert fromstates, fromstates
if not isinstance(fromstates, (tuple, list)):
fromstates = (fromstates,)
@@ -122,7 +122,7 @@
state = state.eid
self._cw.execute('SET S allowed_transition T '
'WHERE S eid %(s)s, T eid %(t)s',
- {'s': state, 't': tr.eid}, ('s', 't'))
+ {'s': state, 't': tr.eid})
tr.set_permissions(requiredgroups, conditions, reset=False)
return tr
@@ -136,7 +136,7 @@
tostate = tostate.eid
self._cw.execute('SET T destination_state S '
'WHERE S eid %(s)s, T eid %(t)s',
- {'t': tr.eid, 's': tostate}, ('s', 't'))
+ {'t': tr.eid, 's': tostate})
return tr
def add_wftransition(self, name, subworkflow, fromstates, exitpoints=(),
@@ -147,7 +147,7 @@
if hasattr(subworkflow, 'eid'):
subworkflow = subworkflow.eid
assert self._cw.execute('SET T subworkflow WF WHERE WF eid %(wf)s,T eid %(t)s',
- {'t': tr.eid, 'wf': subworkflow}, ('wf', 't'))
+ {'t': tr.eid, 'wf': subworkflow})
for fromstate, tostate in exitpoints:
tr.add_exit_point(fromstate, tostate)
return tr
@@ -159,11 +159,11 @@
if not hasattr(replacement, 'eid'):
replacement = self.state_by_name(replacement)
execute = self._cw.execute
- execute('SET X in_state S WHERE S eid %(s)s', {'s': todelstate.eid}, 's')
+ execute('SET X in_state S WHERE S eid %(s)s', {'s': todelstate.eid})
execute('SET X from_state NS WHERE X to_state OS, OS eid %(os)s, NS eid %(ns)s',
- {'os': todelstate.eid, 'ns': replacement.eid}, 's')
+ {'os': todelstate.eid, 'ns': replacement.eid})
execute('SET X to_state NS WHERE X to_state OS, OS eid %(os)s, NS eid %(ns)s',
- {'os': todelstate.eid, 'ns': replacement.eid}, 's')
+ {'os': todelstate.eid, 'ns': replacement.eid})
todelstate.delete()
@@ -174,7 +174,7 @@
fired by the logged user
"""
__regid__ = 'BaseTransition'
- fetch_attrs, fetch_order = fetch_config(['name'])
+ fetch_attrs, fetch_order = fetch_config(['name', 'type'])
def __init__(self, *args, **kwargs):
if self.__regid__ == 'BaseTransition':
@@ -227,13 +227,13 @@
"""
if reset:
self._cw.execute('DELETE T require_group G WHERE T eid %(x)s',
- {'x': self.eid}, 'x')
+ {'x': self.eid})
self._cw.execute('DELETE T condition R WHERE T eid %(x)s',
- {'x': self.eid}, 'x')
+ {'x': self.eid})
for gname in requiredgroups:
rset = self._cw.execute('SET T require_group G '
'WHERE T eid %(x)s, G name %(gn)s',
- {'x': self.eid, 'gn': gname}, 'x')
+ {'x': self.eid, 'gn': gname})
assert rset, '%s is not a known group' % gname
if isinstance(conditions, basestring):
conditions = (conditions,)
@@ -247,7 +247,7 @@
kwargs.setdefault('mainvars', u'X')
self._cw.execute('INSERT RQLExpression X: X exprtype "ERQLExpression", '
'X expression %(expr)s, X mainvars %(mainvars)s, '
- 'T condition X WHERE T eid %(x)s',kwargs, 'x')
+ 'T condition X WHERE T eid %(x)s', kwargs)
# XXX clear caches?
@deprecated('[3.6.1] use set_permission')
@@ -299,15 +299,14 @@
if tostate is None:
self._cw.execute('INSERT SubWorkflowExitPoint X: T subworkflow_exit X, '
'X subworkflow_state FS WHERE T eid %(t)s, FS eid %(fs)s',
- {'t': self.eid, 'fs': fromstate}, ('t', 'fs'))
+ {'t': self.eid, 'fs': fromstate})
else:
if hasattr(tostate, 'eid'):
tostate = tostate.eid
self._cw.execute('INSERT SubWorkflowExitPoint X: T subworkflow_exit X, '
'X subworkflow_state FS, X destination_state TS '
'WHERE T eid %(t)s, FS eid %(fs)s, TS eid %(ts)s',
- {'t': self.eid, 'fs': fromstate, 'ts': tostate},
- ('t', 'fs', 'ts'))
+ {'t': self.eid, 'fs': fromstate, 'ts': tostate})
def get_exit_point(self, entity, stateeid):
"""if state is an exit point, return its associated destination state"""
@@ -469,7 +468,7 @@
'T type TT, T type %(type)s, '
'T name TN, T transition_of WF, WF eid %(wfeid)s',
{'x': self.current_state.eid, 'type': type,
- 'wfeid': self.current_workflow.eid}, 'x')
+ 'wfeid': self.current_workflow.eid})
for tr in rset.entities():
if tr.may_be_fired(self.eid):
yield tr
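
Throughout the entity and hook modules above, `execute()` loses its trailing cache-key argument (`'x'`, `('s', 'wf')`, ...): eid substitutions are now recognised directly from the argument dict. A minimal sketch of the new calling convention, assuming `wf` is a workflow entity carrying `_cw` and `eid` as in the classes above:

def state_names(wf):
    """Sketch: list the state names of workflow entity `wf`."""
    # old form: wf._cw.execute(rql, {'wf': wf.eid}, 'wf')  -- extra key argument
    rset = wf._cw.execute('Any SN WHERE S name SN, S state_of WF, WF eid %(wf)s',
                          {'wf': wf.eid})
    return [row[0] for row in rset]
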
--- a/entity.py Tue Apr 13 19:22:46 2010 +0200
+++ b/entity.py Tue Apr 13 19:43:51 2010 +0200
@@ -553,7 +553,7 @@
continue
rql = 'SET X %s V WHERE X eid %%(x)s, Y eid %%(y)s, Y %s V' % (
rschema.type, rschema.type)
- execute(rql, {'x': self.eid, 'y': ceid}, ('x', 'y'))
+ execute(rql, {'x': self.eid, 'y': ceid})
self.clear_related_cache(rschema.type, 'subject')
for rschema in self.e_schema.object_relations():
if rschema.meta:
@@ -571,7 +571,7 @@
continue
rql = 'SET V %s X WHERE X eid %%(x)s, Y eid %%(y)s, V %s Y' % (
rschema.type, rschema.type)
- execute(rql, {'x': self.eid, 'y': ceid}, ('x', 'y'))
+ execute(rql, {'x': self.eid, 'y': ceid})
self.clear_related_cache(rschema.type, 'object')
# data fetching methods ###################################################
@@ -673,8 +673,7 @@
# if some outer join are included to fetch inlined relations
rql = 'Any %s,%s %s' % (V, ','.join(var for attr, var in selected),
','.join(rql))
- rset = self._cw.execute(rql, {'x': self.eid}, 'x',
- build_descr=False)[0]
+ rset = self._cw.execute(rql, {'x': self.eid}, build_descr=False)[0]
# handle attributes
for i in xrange(1, lastattr):
self[str(selected[i-1][0])] = rset[i]
@@ -703,7 +702,7 @@
return None
rql = "Any A WHERE X eid %%(x)s, X %s A" % name
try:
- rset = self._cw.execute(rql, {'x': self.eid}, 'x')
+ rset = self._cw.execute(rql, {'x': self.eid})
except Unauthorized:
self[name] = value = None
else:
@@ -734,7 +733,7 @@
pass
assert self.has_eid()
rql = self.related_rql(rtype, role)
- rset = self._cw.execute(rql, {'x': self.eid}, 'x')
+ rset = self._cw.execute(rql, {'x': self.eid})
self.set_related_cache(rtype, role, rset)
return self.related(rtype, role, limit, entities)
@@ -860,7 +859,7 @@
if limit is not None:
before, after = rql.split(' WHERE ', 1)
rql = '%s LIMIT %s WHERE %s' % (before, limit, after)
- return self._cw.execute(rql, args, tuple(args))
+ return self._cw.execute(rql, args)
# relations cache handling ################################################
@@ -943,7 +942,7 @@
# and now update the database
kwargs['x'] = self.eid
self._cw.execute('SET %s WHERE X eid %%(x)s' % ','.join(relations),
- kwargs, 'x')
+ kwargs)
kwargs.pop('x')
# update current local object _after_ the rql query to avoid
# interferences between the query execution itself and the
@@ -966,13 +965,13 @@
restr = 'X %s Y' % attr
if values is None:
self._cw.execute('DELETE %s WHERE X eid %%(x)s' % restr,
- {'x': self.eid}, 'x')
+ {'x': self.eid})
continue
if not isinstance(values, (tuple, list, set, frozenset)):
values = (values,)
self._cw.execute('SET %s WHERE X eid %%(x)s, Y eid IN (%s)' % (
restr, ','.join(str(r.eid) for r in values)),
- {'x': self.eid}, 'x')
+ {'x': self.eid})
def delete(self, **kwargs):
assert self.has_eid(), self.eid
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/etwist/http.py Tue Apr 13 19:43:51 2010 +0200
@@ -0,0 +1,71 @@
+"""twisted server for CubicWeb web instances
+
+:organization: Logilab
+:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
+"""
+
+__docformat__ = "restructuredtext en"
+
+from cubicweb.web.http_headers import Headers
+
+class HTTPResponse(object):
+ """An object representing an HTTP Response to be sent to the client.
+ """
+ def __init__(self, twisted_request, code=None, headers=None, stream=None):
+ self._headers_out = headers
+ self._twreq = twisted_request
+ self._stream = stream
+ self._code = code
+
+ self._init_headers()
+ self._finalize()
+
+ def _init_headers(self):
+ if self._headers_out is None:
+ return
+
+ # initialize cookies
+ cookies = self._headers_out.getHeader('set-cookie') or []
+ for cookie in cookies:
+ self._twreq.addCookie(cookie.name, cookie.value, cookie.expires,
+ cookie.domain, cookie.path, #TODO max-age
+ comment = cookie.comment, secure=cookie.secure)
+ self._headers_out.removeHeader('set-cookie')
+
+ # initialize other headers
+ for k, v in self._headers_out.getAllRawHeaders():
+ self._twreq.setHeader(k, v[0])
+
+ # add content-length if not present
+ if (self._headers_out.getHeader('content-length') is None
+ and self._stream is not None):
+ self._twreq.setHeader('content-length', len(self._stream))
+
+
+ def _finalize(self):
+ if self._stream is not None:
+ self._twreq.write(str(self._stream))
+ if self._code is not None:
+ self._twreq.setResponseCode(self._code)
+ self._twreq.finish()
+
+ def __repr__(self):
+ return "<%s.%s code=%d>" % (self.__module__, self.__class__.__name__, self._code)
+
+
+def not_modified_response(twisted_request, headers_in):
+ headers_out = Headers()
+
+ for header in (
+ # Required from sec 10.3.5:
+ 'date', 'etag', 'content-location', 'expires',
+ 'cache-control', 'vary',
+ # Others:
+ 'server', 'proxy-authenticate', 'www-authenticate', 'warning'):
+ value = headers_in.getRawHeaders(header)
+ if value is not None:
+ headers_out.setRawHeaders(header, value)
+ return HTTPResponse(twisted_request=twisted_request,
+ headers=headers_out)
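
`HTTPResponse` pushes status code, headers, cookies and body to the wrapped twisted request as soon as it is constructed, so resource code simply builds one and returns it. A minimal usage sketch, assuming `req` is the `CubicWebTwistedRequestAdapter` defined in `etwist/request.py` (so `req._twreq`, `req.headers_out` and `req._headers_in` exist as in the hunks below):

from cubicweb.etwist.http import HTTPResponse, not_modified_response

def ok_response(req, html):
    """Sketch: normal 200 response carrying `html` as its body."""
    return HTTPResponse(twisted_request=req._twreq, code=200,
                        stream=html, headers=req.headers_out)

def cached_response(req):
    """Sketch: 304 response copying the cache-related incoming headers."""
    return not_modified_response(req._twreq, req._headers_in)
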
--- a/etwist/request.py Tue Apr 13 19:22:46 2010 +0200
+++ b/etwist/request.py Tue Apr 13 19:43:51 2010 +0200
@@ -9,22 +9,13 @@
from datetime import datetime
-from twisted.web2 import http, http_headers
+from twisted.web import http
from cubicweb.web import DirectResponse
from cubicweb.web.request import CubicWebRequestBase
from cubicweb.web.httpcache import GMTOFFSET
-
-def cleanup_files(dct, encoding):
- d = {}
- for k, infos in dct.items():
- for (filename, mt, stream) in infos:
- if filename:
- # XXX: suppose that no file submitted <-> no filename
- filename = unicode(filename, encoding)
- mt = u'%s/%s' % (mt.mediaType, mt.mediaSubtype)
- d[k] = (filename, mt, stream)
- return d
+from cubicweb.web.http_headers import Headers
+from cubicweb.etwist.http import not_modified_response
class CubicWebTwistedRequestAdapter(CubicWebRequestBase):
@@ -32,10 +23,15 @@
self._twreq = req
self._base_url = base_url
super(CubicWebTwistedRequestAdapter, self).__init__(vreg, https, req.args)
- self.form.update(cleanup_files(req.files, self.encoding))
- # prepare output headers
- self.headers_out = http_headers.Headers()
- self._headers = req.headers
+ for key, (name, stream) in req.files.iteritems():
+ if name is None:
+ self.form[key] = (name, stream)
+ else:
+ self.form[key] = (unicode(name, self.encoding), stream)
+ # XXX can't we keep received_headers?
+ self._headers_in = Headers()
+ for k, v in req.received_headers.iteritems():
+ self._headers_in.addRawHeader(k, v)
def base_url(self):
"""return the root url of the instance"""
@@ -63,29 +59,8 @@
raise KeyError if the header is not set
"""
if raw:
- return self._twreq.headers.getRawHeaders(header, [default])[0]
- return self._twreq.headers.getHeader(header, default)
-
- def set_header(self, header, value, raw=True):
- """set an output HTTP header"""
- if raw:
- # adding encoded header is important, else page content
- # will be reconverted back to unicode and apart unefficiency, this
- # may cause decoding problem (e.g. when downloading a file)
- self.headers_out.setRawHeaders(header, [str(value)])
- else:
- self.headers_out.setHeader(header, value)
-
- def add_header(self, header, value):
- """add an output HTTP header"""
- # adding encoded header is important, else page content
- # will be reconverted back to unicode and apart unefficiency, this
- # may cause decoding problem (e.g. when downloading a file)
- self.headers_out.addRawHeader(header, str(value))
-
- def remove_header(self, header):
- """remove an output HTTP header"""
- self.headers_out.removeHeader(header)
+ return self._headers_in.getRawHeaders(header, [default])[0]
+ return self._headers_in.getHeader(header, default)
def _validate_cache(self):
"""raise a `DirectResponse` exception if a cached page along the way
@@ -95,11 +70,32 @@
# Expires header seems to be required by IE7
self.add_header('Expires', 'Sat, 01 Jan 2000 00:00:00 GMT')
return
- try:
- http.checkPreconditions(self._twreq, _PreResponse(self))
- except http.HTTPError, ex:
- self.info('valid http cache, no actual rendering')
- raise DirectResponse(ex.response)
+
+ # when using both 'Last-Modified' and 'ETag' response headers
+ # (i.e. handling respectively the If-Modified-Since and If-None-Match
+ # request headers), see
+ # http://www.w3.org/Protocols/rfc2616/rfc2616-sec13.html#sec13.3.4
+ # for reference
+
+ cached_because_not_modified_since = False
+
+ last_modified = self.headers_out.getHeader('last-modified')
+ if last_modified is not None:
+ cached_because_not_modified_since = (self._twreq.setLastModified(last_modified)
+ == http.CACHED)
+
+ if not cached_because_not_modified_since:
+ return
+
+ cached_because_etag_is_same = False
+ etag = self.headers_out.getRawHeaders('etag')
+ if etag is not None:
+ cached_because_etag_is_same = self._twreq.setETag(etag[0]) == http.CACHED
+
+ if cached_because_etag_is_same:
+ response = not_modified_response(self._twreq, self._headers_in)
+ raise DirectResponse(response)
+
# Expires header seems to be required by IE7
self.add_header('Expires', 'Sat, 01 Jan 2000 00:00:00 GMT')
@@ -120,9 +116,3 @@
# :/ twisted returns a localized time stamp
return datetime.fromtimestamp(mtime) + GMTOFFSET
return None
-
-
-class _PreResponse(object):
- def __init__(self, request):
- self.headers = request.headers_out
- self.code = 200
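
`_validate_cache` now leans on twisted.web's own conditional-request handling: `Request.setLastModified()` and `Request.setETag()` each return `http.CACHED` when the client's If-Modified-Since / If-None-Match headers show its copy is still fresh, and only when both checks agree is a 304 short-circuit raised. A condensed sketch of that contract, using plain twisted.web and the names from the hunk above:

from twisted.web import http

def client_copy_is_fresh(twreq, last_modified, etag):
    """Sketch: True when both conditional checks report http.CACHED."""
    not_modified = (last_modified is not None
                    and twreq.setLastModified(last_modified) == http.CACHED)
    if not not_modified:
        return False
    return etag is not None and twreq.setETag(etag) == http.CACHED
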
--- a/etwist/server.py Tue Apr 13 19:22:46 2010 +0200
+++ b/etwist/server.py Tue Apr 13 19:43:51 2010 +0200
@@ -11,22 +11,26 @@
import os
import select
import errno
+from os.path import join
from time import mktime
from datetime import date, timedelta
from urlparse import urlsplit, urlunsplit
+from cgi import FieldStorage, parse_header
from twisted.internet import reactor, task, threads
from twisted.internet.defer import maybeDeferred
-from twisted.web2 import channel, http, server, iweb
-from twisted.web2 import static, resource, responsecode
+from twisted.web import http, server
+from twisted.web import static, resource
+from twisted.web.server import NOT_DONE_YET
+
+from logilab.common.decorators import monkeypatch
-from cubicweb import ConfigurationError, CW_EVENT_MANAGER
-from cubicweb.web import (AuthenticationError, NotFound, Redirect,
- RemoteCallFailed, DirectResponse, StatusResponse,
- ExplicitLogin)
+from cubicweb import AuthenticationError, ConfigurationError, CW_EVENT_MANAGER
+from cubicweb.web import Redirect, DirectResponse, StatusResponse, LogOut
from cubicweb.web.application import CubicWebPublisher
-
+from cubicweb.web.http_headers import generateDateTime
from cubicweb.etwist.request import CubicWebTwistedRequestAdapter
+from cubicweb.etwist.http import HTTPResponse
def daemonize():
# XXX unix specific
@@ -67,8 +71,20 @@
return baseurl
-class LongTimeExpiringFile(static.File):
- """overrides static.File and sets a far futre ``Expires`` date
+class ForbiddenDirectoryLister(resource.Resource):
+ def render(self, request):
+ return HTTPResponse(twisted_request=request,
+ code=http.FORBIDDEN,
+ stream='Access forbidden')
+
+class File(static.File):
+ """Prevent from listing directories"""
+ def directoryListing(self):
+ return ForbiddenDirectoryLister()
+
+
+class LongTimeExpiringFile(File):
+ """overrides static.File and sets a far future ``Expires`` date
on the resource.
versions handling is done by serving static files by different
@@ -79,22 +95,16 @@
etc.
"""
- def renderHTTP(self, request):
- def setExpireHeader(response):
- response = iweb.IResponse(response)
- # Don't provide additional resource information to error responses
- if response.code < 400:
- # the HTTP RFC recommands not going further than 1 year ahead
- expires = date.today() + timedelta(days=6*30)
- response.headers.setHeader('Expires', mktime(expires.timetuple()))
- return response
- d = maybeDeferred(super(LongTimeExpiringFile, self).renderHTTP, request)
- return d.addCallback(setExpireHeader)
+ def render(self, request):
+ # XXX: Don't provide additional resource information to error responses
+ #
+ # the HTTP RFC recommends not going further than 1 year ahead
+ expires = date.today() + timedelta(days=6*30)
+ request.setHeader('Expires', generateDateTime(mktime(expires.timetuple())))
+ return File.render(self, request)
-class CubicWebRootResource(resource.PostableResource):
- addSlash = False
-
+class CubicWebRootResource(resource.Resource):
def __init__(self, config, debug=None):
self.debugmode = debug
self.config = config
@@ -103,8 +113,9 @@
self.appli = CubicWebPublisher(config, debug=self.debugmode)
self.base_url = config['base-url']
self.https_url = config['https-url']
- self.versioned_datadir = 'data%s' % config.instance_md5_version()
-
+ self.children = {}
+ self.static_directories = set(('data%s' % config.instance_md5_version(),
+ 'data', 'static', 'fckeditor'))
def init_publisher(self):
config = self.config
# when we have an in-memory repository, clean unused sessions every XX
@@ -145,35 +156,34 @@
except select.error:
return
- def locateChild(self, request, segments):
+ def getChild(self, path, request):
"""Indicate which resource to use to process down the URL's path"""
- if segments:
- if segments[0] == 'https':
- segments = segments[1:]
- if len(segments) >= 2:
- if segments[0] in (self.versioned_datadir, 'data', 'static'):
- # Anything in data/, static/ is treated as static files
- if segments[0] == 'static':
- # instance static directory
- datadir = self.config.static_directory
- elif segments[1] == 'fckeditor':
- fckeditordir = self.config.ext_resources['FCKEDITOR_PATH']
- return static.File(fckeditordir), segments[2:]
- else:
- # cube static data file
- datadir = self.config.locate_resource(segments[1])
- if datadir is None:
- return None, []
- self.debug('static file %s from %s', segments[-1], datadir)
- if segments[0] == 'data':
- return static.File(str(datadir)), segments[1:]
- else:
- return LongTimeExpiringFile(datadir), segments[1:]
- elif segments[0] == 'fckeditor':
- fckeditordir = self.config.ext_resources['FCKEDITOR_PATH']
- return static.File(fckeditordir), segments[1:]
+ pre_path = request.path.split('/')[1:]
+ if pre_path[0] == 'https':
+ pre_path.pop(0)
+ directory = pre_path[0]
+ # Anything in data/, static/, fckeditor/ and the generated versioned
+ # data directory is treated as static files
+ if directory in self.static_directories:
+ if path == directory: # recurse
+ return self
+ cls = File
+ if directory == 'static':
+ datadir = self.config.static_directory
+ elif directory == 'fckeditor':
+ datadir = self.config.ext_resources['FCKEDITOR_PATH']
+ else:
+ datadir = self.config.locate_resource(path)
+ if datadir is None:
+ return self
+ if directory != 'data':
+ # versioned directory, use specific file with http cache
+ # headers so they are cached for a very long time
+ cls = LongTimeExpiringFile
+ self.debug('static file %s from %s', path, datadir)
+ return cls(join(datadir, path))
# Otherwise we use this single resource
- return self, ()
+ return self
def render(self, request):
"""Render a page from the root resource"""
@@ -183,7 +193,8 @@
if self.config['profile']: # default profiler don't trace threads
return self.render_request(request)
else:
- return threads.deferToThread(self.render_request, request)
+ deferred = threads.deferToThread(self.render_request, request)
+ return NOT_DONE_YET
def render_request(self, request):
origpath = request.path
@@ -208,13 +219,11 @@
req.set_header('WWW-Authenticate', [('Basic', {'realm' : realm })], raw=False)
try:
self.appli.connect(req)
- except AuthenticationError:
- return self.request_auth(req)
except Redirect, ex:
- return self.redirect(req, ex.location)
- if https and req.cnx.anonymous_connection:
+ return self.redirect(request=req, location=ex.location)
+ if https and req.session.anonymous_session:
# don't allow anonymous on https connection
- return self.request_auth(req)
+ return self.request_auth(request=req)
if self.url_rewriter is not None:
# XXX should occur before authentication?
try:
@@ -231,234 +240,106 @@
except DirectResponse, ex:
return ex.response
except StatusResponse, ex:
- return http.Response(stream=ex.content, code=ex.status,
- headers=req.headers_out or None)
- except RemoteCallFailed, ex:
- req.set_header('content-type', 'application/json')
- return http.Response(stream=ex.dumps(),
- code=responsecode.INTERNAL_SERVER_ERROR)
- except NotFound:
- result = self.appli.notfound_content(req)
- return http.Response(stream=result, code=responsecode.NOT_FOUND,
- headers=req.headers_out or None)
- except ExplicitLogin: # must be before AuthenticationError
- return self.request_auth(req)
- except AuthenticationError, ex:
- if self.config['auth-mode'] == 'cookie' and getattr(ex, 'url', None):
- return self.redirect(req, ex.url)
+ return HTTPResponse(stream=ex.content, code=ex.status,
+ twisted_request=req._twreq,
+ headers=req.headers_out)
+ except AuthenticationError:
+ return self.request_auth(request=req)
+ except LogOut, ex:
+ if self.config['auth-mode'] == 'cookie' and ex.url:
+ return self.redirect(request=req, location=ex.url)
# in http we have to request auth to flush current http auth
# information
- return self.request_auth(req, loggedout=True)
+ return self.request_auth(request=req, loggedout=True)
except Redirect, ex:
- return self.redirect(req, ex.location)
+ return self.redirect(request=req, location=ex.location)
# request may be referenced by "onetime callback", so clear its entity
# cache to avoid memory usage
req.drop_entity_cache()
- return http.Response(stream=result, code=responsecode.OK,
- headers=req.headers_out or None)
- def redirect(self, req, location):
- req.headers_out.setHeader('location', str(location))
- self.debug('redirecting to %s', location)
- # 303 See other
- return http.Response(code=303, headers=req.headers_out)
+ return HTTPResponse(twisted_request=req._twreq, code=http.OK,
+ stream=result, headers=req.headers_out)
- def request_auth(self, req, loggedout=False):
- if self.https_url and req.base_url() != self.https_url:
- req.headers_out.setHeader('location', self.https_url + 'login')
- return http.Response(code=303, headers=req.headers_out)
+ def redirect(self, request, location):
+ self.debug('redirecting to %s', str(location))
+ request.headers_out.setHeader('location', str(location))
+ # 303 See other
+ return HTTPResponse(twisted_request=request._twreq, code=303,
+ headers=request.headers_out)
+
+ def request_auth(self, request, loggedout=False):
+ if self.https_url and request.base_url() != self.https_url:
+ return self.redirect(request, self.https_url + 'login')
if self.config['auth-mode'] == 'http':
- code = responsecode.UNAUTHORIZED
+ code = http.UNAUTHORIZED
else:
- code = responsecode.FORBIDDEN
+ code = http.FORBIDDEN
if loggedout:
- if req.https:
- req._base_url = self.base_url
- req.https = False
- content = self.appli.loggedout_content(req)
+ if request.https:
+ request._base_url = self.base_url
+ request.https = False
+ content = self.appli.loggedout_content(request)
else:
- content = self.appli.need_login_content(req)
- return http.Response(code, req.headers_out, content)
+ content = self.appli.need_login_content(request)
+ return HTTPResponse(twisted_request=request._twreq,
+ stream=content, code=code,
+ headers=request.headers_out)
-from twisted.internet import defer
-from twisted.web2 import fileupload
+#TODO
+# # XXX max upload size in the configuration
-# XXX set max file size to 100Mo: put max upload size in the configuration
-# line below for twisted >= 8.0, default param value for earlier version
-resource.PostableResource.maxSize = 100*1024*1024
-def parsePOSTData(request, maxMem=100*1024, maxFields=1024,
- maxSize=100*1024*1024):
- if request.stream.length == 0:
- return defer.succeed(None)
+@monkeypatch(http.Request)
+def requestReceived(self, command, path, version):
+ """Called by channel when all data has been received.
- ctype = request.headers.getHeader('content-type')
-
- if ctype is None:
- return defer.succeed(None)
-
- def updateArgs(data):
- args = data
- request.args.update(args)
-
- def updateArgsAndFiles(data):
- args, files = data
- request.args.update(args)
- request.files.update(files)
-
- def error(f):
- f.trap(fileupload.MimeFormatError)
- raise http.HTTPError(responsecode.BAD_REQUEST)
-
- if ctype.mediaType == 'application' and ctype.mediaSubtype == 'x-www-form-urlencoded':
- d = fileupload.parse_urlencoded(request.stream, keep_blank_values=True)
- d.addCallbacks(updateArgs, error)
- return d
- elif ctype.mediaType == 'multipart' and ctype.mediaSubtype == 'form-data':
- boundary = ctype.params.get('boundary')
- if boundary is None:
- return defer.fail(http.HTTPError(
- http.StatusResponse(responsecode.BAD_REQUEST,
- "Boundary not specified in Content-Type.")))
- d = fileupload.parseMultipartFormData(request.stream, boundary,
- maxMem, maxFields, maxSize)
- d.addCallbacks(updateArgsAndFiles, error)
- return d
+ This method is not intended for users.
+ """
+ self.content.seek(0,0)
+ self.args = {}
+ self.files = {}
+ self.stack = []
+ self.method, self.uri = command, path
+ self.clientproto = version
+ x = self.uri.split('?', 1)
+ if len(x) == 1:
+ self.path = self.uri
else:
- raise http.HTTPError(responsecode.BAD_REQUEST)
-
-server.parsePOSTData = parsePOSTData
+ self.path, argstring = x
+ self.args = http.parse_qs(argstring, 1)
+ # cache the client and server information, we'll need this later to be
+ # serialized and sent with the request so CGIs will work remotely
+ self.client = self.channel.transport.getPeer()
+ self.host = self.channel.transport.getHost()
+ # Argument processing
+ ctype = self.getHeader('content-type')
+ if self.method == "POST" and ctype:
+ key, pdict = parse_header(ctype)
+ if key == 'application/x-www-form-urlencoded':
+ self.args.update(http.parse_qs(self.content.read(), 1))
+ elif key == 'multipart/form-data':
+ self.content.seek(0,0)
+ form = FieldStorage(self.content, self.received_headers,
+ environ={'REQUEST_METHOD': 'POST'},
+ keep_blank_values=1,
+ strict_parsing=1)
+ for key in form:
+ value = form[key]
+ if isinstance(value, list):
+ self.args[key] = [v.value for v in value]
+ elif value.filename:
+ if value.done != -1: # -1 means the transfer was interrupted
+ self.files[key] = (value.filename, value.file)
+ else:
+ self.files[key] = (None, None)
+ else:
+ self.args[key] = value.value
+ self.process()
from logging import getLogger
from cubicweb import set_log_methods
-set_log_methods(CubicWebRootResource, getLogger('cubicweb.twisted'))
-
-
-listiterator = type(iter([]))
-
-def _gc_debug(all=True):
- import gc
- from pprint import pprint
- from cubicweb.appobject import AppObject
- gc.collect()
- count = 0
- acount = 0
- fcount = 0
- rcount = 0
- ccount = 0
- scount = 0
- ocount = {}
- from rql.stmts import Union
- from cubicweb.schema import CubicWebSchema
- from cubicweb.rset import ResultSet
- from cubicweb.dbapi import Connection, Cursor
- from cubicweb.req import RequestSessionBase
- from cubicweb.server.repository import Repository
- from cubicweb.server.sources.native import NativeSQLSource
- from cubicweb.server.session import Session
- from cubicweb.devtools.testlib import CubicWebTC
- from logilab.common.testlib import TestSuite
- from optparse import Values
- import types, weakref
- for obj in gc.get_objects():
- if isinstance(obj, RequestSessionBase):
- count += 1
- if isinstance(obj, Session):
- print ' session', obj, referrers(obj, True)
- elif isinstance(obj, AppObject):
- acount += 1
- elif isinstance(obj, ResultSet):
- rcount += 1
- #print ' rset', obj, referrers(obj)
- elif isinstance(obj, Repository):
- print ' REPO', obj, referrers(obj, True)
- #elif isinstance(obj, NativeSQLSource):
- # print ' SOURCe', obj, referrers(obj)
- elif isinstance(obj, CubicWebTC):
- print ' TC', obj, referrers(obj)
- elif isinstance(obj, TestSuite):
- print ' SUITE', obj, referrers(obj)
- #elif isinstance(obj, Values):
- # print ' values', '%#x' % id(obj), referrers(obj, True)
- elif isinstance(obj, Connection):
- ccount += 1
- #print ' cnx', obj, referrers(obj)
- #elif isinstance(obj, Cursor):
- # ccount += 1
- # print ' cursor', obj, referrers(obj)
- elif isinstance(obj, file):
- fcount += 1
- # print ' open file', file.name, file.fileno
- elif isinstance(obj, CubicWebSchema):
- scount += 1
- print ' schema', obj, referrers(obj)
- elif not isinstance(obj, (type, tuple, dict, list, set, frozenset,
- weakref.ref, weakref.WeakKeyDictionary,
- listiterator,
- property, classmethod,
- types.ModuleType, types.MemberDescriptorType,
- types.FunctionType, types.MethodType)):
- try:
- ocount[obj.__class__] += 1
- except KeyError:
- ocount[obj.__class__] = 1
- except AttributeError:
- pass
- if count:
- print ' NB REQUESTS/SESSIONS', count
- if acount:
- print ' NB APPOBJECTS', acount
- if ccount:
- print ' NB CONNECTIONS', ccount
- if rcount:
- print ' NB RSETS', rcount
- if scount:
- print ' NB SCHEMAS', scount
- if fcount:
- print ' NB FILES', fcount
- if all:
- ocount = sorted(ocount.items(), key=lambda x: x[1], reverse=True)[:20]
- pprint(ocount)
- if gc.garbage:
- print 'UNREACHABLE', gc.garbage
-
-def referrers(obj, showobj=False):
- try:
- return sorted(set((type(x), showobj and x or getattr(x, '__name__', '%#x' % id(x)))
- for x in _referrers(obj)))
- except TypeError:
- s = set()
- unhashable = []
- for x in _referrers(obj):
- try:
- s.add(x)
- except TypeError:
- unhashable.append(x)
- return sorted(s) + unhashable
-
-def _referrers(obj, seen=None, level=0):
- import gc, types
- from cubicweb.schema import CubicWebRelationSchema, CubicWebEntitySchema
- interesting = []
- if seen is None:
- seen = set()
- for x in gc.get_referrers(obj):
- if id(x) in seen:
- continue
- seen.add(id(x))
- if isinstance(x, types.FrameType):
- continue
- if isinstance(x, (CubicWebRelationSchema, CubicWebEntitySchema)):
- continue
- if isinstance(x, (list, tuple, set, dict, listiterator)):
- if level >= 5:
- pass
- #interesting.append(x)
- else:
- interesting += _referrers(x, seen, level+1)
- else:
- interesting.append(x)
- return interesting
+LOGGER = getLogger('cubicweb.twisted')
+set_log_methods(CubicWebRootResource, LOGGER)
def run(config, debug):
# create the site
@@ -466,7 +347,7 @@
website = server.Site(root_resource)
# serve it via standard HTTP on port set in the configuration
port = config['port'] or 8080
- reactor.listenTCP(port, channel.HTTPFactory(website))
+ reactor.listenTCP(port, website)
logger = getLogger('cubicweb.twisted')
if not debug:
if sys.platform == 'win32':
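
With twisted.web2 gone, child lookup moves from `locateChild(request, segments)` to `resource.Resource.getChild(path, request)`, and the site is served by passing a `server.Site` directly to `reactor.listenTCP` instead of wrapping it in `channel.HTTPFactory`. A minimal sketch of that wiring, assuming an already initialised `CubicWebRootResource` (the port value is illustrative):

from twisted.internet import reactor
from twisted.web import server

def serve(root_resource, port=8080):
    """Sketch: expose a twisted.web resource tree over plain HTTP."""
    website = server.Site(root_resource)  # Site is both factory and request handler
    reactor.listenTCP(port, website)
    reactor.run()
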
--- a/ext/xhtml2fo.py Tue Apr 13 19:22:46 2010 +0200
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,137 +0,0 @@
-from xml.etree.ElementTree import QName
-from pysixt.standard.xhtml_xslfo.transformer import XHTML2FOTransformer
-from pysixt.utils.xslfo.standard import cm
-from pysixt.utils.xslfo import SimplePageMaster
-from pysixt.standard.xhtml_xslfo.default_styling import default_styles
-from pysixt.standard.xhtml_xslfo import XHTML_NS
-
-
-class ReportTransformer(XHTML2FOTransformer):
- """
- Class transforming an XHTML input tree into a FO document
- displaying reports (one report for each <div class="contentmain">
- element in the input tree.
- """
-
- def __init__(self, section,
- page_width=21.0, page_height=29.7,
- margin_top=1.0, margin_bottom=1.0,
- margin_left=1.0, margin_right=1.0,
- header_footer_height=0.75,
- standard_font_size=11.0, default_lang=u"fr" ):
- """
- Initializes a transformer turning an XHTML input tree
- containing <div class="contentmain"> elements representing
- main content sections into a FO output tree displaying the
- reports.
-
- page_width: float - width of the page (in cm)
- page_height: float - height of the page (in cm)
- margin_top: float - top margin of the page (in cm)
- margin_bottom: float - bottom margin of the page (in cm)
- margin_left: float - left margin of the page (in cm)
- margin_right: float - right margin of the page (in cm)
- header_footer_height: float - height of the header or the footer of the
- page that the page number (if any) will be
- inserted in.
- standard_font_size: float - standard size of the font (in pt)
- default_lang: u"" - default language (used for hyphenation)
- """
- self.section = section
- self.page_width = page_width
- self.page_height = page_height
-
- self.page_tmargin = margin_top
- self.page_bmargin = margin_bottom
- self.page_lmargin = margin_left
- self.page_rmargin = margin_right
-
- self.hf_height = header_footer_height
-
- self.font_size = standard_font_size
- self.lang = default_lang
-
- XHTML2FOTransformer.__init__(self)
-
-
- def define_pagemasters(self):
- """
- Defines the page masters for the FO output document.
- """
- pm = SimplePageMaster(u"page-report")
- pm.set_page_dims( self.page_width*cm, self.page_height*cm )
- pm.set_page_margins({u'top' : self.page_tmargin*cm,
- u'bottom': self.page_bmargin*cm,
- u'left' : self.page_lmargin*cm,
- u'right' : self.page_rmargin*cm })
- pm.add_peripheral_region(u"end", self.hf_height)
- dims = {}
- dims[u"bottom"] = self.hf_height + 0.25
- pm.set_main_region_margins(dims)
- return [pm]
-
- def _visit_report(self, in_elt, _out_elt, params):
- """
- Specific visit function for the input <div> elements whose class is
- "report". The _root_visit method of this class selects these input
- elements and asks the process of these elements with this specific
- visit function.
- """
-
- ps = self.create_pagesequence(u"page-report")
- props = { u"force-page-count": u"no-force",
- u"initial-page-number": u"1",
- u"format": u"1", }
- self._output_properties(ps, props)
-
- sc = self.create_staticcontent(ps, u"end")
- sc_bl = self.create_block(sc)
- attrs = { u"hyphenate": u"false", }
- attrs[u"font-size"] = u"%.1fpt" % (self.font_size * 0.7)
- attrs[u"language"] = self.lang
- attrs[u"text-align"] = u"center"
- self._output_properties(sc_bl, attrs)
- sc_bl.text = u"Page" + u" " # ### Should be localised!
- pn = self.create_pagenumber(sc_bl)
- pn.tail = u"/"
- self.create_pagenumbercitation(
- sc_bl, u"last-block-of-report-%d" % params[u"context_pos"])
-
- fl = self.create_flow(ps, u"body")
- bl = self.create_block(fl)
-
- # Sets on the highest block element the properties of the XHTML body
- # element. These properties (at the least the inheritable ones) will
- # be inherited by all the future FO elements.
- bodies = list(self.in_tree.getiterator(QName(XHTML_NS, u"body")))
- if len(bodies) > 0:
- attrs = self._extract_properties([bodies[0]])
- else:
- attrs = default_styles[u"body"].copy()
- attrs[u"font-size"] = u"%.1fpt" % self.font_size
- attrs[u"language"] = self.lang
- self._output_properties(bl,attrs)
-
- # Processes the report content
- self._copy_text(in_elt, bl)
- self._process_nodes(in_elt.getchildren(), bl)
-
- # Inserts an empty block at the end of the report in order to be able
- # to compute the last page number of this report.
- last_bl = self.create_block(bl)
- props = { u"keep-with-previous": u"always", }
- props[u"id"] = u"last-block-of-report-%d" % params[u"context_pos"]
- self._output_properties(last_bl,props)
-
-
- def _root_visit(self):
- """
- Visit function called when starting the process of the input tree.
- """
- content = [ d for d in self.in_tree.getiterator(QName(XHTML_NS, u"div"))
- if d.get(u"id") == self.section ]
- # Asks the process of the report elements with a specific visit
- # function
- self._process_nodes(content, self.fo_root,
- with_function=self._visit_report)
-
--- a/hooks/email.py Tue Apr 13 19:22:46 2010 +0200
+++ b/hooks/email.py Tue Apr 13 19:43:51 2010 +0200
@@ -28,7 +28,7 @@
if self.condition():
self.session.execute(
'SET X %s Y WHERE X eid %%(x)s, Y eid %%(y)s' % self.rtype,
- {'x': self.entity.eid, 'y': self.email.eid}, 'x')
+ {'x': self.entity.eid, 'y': self.email.eid})
class SetPrimaryEmailRelationOp(SetUseEmailRelationOp):
--- a/hooks/integrity.py Tue Apr 13 19:22:46 2010 +0200
+++ b/hooks/integrity.py Tue Apr 13 19:43:51 2010 +0200
@@ -77,7 +77,7 @@
continue
if rtype in pendingrtypes:
continue
- if not session.execute(self.base_rql % rtype, {'x': eid}, 'x'):
+ if not session.execute(self.base_rql % rtype, {'x': eid}):
etype = session.describe(eid)[0]
_ = session._
msg = _('at least one relation %(rtype)s is required on '
@@ -304,7 +304,7 @@
# don't do anything if the entity is being created or deleted
if not (eid in pendingeids or eid in neweids):
etype = session.describe(eid)[0]
- session.execute(self.base_rql % (etype, rtype), {'x': eid}, 'x')
+ session.execute(self.base_rql % (etype, rtype), {'x': eid})
class _DelayedDeleteSEntityOp(_DelayedDeleteOp):
"""delete orphan subject entity of a composite relation"""
--- a/hooks/metadata.py Tue Apr 13 19:22:46 2010 +0200
+++ b/hooks/metadata.py Tue Apr 13 19:43:51 2010 +0200
@@ -102,8 +102,7 @@
def precommit_event(self):
self.session.execute('SET X owned_by U WHERE C owned_by U, C eid %(c)s,'
'NOT EXISTS(X owned_by U, X eid %(x)s)',
- {'c': self.compositeeid, 'x': self.composedeid},
- ('c', 'x'))
+ {'c': self.compositeeid, 'x': self.composedeid})
class SyncCompositeOwner(MetaDataHook):
--- a/hooks/notification.py Tue Apr 13 19:22:46 2010 +0200
+++ b/hooks/notification.py Tue Apr 13 19:43:51 2010 +0200
@@ -124,7 +124,7 @@
rqlsel.append(var)
rqlrestr.append('X %s %s' % (attr, var))
rql = 'Any %s WHERE %s' % (','.join(rqlsel), ','.join(rqlrestr))
- rset = session.execute(rql, {'x': self.entity.eid}, 'x')
+ rset = session.execute(rql, {'x': self.entity.eid})
for i, attr in enumerate(attrs):
oldvalue = rset[0][i]
newvalue = self.entity[attr]
--- a/hooks/syncschema.py Tue Apr 13 19:22:46 2010 +0200
+++ b/hooks/syncschema.py Tue Apr 13 19:43:51 2010 +0200
@@ -1004,7 +1004,7 @@
DropRelationTable(session, rschema.type)
# if this is the last instance, drop associated relation type
if lastrel and not self.eidto in pendings:
- execute('DELETE CWRType X WHERE X eid %(x)s', {'x': self.eidto}, 'x')
+ execute('DELETE CWRType X WHERE X eid %(x)s', {'x': self.eidto})
MemSchemaRDefDel(session, (subjschema, rschema, objschema))
--- a/hooks/syncsession.py Tue Apr 13 19:22:46 2010 +0200
+++ b/hooks/syncsession.py Tue Apr 13 19:43:51 2010 +0200
@@ -36,7 +36,7 @@
no query should be emitted while comitting
"""
rql = 'Any N WHERE G eid %(x)s, G name N'
- result = session.execute(rql, {'x': kwargs['geid']}, 'x', build_descr=False)
+ result = session.execute(rql, {'x': kwargs['geid']}, build_descr=False)
hook.Operation.__init__(self, session, *args, **kwargs)
self.group = result[0][0]
@@ -216,7 +216,7 @@
if not session.describe(eidfrom)[0] == 'CWProperty':
return
key, value = session.execute('Any K,V WHERE P eid %(x)s,P pkey K,P value V',
- {'x': eidfrom}, 'x')[0]
+ {'x': eidfrom})[0]
if session.vreg.property_info(key)['sitewide']:
qname = role_name('for_user', 'subject')
msg = session._("site-wide property can't be set for user")
@@ -234,7 +234,7 @@
def __call__(self):
session = self._cw
key = session.execute('Any K WHERE P eid %(x)s, P pkey K',
- {'x': self.eidfrom}, 'x')[0][0]
+ {'x': self.eidfrom})[0][0]
session.transaction_data.setdefault('pendingrelations', []).append(
(self.eidfrom, self.rtype, self.eidto))
for session_ in get_user_sessions(session.repo, self.eidto):
--- a/hooks/test/unittest_bookmarks.py Tue Apr 13 19:22:46 2010 +0200
+++ b/hooks/test/unittest_bookmarks.py Tue Apr 13 19:43:51 2010 +0200
@@ -1,7 +1,7 @@
"""
:organization: Logilab
-:copyright: 2001-2009 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
+:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
"""
@@ -18,10 +18,10 @@
self.commit()
self.execute('DELETE X bookmarked_by U WHERE U login "admin"')
self.commit()
- self.failUnless(self.execute('Any X WHERE X eid %(x)s', {'x': beid}, 'x'))
+ self.failUnless(self.execute('Any X WHERE X eid %(x)s', {'x': beid}))
self.execute('DELETE X bookmarked_by U WHERE U login "anon"')
self.commit()
- self.failIf(self.execute('Any X WHERE X eid %(x)s', {'x': beid}, 'x'))
+ self.failIf(self.execute('Any X WHERE X eid %(x)s', {'x': beid}))
if __name__ == '__main__':
unittest_main()
--- a/hooks/test/unittest_syncschema.py Tue Apr 13 19:22:46 2010 +0200
+++ b/hooks/test/unittest_syncschema.py Tue Apr 13 19:43:51 2010 +0200
@@ -32,17 +32,17 @@
def _set_perms(self, eid):
self.execute('SET X read_permission G WHERE X eid %(x)s, G is CWGroup',
- {'x': eid}, 'x')
+ {'x': eid})
self.execute('SET X add_permission G WHERE X eid %(x)s, G is CWGroup, G name "managers"',
- {'x': eid}, 'x')
+ {'x': eid})
self.execute('SET X delete_permission G WHERE X eid %(x)s, G is CWGroup, G name "owners"',
- {'x': eid}, 'x')
+ {'x': eid})
def _set_attr_perms(self, eid):
self.execute('SET X read_permission G WHERE X eid %(x)s, G is CWGroup',
- {'x': eid}, 'x')
+ {'x': eid})
self.execute('SET X update_permission G WHERE X eid %(x)s, G is CWGroup, G name "managers"',
- {'x': eid}, 'x')
+ {'x': eid})
def test_base(self):
schema = self.repo.schema
@@ -88,7 +88,7 @@
'WHERE RT name "concerne2", E name "CWUser"')[0][0]
self._set_perms(rdefeid)
self.commit()
- self.execute('DELETE CWRelation X WHERE X eid %(x)s', {'x': concerne2_rdef_eid}, 'x')
+ self.execute('DELETE CWRelation X WHERE X eid %(x)s', {'x': concerne2_rdef_eid})
self.commit()
self.failUnless('concerne2' in schema['CWUser'].subject_relations())
self.failIf('concerne2' in schema['Societe2'].subject_relations())
@@ -248,7 +248,7 @@
attreid = self.execute('INSERT CWAttribute X: X cardinality "11", X defaultval "noname", X indexed TRUE, X relation_type RT, X from_entity E, X to_entity F '
'WHERE RT name "messageid", E name "BaseTransition", F name "String"')[0][0]
assert self.execute('SET X read_permission Y WHERE X eid %(x)s, Y name "managers"',
- {'x': attreid}, 'x')
+ {'x': attreid})
self.commit()
self.schema.rebuild_infered_relations()
self.failUnless('Transition' in self.schema['messageid'].subjects())
@@ -299,10 +299,10 @@
if not getattr(cstr, 'eid', None):
self.skip('start me alone') # bug in schema reloading, constraint's eid not restored
self.execute('SET X value %(v)s WHERE X eid %(x)s',
- {'x': cstr.eid, 'v': u"u'normal', u'auto', u'new'"}, 'x')
+ {'x': cstr.eid, 'v': u"u'normal', u'auto', u'new'"})
self.execute('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X '
'WHERE CT name %(ct)s, EDEF eid %(x)s',
- {'ct': 'SizeConstraint', 'value': u'max=10', 'x': rdef.eid}, 'x')
+ {'ct': 'SizeConstraint', 'value': u'max=10', 'x': rdef.eid})
self.commit()
cstr = rdef.constraint_by_type('StaticVocabularyConstraint')
self.assertEquals(cstr.values, (u'normal', u'auto', u'new'))
--- a/i18n/en.po Tue Apr 13 19:22:46 2010 +0200
+++ b/i18n/en.po Tue Apr 13 19:43:51 2010 +0200
@@ -30,6 +30,9 @@
msgid " from state %(fromstate)s to state %(tostate)s\n"
msgstr ""
+msgid " :"
+msgstr ""
+
#, python-format
msgid "%(attr)s set to %(newvalue)s"
msgstr ""
@@ -188,15 +191,15 @@
msgid "AND"
msgstr ""
-msgid "Add permissions"
-msgstr ""
-
msgid "Any"
msgstr ""
msgid "Attributes"
msgstr ""
+msgid "Attributes with non default permissions:"
+msgstr ""
+
# schema pot file, generated on 2009-09-16 16:46:55
#
# singular and plural forms for each entity type
@@ -362,13 +365,10 @@
msgid "Decimal_plural"
msgstr "Decimal numbers"
-msgid "Delete permissions"
-msgstr ""
-
msgid "Do you want to delete the following element(s) ?"
msgstr ""
-msgid "Download page as pdf"
+msgid "Download schema as OWL"
msgstr ""
msgctxt "inlined:CWUser.use_email.subject"
@@ -384,6 +384,9 @@
msgid "Entities"
msgstr ""
+msgid "Entity types"
+msgstr ""
+
msgid "ExternalUri"
msgstr "External Uri"
@@ -411,6 +414,9 @@
msgid "Help"
msgstr ""
+msgid "Index"
+msgstr ""
+
msgid "Instance"
msgstr ""
@@ -513,6 +519,12 @@
msgid "Password_plural"
msgstr "Passwords"
+msgid "Permissions for entity types"
+msgstr ""
+
+msgid "Permissions for relations"
+msgstr ""
+
msgid "Please note that this is only a shallow copy"
msgstr ""
@@ -531,9 +543,6 @@
msgid "RQLVocabularyConstraint"
msgstr "RQL vocabulary constraint"
-msgid "Read permissions"
-msgstr ""
-
msgid "Recipients:"
msgstr ""
@@ -543,6 +552,9 @@
msgid "Registry's content"
msgstr ""
+msgid "Relation types"
+msgstr ""
+
msgid "Relations"
msgstr ""
@@ -606,6 +618,9 @@
msgid "The view %s could not be found"
msgstr ""
+msgid "There is no default workflow"
+msgstr ""
+
msgid "This BaseTransition"
msgstr "This abstract transition"
@@ -696,9 +711,6 @@
msgid "Unreachable objects"
msgstr ""
-msgid "Update permissions"
-msgstr ""
-
msgid "Used by:"
msgstr ""
@@ -1198,7 +1210,7 @@
msgid "attribute"
msgstr ""
-msgid "attributes with modified permissions:"
+msgid "attributes permissions:"
msgstr ""
msgid "august"
@@ -1517,9 +1529,6 @@
msgid "condition"
msgstr "condition"
-msgid "condition:"
-msgstr "condtion:"
-
msgctxt "RQLExpression"
msgid "condition_object"
msgstr "condition of"
@@ -1527,6 +1536,9 @@
msgid "condition_object"
msgstr "condition of"
+msgid "conditions"
+msgstr ""
+
msgid "config mode"
msgstr ""
@@ -1578,12 +1590,6 @@
msgid "contentnavigation_metadata_description"
msgstr ""
-msgid "contentnavigation_pdfview"
-msgstr "view page as pdf icon"
-
-msgid "contentnavigation_pdfview_description"
-msgstr ""
-
msgid "contentnavigation_prevnext"
msgstr "previous / next entity"
@@ -1768,6 +1774,9 @@
msgid "creation"
msgstr ""
+msgid "creation date"
+msgstr ""
+
msgid "creation time of an entity"
msgstr ""
@@ -1803,18 +1812,39 @@
msgid "custom_workflow_object"
msgstr "custom workflow of"
-msgid "cwetype-schema-image"
-msgstr "schema"
-
-msgid "cwetype-schema-permissions"
+msgid "cwetype-box"
+msgstr "\"box\" view"
+
+msgid "cwetype-description"
+msgstr "description"
+
+msgid "cwetype-permissions"
msgstr "permissions"
-msgid "cwetype-schema-text"
-msgstr "description"
+msgid "cwetype-views"
+msgstr "views"
msgid "cwetype-workflow"
msgstr "workflow"
+msgid "cwgroup-main"
+msgstr "description"
+
+msgid "cwgroup-permissions"
+msgstr "permissions"
+
+msgid "cwrdef-description"
+msgstr "description"
+
+msgid "cwrdef-permissions"
+msgstr "permissions"
+
+msgid "cwrtype-description"
+msgstr "description"
+
+msgid "cwrtype-permissions"
+msgstr "permissions"
+
msgid "cwuri"
msgstr "internal uri"
@@ -1842,6 +1872,9 @@
msgid "default user workflow"
msgstr ""
+msgid "default value"
+msgstr ""
+
msgid "default workflow for an entity type"
msgstr ""
@@ -2281,6 +2314,9 @@
msgid "final"
msgstr ""
+msgid "first name"
+msgstr ""
+
msgid "firstname"
msgstr ""
@@ -2363,6 +2399,9 @@
msgid "full text or RQL query"
msgstr ""
+msgid "fulltext indexed"
+msgstr ""
+
msgid "fulltext_container"
msgstr "fulltext container"
@@ -2394,10 +2433,6 @@
msgid "granted to groups"
msgstr ""
-#, python-format
-msgid "graphical representation of %s"
-msgstr ""
-
msgid "graphical representation of the instance'schema"
msgstr ""
@@ -2421,9 +2456,6 @@
msgid "groups to which the permission is granted"
msgstr ""
-msgid "groups:"
-msgstr ""
-
msgid "guests"
msgstr ""
@@ -2462,6 +2494,18 @@
msgid "i18n_login_popup"
msgstr "login"
+msgid "i18ncard_*"
+msgstr "0..n"
+
+msgid "i18ncard_+"
+msgstr "1..n"
+
+msgid "i18ncard_1"
+msgstr "1"
+
+msgid "i18ncard_?"
+msgstr "0..1"
+
msgid "i18nprevnext_next"
msgstr "next"
@@ -2497,12 +2541,6 @@
msgid "image"
msgstr ""
-msgid "in memory entity schema"
-msgstr ""
-
-msgid "in memory relation schema"
-msgstr ""
-
msgid "in_group"
msgstr "in group"
@@ -2533,9 +2571,6 @@
msgid "incorrect value (%(value)s) for type \"%(type)s\""
msgstr ""
-msgid "index"
-msgstr ""
-
msgid "index this attribute's value in the plain text index"
msgstr ""
@@ -2608,6 +2643,12 @@
msgid "is"
msgstr ""
+msgid "is object of:"
+msgstr ""
+
+msgid "is subject of:"
+msgstr ""
+
msgid ""
"is the subject/object entity of the relation composed of the other ? This "
"implies that when the composite is deleted, composants are also deleted."
@@ -2655,6 +2696,12 @@
msgid "last connection date"
msgstr ""
+msgid "last login time"
+msgstr ""
+
+msgid "last name"
+msgstr ""
+
msgid "last usage"
msgstr ""
@@ -2799,6 +2846,9 @@
msgid "more actions"
msgstr ""
+msgid "more info about this workflow"
+msgstr ""
+
msgid "multiple edit"
msgstr ""
@@ -2940,8 +2990,8 @@
msgid "object"
msgstr ""
-msgid "object_plural:"
-msgstr "objects:"
+msgid "object type"
+msgstr ""
msgid "october"
msgstr ""
@@ -3022,10 +3072,7 @@
msgid "permission"
msgstr ""
-msgid "permissions for entities"
-msgstr ""
-
-msgid "permissions for relations"
+msgid "permissions"
msgstr ""
msgid "permissions for this entity"
@@ -3106,7 +3153,7 @@
msgstr ""
msgid "read_perm"
-msgstr "read perm"
+msgstr "read permission"
msgid "read_permission"
msgstr "can be read by"
@@ -3143,6 +3190,9 @@
msgid "related entity has no workflow set"
msgstr ""
+msgid "relation"
+msgstr ""
+
#, python-format
msgid "relation %(relname)s of %(ent)s"
msgstr ""
@@ -3150,6 +3200,9 @@
msgid "relation add"
msgstr ""
+msgid "relation direction"
+msgstr ""
+
msgid "relation removal"
msgstr ""
@@ -3171,9 +3224,6 @@
msgid "relation_type_object"
msgstr "relation definitions"
-msgid "relations"
-msgstr ""
-
msgid "relations deleted"
msgstr ""
@@ -3215,6 +3265,9 @@
msgid "require_permission_object"
msgstr "required by"
+msgid "required"
+msgstr ""
+
msgid "required attribute"
msgstr ""
@@ -3258,11 +3311,14 @@
msgid "schema's permissions definitions"
msgstr ""
+msgid "schema-description"
+msgstr "entity and relation types"
+
msgid "schema-image"
-msgstr "schema"
-
-msgid "schema-text"
-msgstr "description"
+msgstr "image"
+
+msgid "schema-security"
+msgstr "permissions"
msgid "search"
msgstr ""
@@ -3449,12 +3505,12 @@
msgid "subject"
msgstr ""
+msgid "subject type"
+msgstr ""
+
msgid "subject/object cardinality"
msgstr ""
-msgid "subject_plural:"
-msgstr "subjects:"
-
msgid "subworkflow"
msgstr ""
@@ -3853,6 +3909,9 @@
msgid "used to grant a permission to a group"
msgstr ""
+msgid "user"
+msgstr ""
+
#, python-format
msgid ""
"user %s has made the following change(s):\n"
@@ -3940,6 +3999,12 @@
msgid "wf_info_for_object"
msgstr "workflow history"
+msgid "wf_tab_info"
+msgstr ""
+
+msgid "wfgraph"
+msgstr ""
+
msgid ""
"when multiple addresses are equivalent (such as python-projects@logilab.org "
"and python-projects@lists.logilab.org), set this to indicate which is the "
--- a/i18n/es.po Tue Apr 13 19:22:46 2010 +0200
+++ b/i18n/es.po Tue Apr 13 19:43:51 2010 +0200
@@ -35,6 +35,9 @@
msgid " from state %(fromstate)s to state %(tostate)s\n"
msgstr " del estado %(fromstate)s hacia el estado %(tostate)s\n"
+msgid " :"
+msgstr ""
+
#, python-format
msgid "%(attr)s set to %(newvalue)s"
msgstr ""
@@ -196,15 +199,15 @@
msgid "AND"
msgstr "Y"
-msgid "Add permissions"
-msgstr "Añadir autorizaciónes"
-
msgid "Any"
msgstr "Cualquiera"
msgid "Attributes"
msgstr "Atributos"
+msgid "Attributes with non default permissions:"
+msgstr ""
+
# schema pot file, generated on 2009-09-16 16:46:55
#
# singular and plural forms for each entity type
@@ -370,13 +373,10 @@
msgid "Decimal_plural"
msgstr "Decimales"
-msgid "Delete permissions"
-msgstr "Autorización de suprimir"
-
msgid "Do you want to delete the following element(s) ?"
msgstr "Desea suprimir el(los) elemento(s) siguiente(s)"
-msgid "Download page as pdf"
+msgid "Download schema as OWL"
msgstr ""
msgctxt "inlined:CWUser.use_email.subject"
@@ -392,6 +392,9 @@
msgid "Entities"
msgstr "Entidades"
+msgid "Entity types"
+msgstr ""
+
msgid "ExternalUri"
msgstr ""
@@ -419,6 +422,9 @@
msgid "Help"
msgstr ""
+msgid "Index"
+msgstr ""
+
msgid "Instance"
msgstr ""
@@ -521,6 +527,12 @@
msgid "Password_plural"
msgstr "Contraseñas"
+msgid "Permissions for entity types"
+msgstr ""
+
+msgid "Permissions for relations"
+msgstr ""
+
msgid "Please note that this is only a shallow copy"
msgstr "Recuerde que no es más que una copia superficial"
@@ -539,9 +551,6 @@
msgid "RQLVocabularyConstraint"
msgstr ""
-msgid "Read permissions"
-msgstr "Autorización de leer"
-
msgid "Recipients:"
msgstr "Destinatarios"
@@ -551,6 +560,9 @@
msgid "Registry's content"
msgstr ""
+msgid "Relation types"
+msgstr ""
+
msgid "Relations"
msgstr "Relaciones"
@@ -614,6 +626,9 @@
msgid "The view %s could not be found"
msgstr "La vista %s no ha podido ser encontrada"
+msgid "There is no default workflow"
+msgstr ""
+
msgid "This BaseTransition"
msgstr ""
@@ -704,9 +719,6 @@
msgid "Unreachable objects"
msgstr ""
-msgid "Update permissions"
-msgstr "Autorización de modificar"
-
msgid "Used by:"
msgstr "Utilizado por :"
@@ -1223,8 +1235,8 @@
msgid "attribute"
msgstr "Atributo"
-msgid "attributes with modified permissions:"
-msgstr "atributos con autorizaciónes modificadas:"
+msgid "attributes permissions:"
+msgstr ""
msgid "august"
msgstr "Agosto"
@@ -1548,9 +1560,6 @@
msgid "condition"
msgstr ""
-msgid "condition:"
-msgstr "condición:"
-
msgctxt "RQLExpression"
msgid "condition_object"
msgstr ""
@@ -1558,6 +1567,9 @@
msgid "condition_object"
msgstr "condición de"
+msgid "conditions"
+msgstr ""
+
msgid "config mode"
msgstr ""
@@ -1609,12 +1621,6 @@
msgid "contentnavigation_metadata_description"
msgstr ""
-msgid "contentnavigation_pdfview"
-msgstr ""
-
-msgid "contentnavigation_pdfview_description"
-msgstr ""
-
msgid "contentnavigation_prevnext"
msgstr "Elemento anterior / siguiente"
@@ -1809,6 +1815,9 @@
msgid "creation"
msgstr "Creación"
+msgid "creation date"
+msgstr ""
+
msgid "creation time of an entity"
msgstr "Fecha de creación de una entidad"
@@ -1844,17 +1853,38 @@
msgid "custom_workflow_object"
msgstr ""
-msgid "cwetype-schema-image"
-msgstr "Esquema"
-
-msgid "cwetype-schema-permissions"
-msgstr "Autorizaciónes"
-
-msgid "cwetype-schema-text"
-msgstr "Modelo de datos"
+msgid "cwetype-box"
+msgstr ""
+
+msgid "cwetype-description"
+msgstr ""
+
+msgid "cwetype-permissions"
+msgstr ""
+
+msgid "cwetype-views"
+msgstr ""
msgid "cwetype-workflow"
-msgstr "Workflow"
+msgstr ""
+
+msgid "cwgroup-main"
+msgstr ""
+
+msgid "cwgroup-permissions"
+msgstr ""
+
+msgid "cwrdef-description"
+msgstr ""
+
+msgid "cwrdef-permissions"
+msgstr ""
+
+msgid "cwrtype-description"
+msgstr ""
+
+msgid "cwrtype-permissions"
+msgstr ""
msgid "cwuri"
msgstr ""
@@ -1883,6 +1913,9 @@
msgid "default user workflow"
msgstr ""
+msgid "default value"
+msgstr ""
+
msgid "default workflow for an entity type"
msgstr ""
@@ -2331,6 +2364,9 @@
msgid "final"
msgstr ""
+msgid "first name"
+msgstr ""
+
msgid "firstname"
msgstr "Nombre"
@@ -2413,6 +2449,9 @@
msgid "full text or RQL query"
msgstr "Texto de búsqueda o demanda RQL"
+msgid "fulltext indexed"
+msgstr ""
+
msgid "fulltext_container"
msgstr "Contenedor de texto indexado"
@@ -2444,10 +2483,6 @@
msgid "granted to groups"
msgstr "Otorgado a los grupos"
-#, python-format
-msgid "graphical representation of %s"
-msgstr ""
-
msgid "graphical representation of the instance'schema"
msgstr ""
@@ -2471,9 +2506,6 @@
msgid "groups to which the permission is granted"
msgstr "Grupos quienes tienen otorgada esta autorización"
-msgid "groups:"
-msgstr "Grupos :"
-
msgid "guests"
msgstr "Invitados"
@@ -2518,6 +2550,18 @@
msgid "i18n_login_popup"
msgstr "Identificarse"
+msgid "i18ncard_*"
+msgstr ""
+
+msgid "i18ncard_+"
+msgstr ""
+
+msgid "i18ncard_1"
+msgstr ""
+
+msgid "i18ncard_?"
+msgstr ""
+
msgid "i18nprevnext_next"
msgstr "Siguiente"
@@ -2555,12 +2599,6 @@
msgid "image"
msgstr "Imagen"
-msgid "in memory entity schema"
-msgstr "Esquema de la entidad en memoria"
-
-msgid "in memory relation schema"
-msgstr "Esquema de la relación en memoria"
-
msgid "in_group"
msgstr "En el grupo"
@@ -2591,9 +2629,6 @@
msgid "incorrect value (%(value)s) for type \"%(type)s\""
msgstr "valor %(value)s incorrecto para el tipo \"%(type)s\""
-msgid "index"
-msgstr "Indice"
-
msgid "index this attribute's value in the plain text index"
msgstr "Indexar el valor de este atributo en el Ãndice de texto simple"
@@ -2667,6 +2702,12 @@
msgid "is"
msgstr "es"
+msgid "is object of:"
+msgstr "es objeto de"
+
+msgid "is subject of:"
+msgstr "es sujeto de"
+
msgid ""
"is the subject/object entity of the relation composed of the other ? This "
"implies that when the composite is deleted, composants are also deleted."
@@ -2718,6 +2759,12 @@
msgid "last connection date"
msgstr "Ultima fecha de conexión"
+msgid "last login time"
+msgstr ""
+
+msgid "last name"
+msgstr ""
+
msgid "last usage"
msgstr ""
@@ -2867,6 +2914,9 @@
msgid "more actions"
msgstr "mas acciones"
+msgid "more info about this workflow"
+msgstr ""
+
msgid "multiple edit"
msgstr "Edicion multiple"
@@ -3014,8 +3064,8 @@
msgid "object"
msgstr "objeto"
-msgid "object_plural:"
-msgstr "objetos:"
+msgid "object type"
+msgstr ""
msgid "october"
msgstr "octubre"
@@ -3095,11 +3145,8 @@
msgid "permission"
msgstr "Permiso"
-msgid "permissions for entities"
-msgstr "autorizaciónes para entidades"
-
-msgid "permissions for relations"
-msgstr "autorizaciónes para relaciones"
+msgid "permissions"
+msgstr ""
msgid "permissions for this entity"
msgstr "Permisos para esta entidad"
@@ -3216,6 +3263,9 @@
msgid "related entity has no workflow set"
msgstr ""
+msgid "relation"
+msgstr ""
+
#, python-format
msgid "relation %(relname)s of %(ent)s"
msgstr "relación %(relname)s de %(ent)s"
@@ -3223,6 +3273,9 @@
msgid "relation add"
msgstr ""
+msgid "relation direction"
+msgstr ""
+
msgid "relation removal"
msgstr ""
@@ -3244,9 +3297,6 @@
msgid "relation_type_object"
msgstr "Definición"
-msgid "relations"
-msgstr "relaciones"
-
msgid "relations deleted"
msgstr "Relaciones eliminadas"
@@ -3288,6 +3338,9 @@
msgid "require_permission_object"
msgstr "Requerido por autorización"
+msgid "required"
+msgstr ""
+
msgid "required attribute"
msgstr "Atributo requerido"
@@ -3335,11 +3388,14 @@
msgid "schema's permissions definitions"
msgstr "definiciones de permisos del esquema"
+msgid "schema-description"
+msgstr ""
+
msgid "schema-image"
msgstr "esquema imagen"
-msgid "schema-text"
-msgstr "esquema text"
+msgid "schema-security"
+msgstr ""
msgid "search"
msgstr "buscar"
@@ -3530,12 +3586,12 @@
msgid "subject"
msgstr "sujeto"
+msgid "subject type"
+msgstr ""
+
msgid "subject/object cardinality"
msgstr "cardinalidad sujeto/objeto"
-msgid "subject_plural:"
-msgstr "sujetos:"
-
msgid "subworkflow"
msgstr ""
@@ -3938,6 +3994,9 @@
msgid "used to grant a permission to a group"
msgstr "utilizado para otorgar permisos a un grupo"
+msgid "user"
+msgstr ""
+
#, python-format
msgid ""
"user %s has made the following change(s):\n"
@@ -4027,6 +4086,12 @@
msgid "wf_info_for_object"
msgstr "historial de transiciones"
+msgid "wf_tab_info"
+msgstr ""
+
+msgid "wfgraph"
+msgstr ""
+
msgid ""
"when multiple addresses are equivalent (such as python-projects@logilab.org "
"and python-projects@lists.logilab.org), set this to indicate which is the "
@@ -4228,6 +4293,9 @@
#~ msgid "comment:"
#~ msgstr "Comentario:"
+#~ msgid "condition:"
+#~ msgstr "condición:"
+
#~ msgid "copy edition"
#~ msgstr "Edición de una copia"
@@ -4323,6 +4391,9 @@
#~ msgid "groups allowed to update entities of this type"
#~ msgstr "Grupos autorizados a actualizar entidades de este tipo"
+#~ msgid "groups:"
+#~ msgstr "Grupos :"
+
#~ msgid "home"
#~ msgstr "Inicio"
@@ -4411,6 +4482,9 @@
#~ msgid "rql expression allowing to update entities of this type"
#~ msgstr "expresion RQL permitiendo actualizar entidades de este tipo"
+#~ msgid "schema-text"
+#~ msgstr "esquema text"
+
#~ msgid "server debug information"
#~ msgstr "server debug information"
--- a/i18n/fr.po Tue Apr 13 19:22:46 2010 +0200
+++ b/i18n/fr.po Tue Apr 13 19:43:51 2010 +0200
@@ -35,6 +35,9 @@
msgid " from state %(fromstate)s to state %(tostate)s\n"
msgstr " de l'état %(fromstate)s vers l'état %(tostate)s\n"
+msgid " :"
+msgstr ""
+
#, python-format
msgid "%(attr)s set to %(newvalue)s"
msgstr "%(attr)s modifié à %(newvalue)s"
@@ -195,15 +198,15 @@
msgid "AND"
msgstr "ET"
-msgid "Add permissions"
-msgstr "Permissions d'ajouter"
-
msgid "Any"
msgstr "N'importe"
msgid "Attributes"
msgstr "Attributs"
+msgid "Attributes with non default permissions:"
+msgstr "Attributs ayant des permissions non-standard"
+
# schema pot file, generated on 2009-09-16 16:46:55
#
# singular and plural forms for each entity type
@@ -381,14 +384,11 @@
msgid "Decimal_plural"
msgstr "Nombres décimaux"
-msgid "Delete permissions"
-msgstr "Permissions de supprimer"
-
msgid "Do you want to delete the following element(s) ?"
msgstr "Voulez-vous supprimer le(s) élément(s) suivant(s) ?"
-msgid "Download page as pdf"
-msgstr "télécharger la page au format PDF"
+msgid "Download schema as OWL"
+msgstr "Télécharger le schéma au format OWL"
msgctxt "inlined:CWUser.use_email.subject"
msgid "EmailAddress"
@@ -403,6 +403,9 @@
msgid "Entities"
msgstr "entités"
+msgid "Entity types"
+msgstr "Types d'entités"
+
msgid "ExternalUri"
msgstr "Uri externe"
@@ -430,6 +433,9 @@
msgid "Help"
msgstr "Aide"
+msgid "Index"
+msgstr "Index"
+
msgid "Instance"
msgstr "Instance"
@@ -532,6 +538,12 @@
msgid "Password_plural"
msgstr "Mots de passe"
+msgid "Permissions for entity types"
+msgstr "Permissions pour les types d'entités"
+
+msgid "Permissions for relations"
+msgstr "Permissions pour les relations"
+
msgid "Please note that this is only a shallow copy"
msgstr "Attention, cela n'effectue qu'une copie de surface"
@@ -550,9 +562,6 @@
msgid "RQLVocabularyConstraint"
msgstr "contrainte rql de vocabulaire"
-msgid "Read permissions"
-msgstr "Permissions de lire"
-
msgid "Recipients:"
msgstr "Destinataires :"
@@ -562,6 +571,9 @@
msgid "Registry's content"
msgstr "Contenu du registre"
+msgid "Relation types"
+msgstr "Types de relation"
+
msgid "Relations"
msgstr "Relations"
@@ -625,6 +637,9 @@
msgid "The view %s could not be found"
msgstr "La vue %s est introuvable"
+msgid "There is no default workflow"
+msgstr "Ce type d'entité n'a pas de workflow par défault"
+
msgid "This BaseTransition"
msgstr "Cette transition abstraite"
@@ -715,9 +730,6 @@
msgid "Unreachable objects"
msgstr "Objets inacessible"
-msgid "Update permissions"
-msgstr "Permissions de modifier"
-
msgid "Used by:"
msgstr "Utilisé par :"
@@ -1242,8 +1254,8 @@
msgid "attribute"
msgstr "attribut"
-msgid "attributes with modified permissions:"
-msgstr "attributs ayant des permissions modifiées :"
+msgid "attributes permissions:"
+msgstr "permissions des attributs :"
msgid "august"
msgstr "août"
@@ -1568,9 +1580,6 @@
msgid "condition"
msgstr "condition"
-msgid "condition:"
-msgstr "condition :"
-
msgctxt "RQLExpression"
msgid "condition_object"
msgstr "condition de"
@@ -1578,6 +1587,9 @@
msgid "condition_object"
msgstr "condition de"
+msgid "conditions"
+msgstr "conditions"
+
msgid "config mode"
msgstr "mode de configuration"
@@ -1630,12 +1642,6 @@
msgid "contentnavigation_metadata_description"
msgstr ""
-msgid "contentnavigation_pdfview"
-msgstr "icône pdf"
-
-msgid "contentnavigation_pdfview_description"
-msgstr ""
-
msgid "contentnavigation_prevnext"
msgstr "élément précedent / suivant"
@@ -1832,6 +1838,9 @@
msgid "creation"
msgstr "création"
+msgid "creation date"
+msgstr "date de création"
+
msgid "creation time of an entity"
msgstr "date de création d'une entité"
@@ -1867,18 +1876,39 @@
msgid "custom_workflow_object"
msgstr "workflow de"
-msgid "cwetype-schema-image"
-msgstr "schéma"
-
-msgid "cwetype-schema-permissions"
+msgid "cwetype-box"
+msgstr "vue \"boîte\""
+
+msgid "cwetype-description"
+msgstr "description"
+
+msgid "cwetype-permissions"
msgstr "permissions"
-msgid "cwetype-schema-text"
-msgstr "description"
+msgid "cwetype-views"
+msgstr "vues"
msgid "cwetype-workflow"
msgstr "workflow"
+msgid "cwgroup-main"
+msgstr "description"
+
+msgid "cwgroup-permissions"
+msgstr "permissions"
+
+msgid "cwrdef-description"
+msgstr "description"
+
+msgid "cwrdef-permissions"
+msgstr "permissions"
+
+msgid "cwrtype-description"
+msgstr "description"
+
+msgid "cwrtype-permissions"
+msgstr "permissions"
+
msgid "cwuri"
msgstr "uri interne"
@@ -1906,6 +1936,9 @@
msgid "default user workflow"
msgstr "workflow par défaut des utilisateurs"
+msgid "default value"
+msgstr "valeur par défaut"
+
msgid "default workflow for an entity type"
msgstr "workflow par défaut pour un type d'entité"
@@ -2365,6 +2398,9 @@
msgid "final"
msgstr "final"
+msgid "first name"
+msgstr "prénom"
+
msgid "firstname"
msgstr "prénom"
@@ -2383,7 +2419,7 @@
msgstr "suivez ce lien pour plus d'information sur ce %s"
msgid "follow this link if javascript is deactivated"
-msgstr ""
+msgstr "suivez ce lien si javascript est désactivé"
msgid "for_user"
msgstr "pour l'utilisateur"
@@ -2447,6 +2483,9 @@
msgid "full text or RQL query"
msgstr "texte à rechercher ou requête RQL"
+msgid "fulltext indexed"
+msgstr "indexation du texte"
+
msgid "fulltext_container"
msgstr "conteneur du texte indexé"
@@ -2480,10 +2519,6 @@
msgid "granted to groups"
msgstr "accordée aux groupes"
-#, python-format
-msgid "graphical representation of %s"
-msgstr "représentation graphique de %s"
-
msgid "graphical representation of the instance'schema"
msgstr "représentation graphique du schéma de l'instance"
@@ -2508,9 +2543,6 @@
msgid "groups to which the permission is granted"
msgstr "groupes auquels cette permission est donnée"
-msgid "groups:"
-msgstr "groupes :"
-
msgid "guests"
msgstr "invités"
@@ -2555,6 +2587,18 @@
msgid "i18n_login_popup"
msgstr "s'authentifier"
+msgid "i18ncard_*"
+msgstr "0..n"
+
+msgid "i18ncard_+"
+msgstr "1..n"
+
+msgid "i18ncard_1"
+msgstr "1"
+
+msgid "i18ncard_?"
+msgstr "0..1"
+
msgid "i18nprevnext_next"
msgstr "suivant"
@@ -2592,12 +2636,6 @@
msgid "image"
msgstr "image"
-msgid "in memory entity schema"
-msgstr "schéma de l'entité en mémoire"
-
-msgid "in memory relation schema"
-msgstr "schéma de la relation en mémoire"
-
msgid "in_group"
msgstr "dans le groupe"
@@ -2628,9 +2666,6 @@
msgid "incorrect value (%(value)s) for type \"%(type)s\""
msgstr "valeur %(value)s incorrecte pour le type \"%(type)s\""
-msgid "index"
-msgstr "index"
-
msgid "index this attribute's value in the plain text index"
msgstr "indexer la valeur de cet attribut dans l'index plein texte"
@@ -2704,6 +2739,12 @@
msgid "is"
msgstr "de type"
+msgid "is object of:"
+msgstr "est object de"
+
+msgid "is subject of:"
+msgstr "est sujet de"
+
msgid ""
"is the subject/object entity of the relation composed of the other ? This "
"implies that when the composite is deleted, composants are also deleted."
@@ -2756,6 +2797,12 @@
msgid "last connection date"
msgstr "dernière date de connexion"
+msgid "last login time"
+msgstr "dernière date de connexion"
+
+msgid "last name"
+msgstr "nom"
+
msgid "last usage"
msgstr "dernier usage"
@@ -2905,6 +2952,9 @@
msgid "more actions"
msgstr "plus d'actions"
+msgid "more info about this workflow"
+msgstr "plus d'information sur ce workflow"
+
msgid "multiple edit"
msgstr "édition multiple"
@@ -3048,8 +3098,8 @@
msgid "object"
msgstr "objet"
-msgid "object_plural:"
-msgstr "objets :"
+msgid "object type"
+msgstr "type de l'objet"
msgid "october"
msgstr "octobre"
@@ -3131,11 +3181,8 @@
msgid "permission"
msgstr "permission"
-msgid "permissions for entities"
-msgstr "permissions pour les entités"
-
-msgid "permissions for relations"
-msgstr "permissions pour les relations"
+msgid "permissions"
+msgstr "permissions"
msgid "permissions for this entity"
msgstr "permissions pour cette entité"
@@ -3252,6 +3299,9 @@
msgid "related entity has no workflow set"
msgstr "l'entité lié n'a pas de workflow"
+msgid "relation"
+msgstr "relation"
+
#, python-format
msgid "relation %(relname)s of %(ent)s"
msgstr "relation %(relname)s de %(ent)s"
@@ -3259,6 +3309,9 @@
msgid "relation add"
msgstr "ajout de relation"
+msgid "relation direction"
+msgstr "sens de la relation"
+
msgid "relation removal"
msgstr "suppression de relation"
@@ -3280,9 +3333,6 @@
msgid "relation_type_object"
msgstr "définition"
-msgid "relations"
-msgstr "relations"
-
msgid "relations deleted"
msgstr "relations supprimées"
@@ -3324,6 +3374,9 @@
msgid "require_permission_object"
msgstr "permission of"
+msgid "required"
+msgstr "requis"
+
msgid "required attribute"
msgstr "attribut requis"
@@ -3372,11 +3425,14 @@
msgid "schema's permissions definitions"
msgstr "permissions définies dans le schéma"
+msgid "schema-description"
+msgstr "types d'entité et de relation"
+
msgid "schema-image"
-msgstr "schéma"
-
-msgid "schema-text"
-msgstr "description"
+msgstr "image"
+
+msgid "schema-security"
+msgstr "permissions"
msgid "search"
msgstr "rechercher"
@@ -3569,12 +3625,12 @@
msgid "subject"
msgstr "sujet"
+msgid "subject type"
+msgstr "type du sujet"
+
msgid "subject/object cardinality"
msgstr "cardinalité sujet/objet"
-msgid "subject_plural:"
-msgstr "sujets :"
-
msgid "subworkflow"
msgstr "sous-workflow"
@@ -3981,6 +4037,9 @@
msgid "used to grant a permission to a group"
msgstr "utiliser pour donner une permission à un groupe"
+msgid "user"
+msgstr "utilisateur"
+
#, python-format
msgid ""
"user %s has made the following change(s):\n"
@@ -4071,6 +4130,12 @@
msgid "wf_info_for_object"
msgstr "historique des transitions"
+msgid "wf_tab_info"
+msgstr "description"
+
+msgid "wfgraph"
+msgstr "image du workflow"
+
msgid ""
"when multiple addresses are equivalent (such as python-projects@logilab.org "
"and python-projects@lists.logilab.org), set this to indicate which is the "
--- a/migration.py Tue Apr 13 19:22:46 2010 +0200
+++ b/migration.py Tue Apr 13 19:43:51 2010 +0200
@@ -16,6 +16,7 @@
from logilab.common.decorators import cached
from logilab.common.configuration import REQUIRED, read_old_config
from logilab.common.shellutils import ASK
+from logilab.common.changelog import Version
from cubicweb import ConfigurationError
@@ -374,3 +375,75 @@
from logging import getLogger
from cubicweb import set_log_methods
set_log_methods(MigrationHelper, getLogger('cubicweb.migration'))
+
+
+def version_strictly_lower(a, b):
+ if a:
+ a = Version(a)
+ if b:
+ b = Version(b)
+ return a < b
+
+def max_version(a, b):
+ return str(max(Version(a), Version(b)))
+
+class ConfigurationProblem(object):
+ """Each cube has its own list of dependencies on other cubes/versions.
+
+ The ConfigurationProblem is used to record the loaded cubes, then to detect
+ inconsistencies in their dependencies.
+
+ See configuration management on Wikipedia for literature.
+ """
+
+ def __init__(self, config):
+ self.cubes = {}
+ self.config = config
+
+ def add_cube(self, name, version):
+ self.cubes[name] = version
+
+ def solve(self):
+ self.warnings = []
+ self.errors = []
+ self.read_constraints()
+ for cube, versions in sorted(self.constraints.items()):
+ oper, version = None, None
+ # simplify constraints
+ if versions:
+ for constraint in versions:
+ op, ver = constraint
+ if oper is None:
+ oper = op
+ version = ver
+ elif op == '>=' and oper == '>=':
+ version = max_version(ver, version)
+ else:
+ print 'unable to handle this case', oper, version, op, ver
+ # "solve" constraint satisfaction problem
+ if cube not in self.cubes:
+ self.errors.append( ('add', cube, version) )
+ elif versions:
+ lower_strict = version_strictly_lower(self.cubes[cube], version)
+ if oper in ('>=','='):
+ if lower_strict:
+ self.errors.append( ('update', cube, version) )
+ else:
+ print 'unknown operator', oper
+
+ def read_constraints(self):
+ self.constraints = {}
+ self.reverse_constraints = {}
+ for cube in self.cubes:
+ use = self.config.cube_dependencies(cube)
+ for name, constraint in use.iteritems():
+ self.constraints.setdefault(name,set())
+ if constraint:
+ try:
+ oper, version = constraint.split()
+ self.constraints[name].add( (oper, version) )
+ except:
+ self.warnings.append(
+ 'cube %s depends on %s but constraint badly '
+ 'formatted: %s' % (cube, name, constraint))
+ self.reverse_constraints.setdefault(name, set()).add(cube)
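The new ConfigurationProblem helper collects the cubes loaded by an instance and checks them against each cube's declared dependencies, filling errors with missing or outdated cubes and warnings with malformed constraints. A minimal sketch of how it might be driven follows; the FakeConfig stub and the cube names are invented for illustration, and only the cube_dependencies() method is actually required by read_constraints().

    from cubicweb.migration import ConfigurationProblem

    class FakeConfig(object):
        """stand-in for a cubicweb configuration: only cube_dependencies()
        is needed by ConfigurationProblem.read_constraints()"""
        def cube_dependencies(self, cube):
            # each cube maps its dependencies to a constraint string (or None)
            return {'blog': {'comment': '>= 1.0.0'},
                    'comment': {}}.get(cube, {})

    cp = ConfigurationProblem(FakeConfig())
    cp.add_cube('blog', '1.2.0')     # cubes found in the instance, with versions
    cp.add_cube('comment', '0.9.0')
    cp.solve()
    print cp.errors                  # [('update', 'comment', '1.0.0')]
    print cp.warnings                # []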
--- a/misc/migration/postcreate.py Tue Apr 13 19:22:46 2010 +0200
+++ b/misc/migration/postcreate.py Tue Apr 13 19:43:51 2010 +0200
@@ -43,7 +43,7 @@
# need this since we already have at least one user in the database (the default admin)
for user in rql('Any X WHERE X is CWUser').entities():
rql('SET X in_state S WHERE X eid %(x)s, S eid %(s)s',
- {'x': user.eid, 's': activated.eid}, 'x')
+ {'x': user.eid, 's': activated.eid})
# on interactive mode, ask for level 0 persistent options
if interactive_mode:
@@ -55,11 +55,12 @@
default = cfg.option_default(optname, optdict)
# only record values differing from default
if value != default:
- rql('INSERT CWProperty X: X pkey %(k)s, X value %(v)s', {'k': key, 'v': value})
+ rql('INSERT CWProperty X: X pkey %(k)s, X value %(v)s',
+ {'k': key, 'v': value})
# add PERM_USE_TEMPLATE_FORMAT permission
from cubicweb.schema import PERM_USE_TEMPLATE_FORMAT
usetmplperm = create_entity('CWPermission', name=PERM_USE_TEMPLATE_FORMAT,
label=_('use template languages'))
rql('SET X require_group G WHERE G name "managers", X eid %(x)s',
- {'x': usetmplperm.eid}, 'x')
+ {'x': usetmplperm.eid})
--- a/pytestconf.py Tue Apr 13 19:22:46 2010 +0200
+++ b/pytestconf.py Tue Apr 13 19:43:51 2010 +0200
@@ -5,8 +5,6 @@
from os.path import split, splitext
from logilab.common.pytest import PyTester
-from cubicweb.etwist.server import _gc_debug
-
class CustomPyTester(PyTester):
def testfile(self, filename, batchmode=False):
try:
@@ -22,7 +20,6 @@
if getattr(cls, '__module__', None) != modname:
continue
clean_repo_test_cls(cls)
- #_gc_debug()
def clean_repo_test_cls(cls):
if 'repo' in cls.__dict__:
--- a/rqlrewrite.py Tue Apr 13 19:22:46 2010 +0200
+++ b/rqlrewrite.py Tue Apr 13 19:43:51 2010 +0200
@@ -41,15 +41,15 @@
except KeyError:
continue
stinfo = var.stinfo
- if stinfo.get('uidrels'):
+ if stinfo.get('uidrel') is not None:
continue # eid specified, no need for additional type specification
try:
- typerels = rqlst.defined_vars[varname].stinfo.get('typerels')
+ typerel = rqlst.defined_vars[varname].stinfo.get('typerel')
except KeyError:
assert varname in rqlst.aliases
continue
- if newroot is rqlst and typerels:
- mytyperel = iter(typerels).next()
+ if newroot is rqlst and typerel is not None:
+ mytyperel = typerel
else:
for vref in newroot.defined_vars[varname].references():
rel = vref.relation()
@@ -80,7 +80,7 @@
# tree is not annotated yet, no scope set so add the restriction
# to the root
rel = newroot.add_type_restriction(var, possibletypes)
- stinfo['typerels'] = frozenset((rel,))
+ stinfo['typerel'] = rel
stinfo['possibletypes'] = possibletypes
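These hunks track a change in the variables' static analysis info (stinfo): the 'uidrels' and 'typerels' keys, which used to hold frozensets of relation nodes, become the single-valued 'uidrel' and 'typerel' (a relation node or None). Code inspecting stinfo therefore switches from set tests to None tests; a small sketch of the adjusted access pattern, where var is assumed to be an rql Variable carrying the new stinfo layout:

    def main_type_relation(var):
        """return the 'is' relation restricting var's type, or None
        (3.8-style access; var is an rql Variable with the new stinfo keys)"""
        if var.stinfo.get('uidrel') is not None:
            return None                   # eid given, no type restriction needed
        # formerly: typerels = stinfo['typerels']; iter(typerels).next()
        return var.stinfo.get('typerel')  # a single relation node, or None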
--- a/rset.py Tue Apr 13 19:22:46 2010 +0200
+++ b/rset.py Tue Apr 13 19:43:51 2010 +0200
@@ -31,14 +31,12 @@
:type rql: str or unicode
:ivar rql: the original RQL query string
"""
- def __init__(self, results, rql, args=None, description=(), cachekey=None,
- rqlst=None):
+ def __init__(self, results, rql, args=None, description=(), rqlst=None):
self.rows = results
self.rowcount = results and len(results) or 0
# original query and arguments
self.rql = rql
self.args = args
- self.cachekey = cachekey
# entity types for each cell (same shape as rows)
# maybe discarded if specified when the query has been executed
self.description = description
--- a/schema.py Tue Apr 13 19:22:46 2010 +0200
+++ b/schema.py Tue Apr 13 19:43:51 2010 +0200
@@ -705,14 +705,14 @@
if eidto is None:
# checking constraint for an attribute relation
restriction = 'S eid %(s)s, ' + self.restriction
- args, ck = {'s': eidfrom}, 's'
+ args = {'s': eidfrom}
else:
restriction = 'S eid %(s)s, O eid %(o)s, ' + self.restriction
- args, ck = {'s': eidfrom, 'o': eidto}, ('s', 'o')
+ args = {'s': eidfrom, 'o': eidto}
rql = 'Any %s WHERE %s' % (self.mainvars, restriction)
if self.distinct_query:
rql = 'DISTINCT ' + rql
- return session.execute(rql, args, ck, build_descr=False)
+ return session.execute(rql, args, build_descr=False)
class RQLConstraint(RepoEnforcedRQLConstraintMixIn, RQLVocabularyConstraint):
@@ -839,9 +839,8 @@
return False
if keyarg is None:
kwargs.setdefault('u', session.user.eid)
- cachekey = kwargs.keys()
try:
- rset = session.execute(rql, kwargs, cachekey, build_descr=True)
+ rset = session.execute(rql, kwargs, build_descr=True)
except NotImplementedError:
self.critical('cant check rql expression, unsupported rql %s', rql)
if self.eid is not None:
--- a/schemaviewer.py Tue Apr 13 19:22:46 2010 +0200
+++ b/schemaviewer.py Tue Apr 13 19:43:51 2010 +0200
@@ -22,11 +22,8 @@
self.req = req
if req is not None:
self.req.add_css('cubicweb.schema.css')
- self._possible_views = req.vreg['views'].possible_views
if not encoding:
encoding = req.encoding
- else:
- self._possible_views = lambda x: ()
self.encoding = encoding
def format_acls(self, schema, access_types):
@@ -42,7 +39,6 @@
return Section(children=(Table(cols=2, cheaders=1, rheaders=1, children=data),),
klass='acl')
-
def visit_schema(self, schema, display_relations=0, skiptypes=()):
"""get a layout for a whole schema"""
title = Title(self.req._('Schema %s') % schema.name,
@@ -98,11 +94,6 @@
def rschema_link_url(self, rschema):
return self.req.build_url('cwrtype/%s' % rschema)
- def possible_views(self, etype):
- rset = self.req.etype_rset(etype)
- return [v for v in self._possible_views(self.req, rset)
- if v.category != 'startupview']
-
def stereotype(self, name):
return Span((' <<%s>>' % name,), klass='stereotype')
@@ -113,9 +104,6 @@
layout.append(Link(etype,' ' , id=etype)) # anchor
title = Link(self.eschema_link_url(eschema), etype)
boxchild = [Section(children=(title, ' (%s)'% eschema.display_name(self.req)), klass='title')]
- table = Table(cols=4, rheaders=1, klass='listing',
- children=self._entity_attributes_data(eschema))
- boxchild.append(Section(children=(table,), klass='body'))
data = []
data.append(Section(children=boxchild, klass='box'))
data.append(Section(children='', klass='vl'))
@@ -152,16 +140,6 @@
data.append(Section(children=rels, klass='rels'))
data.append(Section(children=t_vars, klass='vars'))
layout.append(Section(children=data, klass='entityAttributes'))
- if eschema.final: # stop here for final entities
- return layout
- _ = self.req._
- if self.req.user.matching_groups('managers'):
- # layout.append(self.format_acls(eschema, ('read', 'add', 'delete', 'update')))
- # possible views for this entity type
- views = [_(view.title) for view in self.possible_views(etype)]
- layout.append(Section(children=(Table(cols=1, rheaders=1,
- children=[_('views')]+views),),
- klass='views'))
return layout
def visit_relationschema(self, rschema, title=True):
@@ -208,6 +186,8 @@
val = getattr(rdef, prop)
if val is None:
val = ''
+ elif prop == 'constraints':
+ val = ', '.join([c.restriction for c in val])
elif isinstance(val, (list, tuple)):
val = ', '.join(str(v) for v in val)
elif val and isinstance(val, basestring):
--- a/selectors.py Tue Apr 13 19:22:46 2010 +0200
+++ b/selectors.py Tue Apr 13 19:43:51 2010 +0200
@@ -107,7 +107,7 @@
__regid__ = 'loggeduserlink'
def call(self):
- if self._cw.cnx.anonymous_connection:
+ if self._cw.session.anonymous_session:
# display login link
...
else:
@@ -1030,7 +1030,7 @@
def score(self, req, rset, row, col):
try:
return len(req.execute(self.rql, {'x': rset[row][col],
- 'u': req.user.eid}, 'x'))
+ 'u': req.user.eid}))
except Unauthorized:
return 0
@@ -1038,12 +1038,24 @@
@objectify_selector
@lltrace
+def no_cnx(cls, req, rset, *args, **kwargs):
+ """Return 1 if the web session has no connection set. This occurs when
+ anonymous access is not allowed and the user isn't authenticated.
+
+ May only be used on the web side, not on the data repository side.
+ """
+ if req.cnx is None:
+ return 1
+ return 0
+
+@objectify_selector
+@lltrace
def authenticated_user(cls, req, **kwargs):
"""Return 1 if the user is authenticated (e.g. not the anonymous user).
May only be used on the web side, not on the data repository side.
"""
- if req.cnx.anonymous_connection:
+ if req.session.anonymous_session:
return 0
return 1
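The new no_cnx selector complements the reworked authenticated_user: the former selects when the web session has no repository connection at all, the latter when a real (non-anonymous) user is logged in. A hedged sketch of how they could be wired on startup views; the view classes, regids and messages are invented, only the two selectors come from this patch.

    from cubicweb.selectors import no_cnx, authenticated_user
    from cubicweb.view import StartupView

    class LoginPromptView(StartupView):
        """candidate when anonymous access is disabled and nobody is logged in"""
        __regid__ = 'myapp.loginprompt'
        __select__ = no_cnx()

        def call(self):
            self.w(u'please log in')

    class WelcomeView(StartupView):
        """candidate only for authenticated (non-anonymous) users"""
        __regid__ = 'myapp.welcome'
        __select__ = StartupView.__select__ & authenticated_user()

        def call(self):
            self.w(u'welcome %s' % self._cw.user.login)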
--- a/server/hook.py Tue Apr 13 19:22:46 2010 +0200
+++ b/server/hook.py Tue Apr 13 19:43:51 2010 +0200
@@ -74,10 +74,7 @@
self.unregister(cls)
def register(self, obj, **kwargs):
- for event in obj.events:
- if event not in ALL_HOOKS:
- raise Exception('bad event %s on %s.%s' % (
- event, obj.__module__, obj.__name__))
+ obj.check_events()
super(HooksRegistry, self).register(obj, **kwargs)
def call_hooks(self, event, session=None, **kwargs):
@@ -198,10 +195,13 @@
# XXX deprecated
enabled = True
- @classproperty
- def __registries__(cls):
+ @classmethod
+ def check_events(cls):
try:
- return ['%s_hooks' % ev for ev in cls.events]
+ for event in cls.events:
+ if event not in ALL_HOOKS:
+ raise Exception('bad event %s on %s.%s' % (
+ event, cls.__module__, cls.__name__))
except AttributeError:
raise
except TypeError:
@@ -209,6 +209,11 @@
cls.events, cls.__module__, cls.__name__))
@classproperty
+ def __registries__(cls):
+ cls.check_events()
+ return ['%s_hooks' % ev for ev in cls.events]
+
+ @classproperty
def __regid__(cls):
warn('[3.6] %s.%s: please specify an id for your hook'
% (cls.__module__, cls.__name__), DeprecationWarning)
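Event validation now lives in the check_events() classmethod, called both by HooksRegistry.register() and by the __registries__ property, so a hook declaring an unknown event (say a typo such as 'after_add_entiti') fails as soon as its registries are computed rather than only at registration time. A minimal, well-formed hook for reference; the regid and log message are illustrative only.

    from cubicweb.server import hook

    class NotifyOnCreation(hook.Hook):
        """log every entity creation (illustrative hook)"""
        __regid__ = 'myapp.notify_on_creation'
        events = ('after_add_entity',)   # each event must belong to ALL_HOOKS

        def __call__(self):
            # for entity events the framework sets self.entity
            self.info('created entity #%s', self.entity.eid)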
--- a/server/migractions.py Tue Apr 13 19:22:46 2010 +0200
+++ b/server/migractions.py Tue Apr 13 19:43:51 2010 +0200
@@ -268,9 +268,9 @@
if self.session:
self.session.set_pool()
- def rqlexecall(self, rqliter, cachekey=None, ask_confirm=True):
+ def rqlexecall(self, rqliter, ask_confirm=True):
for rql, kwargs in rqliter:
- self.rqlexec(rql, kwargs, cachekey, ask_confirm=ask_confirm)
+ self.rqlexec(rql, kwargs, ask_confirm=ask_confirm)
@cached
def _create_context(self):
@@ -361,14 +361,14 @@
# handle groups
newgroups = list(erschema.get_groups(action))
for geid, gname in self.rqlexec('Any G, GN WHERE T %s G, G name GN, '
- 'T eid %%(x)s' % perm, {'x': teid}, 'x',
+ 'T eid %%(x)s' % perm, {'x': teid},
ask_confirm=False):
if not gname in newgroups:
if not confirm or self.confirm('Remove %s permission of %s to %s?'
% (action, erschema, gname)):
self.rqlexec('DELETE T %s G WHERE G eid %%(x)s, T eid %s'
% (perm, teid),
- {'x': geid}, 'x', ask_confirm=False)
+ {'x': geid}, ask_confirm=False)
else:
newgroups.remove(gname)
for gname in newgroups:
@@ -376,7 +376,7 @@
% (action, erschema, gname)):
self.rqlexec('SET T %s G WHERE G eid %%(x)s, T eid %s'
% (perm, teid),
- {'x': gm[gname]}, 'x', ask_confirm=False)
+ {'x': gm[gname]}, ask_confirm=False)
# handle rql expressions
newexprs = dict((expr.expression, expr) for expr in erschema.get_rqlexprs(action))
for expreid, expression in self.rqlexec('Any E, EX WHERE T %s E, E expression EX, '
@@ -388,7 +388,7 @@
# deleting the relation will delete the expression entity
self.rqlexec('DELETE T %s E WHERE E eid %%(x)s, T eid %s'
% (perm, teid),
- {'x': expreid}, 'x', ask_confirm=False)
+ {'x': expreid}, ask_confirm=False)
else:
newexprs.pop(expression)
for expression in newexprs.values():
@@ -399,7 +399,7 @@
'X expression %%(expr)s, X mainvars %%(vars)s, T %s X '
'WHERE T eid %%(x)s' % perm,
{'expr': expr, 'exprtype': exprtype,
- 'vars': expression.mainvars, 'x': teid}, 'x',
+ 'vars': expression.mainvars, 'x': teid},
ask_confirm=False)
def _synchronize_rschema(self, rtype, syncrdefs=True, syncperms=True, syncprops=True):
@@ -524,14 +524,13 @@
newcstr = None
if newcstr is None:
self.rqlexec('DELETE X constrained_by C WHERE C eid %(x)s',
- {'x': cstr.eid}, 'x',
- ask_confirm=confirm)
+ {'x': cstr.eid}, ask_confirm=confirm)
else:
newconstraints.remove(newcstr)
value = unicode(newcstr.serialize())
if value != unicode(cstr.serialize()):
self.rqlexec('SET X value %(v)s WHERE X eid %(x)s',
- {'x': cstr.eid, 'v': value}, 'x',
+ {'x': cstr.eid, 'v': value},
ask_confirm=confirm)
# 2. add new constraints
cstrtype_map = self.cstrtype_mapping()
@@ -644,10 +643,10 @@
self.cmd_drop_relation_definition(
str(fromtype), rschema.type, str(totype))
# execute post-remove files
- for pack in reversed(removedcubes):
- self.exec_event_script('postremove', self.config.cube_dir(pack))
+ for cube in reversed(removedcubes):
+ self.exec_event_script('postremove', self.config.cube_dir(cube))
self.rqlexec('DELETE CWProperty X WHERE X pkey %(pk)s',
- {'pk': u'system.version.'+pack}, ask_confirm=False)
+ {'pk': u'system.version.'+cube}, ask_confirm=False)
self.commit()
# schema migration actions ################################################
@@ -736,8 +735,8 @@
continue
if instspschema.specializes() != eschema:
self.rqlexec('SET D specializes P WHERE D eid %(d)s, P name %(pn)s',
- {'d': instspschema.eid,
- 'pn': eschema.type}, ask_confirm=confirm)
+ {'d': instspschema.eid, 'pn': eschema.type},
+ ask_confirm=confirm)
for rschema, tschemas, role in spschema.relation_definitions(True):
for tschema in tschemas:
if not tschema in instschema:
@@ -1073,12 +1072,12 @@
for etype in wfof:
rset = self.rqlexec(
'SET X workflow_of ET WHERE X eid %(x)s, ET name %(et)s',
- {'x': wf.eid, 'et': etype}, 'x', ask_confirm=False)
+ {'x': wf.eid, 'et': etype}, ask_confirm=False)
assert rset, 'unexistant entity type %s' % etype
if default:
self.rqlexec(
'SET ET default_workflow X WHERE X eid %(x)s, ET name %(et)s',
- {'x': wf.eid, 'et': etype}, 'x', ask_confirm=False)
+ {'x': wf.eid, 'et': etype}, ask_confirm=False)
if commit:
self.commit()
return wf
@@ -1202,6 +1201,9 @@
def rqlexec(self, rql, kwargs=None, cachekey=None, build_descr=True,
ask_confirm=True):
"""rql action"""
+ if cachekey is not None:
+ warn('[3.8] cachekey is deprecated, you can safely remove this argument',
+ DeprecationWarning, stacklevel=2)
if not isinstance(rql, (tuple, list)):
rql = ( (rql, kwargs), )
res = None
@@ -1213,7 +1215,7 @@
msg = rql
if not ask_confirm or self.confirm('Execute rql: %s ?' % msg):
try:
- res = execute(rql, kwargs, cachekey, build_descr=build_descr)
+ res = execute(rql, kwargs, build_descr=build_descr)
except Exception, ex:
if self.confirm('Error: %s\nabort?' % ex):
raise
--- a/server/msplanner.py Tue Apr 13 19:22:46 2010 +0200
+++ b/server/msplanner.py Tue Apr 13 19:43:51 2010 +0200
@@ -309,21 +309,24 @@
# find for each source which variable/solution are supported
for varname, varobj in self.rqlst.defined_vars.items():
# if variable has an eid specified, we can get its source directly
- # NOTE: use uidrels and not constnode to deal with "X eid IN(1,2,3,4)"
- if varobj.stinfo['uidrels']:
- vrels = varobj.stinfo['relations'] - varobj.stinfo['uidrels']
- for rel in varobj.stinfo['uidrels']:
- for const in rel.children[1].get_nodes(Constant):
- eid = const.eval(self.plan.args)
- source = self._session.source_from_eid(eid)
- if vrels and not any(source.support_relation(r.r_type)
- for r in vrels):
- self._set_source_for_term(self.system_source, varobj)
- else:
- self._set_source_for_term(source, varobj)
+ # NOTE: use uidrel and not constnode to deal with "X eid IN(1,2,3,4)"
+ if varobj.stinfo['uidrel'] is not None:
+ rel = varobj.stinfo['uidrel']
+ hasrel = len(varobj.stinfo['relations']) > 1
+ for const in rel.children[1].get_nodes(Constant):
+ eid = const.eval(self.plan.args)
+ source = self._session.source_from_eid(eid)
+ if (source is self.system_source
+ or (hasrel and
+ not any(source.support_relation(r.r_type)
+ for r in varobj.stinfo['relations']
+ if not r is rel))):
+ self._set_source_for_term(self.system_source, varobj)
+ else:
+ self._set_source_for_term(source, varobj)
continue
rels = varobj.stinfo['relations']
- if not rels and not varobj.stinfo['typerels']:
+ if not rels and varobj.stinfo['typerel'] is None:
# (rare) case where the variable has no type specified nor
# relation accessed ex. "Any MAX(X)"
self._set_source_for_term(self.system_source, varobj)
@@ -700,7 +703,7 @@
for var in select.defined_vars.itervalues():
if not var in terms:
stinfo = var.stinfo
- for ovar, rtype in stinfo['attrvars']:
+ for ovar, rtype in stinfo.get('attrvars', ()):
if ovar in terms:
needsel.add(var.name)
terms.append(var)
@@ -778,20 +781,19 @@
# variable is refed by an outer scope and should be substituted
# using an 'identity' relation (else we'll get a conflict of
# temporary tables)
- if rhsvar in terms and not lhsvar in terms:
+ if rhsvar in terms and not lhsvar in terms and lhsvar.scope is lhsvar.stmt:
self._identity_substitute(rel, lhsvar, terms, needsel)
- elif lhsvar in terms and not rhsvar in terms:
+ elif lhsvar in terms and not rhsvar in terms and rhsvar.scope is rhsvar.stmt:
self._identity_substitute(rel, rhsvar, terms, needsel)
def _identity_substitute(self, relation, var, terms, needsel):
newvar = self._insert_identity_variable(relation.scope, var)
- if newvar is not None:
- # ensure relation is using '=' operator, else we rely on a
- # sqlgenerator side effect (it won't insert an inequality operator
- # in this case)
- relation.children[1].operator = '='
- terms.append(newvar)
- needsel.add(newvar.name)
+ # ensure relation is using '=' operator, else we rely on a
+ # sqlgenerator side effect (it won't insert an inequality operator
+ # in this case)
+ relation.children[1].operator = '='
+ terms.append(newvar)
+ needsel.add(newvar.name)
def _choose_term(self, sourceterms):
"""pick one term among terms supported by a source, which will be used
@@ -1419,7 +1421,7 @@
return False
if not var in terms or used_in_outer_scope(var, self.current_scope):
return False
- if any(v for v, _ in var.stinfo['attrvars'] if not v in terms):
+ if any(v for v, _ in var.stinfo.get('attrvars', ()) if not v in terms):
return False
return True
--- a/server/mssteps.py Tue Apr 13 19:22:46 2010 +0200
+++ b/server/mssteps.py Tue Apr 13 19:43:51 2010 +0200
@@ -61,7 +61,7 @@
if not isinstance(vref, VariableRef):
continue
var = vref.variable
- if var.stinfo['attrvars']:
+ if var.stinfo.get('attrvars'):
for lhsvar, rtype in var.stinfo['attrvars']:
if lhsvar.name in srqlst.defined_vars:
key = '%s.%s' % (lhsvar.name, rtype)
--- a/server/querier.py Tue Apr 13 19:22:46 2010 +0200
+++ b/server/querier.py Tue Apr 13 19:43:51 2010 +0200
@@ -326,16 +326,9 @@
varkwargs = {}
if not session.transaction_data.get('security-rqlst-cache'):
for var in rqlst.defined_vars.itervalues():
- for rel in var.stinfo['uidrels']:
- const = rel.children[1].children[0]
- try:
- varkwargs[var.name] = typed_eid(const.eval(self.args))
- break
- except AttributeError:
- #from rql.nodes import Function
- #assert isinstance(const, Function)
- # X eid IN(...)
- pass
+ if var.stinfo['constnode'] is not None:
+ eid = var.stinfo['constnode'].eval(self.args)
+ varkwargs[var.name] = typed_eid(eid)
# dictionnary of variables restricted for security reason
localchecks = {}
restricted_vars = set()
@@ -529,16 +522,22 @@
def set_schema(self, schema):
self.schema = schema
repo = self._repo
+ # rql st and solution cache. Don't bother using a Cache instance: we
+ # should have a limited number of queries in there, since there are no
+ # entries in this cache for user queries (which have no args)
+ self._rql_cache = {}
+ # rql cache key cache
+ self._rql_ck_cache = Cache(repo.config['rql-cache-size'])
+ # some cache usage stats
+ self.cache_hit, self.cache_miss = 0, 0
# rql parsing / analysing helper
self.solutions = repo.vreg.solutions
- self._rql_cache = Cache(repo.config['rql-cache-size'])
- self.cache_hit, self.cache_miss = 0, 0
+ rqlhelper = repo.vreg.rqlhelper
+ self._parse = rqlhelper.parse
+ self._annotate = rqlhelper.annotate
# rql planner
# note: don't use repo.sources, may not be built yet, and also "admin"
# isn't an actual source
- rqlhelper = repo.vreg.rqlhelper
- self._parse = rqlhelper.parse
- self._annotate = rqlhelper.annotate
if len([uri for uri in repo.config.sources() if uri != 'admin']) < 2:
from cubicweb.server.ssplanner import SSPlanner
self._planner = SSPlanner(schema, rqlhelper)
@@ -561,7 +560,7 @@
return InsertPlan(self, rqlst, args, session)
return ExecutionPlan(self, rqlst, args, session)
- def execute(self, session, rql, args=None, eid_key=None, build_descr=True):
+ def execute(self, session, rql, args=None, build_descr=True):
"""execute a rql query, return resulting rows and their description in
a `ResultSet` object
@@ -570,12 +569,6 @@
* `build_descr` is a boolean flag indicating if the description should
be built on select queries (if false, the description will be en empty
list)
- * `eid_key` must be both a key in args and a substitution in the rql
- query. It should be used to enhance cacheability of rql queries.
- It may be a tuple for keys in args.
- `eid_key` must be provided in cases where a eid substitution is provided
- and resolves ambiguities in the possible solutions inferred for each
- variable in the query.
on INSERT queries, there will be one row with the eid of each inserted
entity
@@ -591,40 +584,33 @@
print '*'*80
print 'querier input', rql, args
# parse the query and binds variables
- if eid_key is not None:
- if not isinstance(eid_key, (tuple, list)):
- eid_key = (eid_key,)
- cachekey = [rql]
- for key in eid_key:
- try:
- etype = self._repo.type_from_eid(args[key], session)
- except KeyError:
- raise QueryError('bad cache key %s (no value)' % key)
- except TypeError:
- raise QueryError('bad cache key %s (value: %r)' % (
- key, args[key]))
- except UnknownEid:
- # we want queries such as "Any X WHERE X eid 9999"
- # return an empty result instead of raising UnknownEid
- return empty_rset(rql, args)
- cachekey.append(etype)
- # ensure eid is correctly typed in args
- args[key] = typed_eid(args[key])
- cachekey = tuple(cachekey)
- else:
+ try:
cachekey = rql
- try:
+ if args:
+ eidkeys = self._rql_ck_cache[rql]
+ if eidkeys:
+ try:
+ cachekey = self._repo.querier_cache_key(session, rql,
+ args, eidkeys)
+ except UnknownEid:
+ # we want queries such as "Any X WHERE X eid 9999"
+ # return an empty result instead of raising UnknownEid
+ return empty_rset(rql, args)
rqlst = self._rql_cache[cachekey]
self.cache_hit += 1
except KeyError:
self.cache_miss += 1
rqlst = self.parse(rql)
try:
- self.solutions(session, rqlst, args)
+ eidkeys = self.solutions(session, rqlst, args)
except UnknownEid:
# we want queries such as "Any X WHERE X eid 9999" return an
# empty result instead of raising UnknownEid
return empty_rset(rql, args, rqlst)
+ self._rql_ck_cache[rql] = eidkeys
+ if eidkeys:
+ cachekey = self._repo.querier_cache_key(session, rql, args,
+ eidkeys)
self._rql_cache[cachekey] = rqlst
orig_rqlst = rqlst
if rqlst.TYPE != 'select':
@@ -684,7 +670,7 @@
# FIXME: get number of affected entities / relations on non
# selection queries ?
# return a result set object
- return ResultSet(results, rql, args, descr, eid_key, orig_rqlst)
+ return ResultSet(results, rql, args, descr, orig_rqlst)
from logging import getLogger
from cubicweb import set_log_methods
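With the eid_key argument gone, the querier keeps two caches: _rql_ck_cache maps the raw RQL string to the set of argument names that the solver identified as eids, while _rql_cache maps the derived cache key (the RQL string plus the entity type of each of those eids, as built by Repository.querier_cache_key()) to the annotated syntax tree. A stripped-down sketch of the key derivation follows; the error handling and eid coercion done by the real method are omitted, and type_from_eid is passed in as a stub.

    def querier_cache_key(rql, args, eidkeys, type_from_eid):
        """the rql string extended with the type of each eid argument"""
        cachekey = [rql]
        for key in sorted(eidkeys):
            cachekey.append(type_from_eid(args[key]))
        return tuple(cachekey)

    # 'Any X WHERE X eid %(x)s' executed for a CWUser and then for a Card
    # lands on two distinct cache entries, without any caller-side eid_key:
    #   ('Any X WHERE X eid %(x)s', 'CWUser')
    #   ('Any X WHERE X eid %(x)s', 'Card')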
--- a/server/repository.py Tue Apr 13 19:22:46 2010 +0200
+++ b/server/repository.py Tue Apr 13 19:43:51 2010 +0200
@@ -33,7 +33,7 @@
from yams.schema import role_name
from rql import RQLSyntaxError
-from cubicweb import (CW_SOFTWARE_ROOT, CW_MIGRATION_MAP,
+from cubicweb import (CW_SOFTWARE_ROOT, CW_MIGRATION_MAP, QueryError,
UnknownEid, AuthenticationError, ExecutionError,
ETypeNotSupportedBySources, MultiSourcesError,
BadConnectionId, Unauthorized, ValidationError,
@@ -76,12 +76,12 @@
with security_enabled(session, read=False):
session.execute('DELETE X %s Y WHERE X eid %%(x)s, '
'NOT Y eid %%(y)s' % rtype,
- {'x': eidfrom, 'y': eidto}, 'x')
+ {'x': eidfrom, 'y': eidto})
if card[1] in '1?':
with security_enabled(session, read=False):
session.execute('DELETE X %sY WHERE Y eid %%(y)s, '
'NOT X eid %%(x)s' % rtype,
- {'x': eidfrom, 'y': eidto}, 'y')
+ {'x': eidfrom, 'y': eidto})
class Repository(object):
@@ -408,7 +408,7 @@
"""return a CWUser entity for user with the given eid"""
cls = self.vreg['etypes'].etype_class('CWUser')
rql = cls.fetch_rql(session.user, ['X eid %(x)s'])
- rset = session.execute(rql, {'x': eid}, 'x')
+ rset = session.execute(rql, {'x': eid})
assert len(rset) == 1, rset
cwuser = rset.get_entity(0, 0)
# pylint: disable-msg=W0104
@@ -567,7 +567,7 @@
session.commit()
return session.id
- def execute(self, sessionid, rqlstring, args=None, eid_key=None, build_descr=True):
+ def execute(self, sessionid, rqlstring, args=None, build_descr=True):
"""execute a RQL query
* rqlstring should be an unicode string or a plain ascii string
@@ -578,7 +578,7 @@
session = self._get_session(sessionid, setpool=True)
try:
try:
- return self.querier.execute(session, rqlstring, args, eid_key,
+ return self.querier.execute(session, rqlstring, args,
build_descr)
except (Unauthorized, RQLSyntaxError):
raise
@@ -836,6 +836,21 @@
"""return the source for the given entity's eid"""
return self.sources_by_uri[self.type_and_source_from_eid(eid, session)[1]]
+ def querier_cache_key(self, session, rql, args, eidkeys):
+ cachekey = [rql]
+ for key in sorted(eidkeys):
+ try:
+ etype = self.type_from_eid(args[key], session)
+ except KeyError:
+ raise QueryError('bad cache key %s (no value)' % key)
+ except TypeError:
+ raise QueryError('bad cache key %s (value: %r)' % (
+ key, args[key]))
+ cachekey.append(etype)
+ # ensure eid is correctly typed in args
+ args[key] = typed_eid(args[key])
+ return tuple(cachekey)
+
def eid2extid(self, source, eid, session=None):
"""get local id from an eid"""
etype, uri, extid = self.type_and_source_from_eid(eid, session)
@@ -901,7 +916,7 @@
else:
# minimal meta-data
session.execute('SET X is E WHERE X eid %(x)s, E name %(name)s',
- {'x': entity.eid, 'name': entity.__regid__}, 'x')
+ {'x': entity.eid, 'name': entity.__regid__})
session.commit(reset_pool)
return eid
except:
@@ -949,7 +964,7 @@
rql = 'DELETE X %s Y WHERE X eid %%(x)s' % rtype
else:
rql = 'DELETE Y %s X WHERE X eid %%(x)s' % rtype
- session.execute(rql, {'x': eid}, 'x', build_descr=False)
+ session.execute(rql, {'x': eid}, build_descr=False)
self.system_source.delete_info(session, entity, sourceuri, extid)
def locate_relation_source(self, session, subject, rtype, object):
--- a/server/rqlannotation.py Tue Apr 13 19:22:46 2010 +0200
+++ b/server/rqlannotation.py Tue Apr 13 19:43:51 2010 +0200
@@ -38,7 +38,7 @@
stinfo['invariant'] = False
stinfo['principal'] = _select_main_var(stinfo['rhsrelations'])
continue
- if not stinfo['relations'] and not stinfo['typerels']:
+ if not stinfo['relations'] and stinfo['typerel'] is None:
# Any X, Any MAX(X)...
# those particular queries should be executed using the system
# entities table unless there is some type restriction
@@ -80,7 +80,7 @@
continue
rschema = getrschema(rel.r_type)
if rel.optional:
- if rel in stinfo['optrelations']:
+ if rel in stinfo.get('optrelations', ()):
# optional variable can't be invariant if this is the lhs
# variable of an inlined relation
if not rel in stinfo['rhsrelations'] and rschema.inlined:
@@ -296,7 +296,7 @@
def compute(self, rqlst):
# set domains for each variable
for varname, var in rqlst.defined_vars.iteritems():
- if var.stinfo['uidrels'] or \
+ if var.stinfo['uidrel'] is not None or \
self.eschema(rqlst.solutions[0][varname]).final:
ptypes = var.stinfo['possibletypes']
else:
@@ -339,7 +339,7 @@
def set_rel_constraint(self, term, rel, etypes_func):
if isinstance(term, VariableRef) and self.is_ambiguous(term.variable):
var = term.variable
- if len(var.stinfo['relations'] - var.stinfo['typerels']) == 1 \
+ if len(var.stinfo['relations']) == 1 \
or rel.sqlscope is var.sqlscope or rel.r_type == 'identity':
self.restrict(var, frozenset(etypes_func()))
try:
@@ -356,7 +356,7 @@
if isinstance(other, VariableRef) and isinstance(other.variable, Variable):
deambiguifier = other.variable
if not var is self.deambification_map.get(deambiguifier):
- if not var.stinfo['typerels']:
+ if var.stinfo['typerel'] is None:
otheretypes = deambiguifier.stinfo['possibletypes']
elif not self.is_ambiguous(deambiguifier):
otheretypes = self.varsols[deambiguifier]
@@ -364,7 +364,7 @@
# we know variable won't be invariant, try to use
# it to deambguify the current variable
otheretypes = self.varsols[deambiguifier]
- if not deambiguifier.stinfo['typerels']:
+ if deambiguifier.stinfo['typerel'] is None:
# if deambiguifier has no type restriction using 'is',
# don't record it
deambiguifier = None
--- a/server/session.py Tue Apr 13 19:22:46 2010 +0200
+++ b/server/session.py Tue Apr 13 19:43:51 2010 +0200
@@ -13,6 +13,7 @@
import threading
from time import time
from uuid import uuid4
+from warnings import warn
from logilab.common.deprecation import deprecated
from rql.nodes import VariableRef, Function, ETYPE_PYOBJ_MAP, etype_from_pyobj
@@ -642,8 +643,14 @@
return self.repo.source_from_eid(eid, self)
def execute(self, rql, kwargs=None, eid_key=None, build_descr=True):
- """db-api like method directly linked to the querier execute method"""
- rset = self._execute(self, rql, kwargs, eid_key, build_descr)
+ """db-api like method directly linked to the querier execute method.
+
+ See :meth:`cubicweb.dbapi.Cursor.execute` documentation.
+ """
+ if eid_key is not None:
+ warn('[3.8] eid_key is deprecated, you can safely remove this argument',
+ DeprecationWarning, stacklevel=2)
+ rset = self._execute(self, rql, kwargs, build_descr)
rset.req = self
return rset
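For callers, the net effect of this series is that the cache-key argument simply disappears from execute() and friends; passing eid_key still works but only triggers the DeprecationWarning above. A minimal sketch, assuming a repository session (or any db-api cursor-like object) and an entity eid:

    def entity_rset(session, eid):
        """fetch an entity by eid, 3.8 style"""
        # pre-3.8 spelling was: session.execute(rql, {'x': eid}, 'x')
        # the trailing cache key is now useless and merely triggers a warning
        return session.execute('Any X WHERE X eid %(x)s', {'x': eid})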
--- a/server/sources/ldapuser.py Tue Apr 13 19:22:46 2010 +0200
+++ b/server/sources/ldapuser.py Tue Apr 13 19:43:51 2010 +0200
@@ -230,10 +230,10 @@
elif rset:
if not execute('SET X address %(addr)s WHERE '
'U primary_email X, U eid %(u)s',
- {'addr': ldapemailaddr, 'u': eid}, 'u'):
+ {'addr': ldapemailaddr, 'u': eid}):
execute('SET X address %(addr)s WHERE '
'X eid %(x)s',
- {'addr': ldapemailaddr, 'x': rset[0][0]}, 'x')
+ {'addr': ldapemailaddr, 'x': rset[0][0]})
else:
# no email found, create it
_insert_email(session, ldapemailaddr, eid)
@@ -546,7 +546,7 @@
super(LDAPUserSource, self).after_entity_insertion(session, dn, entity)
for group in self.user_default_groups:
session.execute('SET X in_group G WHERE X eid %(x)s, G name %(group)s',
- {'x': entity.eid, 'group': group}, 'x')
+ {'x': entity.eid, 'group': group})
# search for existant email first
try:
emailaddr = self._cache[dn][self.user_rev_attrs['email']]
@@ -556,7 +556,7 @@
{'addr': emailaddr})
if rset:
session.execute('SET U primary_email X WHERE U eid %(u)s, X eid %(x)s',
- {'x': rset[0][0], 'u': entity.eid}, 'u')
+ {'x': rset[0][0], 'u': entity.eid})
else:
# not found, create it
_insert_email(session, emailaddr, entity.eid)
@@ -571,7 +571,7 @@
def _insert_email(session, emailaddr, ueid):
session.execute('INSERT EmailAddress X: X address %(addr)s, U primary_email X '
- 'WHERE U eid %(x)s', {'addr': emailaddr, 'x': ueid}, 'x')
+ 'WHERE U eid %(x)s', {'addr': emailaddr, 'x': ueid})
class GotDN(Exception):
"""exception used when a dn localizing the searched user has been found"""
--- a/server/sources/pyrorql.py Tue Apr 13 19:22:46 2010 +0200
+++ b/server/sources/pyrorql.py Tue Apr 13 19:43:51 2010 +0200
@@ -286,7 +286,7 @@
session.set_shared_data('sources_error', msg % self.uri)
return []
try:
- rql, cachekey = RQL2RQL(self).generate(session, union, args)
+ rql = RQL2RQL(self).generate(session, union, args)
except UnknownEid, ex:
if server.DEBUG:
print ' unknown eid', ex, 'no results'
@@ -294,7 +294,7 @@
if server.DEBUG & server.DBG_RQL:
print ' translated rql', rql
try:
- rset = cu.execute(rql, args, cachekey)
+ rset = cu.execute(rql, args)
except Exception, ex:
self.exception(str(ex))
msg = session._("error while querying source %s, some data may be missing")
@@ -396,9 +396,8 @@
def generate(self, session, rqlst, args):
self._session = session
self.kwargs = args
- self.cachekey = []
self.need_translation = False
- return self.visit_union(rqlst), self.cachekey
+ return self.visit_union(rqlst)
def visit_union(self, node):
s = self._accept_children(node)
@@ -547,7 +546,6 @@
# ensure we have not yet translated the value...
if not key in self._const_var:
self.kwargs[key] = self.eid2extid(self.kwargs[key])
- self.cachekey.append(key)
self._const_var[key] = None
return node.as_string()
--- a/server/sources/rql2sql.py Tue Apr 13 19:22:46 2010 +0200
+++ b/server/sources/rql2sql.py Tue Apr 13 19:43:51 2010 +0200
@@ -87,7 +87,7 @@
modified = False
for varname in tuple(unstable):
var = select.defined_vars[varname]
- if not var.stinfo['optrelations']:
+ if not var.stinfo.get('optrelations'):
continue
modified = True
unstable.remove(varname)
@@ -114,13 +114,13 @@
var.stinfo['relations'].remove(rel)
newvar.stinfo['relations'].add(newrel)
if rel.optional in ('left', 'both'):
- newvar.stinfo['optrelations'].add(newrel)
+ newvar.add_optional_relation(newrel)
for vref in newrel.children[1].iget_nodes(VariableRef):
var = vref.variable
var.stinfo['relations'].add(newrel)
var.stinfo['rhsrelations'].add(newrel)
if rel.optional in ('right', 'both'):
- var.stinfo['optrelations'].add(newrel)
+ var.add_optional_relation(newrel)
# extract subquery solutions
mysolutions = [sol.copy() for sol in solutions]
cleanup_solutions(newselect, mysolutions)
@@ -888,7 +888,7 @@
condition = '%s=%s' % (lhssql, rhsconst.accept(self))
if relation.r_type != 'identity':
condition = '(%s OR %s IS NULL)' % (condition, lhssql)
- if not lhsvar.stinfo['optrelations']:
+ if not lhsvar.stinfo.get('optrelations'):
return condition
self.add_outer_join_condition(lhsvar, t1, condition)
return
@@ -987,7 +987,7 @@
sql = '%s%s' % (lhssql, rhssql)
except AttributeError:
sql = '%s%s' % (lhssql, rhssql)
- if lhs.variable.stinfo['optrelations']:
+ if lhs.variable.stinfo.get('optrelations'):
self.add_outer_join_condition(lhs.variable, table, sql)
else:
return sql
@@ -1002,7 +1002,7 @@
lhsvar = lhs.variable
me_is_principal = lhsvar.stinfo.get('principal') is rel
if me_is_principal:
- if not lhsvar.stinfo['typerels']:
+ if lhsvar.stinfo['typerel'] is None:
# the variable is using the fti table, no join needed
jointo = None
elif not lhsvar.name in self._varmap:
@@ -1135,7 +1135,7 @@
vtablename = '_' + variable.name
self.add_table('entities AS %s' % vtablename, vtablename)
sql = '%s.eid' % vtablename
- if variable.stinfo['typerels']:
+ if variable.stinfo['typerel'] is not None:
# add additional restriction on entities.type column
pts = variable.stinfo['possibletypes']
if len(pts) == 1:
@@ -1297,7 +1297,7 @@
tablealias = self._state.outer_tables[table]
actualtables = self._state.actual_tables[-1]
except KeyError:
- for rel in var.stinfo['optrelations']:
+ for rel in var.stinfo.get('optrelations'):
self.visit_relation(rel)
assert self._state.outer_tables
self.add_outer_join_condition(var, table, condition)
--- a/server/test/unittest_ldapuser.py Tue Apr 13 19:22:46 2010 +0200
+++ b/server/test/unittest_ldapuser.py Tue Apr 13 19:43:51 2010 +0200
@@ -176,7 +176,7 @@
rset = self.sexecute('Any U ORDERBY D DESC WHERE WF wf_info_for X,'
'WF creation_date D, WF from_state FS,'
'WF owned_by U?, X eid %(x)s',
- {'x': adim.eid}, 'x')
+ {'x': adim.eid})
self.assertEquals(rset.rows, [[syt.eid]])
finally:
# restore db state
--- a/server/test/unittest_migractions.py Tue Apr 13 19:22:46 2010 +0200
+++ b/server/test/unittest_migractions.py Tue Apr 13 19:43:51 2010 +0200
@@ -108,8 +108,8 @@
testdate = date(2005, 12, 13)
eid1 = self.mh.rqlexec('INSERT Note N')[0][0]
eid2 = self.mh.rqlexec('INSERT Note N: N mydate %(mydate)s', {'mydate' : testdate})[0][0]
- d1 = self.mh.rqlexec('Any D WHERE X eid %(x)s, X mydate D', {'x': eid1}, 'x')[0][0]
- d2 = self.mh.rqlexec('Any D WHERE X eid %(x)s, X mydate D', {'x': eid2}, 'x')[0][0]
+ d1 = self.mh.rqlexec('Any D WHERE X eid %(x)s, X mydate D', {'x': eid1})[0][0]
+ d2 = self.mh.rqlexec('Any D WHERE X eid %(x)s, X mydate D', {'x': eid2})[0][0]
self.assertEquals(d1, date.today())
self.assertEquals(d2, testdate)
self.mh.rollback()
@@ -503,13 +503,13 @@
note = self.execute('INSERT Note X: X para "hip", X shortpara "hop", X newattr "momo"').get_entity(0, 0)
aff = self.execute('INSERT Affaire X').get_entity(0, 0)
self.failUnless(self.execute('SET X newnotinlined Y WHERE X eid %(x)s, Y eid %(y)s',
- {'x': text.eid, 'y': aff.eid}, 'x'))
+ {'x': text.eid, 'y': aff.eid}))
self.failUnless(self.execute('SET X newnotinlined Y WHERE X eid %(x)s, Y eid %(y)s',
- {'x': note.eid, 'y': aff.eid}, 'x'))
+ {'x': note.eid, 'y': aff.eid}))
self.failUnless(self.execute('SET X newinlined Y WHERE X eid %(x)s, Y eid %(y)s',
- {'x': text.eid, 'y': aff.eid}, 'x'))
+ {'x': text.eid, 'y': aff.eid}))
self.failUnless(self.execute('SET X newinlined Y WHERE X eid %(x)s, Y eid %(y)s',
- {'x': note.eid, 'y': aff.eid}, 'x'))
+ {'x': note.eid, 'y': aff.eid}))
# XXX remove specializes by ourselves, else tearDown fails when removing
# Para because of Note inheritance. This could be fixed by putting the
# MemSchemaCWETypeDel(session, name) operation in the
--- a/server/test/unittest_multisources.py Tue Apr 13 19:22:46 2010 +0200
+++ b/server/test/unittest_multisources.py Tue Apr 13 19:43:51 2010 +0200
@@ -109,7 +109,7 @@
self.assertEquals(metainf['type'], 'Card')
self.assert_(metainf['extid'])
etype = self.sexecute('Any ETN WHERE X is ET, ET name ETN, X eid %(x)s',
- {'x': externent.eid}, 'x')[0][0]
+ {'x': externent.eid})[0][0]
self.assertEquals(etype, 'Card')
def test_order_limit_offset(self):
@@ -129,7 +129,7 @@
self.sexecute('INSERT Affaire X: X ref "no readable card"')[0][0]
aff1 = self.sexecute('INSERT Affaire X: X ref "card"')[0][0]
# grant read access
- self.sexecute('SET X owned_by U WHERE X eid %(x)s, U login "anon"', {'x': aff1}, 'x')
+ self.sexecute('SET X owned_by U WHERE X eid %(x)s, U login "anon"', {'x': aff1})
self.commit()
cnx = self.login('anon')
cu = cnx.cursor()
@@ -139,8 +139,8 @@
def test_synchronization(self):
cu = cnx2.cursor()
- assert cu.execute('Any X WHERE X eid %(x)s', {'x': self.aff1}, 'x')
- cu.execute('SET X ref "BLAH" WHERE X eid %(x)s', {'x': self.aff1}, 'x')
+ assert cu.execute('Any X WHERE X eid %(x)s', {'x': self.aff1})
+ cu.execute('SET X ref "BLAH" WHERE X eid %(x)s', {'x': self.aff1})
aff2 = cu.execute('INSERT Affaire X: X ref "AFFREUX"')[0][0]
cnx2.commit()
try:
@@ -155,20 +155,20 @@
self.failIf(rset)
finally:
# restore state
- cu.execute('SET X ref "AFFREF" WHERE X eid %(x)s', {'x': self.aff1}, 'x')
+ cu.execute('SET X ref "AFFREF" WHERE X eid %(x)s', {'x': self.aff1})
cnx2.commit()
def test_simplifiable_var(self):
affeid = self.sexecute('Affaire X WHERE X ref "AFFREF"')[0][0]
rset = self.sexecute('Any X,AA,AB WHERE E eid %(x)s, E in_state X, X name AA, X modification_date AB',
- {'x': affeid}, 'x')
+ {'x': affeid})
self.assertEquals(len(rset), 1)
self.assertEquals(rset[0][1], "pitetre")
def test_simplifiable_var_2(self):
affeid = self.sexecute('Affaire X WHERE X ref "AFFREF"')[0][0]
rset = self.sexecute('Any E WHERE E eid %(x)s, E in_state S, NOT S name "moved"',
- {'x': affeid, 'u': self.session.user.eid}, 'x')
+ {'x': affeid, 'u': self.session.user.eid})
self.assertEquals(len(rset), 1)
def test_sort_func(self):
@@ -216,7 +216,7 @@
rset = self.sexecute('Any X,Y WHERE X is Card, Y is Affaire, X title T, Y ref T')
self.assertEquals(len(rset), 2, rset.rows)
finally:
- cu.execute('DELETE Card X WHERE X eid %(x)s', {'x': ec2}, 'x')
+ cu.execute('DELETE Card X WHERE X eid %(x)s', {'x': ec2})
cnx2.commit()
def test_attr_unification_neq_1(self):
@@ -258,15 +258,15 @@
userstate = self.session.user.in_state[0]
states.remove((userstate.eid, userstate.name))
notstates = set(tuple(x) for x in self.sexecute('Any S,SN WHERE S is State, S name SN, NOT X in_state S, X eid %(x)s',
- {'x': self.session.user.eid}, 'x'))
+ {'x': self.session.user.eid}))
self.assertSetEquals(notstates, states)
aff1 = self.sexecute('Any X WHERE X is Affaire, X ref "AFFREF"')[0][0]
- aff1stateeid, aff1statename = self.sexecute('Any S,SN WHERE X eid %(x)s, X in_state S, S name SN', {'x': aff1}, 'x')[0]
+ aff1stateeid, aff1statename = self.sexecute('Any S,SN WHERE X eid %(x)s, X in_state S, S name SN', {'x': aff1})[0]
self.assertEquals(aff1statename, 'pitetre')
states.add((userstate.eid, userstate.name))
states.remove((aff1stateeid, aff1statename))
notstates = set(tuple(x) for x in self.sexecute('Any S,SN WHERE S is State, S name SN, NOT X in_state S, X eid %(x)s',
- {'x': aff1}, 'x'))
+ {'x': aff1}))
self.assertSetEquals(notstates, states)
def test_absolute_url_base_url(self):
--- a/server/test/unittest_querier.py Tue Apr 13 19:22:46 2010 +0200
+++ b/server/test/unittest_querier.py Tue Apr 13 19:43:51 2010 +0200
@@ -220,13 +220,13 @@
def test_typed_eid(self):
# should return an empty result set
- rset = self.execute('Any X WHERE X eid %(x)s', {'x': '1'}, 'x')
+ rset = self.execute('Any X WHERE X eid %(x)s', {'x': '1'})
self.assertIsInstance(rset[0][0], (int, long))
def test_bytes_storage(self):
feid = self.execute('INSERT File X: X data_name "foo.pdf", X data_format "text/plain", X data %(data)s',
{'data': Binary("xxx")})[0][0]
- fdata = self.execute('Any D WHERE X data D, X eid %(x)s', {'x': feid}, 'x')[0][0]
+ fdata = self.execute('Any D WHERE X data D, X eid %(x)s', {'x': feid})[0][0]
self.assertIsInstance(fdata, Binary)
self.assertEquals(fdata.getvalue(), 'xxx')
@@ -356,17 +356,17 @@
def test_select_outer_join_optimized(self):
peid1 = self.execute("INSERT Personne X: X nom 'bidule'")[0][0]
- rset = self.execute('Any X WHERE X eid %(x)s, P? connait X', {'x':peid1}, 'x')
+ rset = self.execute('Any X WHERE X eid %(x)s, P? connait X', {'x':peid1})
self.assertEquals(rset.rows, [[peid1]])
rset = self.execute('Any X WHERE X eid %(x)s, X require_permission P?',
- {'x':peid1}, 'x')
+ {'x':peid1})
self.assertEquals(rset.rows, [[peid1]])
def test_select_left_outer_join(self):
rset = self.execute('DISTINCT Any G WHERE U? in_group G')
self.assertEquals(len(rset), 4)
rset = self.execute('DISTINCT Any G WHERE U? in_group G, U eid %(x)s',
- {'x': self.session.user.eid}, 'x')
+ {'x': self.session.user.eid})
self.assertEquals(len(rset), 4)
def test_select_ambigous_outer_join(self):
@@ -374,7 +374,7 @@
self.execute("INSERT Tag X: X name 'tagbis'")[0][0]
geid = self.execute("CWGroup G WHERE G name 'users'")[0][0]
self.execute("SET X tags Y WHERE X eid %(t)s, Y eid %(g)s",
- {'g': geid, 't': teid}, 'g')
+ {'g': geid, 't': teid})
rset = self.execute("Any GN,TN ORDERBY GN WHERE T? tags G, T name TN, G name GN")
self.failUnless(['users', 'tag'] in rset.rows)
self.failUnless(['activated', None] in rset.rows)
@@ -882,7 +882,7 @@
def test_insert_5bis(self):
peid = self.execute("INSERT Personne X: X nom 'bidule'")[0][0]
self.execute("INSERT Societe Y: Y nom 'toto', X travaille Y WHERE X eid %(x)s",
- {'x': peid}, 'x')
+ {'x': peid})
rset = self.execute('Any X, Y WHERE X nom "bidule", Y nom "toto", X travaille Y')
self.assert_(rset.rows)
self.assertEquals(rset.description, [('Personne', 'Societe',)])
@@ -1000,17 +1000,17 @@
eid = self.execute("INSERT Folder T: T name 'toto'")[0][0]
self.commit()
# fill the cache
- self.execute("Any X WHERE X eid %(x)s", {'x': eid}, 'x')
+ self.execute("Any X WHERE X eid %(x)s", {'x': eid})
self.execute("Any X WHERE X eid %s" %eid)
- self.execute("Folder X WHERE X eid %(x)s", {'x': eid}, 'x')
+ self.execute("Folder X WHERE X eid %(x)s", {'x': eid})
self.execute("Folder X WHERE X eid %s" %eid)
self.execute("DELETE Folder T WHERE T eid %s"%eid)
self.commit()
- rset = self.execute("Any X WHERE X eid %(x)s", {'x': eid}, 'x')
+ rset = self.execute("Any X WHERE X eid %(x)s", {'x': eid})
self.assertEquals(rset.rows, [])
rset = self.execute("Any X WHERE X eid %s" %eid)
self.assertEquals(rset.rows, [])
- rset = self.execute("Folder X WHERE X eid %(x)s", {'x': eid}, 'x')
+ rset = self.execute("Folder X WHERE X eid %(x)s", {'x': eid})
self.assertEquals(rset.rows, [])
rset = self.execute("Folder X WHERE X eid %s" %eid)
self.assertEquals(rset.rows, [])
@@ -1086,7 +1086,7 @@
def test_update_string_concat(self):
beid = self.execute("INSERT Bookmark Y: Y title 'toto', Y path '/view'")[0][0]
self.execute('SET X title XN + %(suffix)s WHERE X is Bookmark, X title XN', {'suffix': u'-moved'})
- newname = self.execute('Any XN WHERE X eid %(x)s, X title XN', {'x': beid}, 'x')[0][0]
+ newname = self.execute('Any XN WHERE X eid %(x)s, X title XN', {'x': beid})[0][0]
self.assertEquals(newname, 'toto-moved')
def test_update_query_error(self):
@@ -1203,7 +1203,7 @@
'creation_date': '2000/07/03 11:00'})
rset = self.execute('Any lower(N) ORDERBY LOWER(N) WHERE X is Tag, X name N,'
'X owned_by U, U eid %(x)s',
- {'x':self.session.user.eid}, 'x')
+ {'x':self.session.user.eid})
self.assertEquals(rset.rows, [[u'\xe9name0']])
@@ -1286,7 +1286,7 @@
ueid = self.execute("INSERT CWUser X: X login 'bob', X upassword 'toto'")[0][0]
self.execute("SET E in_group G, E firstname %(firstname)s, E surname %(surname)s "
"WHERE E eid %(x)s, G name 'users'",
- {'x':ueid, 'firstname': u'jean', 'surname': u'paul'}, 'x')
+ {'x':ueid, 'firstname': u'jean', 'surname': u'paul'})
def test_nonregr_u_owned_by_u(self):
ueid = self.execute("INSERT CWUser X: X login 'bob', X upassword 'toto', X in_group G "
--- a/server/test/unittest_repository.py Tue Apr 13 19:22:46 2010 +0200
+++ b/server/test/unittest_repository.py Tue Apr 13 19:43:51 2010 +0200
@@ -478,7 +478,7 @@
        # our sqlite datetime adapter ignores the seconds fraction, so we have to
        # ensure the update is done in the next second
time.sleep(1 - (ts.second - int(ts.second)))
- self.execute('SET X nom "tata" WHERE X eid %(x)s', {'x': eidp}, 'x')
+ self.execute('SET X nom "tata" WHERE X eid %(x)s', {'x': eidp})
self.commit()
self.assertEquals(len(self.execute('Personne X WHERE X has_text "tutu"')), 1)
self.session.set_pool()
--- a/server/test/unittest_rql2sql.py Tue Apr 13 19:22:46 2010 +0200
+++ b/server/test/unittest_rql2sql.py Tue Apr 13 19:43:51 2010 +0200
@@ -1209,6 +1209,10 @@
'''SELECT CAST(EXTRACT(MONTH from _P.cw_creation_date) AS INTEGER)
FROM cw_Personne AS _P''')
+ def test_substring(self):
+ self._check("Any SUBSTRING(N, 1, 1) WHERE P nom N, P is Personne",
+ '''SELECT SUBSTR(_P.cw_nom, 1, 1)
+FROM cw_Personne AS _P''')
def test_parser_parse(self):
for t in self._parse(PARSER):
@@ -1601,12 +1605,16 @@
WHERE rel_concerne0.eid_from=_A.cw_eid AND rel_concerne0.eid_to=_N.cw_eid
GROUP BY _A.cw_eid,rel_todo_by1.eid_to,rel_todo_by3.eid_to''')
+ def test_substring(self):
+ self._check("Any SUBSTRING(N, 1, 1) WHERE P nom N, P is Personne",
+ '''SELECT SUBSTRING(_P.cw_nom, 1, 1)
+FROM cw_Personne AS _P''')
class removeUnsusedSolutionsTC(TestCase):
def test_invariant_not_varying(self):
rqlst = mock_object(defined_vars={})
- rqlst.defined_vars['A'] = mock_object(scope=rqlst, stinfo={'optrelations':False}, _q_invariant=True)
- rqlst.defined_vars['B'] = mock_object(scope=rqlst, stinfo={'optrelations':False}, _q_invariant=False)
+ rqlst.defined_vars['A'] = mock_object(scope=rqlst, stinfo={}, _q_invariant=True)
+ rqlst.defined_vars['B'] = mock_object(scope=rqlst, stinfo={}, _q_invariant=False)
self.assertEquals(remove_unused_solutions(rqlst, [{'A': 'RugbyGroup', 'B': 'RugbyTeam'},
{'A': 'FootGroup', 'B': 'FootTeam'}], {}, None),
([{'A': 'RugbyGroup', 'B': 'RugbyTeam'},
@@ -1616,8 +1624,8 @@
def test_invariant_varying(self):
rqlst = mock_object(defined_vars={})
- rqlst.defined_vars['A'] = mock_object(scope=rqlst, stinfo={'optrelations':False}, _q_invariant=True)
- rqlst.defined_vars['B'] = mock_object(scope=rqlst, stinfo={'optrelations':False}, _q_invariant=False)
+ rqlst.defined_vars['A'] = mock_object(scope=rqlst, stinfo={}, _q_invariant=True)
+ rqlst.defined_vars['B'] = mock_object(scope=rqlst, stinfo={}, _q_invariant=False)
self.assertEquals(remove_unused_solutions(rqlst, [{'A': 'RugbyGroup', 'B': 'RugbyTeam'},
{'A': 'FootGroup', 'B': 'RugbyTeam'}], {}, None),
([{'A': 'RugbyGroup', 'B': 'RugbyTeam'}], {}, set())
--- a/server/test/unittest_rqlannotation.py Tue Apr 13 19:22:46 2010 +0200
+++ b/server/test/unittest_rqlannotation.py Tue Apr 13 19:43:51 2010 +0200
@@ -100,6 +100,12 @@
self.assertEquals(rqlst.defined_vars['X']._q_invariant, False)
self.assertEquals(rqlst.defined_vars['Y']._q_invariant, False)
+ def test_8(self):
+ # DISTINCT Any P WHERE P require_group %(g)s, NOT %(u)s has_group_permission P, P is CWPermission
+ rqlst = self._prepare('DISTINCT Any X WHERE A concerne X, NOT N migrated_from X, '
+ 'X is Note, N eid 1')
+ self.assertEquals(rqlst.defined_vars['X']._q_invariant, False)
+
def test_diff_scope_identity_deamb(self):
rqlst = self._prepare('Any X WHERE X concerne Y, Y is Note, EXISTS(Y identity Z, Z migrated_from N)')
self.assertEquals(rqlst.defined_vars['Z']._q_invariant, True)
--- a/server/test/unittest_security.py Tue Apr 13 19:22:46 2010 +0200
+++ b/server/test/unittest_security.py Tue Apr 13 19:43:51 2010 +0200
@@ -197,7 +197,7 @@
# to actually get Unauthorized exception, try to delete a relation we can read
self.restore_connection()
eid = self.execute("INSERT Affaire X: X sujet 'pascool'")[0][0]
- self.execute('SET X owned_by U WHERE X eid %(x)s, U login "iaminusersgrouponly"', {'x': eid}, 'x')
+ self.execute('SET X owned_by U WHERE X eid %(x)s, U login "iaminusersgrouponly"', {'x': eid})
self.execute("SET A concerne S WHERE A sujet 'pascool', S is Societe")
self.commit()
cnx = self.login('iaminusersgrouponly')
@@ -214,7 +214,7 @@
cnx = self.login('user')
cu = cnx.cursor()
cu.execute('SET X upassword %(passwd)s WHERE X eid %(x)s',
- {'x': ueid, 'passwd': 'newpwd'}, 'x')
+ {'x': ueid, 'passwd': 'newpwd'})
cnx.commit()
cnx.close()
cnx = self.login('user', password='newpwd')
@@ -224,7 +224,7 @@
cnx = self.login('iaminusersgrouponly')
cu = cnx.cursor()
cu.execute('SET X upassword %(passwd)s WHERE X eid %(x)s',
- {'x': ueid, 'passwd': 'newpwd'}, 'x')
+ {'x': ueid, 'passwd': 'newpwd'})
self.assertRaises(Unauthorized, cnx.commit)
# read security test
@@ -243,22 +243,22 @@
cu = cnx.cursor()
rset = cu.execute('Affaire X')
self.assertEquals(rset.rows, [])
- self.assertRaises(Unauthorized, cu.execute, 'Any X WHERE X eid %(x)s', {'x': eid}, 'x')
+ self.assertRaises(Unauthorized, cu.execute, 'Any X WHERE X eid %(x)s', {'x': eid})
# cache test
- self.assertRaises(Unauthorized, cu.execute, 'Any X WHERE X eid %(x)s', {'x': eid}, 'x')
+ self.assertRaises(Unauthorized, cu.execute, 'Any X WHERE X eid %(x)s', {'x': eid})
aff2 = cu.execute("INSERT Affaire X: X sujet 'cool'")[0][0]
soc1 = cu.execute("INSERT Societe X: X nom 'chouette'")[0][0]
cu.execute("SET A concerne S WHERE A is Affaire, S is Societe")
cnx.commit()
- rset = cu.execute('Any X WHERE X eid %(x)s', {'x': aff2}, 'x')
+ rset = cu.execute('Any X WHERE X eid %(x)s', {'x': aff2})
self.assertEquals(rset.rows, [[aff2]])
# more cache test w/ NOT eid
- rset = cu.execute('Affaire X WHERE NOT X eid %(x)s', {'x': eid}, 'x')
+ rset = cu.execute('Affaire X WHERE NOT X eid %(x)s', {'x': eid})
self.assertEquals(rset.rows, [[aff2]])
- rset = cu.execute('Affaire X WHERE NOT X eid %(x)s', {'x': aff2}, 'x')
+ rset = cu.execute('Affaire X WHERE NOT X eid %(x)s', {'x': aff2})
self.assertEquals(rset.rows, [])
            # test that we can't update an attribute of an entity that can't be read
- self.assertRaises(Unauthorized, cu.execute, 'SET X sujet "hacked" WHERE X eid %(x)s', {'x': eid}, 'x')
+ self.assertRaises(Unauthorized, cu.execute, 'SET X sujet "hacked" WHERE X eid %(x)s', {'x': eid})
def test_entity_created_in_transaction(self):
@@ -270,7 +270,7 @@
cu = cnx.cursor()
aff2 = cu.execute("INSERT Affaire X: X sujet 'cool'")[0][0]
# entity created in transaction are readable *by eid*
- self.failUnless(cu.execute('Any X WHERE X eid %(x)s', {'x':aff2}, 'x'))
+ self.failUnless(cu.execute('Any X WHERE X eid %(x)s', {'x':aff2}))
# XXX would be nice if it worked
rset = cu.execute("Affaire X WHERE X sujet 'cool'")
self.assertEquals(len(rset), 0)
@@ -281,18 +281,17 @@
def test_read_erqlexpr_has_text1(self):
aff1 = self.execute("INSERT Affaire X: X sujet 'cool'")[0][0]
card1 = self.execute("INSERT Card X: X title 'cool'")[0][0]
- self.execute('SET X owned_by U WHERE X eid %(x)s, U login "iaminusersgrouponly"', {'x': card1}, 'x')
+ self.execute('SET X owned_by U WHERE X eid %(x)s, U login "iaminusersgrouponly"', {'x': card1})
self.commit()
cnx = self.login('iaminusersgrouponly')
cu = cnx.cursor()
aff2 = cu.execute("INSERT Affaire X: X sujet 'cool'")[0][0]
soc1 = cu.execute("INSERT Societe X: X nom 'chouette'")[0][0]
- cu.execute("SET A concerne S WHERE A eid %(a)s, S eid %(s)s", {'a': aff2, 's': soc1},
- ('a', 's'))
+ cu.execute("SET A concerne S WHERE A eid %(a)s, S eid %(s)s", {'a': aff2, 's': soc1})
cnx.commit()
- self.assertRaises(Unauthorized, cu.execute, 'Any X WHERE X eid %(x)s', {'x':aff1}, 'x')
- self.failUnless(cu.execute('Any X WHERE X eid %(x)s', {'x':aff2}, 'x'))
- self.failUnless(cu.execute('Any X WHERE X eid %(x)s', {'x':card1}, 'x'))
+ self.assertRaises(Unauthorized, cu.execute, 'Any X WHERE X eid %(x)s', {'x':aff1})
+ self.failUnless(cu.execute('Any X WHERE X eid %(x)s', {'x':aff2}))
+ self.failUnless(cu.execute('Any X WHERE X eid %(x)s', {'x':card1}))
rset = cu.execute("Any X WHERE X has_text 'cool'")
self.assertEquals(sorted(eid for eid, in rset.rows),
[card1, aff2])
@@ -347,7 +346,7 @@
# only managers should be able to edit the 'test' attribute of Personne entities
eid = self.execute("INSERT Personne X: X nom 'bidule', X web 'http://www.debian.org', X test TRUE")[0][0]
self.commit()
- self.execute('SET X test FALSE WHERE X eid %(x)s', {'x': eid}, 'x')
+ self.execute('SET X test FALSE WHERE X eid %(x)s', {'x': eid})
self.commit()
cnx = self.login('iaminusersgrouponly')
cu = cnx.cursor()
@@ -357,11 +356,11 @@
self.assertRaises(Unauthorized, cnx.commit)
eid = cu.execute("INSERT Personne X: X nom 'bidule', X web 'http://www.debian.org'")[0][0]
cnx.commit()
- cu.execute('SET X test FALSE WHERE X eid %(x)s', {'x': eid}, 'x')
+ cu.execute('SET X test FALSE WHERE X eid %(x)s', {'x': eid})
self.assertRaises(Unauthorized, cnx.commit)
- cu.execute('SET X test TRUE WHERE X eid %(x)s', {'x': eid}, 'x')
+ cu.execute('SET X test TRUE WHERE X eid %(x)s', {'x': eid})
self.assertRaises(Unauthorized, cnx.commit)
- cu.execute('SET X web "http://www.logilab.org" WHERE X eid %(x)s', {'x': eid}, 'x')
+ cu.execute('SET X web "http://www.logilab.org" WHERE X eid %(x)s', {'x': eid})
cnx.commit()
cnx.close()
@@ -370,23 +369,23 @@
note = self.execute("INSERT Note X: X para 'bidule'").get_entity(0, 0)
self.commit()
note.fire_transition('markasdone')
- self.execute('SET X para "truc" WHERE X eid %(x)s', {'x': note.eid}, 'x')
+ self.execute('SET X para "truc" WHERE X eid %(x)s', {'x': note.eid})
self.commit()
cnx = self.login('iaminusersgrouponly')
cu = cnx.cursor()
- cu.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note.eid}, 'x')
+ cu.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note.eid})
self.assertRaises(Unauthorized, cnx.commit)
note2 = cu.execute("INSERT Note X: X para 'bidule'").get_entity(0, 0)
cnx.commit()
note2.fire_transition('markasdone')
cnx.commit()
- self.assertEquals(len(cu.execute('Any X WHERE X in_state S, S name "todo", X eid %(x)s', {'x': note2.eid}, 'x')),
+ self.assertEquals(len(cu.execute('Any X WHERE X in_state S, S name "todo", X eid %(x)s', {'x': note2.eid})),
0)
- cu.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note2.eid}, 'x')
+ cu.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note2.eid})
self.assertRaises(Unauthorized, cnx.commit)
note2.fire_transition('redoit')
cnx.commit()
- cu.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note2.eid}, 'x')
+ cu.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note2.eid})
cnx.commit()
def test_attribute_read_security(self):
@@ -447,13 +446,13 @@
# should only be able to read the anonymous user, not another one
origuser = self.adminsession.user
self.assertRaises(Unauthorized,
- cu.execute, 'CWUser X WHERE X eid %(x)s', {'x': origuser.eid}, 'x')
+ cu.execute, 'CWUser X WHERE X eid %(x)s', {'x': origuser.eid})
# nothing selected, nothing updated, no exception raised
#self.assertRaises(Unauthorized,
# cu.execute, 'SET X login "toto" WHERE X eid %(x)s',
# {'x': self.user.eid})
- rset = cu.execute('CWUser X WHERE X eid %(x)s', {'x': anon.eid}, 'x')
+ rset = cu.execute('CWUser X WHERE X eid %(x)s', {'x': anon.eid})
self.assertEquals(rset.rows, [[anon.eid]])
# but can't modify it
cu.execute('SET X login "toto" WHERE X eid %(x)s', {'x': anon.eid})
@@ -494,7 +493,7 @@
self.assertRaises(Unauthorized, cu.execute,'DELETE B bookmarked_by U')
self.assertRaises(Unauthorized,
cu.execute, 'SET B bookmarked_by U WHERE U eid %(x)s, B eid %(b)s',
- {'x': anoneid, 'b': beid1}, 'x')
+ {'x': anoneid, 'b': beid1})
def test_ambigous_ordered(self):
@@ -551,10 +550,10 @@
aff.clear_related_cache('wf_info_for', role='object')
self.assertRaises(Unauthorized,
self.execute, 'SET TI from_state S WHERE TI eid %(ti)s, S name "ben non"',
- {'ti': trinfo.eid}, 'ti')
+ {'ti': trinfo.eid})
self.assertRaises(Unauthorized,
self.execute, 'SET TI to_state S WHERE TI eid %(ti)s, S name "pitetre"',
- {'ti': trinfo.eid}, 'ti')
+ {'ti': trinfo.eid})
if __name__ == '__main__':
unittest_main()
--- a/server/test/unittest_storage.py Tue Apr 13 19:22:46 2010 +0200
+++ b/server/test/unittest_storage.py Tue Apr 13 19:43:51 2010 +0200
@@ -131,14 +131,14 @@
' (Any D, X WHERE X eid %(x)s, X data D)'
' UNION '
' (Any D, X WHERE X eid %(x)s, X data D)'
- ')', {'x': f1.eid}, 'x')
+ ')', {'x': f1.eid})
self.assertEquals(len(rset), 2)
self.assertEquals(rset[0][0], f1.eid)
self.assertEquals(rset[1][0], f1.eid)
self.assertEquals(rset[0][1].getvalue(), 'the-data')
self.assertEquals(rset[1][1].getvalue(), 'the-data')
rset = self.execute('Any X,LENGTH(D) WHERE X eid %(x)s, X data D',
- {'x': f1.eid}, 'x')
+ {'x': f1.eid})
self.assertEquals(len(rset), 1)
self.assertEquals(rset[0][0], f1.eid)
self.assertEquals(rset[0][1], len('the-data'))
@@ -146,7 +146,7 @@
' (Any D, X WHERE X eid %(x)s, X data D)'
' UNION '
' (Any D, X WHERE X eid %(x)s, X data D)'
- ')', {'x': f1.eid}, 'x')
+ ')', {'x': f1.eid})
self.assertEquals(len(rset), 2)
self.assertEquals(rset[0][0], f1.eid)
self.assertEquals(rset[1][0], f1.eid)
@@ -154,7 +154,7 @@
self.assertEquals(rset[1][1], len('the-data'))
ex = self.assertRaises(QueryError, self.execute,
'Any X,UPPER(D) WHERE X eid %(x)s, X data D',
- {'x': f1.eid}, 'x')
+ {'x': f1.eid})
self.assertEquals(str(ex), 'UPPER can not be called on mapped attribute')
@@ -177,7 +177,7 @@
{'d': Binary('some other data'), 'f': f1.eid})
self.assertEquals(f1.data.getvalue(), 'some other data')
self.commit()
- f2 = self.entity('Any F WHERE F eid %(f)s, F is File', {'f': f1.eid})
+ f2 = self.execute('Any F WHERE F eid %(f)s, F is File', {'f': f1.eid}).get_entity(0, 0)
self.assertEquals(f2.data.getvalue(), 'some other data')
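
The last hunk above also shows the other recurring test rewrite: the self.entity(rql, args) shortcut is replaced by executing the query and materializing one row with get_entity(). A sketch of the replacement pattern, in the same assumed test context (f1 stands for an entity created earlier in the test):

    # before: f2 = self.entity('Any F WHERE F eid %(f)s, F is File', {'f': f1.eid})
    # after:  fetch the result set, then turn row 0 / column 0 into an entity
    rset = self.execute('Any F WHERE F eid %(f)s, F is File', {'f': f1.eid})
    f2 = rset.get_entity(0, 0)
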
--- a/server/test/unittest_undo.py Tue Apr 13 19:22:46 2010 +0200
+++ b/server/test/unittest_undo.py Tue Apr 13 19:43:51 2010 +0200
@@ -144,8 +144,8 @@
undotxuuid = self.commit()
self.assertEquals(undotxuuid, None) # undo not undoable
self.assertEquals(errors, [])
- self.failUnless(self.execute('Any X WHERE X eid %(x)s', {'x': toto.eid}, 'x'))
- self.failUnless(self.execute('Any X WHERE X eid %(x)s', {'x': e.eid}, 'x'))
+ self.failUnless(self.execute('Any X WHERE X eid %(x)s', {'x': toto.eid}))
+ self.failUnless(self.execute('Any X WHERE X eid %(x)s', {'x': e.eid}))
self.failUnless(self.execute('Any X WHERE X has_text "toto@logilab"'))
self.assertEquals(toto.state, 'activated')
self.assertEquals(toto.get_email(), 'toto@logilab.org')
@@ -216,8 +216,8 @@
errors = self.cnx.undo_transaction(txuuid)
self.commit()
self.failIf(errors)
- self.failIf(self.execute('Any X WHERE X eid %(x)s', {'x': c.eid}, 'x'))
- self.failIf(self.execute('Any X WHERE X eid %(x)s', {'x': p.eid}, 'x'))
+ self.failIf(self.execute('Any X WHERE X eid %(x)s', {'x': c.eid}))
+ self.failIf(self.execute('Any X WHERE X eid %(x)s', {'x': p.eid}))
self.failIf(self.execute('Any X,Y WHERE X fiche Y'))
self.session.set_pool()
for eid in (p.eid, c.eid):
--- a/setup.py Tue Apr 13 19:22:46 2010 +0200
+++ b/setup.py Tue Apr 13 19:43:51 2010 +0200
@@ -24,38 +24,44 @@
import os
import sys
import shutil
-from distutils.core import setup
-from distutils.command import install_lib
from os.path import isdir, exists, join, walk
+try:
+ if os.environ.get('NO_SETUPTOOLS'):
+        raise ImportError() # act as if setuptools were not available
+ from setuptools import setup
+ from setuptools.command import install_lib
+ USE_SETUPTOOLS = True
+except ImportError:
+ from distutils.core import setup
+ from distutils.command import install_lib
+ USE_SETUPTOOLS = False
+
# import required features
-from __pkginfo__ import modname, version, license, short_desc, long_desc, \
- web, author, author_email
+from __pkginfo__ import modname, version, license, description, web, \
+ author, author_email
+
+if exists('README'):
+ long_description = file('README').read()
+
# import optional features
-try:
- from __pkginfo__ import distname
-except ImportError:
- distname = modname
-try:
- from __pkginfo__ import scripts
-except ImportError:
- scripts = []
-try:
- from __pkginfo__ import data_files
-except ImportError:
- data_files = None
-try:
- from __pkginfo__ import subpackage_of
-except ImportError:
- subpackage_of = None
-try:
- from __pkginfo__ import include_dirs
-except ImportError:
- include_dirs = []
-try:
- from __pkginfo__ import ext_modules
-except ImportError:
- ext_modules = None
+import __pkginfo__
+if USE_SETUPTOOLS:
+ requires = {}
+ for entry in ("__depends__", "__recommends__"):
+ requires.update(getattr(__pkginfo__, entry, {}))
+ install_requires = [("%s %s" % (d, v and v or "")).strip()
+ for d, v in requires.iteritems()]
+else:
+ install_requires = []
+
+distname = getattr(__pkginfo__, 'distname', modname)
+scripts = getattr(__pkginfo__, 'scripts', ())
+include_dirs = getattr(__pkginfo__, 'include_dirs', ())
+data_files = getattr(__pkginfo__, 'data_files', None)
+subpackage_of = getattr(__pkginfo__, 'subpackage_of', None)
+ext_modules = getattr(__pkginfo__, 'ext_modules', None)
+
BASE_BLACKLIST = ('CVS', 'debian', 'dist', 'build', '__buildlog')
IGNORED_EXTENSIONS = ('.pyc', '.pyo', '.elc')
@@ -92,7 +98,8 @@
def export(from_dir, to_dir,
blacklist=BASE_BLACKLIST,
- ignore_ext=IGNORED_EXTENSIONS):
+ ignore_ext=IGNORED_EXTENSIONS,
+ verbose=True):
"""make a mirror of from_dir in to_dir, omitting directories and files
listed in the black list
"""
@@ -111,7 +118,8 @@
continue
src = '%s/%s' % (directory, filename)
dest = to_dir + src[len(from_dir):]
- print >> sys.stderr, src, '->', dest
+ if verbose:
+ print >> sys.stderr, src, '->', dest
if os.path.isdir(src):
if not exists(dest):
os.mkdir(dest)
@@ -154,29 +162,31 @@
base = modname
for directory in include_dirs:
dest = join(self.install_dir, base, directory)
- export(directory, dest)
+ export(directory, dest, verbose=False)
def install(**kwargs):
"""setup entry point"""
+ if not USE_SETUPTOOLS and '--install-layout=deb' in sys.argv and \
+       sys.version_info < (2, 5, 4):
+ sys.argv.remove('--install-layout=deb')
+ print "W: remove '--install-layout=deb' option"
if subpackage_of:
package = subpackage_of + '.' + modname
kwargs['package_dir'] = {package : '.'}
packages = [package] + get_packages(os.getcwd(), package)
+ if USE_SETUPTOOLS:
+ kwargs['namespace_packages'] = [subpackage_of]
else:
kwargs['package_dir'] = {modname : '.'}
packages = [modname] + get_packages(os.getcwd(), modname)
kwargs['packages'] = packages
- return setup(name = distname,
- version = version,
- license =license,
- description = short_desc,
- long_description = long_desc,
- author = author,
- author_email = author_email,
- url = web,
- scripts = ensure_scripts(scripts),
- data_files=data_files,
+ return setup(name=distname, version=version, license=license, url=web,
+ description=description, long_description=long_description,
+ author=author, author_email=author_email,
+ scripts=ensure_scripts(scripts), data_files=data_files,
ext_modules=ext_modules,
+ install_requires=install_requires,
+ #dependency_links=["http://alain:alain@intranet.logilab.fr/~alain/"],
cmdclass={'install_lib': MyInstallLib},
**kwargs
)
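
When setuptools is usable, the rewritten setup.py (and its skeleton twin further down) turns the __pkginfo__ dependency dicts into setuptools requirement strings. A condensed, Python 2 style sketch of that conversion; the package names and versions are illustrative, not taken from the real __pkginfo__:

    class __pkginfo__:                  # stand-in for the imported module
        __depends__ = {'somepkg': '', 'otherpkg': '>= 1.0'}
        __recommends__ = {'extrapkg': ''}

    requires = {}
    for entry in ("__depends__", "__recommends__"):
        requires.update(getattr(__pkginfo__, entry, {}))
    # e.g. -> ['somepkg', 'otherpkg >= 1.0', 'extrapkg'] (order not guaranteed)
    install_requires = [("%s %s" % (d, v or "")).strip()
                        for d, v in requires.iteritems()]
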
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/skeleton/README.tmpl Tue Apr 13 19:43:51 2010 +0200
@@ -0,0 +1,3 @@
+Summary
+-------
+%(longdesc)s
--- a/skeleton/__pkginfo__.py.tmpl Tue Apr 13 19:22:46 2010 +0200
+++ b/skeleton/__pkginfo__.py.tmpl Tue Apr 13 19:43:51 2010 +0200
@@ -7,15 +7,12 @@
numversion = (0, 1, 0)
version = '.'.join(str(num) for num in numversion)
-license = 'LCL'
-copyright = '''Copyright (c) %(year)s %(author)s.
-%(author-web-site)s -- mailto:%(author-email)s'''
+license = '%(license)s'
author = '%(author)s'
author_email = '%(author-email)s'
-short_desc = '%(shortdesc)s'
-long_desc = '''%(longdesc)s'''
+description = '%(shortdesc)s'
web = 'http://www.cubicweb.org/project/%%s' %% distname
@@ -43,8 +40,6 @@
# Note: here, you'll need to add subdirectories if you want
# them to be included in the debian package
-__depends_cubes__ = {}
-__depends__ = {'cubicweb': '>= 3.6.0'}
-__use__ = (%(dependancies)s)
-__recommend__ = ()
+__depends__ = {'cubicweb': '>= %(version)s'}
+__recommends__ = {}
--- a/skeleton/data/cubes.CUBENAME.css Tue Apr 13 19:22:46 2010 +0200
+++ b/skeleton/data/cubes.CUBENAME.css Tue Apr 13 19:43:51 2010 +0200
@@ -1,1 +1,1 @@
-/* template specific CSS */
+/* cube-specific CSS */
--- a/skeleton/data/cubes.CUBENAME.js Tue Apr 13 19:22:46 2010 +0200
+++ b/skeleton/data/cubes.CUBENAME.js Tue Apr 13 19:43:51 2010 +0200
@@ -1,1 +1,1 @@
-// This contains template-specific javascript
\ No newline at end of file
+// This contains cube-specific javascript
\ No newline at end of file
--- a/skeleton/entities.py Tue Apr 13 19:22:46 2010 +0200
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,7 +0,0 @@
-"""this contains the cube-specific entities' classes
-
-:organization: Logilab
-:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
-:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
-:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
-"""
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/skeleton/entities.py.tmpl Tue Apr 13 19:43:51 2010 +0200
@@ -0,0 +1,7 @@
+"""%(distname)s entity's classes
+
+:organization: %(author)s
+:copyright: %(year)s %(author)s
+:contact: %(author-web-site)s -- mailto:%(author-email)s
+:license: %(long-license)s
+"""
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/skeleton/hooks.py.tmpl Tue Apr 13 19:43:51 2010 +0200
@@ -0,0 +1,7 @@
+"""%(distname)s specific hooks and operations
+
+:organization: %(author)s
+:copyright: %(year)s %(author)s
+:contact: %(author-web-site)s -- mailto:%(author-email)s
+:license: %(long-license)s
+"""
--- a/skeleton/migration/postcreate.py Tue Apr 13 19:22:46 2010 +0200
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,12 +0,0 @@
-# postcreate script. You could setup site properties or a workflow here for example
-"""
-
-:organization: Logilab
-:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
-:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
-:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
-"""
-
-# Example of site property change
-#set_property('ui.site-title', "<sitename>")
-
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/skeleton/migration/postcreate.py.tmpl Tue Apr 13 19:43:51 2010 +0200
@@ -0,0 +1,14 @@
+"""%(distname)s postcreate script, executed at instance creation time or when
+the cube is added to an existing instance.
+
+You could setup site properties or a workflow here for example.
+
+:organization: %(author)s
+:copyright: %(year)s %(author)s
+:contact: %(author-web-site)s -- mailto:%(author-email)s
+:license: %(long-license)s
+"""
+
+# Example of site property change
+#set_property('ui.site-title', "<sitename>")
+
--- a/skeleton/migration/precreate.py Tue Apr 13 19:22:46 2010 +0200
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,10 +0,0 @@
-# Instructions here will be read before reading the schema
-"""
-
-:organization: Logilab
-:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
-:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
-:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
-"""
-# You could create your own groups here, like in :
-# create_entity('CWGroup', name=u'mygroup')
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/skeleton/migration/precreate.py.tmpl Tue Apr 13 19:43:51 2010 +0200
@@ -0,0 +1,10 @@
+"""%(distname)s precreate script, executed at instance creation time or when
+the cube is added to an existing instance, before the schema is serialized.
+
+This is typically the place to create groups referenced by the cube's schema.
+
+:organization: %(author)s
+:copyright: %(year)s %(author)s
+:contact: %(author-web-site)s -- mailto:%(author-email)s
+:license: %(long-license)s
+"""
--- a/skeleton/schema.py Tue Apr 13 19:22:46 2010 +0200
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,8 +0,0 @@
-# cube's specific schema
-"""
-
-:organization: Logilab
-:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
-:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
-:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
-"""
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/skeleton/schema.py.tmpl Tue Apr 13 19:43:51 2010 +0200
@@ -0,0 +1,7 @@
+"""%(distname)s schema
+
+:organization: %(author)s
+:copyright: %(year)s %(author)s
+:contact: %(author-web-site)s -- mailto:%(author-email)s
+:license: %(long-license)s
+"""
--- a/skeleton/setup.py Tue Apr 13 19:22:46 2010 +0200
+++ b/skeleton/setup.py Tue Apr 13 19:43:51 2010 +0200
@@ -1,14 +1,12 @@
#!/usr/bin/env python
-"""
+"""Generic Setup script, takes package info from __pkginfo__.py file
:organization: Logilab
:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
"""
-# pylint: disable-msg=W0404,W0622,W0704,W0613,W0152
-# Copyright (c) 2003-2010 LOGILAB S.A. (Paris, FRANCE).
-# http://www.logilab.fr/ -- mailto:contact@logilab.fr
+# pylint: disable-msg=W0142,W0403,W0404,W0613,W0622,W0622,W0704,R0904,C0103,E0611
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
@@ -22,36 +20,178 @@
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
-""" Generic Setup script, takes package info from __pkginfo__.py file """
+
+import os
+import sys
+import shutil
+from os.path import isdir, exists, join, walk
-from distutils.core import setup
+try:
+ if os.environ.get('NO_SETUPTOOLS'):
+        raise ImportError() # act as if setuptools were not available
+ from setuptools import setup
+ from setuptools.command import install_lib
+ USE_SETUPTOOLS = True
+except ImportError:
+ from distutils.core import setup
+ from distutils.command import install_lib
+ USE_SETUPTOOLS = False
# import required features
-from __pkginfo__ import distname, version, license, short_desc, long_desc, \
- web, author, author_email
+from __pkginfo__ import modname, version, license, description, web, \
+ author, author_email
+
+if exists('README'):
+ long_description = file('README').read()
+else:
+ long_description = ''
+
# import optional features
-try:
- from __pkginfo__ import data_files
-except ImportError:
- data_files = None
-try:
- from __pkginfo__ import include_dirs
-except ImportError:
- include_dirs = []
+import __pkginfo__
+if USE_SETUPTOOLS:
+ requires = {}
+ for entry in ("__depends__", "__recommends__"):
+ requires.update(getattr(__pkginfo__, entry, {}))
+ install_requires = [("%s %s" % (d, v and v or "")).strip()
+ for d, v in requires.iteritems()]
+else:
+ install_requires = []
+
+distname = getattr(__pkginfo__, 'distname', modname)
+scripts = getattr(__pkginfo__, 'scripts', ())
+include_dirs = getattr(__pkginfo__, 'include_dirs', ())
+data_files = getattr(__pkginfo__, 'data_files', None)
+subpackage_of = getattr(__pkginfo__, 'subpackage_of', None)
+ext_modules = getattr(__pkginfo__, 'ext_modules', None)
+
+
+BASE_BLACKLIST = ('CVS', 'debian', 'dist', 'build', '__buildlog')
+IGNORED_EXTENSIONS = ('.pyc', '.pyo', '.elc')
+
+
+def ensure_scripts(linux_scripts):
+ """
+ Creates the proper script names required for each platform
+ (taken from 4Suite)
+ """
+ from distutils import util
+ if util.get_platform()[:3] == 'win':
+ scripts_ = [script + '.bat' for script in linux_scripts]
+ else:
+ scripts_ = linux_scripts
+ return scripts_
+
+
+def get_packages(directory, prefix):
+ """return a list of subpackages for the given directory
+ """
+ result = []
+ for package in os.listdir(directory):
+ absfile = join(directory, package)
+ if isdir(absfile):
+ if exists(join(absfile, '__init__.py')) or \
+ package in ('test', 'tests'):
+ if prefix:
+ result.append('%s.%s' % (prefix, package))
+ else:
+ result.append(package)
+ result += get_packages(absfile, result[-1])
+ return result
+
+def export(from_dir, to_dir,
+ blacklist=BASE_BLACKLIST,
+ ignore_ext=IGNORED_EXTENSIONS,
+ verbose=True):
+ """make a mirror of from_dir in to_dir, omitting directories and files
+ listed in the black list
+ """
+ def make_mirror(arg, directory, fnames):
+ """walk handler"""
+ for norecurs in blacklist:
+ try:
+ fnames.remove(norecurs)
+ except ValueError:
+ pass
+ for filename in fnames:
+ # don't include binary files
+ if filename[-4:] in ignore_ext:
+ continue
+ if filename[-1] == '~':
+ continue
+ src = '%s/%s' % (directory, filename)
+ dest = to_dir + src[len(from_dir):]
+ if verbose:
+ print >> sys.stderr, src, '->', dest
+ if os.path.isdir(src):
+ if not exists(dest):
+ os.mkdir(dest)
+ else:
+ if exists(dest):
+ os.remove(dest)
+ shutil.copy2(src, dest)
+ try:
+ os.mkdir(to_dir)
+ except OSError, ex:
+ # file exists ?
+ import errno
+ if ex.errno != errno.EEXIST:
+ raise
+ walk(from_dir, make_mirror, None)
+
+
+EMPTY_FILE = '"""generated file, don\'t modify or your data will be lost"""\n'
+
+class MyInstallLib(install_lib.install_lib):
+ """extend install_lib command to handle package __init__.py and
+ include_dirs variable if necessary
+ """
+ def run(self):
+ """overridden from install_lib class"""
+ install_lib.install_lib.run(self)
+ # create Products.__init__.py if needed
+ if subpackage_of:
+ product_init = join(self.install_dir, subpackage_of, '__init__.py')
+ if not exists(product_init):
+ self.announce('creating %s' % product_init)
+ stream = open(product_init, 'w')
+ stream.write(EMPTY_FILE)
+ stream.close()
+ # manually install included directories if any
+ if include_dirs:
+ if subpackage_of:
+ base = join(subpackage_of, modname)
+ else:
+ base = modname
+ for directory in include_dirs:
+ dest = join(self.install_dir, base, directory)
+ export(directory, dest, verbose=False)
def install(**kwargs):
"""setup entry point"""
- #kwargs['distname'] = modname
- return setup(name=distname,
- version=version,
- license=license,
- description=short_desc,
- long_description=long_desc,
- author=author,
- author_email=author_email,
- url=web,
- data_files=data_files,
- **kwargs)
+ if not USE_SETUPTOOLS and '--install-layout=deb' in sys.argv and \
+       sys.version_info < (2, 5, 4):
+ sys.argv.remove('--install-layout=deb')
+ print "W: remove '--install-layout=deb' option"
+ if subpackage_of:
+ package = subpackage_of + '.' + modname
+ kwargs['package_dir'] = {package : '.'}
+ packages = [package] + get_packages(os.getcwd(), package)
+ if USE_SETUPTOOLS:
+ kwargs['namespace_packages'] = [subpackage_of]
+ else:
+ kwargs['package_dir'] = {modname : '.'}
+ packages = [modname] + get_packages(os.getcwd(), modname)
+ kwargs['packages'] = packages
+ return setup(name=distname, version=version, license=license, url=web,
+ description=description, long_description=long_description,
+ author=author, author_email=author_email,
+ scripts=ensure_scripts(scripts), data_files=data_files,
+ ext_modules=ext_modules,
+ install_requires=install_requires,
+ #dependency_links=["http://alain:alain@intranet.logilab.fr/~alain/"],
+ cmdclass={'install_lib': MyInstallLib},
+ **kwargs
+ )
if __name__ == '__main__' :
install()
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/skeleton/sobjects.py.tmpl Tue Apr 13 19:43:51 2010 +0200
@@ -0,0 +1,7 @@
+"""%(distname)s repository side views, usually for notification
+
+:organization: %(author)s
+:copyright: %(year)s %(author)s
+:contact: %(author-web-site)s -- mailto:%(author-email)s
+:license: %(long-license)s
+"""
--- a/skeleton/views.py Tue Apr 13 19:22:46 2010 +0200
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,7 +0,0 @@
-"""cube-specific forms/views/actions/components
-
-:organization: Logilab
-:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
-:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
-:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
-"""
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/skeleton/views.py.tmpl Tue Apr 13 19:43:51 2010 +0200
@@ -0,0 +1,7 @@
+"""%(distname)s views/forms/actions/components for web ui
+
+:organization: %(author)s
+:copyright: %(year)s %(author)s
+:contact: %(author-web-site)s -- mailto:%(author-email)s
+:license: %(long-license)s
+"""
--- a/sobjects/test/unittest_supervising.py Tue Apr 13 19:22:46 2010 +0200
+++ b/sobjects/test/unittest_supervising.py Tue Apr 13 19:43:51 2010 +0200
@@ -30,9 +30,9 @@
# do some modification
user = self.execute('INSERT CWUser X: X login "toto", X upassword "sosafe", X in_group G '
'WHERE G name "users"').get_entity(0, 0)
- self.execute('SET X last_login_time NOW WHERE X eid %(x)s', {'x': user.eid}, 'x')
+ self.execute('SET X last_login_time NOW WHERE X eid %(x)s', {'x': user.eid})
self.execute('DELETE Card B WHERE B title "une news !"')
- self.execute('SET X bookmarked_by U WHERE X is Bookmark, U eid %(x)s', {'x': user.eid}, 'x')
+ self.execute('SET X bookmarked_by U WHERE X is Bookmark, U eid %(x)s', {'x': user.eid})
self.execute('SET X content "duh?" WHERE X is Comment')
self.execute('DELETE X comments Y WHERE Y is Card, Y title "une autre news !"')
# check only one supervision email operation
@@ -91,7 +91,7 @@
def test_nonregr1(self):
session = self.session
# do some unlogged modification
- self.execute('SET X last_login_time NOW WHERE X eid %(x)s', {'x': session.user.eid}, 'x')
+ self.execute('SET X last_login_time NOW WHERE X eid %(x)s', {'x': session.user.eid})
self.commit() # no crash
--- a/test/data/cubes/file/__pkginfo__.py Tue Apr 13 19:22:46 2010 +0200
+++ b/test/data/cubes/file/__pkginfo__.py Tue Apr 13 19:43:51 2010 +0200
@@ -13,48 +13,3 @@
numversion = (1, 4, 3)
version = '.'.join(str(num) for num in numversion)
-license = 'LGPL'
-copyright = '''Copyright (c) 2003-2010 LOGILAB S.A. (Paris, FRANCE).
-http://www.logilab.fr/ -- mailto:contact@logilab.fr'''
-
-author = "Logilab"
-author_email = "contact@logilab.fr"
-web = ''
-
-short_desc = "Raw file support for the CubicWeb framework"
-long_desc = """CubicWeb is a entities / relations bases knowledge management system
-developped at Logilab.
-.
-This package provides schema and views to store files and images in cubicweb
-applications.
-.
-"""
-
-from os import listdir
-from os.path import join
-
-CUBES_DIR = join('share', 'cubicweb', 'cubes')
-try:
- data_files = [
- [join(CUBES_DIR, 'file'),
- [fname for fname in listdir('.')
- if fname.endswith('.py') and fname != 'setup.py']],
- [join(CUBES_DIR, 'file', 'data'),
- [join('data', fname) for fname in listdir('data')]],
- [join(CUBES_DIR, 'file', 'wdoc'),
- [join('wdoc', fname) for fname in listdir('wdoc')]],
- [join(CUBES_DIR, 'file', 'views'),
- [join('views', fname) for fname in listdir('views') if fname.endswith('.py')]],
- [join(CUBES_DIR, 'file', 'i18n'),
- [join('i18n', fname) for fname in listdir('i18n')]],
- [join(CUBES_DIR, 'file', 'migration'),
- [join('migration', fname) for fname in listdir('migration')]],
- ]
-except OSError:
- # we are in an installed directory
- pass
-
-
-cube_eid = 20320
-# used packages
-__use__ = ()
--- a/test/unittest_cwconfig.py Tue Apr 13 19:22:46 2010 +0200
+++ b/test/unittest_cwconfig.py Tue Apr 13 19:43:51 2010 +0200
@@ -7,13 +7,16 @@
"""
import sys
import os
+import tempfile
from os.path import dirname, join, abspath
from logilab.common.modutils import cleanup_sys_modules
-from logilab.common.testlib import TestCase, unittest_main
+from logilab.common.testlib import (TestCase, unittest_main,
+ with_tempdir)
from logilab.common.changelog import Version
from cubicweb.devtools import ApptestConfiguration
+from cubicweb.cwconfig import _find_prefix
def unabsolutize(path):
parts = path.split(os.sep)
@@ -32,7 +35,7 @@
self.config._cubes = ('email', 'file')
def tearDown(self):
- os.environ.pop('CW_CUBES_PATH', None)
+ ApptestConfiguration.CUBES_PATH = []
def test_reorder_cubes(self):
# jpl depends on email and file and comment
@@ -52,7 +55,7 @@
def test_reorder_cubes_recommends(self):
from cubes.comment import __pkginfo__ as comment_pkginfo
- comment_pkginfo.__recommend__ = ('file',)
+ comment_pkginfo.__recommends_cubes__ = {'file': None}
try:
# email recommends comment
# comment recommends file
@@ -65,7 +68,7 @@
self.assertEquals(self.config.reorder_cubes(('comment', 'forge', 'email', 'file')),
('forge', 'email', 'comment', 'file'))
finally:
- comment_pkginfo.__use__ = ()
+ comment_pkginfo.__recommends_cubes__ = {}
# def test_vc_config(self):
@@ -91,11 +94,11 @@
# make sure we don't import the email cube, but the stdlib email package
import email
self.assertNotEquals(dirname(email.__file__), self.config.CUBES_DIR)
- os.environ['CW_CUBES_PATH'] = CUSTOM_CUBES_DIR
+ self.config.__class__.CUBES_PATH = [CUSTOM_CUBES_DIR]
self.assertEquals(self.config.cubes_search_path(),
[CUSTOM_CUBES_DIR, self.config.CUBES_DIR])
- os.environ['CW_CUBES_PATH'] = os.pathsep.join([
- CUSTOM_CUBES_DIR, self.config.CUBES_DIR, 'unexistant'])
+ self.config.__class__.CUBES_PATH = [CUSTOM_CUBES_DIR,
+ self.config.CUBES_DIR, 'unexistant']
# filter out unexistant and duplicates
self.assertEquals(self.config.cubes_search_path(),
[CUSTOM_CUBES_DIR,
@@ -114,6 +117,91 @@
from cubes import file
self.assertEquals(file.__path__, [join(CUSTOM_CUBES_DIR, 'file')])
+class FindPrefixTC(TestCase):
+ def make_dirs(self, *args):
+ path = join(tempfile.tempdir, *args)
+ if not os.path.exists(path):
+ os.makedirs(path)
+ return path
+
+ def make_file(self, *args):
+ self.make_dirs(*args[: -1])
+ file_path = join(tempfile.tempdir, *args)
+ file_obj = open(file_path, 'w')
+ file_obj.write('""" None """')
+ file_obj.close()
+ return file_path
+
+ @with_tempdir
+ def test_samedir(self):
+ prefix = tempfile.tempdir
+ self.make_dirs('share', 'cubicweb')
+ self.assertEquals(_find_prefix(prefix), prefix)
+
+ @with_tempdir
+ def test_samedir_filepath(self):
+ prefix = tempfile.tempdir
+ self.make_dirs('share', 'cubicweb')
+ file_path = self.make_file('bob.py')
+ self.assertEquals(_find_prefix(file_path), prefix)
+
+ @with_tempdir
+ def test_dir_inside_prefix(self):
+ prefix = tempfile.tempdir
+ self.make_dirs('share', 'cubicweb')
+ dir_path = self.make_dirs('bob')
+ self.assertEquals(_find_prefix(dir_path), prefix)
+
+ @with_tempdir
+ def test_file_in_dir_inside_prefix(self):
+ prefix = tempfile.tempdir
+ self.make_dirs('share', 'cubicweb')
+ file_path = self.make_file('bob', 'toto.py')
+ self.assertEquals(_find_prefix(file_path), prefix)
+
+ @with_tempdir
+ def test_file_in_deeper_dir_inside_prefix(self):
+ prefix = tempfile.tempdir
+ self.make_dirs('share', 'cubicweb')
+ file_path = self.make_file('bob', 'pyves', 'alain', 'adim', 'syt', 'toto.py')
+ self.assertEquals(_find_prefix(file_path), prefix)
+
+ @with_tempdir
+ def test_multiple_candidate_prefix(self):
+ self.make_dirs('share', 'cubicweb')
+ prefix = self.make_dirs('bob')
+ self.make_dirs('bob', 'share', 'cubicweb')
+ file_path = self.make_file('bob', 'pyves', 'alain', 'adim', 'syt', 'toto.py')
+ self.assertEquals(_find_prefix(file_path), prefix)
+
+ @with_tempdir
+ def test_sister_candidate_prefix(self):
+ prefix = tempfile.tempdir
+ self.make_dirs('share', 'cubicweb')
+ self.make_dirs('bob', 'share', 'cubicweb')
+ file_path = self.make_file('bell', 'toto.py')
+ self.assertEquals(_find_prefix(file_path), prefix)
+
+ @with_tempdir
+ def test_multiple_parent_candidate_prefix(self):
+ self.make_dirs('share', 'cubicweb')
+ prefix = self.make_dirs('share', 'cubicweb', 'bob')
+ self.make_dirs('share', 'cubicweb', 'bob', 'share', 'cubicweb')
+ file_path = self.make_file('share', 'cubicweb', 'bob', 'pyves', 'alain', 'adim', 'syt', 'toto.py')
+ self.assertEquals(_find_prefix(file_path), prefix)
+
+ @with_tempdir
+ def test_upper_candidate_prefix(self):
+ prefix = tempfile.tempdir
+ self.make_dirs('share', 'cubicweb')
+ self.make_dirs('bell','bob', 'share', 'cubicweb')
+ file_path = self.make_file('bell', 'toto.py')
+ self.assertEquals(_find_prefix(file_path), prefix)
+
+ @with_tempdir
+ def test_no_prefix(self):
+ prefix = tempfile.tempdir
+ self.assertEquals(_find_prefix(prefix), sys.prefix)
if __name__ == '__main__':
unittest_main()
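
Judging by the FindPrefixTC cases above, _find_prefix walks up from the given file or directory to the closest ancestor containing a share/cubicweb subdirectory and falls back to sys.prefix when none is found. A hedged usage sketch (the path is illustrative):

    from cubicweb.cwconfig import _find_prefix   # same import as in the test above

    # '/opt/cw' if /opt/cw/share/cubicweb exists somewhere up the tree,
    # otherwise sys.prefix
    prefix = _find_prefix('/opt/cw/lib/python/cubicweb/__init__.py')
    print prefix
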
--- a/test/unittest_cwctl.py Tue Apr 13 19:22:46 2010 +0200
+++ b/test/unittest_cwctl.py Tue Apr 13 19:43:51 2010 +0200
@@ -10,15 +10,8 @@
from cStringIO import StringIO
from logilab.common.testlib import TestCase, unittest_main
-if os.environ.get('APYCOT_ROOT'):
- root = os.environ['APYCOT_ROOT']
- CUBES_DIR = '%s/local/share/cubicweb/cubes/' % root
- os.environ['CW_CUBES_PATH'] = CUBES_DIR
- REGISTRY_DIR = '%s/etc/cubicweb.d/' % root
- os.environ['CW_INSTANCES_DIR'] = REGISTRY_DIR
-
from cubicweb.cwconfig import CubicWebConfiguration
-CubicWebConfiguration.load_cwctl_plugins()
+CubicWebConfiguration.load_cwctl_plugins() # XXX necessary?
class CubicWebCtlTC(TestCase):
def setUp(self):
--- a/test/unittest_dbapi.py Tue Apr 13 19:22:46 2010 +0200
+++ b/test/unittest_dbapi.py Tue Apr 13 19:43:51 2010 +0200
@@ -40,21 +40,6 @@
self.assertRaises(ProgrammingError, cnx.user, None)
self.assertRaises(ProgrammingError, cnx.describe, 1)
- def test_session_data_api(self):
- cnx = self.login('anon')
- self.assertEquals(cnx.get_session_data('data'), None)
- self.assertEquals(cnx.session_data(), {})
- cnx.set_session_data('data', 4)
- self.assertEquals(cnx.get_session_data('data'), 4)
- self.assertEquals(cnx.session_data(), {'data': 4})
- cnx.del_session_data('data')
- cnx.del_session_data('whatever')
- self.assertEquals(cnx.get_session_data('data'), None)
- self.assertEquals(cnx.session_data(), {})
- cnx.session_data()['data'] = 4
- self.assertEquals(cnx.get_session_data('data'), 4)
- self.assertEquals(cnx.session_data(), {'data': 4})
-
def test_shared_data_api(self):
cnx = self.login('anon')
self.assertEquals(cnx.get_shared_data('data'), None)
--- a/test/unittest_entity.py Tue Apr 13 19:22:46 2010 +0200
+++ b/test/unittest_entity.py Tue Apr 13 19:43:51 2010 +0200
@@ -44,8 +44,8 @@
p = req.create_entity('Personne', nom=u'toto')
oe = req.create_entity('Note', type=u'x')
self.execute('SET T ecrit_par U WHERE T eid %(t)s, U eid %(u)s',
- {'t': oe.eid, 'u': p.eid}, ('t','u'))
- self.execute('SET TAG tags X WHERE X eid %(x)s', {'x': oe.eid}, 'x')
+ {'t': oe.eid, 'u': p.eid})
+ self.execute('SET TAG tags X WHERE X eid %(x)s', {'x': oe.eid})
e = req.create_entity('Note', type=u'z')
e.copy_relations(oe.eid)
self.assertEquals(len(e.ecrit_par), 1)
@@ -60,7 +60,7 @@
oe = req.create_entity('Note', type=u'x')
self.schema['ecrit_par'].rdef('Note', 'Personne').composite = 'subject'
self.execute('SET T ecrit_par U WHERE T eid %(t)s, U eid %(u)s',
- {'t': oe.eid, 'u': p.eid}, ('t','u'))
+ {'t': oe.eid, 'u': p.eid})
e = req.create_entity('Note', type=u'z')
e.copy_relations(oe.eid)
self.failIf(e.ecrit_par)
@@ -69,12 +69,12 @@
def test_copy_with_composite(self):
user = self.user()
adeleid = self.execute('INSERT EmailAddress X: X address "toto@logilab.org", U use_email X WHERE U login "admin"')[0][0]
- e = self.entity('Any X WHERE X eid %(x)s', {'x':user.eid}, 'x')
+ e = self.execute('Any X WHERE X eid %(x)s', {'x': user.eid}).get_entity(0, 0)
self.assertEquals(e.use_email[0].address, "toto@logilab.org")
self.assertEquals(e.use_email[0].eid, adeleid)
usereid = self.execute('INSERT CWUser X: X login "toto", X upassword "toto", X in_group G '
'WHERE G name "users"')[0][0]
- e = self.entity('Any X WHERE X eid %(x)s', {'x':usereid}, 'x')
+ e = self.execute('Any X WHERE X eid %(x)s', {'x': usereid}).get_entity(0, 0)
e.copy_relations(user.eid)
self.failIf(e.use_email)
self.failIf(e.primary_email)
@@ -87,14 +87,14 @@
user.fire_transition('deactivate')
self.commit()
eid2 = self.execute('INSERT CWUser X: X login "tutu", X upassword %(pwd)s', {'pwd': 'toto'})[0][0]
- e = self.entity('Any X WHERE X eid %(x)s', {'x': eid2}, 'x')
+ e = self.execute('Any X WHERE X eid %(x)s', {'x': eid2}).get_entity(0, 0)
e.copy_relations(user.eid)
self.commit()
e.clear_related_cache('in_state', 'subject')
self.assertEquals(e.state, 'activated')
def test_related_cache_both(self):
- user = self.entity('Any X WHERE X eid %(x)s', {'x':self.user().eid}, 'x')
+ user = self.execute('Any X WHERE X eid %(x)s', {'x':self.user().eid}).get_entity(0, 0)
adeleid = self.execute('INSERT EmailAddress X: X address "toto@logilab.org", U use_email X WHERE U login "admin"')[0][0]
self.commit()
self.assertEquals(user._related_cache, {})
@@ -235,7 +235,7 @@
#rql = email.unrelated_rql('use_email', 'Person', 'object')[0]
#self.assertEquals(rql, '')
self.login('anon')
- email = self.execute('Any X WHERE X eid %(x)s', {'x': email.eid}, 'x').get_entity(0, 0)
+ email = self.execute('Any X WHERE X eid %(x)s', {'x': email.eid}).get_entity(0, 0)
rql = email.unrelated_rql('use_email', 'CWUser', 'object')[0]
self.assertEquals(rql, 'Any S,AA,AB,AC,AD ORDERBY AA '
'WHERE NOT S use_email O, O eid %(x)s, S is CWUser, S login AA, S firstname AB, S surname AC, S modification_date AD, '
@@ -260,7 +260,7 @@
unrelated = [r[0] for r in e.unrelated('tags', 'Personne', 'subject')]
self.failUnless(p.eid in unrelated)
self.execute('SET X tags Y WHERE X is Tag, Y is Personne')
- e = self.entity('Any X WHERE X is Tag')
+ e = self.execute('Any X WHERE X is Tag').get_entity(0, 0)
unrelated = [r[0] for r in e.unrelated('tags', 'Personne', 'subject')]
self.failIf(p.eid in unrelated)
@@ -281,7 +281,7 @@
self.assertEquals([x.address for x in rset.entities()], [u'hop'])
self.create_user('toto')
self.login('toto')
- email = self.execute('Any X WHERE X eid %(x)s', {'x': email.eid}, 'x').get_entity(0, 0)
+ email = self.execute('Any X WHERE X eid %(x)s', {'x': email.eid}).get_entity(0, 0)
rset = email.unrelated('use_email', 'CWUser', 'object')
self.assertEquals([x.login for x in rset.entities()], ['toto'])
user = self.request().user
@@ -291,7 +291,7 @@
rset = user.unrelated('use_email', 'EmailAddress', 'subject')
self.assertEquals([x.address for x in rset.entities()], [])
self.login('anon')
- email = self.execute('Any X WHERE X eid %(x)s', {'x': email.eid}, 'x').get_entity(0, 0)
+ email = self.execute('Any X WHERE X eid %(x)s', {'x': email.eid}).get_entity(0, 0)
rset = email.unrelated('use_email', 'CWUser', 'object')
self.assertEquals([x.login for x in rset.entities()], [])
user = self.request().user
@@ -439,7 +439,7 @@
eid = session.execute(
'INSERT TrInfo X: X comment "zou", X wf_info_for U, X from_state S1, X to_state S2 '
'WHERE U login "admin", S1 name "activated", S2 name "deactivated"')[0][0]
- trinfo = self.entity('Any X WHERE X eid %(x)s', {'x': eid}, 'x')
+ trinfo = self.execute('Any X WHERE X eid %(x)s', {'x': eid}).get_entity(0, 0)
trinfo.complete()
self.failUnless(isinstance(trinfo['creation_date'], datetime))
self.failUnless(trinfo.relation_cached('from_state', 'subject'))
@@ -449,9 +449,9 @@
def test_request_cache(self):
req = self.request()
- user = self.entity('CWUser X WHERE X login "admin"', req=req)
+ user = self.execute('CWUser X WHERE X login "admin"', req=req).get_entity(0, 0)
state = user.in_state[0]
- samestate = self.entity('State X WHERE X name "activated"', req=req)
+ samestate = self.execute('State X WHERE X name "activated"', req=req).get_entity(0, 0)
self.failUnless(state is samestate)
def test_rest_path(self):
@@ -481,7 +481,7 @@
self.assertEquals(person.prenom, u'adrien')
self.assertEquals(person.nom, u'di mascio')
person.set_attributes(prenom=u'sylvain', nom=u'thénault')
- person = self.entity('Personne P') # XXX retreival needed ?
+        person = self.execute('Personne P').get_entity(0, 0) # XXX retrieval needed?
self.assertEquals(person.prenom, u'sylvain')
self.assertEquals(person.nom, u'thénault')
--- a/test/unittest_rset.py Tue Apr 13 19:22:46 2010 +0200
+++ b/test/unittest_rset.py Tue Apr 13 19:43:51 2010 +0200
@@ -212,7 +212,7 @@
def test_get_entity_simple(self):
self.request().create_entity('CWUser', login=u'adim', upassword='adim',
surname=u'di mascio', firstname=u'adrien')
- e = self.entity('Any X,T WHERE X login "adim", X surname T')
+ e = self.execute('Any X,T WHERE X login "adim", X surname T').get_entity(0, 0)
self.assertEquals(e['surname'], 'di mascio')
self.assertRaises(KeyError, e.__getitem__, 'firstname')
self.assertRaises(KeyError, e.__getitem__, 'creation_date')
--- a/toolsutils.py Tue Apr 13 19:22:46 2010 +0200
+++ b/toolsutils.py Tue Apr 13 19:43:51 2010 +0200
@@ -103,10 +103,9 @@
else:
print 'no diff between %s and %s' % (appl_file, ref_file)
-
+SKEL_EXCLUDE = ('*.py[co]', '*.orig', '*~', '*_flymake.py')
def copy_skeleton(skeldir, targetdir, context,
- exclude=('*.py[co]', '*.orig', '*~', '*_flymake.py'),
- askconfirm=False):
+ exclude=SKEL_EXCLUDE, askconfirm=False):
import shutil
from fnmatch import fnmatch
skeldir = normpath(skeldir)
@@ -184,7 +183,7 @@
config_file, ex)
return config
-def env_path(env_var, default, name):
+def env_path(env_var, default, name, checkexists=True):
"""get a path specified in a variable or using the default value and return
it.
@@ -203,8 +202,8 @@
:raise `ConfigurationError`: if the returned path does not exist
"""
path = environ.get(env_var, default)
- if not exists(path):
- raise ConfigurationError('%s path %s doesn\'t exist' % (name, path))
+ if checkexists and not exists(path):
+ raise ConfigurationError('%s directory %s doesn\'t exist' % (name, path))
return abspath(path)
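
The new ``checkexists`` flag lets ``env_path()`` resolve a directory taken
from an environment variable even when that directory has not been created
yet; existence is only enforced with the default value. A hedged usage
sketch (``CW_INSTANCES_DIR`` and ``/etc/cubicweb.d`` are illustrative values,
not taken from this hunk)::

    from cubicweb.toolsutils import env_path

    # default behaviour: raise ConfigurationError if the directory is missing
    instances = env_path('CW_INSTANCES_DIR', '/etc/cubicweb.d', 'instances')

    # relaxed behaviour: just return the absolute path, existing or not
    instances = env_path('CW_INSTANCES_DIR', '/etc/cubicweb.d', 'instances',
                         checkexists=False)
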
--- a/transaction.py Tue Apr 13 19:22:46 2010 +0200
+++ b/transaction.py Tue Apr 13 19:43:51 2010 +0200
@@ -48,7 +48,7 @@
none if not found.
"""
return self.req.execute('Any X WHERE X eid %(x)s',
- {'x': self.user_eid}, 'x').get_entity(0, 0)
+ {'x': self.user_eid}).get_entity(0, 0)
def actions_list(self, public=True):
"""return an ordered list of action effectued during that transaction
--- a/utils.py Tue Apr 13 19:22:46 2010 +0200
+++ b/utils.py Tue Apr 13 19:43:51 2010 +0200
@@ -309,35 +309,6 @@
self.body.getvalue())
-def _pdf_conversion_availability():
- try:
- import pysixt
- except ImportError:
- return False
- from subprocess import Popen, STDOUT
- if not os.path.isfile('/usr/bin/fop'):
- return False
- try:
- Popen(['/usr/bin/fop', '-q'],
- stdout=open(os.devnull, 'w'),
- stderr=STDOUT)
- except OSError, e:
- getLogger('cubicweb').info('fop not usable (%s)', e)
- return False
- return True
-
-def can_do_pdf_conversion(__answer_cache=[]):
- """pdf conversion depends on
- * pysixt (python package)
- * fop 0.9x
-
- NOTE: actual check is done by _pdf_conversion_availability and
- result is cached
- """
- if not __answer_cache: # first time, not in cache
- __answer_cache.append(_pdf_conversion_availability())
- return __answer_cache[0]
-
try:
# may not be there if cubicweb-web not installed
from simplejson import dumps, JSONEncoder
--- a/web/_exceptions.py Tue Apr 13 19:22:46 2010 +0200
+++ b/web/_exceptions.py Tue Apr 13 19:43:51 2010 +0200
@@ -40,10 +40,6 @@
self.status = int(status)
self.content = content
-class ExplicitLogin(AuthenticationError):
- """raised when a bad connection id is given or when an attempt to establish
- a connection failed"""
-
class InvalidSession(CubicWebException):
"""raised when a session id is found but associated session is not found or
invalid
@@ -59,3 +55,9 @@
def dumps(self):
import simplejson
return simplejson.dumps({'reason': self.reason})
+
+class LogOut(PublishException):
+    """raised to ask for de-authentication of a logged-in user"""
+ def __init__(self, url):
+ super(LogOut, self).__init__()
+ self.url = url
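
``ExplicitLogin`` disappears: failed authentication now surfaces as the core
``cubicweb.AuthenticationError``, while the new ``LogOut`` exception carries
the URL to redirect to once the web session has been closed. A sketch of the
logout path, modelled on the ``web/application.py`` hunk below (the
``session_manager`` attribute comes from that hunk, the rest is illustrative)::

    from cubicweb.web import LogOut   # re-exported by the cubicweb.web package

    def logout(self, req, goto_url):
        # close the web session, then let the HTTP layer catch LogOut and
        # redirect the browser to goto_url
        self.session_manager.close_session(req.cnx)
        raise LogOut(url=goto_url)
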
--- a/web/application.py Tue Apr 13 19:22:46 2010 +0200
+++ b/web/application.py Tue Apr 13 19:43:51 2010 +0200
@@ -5,6 +5,8 @@
:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
"""
+from __future__ import with_statement
+
__docformat__ = "restructuredtext en"
import sys
@@ -18,10 +20,11 @@
from cubicweb import (
ValidationError, Unauthorized, AuthenticationError, NoSelectableObject,
RepositoryError, CW_EVENT_MANAGER)
+from cubicweb.dbapi import DBAPISession
from cubicweb.web import LOGGER, component
from cubicweb.web import (
- StatusResponse, DirectResponse, Redirect, NotFound,
- RemoteCallFailed, ExplicitLogin, InvalidSession, RequestError)
+ StatusResponse, DirectResponse, Redirect, NotFound, LogOut,
+ RemoteCallFailed, InvalidSession, RequestError)
# make session manager available through a global variable so the debug view can
# print information about web session
@@ -52,7 +55,7 @@
for session in self.current_sessions():
no_use_time = (time() - session.last_usage_time)
total += 1
- if session.anonymous_connection:
+ if session.anonymous_session:
if no_use_time >= self.cleanup_anon_session_time:
self.close_session(session)
closed += 1
@@ -76,9 +79,11 @@
raise NotImplementedError()
def open_session(self, req):
- """open and return a new session for the given request
+ """open and return a new session for the given request. The session is
+ also bound to the request.
- :raise ExplicitLogin: if authentication is required
+ raise :exc:`cubicweb.AuthenticationError` if authentication failed
+ (no authentication info found or wrong user/password)
"""
raise NotImplementedError()
@@ -97,11 +102,24 @@
def __init__(self, vreg):
self.vreg = vreg
- def authenticate(self, req):
- """authenticate user and return corresponding user object
+ def validate_session(self, req, session):
+ """check session validity, reconnecting it to the repository if the
+        associated connection expired on the repository side (hence the
+ necessity for this method).
- :raise ExplicitLogin: if authentication is required (no authentication
- info found or wrong user/password)
+        raise :exc:`InvalidSession` if the session is corrupted for one reason
+        or another and should be closed
+ """
+ raise NotImplementedError()
+
+ def authenticate(self, req):
+ """authenticate user using connection information found in the request,
+        and return a corresponding :class:`~cubicweb.dbapi.Connection` instance,
+        as well as the login and authentication information dictionary used to open
+ the connection.
+
+ raise :exc:`cubicweb.AuthenticationError` if authentication failed
+ (no authentication info found or wrong user/password)
"""
raise NotImplementedError()
@@ -165,9 +183,11 @@
try:
session = self.get_session(req, sessionid)
except InvalidSession:
+ # try to open a new session, so we get an anonymous session if
+ # allowed
try:
session = self.open_session(req)
- except ExplicitLogin:
+ except AuthenticationError:
req.remove_cookie(cookie, self.SESSION_VAR)
raise
# remember last usage time for web session tracking
@@ -183,14 +203,14 @@
req.set_cookie(cookie, self.SESSION_VAR, maxage=None)
# remember last usage time for web session tracking
session.last_usage_time = time()
- if not session.anonymous_connection:
+ if not session.anonymous_session:
self._postlogin(req)
return session
def _update_last_login_time(self, req):
try:
req.execute('SET X last_login_time NOW WHERE X eid %(x)s',
- {'x' : req.user.eid}, 'x')
+ {'x' : req.user.eid})
req.cnx.commit()
except (RepositoryError, Unauthorized):
# ldap user are not writeable for instance
@@ -227,7 +247,7 @@
"""
self.session_manager.close_session(req.cnx)
req.remove_cookie(req.get_cookie(), self.SESSION_VAR)
- raise AuthenticationError(url=goto_url)
+ raise LogOut(url=goto_url)
class CubicWebPublisher(object):
@@ -271,7 +291,10 @@
sessions (i.e. a new connection may be created or an already existing
one may be reused
"""
- self.session_handler.set_session(req)
+ try:
+ self.session_handler.set_session(req)
+ except AuthenticationError:
+ req.set_session(DBAPISession(None))
# publish methods #########################################################
@@ -283,19 +306,18 @@
return self.main_publish(path, req)
finally:
cnx = req.cnx
- self._logfile_lock.acquire()
- try:
- try:
- result = ['\n'+'*'*80]
- result.append(req.url())
- result += ['%s %s -- (%.3f sec, %.3f CPU sec)' % q for q in cnx.executed_queries]
- cnx.executed_queries = []
- self._query_log.write('\n'.join(result).encode(req.encoding))
- self._query_log.flush()
- except Exception:
- self.exception('error while logging queries')
- finally:
- self._logfile_lock.release()
+ if cnx is not None:
+ with self._logfile_lock:
+ try:
+ result = ['\n'+'*'*80]
+ result.append(req.url())
+ result += ['%s %s -- (%.3f sec, %.3f CPU sec)' % q
+ for q in cnx.executed_queries]
+ cnx.executed_queries = []
+ self._query_log.write('\n'.join(result).encode(req.encoding))
+ self._query_log.flush()
+ except Exception:
+ self.exception('error while logging queries')
@deprecated("[3.4] use vreg['controllers'].select(...)")
def select_controller(self, oid, req):
@@ -340,7 +362,10 @@
# displaying the cookie authentication form
req.cnx.commit()
except (StatusResponse, DirectResponse):
- req.cnx.commit()
+ if req.cnx is not None:
+ req.cnx.commit()
+ raise
+ except (AuthenticationError, LogOut):
raise
except Redirect:
# redirect is raised by edit controller when everything went fine,
@@ -362,10 +387,13 @@
else:
# delete validation errors which may have been previously set
if '__errorurl' in req.form:
- req.del_session_data(req.form['__errorurl'])
+ req.session.data.pop(req.form['__errorurl'], None)
raise
- except (AuthenticationError, NotFound, RemoteCallFailed):
- raise
+ except RemoteCallFailed, ex:
+ req.set_header('content-type', 'application/json')
+ raise StatusResponse(500, ex.dumps())
+ except NotFound:
+ raise StatusResponse(404, self.notfound_content(req))
except ValidationError, ex:
self.validation_error_handler(req, ex)
except (Unauthorized, BadRQLQuery, RequestError), ex:
@@ -388,7 +416,7 @@
'values': req.form,
'eidmap': req.data.get('eidmap', {})
}
- req.set_session_data(req.form['__errorurl'], forminfo)
+ req.session.data[req.form['__errorurl']] = forminfo
# XXX form session key / __error_url should be differentiated:
# session key is 'url + #<form dom id', though we usually don't want
# the browser to move to the form since it hides the global
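
Another recurring change in the web hunks: the ``get_session_data`` /
``set_session_data`` / ``del_session_data`` request methods are dropped and
per-session storage becomes the plain ``req.session.data`` dictionary (see
also ``web/captcha.py`` and ``web/form.py`` below). A hedged sketch of the
new idiom, with ``req`` standing for any web request object::

    def session_data_roundtrip(req, forminfo):
        """sketch only: stash then retrieve per-session form information"""
        key = req.form['__errorurl']
        # was: req.set_session_data(key, forminfo)
        req.session.data[key] = forminfo
        # was: req.get_session_data(key, pop=True)
        return req.session.data.pop(key, None)
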
--- a/web/box.py Tue Apr 13 19:22:46 2010 +0200
+++ b/web/box.py Tue Apr 13 19:43:51 2010 +0200
@@ -12,7 +12,7 @@
from cubicweb import Unauthorized, role as get_role, target as get_target
from cubicweb.schema import display_name
-from cubicweb.selectors import (one_line_rset, primary_view,
+from cubicweb.selectors import (no_cnx, one_line_rset, primary_view,
match_context_prop, partial_has_related_entities)
from cubicweb.view import View, ReloadableMixIn
@@ -37,7 +37,7 @@
box.render(self.w)
"""
__registry__ = 'boxes'
- __select__ = match_context_prop()
+ __select__ = ~no_cnx() & match_context_prop()
categories_in_order = ()
cw_property_defs = {
--- a/web/captcha.py Tue Apr 13 19:22:46 2010 +0200
+++ b/web/captcha.py Tue Apr 13 19:43:51 2010 +0200
@@ -70,8 +70,7 @@
return img + super(CaptchaWidget, self).render(form, field, renderer)
def process_field_data(self, form, field):
- captcha = form._cw.get_session_data(field.input_name(form), None,
- pop=True)
+ captcha = form._cw.session.data.pop(field.input_name(form), None)
val = super(CaptchaWidget, self).process_field_data(form, field)
if val is None:
return val # required will be checked by field
--- a/web/component.py Tue Apr 13 19:22:46 2010 +0200
+++ b/web/component.py Tue Apr 13 19:43:51 2010 +0200
@@ -169,7 +169,7 @@
rset = entity.related(self.rtype, role(self))
else:
eid = self.cw_rset[row][col]
- rset = self._cw.execute(self.rql(), {'x': eid}, 'x')
+ rset = self._cw.execute(self.rql(), {'x': eid})
if not rset.rowcount:
return
self.w(u'<div class="%s">' % self.div_class())
--- a/web/data/cubicweb.acl.css Tue Apr 13 19:22:46 2010 +0200
+++ b/web/data/cubicweb.acl.css Tue Apr 13 19:43:51 2010 +0200
@@ -17,13 +17,13 @@
}
-h3.schema{
+h3.schema{
font-weight: bold;
}
h4 a,
h4 a:link,
-h4 a:visited{
+h4 a:visited{
color:#000;
}
@@ -39,11 +39,11 @@
table.schemaInfo td {
padding: .3em .5em;
border: 1px solid grey;
- width:33%;
+ width:33%;
}
-table.schemaInfo tr th {
+table.schemaInfo tr th {
padding: 0.2em 0px 0.2em 5px;
background-image:none;
background-color:#dfdfdf;
@@ -51,47 +51,46 @@
table.schemaInfo thead tr {
border: 1px solid #dfdfdf;
-}
+}
table.schemaInfo td {
- padding: 3px 10px 3px 5px;
+ padding: 3px 10px 3px 5px;
}
-.users{
+a.users{
color : #00CC33;
font-weight: bold }
-.guests{
+a.guests{
color : #ff7700;
font-weight: bold;
}
-.staff{
- color : #0083ab;
- font-weight: bold;
-}
-
-.owners{
+a.owners{
color : #8b0000;
font-weight: bold;
}
+a.managers{
+ color: #000000;
+}
+
.discret,
-a.grey{
+a.grey{
color:#666;
}
-a.grey:hover{
+a.grey:hover{
color:#000;
}
-.red{
+.red{
color : #ff7700;
}
-div#schema_security{
- width:780px;
+div#schema_security{
+ width:100%;
}
/******************************************************************************/
/* user groups edition form (views/euser.py) */
--- a/web/data/cubicweb.css Tue Apr 13 19:22:46 2010 +0200
+++ b/web/data/cubicweb.css Tue Apr 13 19:43:51 2010 +0200
@@ -63,7 +63,7 @@
text-decoration: underline;
}
-a img {
+a img, img {
border: none;
text-align: center;
}
--- a/web/data/cubicweb.schema.css Tue Apr 13 19:22:46 2010 +0200
+++ b/web/data/cubicweb.schema.css Tue Apr 13 19:43:51 2010 +0200
@@ -25,11 +25,14 @@
div.acl{
position: relative;
/* right: 20%;*/
- float: right;
- width: 10%;
+ width: 25%;
padding:0px 0px 0px 2em;
}
+div.acl table tr,td{
+ padding: 2px 2px 2px 2px;
+}
+
div.schema table {
width : 100%;
}
@@ -40,7 +43,6 @@
}
div.box div.title{
- border-bottom:1px solid black;
padding:0.2em 0.2em;
margin: 0 auto;
}
@@ -67,7 +69,6 @@
div.box{
float:left;
border:1px solid black;
- width:50%;
}
div.vl{
Binary file web/data/pdf_icon.gif has changed
--- a/web/facet.py Tue Apr 13 19:22:46 2010 +0200
+++ b/web/facet.py Tue Apr 13 19:43:51 2010 +0200
@@ -8,7 +8,6 @@
"""
__docformat__ = "restructuredtext en"
-from itertools import chain
from copy import deepcopy
from datetime import date, datetime, timedelta
@@ -199,7 +198,7 @@
# add attribute variable to selection
rqlst.add_selected(attrvar)
# add is restriction if necessary
- if not mainvar.stinfo['typerels']:
+ if mainvar.stinfo['typerel'] is None:
etypes = frozenset(sol[mainvar.name] for sol in rqlst.solutions)
rqlst.add_type_restriction(mainvar, etypes)
return var
@@ -228,12 +227,16 @@
for ovarname in linkedvars:
vargraph[ovarname].remove(trvarname)
# remove relation using this variable
- for rel in chain(trvar.stinfo['relations'], trvar.stinfo['typerels']):
+ for rel in trvar.stinfo['relations']:
if rel in removed:
# already removed
continue
rqlst.remove_node(rel)
removed.add(rel)
+ rel = trvar.stinfo['typerel']
+ if rel is not None and not rel in removed:
+ rqlst.remove_node(rel)
+ removed.add(rel)
# cleanup groupby clause
if rqlst.groupby:
for vref in rqlst.groupby[:]:
@@ -329,9 +332,9 @@
def support_and(self):
return False
- def rqlexec(self, rql, args=None, cachekey=None):
+ def rqlexec(self, rql, args=None):
try:
- return self._cw.execute(rql, args, cachekey)
+ return self._cw.execute(rql, args)
except Unauthorized:
return []
@@ -372,7 +375,7 @@
if self.target_type is not None:
rqlst.add_type_restriction(var, self.target_type)
try:
- rset = self.rqlexec(rqlst.as_string(), self.cw_rset.args, self.cw_rset.cachekey)
+ rset = self.rqlexec(rqlst.as_string(), self.cw_rset.args)
except:
self.exception('error while getting vocabulary for %s, rql: %s',
self, rqlst.as_string())
@@ -463,7 +466,7 @@
newvar = _prepare_vocabulary_rqlst(rqlst, mainvar, self.rtype, self.role)
_set_orderby(rqlst, newvar, self.sortasc, self.sortfunc)
try:
- rset = self.rqlexec(rqlst.as_string(), self.cw_rset.args, self.cw_rset.cachekey)
+ rset = self.rqlexec(rqlst.as_string(), self.cw_rset.args)
except:
self.exception('error while getting vocabulary for %s, rql: %s',
self, rqlst.as_string())
--- a/web/form.py Tue Apr 13 19:22:46 2010 +0200
+++ b/web/form.py Tue Apr 13 19:43:51 2010 +0200
@@ -191,7 +191,7 @@
warn('[3.6.1] restore_previous_post already called, remove this call',
DeprecationWarning, stacklevel=2)
return
- forminfo = self._cw.get_session_data(sessionkey, pop=True)
+ forminfo = self._cw.session.data.pop(sessionkey, None)
if forminfo:
self._form_previous_values = forminfo['values']
self._form_valerror = forminfo['error']
--- a/web/formfields.py Tue Apr 13 19:22:46 2010 +0200
+++ b/web/formfields.py Tue Apr 13 19:43:51 2010 +0200
@@ -589,8 +589,7 @@
# raise UnmodifiedField instead of returning None, since the later
# will try to remove already attached file if any
raise UnmodifiedField()
- # skip browser submitted mime type
- filename, _, stream = value
+ filename, stream = value
-        # value is a 3-uple (filename, mimetype, stream)
+        # value is now a 2-uple (filename, stream)
value = Binary(stream.read())
if not value.getvalue(): # usually an unexistant file
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/web/http_headers.py Tue Apr 13 19:43:51 2010 +0200
@@ -0,0 +1,1542 @@
+# This file has been extracted from the abandoned TwistedWeb2 project
+# http://twistedmatrix.com/trac/wiki/TwistedWeb2
+
+
+from __future__ import generators
+
+import types, time
+from calendar import timegm
+import base64
+import re
+
+def dashCapitalize(s):
+    ''' Capitalize a string, making sure to treat - as a word separator '''
+ return '-'.join([ x.capitalize() for x in s.split('-')])
+
+# datetime parsing and formatting
+weekdayname = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
+weekdayname_lower = [name.lower() for name in weekdayname]
+monthname = [None,
+ 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
+ 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
+monthname_lower = [name and name.lower() for name in monthname]
+
+# HTTP Header parsing API
+
+header_case_mapping = {}
+
+def casemappingify(d):
+ global header_case_mapping
+ newd = dict([(key.lower(),key) for key in d.keys()])
+ header_case_mapping.update(newd)
+
+def lowerify(d):
+ return dict([(key.lower(),value) for key,value in d.items()])
+
+
+class HeaderHandler(object):
+ """HeaderHandler manages header generating and parsing functions.
+ """
+ HTTPParsers = {}
+ HTTPGenerators = {}
+
+ def __init__(self, parsers=None, generators=None):
+ """
+ @param parsers: A map of header names to parsing functions.
+ @type parsers: L{dict}
+
+ @param generators: A map of header names to generating functions.
+ @type generators: L{dict}
+ """
+
+ if parsers:
+ self.HTTPParsers.update(parsers)
+ if generators:
+ self.HTTPGenerators.update(generators)
+
+ def parse(self, name, header):
+ """
+ Parse the given header based on its given name.
+
+ @param name: The header name to parse.
+ @type name: C{str}
+
+ @param header: A list of unparsed headers.
+ @type header: C{list} of C{str}
+
+ @return: The return value is the parsed header representation,
+ it is dependent on the header. See the HTTP Headers document.
+ """
+ parser = self.HTTPParsers.get(name, None)
+ if parser is None:
+            raise ValueError("No header parser for header '%s', either add one or use getRawHeaders." % (name,))
+
+ try:
+ for p in parser:
+ # print "Parsing %s: %s(%s)" % (name, repr(p), repr(h))
+ header = p(header)
+ # if isinstance(h, types.GeneratorType):
+ # h=list(h)
+ except ValueError,v:
+ # print v
+ header=None
+
+ return header
+
+ def generate(self, name, header):
+ """
+ Generate the given header based on its given name.
+
+ @param name: The header name to generate.
+ @type name: C{str}
+
+ @param header: A parsed header, such as the output of
+ L{HeaderHandler}.parse.
+
+ @return: C{list} of C{str} each representing a generated HTTP header.
+ """
+ generator = self.HTTPGenerators.get(name, None)
+
+ if generator is None:
+ # print self.generators
+            raise ValueError("No header generator for header '%s', either add one or use setRawHeaders." % (name,))
+
+ for g in generator:
+ header = g(header)
+
+ #self._raw_headers[name] = h
+ return header
+
+ def updateParsers(self, parsers):
+ """Update en masse the parser maps.
+
+ @param parsers: Map of header names to parser chains.
+ @type parsers: C{dict}
+ """
+ casemappingify(parsers)
+ self.HTTPParsers.update(lowerify(parsers))
+
+ def addParser(self, name, value):
+ """Add an individual parser chain for the given header.
+
+ @param name: Name of the header to add
+ @type name: C{str}
+
+ @param value: The parser chain
+ @type value: C{str}
+ """
+ self.updateParsers({name: value})
+
+ def updateGenerators(self, generators):
+ """Update en masse the generator maps.
+
+        @param generators: Map of header names to generator chains.
+        @type generators: C{dict}
+ """
+ casemappingify(generators)
+ self.HTTPGenerators.update(lowerify(generators))
+
+ def addGenerators(self, name, value):
+ """Add an individual generator chain for the given header.
+
+ @param name: Name of the header to add
+ @type name: C{str}
+
+ @param value: The generator chain
+ @type value: C{str}
+ """
+ self.updateGenerators({name: value})
+
+ def update(self, parsers, generators):
+ """Conveniently update parsers and generators all at once.
+ """
+ self.updateParsers(parsers)
+ self.updateGenerators(generators)
+
+
+DefaultHTTPHandler = HeaderHandler()
+
+
+## HTTP DateTime parser
+def parseDateTime(dateString):
+ """Convert an HTTP date string (one of three formats) to seconds since epoch."""
+ parts = dateString.split()
+
+ if not parts[0][0:3].lower() in weekdayname_lower:
+ # Weekday is stupid. Might have been omitted.
+ try:
+ return parseDateTime("Sun, "+dateString)
+ except ValueError:
+ # Guess not.
+ pass
+
+ partlen = len(parts)
+ if (partlen == 5 or partlen == 6) and parts[1].isdigit():
+ # 1st date format: Sun, 06 Nov 1994 08:49:37 GMT
+ # (Note: "GMT" is literal, not a variable timezone)
+ # (also handles without "GMT")
+ # This is the normal format
+ day = parts[1]
+ month = parts[2]
+ year = parts[3]
+ time = parts[4]
+ elif (partlen == 3 or partlen == 4) and parts[1].find('-') != -1:
+ # 2nd date format: Sunday, 06-Nov-94 08:49:37 GMT
+ # (Note: "GMT" is literal, not a variable timezone)
+        # (also handles without "GMT")
+ # Two digit year, yucko.
+ day, month, year = parts[1].split('-')
+ time = parts[2]
+ year=int(year)
+ if year < 69:
+ year = year + 2000
+ elif year < 100:
+ year = year + 1900
+ elif len(parts) == 5:
+ # 3rd date format: Sun Nov 6 08:49:37 1994
+ # ANSI C asctime() format.
+ day = parts[2]
+ month = parts[1]
+ year = parts[4]
+ time = parts[3]
+ else:
+ raise ValueError("Unknown datetime format %r" % dateString)
+
+ day = int(day)
+ month = int(monthname_lower.index(month.lower()))
+ year = int(year)
+ hour, min, sec = map(int, time.split(':'))
+ return int(timegm((year, month, day, hour, min, sec)))
+
+
+##### HTTP tokenizer
+class Token(str):
+ __slots__=[]
+ tokens = {}
+ def __new__(self, char):
+ token = Token.tokens.get(char)
+ if token is None:
+ Token.tokens[char] = token = str.__new__(self, char)
+ return token
+
+ def __repr__(self):
+ return "Token(%s)" % str.__repr__(self)
+
+
+http_tokens = " \t\"()<>@,;:\\/[]?={}"
+http_ctls = "\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x7f"
+
+def tokenize(header, foldCase=True):
+ """Tokenize a string according to normal HTTP header parsing rules.
+
+ In particular:
+ - Whitespace is irrelevant and eaten next to special separator tokens.
+       Its existence (but not amount) is important between character strings.
+ - Quoted string support including embedded backslashes.
+ - Case is insignificant (and thus lowercased), except in quoted strings.
+ (unless foldCase=False)
+ - Multiple headers are concatenated with ','
+
+ NOTE: not all headers can be parsed with this function.
+
+ Takes a raw header value (list of strings), and
+ Returns a generator of strings and Token class instances.
+ """
+ tokens=http_tokens
+ ctls=http_ctls
+
+ string = ",".join(header)
+ list = []
+ start = 0
+ cur = 0
+ quoted = False
+ qpair = False
+ inSpaces = -1
+ qstring = None
+
+ for x in string:
+ if quoted:
+ if qpair:
+ qpair = False
+ qstring = qstring+string[start:cur-1]+x
+ start = cur+1
+ elif x == '\\':
+ qpair = True
+ elif x == '"':
+ quoted = False
+ yield qstring+string[start:cur]
+ qstring=None
+ start = cur+1
+ elif x in tokens:
+ if start != cur:
+ if foldCase:
+ yield string[start:cur].lower()
+ else:
+ yield string[start:cur]
+
+ start = cur+1
+ if x == '"':
+ quoted = True
+ qstring = ""
+ inSpaces = False
+ elif x in " \t":
+ if inSpaces is False:
+ inSpaces = True
+ else:
+ inSpaces = -1
+ yield Token(x)
+ elif x in ctls:
+ raise ValueError("Invalid control character: %d in header" % ord(x))
+ else:
+ if inSpaces is True:
+ yield Token(' ')
+ inSpaces = False
+
+ inSpaces = False
+ cur = cur+1
+
+ if qpair:
+ raise ValueError, "Missing character after '\\'"
+ if quoted:
+ raise ValueError, "Missing end quote"
+
+ if start != cur:
+ if foldCase:
+ yield string[start:cur].lower()
+ else:
+ yield string[start:cur]
+
+def split(seq, delim):
+ """The same as str.split but works on arbitrary sequences.
+ Too bad it's not builtin to python!"""
+
+ cur = []
+ for item in seq:
+ if item == delim:
+ yield cur
+ cur = []
+ else:
+ cur.append(item)
+ yield cur
+
+# def find(seq, *args):
+# """The same as seq.index but returns -1 if not found, instead
+# Too bad it's not builtin to python!"""
+# try:
+# return seq.index(value, *args)
+# except ValueError:
+# return -1
+
+
+def filterTokens(seq):
+ """Filter out instances of Token, leaving only a list of strings.
+
+ Used instead of a more specific parsing method (e.g. splitting on commas)
+ when only strings are expected, so as to be a little lenient.
+
+ Apache does it this way and has some comments about broken clients which
+ forget commas (?), so I'm doing it the same way. It shouldn't
+ hurt anything, in any case.
+ """
+
+ l=[]
+ for x in seq:
+ if not isinstance(x, Token):
+ l.append(x)
+ return l
+
+##### parser utilities:
+def checkSingleToken(tokens):
+ if len(tokens) != 1:
+ raise ValueError, "Expected single token, not %s." % (tokens,)
+ return tokens[0]
+
+def parseKeyValue(val):
+ if len(val) == 1:
+ return val[0],None
+ elif len(val) == 3 and val[1] == Token('='):
+ return val[0],val[2]
+ raise ValueError, "Expected key or key=value, but got %s." % (val,)
+
+def parseArgs(field):
+ args=split(field, Token(';'))
+ val = args.next()
+ args = [parseKeyValue(arg) for arg in args]
+ return val,args
+
+def listParser(fun):
+ """Return a function which applies 'fun' to every element in the
+ comma-separated list"""
+ def listParserHelper(tokens):
+ fields = split(tokens, Token(','))
+ for field in fields:
+ if len(field) != 0:
+ yield fun(field)
+
+ return listParserHelper
+
+def last(seq):
+ """Return seq[-1]"""
+
+ return seq[-1]
+
+##### Generation utilities
+def quoteString(s):
+ return '"%s"' % s.replace('\\', '\\\\').replace('"', '\\"')
+
+def listGenerator(fun):
+ """Return a function which applies 'fun' to every element in
+ the given list, then joins the result with generateList"""
+ def listGeneratorHelper(l):
+ return generateList([fun(e) for e in l])
+
+ return listGeneratorHelper
+
+def generateList(seq):
+ return ", ".join(seq)
+
+def singleHeader(item):
+ return [item]
+
+def generateKeyValues(kvs):
+ l = []
+ # print kvs
+ for k,v in kvs:
+ if v is None:
+ l.append('%s' % k)
+ else:
+ l.append('%s=%s' % (k,v))
+ return ";".join(l)
+
+
+class MimeType(object):
+ def fromString(klass, mimeTypeString):
+ """Generate a MimeType object from the given string.
+
+ @param mimeTypeString: The mimetype to parse
+
+ @return: L{MimeType}
+ """
+ return DefaultHTTPHandler.parse('content-type', [mimeTypeString])
+
+ fromString = classmethod(fromString)
+
+ def __init__(self, mediaType, mediaSubtype, params={}, **kwargs):
+ """
+ @type mediaType: C{str}
+
+ @type mediaSubtype: C{str}
+
+ @type params: C{dict}
+ """
+ self.mediaType = mediaType
+ self.mediaSubtype = mediaSubtype
+ self.params = dict(params)
+
+ if kwargs:
+ self.params.update(kwargs)
+
+ def __eq__(self, other):
+ if not isinstance(other, MimeType): return NotImplemented
+ return (self.mediaType == other.mediaType and
+ self.mediaSubtype == other.mediaSubtype and
+ self.params == other.params)
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ def __repr__(self):
+ return "MimeType(%r, %r, %r)" % (self.mediaType, self.mediaSubtype, self.params)
+
+ def __hash__(self):
+ return hash(self.mediaType)^hash(self.mediaSubtype)^hash(tuple(self.params.iteritems()))
+
+##### Specific header parsers.
+def parseAccept(field):
+ type,args = parseArgs(field)
+
+ if len(type) != 3 or type[1] != Token('/'):
+ raise ValueError, "MIME Type "+str(type)+" invalid."
+
+ # okay, this spec is screwy. A 'q' parameter is used as the separator
+ # between MIME parameters and (as yet undefined) additional HTTP
+ # parameters.
+
+ num = 0
+ for arg in args:
+ if arg[0] == 'q':
+ mimeparams=tuple(args[0:num])
+ params=args[num:]
+ break
+ num = num + 1
+ else:
+ mimeparams=tuple(args)
+ params=[]
+
+ # Default values for parameters:
+ qval = 1.0
+
+ # Parse accept parameters:
+ for param in params:
+ if param[0] =='q':
+ qval = float(param[1])
+ else:
+ # Warn? ignored parameter.
+ pass
+
+ ret = MimeType(type[0],type[2],mimeparams),qval
+ return ret
+
+def parseAcceptQvalue(field):
+ type,args=parseArgs(field)
+
+ type = checkSingleToken(type)
+
+ qvalue = 1.0 # Default qvalue is 1
+ for arg in args:
+ if arg[0] == 'q':
+ qvalue = float(arg[1])
+ return type,qvalue
+
+def addDefaultCharset(charsets):
+ if charsets.get('*') is None and charsets.get('iso-8859-1') is None:
+ charsets['iso-8859-1'] = 1.0
+ return charsets
+
+def addDefaultEncoding(encodings):
+ if encodings.get('*') is None and encodings.get('identity') is None:
+ # RFC doesn't specify a default value for identity, only that it
+ # "is acceptable" if not mentioned. Thus, give it a very low qvalue.
+ encodings['identity'] = .0001
+ return encodings
+
+
+def parseContentType(header):
+ # Case folding is disabled for this header, because of use of
+ # Content-Type: multipart/form-data; boundary=CaSeFuLsTuFf
+ # So, we need to explicitly .lower() the type/subtype and arg keys.
+
+ type,args = parseArgs(header)
+
+ if len(type) != 3 or type[1] != Token('/'):
+ raise ValueError, "MIME Type "+str(type)+" invalid."
+
+ args = [(kv[0].lower(), kv[1]) for kv in args]
+
+ return MimeType(type[0].lower(), type[2].lower(), tuple(args))
+
+def parseContentMD5(header):
+ try:
+ return base64.decodestring(header)
+ except Exception,e:
+ raise ValueError(e)
+
+def parseContentRange(header):
+ """Parse a content-range header into (kind, start, end, realLength).
+
+ realLength might be None if real length is not known ('*').
+ start and end might be None if start,end unspecified (for response code 416)
+ """
+ kind, other = header.strip().split()
+ if kind.lower() != "bytes":
+        raise ValueError("a range of type %r is not supported" % (kind,))
+ startend, realLength = other.split("/")
+ if startend.strip() == '*':
+ start,end=None,None
+ else:
+ start, end = map(int, startend.split("-"))
+ if realLength == "*":
+ realLength = None
+ else:
+ realLength = int(realLength)
+ return (kind, start, end, realLength)
+
+def parseExpect(field):
+ type,args=parseArgs(field)
+
+ type=parseKeyValue(type)
+ return (type[0], (lambda *args:args)(type[1], *args))
+
+def parseExpires(header):
+ # """HTTP/1.1 clients and caches MUST treat other invalid date formats,
+ # especially including the value 0, as in the past (i.e., "already expired")."""
+
+ try:
+ return parseDateTime(header)
+ except ValueError:
+ return 0
+
+def parseIfModifiedSince(header):
+ # Ancient versions of netscape and *current* versions of MSIE send
+ # If-Modified-Since: Thu, 05 Aug 2004 12:57:27 GMT; length=123
+    # which is blatantly RFC-violating and not documented anywhere
+ # except bug-trackers for web frameworks.
+
+ # So, we'll just strip off everything after a ';'.
+ return parseDateTime(header.split(';', 1)[0])
+
+def parseIfRange(headers):
+ try:
+ return ETag.parse(tokenize(headers))
+ except ValueError:
+ return parseDateTime(last(headers))
+
+def parseRange(range):
+ range = list(range)
+ if len(range) < 3 or range[1] != Token('='):
+ raise ValueError("Invalid range header format: %s" %(range,))
+
+ type=range[0]
+ if type != 'bytes':
+ raise ValueError("Unknown range unit: %s." % (type,))
+ rangeset=split(range[2:], Token(','))
+ ranges = []
+
+ for byterangespec in rangeset:
+ if len(byterangespec) != 1:
+ raise ValueError("Invalid range header format: %s" % (range,))
+ start,end=byterangespec[0].split('-')
+
+ if not start and not end:
+ raise ValueError("Invalid range header format: %s" % (range,))
+
+ if start:
+ start = int(start)
+ else:
+ start = None
+
+ if end:
+ end = int(end)
+ else:
+ end = None
+
+ if start and end and start > end:
+ raise ValueError("Invalid range header, start > end: %s" % (range,))
+ ranges.append((start,end))
+ return type,ranges
+
+def parseRetryAfter(header):
+ try:
+ # delta seconds
+ return time.time() + int(header)
+ except ValueError:
+ # or datetime
+ return parseDateTime(header)
+
+# WWW-Authenticate and Authorization
+
+def parseWWWAuthenticate(tokenized):
+ headers = []
+
+ tokenList = list(tokenized)
+
+ while tokenList:
+ scheme = tokenList.pop(0)
+ challenge = {}
+ last = None
+ kvChallenge = False
+
+ while tokenList:
+ token = tokenList.pop(0)
+ if token == Token('='):
+ kvChallenge = True
+ challenge[last] = tokenList.pop(0)
+ last = None
+
+ elif token == Token(','):
+ if kvChallenge:
+ if len(tokenList) > 1 and tokenList[1] != Token('='):
+ break
+
+ else:
+ break
+
+ else:
+ last = token
+
+ if last and scheme and not challenge and not kvChallenge:
+ challenge = last
+ last = None
+
+ headers.append((scheme, challenge))
+
+ if last and last not in (Token('='), Token(',')):
+ if headers[-1] == (scheme, challenge):
+ scheme = last
+ challenge = {}
+ headers.append((scheme, challenge))
+
+ return headers
+
+def parseAuthorization(header):
+ scheme, rest = header.split(' ', 1)
+ # this header isn't tokenized because it may eat characters
+ # in the unquoted base64 encoded credentials
+ return scheme.lower(), rest
+
+#### Header generators
+def generateAccept(accept):
+ mimeType,q = accept
+
+ out="%s/%s"%(mimeType.mediaType, mimeType.mediaSubtype)
+ if mimeType.params:
+ out+=';'+generateKeyValues(mimeType.params.iteritems())
+
+ if q != 1.0:
+ out+=(';q=%.3f' % (q,)).rstrip('0').rstrip('.')
+
+ return out
+
+def removeDefaultEncoding(seq):
+ for item in seq:
+ if item[0] != 'identity' or item[1] != .0001:
+ yield item
+
+def generateAcceptQvalue(keyvalue):
+ if keyvalue[1] == 1.0:
+ return "%s" % keyvalue[0:1]
+ else:
+ return ("%s;q=%.3f" % keyvalue).rstrip('0').rstrip('.')
+
+def parseCacheControl(kv):
+ k, v = parseKeyValue(kv)
+ if k == 'max-age' or k == 'min-fresh' or k == 's-maxage':
+ # Required integer argument
+ if v is None:
+ v = 0
+ else:
+ v = int(v)
+ elif k == 'max-stale':
+ # Optional integer argument
+ if v is not None:
+ v = int(v)
+ elif k == 'private' or k == 'no-cache':
+ # Optional list argument
+ if v is not None:
+ v = [field.strip().lower() for field in v.split(',')]
+ return k, v
+
+def generateCacheControl((k, v)):
+ if v is None:
+ return str(k)
+ else:
+ if k == 'no-cache' or k == 'private':
+ # quoted list of values
+ v = quoteString(generateList(
+ [header_case_mapping.get(name) or dashCapitalize(name) for name in v]))
+ return '%s=%s' % (k,v)
+
+def generateContentRange(tup):
+ """tup is (type, start, end, len)
+ len can be None.
+ """
+ type, start, end, len = tup
+ if len == None:
+ len = '*'
+ else:
+ len = int(len)
+ if start == None and end == None:
+ startend = '*'
+ else:
+ startend = '%d-%d' % (start, end)
+
+ return '%s %s/%s' % (type, startend, len)
+
+def generateDateTime(secSinceEpoch):
+ """Convert seconds since epoch to HTTP datetime string."""
+ year, month, day, hh, mm, ss, wd, y, z = time.gmtime(secSinceEpoch)
+ s = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % (
+ weekdayname[wd],
+ day, monthname[month], year,
+ hh, mm, ss)
+ return s
+
+def generateExpect(item):
+ if item[1][0] is None:
+ out = '%s' % (item[0],)
+ else:
+ out = '%s=%s' % (item[0], item[1][0])
+ if len(item[1]) > 1:
+ out += ';'+generateKeyValues(item[1][1:])
+ return out
+
+def generateRange(range):
+ def noneOr(s):
+ if s is None:
+ return ''
+ return s
+
+ type,ranges=range
+
+ if type != 'bytes':
+ raise ValueError("Unknown range unit: "+type+".")
+
+ return (type+'='+
+ ','.join(['%s-%s' % (noneOr(startend[0]), noneOr(startend[1]))
+ for startend in ranges]))
+
+def generateRetryAfter(when):
+ # always generate delta seconds format
+ return str(int(when - time.time()))
+
+def generateContentType(mimeType):
+ out="%s/%s"%(mimeType.mediaType, mimeType.mediaSubtype)
+ if mimeType.params:
+ out+=';'+generateKeyValues(mimeType.params.iteritems())
+ return out
+
+def generateIfRange(dateOrETag):
+ if isinstance(dateOrETag, ETag):
+ return dateOrETag.generate()
+ else:
+ return generateDateTime(dateOrETag)
+
+# WWW-Authenticate and Authorization
+
+def generateWWWAuthenticate(headers):
+ _generated = []
+ for seq in headers:
+ scheme, challenge = seq[0], seq[1]
+
+ # If we're going to parse out to something other than a dict
+ # we need to be able to generate from something other than a dict
+
+ try:
+ l = []
+ for k,v in dict(challenge).iteritems():
+ l.append("%s=%s" % (k, quoteString(v)))
+
+ _generated.append("%s %s" % (scheme, ", ".join(l)))
+ except ValueError:
+ _generated.append("%s %s" % (scheme, challenge))
+
+ return _generated
+
+def generateAuthorization(seq):
+ return [' '.join(seq)]
+
+
+####
+class ETag(object):
+ def __init__(self, tag, weak=False):
+ self.tag = str(tag)
+ self.weak = weak
+
+ def match(self, other, strongCompare):
+ # Sec 13.3.
+ # The strong comparison function: in order to be considered equal, both
+ # validators MUST be identical in every way, and both MUST NOT be weak.
+ #
+ # The weak comparison function: in order to be considered equal, both
+ # validators MUST be identical in every way, but either or both of
+ # them MAY be tagged as "weak" without affecting the result.
+
+ if not isinstance(other, ETag) or other.tag != self.tag:
+ return False
+
+ if strongCompare and (other.weak or self.weak):
+ return False
+ return True
+
+ def __eq__(self, other):
+ return isinstance(other, ETag) and other.tag == self.tag and other.weak == self.weak
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ def __repr__(self):
+ return "Etag(%r, weak=%r)" % (self.tag, self.weak)
+
+ def parse(tokens):
+ tokens=tuple(tokens)
+ if len(tokens) == 1 and not isinstance(tokens[0], Token):
+ return ETag(tokens[0])
+
+ if(len(tokens) == 3 and tokens[0] == "w"
+ and tokens[1] == Token('/')):
+ return ETag(tokens[2], weak=True)
+
+ raise ValueError("Invalid ETag.")
+
+ parse=staticmethod(parse)
+
+ def generate(self):
+ if self.weak:
+ return 'W/'+quoteString(self.tag)
+ else:
+ return quoteString(self.tag)
+
+def parseStarOrETag(tokens):
+ tokens=tuple(tokens)
+ if tokens == ('*',):
+ return '*'
+ else:
+ return ETag.parse(tokens)
+
+def generateStarOrETag(etag):
+ if etag=='*':
+ return etag
+ else:
+ return etag.generate()
+
+#### Cookies. Blech!
+class Cookie(object):
+ # __slots__ = ['name', 'value', 'path', 'domain', 'ports', 'expires', 'discard', 'secure', 'comment', 'commenturl', 'version']
+
+ def __init__(self, name, value, path=None, domain=None, ports=None, expires=None, discard=False, secure=False, comment=None, commenturl=None, version=0):
+ self.name=name
+ self.value=value
+ self.path=path
+ self.domain=domain
+ self.ports=ports
+ self.expires=expires
+ self.discard=discard
+ self.secure=secure
+ self.comment=comment
+ self.commenturl=commenturl
+ self.version=version
+
+ def __repr__(self):
+ s="Cookie(%r=%r" % (self.name, self.value)
+ if self.path is not None: s+=", path=%r" % (self.path,)
+ if self.domain is not None: s+=", domain=%r" % (self.domain,)
+ if self.ports is not None: s+=", ports=%r" % (self.ports,)
+ if self.expires is not None: s+=", expires=%r" % (self.expires,)
+ if self.secure is not False: s+=", secure=%r" % (self.secure,)
+ if self.comment is not None: s+=", comment=%r" % (self.comment,)
+ if self.commenturl is not None: s+=", commenturl=%r" % (self.commenturl,)
+ if self.version != 0: s+=", version=%r" % (self.version,)
+ s+=")"
+ return s
+
+ def __eq__(self, other):
+ return (isinstance(other, Cookie) and
+ other.path == self.path and
+ other.domain == self.domain and
+ other.ports == self.ports and
+ other.expires == self.expires and
+ other.secure == self.secure and
+ other.comment == self.comment and
+ other.commenturl == self.commenturl and
+ other.version == self.version)
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+
+def parseCookie(headers):
+ """Bleargh, the cookie spec sucks.
+ This surely needs interoperability testing.
+ There are two specs that are supported:
+ Version 0) http://wp.netscape.com/newsref/std/cookie_spec.html
+ Version 1) http://www.faqs.org/rfcs/rfc2965.html
+ """
+
+ cookies = []
+ # There can't really be multiple cookie headers according to RFC, because
+ # if multiple headers are allowed, they must be joinable with ",".
+ # Neither new RFC2965 cookies nor old netscape cookies are.
+
+ header = ';'.join(headers)
+ if header[0:8].lower() == "$version":
+ # RFC2965 cookie
+ h=tokenize([header], foldCase=False)
+ r_cookies = split(h, Token(','))
+ for r_cookie in r_cookies:
+ last_cookie = None
+ rr_cookies = split(r_cookie, Token(';'))
+ for cookie in rr_cookies:
+ nameval = tuple(split(cookie, Token('=')))
+ if len(nameval) == 2:
+ (name,), (value,) = nameval
+ else:
+ (name,), = nameval
+ value = None
+
+ name=name.lower()
+ if name == '$version':
+ continue
+ if name[0] == '$':
+ if last_cookie is not None:
+ if name == '$path':
+ last_cookie.path=value
+ elif name == '$domain':
+ last_cookie.domain=value
+ elif name == '$port':
+ if value is None:
+ last_cookie.ports = ()
+ else:
+ last_cookie.ports=tuple([int(s) for s in value.split(',')])
+ else:
+ last_cookie = Cookie(name, value, version=1)
+ cookies.append(last_cookie)
+ else:
+ # Oldstyle cookies don't do quoted strings or anything sensible.
+ # All characters are valid for names except ';' and '=', and all
+ # characters are valid for values except ';'. Spaces are stripped,
+ # however.
+ r_cookies = header.split(';')
+ for r_cookie in r_cookies:
+ name,value = r_cookie.split('=', 1)
+ name=name.strip(' \t')
+ value=value.strip(' \t')
+
+ cookies.append(Cookie(name, value))
+
+ return cookies
+
+cookie_validname = "[^"+re.escape(http_tokens+http_ctls)+"]*$"
+cookie_validname_re = re.compile(cookie_validname)
+cookie_validvalue = cookie_validname+'|"([^"]|\\\\")*"$'
+cookie_validvalue_re = re.compile(cookie_validvalue)
+
+def generateCookie(cookies):
+ # There's a fundamental problem with the two cookie specifications.
+ # They both use the "Cookie" header, and the RFC Cookie header only allows
+ # one version to be specified. Thus, when you have a collection of V0 and
+ # V1 cookies, you have to either send them all as V0 or send them all as
+ # V1.
+
+ # I choose to send them all as V1.
+
+ # You might think converting a V0 cookie to a V1 cookie would be lossless,
+ # but you'd be wrong. If you do the conversion, and a V0 parser tries to
+ # read the cookie, it will see a modified form of the cookie, in cases
+ # where quotes must be added to conform to proper V1 syntax.
+ # (as a real example: "Cookie: cartcontents=oid:94680,qty:1,auto:0,esp:y")
+
+ # However, that is what we will do, anyways. It has a high probability of
+ # breaking applications that only handle oldstyle cookies, where some other
+ # application set a newstyle cookie that is applicable over for site
+ # (or host), AND where the oldstyle cookie uses a value which is invalid
+ # syntax in a newstyle cookie.
+
+ # Also, the cookie name *cannot* be quoted in V1, so some cookies just
+ # cannot be converted at all. (e.g. "Cookie: phpAds_capAd[32]=2"). These
+    # are just discarded during conversion.
+
+ # As this is an unsolvable problem, I will pretend I can just say
+ # OH WELL, don't do that, or else upgrade your old applications to have
+ # newstyle cookie parsers.
+
+ # I will note offhandedly that there are *many* sites which send V0 cookies
+ # that are not valid V1 cookie syntax. About 20% for my cookies file.
+ # However, they do not generally mix them with V1 cookies, so this isn't
+ # an issue, at least right now. I have not tested to see how many of those
+ # webapps support RFC2965 V1 cookies. I suspect not many.
+
+ max_version = max([cookie.version for cookie in cookies])
+
+ if max_version == 0:
+ # no quoting or anything.
+ return ';'.join(["%s=%s" % (cookie.name, cookie.value) for cookie in cookies])
+ else:
+ str_cookies = ['$Version="1"']
+ for cookie in cookies:
+ if cookie.version == 0:
+ # Version 0 cookie: we make sure the name and value are valid
+ # V1 syntax.
+
+ # If they are, we use them as is. This means in *most* cases,
+ # the cookie will look literally the same on output as it did
+ # on input.
+ # If it isn't a valid name, ignore the cookie.
+ # If it isn't a valid value, quote it and hope for the best on
+ # the other side.
+
+ if cookie_validname_re.match(cookie.name) is None:
+ continue
+
+ value=cookie.value
+ if cookie_validvalue_re.match(cookie.value) is None:
+ value = quoteString(value)
+
+ str_cookies.append("%s=%s" % (cookie.name, value))
+ else:
+ # V1 cookie, nice and easy
+ str_cookies.append("%s=%s" % (cookie.name, quoteString(cookie.value)))
+
+ if cookie.path:
+ str_cookies.append("$Path=%s" % quoteString(cookie.path))
+ if cookie.domain:
+ str_cookies.append("$Domain=%s" % quoteString(cookie.domain))
+ if cookie.ports is not None:
+ if len(cookie.ports) == 0:
+ str_cookies.append("$Port")
+ else:
+ str_cookies.append("$Port=%s" % quoteString(",".join([str(x) for x in cookie.ports])))
+ return ';'.join(str_cookies)
+
+def parseSetCookie(headers):
+ setCookies = []
+ for header in headers:
+ try:
+ parts = header.split(';')
+ l = []
+
+ for part in parts:
+ namevalue = part.split('=',1)
+ if len(namevalue) == 1:
+ name=namevalue[0]
+ value=None
+ else:
+ name,value=namevalue
+ value=value.strip(' \t')
+
+ name=name.strip(' \t')
+
+ l.append((name, value))
+
+ setCookies.append(makeCookieFromList(l, True))
+ except ValueError:
+ # If we can't parse one Set-Cookie, ignore it,
+ # but not the rest of Set-Cookies.
+ pass
+ return setCookies
+
+def parseSetCookie2(toks):
+ outCookies = []
+ for cookie in [[parseKeyValue(x) for x in split(y, Token(';'))]
+ for y in split(toks, Token(','))]:
+ try:
+ outCookies.append(makeCookieFromList(cookie, False))
+ except ValueError:
+ # Again, if we can't handle one cookie -- ignore it.
+ pass
+ return outCookies
+
+def makeCookieFromList(tup, netscapeFormat):
+ name, value = tup[0]
+ if name is None or value is None:
+ raise ValueError("Cookie has missing name or value")
+ if name.startswith("$"):
+ raise ValueError("Invalid cookie name: %r, starts with '$'." % name)
+ cookie = Cookie(name, value)
+ hadMaxAge = False
+
+ for name,value in tup[1:]:
+ name = name.lower()
+
+ if value is None:
+ if name in ("discard", "secure"):
+ # Boolean attrs
+ value = True
+ elif name != "port":
+ # Can be either boolean or explicit
+ continue
+
+ if name in ("comment", "commenturl", "discard", "domain", "path", "secure"):
+ # simple cases
+ setattr(cookie, name, value)
+ elif name == "expires" and not hadMaxAge:
+ if netscapeFormat and value[0] == '"' and value[-1] == '"':
+ value = value[1:-1]
+ cookie.expires = parseDateTime(value)
+ elif name == "max-age":
+ hadMaxAge = True
+ cookie.expires = int(value) + time.time()
+ elif name == "port":
+ if value is None:
+ cookie.ports = ()
+ else:
+ if netscapeFormat and value[0] == '"' and value[-1] == '"':
+ value = value[1:-1]
+ cookie.ports = tuple([int(s) for s in value.split(',')])
+ elif name == "version":
+ cookie.version = int(value)
+
+ return cookie
+
+
+def generateSetCookie(cookies):
+ setCookies = []
+ for cookie in cookies:
+ out = ["%s=%s" % (cookie.name, cookie.value)]
+ if cookie.expires:
+ out.append("expires=%s" % generateDateTime(cookie.expires))
+ if cookie.path:
+ out.append("path=%s" % cookie.path)
+ if cookie.domain:
+ out.append("domain=%s" % cookie.domain)
+ if cookie.secure:
+ out.append("secure")
+
+ setCookies.append('; '.join(out))
+ return setCookies
+
+def generateSetCookie2(cookies):
+ setCookies = []
+ for cookie in cookies:
+ out = ["%s=%s" % (cookie.name, quoteString(cookie.value))]
+ if cookie.comment:
+ out.append("Comment=%s" % quoteString(cookie.comment))
+ if cookie.commenturl:
+ out.append("CommentURL=%s" % quoteString(cookie.commenturl))
+ if cookie.discard:
+ out.append("Discard")
+ if cookie.domain:
+ out.append("Domain=%s" % quoteString(cookie.domain))
+ if cookie.expires:
+ out.append("Max-Age=%s" % (cookie.expires - time.time()))
+ if cookie.path:
+ out.append("Path=%s" % quoteString(cookie.path))
+ if cookie.ports is not None:
+ if len(cookie.ports) == 0:
+ out.append("Port")
+ else:
+ out.append("Port=%s" % quoteString(",".join([str(x) for x in cookie.ports])))
+ if cookie.secure:
+ out.append("Secure")
+ out.append('Version="1"')
+ setCookies.append('; '.join(out))
+ return setCookies
+
+def parseDepth(depth):
+ if depth not in ("0", "1", "infinity"):
+ raise ValueError("Invalid depth header value: %s" % (depth,))
+ return depth
+
+def parseOverWrite(overwrite):
+ if overwrite == "F":
+ return False
+ elif overwrite == "T":
+ return True
+ raise ValueError("Invalid overwrite header value: %s" % (overwrite,))
+
+def generateOverWrite(overwrite):
+ if overwrite:
+ return "T"
+ else:
+ return "F"
+
+##### Random stuff that looks useful.
+# def sortMimeQuality(s):
+# def sorter(item1, item2):
+# if item1[0] == '*':
+# if item2[0] == '*':
+# return 0
+
+
+# def sortQuality(s):
+# def sorter(item1, item2):
+# if item1[1] < item2[1]:
+# return -1
+# if item1[1] < item2[1]:
+# return 1
+# if item1[0] == item2[0]:
+# return 0
+
+
+# def getMimeQuality(mimeType, accepts):
+# type,args = parseArgs(mimeType)
+# type=type.split(Token('/'))
+# if len(type) != 2:
+# raise ValueError, "MIME Type "+s+" invalid."
+
+# for accept in accepts:
+# accept,acceptQual=accept
+# acceptType=accept[0:1]
+# acceptArgs=accept[2]
+
+# if ((acceptType == type or acceptType == (type[0],'*') or acceptType==('*','*')) and
+# (args == acceptArgs or len(acceptArgs) == 0)):
+# return acceptQual
+
+# def getQuality(type, accepts):
+# qual = accepts.get(type)
+# if qual is not None:
+# return qual
+
+# return accepts.get('*')
+
+# Headers object
+class __RecalcNeeded(object):
+ def __repr__(self):
+ return "<RecalcNeeded>"
+
+_RecalcNeeded = __RecalcNeeded()
+
+class Headers(object):
+ """This class stores the HTTP headers as both a parsed representation and
+ the raw string representation. It converts between the two on demand."""
+
+ def __init__(self, headers=None, rawHeaders=None, handler=DefaultHTTPHandler):
+ self._raw_headers = {}
+ self._headers = {}
+ self.handler = handler
+ if headers is not None:
+ for key, value in headers.iteritems():
+ self.setHeader(key, value)
+ if rawHeaders is not None:
+ for key, value in rawHeaders.iteritems():
+ self.setRawHeaders(key, value)
+
+ def _setRawHeaders(self, headers):
+ self._raw_headers = headers
+ self._headers = {}
+
+ def _toParsed(self, name):
+ r = self._raw_headers.get(name, None)
+ h = self.handler.parse(name, r)
+ if h is not None:
+ self._headers[name] = h
+ return h
+
+ def _toRaw(self, name):
+ h = self._headers.get(name, None)
+ r = self.handler.generate(name, h)
+ if r is not None:
+ self._raw_headers[name] = r
+ return r
+
+ def hasHeader(self, name):
+ """Does a header with the given name exist?"""
+ name=name.lower()
+ return self._raw_headers.has_key(name)
+
+ def getRawHeaders(self, name, default=None):
+ """Returns a list of headers matching the given name as the raw string given."""
+
+ name=name.lower()
+ raw_header = self._raw_headers.get(name, default)
+ if raw_header is not _RecalcNeeded:
+ return raw_header
+
+ return self._toRaw(name)
+
+ def getHeader(self, name, default=None):
+        """Returns the parsed representation of the given header.
+ The exact form of the return value depends on the header in question.
+
+ If no parser for the header exists, raise ValueError.
+
+ If the header doesn't exist, return default (or None if not specified)
+ """
+ name=name.lower()
+ parsed = self._headers.get(name, default)
+ if parsed is not _RecalcNeeded:
+ return parsed
+ return self._toParsed(name)
+
+ def setRawHeaders(self, name, value):
+ """Sets the raw representation of the given header.
+ Value should be a list of strings, each being one header of the
+ given name.
+ """
+ name=name.lower()
+ self._raw_headers[name] = value
+ self._headers[name] = _RecalcNeeded
+
+ def setHeader(self, name, value):
+ """Sets the parsed representation of the given header.
+ Value should be a list of objects whose exact form depends
+ on the header in question.
+ """
+ name=name.lower()
+ self._raw_headers[name] = _RecalcNeeded
+ self._headers[name] = value
+
+ def addRawHeader(self, name, value):
+ """
+ Add a raw value to a header that may or may not already exist.
+ If it exists, add it as a separate header to output; do not
+ replace anything.
+ """
+ name=name.lower()
+ raw_header = self._raw_headers.get(name)
+ if raw_header is None:
+ # No header yet
+ raw_header = []
+ self._raw_headers[name] = raw_header
+ elif raw_header is _RecalcNeeded:
+ raw_header = self._toRaw(name)
+
+ raw_header.append(value)
+ self._headers[name] = _RecalcNeeded
+
+ def removeHeader(self, name):
+ """Removes the header named."""
+
+ name=name.lower()
+ if self._raw_headers.has_key(name):
+ del self._raw_headers[name]
+ del self._headers[name]
+
+ def __repr__(self):
+ return '<Headers: Raw: %s Parsed: %s>'% (self._raw_headers, self._headers)
+
+ def canonicalNameCaps(self, name):
+ """Return the name with the canonical capitalization, if known,
+ otherwise, Caps-After-Dashes"""
+ return header_case_mapping.get(name) or dashCapitalize(name)
+
+ def getAllRawHeaders(self):
+ """Return an iterator of key,value pairs of all headers
+ contained in this object, as strings. The keys are capitalized
+ in canonical capitalization."""
+ for k,v in self._raw_headers.iteritems():
+ if v is _RecalcNeeded:
+ v = self._toRaw(k)
+ yield self.canonicalNameCaps(k), v
+
+ def makeImmutable(self):
+ """Make this header set immutable. All mutating operations will
+ raise an exception."""
+ self.setHeader = self.setRawHeaders = self.removeHeader = self._mutateRaise
+
+ def _mutateRaise(self, *args):
+ raise AttributeError("This header object is immutable as the headers have already been sent.")
+
+
+"""The following dicts are all mappings of header to list of operations
+ to perform. The first operation should generally be 'tokenize' if the
+ header can be parsed according to the normal tokenization rules. If
+ it cannot, generally the first thing you want to do is take only the
+ last instance of the header (in case it was sent multiple times, which
+ is strictly an error, but we're nice.).
+ """
+
+iteritems = lambda x: x.iteritems()
+
+
+parser_general_headers = {
+ 'Cache-Control':(tokenize, listParser(parseCacheControl), dict),
+ 'Connection':(tokenize,filterTokens),
+ 'Date':(last,parseDateTime),
+# 'Pragma':tokenize
+# 'Trailer':tokenize
+ 'Transfer-Encoding':(tokenize,filterTokens),
+# 'Upgrade':tokenize
+# 'Via':tokenize,stripComment
+# 'Warning':tokenize
+}
+
+generator_general_headers = {
+ 'Cache-Control':(iteritems, listGenerator(generateCacheControl), singleHeader),
+ 'Connection':(generateList,singleHeader),
+ 'Date':(generateDateTime,singleHeader),
+# 'Pragma':
+# 'Trailer':
+ 'Transfer-Encoding':(generateList,singleHeader),
+# 'Upgrade':
+# 'Via':
+# 'Warning':
+}
+
+parser_request_headers = {
+ 'Accept': (tokenize, listParser(parseAccept), dict),
+ 'Accept-Charset': (tokenize, listParser(parseAcceptQvalue), dict, addDefaultCharset),
+ 'Accept-Encoding':(tokenize, listParser(parseAcceptQvalue), dict, addDefaultEncoding),
+ 'Accept-Language':(tokenize, listParser(parseAcceptQvalue), dict),
+ 'Authorization': (last, parseAuthorization),
+ 'Cookie':(parseCookie,),
+ 'Expect':(tokenize, listParser(parseExpect), dict),
+ 'From':(last,),
+ 'Host':(last,),
+ 'If-Match':(tokenize, listParser(parseStarOrETag), list),
+ 'If-Modified-Since':(last, parseIfModifiedSince),
+ 'If-None-Match':(tokenize, listParser(parseStarOrETag), list),
+ 'If-Range':(parseIfRange,),
+ 'If-Unmodified-Since':(last,parseDateTime),
+ 'Max-Forwards':(last,int),
+# 'Proxy-Authorization':str, # what is "credentials"
+ 'Range':(tokenize, parseRange),
+ 'Referer':(last,str), # TODO: URI object?
+ 'TE':(tokenize, listParser(parseAcceptQvalue), dict),
+ 'User-Agent':(last,str),
+}
+
+generator_request_headers = {
+ 'Accept': (iteritems,listGenerator(generateAccept),singleHeader),
+ 'Accept-Charset': (iteritems, listGenerator(generateAcceptQvalue),singleHeader),
+ 'Accept-Encoding': (iteritems, removeDefaultEncoding, listGenerator(generateAcceptQvalue),singleHeader),
+ 'Accept-Language': (iteritems, listGenerator(generateAcceptQvalue),singleHeader),
+ 'Authorization': (generateAuthorization,), # what is "credentials"
+ 'Cookie':(generateCookie,singleHeader),
+ 'Expect':(iteritems, listGenerator(generateExpect), singleHeader),
+ 'From':(str,singleHeader),
+ 'Host':(str,singleHeader),
+ 'If-Match':(listGenerator(generateStarOrETag), singleHeader),
+ 'If-Modified-Since':(generateDateTime,singleHeader),
+ 'If-None-Match':(listGenerator(generateStarOrETag), singleHeader),
+ 'If-Range':(generateIfRange, singleHeader),
+ 'If-Unmodified-Since':(generateDateTime,singleHeader),
+ 'Max-Forwards':(str, singleHeader),
+# 'Proxy-Authorization':str, # what is "credentials"
+ 'Range':(generateRange,singleHeader),
+ 'Referer':(str,singleHeader),
+ 'TE': (iteritems, listGenerator(generateAcceptQvalue),singleHeader),
+ 'User-Agent':(str,singleHeader),
+}
+
+parser_response_headers = {
+ 'Accept-Ranges':(tokenize, filterTokens),
+ 'Age':(last,int),
+ 'ETag':(tokenize, ETag.parse),
+ 'Location':(last,), # TODO: URI object?
+# 'Proxy-Authenticate'
+ 'Retry-After':(last, parseRetryAfter),
+ 'Server':(last,),
+ 'Set-Cookie':(parseSetCookie,),
+ 'Set-Cookie2':(tokenize, parseSetCookie2),
+ 'Vary':(tokenize, filterTokens),
+ 'WWW-Authenticate': (lambda h: tokenize(h, foldCase=False),
+ parseWWWAuthenticate,)
+}
+
+generator_response_headers = {
+ 'Accept-Ranges':(generateList, singleHeader),
+ 'Age':(str, singleHeader),
+ 'ETag':(ETag.generate, singleHeader),
+ 'Location':(str, singleHeader),
+# 'Proxy-Authenticate'
+ 'Retry-After':(generateRetryAfter, singleHeader),
+ 'Server':(str, singleHeader),
+ 'Set-Cookie':(generateSetCookie,),
+ 'Set-Cookie2':(generateSetCookie2,),
+ 'Vary':(generateList, singleHeader),
+ 'WWW-Authenticate':(generateWWWAuthenticate,)
+}
+
+parser_entity_headers = {
+ 'Allow':(lambda str:tokenize(str, foldCase=False), filterTokens),
+ 'Content-Encoding':(tokenize, filterTokens),
+ 'Content-Language':(tokenize, filterTokens),
+ 'Content-Length':(last, int),
+ 'Content-Location':(last,), # TODO: URI object?
+ 'Content-MD5':(last, parseContentMD5),
+ 'Content-Range':(last, parseContentRange),
+ 'Content-Type':(lambda str:tokenize(str, foldCase=False), parseContentType),
+ 'Expires':(last, parseExpires),
+ 'Last-Modified':(last, parseDateTime),
+ }
+
+generator_entity_headers = {
+ 'Allow':(generateList, singleHeader),
+ 'Content-Encoding':(generateList, singleHeader),
+ 'Content-Language':(generateList, singleHeader),
+ 'Content-Length':(str, singleHeader),
+ 'Content-Location':(str, singleHeader),
+ 'Content-MD5':(base64.encodestring, lambda x: x.strip("\n"), singleHeader),
+ 'Content-Range':(generateContentRange, singleHeader),
+ 'Content-Type':(generateContentType, singleHeader),
+ 'Expires':(generateDateTime, singleHeader),
+ 'Last-Modified':(generateDateTime, singleHeader),
+ }
+
+DefaultHTTPHandler.updateParsers(parser_general_headers)
+DefaultHTTPHandler.updateParsers(parser_request_headers)
+DefaultHTTPHandler.updateParsers(parser_response_headers)
+DefaultHTTPHandler.updateParsers(parser_entity_headers)
+
+DefaultHTTPHandler.updateGenerators(generator_general_headers)
+DefaultHTTPHandler.updateGenerators(generator_request_headers)
+DefaultHTTPHandler.updateGenerators(generator_response_headers)
+DefaultHTTPHandler.updateGenerators(generator_entity_headers)
+
+
+# casemappingify(DefaultHTTPParsers)
+# casemappingify(DefaultHTTPGenerators)
+
+# lowerify(DefaultHTTPParsers)
+# lowerify(DefaultHTTPGenerators)
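Each entry in the parser and generator tables above maps a header name to a
tuple of callables applied left to right: the raw header value is fed to the
first one and every later step consumes the previous step's output. A minimal
sketch of that dispatch, for illustration only, assuming one of the parser
dicts defined above; the actual driver code in http_headers.py may differ:

    def apply_parser_chain(parsers, name, raw_value):
        # run a raw header value through the operation chain registered
        # for `name`, e.g. (tokenize, listParser(parseCacheControl), dict)
        value = raw_value
        for op in parsers.get(name, ()):
            value = op(value)
        return value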
--- a/web/httpcache.py Tue Apr 13 19:22:46 2010 +0200
+++ b/web/httpcache.py Tue Apr 13 19:43:51 2010 +0200
@@ -43,6 +43,8 @@
"""
def etag(self):
+ if self.req.cnx is None:
+ return self.view.__regid__
return self.view.__regid__ + '/' + ','.join(sorted(self.req.user.groups))
def max_age(self):
@@ -131,8 +133,5 @@
# max-age=0 to actually force revalidation when needed
viewmod.View.cache_max_age = 0
-
-viewmod.EntityView.http_cache_manager = EntityHTTPCacheManager
-
viewmod.StartupView.http_cache_manager = MaxAgeHTTPCacheManager
viewmod.StartupView.cache_max_age = 60*60*2 # stay in http cache for 2 hours by default
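The httpcache.py change above lets etag() work before any connection is
established: without req.cnx there are no user groups to mix into the tag, so
it degrades to the bare view identifier. Schematically, for a hypothetical
view whose __regid__ is 'primary' and a user in the managers and users groups:

    etag_with_cnx = 'primary/' + ','.join(sorted(['managers', 'users']))
    # req.cnx is None: only the view id
    etag_without_cnx = 'primary'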
--- a/web/request.py Tue Apr 13 19:22:46 2010 +0200
+++ b/web/request.py Tue Apr 13 19:43:51 2010 +0200
@@ -31,6 +31,7 @@
from cubicweb.view import STRICT_DOCTYPE, TRANSITIONAL_DOCTYPE_NOEXT
from cubicweb.web import (INTERNAL_FIELD_VALUE, LOGGER, NothingToEdit,
RequestError, StatusResponse)
+from cubicweb.web.http_headers import Headers
_MARKER = object()
@@ -88,6 +89,8 @@
self.pageid = None
self.datadir_url = self._datadir_url()
self._set_pageid()
+ # prepare output header
+ self.headers_out = Headers()
def _set_pageid(self):
"""initialize self.pageid
@@ -119,11 +122,11 @@
self.set_page_data('rql_varmaker', varmaker)
return varmaker
- def set_connection(self, cnx, user=None):
+ def set_session(self, session, user=None):
"""method called by the session handler when the user is authenticated
or an anonymous connection is open
"""
- super(CubicWebRequestBase, self).set_connection(cnx, user)
+ super(CubicWebRequestBase, self).set_session(session, user)
# set request language
vreg = self.vreg
if self.user:
@@ -148,8 +151,9 @@
gettext, self.pgettext = self.translations[lang]
self._ = self.__ = gettext
self.lang = lang
- self.cnx.set_session_props(lang=lang)
self.debug('request language: %s', lang)
+ if self.cnx is not None:
+ self.cnx.set_session_props(lang=lang)
# input form parameters management ########################################
@@ -233,7 +237,7 @@
@property
def message(self):
try:
- return self.get_session_data(self._msgid, default=u'', pop=True)
+ return self.session.data.pop(self._msgid, '')
except AttributeError:
try:
return self._msg
@@ -254,17 +258,17 @@
def set_redirect_message(self, msg):
assert isinstance(msg, unicode)
msgid = self.redirect_message_id()
- self.set_session_data(msgid, msg)
+ self.session.data[msgid] = msg
return msgid
def append_to_redirect_message(self, msg):
msgid = self.redirect_message_id()
- currentmsg = self.get_session_data(msgid)
+ currentmsg = self.session.data.get(msgid)
if currentmsg is not None:
currentmsg = '%s %s' % (currentmsg, msg)
else:
currentmsg = msg
- self.set_session_data(msgid, currentmsg)
+ self.session.data[msgid] = currentmsg
return msgid
def reset_message(self):
@@ -277,7 +281,7 @@
"""update the current search state"""
searchstate = self.form.get('__mode')
if not searchstate and self.cnx is not None:
- searchstate = self.get_session_data('search_state', 'normal')
+ searchstate = self.session.data.get('search_state', 'normal')
self.set_search_state(searchstate)
def set_search_state(self, searchstate):
@@ -288,7 +292,7 @@
self.search_state = ('linksearch', searchstate.split(':'))
assert len(self.search_state[-1]) == 4
if self.cnx is not None:
- self.set_session_data('search_state', searchstate)
+ self.session.data['search_state'] = searchstate
def match_search_state(self, rset):
"""when searching an entity to create a relation, return True if entities in
@@ -305,12 +309,12 @@
def update_breadcrumbs(self):
"""stores the last visisted page in session data"""
- searchstate = self.get_session_data('search_state')
+ searchstate = self.session.data.get('search_state')
if searchstate == 'normal':
- breadcrumbs = self.get_session_data('breadcrumbs', None)
+ breadcrumbs = self.session.data.get('breadcrumbs')
if breadcrumbs is None:
breadcrumbs = SizeConstrainedList(10)
- self.set_session_data('breadcrumbs', breadcrumbs)
+ self.session.data['breadcrumbs'] = breadcrumbs
breadcrumbs.append(self.url())
else:
url = self.url()
@@ -318,7 +322,7 @@
breadcrumbs.append(url)
def last_visited_page(self):
- breadcrumbs = self.get_session_data('breadcrumbs', None)
+ breadcrumbs = self.session.data.get('breadcrumbs')
if breadcrumbs:
return breadcrumbs.pop()
return self.base_url()
@@ -365,11 +369,10 @@
self.del_page_data(cbname)
def clear_user_callbacks(self):
- if self.cnx is not None:
- sessdata = self.session_data()
- callbacks = [key for key in sessdata if key.startswith('cb_')]
- for callback in callbacks:
- self.del_session_data(callback)
+ if self.session is not None: # XXX
+ for key in self.session.data.keys():
+ if key.startswith('cb_'):
+ del self.session.data[key]
# web edition helpers #####################################################
@@ -435,13 +438,13 @@
This is needed when the edition is completed (whether it's validated
or cancelled)
"""
- self.del_session_data('pending_insert')
- self.del_session_data('pending_delete')
+ self.session.data.pop('pending_insert', None)
+ self.session.data.pop('pending_delete', None)
def cancel_edition(self, errorurl):
"""remove pending operations and `errorurl`'s specific stored data
"""
- self.del_session_data(errorurl)
+ self.session.data.pop(errorurl, None)
self.remove_pending_operations()
# high level methods for HTTP headers management ##########################
@@ -657,17 +660,26 @@
"""
raise NotImplementedError()
- def set_header(self, header, value):
+ def set_header(self, header, value, raw=True):
"""set an output HTTP header"""
- raise NotImplementedError()
+ if raw:
+ # adding the header as an encoded string is important, otherwise page
+ # content would be converted back to unicode which, besides being
+ # inefficient, may cause decoding problems (e.g. when downloading a file)
+ self.headers_out.setRawHeaders(header, [str(value)])
+ else:
+ self.headers_out.setHeader(header, value)
def add_header(self, header, value):
"""add an output HTTP header"""
- raise NotImplementedError()
+ # adding the header as an encoded string is important, otherwise page
+ # content would be converted back to unicode which, besides being
+ # inefficient, may cause decoding problems (e.g. when downloading a file)
+ self.headers_out.addRawHeader(header, str(value))
def remove_header(self, header):
"""remove an output HTTP header"""
- raise NotImplementedError()
+ self.headers_out.removeHeader(header)
def header_authorization(self):
"""returns a couple (auth-type, auth-value)"""
@@ -733,26 +745,29 @@
def get_page_data(self, key, default=None):
"""return value associated to `key` in curernt page data"""
- page_data = self.cnx.get_session_data(self.pageid, {})
+ page_data = self.session.data.get(self.pageid)
+ if page_data is None:
+ return default
return page_data.get(key, default)
def set_page_data(self, key, value):
"""set value associated to `key` in current page data"""
self.html_headers.add_unload_pagedata()
- page_data = self.cnx.get_session_data(self.pageid, {})
+ page_data = self.session.data.setdefault(self.pageid, {})
page_data[key] = value
- return self.cnx.set_session_data(self.pageid, page_data)
+ self.session.data[self.pageid] = page_data
def del_page_data(self, key=None):
"""remove value associated to `key` in current page data
if `key` is None, all page data will be cleared
"""
if key is None:
- self.cnx.del_session_data(self.pageid)
+ self.session.data.pop(self.pageid, None)
else:
- page_data = self.cnx.get_session_data(self.pageid, {})
- page_data.pop(key, None)
- self.cnx.set_session_data(self.pageid, page_data)
+ try:
+ del self.session.data[self.pageid][key]
+ except KeyError:
+ pass
# user-agent detection ####################################################
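The request.py hunks above back set_header/add_header/remove_header with a
Headers instance (headers_out) instead of leaving them abstract. A small usage
sketch of the resulting API on a request object req; the header names and
values below are made up for illustration:

    # default raw=True stores the value as an encoded string, avoiding a
    # later unicode round trip when the response is written
    req.set_header('Content-Disposition', 'attachment; filename=report.pdf')
    # raw=False stores a parsed value, serialized later by the http_headers
    # generators (assuming the Cache-Control generator accepts a dict, which
    # is what its parser produces)
    req.set_header('Cache-Control', {'max-age': 300}, raw=False)
    req.add_header('X-CubicWeb-PageId', req.pageid)
    req.remove_header('Pragma')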
Binary file web/test/data/sample1.pdf has changed
--- a/web/test/data/sample1.xml Tue Apr 13 19:22:46 2010 +0200
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,138 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd" [
- <!ATTLIST html xmlns:cubicweb CDATA #FIXED 'http://www.logilab.org/2008/cubicweb' >
-
-<!ENTITY % coreattrs
- "id ID #IMPLIED
- class CDATA #IMPLIED
- style CDATA #IMPLIED
- title CDATA #IMPLIED
-
- cubicweb:sortvalue CDATA #IMPLIED
- cubicweb:target CDATA #IMPLIED
- cubicweb:limit CDATA #IMPLIED
- cubicweb:type CDATA #IMPLIED
- cubicweb:loadtype CDATA #IMPLIED
- cubicweb:wdgtype CDATA #IMPLIED
- cubicweb:initfunc CDATA #IMPLIED
- cubicweb:inputid CDATA #IMPLIED
- cubicweb:tindex CDATA #IMPLIED
- cubicweb:inputname CDATA #IMPLIED
- cubicweb:value CDATA #IMPLIED
- cubicweb:required CDATA #IMPLIED
- cubicweb:accesskey CDATA #IMPLIED
- cubicweb:maxlength CDATA #IMPLIED
- cubicweb:variables CDATA #IMPLIED
- cubicweb:displayactions CDATA #IMPLIED
- cubicweb:fallbackvid CDATA #IMPLIED
- cubicweb:fname CDATA #IMPLIED
- cubicweb:vid CDATA #IMPLIED
- cubicweb:rql CDATA #IMPLIED
- cubicweb:actualrql CDATA #IMPLIED
- cubicweb:rooteid CDATA #IMPLIED
- cubicweb:dataurl CDATA #IMPLIED
- cubicweb:size CDATA #IMPLIED
- cubicweb:tlunit CDATA #IMPLIED
- cubicweb:loadurl CDATA #IMPLIED
- cubicweb:uselabel CDATA #IMPLIED
- cubicweb:facetargs CDATA #IMPLIED
- cubicweb:facetName CDATA #IMPLIED
- "> ] >
-
-<html xmlns="http://www.w3.org/1999/xhtml" xmlns:cubicweb="http://www.logilab.org/2008/cubicweb" xml:lang="fr" lang="fr">
-<head>
-<base href="http://crater:8888/"></base><meta http-equiv="content-type" content="application/xhtml+xml; charset=UTF-8"/>
-<meta name="ROBOTS" content="NOINDEX" />
-<link rel="shortcut icon" href="http://crater:8888/data0ea37b3fea72bf4b2fde96c64e51f626/favicon.ico"/>
-<link rel="alternate" type="application/rss+xml" title="RSS feed" href="http://crater:8888/project/Comet/0.2.0?vid=rss"/>
-<title>Comet 0.2.0 (unset title)</title>
-<script type="text/javascript"><!--//--><![CDATA[//><!--
-pageid = "0499a5d7add13919a458db30006d9832";
-//--><!]]></script>
-<link rel="stylesheet" type="text/css" media="all" href="http://crater:8888/data0ea37b3fea72bf4b2fde96c64e51f626/cubes.tracker.css"/>
-<link rel="stylesheet" type="text/css" media="print" href="http://crater:8888/data0ea37b3fea72bf4b2fde96c64e51f626/cubicweb.print.css"/>
-<link rel="stylesheet" type="text/css" media="all" href="http://crater:8888/data0ea37b3fea72bf4b2fde96c64e51f626/cubicweb.login.css"/>
-<script type="text/javascript" src="http://crater:8888/data0ea37b3fea72bf4b2fde96c64e51f626/jquery.js"></script>
-<script type="text/javascript" src="http://crater:8888/data0ea37b3fea72bf4b2fde96c64e51f626/jquery.corner.js"></script>
-<script type="text/javascript" src="http://crater:8888/data0ea37b3fea72bf4b2fde96c64e51f626/jquery.json.js"></script>
-<script type="text/javascript" src="http://crater:8888/data0ea37b3fea72bf4b2fde96c64e51f626/cubicweb.compat.js"></script>
-<script type="text/javascript" src="http://crater:8888/data0ea37b3fea72bf4b2fde96c64e51f626/cubicweb.python.js"></script>
-<script type="text/javascript" src="http://crater:8888/data0ea37b3fea72bf4b2fde96c64e51f626/cubicweb.htmlhelpers.js"></script>
-<script type="text/javascript" src="http://crater:8888/data0ea37b3fea72bf4b2fde96c64e51f626/cubicweb.ajax.js"></script>
-<script type="text/javascript" src="http://crater:8888/data0ea37b3fea72bf4b2fde96c64e51f626/cubicweb.bookmarks.js"></script>
-<script type="text/javascript">
-jQuery(document).ready(function () {
- jQuery("#__login:visible").focus()
- });
-</script>
-</head>
-
-<body>
-<table id="header"><tr>
-<td id="firstcolumn"><a href="http://crater:8888/"><img class="logo" src="http://crater:8888/data0ea37b3fea72bf4b2fde96c64e51f626/logo.png" alt="logo"/></a></td>
-<td id="headtext"><span id="appliName"><a href="http://crater:8888/">unset title</a></span><span class="pathbar"> > <a href="http://crater:8888/Project">projets</a> > <a href="http://crater:8888/project/Comet" title="">Comet</a> > 
-0.2.0</span></td><td>
-anonyme [<a class="logout" href="javascript: popupLoginBox();">s'authentifier</a>]</td><td><a href="http://crater:8888/doc/main" class="help" title="aide"> </a></td><td id="lastcolumn"></td>
-</tr></table>
-<div id="popupLoginBox" class="hidden"><div id="loginContent">
-<form method="post" action="http://crater:8888/project/Comet/0.2.0?vid=statussheet" id="login_form">
-<table>
-<tr>
-<td><label for="__login">identifiant</label></td><td><input name="__login" id="__login" class="data" type="text" /></td></tr><tr>
-<td><label for="__password" >mot de passe</label></td><td><input name="__password" id="__password" class="data" type="password" /></td>
-</tr><tr>
-<td> </td><td><input type="submit" class="loginButton right" value="s'identifier" />
-</td></tr>
-</table>
-</form>
-</div></div>
-
- <div id="stateheader">
- </div>
- <div id="page"><table width="100%" border="0" id="mainLayout"><tr>
-<td class="navcol"><div class="navboxes">
-<div class="searchBoxFrame" id="search_box"><div class="boxTitle"><span><span onclick="javascript: toggleVisibility('rqlinput')">rechercher</span></span></div><div class="boxContent">
-<form action="http://crater:8888/view">
-<table id="tsearch"><tr><td>
-<input id="norql" type="text" accesskey="q" tabindex="1" title="search text" value="" name="rql" />
-<input type="hidden" name="__fromsearchbox" value="1" />
-<input type="hidden" name="subvid" value="tsearch" />
-</td><td>
-<input tabindex="2" type="submit" id="rqlboxsubmit" class="rqlsubmit" value="" />
-</td></tr></table>
-</form></div>
-<div class="shadow"> </div></div><div class="greyBoxFrame" id="edit_box"><div class="boxTitle"><span>actions - version</span></div><div class="boxContent">
-<ul class="boxListing"><li class="boxMainactions"><a href="http://crater:8888/project/Comet/0.2.0" title="keyword: view">voir</a></li>
-<li class="boxMainactions"><a href="http://crater:8888/project/Comet/0.2.0?vid=edition" title="keyword: edit">modifier</a></li>
-<li class="boxMainactions"><a title="aucune transition possible">état: <i>en cours</i></a></li><li><a href="javascript: toggleVisibility('boxmenu_ajouter')" class="boxMenu">ajouter</a><ul id="boxmenu_ajouter" class="hidden"><li class="boxItem"><a href="http://crater:8888/project/Comet/0.2.0?etype=Ticket&__linkto=done_in%3A789%3Asubject&__redirectvid=statussheet&__redirectpath=project%2FComet%2F0.2.0&vid=creation" title="">ticket</a></li>
-<li class="boxItem"><a href="http://crater:8888/project/Comet/0.2.0?etype=Ticket&__linkto=appeared_in%3A789%3Asubject&__redirectvid=statussheet&__redirectpath=project%2FComet%2F0.2.0&vid=creation" title="">signaler une anomalie</a></li>
-</ul></li><li><a href="javascript: toggleVisibility('boxmenu_plus_dactions')" class="boxMenu">plus d'actions</a><ul id="boxmenu_plus_dactions" class="hidden"><li class="boxMoreactions"><a href="http://crater:8888/project/Comet/0.2.0?vid=security" title="keyword: managepermission">gestion des permissions</a></li>
-<li class="boxMoreactions"><a href="http://crater:8888/project/Comet/0.2.0?vid=deleteconf" title="keyword: delete">supprimer</a></li>
-<li class="boxMoreactions"><a href="http://crater:8888/project/Comet/0.2.0?vid=copy" title="keyword: copy">copier</a></li>
-<li class="boxMoreactions"><a href="http://crater:8888/view?rql=Any%20X%20WHERE%20X%20version_of%20P%2C%20P%20name%20%22Comet%22%2C%20X%20num%20%220.2.0%22%2C%20X%20is%20Version&template=pdf-main-template" title="keyword: pdfexport">export pdf</a></li>
-<li class="boxMoreactions"><a href="http://crater:8888/project/Comet/0.2.0?vid=document" title="keyword: pvrestexport">export ReST</a></li>
-</ul></li></ul>
-</div>
-<div class="shadow"> </div></div><div class="boxFrame" id="bookmarks_box"><div class="boxTitle"><span>signets</span></div><div class="boxContent">
-<ul class="sideBox"><li><a href="javascript: toggleVisibility('boxmenu_gérer_les_signets')" class="boxMenu">gérer les signets</a><ul id="boxmenu_gérer_les_signets" class="hidden"><li class="boxManage"><a href="http://crater:8888/add/Bookmark?__linkto=bookmarked_by%3A5%3Asubject&path=project%2FComet%2F0.2.0%3Fvid%3Dstatussheet" title="keyword: bookmark">poser un signet ici</a></li>
-<li class="boxManage"><a href="http://crater:8888/cwuser/admin?target=subject&vid=xaddrelation&rtype=bookmarked_by" title="">récupérer des signets existants</a></li>
-</ul></li></ul>
-</div>
-<div class="shadow"> </div></div></div></td>
-<td id="contentcol">
-<div id="rqlinput" class="hidden">
- <form action="http://crater:8888/view">
-<fieldset>
-<input type="text" id="rql" name="rql" value="Any X WHERE X version_of P, P name "Comet", X num "0.2.0", X is Version" title="texte à rechercher ou requête RQL" tabindex="3" accesskey="q" class="searchField" />
-<input type="submit" value="" class="rqlsubmit" tabindex="4" />
-</fieldset>
-</form></div><div id="appMsg" onclick="javascript: toggleVisibility('appMsg')" class="hidden">
-</div><div id="pageContent">
-<div id="contentmain">
-<h2>Fiche de statut</h2><table class="listing"><tr><th rowspan="2">Projets</th><th colspan="2">Version</th><th rowspan="2">Parent</th><th rowspan="2">Tickets ouverts</th><th rowspan="2">Tickets implémentés</th><th rowspan="2">Statut</th></tr><tr><th>actuelle</th><th>ciblée</th></tr><tr><td title=""><a href="http://crater:8888/project/Developper%20manual" title="">Developper manual</a></td><td><a href="http://crater:8888/project/Developper%20manual/0.1.0">0.1.0</a></td><td><a href="http://crater:8888/project/Developper%20manual/0.2.0">0.2.0</a></td><td><a href="http://crater:8888/project/Comet%20documentation" title="">Comet documentation</a></td><td><div title="detail a bit configuration steps"><a href="http://crater:8888/ticket/803">T 803</a></div></td><td></td><td>en cours</td></tr><tr><td title=""><a href="http://crater:8888/project/User%20manual" title="">User manual</a></td><td><a href="http://crater:8888/project/User%20manual/0.2.0">0.2.0</a></td><td><a href="http://crater:8888/project/User%20manual/0.2.0">0.2.0</a></td><td><a href="http://crater:8888/project/Comet%20documentation" title="">Comet documentation</a></td><td></td><td><div title="write a tutorial"><a href="http://crater:8888/ticket/801">T 801</a></div></td><td>livrée</td></tr><tr><td title=""><a href="http://crater:8888/project/Comet%20documentation" title="">Comet documentation</a></td><td><a href="http://crater:8888/project/Comet%20documentation/0.1.0">0.1.0</a></td><td><a href="http://crater:8888/project/Comet%20documentation/0.2.0">0.2.0</a></td><td><a href="http://crater:8888/project/Comet" title="">Comet</a></td><td></td><td></td><td>en cours</td></tr><tr><td title=""><a href="http://crater:8888/project/Lgc" title="">Lgc</a></td><td><a href="http://crater:8888/project/Lgc/0.2.0">0.2.0</a></td><td><a href="http://crater:8888/project/Lgc/0.2.0">0.2.0</a></td><td><a href="http://crater:8888/project/Tracker" title="">Tracker</a></td><td></td><td><div title="add support for xhtml -> pdf conversion"><a href="http://crater:8888/ticket/793">T 793</a></div></td><td>livrée</td></tr><tr><td title=""><a href="http://crater:8888/project/Tracker" title="">Tracker</a></td><td><a href="http://crater:8888/project/Tracker/0.1.0">0.1.0</a></td><td><a href="http://crater:8888/project/Tracker/0.2.0">0.2.0</a></td><td><a href="http://crater:8888/project/Confman" title="">Confman</a></td><td><div title="extract core from forge cube"><a href="http://crater:8888/ticket/795">T 795</a></div></td><td></td><td>en cours</td></tr><tr><td title=""><a href="http://crater:8888/project/Confman" title="">Confman</a></td><td><a href="http://crater:8888/project/Confman/0.1.0">0.1.0</a></td><td><a href="http://crater:8888/project/Confman/0.2.0">0.2.0</a></td><td><a href="http://crater:8888/project/Comet" title="">Comet</a></td><td><div title="have a version status sheet"><a href="http://crater:8888/ticket/797">T 797</a></div></td><td></td><td>en cours</td></tr></table></div>
-</div>
-</td>
-</tr></table></div>
-<div class="footer"><a href="http://crater:8888/changelog">nouveautés</a> | <a href="http://crater:8888/doc/about">à propos de ce site</a> | © 2001-2009 <a href="http://www.logilab.fr">Logilab S.A.</a></div></body>
-</html>
\ No newline at end of file
--- a/web/test/unittest_application.py Tue Apr 13 19:22:46 2010 +0200
+++ b/web/test/unittest_application.py Tue Apr 13 19:43:51 2010 +0200
@@ -14,9 +14,10 @@
from logilab.common.testlib import TestCase, unittest_main
from logilab.common.decorators import clear_cache
+from cubicweb import AuthenticationError
from cubicweb.devtools.testlib import CubicWebTC
from cubicweb.devtools.fake import FakeRequest
-from cubicweb.web import Redirect, AuthenticationError, ExplicitLogin, INTERNAL_FIELD_VALUE
+from cubicweb.web import LogOut, Redirect, INTERNAL_FIELD_VALUE
from cubicweb.web.views.basecontrollers import ViewController
class FakeMapping:
@@ -29,7 +30,7 @@
class MockCursor:
def __init__(self):
self.executed = []
- def execute(self, rql, args=None, cachekey=None):
+ def execute(self, rql, args=None, build_descr=False):
args = args or {}
self.executed.append(rql % args)
@@ -39,10 +40,12 @@
def __init__(self, form=None):
self._cw = FakeRequest()
self._cw.form = form or {}
- self._cursor = self._cw.cursor = MockCursor()
+ self._cursor = MockCursor()
+ self._cw.execute = self._cursor.execute
def new_cursor(self):
- self._cursor = self._cw.cursor = MockCursor()
+ self._cursor = MockCursor()
+ self._cw.execute = self._cursor.execute
def set_form(self, form):
self._cw.form = form
@@ -178,7 +181,7 @@
'__errorurl': 'view?vid=edition...'
}
path, params = self.expect_redirect(lambda x: self.app_publish(x, 'edit'), req)
- forminfo = req.get_session_data('view?vid=edition...')
+ forminfo = req.session.data['view?vid=edition...']
eidmap = forminfo['eidmap']
self.assertEquals(eidmap, {})
values = forminfo['values']
@@ -208,7 +211,7 @@
'__errorurl': 'view?vid=edition...',
}
path, params = self.expect_redirect(lambda x: self.app_publish(x, 'edit'), req)
- forminfo = req.get_session_data('view?vid=edition...')
+ forminfo = req.session.data['view?vid=edition...']
self.assertEquals(set(forminfo['eidmap']), set('XY'))
self.assertEquals(forminfo['eidmap']['X'], None)
self.assertIsInstance(forminfo['eidmap']['Y'], int)
@@ -237,7 +240,7 @@
'__errorurl': 'view?vid=edition...',
}
path, params = self.expect_redirect(lambda x: self.app_publish(x, 'edit'), req)
- forminfo = req.get_session_data('view?vid=edition...')
+ forminfo = req.session.data['view?vid=edition...']
self.assertEquals(set(forminfo['eidmap']), set('XY'))
self.assertIsInstance(forminfo['eidmap']['X'], int)
self.assertIsInstance(forminfo['eidmap']['Y'], int)
@@ -299,29 +302,29 @@
# authentication tests ####################################################
def test_http_auth_no_anon(self):
- req, origcnx = self.init_authentication('http')
+ req, origsession = self.init_authentication('http')
self.assertAuthFailure(req)
- self.assertRaises(ExplicitLogin, self.app_publish, req, 'login')
+ self.assertRaises(AuthenticationError, self.app_publish, req, 'login')
self.assertEquals(req.cnx, None)
- authstr = base64.encodestring('%s:%s' % (origcnx.login, origcnx.authinfo['password']))
+ authstr = base64.encodestring('%s:%s' % (origsession.login, origsession.authinfo['password']))
req._headers['Authorization'] = 'basic %s' % authstr
- self.assertAuthSuccess(req, origcnx)
- self.assertEquals(req.cnx.authinfo, {'password': origcnx.authinfo['password']})
- self.assertRaises(AuthenticationError, self.app_publish, req, 'logout')
+ self.assertAuthSuccess(req, origsession)
+ self.assertEquals(req.session.authinfo, {'password': origsession.authinfo['password']})
+ self.assertRaises(LogOut, self.app_publish, req, 'logout')
self.assertEquals(len(self.open_sessions), 0)
def test_cookie_auth_no_anon(self):
- req, origcnx = self.init_authentication('cookie')
+ req, origsession = self.init_authentication('cookie')
self.assertAuthFailure(req)
form = self.app_publish(req, 'login')
self.failUnless('__login' in form)
self.failUnless('__password' in form)
self.assertEquals(req.cnx, None)
- req.form['__login'] = origcnx.login
- req.form['__password'] = origcnx.authinfo['password']
- self.assertAuthSuccess(req, origcnx)
- self.assertEquals(req.cnx.authinfo, {'password': origcnx.authinfo['password']})
- self.assertRaises(AuthenticationError, self.app_publish, req, 'logout')
+ req.form['__login'] = origsession.login
+ req.form['__password'] = origsession.authinfo['password']
+ self.assertAuthSuccess(req, origsession)
+ self.assertEquals(req.session.authinfo, {'password': origsession.authinfo['password']})
+ self.assertRaises(LogOut, self.app_publish, req, 'logout')
self.assertEquals(len(self.open_sessions), 0)
def test_login_by_email(self):
@@ -331,71 +334,72 @@
'WHERE U login %(login)s', {'address': address, 'login': login})
self.commit()
# option allow-email-login not set
- req, origcnx = self.init_authentication('cookie')
+ req, origsession = self.init_authentication('cookie')
req.form['__login'] = address
- req.form['__password'] = origcnx.authinfo['password']
+ req.form['__password'] = origsession.authinfo['password']
self.assertAuthFailure(req)
# option allow-email-login set
- origcnx.login = address
+ origsession.login = address
self.set_option('allow-email-login', True)
req.form['__login'] = address
- req.form['__password'] = origcnx.authinfo['password']
- self.assertAuthSuccess(req, origcnx)
- self.assertEquals(req.cnx.authinfo, {'password': origcnx.authinfo['password']})
- self.assertRaises(AuthenticationError, self.app_publish, req, 'logout')
+ req.form['__password'] = origsession.authinfo['password']
+ self.assertAuthSuccess(req, origsession)
+ self.assertEquals(req.session.authinfo, {'password': origsession.authinfo['password']})
+ self.assertRaises(LogOut, self.app_publish, req, 'logout')
self.assertEquals(len(self.open_sessions), 0)
def _reset_cookie(self, req):
# prepare the rest of the test
# set session id in cookie
cookie = Cookie.SimpleCookie()
- cookie['__session'] = req.cnx.sessionid
+ cookie['__session'] = req.session.sessionid
req._headers['Cookie'] = cookie['__session'].OutputString()
clear_cache(req, 'get_authorization')
- # reset cnx as if it was a new incoming request
- req.cnx = None
+ # reset session as if it was a new incoming request
+ req.session = req.cnx = None
def _test_auth_anon(self, req):
self.app.connect(req)
- acnx = req.cnx
+ asession = req.session
self.assertEquals(len(self.open_sessions), 1)
- self.assertEquals(acnx.login, 'anon')
- self.assertEquals(acnx.authinfo['password'], 'anon')
- self.failUnless(acnx.anonymous_connection)
+ self.assertEquals(asession.login, 'anon')
+ self.assertEquals(asession.authinfo['password'], 'anon')
+ self.failUnless(asession.anonymous_session)
self._reset_cookie(req)
def _test_anon_auth_fail(self, req):
self.assertEquals(len(self.open_sessions), 1)
self.app.connect(req)
self.assertEquals(req.message, 'authentication failure')
- self.assertEquals(req.cnx.anonymous_connection, True)
+ self.assertEquals(req.session.anonymous_session, True)
self.assertEquals(len(self.open_sessions), 1)
self._reset_cookie(req)
def test_http_auth_anon_allowed(self):
- req, origcnx = self.init_authentication('http', 'anon')
+ req, origsession = self.init_authentication('http', 'anon')
self._test_auth_anon(req)
authstr = base64.encodestring('toto:pouet')
req._headers['Authorization'] = 'basic %s' % authstr
self._test_anon_auth_fail(req)
- authstr = base64.encodestring('%s:%s' % (origcnx.login, origcnx.authinfo['password']))
+ authstr = base64.encodestring('%s:%s' % (origsession.login, origsession.authinfo['password']))
req._headers['Authorization'] = 'basic %s' % authstr
- self.assertAuthSuccess(req, origcnx)
- self.assertEquals(req.cnx.authinfo, {'password': origcnx.authinfo['password']})
- self.assertRaises(AuthenticationError, self.app_publish, req, 'logout')
+ self.assertAuthSuccess(req, origsession)
+ self.assertEquals(req.session.authinfo, {'password': origsession.authinfo['password']})
+ self.assertRaises(LogOut, self.app_publish, req, 'logout')
self.assertEquals(len(self.open_sessions), 0)
def test_cookie_auth_anon_allowed(self):
- req, origcnx = self.init_authentication('cookie', 'anon')
+ req, origsession = self.init_authentication('cookie', 'anon')
self._test_auth_anon(req)
req.form['__login'] = 'toto'
req.form['__password'] = 'pouet'
self._test_anon_auth_fail(req)
- req.form['__login'] = origcnx.login
- req.form['__password'] = origcnx.authinfo['password']
- self.assertAuthSuccess(req, origcnx)
- self.assertEquals(req.cnx.authinfo, {'password': origcnx.authinfo['password']})
- self.assertRaises(AuthenticationError, self.app_publish, req, 'logout')
+ req.form['__login'] = origsession.login
+ req.form['__password'] = origsession.authinfo['password']
+ self.assertAuthSuccess(req, origsession)
+ self.assertEquals(req.session.authinfo,
+ {'password': origsession.authinfo['password']})
+ self.assertRaises(LogOut, self.app_publish, req, 'logout')
self.assertEquals(len(self.open_sessions), 0)
def test_non_regr_optional_first_var(self):
--- a/web/test/unittest_pdf.py Tue Apr 13 19:22:46 2010 +0200
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,41 +0,0 @@
-import os.path as osp
-from tempfile import NamedTemporaryFile
-from subprocess import Popen as sub
-from xml.etree.cElementTree import ElementTree, fromstring, tostring, dump
-
-from logilab.common.testlib import TestCase, unittest_main
-
-from cubicweb.utils import can_do_pdf_conversion
-from cubicweb.ext.xhtml2fo import ReportTransformer
-
-DATADIR = osp.join(osp.dirname(__file__), 'data')
-
-class PDFTC(TestCase):
-
- def test_xhtml_to_fop_to_pdf(self):
- if not can_do_pdf_conversion():
- self.skip('dependencies not available : check pysixt and fop')
- xmltree = ElementTree()
- xmltree.parse(osp.join(DATADIR, 'sample1.xml'))
- foptree = ReportTransformer(u'contentmain').transform(xmltree)
- # next
- foptmp = NamedTemporaryFile()
- foptree.write(foptmp)
- foptmp.flush()
- pdftmp = NamedTemporaryFile()
- fopproc = sub(['/usr/bin/fop', foptmp.name, pdftmp.name])
- fopproc.wait()
- del foptmp
- if fopproc.returncode:
- self.skip('fop returned status %s' % fopproc.returncode)
- pdftmp.seek(0) # a bit superstitious
- reference = open(osp.join(DATADIR, 'sample1.pdf'), 'r').read()
- output = pdftmp.read()
- # XXX almost equals due to ID, creation date, so it seems to fail
- self.assertEquals( len(output), len(reference) )
- # cut begin & end 'cause they contain variyng data
- self.assertTextEquals(output[150:1500], reference[150:1500])
-
-if __name__ == '__main__':
- unittest_main()
-
--- a/web/test/unittest_urlpublisher.py Tue Apr 13 19:22:46 2010 +0200
+++ b/web/test/unittest_urlpublisher.py Tue Apr 13 19:43:51 2010 +0200
@@ -26,7 +26,7 @@
req = self.request()
b = req.create_entity('BlogEntry', title=u'hell\'o', content=u'blabla')
c = req.create_entity('Tag', name=u'yo') # take care: Tag's name normalized to lower case
- self.execute('SET C tags B WHERE C eid %(c)s, B eid %(b)s', {'c':c.eid, 'b':b.eid}, 'b')
+ self.execute('SET C tags B WHERE C eid %(c)s, B eid %(b)s', {'c':c.eid, 'b':b.eid})
def process(self, url):
req = self.req = self.request()
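A pattern repeated in the test and view updates throughout this changeset:
execute() no longer takes the trailing eid-key argument, so call sites simply
drop it. Schematically, using the RQL and eids from the hunk above as
placeholders:

    # before: the caller also passed the keys of the eid substitutions
    self.execute('SET C tags B WHERE C eid %(c)s, B eid %(b)s',
                 {'c': c.eid, 'b': b.eid}, 'b')
    # after: the arguments dictionary is enough
    self.execute('SET C tags B WHERE C eid %(c)s, B eid %(b)s',
                 {'c': c.eid, 'b': b.eid})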
--- a/web/test/unittest_views_basecontrollers.py Tue Apr 13 19:22:46 2010 +0200
+++ b/web/test/unittest_views_basecontrollers.py Tue Apr 13 19:43:51 2010 +0200
@@ -71,7 +71,7 @@
'in_group-subject:'+eid: groups,
}
path, params = self.expect_redirect_publish(req, 'edit')
- e = self.execute('Any X WHERE X eid %(x)s', {'x': user.eid}, 'x').get_entity(0, 0)
+ e = self.execute('Any X WHERE X eid %(x)s', {'x': user.eid}).get_entity(0, 0)
self.assertEquals(e.firstname, u'Sylvain')
self.assertEquals(e.surname, u'Th\xe9nault')
self.assertEquals(e.login, user.login)
@@ -111,7 +111,7 @@
'surname-subject:'+eid: u'Sylvain',
}
path, params = self.expect_redirect_publish(req, 'edit')
- e = self.execute('Any X WHERE X eid %(x)s', {'x': user.eid}, 'x').get_entity(0, 0)
+ e = self.execute('Any X WHERE X eid %(x)s', {'x': user.eid}).get_entity(0, 0)
self.assertEquals(e.login, user.login)
self.assertEquals(e.firstname, u'Th\xe9nault')
self.assertEquals(e.surname, u'Sylvain')
@@ -247,7 +247,7 @@
tmpgroup = self.request().create_entity('CWGroup', name=u"test")
user = self.user()
req = self.request(**req_form(user))
- req.set_session_data('pending_insert', set([(user.eid, 'in_group', tmpgroup.eid)]))
+ req.session.data['pending_insert'] = set([(user.eid, 'in_group', tmpgroup.eid)])
path, params = self.expect_redirect_publish(req, 'edit')
usergroups = [gname for gname, in
self.execute('Any N WHERE G name N, U in_group G, U eid %(u)s', {'u': user.eid})]
@@ -266,7 +266,7 @@
self.assertUnorderedIterableEquals(usergroups, ['managers', 'test'])
# now try to delete the relation
req = self.request(**req_form(user))
- req.set_session_data('pending_delete', set([(user.eid, 'in_group', groupeid)]))
+ req.session.data['pending_delete'] = set([(user.eid, 'in_group', groupeid)])
path, params = self.expect_redirect_publish(req, 'edit')
usergroups = [gname for gname, in
self.execute('Any N WHERE G name N, U in_group G, U eid %(u)s', {'u': user.eid})]
@@ -346,7 +346,7 @@
self.assertIn('_cwmsgid', params)
eid = req.create_entity('EmailAddress', address=u'hop@logilab.fr').eid
self.execute('SET X use_email E WHERE E eid %(e)s, X eid %(x)s',
- {'x': self.session.user.eid, 'e': eid}, 'x')
+ {'x': self.session.user.eid, 'e': eid})
self.commit()
req = req
req.form = {'eid': u(eid), '__type:%s'%eid: 'EmailAddress',
@@ -385,7 +385,7 @@
}
try:
path, params = self.expect_redirect_publish(req, 'edit')
- e = self.execute('Any X WHERE X eid %(x)s', {'x': cwetypeeid}, 'x').get_entity(0, 0)
+ e = self.execute('Any X WHERE X eid %(x)s', {'x': cwetypeeid}).get_entity(0, 0)
self.assertEquals(e.name, 'CWEType')
self.assertEquals(sorted(g.eid for g in e.read_permission), groupeids)
finally:
@@ -407,7 +407,7 @@
path, params = self.expect_redirect_publish(req, 'edit')
self.failUnless(path.startswith('blogentry/'))
eid = path.split('/')[1]
- e = self.execute('Any C, T WHERE C eid %(x)s, C content T', {'x': eid}, 'x').get_entity(0, 0)
+ e = self.execute('Any C, T WHERE C eid %(x)s, C content T', {'x': eid}).get_entity(0, 0)
self.assertEquals(e.title, '"13:03:40"')
self.assertEquals(e.content, '"13:03:43"')
@@ -554,17 +554,21 @@
def test_remote_add_existing_tag(self):
self.remote_call('tag_entity', self.john.eid, ['python'])
- self.assertUnorderedIterableEquals([tname for tname, in self.execute('Any N WHERE T is Tag, T name N')],
- ['python', 'cubicweb'])
- self.assertEquals(self.execute('Any N WHERE T tags P, P is CWUser, T name N').rows,
- [['python']])
+ self.assertUnorderedIterableEquals(
+ [tname for tname, in self.execute('Any N WHERE T is Tag, T name N')],
+ ['python', 'cubicweb'])
+ self.assertEquals(
+ self.execute('Any N WHERE T tags P, P is CWUser, T name N').rows,
+ [['python']])
def test_remote_add_new_tag(self):
self.remote_call('tag_entity', self.john.eid, ['javascript'])
- self.assertUnorderedIterableEquals([tname for tname, in self.execute('Any N WHERE T is Tag, T name N')],
- ['python', 'cubicweb', 'javascript'])
- self.assertEquals(self.execute('Any N WHERE T tags P, P is CWUser, T name N').rows,
- [['javascript']])
+ self.assertUnorderedIterableEquals(
+ [tname for tname, in self.execute('Any N WHERE T is Tag, T name N')],
+ ['python', 'cubicweb', 'javascript'])
+ self.assertEquals(
+ self.execute('Any N WHERE T tags P, P is CWUser, T name N').rows,
+ [['javascript']])
def test_pending_insertion(self):
res, req = self.remote_call('add_pending_inserts', [['12', 'tags', '13']])
--- a/web/test/unittest_views_baseviews.py Tue Apr 13 19:22:46 2010 +0200
+++ b/web/test/unittest_views_baseviews.py Tue Apr 13 19:43:51 2010 +0200
@@ -90,7 +90,7 @@
req = self.request()
e = req.create_entity("State", name=u'<toto>', description=u'loo"ong blabla')
rset = req.execute('Any X, D, CD, NOW - CD WHERE X is State, X description D, X creation_date CD, X eid %(x)s',
- {'x': e.eid}, 'x')
+ {'x': e.eid})
view = self.vreg['views'].select('table', req, rset=rset)
return e, rset, view
--- a/web/views/authentication.py Tue Apr 13 19:22:46 2010 +0200
+++ b/web/views/authentication.py Tue Apr 13 19:43:51 2010 +0200
@@ -12,7 +12,7 @@
from cubicweb import AuthenticationError, BadConnectionId
from cubicweb.view import Component
from cubicweb.dbapi import repo_connect, ConnectionProperties
-from cubicweb.web import ExplicitLogin, InvalidSession
+from cubicweb.web import InvalidSession
from cubicweb.web.application import AbstractAuthenticationManager
class NoAuthInfo(Exception): pass
@@ -28,9 +28,10 @@
"""
raise NotImplementedError()
- def authenticated(self, req, cnx, retreiver):
+ def authenticated(self, retreiver, req, cnx, login, authinfo):
"""callback when return authentication information have opened a
- repository connection successfully
+ repository connection successfully. Take care req has no session
+ attached yet, hence req.execute isn't available.
"""
pass
@@ -59,50 +60,51 @@
self.authinforetreivers = sorted(vreg['webauth'].possible_objects(vreg),
key=lambda x: x.order)
assert self.authinforetreivers
+ # 2-tuple (login, password); login is None when no anonymous access
+ # is configured
self.anoninfo = vreg.config.anonymous_user()
+ if self.anoninfo[0]:
+ self.anoninfo = (self.anoninfo[0], {'password': self.anoninfo[1]})
def validate_session(self, req, session):
- """check session validity, and return eventually hijacked session
+ """check session validity, reconnecting it to the repository if the
+ associated connection expired on the repository side (hence the
+ necessity for this method). Return the connected user on success.
- :raise InvalidSession:
- if session is corrupted for a reason or another and should be closed
+ raise :exc:`InvalidSession` if the session is corrupted for one reason
+ or another and should be closed
"""
# with this authentication manager, session is actually a dbapi
# connection
- cnx = session
+ cnx = session.cnx
login = req.get_authorization()[0]
+ # check session.login and not user.login, since in case of login by
+ # email, login and session.login are the email while user.login is the
+ # actual user login
+ if login and session.login != login:
+ raise InvalidSession('login mismatch')
try:
# calling cnx.user() check connection validity, raise
# BadConnectionId on failure
user = cnx.user(req)
- # check cnx.login and not user.login, since in case of login by
- # email, login and cnx.login are the email while user.login is the
- # actual user login
- if login and cnx.login != login:
- cnx.close()
- raise InvalidSession('login mismatch')
except BadConnectionId:
# check if a connection should be automatically re-established
- if (login is None or login == cnx.login):
- cnx = self._authenticate(req, cnx.login, cnx.authinfo)
+ if (login is None or login == session.login):
+ cnx = self._authenticate(session.login, session.authinfo)
user = cnx.user(req)
- # backport session's data
- cnx.data = session.data
+ session.cnx = cnx
else:
raise InvalidSession('bad connection id')
- # associate the connection to the current request
- req.set_connection(cnx, user)
- return cnx
+ return user
def authenticate(self, req):
- """authenticate user and return corresponding user object
+ """authenticate user using connection information found in the request,
+ and return corresponding a :class:`~cubicweb.dbapi.Connection` instance,
+ as well as login and authentication information dictionary used to open
+ the connection.
- :raise ExplicitLogin: if authentication is required (no authentication
- info found or wrong user/password)
-
- Note: this method is violating AuthenticationManager interface by
- returning a session instance instead of the user. This is expected by
- the InMemoryRepositorySessionManager.
+ raise :exc:`cubicweb.AuthenticationError` if authentication failed
+ (no authentication info found or wrong user/password)
"""
for retreiver in self.authinforetreivers:
try:
@@ -110,44 +112,28 @@
except NoAuthInfo:
continue
try:
- cnx = self._authenticate(req, login, authinfo)
- except ExplicitLogin:
+ cnx = self._authenticate(login, authinfo)
+ except AuthenticationError:
continue # the next one may succeed
for retreiver_ in self.authinforetreivers:
- retreiver_.authenticated(req, cnx, retreiver)
- break
- else:
- # false if no authentication info found, eg this is not an
- # authentication failure
- if 'login' in locals():
- req.set_message(req._('authentication failure'))
- cnx = self._open_anonymous_connection(req)
- return cnx
+ retreiver_.authenticated(retreiver, req, cnx, login, authinfo)
+ return cnx, login, authinfo
+ # the test below is false if no authentication info was found at all,
+ # i.e. this is not an authentication failure
+ if 'login' in locals():
+ req.set_message(req._('authentication failure'))
+ login, authinfo = self.anoninfo
+ if login:
+ cnx = self._authenticate(login, authinfo)
+ cnx.anonymous_connection = True
+ return cnx, login, authinfo
+ raise AuthenticationError()
- def _authenticate(self, req, login, authinfo):
+ def _authenticate(self, login, authinfo):
cnxprops = ConnectionProperties(self.vreg.config.repo_method,
close=False, log=self.log_queries)
- try:
- cnx = repo_connect(self.repo, login, cnxprops=cnxprops, **authinfo)
- except AuthenticationError:
- raise ExplicitLogin()
- self._init_cnx(cnx, login, authinfo)
- # associate the connection to the current request
- req.set_connection(cnx)
+ cnx = repo_connect(self.repo, login, cnxprops=cnxprops, **authinfo)
+ # decorate connection
+ cnx.vreg = self.vreg
return cnx
- def _open_anonymous_connection(self, req):
- # restore an anonymous connection if possible
- login, password = self.anoninfo
- if login:
- cnx = self._authenticate(req, login, {'password': password})
- cnx.anonymous_connection = True
- return cnx
- raise ExplicitLogin()
-
- def _init_cnx(self, cnx, login, authinfo):
- # decorate connection
- cnx.vreg = self.vreg
- cnx.login = login
- cnx.authinfo = authinfo
-
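To make the reworked contract concrete: authenticate() now returns the
connection together with the login and authinfo used to open it, and raises
AuthenticationError when no usable credentials are found and anonymous access
is off, while validate_session() returns the connected user. A hedged sketch
of a caller; the session manager that actually consumes this API is not part
of this hunk, so the class and make_session helper below are invented for
illustration:

    from cubicweb import AuthenticationError

    class SessionManagerSketch(object):
        """illustration only, not the real InMemoryRepositorySessionManager"""
        def __init__(self, authmanager):
            self.authmanager = authmanager

        def open_session(self, req):
            # may raise AuthenticationError, to be turned into a login form
            # or a 401 depending on the configured auth-mode
            cnx, login, authinfo = self.authmanager.authenticate(req)
            session = self.make_session(cnx, login, authinfo)  # hypothetical
            req.set_session(session)
            return session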
--- a/web/views/autoform.py Tue Apr 13 19:22:46 2010 +0200
+++ b/web/views/autoform.py Tue Apr 13 19:43:51 2010 +0200
@@ -256,7 +256,7 @@
This is where relations being added while editing an entity are stored.
They used to be stored in a temporary cookie.
"""
- pending = req.get_session_data('pending_insert') or ()
+ pending = req.session.data.get('pending_insert', ())
return ['%s:%s:%s' % (subj, rel, obj) for subj, rel, obj in pending
if eid is None or eid in (subj, obj)]
@@ -266,7 +266,7 @@
This is where relations being removed while editing an entity are stored.
They used to be stored in a temporary cookie.
"""
- pending = req.get_session_data('pending_delete') or ()
+ pending = req.session.data.get('pending_delete', ())
return ['%s:%s:%s' % (subj, rel, obj) for subj, rel, obj in pending
if eid is None or eid in (subj, obj)]
@@ -289,7 +289,7 @@
execute = req.execute
for subj, rtype, obj in parse_relations_descr(rdefs):
rql = 'DELETE X %s Y where X eid %%(x)s, Y eid %%(y)s' % rtype
- execute(rql, {'x': subj, 'y': obj}, ('x', 'y'))
+ execute(rql, {'x': subj, 'y': obj})
req.set_message(req._('relations deleted'))
def insert_relations(req, rdefs):
@@ -297,7 +297,7 @@
execute = req.execute
for subj, rtype, obj in parse_relations_descr(rdefs):
rql = 'SET X %s Y where X eid %%(x)s, Y eid %%(y)s' % rtype
- execute(rql, {'x': subj, 'y': obj}, ('x', 'y'))
+ execute(rql, {'x': subj, 'y': obj})
class GenericRelationsWidget(fw.FieldWidget):
--- a/web/views/basecomponents.py Tue Apr 13 19:22:46 2010 +0200
+++ b/web/views/basecomponents.py Tue Apr 13 19:43:51 2010 +0200
@@ -2,7 +2,6 @@
* the rql input form
* the logged user link
-* pdf view link
:organization: Logilab
:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
--- a/web/views/basecontrollers.py Tue Apr 13 19:22:46 2010 +0200
+++ b/web/views/basecontrollers.py Tue Apr 13 19:43:51 2010 +0200
@@ -10,22 +10,20 @@
"""
__docformat__ = "restructuredtext en"
-from smtplib import SMTP
-
import simplejson
from logilab.common.decorators import cached
from logilab.common.date import strptime
-from cubicweb import (NoSelectableObject, ValidationError, ObjectNotFound,
- typed_eid)
+from cubicweb import (NoSelectableObject, ObjectNotFound, ValidationError,
+ AuthenticationError, typed_eid)
from cubicweb.utils import CubicWebJsonEncoder
from cubicweb.selectors import authenticated_user, match_form_params
from cubicweb.mail import format_mail
-from cubicweb.web import ExplicitLogin, Redirect, RemoteCallFailed, json_dumps
+from cubicweb.web import Redirect, RemoteCallFailed, DirectResponse, json_dumps
from cubicweb.web.controller import Controller
-from cubicweb.web.views import vid_from_rset
-from cubicweb.web.views.formrenderers import FormRenderer
+from cubicweb.web.views import vid_from_rset, formrenderers
+
try:
from cubicweb.web.facet import (FilterRQLBuilder, get_facet,
prepare_facets_rqlst)
@@ -58,7 +56,7 @@
user's session data
"""
def wrapper(self, *args, **kwargs):
- data = self._cw.get_session_data(self._cw.pageid)
+ data = self._cw.session.data.get(self._cw.pageid)
if data is None:
raise RemoteCallFailed(self._cw._('pageid-not-found'))
return func(self, *args, **kwargs)
@@ -72,7 +70,7 @@
"""log in the instance"""
if self._cw.vreg.config['auth-mode'] == 'http':
# HTTP authentication
- raise ExplicitLogin()
+ raise AuthenticationError()
else:
# Cookie authentication
return self.appli.need_login_content(self._cw)
@@ -118,7 +116,10 @@
req = self._cw
if rset is None and not hasattr(req, '_rql_processed'):
req._rql_processed = True
- rset = self.process_rql(req.form.get('rql'))
+ if req.cnx is None:
+ rset = None
+ else:
+ rset = self.process_rql(req.form.get('rql'))
if rset and rset.rowcount == 1 and '__method' in req.form:
entity = rset.get_entity(0, 0)
try:
@@ -179,14 +180,14 @@
else:
rql = 'SET Y %s X WHERE X eid %%(x)s, Y eid %%(y)s' % rtype
for teid in eids:
- req.execute(rql, {'x': eid, 'y': typed_eid(teid)}, ('x', 'y'))
+ req.execute(rql, {'x': eid, 'y': typed_eid(teid)})
def _validation_error(req, ex):
req.cnx.rollback()
# XXX necessary to remove existing validation error?
# imo (syt), it's not necessary
- req.get_session_data(req.form.get('__errorurl'), pop=True)
+ req.session.data.pop(req.form.get('__errorurl'), None)
foreid = ex.entity
eidmap = req.data.get('eidmap', {})
for var, eid in eidmap.items():
@@ -283,7 +284,7 @@
raise RemoteCallFailed(repr(exc))
try:
result = func(*args)
- except RemoteCallFailed:
+ except (RemoteCallFailed, DirectResponse):
raise
except Exception, ex:
self.exception('an exception occurred while calling js_%s(%s): %s',
@@ -316,12 +317,12 @@
form['__action_%s' % action] = u'whatever'
return form
- def _exec(self, rql, args=None, eidkey=None, rocheck=True):
+ def _exec(self, rql, args=None, rocheck=True):
"""json mode: execute RQL and return resultset as json"""
if rocheck:
self._cw.ensure_ro_rql(rql)
try:
- return self._cw.execute(rql, args, eidkey)
+ return self._cw.execute(rql, args)
except Exception, ex:
self.exception("error in _exec(rql=%s): %s", rql, ex)
return None
@@ -379,7 +380,7 @@
form = self._cw.vreg['forms'].select('edition', self._cw, entity=entity)
form.build_context()
vfield = form.field_by_name('value')
- renderer = FormRenderer(self._cw)
+ renderer = formrenderers.FormRenderer(self._cw)
return vfield.render(form, renderer, tabindex=tabindex) \
+ renderer.render_help(form, vfield)
@@ -473,7 +474,7 @@
@check_pageid
@jsonize
def js_user_callback(self, cbname):
- page_data = self._cw.get_session_data(self._cw.pageid, {})
+ page_data = self._cw.session.data.get(self._cw.pageid, {})
try:
cb = page_data[cbname]
except KeyError:
@@ -502,7 +503,7 @@
self._cw.unregister_callback(self._cw.pageid, cbname)
def js_unload_page_data(self):
- self._cw.del_session_data(self._cw.pageid)
+ self._cw.session.data.pop(self._cw.pageid, None)
def js_cancel_edition(self, errorurl):
"""cancelling edition from javascript
@@ -547,15 +548,13 @@
def _add_pending(self, eidfrom, rel, eidto, kind):
key = 'pending_%s' % kind
- pendings = self._cw.get_session_data(key, set())
+ pendings = self._cw.session.data.setdefault(key, set())
pendings.add( (typed_eid(eidfrom), rel, typed_eid(eidto)) )
- self._cw.set_session_data(key, pendings)
def _remove_pending(self, eidfrom, rel, eidto, kind):
key = 'pending_%s' % kind
- pendings = self._cw.get_session_data(key)
+ pendings = self._cw.session.data[key]
pendings.remove( (typed_eid(eidfrom), rel, typed_eid(eidto)) )
- self._cw.set_session_data(key, pendings)
def js_remove_pending_insert(self, (eidfrom, rel, eidto)):
self._remove_pending(eidfrom, rel, eidto, 'insert')
@@ -612,7 +611,7 @@
for recipient in self.recipients():
text = body % recipient.as_email_context()
self.sendmail(recipient.get_email(), subject, text)
- # breadcrumbs = self._cw.get_session_data('breadcrumbs', None)
+ #breadcrumbs = self._cw.session.data.get('breadcrumbs', None)
url = self._cw.build_url(__message=self._cw._('emails successfully sent'))
raise Redirect(url)
@@ -643,7 +642,7 @@
def redirect(self):
req = self._cw
- breadcrumbs = req.get_session_data('breadcrumbs', None)
+ breadcrumbs = req.session.data.get('breadcrumbs', None)
if breadcrumbs is not None and len(breadcrumbs) > 1:
url = req.rebuild_url(breadcrumbs[-2],
__message=req._('transaction undoed'))
--- a/web/views/basetemplates.py Tue Apr 13 19:22:46 2010 +0200
+++ b/web/views/basetemplates.py Tue Apr 13 19:43:51 2010 +0200
@@ -12,9 +12,9 @@
from logilab.common.deprecation import class_renamed
from cubicweb.appobject import objectify_selector
-from cubicweb.selectors import match_kwargs
+from cubicweb.selectors import match_kwargs, no_cnx
from cubicweb.view import View, MainTemplate, NOINDEX, NOFOLLOW
-from cubicweb.utils import UStringIO, can_do_pdf_conversion
+from cubicweb.utils import UStringIO
from cubicweb.schema import display_name
from cubicweb.web import component, formfields as ff, formwidgets as fw
from cubicweb.web.views import forms
@@ -78,7 +78,6 @@
return 0
return view.templatable
-
class NonTemplatableViewTemplate(MainTemplate):
"""main template for any non templatable views (xml, binaries, etc.)"""
__regid__ = 'main-template'
@@ -192,9 +191,9 @@
class ErrorTemplate(TheMainTemplate):
- """fallback template if an internal error occured during displaying the
- main template. This template may be called for authentication error,
- which means that req.cnx and req.user may not be set.
+ """fallback template if an internal error occured during displaying the main
+ template. This template may be called for authentication error, which means
+ that req.cnx and req.user may not be set.
"""
__regid__ = 'error-template'
@@ -268,61 +267,6 @@
self.w(u'</td>\n')
self.w(u'</tr></table>\n')
-if can_do_pdf_conversion():
- try:
- from xml.etree.cElementTree import ElementTree
- except ImportError: #python2.4
- from elementtree import ElementTree
- from subprocess import Popen as sub
- from StringIO import StringIO
- from tempfile import NamedTemporaryFile
- from cubicweb.ext.xhtml2fo import ReportTransformer
-
-
- class PdfViewComponent(component.EntityVComponent):
- __regid__ = 'pdfview'
-
- context = 'ctxtoolbar'
-
- def cell_call(self, row, col, view):
- entity = self.cw_rset.get_entity(row, col)
- url = entity.absolute_url(vid=view.__regid__, __template='pdf-main-template')
- iconurl = self._cw.build_url('data/pdf_icon.gif')
- label = self._cw._('Download page as pdf')
- self.w(u'<a href="%s" title="%s" class="toolbarButton"><img src="%s" alt="%s"/></a>' %
- (xml_escape(url), label, xml_escape(iconurl), label))
-
- class PdfMainTemplate(TheMainTemplate):
- __regid__ = 'pdf-main-template'
-
- def call(self, view):
- """build the standard view, then when it's all done, convert xhtml to pdf
- """
- super(PdfMainTemplate, self).call(view)
- section = self._cw.form.pop('section', 'contentmain')
- pdf = self.to_pdf(self._stream, section)
- self._cw.set_content_type('application/pdf', filename='report.pdf')
- self.binary = True
- self.w = None
- self.set_stream()
- # pylint needs help
- self.w(pdf)
-
- def to_pdf(self, stream, section):
- # XXX see ticket/345282
- stream = stream.getvalue().replace(' ', ' ').encode('utf-8')
- xmltree = ElementTree()
- xmltree.parse(StringIO(stream))
- foptree = ReportTransformer(section).transform(xmltree)
- foptmp = NamedTemporaryFile()
- pdftmp = NamedTemporaryFile()
- foptree.write(foptmp)
- foptmp.flush()
- fopproc = sub(['/usr/bin/fop', foptmp.name, pdftmp.name])
- fopproc.wait()
- pdftmp.seek(0)
- pdf = pdftmp.read()
- return pdf
# page parts templates ########################################################
@@ -405,7 +349,7 @@
self.w(u'<td id="lastcolumn">')
self.w(u'</td>\n')
self.w(u'</tr></table>\n')
- if self._cw.cnx.anonymous_connection:
+ if self._cw.session.anonymous_session:
self.wview('logform', rset=self.cw_rset, id='popupLoginBox',
klass='hidden', title=False, showmessage=False)
--- a/web/views/bookmark.py Tue Apr 13 19:22:46 2010 +0200
+++ b/web/views/bookmark.py Tue Apr 13 19:43:51 2010 +0200
@@ -112,7 +112,7 @@
else:
# we can't edit shared bookmarks we don't own
bookmarksrql = 'Bookmark B WHERE B bookmarked_by U, B owned_by U, U eid %(x)s'
- erset = req.execute(bookmarksrql, {'x': ueid}, 'x',
+ erset = req.execute(bookmarksrql, {'x': ueid},
build_descr=False)
bookmarksrql %= {'x': ueid}
if erset:
--- a/web/views/cwuser.py Tue Apr 13 19:22:46 2010 +0200
+++ b/web/views/cwuser.py Tue Apr 13 19:43:51 2010 +0200
@@ -1,4 +1,4 @@
-"""Specific views for users
+"""Specific views for users and groups
:organization: Logilab
:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
@@ -12,6 +12,7 @@
from cubicweb.selectors import one_line_rset, implements, match_user_groups
from cubicweb.view import EntityView
from cubicweb.web import action, uicfg
+from cubicweb.web.views import tabs
_pvs = uicfg.primaryview_section
_pvs.tag_attribute(('CWUser', 'login'), 'hidden')
@@ -71,3 +72,65 @@
if emailaddr:
self.w(u'<foaf:mbox>%s</foaf:mbox>\n' % xml_escape(emailaddr))
self.w(u'</foaf:Person>\n')
+
+
+# group views ##################################################################
+
+_pvs.tag_object_of(('CWUser', 'in_group', 'CWGroup'), 'hidden')
+_pvs.tag_object_of(('*', 'require_group', 'CWGroup'), 'hidden')
+
+
+class CWGroupPrimaryView(tabs.TabbedPrimaryView):
+ __select__ = implements('CWGroup')
+ tabs = [_('cwgroup-main'), _('cwgroup-permissions')]
+ default_tab = 'cwgroup-main'
+
+
+class CWGroupMainTab(tabs.PrimaryTab):
+ __regid__ = 'cwgroup-main'
+ __select__ = tabs.PrimaryTab.__select__ & implements('CWGroup')
+
+ def render_entity_attributes(self, entity, siderelations=None):
+ rql = 'Any U, FN, LN, CD, LL ORDERBY L WHERE U in_group G, ' \
+ 'U login L, U firstname FN, U surname LN, U creation_date CD, ' \
+ 'U last_login_time LL, G eid %(x)s'
+ rset = self._cw.execute(rql, {'x': entity.eid})
+ headers = (_(u'user'), _(u'first name'), _(u'last name'),
+ _(u'creation date'), _(u'last login time'))
+ self.wview('editable-table', rset, 'null', displayfilter=True,
+ displaycols=range(5), mainindex=0, headers=headers)
+
+class CWGroupPermTab(EntityView):
+ __regid__ = 'cwgroup-permissions'
+ __select__ = implements('CWGroup')
+
+ def cell_call(self, row, col):
+ self._cw.add_css(('cubicweb.schema.css','cubicweb.acl.css'))
+ access_types = ('read', 'delete', 'add', 'update')
+ w = self.w
+ entity = self.cw_rset.get_entity(row, col)
+ objtype_access = {'CWEType': ('read', 'delete', 'add', 'update'),
+ 'CWRelation': ('add', 'delete')}
+ rql_cwetype = 'DISTINCT Any X WHERE X %s_permission CWG, X is CWEType, ' \
+ 'CWG eid %%(e)s'
+ rql_cwrelation = 'DISTINCT Any RT WHERE X %s_permission CWG, X is CWRelation, ' \
+ 'X relation_type RT, CWG eid %%(e)s'
+ self.render_objtype_access(entity, 'CWEType', objtype_access, rql_cwetype)
+ self.render_objtype_access(entity, 'CWRelation', objtype_access, rql_cwrelation)
+
+ def render_objtype_access(self, entity, objtype, objtype_access, rql):
+ self.w(u'<h4>%s</h4>' % self._cw._(objtype))
+ for access_type in objtype_access[objtype]:
+ rset = self._cw.execute(rql % access_type, {'e': entity.eid})
+ if rset:
+ self.w(u'<div>%s:</div>' % self._cw.__(access_type + '_permission'))
+ self.w(u'<div>%s</div><br/>' % self._cw.view('csv', rset, 'null'))
+
+class CWGroupInContextView(EntityView):
+ __regid__ = 'incontext'
+ __select__ = implements('CWGroup')
+
+ def cell_call(self, row, col):
+ entity = self.cw_rset.complete_entity(row, col)
+ self.w(u'<a href="%s" class="%s">%s</a>' % (
+ entity.absolute_url(), entity.name, entity.printable_value('name')))
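
The CWGroup views added above follow the tabbed primary view pattern from cubicweb.web.views.tabs: a TabbedPrimaryView declares the list of tabs, and each tab is a separate view whose __regid__ matches one tab name. Below is a minimal sketch of the same pattern for a hypothetical Project entity type; the entity type, tab names and 'name' attribute are illustrative assumptions, not part of this changeset.

_ = unicode  # i18n marker, as at the top of cubicweb view modules

from cubicweb.selectors import implements
from cubicweb.view import EntityView
from cubicweb.web.views import tabs

class ProjectPrimaryView(tabs.TabbedPrimaryView):
    __select__ = implements('Project')          # hypothetical entity type
    tabs = [_('project-main'), _('project-activity')]
    default_tab = 'project-main'

class ProjectMainTab(tabs.PrimaryTab):
    # standard attributes/relations rendering for the default tab
    __regid__ = 'project-main'
    __select__ = tabs.PrimaryTab.__select__ & implements('Project')

class ProjectActivityTab(EntityView):
    # a custom second tab, selected through the matching __regid__
    __regid__ = 'project-activity'
    __select__ = implements('Project')

    def cell_call(self, row, col):
        entity = self.cw_rset.get_entity(row, col)
        self.w(u'<div>%s</div>' % entity.printable_value('name'))  # 'name' assumed
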
--- a/web/views/editcontroller.py Tue Apr 13 19:22:46 2010 +0200
+++ b/web/views/editcontroller.py Tue Apr 13 19:43:51 2010 +0200
@@ -249,13 +249,13 @@
rql = 'DELETE %s %s %s WHERE X eid %%(x)s, Y eid %%(y)s' % (
subjvar, rschema, objvar)
for reid in origvalues.difference(values):
- self.relations_rql.append((rql, {'x': eid, 'y': reid}, ('x', 'y')))
+ self.relations_rql.append((rql, {'x': eid, 'y': reid}))
seteids = values.difference(origvalues)
if seteids:
rql = 'SET %s %s %s WHERE X eid %%(x)s, Y eid %%(y)s' % (
subjvar, rschema, objvar)
for reid in seteids:
- self.relations_rql.append((rql, {'x': eid, 'y': reid}, ('x', 'y')))
+ self.relations_rql.append((rql, {'x': eid, 'y': reid}))
def delete_entities(self, eidtypes):
"""delete entities from the repository"""
--- a/web/views/editviews.py Tue Apr 13 19:22:46 2010 +0200
+++ b/web/views/editviews.py Tue Apr 13 19:43:51 2010 +0200
@@ -113,5 +113,5 @@
text, data = captcha.captcha(self._cw.vreg.config['captcha-font-file'],
self._cw.vreg.config['captcha-font-size'])
key = self._cw.form.get('captchakey', 'captcha')
- self._cw.set_session_data(key, text)
+ self._cw.session.data[key] = text
self.w(data.read())
--- a/web/views/iprogress.py Tue Apr 13 19:22:46 2010 +0200
+++ b/web/views/iprogress.py Tue Apr 13 19:43:51 2010 +0200
@@ -17,9 +17,9 @@
from cubicweb.interfaces import IProgress, IMileStone
from cubicweb.schema import display_name
from cubicweb.view import EntityView
-
+from cubicweb.web.views.tableview import EntityAttributesTableView
-class ProgressTableView(EntityView):
+class ProgressTableView(EntityAttributesTableView):
"""The progress table view is able to display progress information
of any object implement IMileStone.
@@ -39,26 +39,13 @@
__regid__ = 'progress_table_view'
title = _('task progression')
__select__ = implements(IMileStone)
+ table_css = "progress"
+ css_files = ('cubicweb.iprogress.css',)
# default columns of the table
columns = (_('project'), _('milestone'), _('state'), _('eta_date'),
_('cost'), _('progress'), _('todo_by'))
-
- def call(self, columns=None):
- """displays all versions in a table"""
- self._cw.add_css('cubicweb.iprogress.css')
- _ = self._cw._
- self.columns = columns or self.columns
- ecls = self._cw.vreg['etypes'].etype_class(self.cw_rset.description[0][0])
- self.w(u'<table class="progress">')
- self.table_header(ecls)
- self.w(u'<tbody>')
- for row in xrange(self.cw_rset.rowcount):
- self.cell_call(row=row, col=0)
- self.w(u'</tbody>')
- self.w(u'</table>')
-
def cell_call(self, row, col):
_ = self._cw._
entity = self.cw_rset.get_entity(row, col)
@@ -91,20 +78,6 @@
"""use entity's type as label"""
return display_name(self._cw, ecls.__regid__)
- def table_header(self, ecls):
- """builds the table's header"""
- self.w(u'<thead><tr>')
- _ = self._cw._
- for column in self.columns:
- meth = getattr(self, 'header_for_%s' % column, None)
- if meth:
- colname = meth(ecls)
- else:
- colname = _(column)
- self.w(u'<th>%s</th>' % xml_escape(colname))
- self.w(u'</tr></thead>\n')
-
-
## cell management ########################################################
def build_project_cell(self, entity):
"""``project`` column cell renderer"""
--- a/web/views/management.py Tue Apr 13 19:22:46 2010 +0200
+++ b/web/views/management.py Tue Apr 13 19:43:51 2010 +0200
@@ -16,56 +16,14 @@
from cubicweb.uilib import html_traceback, rest_traceback
from cubicweb.web import formwidgets as wdgs
from cubicweb.web.formfields import guess_field
+from cubicweb.web.views.schema import SecurityViewMixIn
+
+from yams.buildobjs import EntityType
SUBMIT_MSGID = _('Submit bug report')
MAIL_SUBMIT_MSGID = _('Submit bug report by mail')
-
-class SecurityViewMixIn(object):
- """display security information for a given schema """
-
- def schema_definition(self, eschema, link=True, access_types=None):
- w = self.w
- _ = self._cw._
- if not access_types:
- access_types = eschema.ACTIONS
- w(u'<table class="schemaInfo">')
- w(u'<tr><th>%s</th><th>%s</th><th>%s</th></tr>' % (
- _("permission"), _('granted to groups'), _('rql expressions')))
- for access_type in access_types:
- w(u'<tr>')
- w(u'<td>%s</td>' % self._cw.__('%s_perm' % access_type))
- groups = eschema.get_groups(access_type)
- l = []
- groups = [(_(group), group) for group in groups]
- for trad, group in sorted(groups):
- if link:
- # XXX we should get a group entity and call its absolute_url
- # method
- l.append(u'<a href="%s" class="%s">%s</a><br/>' % (
- self._cw.build_url('cwgroup/%s' % group), group, trad))
- else:
- l.append(u'<div class="%s">%s</div>' % (group, trad))
- w(u'<td>%s</td>' % u''.join(l))
- rqlexprs = eschema.get_rqlexprs(access_type)
- w(u'<td>%s</td>' % u'<br/><br/>'.join(expr.expression for expr in rqlexprs))
- w(u'</tr>\n')
- w(u'</table>')
-
- def has_schema_modified_permissions(self, eschema, access_types):
- """ return True if eschema's actual permissions are diffrents
- from the default ones
- """
- for access_type in access_types:
- if eschema.get_rqlexprs(access_type):
- return True
- if eschema.get_groups(access_type) != \
- frozenset(eschema.get_default_groups()[access_type]):
- return True
- return False
-
-
-class SecurityManagementView(EntityView, SecurityViewMixIn):
+class SecurityManagementView(SecurityViewMixIn, EntityView):
"""display security information for a given entity"""
__regid__ = 'security'
__select__ = EntityView.__select__ & authenticated_user()
@@ -88,7 +46,7 @@
xml_escape(entity.dc_title())))
# first show permissions defined by the schema
self.w('<h2>%s</h2>' % _('schema\'s permissions definitions'))
- self.schema_definition(entity.e_schema)
+ self.permissions_table(entity.e_schema)
self.w('<h2>%s</h2>' % _('manage security'))
# ownership information
if self._cw.vreg.schema.rschema('owned_by').has_perm(self._cw, 'add',
--- a/web/views/schema.py Tue Apr 13 19:22:46 2010 +0200
+++ b/web/views/schema.py Tue Apr 13 19:43:51 2010 +0200
@@ -9,11 +9,13 @@
from itertools import cycle
+from logilab.common.ureports import Section, Table
from logilab.mtconverter import xml_escape
from yams import BASE_TYPES, schema2dot as s2d
+from yams.buildobjs import DEFAULT_ATTRPERMS
from cubicweb.selectors import (implements, yes, match_user_groups,
- has_related_entities)
+ has_related_entities, authenticated_user)
from cubicweb.schema import (META_RTYPES, SCHEMA_TYPES, SYSTEM_RTYPES,
WORKFLOW_TYPES, INTERNAL_TYPES)
from cubicweb.schemaviewer import SchemaViewer
@@ -21,7 +23,7 @@
from cubicweb import tags, uilib
from cubicweb.web import action, facet, uicfg
from cubicweb.web.views import TmpFileViewMixin
-from cubicweb.web.views import primary, baseviews, tabs, management
+from cubicweb.web.views import primary, baseviews, tabs, tableview, iprogress
ALWAYS_SKIP_TYPES = BASE_TYPES | SCHEMA_TYPES
SKIP_TYPES = (ALWAYS_SKIP_TYPES | META_RTYPES | SYSTEM_RTYPES | WORKFLOW_TYPES
@@ -38,23 +40,86 @@
_pvs.tag_subject_of(('*', '%s_permission' % _action, '*'), 'hidden')
_pvs.tag_object_of(('*', '%s_permission' % _action, '*'), 'hidden')
+_pvs.tag_object_of(('Workflow', 'workflow_of', 'CWEType'), 'hidden')
+_pvs.tag_subject_of(('CWEType', 'default_workflow', 'Workflow'), 'hidden')
+
+_pvs.tag_object_of(('*', 'relation_type', 'CWRType'), 'hidden')
+
+class SecurityViewMixIn(object):
+ """mixin providing methods to display security information for a entity,
+ relation or relation definition schema
+ """
+
+ def permissions_table(self, erschema, permissions=None):
+ self._cw.add_css('cubicweb.acl.css')
+ w = self.w
+ _ = self._cw._
+ w(u'<table class="schemaInfo">')
+ w(u'<tr><th>%s</th><th>%s</th><th>%s</th></tr>' % (
+ _("permission"), _('granted to groups'), _('rql expressions')))
+ for action in erschema.ACTIONS:
+ w(u'<tr><td>%s</td><td>' % _(action))
+ if permissions is None:
+ groups = erschema.get_groups(action)
+ else:
+ groups = permissions[action][0]
+ # XXX get group entity and call its incontext view
+ groups = [u'<a class="%s" href="%s">%s</a>' % (
+ group, self._cw.build_url('cwgroup/%s' % group), label)
+ for group, label in sorted((_(g), g) for g in groups)]
+ w(u'<br/>'.join(groups))
+ w(u'</td><td>')
+ if permissions is None:
+ rqlexprs = sorted(e.expression for e in erschema.get_rqlexprs(action))
+ else:
+ rqlexprs = permissions[action][1]
+ w(u'<br/>'.join(rqlexprs))
+ w(u'</td></tr>\n')
+ w(u'</table>')
+
+ def grouped_permissions_table(self, rschema):
+ # group relation definitions with identical permissions
+ perms = {}
+ for rdef in rschema.rdefs.itervalues():
+ rdef_perms = []
+ for action in ('read', 'add', 'delete'):
+ groups = sorted(rdef.get_groups(action))
+ exprs = sorted(e.expression for e in rdef.get_rqlexprs(action))
+ rdef_perms.append( (action, (tuple(groups), tuple(exprs))) )
+ rdef_perms = tuple(rdef_perms)
+ if rdef_perms in perms:
+ perms[rdef_perms].append( (rdef.subject, rdef.object) )
+ else:
+ perms[rdef_perms] = [(rdef.subject, rdef.object)]
+ # set layout permissions in a table for each group of relation
+ # definition
+ w = self.w
+ w(u'<div style="margin: 0px 1.5em">')
+ tmpl = u'<strong>%s</strong> %s <strong>%s</strong>'
+ for perm, rdefs in perms.iteritems():
+ w(u'<div>%s</div>' % u', '.join(
+ tmpl % (_(s.type), _(rschema.type), _(o.type)) for s, o in rdefs))
+ # accessing rdef from previous loop by design: only used to get
+ # ACTIONS
+ self.permissions_table(rdef, dict(perm))
+ w(u'</div>')
+
+
# global schema view ###########################################################
class SchemaView(tabs.TabsMixin, StartupView):
__regid__ = 'schema'
title = _('instance schema')
- tabs = [_('schema-text'), _('schema-image')]
- default_tab = 'schema-text'
+ tabs = [_('schema-description'), _('schema-image'), _('schema-security')]
+ default_tab = 'schema-description'
def call(self):
"""display schema information"""
- self._cw.add_js('cubicweb.ajax.js')
- self._cw.add_css(('cubicweb.schema.css','cubicweb.acl.css'))
self.w(u'<h1>%s</h1>' % _('Schema of the data model'))
self.render_tabs(self.tabs, self.default_tab)
-class SchemaTabImageView(StartupView):
+class SchemaImageTab(StartupView):
__regid__ = 'schema-image'
def call(self):
@@ -67,26 +132,28 @@
self._cw._("graphical representation of the instance'schema")))
-class SchemaTabTextView(StartupView):
- __regid__ = 'schema-text'
+class SchemaDescriptionTab(StartupView):
+ __regid__ = 'schema-description'
def call(self):
rset = self._cw.execute('Any X ORDERBY N WHERE X is CWEType, X name N, '
'X final FALSE')
self.wview('table', rset, displayfilter=True)
+ rset = self._cw.execute('Any X ORDERBY N WHERE X is CWRType, X name N, '
+ 'X final FALSE')
+ self.wview('table', rset, displayfilter=True)
+ owl_downloadurl = self._cw.build_url('view', vid='owl')
+ self.w(u'<div><a href="%s">%s</a></div>' %
+ (owl_downloadurl, self._cw._(u'Download schema as OWL')))
-class ManagerSchemaPermissionsView(StartupView, management.SecurityViewMixIn):
+class SchemaPermissionsTab(SecurityViewMixIn, StartupView):
__regid__ = 'schema-security'
__select__ = StartupView.__select__ & match_user_groups('managers')
def call(self, display_relations=True):
self._cw.add_css('cubicweb.acl.css')
skiptypes = skip_types(self._cw)
- formparams = {}
- formparams['sec'] = self.__regid__
- if not skiptypes:
- formparams['skipmeta'] = u'0'
schema = self._cw.vreg.schema
# compute entities
entities = sorted(eschema for eschema in schema.entities()
@@ -101,249 +168,377 @@
relations = []
# index
_ = self._cw._
- self.w(u'<div id="schema_security"><a id="index" href="index"/>')
- self.w(u'<h2 class="schema">%s</h2>' % _('index').capitalize())
- self.w(u'<h4>%s</h4>' % _('Entities').capitalize())
+ url = xml_escape(self._cw.build_url('schema'))
+ self.w(u'<div id="schema_security">')
+ self.w(u'<h2 class="schema">%s</h2>' % _('Index'))
+ self.w(u'<h4 id="entities">%s</h4>' % _('Entity types'))
ents = []
for eschema in sorted(entities):
- url = xml_escape(self._cw.build_url('schema', **formparams))
- ents.append(u'<a class="grey" href="%s#%s">%s</a> (%s)' % (
- url, eschema.type, eschema.type, _(eschema.type)))
+ ents.append(u'<a class="grey" href="%s#%s">%s</a>' % (
+ url, eschema.type, eschema.type))
self.w(u', '.join(ents))
- self.w(u'<h4>%s</h4>' % (_('relations').capitalize()))
+ self.w(u'<h4 id="relations">%s</h4>' % _('Relation types'))
rels = []
for rschema in sorted(relations):
- url = xml_escape(self._cw.build_url('schema', **formparams))
- rels.append(u'<a class="grey" href="%s#%s">%s</a> (%s), ' % (
- url , rschema.type, rschema.type, _(rschema.type)))
- self.w(u', '.join(ents))
- # entities
- self.display_entities(entities, formparams)
- # relations
+ rels.append(u'<a class="grey" href="%s#%s">%s</a>' % (
+ url , rschema.type, rschema.type))
+ self.w(u', '.join(rels))
+ # permissions tables
+ self.display_entities(entities)
if relations:
- self.display_relations(relations, formparams)
+ self.display_relations(relations)
self.w(u'</div>')
- def display_entities(self, entities, formparams):
+ def has_non_default_perms(self, rdef):
+ """return true if the given *attribute* relation definition has custom
+ permission
+ """
+ for action in rdef.ACTIONS:
+ def_rqlexprs = []
+ def_groups = []
+ for perm in DEFAULT_ATTRPERMS[action]:
+ if not isinstance(perm, basestring):
+ def_rqlexprs.append(perm.expression)
+ else:
+ def_groups.append(perm)
+ rqlexprs = [rql.expression for rql in rdef.get_rqlexprs(action)]
+ groups = rdef.get_groups(action)
+ if groups != frozenset(def_groups) or \
+ frozenset(rqlexprs) != frozenset(def_rqlexprs):
+ return True
+ return False
+
+ def display_entities(self, entities):
_ = self._cw._
- self.w(u'<a id="entities" href="entities"/>')
- self.w(u'<h2 class="schema">%s</h2>' % _('permissions for entities').capitalize())
+ url = xml_escape(self._cw.build_url('schema'))
+ self.w(u'<h2 id="entities" class="schema">%s</h2>' % _('Permissions for entity types'))
for eschema in entities:
- self.w(u'<a id="%s" href="%s"/>' % (eschema.type, eschema.type))
- self.w(u'<h3 class="schema">%s (%s) ' % (eschema.type, _(eschema.type)))
- url = xml_escape(self._cw.build_url('schema', **formparams) + '#index')
- self.w(u'<a href="%s"><img src="%s" alt="%s"/></a>' % (
+ self.w(u'<h3 id="%s" class="schema"><a href="%s">%s (%s)</a> ' % (
+ eschema.type, self._cw.build_url('cwetype/%s' % eschema.type),
+ eschema.type, _(eschema.type)))
+ self.w(u'<a href="%s#schema_security"><img src="%s" alt="%s"/></a>' % (
url, self._cw.external_resource('UP_ICON'), _('up')))
self.w(u'</h3>')
self.w(u'<div style="margin: 0px 1.5em">')
- self._cw.vreg.schema_definition(eschema, link=False)
+ self.permissions_table(eschema)
# display entity attributes only if they have some permissions modified
modified_attrs = []
for attr, etype in eschema.attribute_definitions():
- if self.has_schema_modified_permissions(attr, attr.ACTIONS):
- modified_attrs.append(attr)
- if modified_attrs:
- self.w(u'<h4>%s</h4>' % _('attributes with modified permissions:').capitalize())
+ rdef = eschema.rdef(attr)
+ if attr not in META_RTYPES and self.has_non_default_perms(rdef):
+ modified_attrs.append(rdef)
+ if modified_attrs:
+ self.w(u'<h4>%s</h4>' % _('Attributes with non default permissions:'))
self.w(u'</div>')
self.w(u'<div style="margin: 0px 6em">')
- for attr in modified_attrs:
- self.w(u'<h4 class="schema">%s (%s)</h4> ' % (attr.type, _(attr.type)))
- self._cw.vreg.schema_definition(attr, link=False)
+ for rdef in modified_attrs:
+ attrtype = str(rdef.rtype)
+ self.w(u'<h4 class="schema">%s (%s)</h4> ' % (attrtype, _(attrtype)))
+ self.permissions_table(rdef)
self.w(u'</div>')
- def display_relations(self, relations, formparams):
+ def display_relations(self, relations):
_ = self._cw._
- self.w(u'<a id="relations" href="relations"/>')
- self.w(u'<h2 class="schema">%s </h2>' % _('permissions for relations').capitalize())
+ url = xml_escape(self._cw.build_url('schema'))
+ self.w(u'<h2 id="relations" class="schema">%s</h2>' % _('Permissions for relations'))
for rschema in relations:
- self.w(u'<a id="%s" href="%s"/>' % (rschema.type, rschema.type))
- self.w(u'<h3 class="schema">%s (%s) ' % (rschema.type, _(rschema.type)))
- url = xml_escape(self._cw.build_url('schema', **formparams) + '#index')
- self.w(u'<a href="%s"><img src="%s" alt="%s"/></a>' % (
+ self.w(u'<h3 id="%s" class="schema"><a href="%s">%s (%s)</a> ' % (
+ rschema.type, self._cw.build_url('cwrtype/%s' % rschema.type),
+ rschema.type, _(rschema.type)))
+ self.w(u'<a href="%s#schema_security"><img src="%s" alt="%s"/></a>' % (
url, self._cw.external_resource('UP_ICON'), _('up')))
self.w(u'</h3>')
- self.w(u'<div style="margin: 0px 1.5em">')
- subjects = [str(subj) for subj in rschema.subjects()]
- self.w(u'<div><strong>%s</strong> %s (%s)</div>' % (
- _('subject_plural:'),
- ', '.join(str(subj) for subj in rschema.subjects()),
- ', '.join(_(str(subj)) for subj in rschema.subjects())))
- self.w(u'<div><strong>%s</strong> %s (%s)</div>' % (
- _('object_plural:'),
- ', '.join(str(obj) for obj in rschema.objects()),
- ', '.join(_(str(obj)) for obj in rschema.objects())))
- self._cw.vreg.schema_definition(rschema, link=False)
- self.w(u'</div>')
-
-
-class SchemaUreportsView(StartupView):
- __regid__ = 'schema-block'
-
- def call(self):
- viewer = SchemaViewer(self._cw)
- layout = viewer.visit_schema(self._cw.vreg.schema, display_relations=True,
- skiptypes=skip_types(self._cw))
- self.w(uilib.ureport_as_html(layout))
-
-
-# CWAttribute / CWRelation #####################################################
-
-class CWRDEFPrimaryView(primary.PrimaryView):
- __select__ = implements('CWAttribute', 'CWRelation')
- cache_max_age = 60*60*2 # stay in http cache for 2 hours by default
-
- def render_entity_title(self, entity):
- self.w(u'<h1><span class="etype">%s</span> %s</h1>'
- % (entity.dc_type().capitalize(),
- xml_escape(entity.dc_long_title())))
+ self.grouped_permissions_table(rschema)
# CWEType ######################################################################
+# register msgid generated in entity relations tables
+_('i18ncard_1'), _('i18ncard_?'), _('i18ncard_+'), _('i18ncard_*')
+
+class CWETypePrimaryView(tabs.TabbedPrimaryView):
+ __select__ = implements('CWEType')
+ tabs = [_('cwetype-description'), _('cwetype-box'), _('cwetype-workflow'),
+ _('cwetype-views'), _('cwetype-permissions')]
+ default_tab = 'cwetype-description'
+
+
+class CWETypeDescriptionTab(tabs.PrimaryTab):
+ __regid__ = 'cwetype-description'
+ __select__ = tabs.PrimaryTab.__select__ & implements('CWEType')
+
+ def render_entity_attributes(self, entity, siderelations=None):
+ _ = self._cw._
+ self.w(u'<div>%s</div>' % xml_escape(entity.description or u''))
+ # entity schema image
+ url = entity.absolute_url(vid='schemagraph')
+ self.w(u'<img src="%s" alt="%s"/>' % (
+ xml_escape(url),
+ xml_escape(_('graphical schema for %s') % entity.name)))
+ # entity schema attributes
+ self.w(u'<h2>%s</h2>' % _('Attributes'))
+ rset = self._cw.execute(
+ 'Any A,F,D,C,I,J,A,DE ORDERBY AA WHERE A is CWAttribute, '
+ 'A ordernum AA, A defaultval D, A description DE, A cardinality C, '
+ 'A fulltextindexed I, A internationalizable J, '
+ 'A relation_type R, R name N, A to_entity O, O name F, '
+ 'A from_entity S, S eid %(x)s',
+ {'x': entity.eid})
+ self.wview('table', rset, 'null',
+ cellvids={0: 'rdef-name-cell',
+ 3: 'etype-attr-cardinality-cell',
+ 6: 'rdef-constraints-cell'},
+ headers=(_(u'name'), _(u'type'),
+ _(u'default value'), _(u'required'),
+ _(u'fulltext indexed'), _(u'internationalizable'),
+ _(u'constraints'), _(u'description')),
+ mainindex=0)
+ # entity schema relations
+ self.w(u'<h2>%s</h2>' % _('Relations'))
+ rset = self._cw.execute(
+ 'Any A,TT,"i18ncard_"+SUBSTRING(C, 1, 1),K,A,TTN ORDERBY RN '
+ 'WHERE A is CWRelation, A composite K, A cardinality C, '
+ 'A relation_type R, R name RN, '
+ 'A to_entity TT, TT name TTN, A from_entity S, S eid %(x)s',
+ {'x': entity.eid})
+ if rset:
+ self.w(u'<h5>%s %s</h5>' % (entity.name, _('is subject of:')))
+ self.wview('table', rset, 'null',
+ cellvids={0: 'rdef-name-cell',
+ 2: 'etype-rel-cardinality-cell',
+ 4: 'rdef-constraints-cell'},
+ headers=(_(u'name'), _(u'object type'), _(u'cardinality'),
+ _(u'composite'), _(u'constraints')),
+ displaycols=range(5))
+ self.w(u'<br/>')
+ rset = self._cw.execute(
+ 'Any A,TT,"i18ncard_"+SUBSTRING(C, 2, 1),K,A,TTN ORDERBY RN '
+ 'WHERE A is CWRelation, A composite K, A cardinality C, '
+ 'A relation_type R, R name RN, '
+ 'A from_entity TT, TT name TTN, A to_entity O, O eid %(x)s',
+ {'x': entity.eid})
+ if rset:
+ self.w(u'<h5>%s %s</h5>' % (entity.name, _('is object of:')))
+ self.wview('table', rset, 'null',
+ cellvids={0: 'rdef-object-name-cell',
+ 2: 'etype-rel-cardinality-cell',
+ 4: 'rdef-constraints-cell'},
+ headers=(_(u'name'), _(u'subject type'), _(u'cardinality'),
+ _(u'composite'), _(u'constraints')),
+ displaycols=range(5))
+
+
+class CWETypeAttributeCardinalityCell(baseviews.FinalView):
+ __regid__ = 'etype-attr-cardinality-cell'
+
+ def cell_call(self, row, col):
+ if self.cw_rset.rows[row][col][0] == '1':
+ self.w(self._cw._(u'yes'))
+ else:
+ self.w(self._cw._(u'no'))
+
+
+class CWETypeRelationCardinalityCell(baseviews.FinalView):
+ __regid__ = 'etype-rel-cardinality-cell'
+
+ def cell_call(self, row, col):
+ self.w(self._cw._(self.cw_rset.rows[row][col]))
+
+
+class CWETypeBoxTab(EntityView):
+ __regid__ = 'cwetype-box'
+ __select__ = implements('CWEType')
+
+ def cell_call(self, row, col):
+ viewer = SchemaViewer(self._cw)
+ entity = self.cw_rset.get_entity(row, col)
+ eschema = self._cw.vreg.schema.eschema(entity.name)
+ layout = viewer.visit_entityschema(eschema)
+ self.w(uilib.ureport_as_html(layout))
+ self.w(u'<br class="clear"/>')
+
+
+class CWETypePermTab(SecurityViewMixIn, EntityView):
+ __regid__ = 'cwetype-permissions'
+ __select__ = implements('CWEType') & authenticated_user()
+
+ def cell_call(self, row, col):
+ self._cw.add_css('cubicweb.acl.css')
+ entity = self.cw_rset.get_entity(row, col)
+ eschema = self._cw.vreg.schema.eschema(entity.name)
+ self.w(u'<div style="margin: 0px 1.5em">')
+ self.permissions_table(eschema)
+ self.w(u'<h4>%s</h4>' % _('attributes permissions:').capitalize())
+ for attr, etype in eschema.attribute_definitions():
+ if attr not in META_RTYPES:
+ rdef = eschema.rdef(attr)
+ attrtype = str(rdef.rtype)
+ self.w(u'<h4 class="schema">%s (%s)</h4> ' % (attrtype, _(attrtype)))
+ self.permissions_table(rdef)
+ self.w(u'</div>')
+
+
+class CWETypeWorkflowTab(EntityView):
+ __regid__ = 'cwetype-workflow'
+ __select__ = (implements('CWEType')
+ & has_related_entities('workflow_of', 'object'))
+
+ def cell_call(self, row, col):
+ entity = self.cw_rset.get_entity(row, col)
+ if entity.default_workflow:
+ wf = entity.default_workflow[0]
+ if len(entity.reverse_workflow_of) > 1:
+ self.w(u'<h1>%s (%s)</h1>'
+ % (wf.name, self._cw._('default_workflow')))
+ self.display_workflow(wf)
+ defaultwfeid = wf.eid
+ else:
+ self.w(u'<div class="error">%s</div>'
+ % self._cw._('There is no default workflow'))
+ defaultwfeid = None
+ for altwf in entity.reverse_workflow_of:
+ if altwf.eid == defaultwfeid:
+ continue
+ self.w(u'<h1>%s</h1>' % altwf.name)
+ self.display_workflow(altwf)
+
+ def display_workflow(self, wf):
+ self.w(wf.view('wfgraph'))
+ self.w('<a href="%s">%s</a>' % (
+ wf.absolute_url(), self._cw._('more info about this workflow')))
+
+
+class CWETypeViewsTab(EntityView):
+ __regid__ = 'cwetype-views'
+ __select__ = EntityView.__select__ & implements('CWEType')
+
+ def cell_call(self, row, col):
+ entity = self.cw_rset.get_entity(row, col)
+ etype = entity.name
+ _ = self._cw._
+ # possible views for this entity type
+ views = [(_(view.title),) for view in self.possible_views(etype)]
+ self.wview('pyvaltable', pyvalue=sorted(views), headers=(_(u'views'),))
+
+ def possible_views(self, etype):
+ rset = self._cw.etype_rset(etype)
+ return [v for v in self._cw.vreg['views'].possible_views(self._cw, rset)
+ if v.category != 'startupview']
+
+
class CWETypeOneLineView(baseviews.OneLineView):
__select__ = implements('CWEType')
def cell_call(self, row, col, **kwargs):
entity = self.cw_rset.get_entity(row, col)
- final = entity.final
- if final:
+ if entity.final:
self.w(u'<em class="finalentity">')
super(CWETypeOneLineView, self).cell_call(row, col, **kwargs)
- if final:
+ if entity.final:
self.w(u'</em>')
-class CWETypePrimaryView(tabs.TabsMixin, primary.PrimaryView):
- __select__ = implements('CWEType')
- title = _('in memory entity schema')
- main_related_section = False
- tabs = [_('cwetype-schema-text'), _('cwetype-schema-image'),
- _('cwetype-schema-permissions'), _('cwetype-workflow')]
- default_tab = 'cwetype-schema-text'
-
- def render_entity(self, entity):
- self.render_entity_title(entity)
- self.w(u'<div>%s</div>' % entity.description)
- self.render_tabs(self.tabs, self.default_tab, entity)
-
-
-class CWETypeSTextView(EntityView):
- __regid__ = 'cwetype-schema-text'
- __select__ = EntityView.__select__ & implements('CWEType')
-
- def cell_call(self, row, col):
- entity = self.cw_rset.get_entity(row, col)
- self.w(u'<h2>%s</h2>' % _('Attributes'))
- rset = self._cw.execute('Any N,F,D,I,J,DE,A '
- 'ORDERBY AA WHERE A is CWAttribute, '
- 'A ordernum AA, A defaultval D, '
- 'A description DE, '
- 'A fulltextindexed I, A internationalizable J, '
- 'A relation_type R, R name N, '
- 'A to_entity O, O name F, '
- 'A from_entity S, S eid %(x)s',
- {'x': entity.eid})
- self.wview('editable-table', rset, 'null', displayfilter=True)
- self.w(u'<h2>%s</h2>' % _('Relations'))
- rset = self._cw.execute(
- 'Any R,C,TT,K,D,A,RN,TTN ORDERBY RN '
- 'WHERE A is CWRelation, A description D, A composite K, '
- 'A relation_type R, R name RN, A to_entity TT, TT name TTN, '
- 'A cardinality C, A from_entity S, S eid %(x)s',
- {'x': entity.eid})
- self.wview('editable-table', rset, 'null', displayfilter=True,
- displaycols=range(6), mainindex=5)
- rset = self._cw.execute(
- 'Any R,C,TT,K,D,A,RN,TTN ORDERBY RN '
- 'WHERE A is CWRelation, A description D, A composite K, '
- 'A relation_type R, R name RN, A from_entity TT, TT name TTN, '
- 'A cardinality C, A to_entity O, O eid %(x)s',
- {'x': entity.eid})
- self.wview('editable-table', rset, 'null', displayfilter=True,
- displaycols=range(6), mainindex=5)
-
-
-class CWETypeSImageView(EntityView):
- __regid__ = 'cwetype-schema-image'
- __select__ = EntityView.__select__ & implements('CWEType')
-
- def cell_call(self, row, col):
- entity = self.cw_rset.get_entity(row, col)
- url = entity.absolute_url(vid='schemagraph')
- self.w(u'<img src="%s" alt="%s"/>' % (
- xml_escape(url),
- xml_escape(self._cw._('graphical schema for %s') % entity.name)))
-
-
-class CWETypeSPermView(EntityView):
- __regid__ = 'cwetype-schema-permissions'
- __select__ = EntityView.__select__ & implements('CWEType')
-
- def cell_call(self, row, col):
- entity = self.cw_rset.get_entity(row, col)
- _ = self._cw._
- self.w(u'<h2>%s</h2>' % _('Add permissions'))
- rset = self._cw.execute('Any P WHERE X add_permission P, '
- 'X eid %(x)s',
- {'x': entity.eid})
- self.wview('outofcontext', rset, 'null')
- self.w(u'<h2>%s</h2>' % _('Read permissions'))
- rset = self._cw.execute('Any P WHERE X read_permission P, '
- 'X eid %(x)s',
- {'x': entity.eid})
- self.wview('outofcontext', rset, 'null')
- self.w(u'<h2>%s</h2>' % _('Update permissions'))
- rset = self._cw.execute('Any P WHERE X update_permission P, '
- 'X eid %(x)s',
- {'x': entity.eid})
- self.wview('outofcontext', rset, 'null')
- self.w(u'<h2>%s</h2>' % _('Delete permissions'))
- rset = self._cw.execute('Any P WHERE X delete_permission P, '
- 'X eid %(x)s',
- {'x': entity.eid})
- self.wview('outofcontext', rset, 'null')
-
-
-class CWETypeSWorkflowView(EntityView):
- __regid__ = 'cwetype-workflow'
- __select__ = (EntityView.__select__ & implements('CWEType') &
- has_related_entities('workflow_of', 'object'))
-
- def cell_call(self, row, col):
- entity = self.cw_rset.get_entity(row, col)
- if entity.default_workflow:
- wf = entity.default_workflow[0]
- self.w(u'<h1>%s (%s)</h1>' % (wf.name, self._cw._('default')))
- self.wf_image(wf)
- for altwf in entity.reverse_workflow_of:
- if altwf.eid == wf.eid:
- continue
- self.w(u'<h1>%s</h1>' % altwf.name)
- self.wf_image(altwf)
-
- def wf_image(self, wf):
- self.w(u'<img src="%s" alt="%s"/>' % (
- xml_escape(wf.absolute_url(vid='wfgraph')),
- xml_escape(self._cw._('graphical representation of %s') % wf.name)))
-
-
# CWRType ######################################################################
-class CWRTypeSchemaView(primary.PrimaryView):
+class CWRTypePrimaryView(tabs.TabbedPrimaryView):
__select__ = implements('CWRType')
- title = _('in memory relation schema')
- main_related_section = False
+ tabs = [_('cwrtype-description'), _('cwrtype-permissions')]
+ default_tab = 'cwrtype-description'
+
- def render_entity_attributes(self, entity):
- super(CWRTypeSchemaView, self).render_entity_attributes(entity)
+class CWRTypeDescriptionTab(tabs.PrimaryTab):
+ __regid__ = 'cwrtype-description'
+ __select__ = implements('CWRType')
+
+ def render_entity_attributes(self, entity, siderelations=None):
+ _ = self._cw._
+ self.w(u'<div>%s</div>' % xml_escape(entity.description or u''))
rschema = self._cw.vreg.schema.rschema(entity.name)
- viewer = SchemaViewer(self._cw)
- layout = viewer.visit_relationschema(rschema, title=False)
- self.w(uilib.ureport_as_html(layout))
if not rschema.final:
- msg = self._cw._('graphical schema for %s') % entity.name
+ msg = _('graphical schema for %s') % entity.name
self.w(tags.img(src=entity.absolute_url(vid='schemagraph'),
alt=msg))
+ rset = self._cw.execute('Any R,C,CC,R WHERE R is CWRelation, '
+ 'R relation_type RT, RT eid %(x)s, '
+ 'R cardinality C, R composite CC',
+ {'x': entity.eid})
+ self.wview('table', rset, 'null',
+ headers=(_(u'relation'), _(u'cardinality'), _(u'composite'),
+ _(u'constraints')),
+ cellvids={3: 'rdef-constraints-cell'})
+
+
+class CWRTypePermTab(SecurityViewMixIn, EntityView):
+ __regid__ = 'cwrtype-permissions'
+ __select__ = implements('CWRType') & authenticated_user()
+
+ def cell_call(self, row, col):
+ self._cw.add_css('cubicweb.acl.css')
+ entity = self.cw_rset.get_entity(row, col)
+ rschema = self._cw.vreg.schema.rschema(entity.name)
+ self.grouped_permissions_table(rschema)
+
+
+# CWAttribute / CWRelation #####################################################
+
+class CWRDEFPrimaryView(tabs.TabbedPrimaryView):
+ __select__ = implements('CWRelation', 'CWAttribute')
+ tabs = [_('cwrdef-description'), _('cwrdef-permissions')]
+ default_tab = 'cwrdef-description'
+
+class CWRDEFDescriptionTab(tabs.PrimaryTab):
+ __regid__ = 'cwrdef-description'
+ __select__ = implements('CWRelation', 'CWAttribute')
+
+class CWRDEFPermTab(SecurityViewMixIn, EntityView):
+ __regid__ = 'cwrdef-permissions'
+ __select__ = implements('CWRelation', 'CWAttribute') & authenticated_user()
+
+ def cell_call(self, row, col):
+ entity = self.cw_rset.get_entity(row, col)
+ rschema = self._cw.vreg.schema.rschema(entity.rtype.name)
+ rdef = rschema.rdefs[(entity.stype.name, entity.otype.name)]
+ self.permissions_table(rdef)
+
+
+class CWRDEFNameView(tableview.CellView):
+ """display relation name and its translation only in a cell view, link to
+ relation definition's primary view (for use in entity type relations table
+ for instance)
+ """
+ __regid__ = 'rdef-name-cell'
+ __select__ = implements('CWRelation', 'CWAttribute')
+
+ def cell_call(self, row, col):
+ entity = self.cw_rset.get_entity(row, col)
+ rtype = entity.relation_type[0].name
+ # XXX use context entity + pgettext
+ self.w(u'<a href="%s">%s</a> (%s)' % (
+ entity.absolute_url(), rtype, self._cw._(rtype)))
+
+class CWRDEFObjectNameView(tableview.CellView):
+ """same as CWRDEFNameView but when the context is the object entity
+ """
+ __regid__ = 'rdef-object-name-cell'
+ __select__ = implements('CWRelation', 'CWAttribute')
+
+ def cell_call(self, row, col):
+ entity = self.cw_rset.get_entity(row, col)
+ rtype = entity.relation_type[0].name
+ # XXX use context entity + pgettext
+ self.w(u'<a href="%s">%s</a> (%s)' % (
+ entity.absolute_url(), rtype, self._cw.__(rtype + '_object')))
+
+class CWRDEFConstraintsCell(EntityView):
+ __regid__ = 'rdef-constraints-cell'
+ __select__ = implements('CWAttribute', 'CWRelation')
+
+ def cell_call(self, row, col):
+ entity = self.cw_rset.get_entity(row, col)
+ rschema = self._cw.vreg.schema.rschema(entity.rtype.name)
+ rdef = rschema.rdefs[(entity.stype.name, entity.otype.name)]
+ constraints = [xml_escape(str(c)) for c in getattr(rdef, 'constraints')]
+ self.w(u'<br/>'.join(constraints))
# schema images ###############################################################
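
The optional permissions argument accepted by permissions_table above has the same shape as what grouped_permissions_table builds: a mapping from action name to a (group names, rql expression strings) pair, covering every action in erschema.ACTIONS. A sketch of such a mapping follows; the group names and expression are illustrative, not taken from this changeset.

# shape expected by SecurityViewMixIn.permissions_table(erschema, permissions=...)
# keys must cover erschema.ACTIONS; each value is (group names, rql expression strings)
explicit_perms = {
    'read':   (('managers', 'users', 'guests'), ()),
    'add':    (('managers',), ()),
    'update': (('managers',), ('X owned_by U',)),   # illustrative expression
    'delete': (('managers',), ()),
}
# inside a view mixing in SecurityViewMixIn, with eschema an entity schema:
#   self.permissions_table(eschema, permissions=explicit_perms)
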
--- a/web/views/sessions.py Tue Apr 13 19:22:46 2010 +0200
+++ b/web/views/sessions.py Tue Apr 13 19:43:51 2010 +0200
@@ -10,6 +10,7 @@
from cubicweb.web import InvalidSession
from cubicweb.web.application import AbstractSessionManager
+from cubicweb.dbapi import DBAPISession
class InMemoryRepositorySessionManager(AbstractSessionManager):
@@ -40,26 +41,28 @@
if self.has_expired(session):
self.close_session(session)
raise InvalidSession()
- # give an opportunity to auth manager to hijack the session (necessary
- # with the RepositoryAuthenticationManager in case the connection to the
- # repository has expired)
try:
- session = self.authmanager.validate_session(req, session)
- # necessary in case session has been hijacked
- self._sessions[session.sessionid] = session
+ user = self.authmanager.validate_session(req, session)
except InvalidSession:
# invalid session
- del self._sessions[sessionid]
+ self.close_session(session)
raise
+ # associate the connection to the current request
+ req.set_session(session, user)
return session
def open_session(self, req):
- """open and return a new session for the given request
+ """open and return a new session for the given request. The session is
+ also bound to the request.
- :raise ExplicitLogin: if authentication is required
+ raise :exc:`cubicweb.AuthenticationError` if authentication failed
+ (no authentication info found or wrong user/password)
"""
- session = self.authmanager.authenticate(req)
+ cnx, login, authinfo = self.authmanager.authenticate(req)
+ session = DBAPISession(cnx, login, authinfo)
self._sessions[session.sessionid] = session
+ # associate the connection to the current request
+ req.set_session(session)
return session
def close_session(self, session):
@@ -69,8 +72,9 @@
self.info('closing http session %s' % session)
del self._sessions[session.sessionid]
try:
- session.close()
+ session.cnx.close()
except:
# already closed, may occurs if the repository session expired but
# not the web session
pass
+ session.cnx = None
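
With the change above, the web session wraps a DBAPI connection (DBAPISession), and the call sites changed earlier in this changeset rely on that: per-session data is a plain dict on the session, and anonymity is a session flag rather than a connection attribute. A small sketch of the new call-site idiom inside a view; the 'captcha' key mirrors web/views/editviews.py.

def remember_captcha_answer(view, text):
    # store per-session data through the session's data dict
    view._cw.session.data['captcha'] = text      # was: view._cw.set_session_data('captcha', text)
    # anonymity is now asked of the session, not of the connection
    return view._cw.session.anonymous_session    # was: view._cw.cnx.anonymous_connection
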
--- a/web/views/tableview.py Tue Apr 13 19:22:46 2010 +0200
+++ b/web/views/tableview.py Tue Apr 13 19:43:51 2010 +0200
@@ -326,3 +326,67 @@
class EditableInitialTableTableView(InitialTableView):
__regid__ = 'editable-initialtable'
finalview = 'editable-final'
+
+
+class EntityAttributesTableView(EntityView):
+ """This table displays entity attributes in a table and allow to set a
+ specific method to help building cell content for each attribute as well as
+ column header.
+
+ Table will render entity cell by using the appropriate build_COLNAME_cell
+ methods if defined otherwise cell content will be entity.COLNAME.
+
+ Table will render column header using the method header_for_COLNAME if
+ defined otherwise COLNAME will be used.
+ """
+ __abstract__ = True
+ columns = ()
+ table_css = "listing"
+ css_files = ()
+
+ def call(self, columns=None):
+ if self.css_files:
+ self._cw.add_css(self.css_files)
+ _ = self._cw._
+ self.columns = columns or self.columns
+ ecls = self._cw.vreg['etypes'].etype_class(self.cw_rset.description[0][0])
+ self.w(u'<table class="%s">' % self.table_css)
+ self.table_header(ecls)
+ self.w(u'<tbody>')
+ for row in xrange(self.cw_rset.rowcount):
+ self.cell_call(row=row, col=0)
+ self.w(u'</tbody>')
+ self.w(u'</table>')
+
+ def cell_call(self, row, col):
+ _ = self._cw._
+ entity = self.cw_rset.get_entity(row, col)
+ infos = {}
+ for col in self.columns:
+ meth = getattr(self, 'build_%s_cell' % col, None)
+ # find the build method or try to find matching attribute
+ if meth:
+ content = meth(entity)
+ else:
+ content = entity.printable_value(col)
+ infos[col] = content
+ self.w(u"""<tr onmouseover="addElementClass(this, 'highlighted');"
+ onmouseout="removeElementClass(this, 'highlighted')">""")
+ line = u''.join(u'<td>%%(%s)s</td>' % col for col in self.columns)
+ self.w(line % infos)
+ self.w(u'</tr>\n')
+
+ def table_header(self, ecls):
+ """builds the table's header"""
+ self.w(u'<thead><tr>')
+ _ = self._cw._
+ for column in self.columns:
+ meth = getattr(self, 'header_for_%s' % column, None)
+ if meth:
+ colname = meth(ecls)
+ else:
+ colname = _(column)
+ self.w(u'<th>%s</th>' % xml_escape(colname))
+ self.w(u'</tr></thead>\n')
+
+
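
A minimal concrete subclass of the new EntityAttributesTableView, showing the build_COLNAME_cell / header_for_COLNAME hooks and the table_css / css_files class attributes also used by ProgressTableView above. The Version entity type and its attributes are illustrative assumptions, not part of this changeset.

_ = unicode  # i18n marker, as at the top of cubicweb view modules

from cubicweb.selectors import implements
from cubicweb.web.views.tableview import EntityAttributesTableView

class VersionTableView(EntityAttributesTableView):
    __regid__ = 'version_table_view'
    __select__ = implements('Version')            # hypothetical entity type
    table_css = 'listing'
    columns = (_('name'), _('eta_date'))          # hypothetical attributes

    def header_for_eta_date(self, ecls):
        # custom column header; without this method the translated column name is used
        return self._cw._('expected date')

    def build_name_cell(self, entity):
        # custom cell content; without this method entity.printable_value('name') is used
        return entity.view('incontext')
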
--- a/web/views/urlpublishing.py Tue Apr 13 19:22:46 2010 +0200
+++ b/web/views/urlpublishing.py Tue Apr 13 19:43:51 2010 +0200
@@ -130,8 +130,7 @@
if len(parts) != 1:
raise PathDontMatch()
try:
- rset = req.execute('Any X WHERE X eid %(x)s',
- {'x': typed_eid(parts[0])}, 'x')
+ rset = req.execute('Any X WHERE X eid %(x)s', {'x': typed_eid(parts[0])})
except ValueError:
raise PathDontMatch()
if rset.rowcount == 0:
@@ -178,7 +177,7 @@
rql = u'Any X WHERE X is %s, X %s %%(x)s' % (etype, attrname)
if attrname == 'eid':
try:
- rset = req.execute(rql, {'x': typed_eid(value)}, 'x')
+ rset = req.execute(rql, {'x': typed_eid(value)})
except (ValueError, TypeResolverException):
# conflicting eid/type
raise PathDontMatch()
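
Several hunks in this changeset (editcontroller.py, editviews.py, urlpublishing.py, workflow.py) make the same mechanical change: the trailing cache-key argument formerly passed to execute() is dropped. A sketch of the call-site change, with an arbitrary eid value:

def lookup_by_eid(req, eid):
    # before this changeset, a cache-key argument followed the RQL arguments:
    #   req.execute('Any X WHERE X eid %(x)s', {'x': eid}, 'x')
    # after this changeset, only the RQL string and its arguments remain
    return req.execute('Any X WHERE X eid %(x)s', {'x': eid})
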
--- a/web/views/workflow.py Tue Apr 13 19:22:46 2010 +0200
+++ b/web/views/workflow.py Tue Apr 13 19:43:51 2010 +0200
@@ -11,6 +11,9 @@
__docformat__ = "restructuredtext en"
_ = unicode
+import tempfile
+import os
+
from logilab.mtconverter import xml_escape
from logilab.common.graph import escape, GraphGenerator, DotBackend
@@ -24,6 +27,7 @@
from cubicweb.web import uicfg, stdmsgs, action, component, form, action
from cubicweb.web import formfields as ff, formwidgets as fwdgs
from cubicweb.web.views import TmpFileViewMixin, forms, primary, autoform
+from cubicweb.web.views.tabs import TabbedPrimaryView, PrimaryTab
_pvs = uicfg.primaryview_section
_pvs.tag_subject_of(('Workflow', 'initial_state', '*'), 'hidden')
@@ -129,7 +133,7 @@
headers = (_('from_state'), _('to_state'), _('comment'), _('date'))
rql = '%s %s, X eid %%(x)s' % (sel, rql)
try:
- rset = self._cw.execute(rql, {'x': eid}, 'x')
+ rset = self._cw.execute(rql, {'x': eid})
except Unauthorized:
return
if rset:
@@ -187,6 +191,7 @@
_pvs.tag_subject_of(('Workflow', 'initial_state', '*'), 'hidden')
_pvs.tag_object_of(('*', 'state_of', 'Workflow'), 'hidden')
_pvs.tag_object_of(('*', 'transition_of', 'Workflow'), 'hidden')
+_pvs.tag_object_of(('*', 'default_workflow', 'Workflow'), 'hidden')
_abaa = uicfg.actionbox_appearsin_addmenu
_abaa.tag_subject_of(('BaseTransition', 'condition', 'RQLExpression'), False)
@@ -198,14 +203,10 @@
_abaa.tag_object_of(('Transition', 'transition_of', 'Workflow'), True)
_abaa.tag_object_of(('WorkflowTransition', 'transition_of', 'Workflow'), True)
-class WorkflowPrimaryView(primary.PrimaryView):
+class WorkflowPrimaryView(TabbedPrimaryView):
__select__ = implements('Workflow')
-
- def render_entity_attributes(self, entity):
- self.w(entity.view('reledit', rtype='description'))
- self.w(u'<img src="%s" alt="%s"/>' % (
- xml_escape(entity.absolute_url(vid='wfgraph')),
- xml_escape(self._cw._('graphical workflow for %s') % entity.name)))
+ tabs = [ _('wf_tab_info'), _('wfgraph'),]
+ default_tab = 'wf_tab_info'
class CellView(view.EntityView):
@@ -225,6 +226,59 @@
self.w(xml_escape(self._cw.view('textincontext', self.cw_rset,
row=row, col=col)))
+class WorkflowTabTextView(PrimaryTab):
+ __regid__ = 'wf_tab_info'
+ __select__ = PrimaryTab.__select__ & one_line_rset() & implements('Workflow')
+
+ def render_entity_attributes(self, entity):
+ _ = self._cw._
+ self.w(u'<div>%s</div>' % (entity.printable_value('description')))
+ self.w(u'<span>%s%s</span>' % (_("workflow_of").capitalize(), _(" :")))
+ html = []
+ for e in entity.workflow_of:
+ view = e.view('outofcontext')
+ if entity.eid == e.default_workflow[0].eid:
+ view += u' <span>[%s]</span>' % _('default_workflow')
+ html.append(view)
+ self.w(', '.join(v for v in html))
+ self.w(u'<h2>%s</h2>' % _("Transition_plural"))
+ rset = self._cw.execute(
+ 'Any T,T,DS,T,TT ORDERBY TN WHERE T transition_of WF, WF eid %(x)s,'
+ 'T type TT, T name TN, T destination_state DS?', {'x': entity.eid})
+ self.wview('editable-table', rset, 'null',
+ cellvids={ 1: 'trfromstates', 2: 'outofcontext', 3:'trsecurity',},
+ headers = (_('Transition'), _('from_state'),
+ _('to_state'), _('permissions'), _('type') ),
+ )
+
+
+class TransitionSecurityTextView(view.EntityView):
+ __regid__ = 'trsecurity'
+ __select__ = implements('Transition')
+
+ def cell_call(self, row, col):
+ _ = self._cw._
+ entity = self.cw_rset.get_entity(self.cw_row, self.cw_col)
+ if entity.require_group:
+ self.w(u'<div>%s%s %s</div>' %
+ (_('groups'), _(" :"),
+ u', '.join((g.view('incontext') for g
+ in entity.require_group))))
+ if entity.condition:
+ self.w(u'<div>%s%s %s</div>' %
+ ( _('conditions'), _(" :"),
+ u'<br/>'.join((e.dc_title() for e
+ in entity.condition))))
+
+class TransitionAllowedTextView(view.EntityView):
+ __regid__ = 'trfromstates'
+ __select__ = implements('Transition')
+
+ def cell_call(self, row, col):
+ entity = self.cw_rset.get_entity(self.cw_row, self.cw_col)
+ self.w(u', '.join((e.view('outofcontext') for e
+ in entity.reverse_allowed_transition)))
+
# workflow entity types edition ################################################
@@ -284,24 +338,18 @@
def node_properties(self, stateortransition):
"""return default DOT drawing options for a state or transition"""
props = {'label': stateortransition.printable_value('name'),
- 'fontname': 'Courier'}
+ 'fontname': 'Courier', 'fontsize':10,
+ 'href': stateortransition.absolute_url(),
+ }
if hasattr(stateortransition, 'state_of'):
props['shape'] = 'box'
props['style'] = 'filled'
if stateortransition.reverse_initial_state:
- props['color'] = '#88CC88'
+ props['fillcolor'] = '#88CC88'
else:
props['shape'] = 'ellipse'
descr = []
tr = stateortransition
- if tr.require_group:
- descr.append('%s %s'% (
- self._('groups:'),
- ','.join(g.printable_value('name') for g in tr.require_group)))
- if tr.condition:
- descr.append('%s %s'% (
- self._('condition:'),
- ' | '.join(e.expression for e in tr.condition)))
if descr:
props['label'] += escape('\n'.join(descr))
return props
@@ -331,17 +379,39 @@
yield transition.eid, outgoingstate.eid, transition
+class WorkflowGraphView(view.EntityView):
+ __regid__ = 'wfgraph'
+ __select__ = EntityView.__select__ & one_line_rset() & implements('Workflow')
+
+ def cell_call(self, row, col):
+ entity = self.cw_rset.get_entity(row, col)
+ visitor = WorkflowVisitor(entity)
+ prophdlr = WorkflowDotPropsHandler(self._cw)
+ wfname = 'workflow%s' % str(entity.eid)
+ generator = GraphGenerator(DotBackend(wfname, None,
+ ratio='compress', size='30,10'))
+ # map file
+ pmap, mapfile = tempfile.mkstemp(".map", wfname)
+ os.close(pmap)
+ # image file
+ fd, tmpfile = tempfile.mkstemp('.png')
+ os.close(fd)
+ generator.generate(visitor, prophdlr, tmpfile, mapfile)
+ self.w(u'<img src="%s" alt="%s" usemap="#%s" />' % (
+ xml_escape(entity.absolute_url(vid='wfimage', tmpfile=tmpfile)),
+ xml_escape(self._cw._('graphical workflow for %s') % entity.name),
+ wfname))
+ stream = open(mapfile, 'r').read()
+ stream = stream.decode(self._cw.encoding)
+ self.w(stream)
+ os.unlink(mapfile)
+
class WorkflowImageView(TmpFileViewMixin, view.EntityView):
- __regid__ = 'wfgraph'
+ __regid__ = 'wfimage'
__select__ = implements('Workflow')
content_type = 'image/png'
- def _generate(self, tmpfile):
- """display schema information for an entity"""
- entity = self.cw_rset.get_entity(self.cw_row, self.cw_col)
- visitor = WorkflowVisitor(entity)
- prophdlr = WorkflowDotPropsHandler(self._cw)
- generator = GraphGenerator(DotBackend('workflow', 'LR',
- ratio='compress', size='30,12'))
- return generator.generate(visitor, prophdlr, tmpfile)
-
+ def cell_call(self, row=0, col=0):
+ tmpfile = self._cw.form.get('tmpfile', None)
+ self.w(open(tmpfile, 'rb').read())
+ os.unlink(tmpfile)
--- a/wsgi/handler.py Tue Apr 13 19:22:46 2010 +0200
+++ b/wsgi/handler.py Tue Apr 13 19:43:51 2010 +0200
@@ -9,8 +9,7 @@
__docformat__ = "restructuredtext en"
from cubicweb import AuthenticationError
-from cubicweb.web import (NotFound, Redirect, DirectResponse, StatusResponse,
- ExplicitLogin)
+from cubicweb.web import Redirect, DirectResponse, StatusResponse, LogOut
from cubicweb.web.application import CubicWebPublisher
from cubicweb.wsgi.request import CubicWebWsgiRequest
@@ -113,8 +112,6 @@
req.set_header('WWW-Authenticate', [('Basic', {'realm' : realm })], raw=False)
try:
self.appli.connect(req)
- except AuthenticationError:
- return self.request_auth(req)
except Redirect, ex:
return self.redirect(req, ex.location)
path = req.path
@@ -126,12 +123,9 @@
return WSGIResponse(200, req, ex.response)
except StatusResponse, ex:
return WSGIResponse(ex.status, req, ex.content)
- except NotFound:
- result = self.appli.notfound_content(req)
- return WSGIResponse(404, req, result)
- except ExplicitLogin: # must be before AuthenticationError
+ except AuthenticationError:
return self.request_auth(req)
- except AuthenticationError:
+ except LogOut:
if self.config['auth-mode'] == 'cookie':
# in cookie mode redirecting to the index view is enough :
# either anonymous connection is allowed and the page will
--- a/wsgi/request.py Tue Apr 13 19:22:46 2010 +0200
+++ b/wsgi/request.py Tue Apr 13 19:43:51 2010 +0200
@@ -38,9 +38,9 @@
post, files = self.get_posted_data()
super(CubicWebWsgiRequest, self).__init__(vreg, https, post)
if files is not None:
- for fdef in files.itervalues():
- fdef[0] = unicode(fdef[0], self.encoding)
- self.form.update(files)
+ for key, (name, _, stream) in files.iteritems():
+ name = unicode(name, self.encoding)
+ self.form[key] = (name, stream)
# prepare output headers
self.headers_out = {}