# HG changeset patch # User Sylvain Thénault # Date 1270728709 -7200 # Node ID d14bfd477c4465123738356a2f9107886a3a2adc # Parent e8235b2789fcdf30fb611dad713a459993b43594# Parent f3c2cb460ad9f2cdf8f100b8c42fafb1335c5a35 backport stable diff -r f3c2cb460ad9 -r d14bfd477c44 README --- a/README Thu Apr 08 12:42:47 2010 +0200 +++ b/README Thu Apr 08 14:11:49 2010 +0200 @@ -1,6 +1,15 @@ CubicWeb semantic web framework =============================== +CubicWeb is a entities / relations based knowledge management system +developped at Logilab. + +This package contains: +* a repository server +* a RQL command line client to the repository +* an adaptative modpython interface to the server +* a bunch of other management tools + Install ------- diff -r f3c2cb460ad9 -r d14bfd477c44 __pkginfo__.py --- a/__pkginfo__.py Thu Apr 08 12:42:47 2010 +0200 +++ b/__pkginfo__.py Thu Apr 08 14:11:49 2010 +0200 @@ -1,36 +1,21 @@ # pylint: disable-msg=W0622,C0103 """cubicweb global packaging information for the cubicweb knowledge management software + :license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses """ -distname = "cubicweb" -modname = "cubicweb" +modname = distname = "cubicweb" numversion = (3, 7, 3) version = '.'.join(str(num) for num in numversion) -license = 'LGPL' -copyright = '''Copyright (c) 2003-2010 LOGILAB S.A. (Paris, FRANCE). -http://www.logilab.fr/ -- mailto:contact@logilab.fr''' - +description = "a repository of entities / relations for knowledge management" author = "Logilab" author_email = "contact@logilab.fr" - -short_desc = "a repository of entities / relations for knowledge management" -long_desc = """CubicWeb is a entities / relations based knowledge management system -developped at Logilab. 
- -This package contains: -* a repository server -* a RQL command line client to the repository -* an adaptative modpython interface to the server -* a bunch of other management tools -""" - web = 'http://www.cubicweb.org' ftp = 'ftp://ftp.logilab.org/pub/cubicweb' -pyversions = ['2.5', '2.6'] +license = 'LGPL' classifiers = [ 'Environment :: Web Environment', @@ -39,6 +24,32 @@ 'Programming Language :: JavaScript', ] +__depends__ = { + 'logilab-common': '>= 0.49.0', + 'logilab-mtconverter': '>= 0.6.0', + 'rql': '>= 0.26.0', + 'yams': '>= 0.28.1', + 'docutils': '>= 0.6', + #gettext # for xgettext, msgcat, etc... + # web dependancies + 'simplejson': '>= 2.0.9', + 'lxml': '', + 'Twisted': '', + # XXX graphviz + # server dependencies + 'logilab-database': '', + 'pysqlite': '>= 2.5.5', # XXX install pysqlite2 + } + +__recommends__ = { + 'Pyro': '>= 3.9.1', + 'PIL': '', # for captcha + 'pycrypto': '', # for crypto extensions + 'fyzz': '>= 0.1.0', # for sparql + 'vobject': '>= 0.6.0', # for ical view + #'Products.FCKeditor':'', + #'SimpleTAL':'>= 4.1.6', + } import sys from os import listdir, environ @@ -49,57 +60,53 @@ if not s.endswith('.bat')] include_dirs = [join('test', 'data'), join('server', 'test', 'data'), + join('hooks', 'test', 'data'), join('web', 'test', 'data'), join('devtools', 'test', 'data'), 'skeleton'] -entities_dir = 'entities' -schema_dir = 'schemas' -sobjects_dir = 'sobjects' -server_migration_dir = join('misc', 'migration') -data_dir = join('web', 'data') -wdoc_dir = join('web', 'wdoc') -wdocimages_dir = join(wdoc_dir, 'images') -views_dir = join('web', 'views') -i18n_dir = 'i18n' +_server_migration_dir = join('misc', 'migration') +_data_dir = join('web', 'data') +_wdoc_dir = join('web', 'wdoc') +_wdocimages_dir = join(_wdoc_dir, 'images') +_views_dir = join('web', 'views') +_i18n_dir = 'i18n' -if environ.get('APYCOT_ROOT'): +_pyversion = '.'.join(str(num) for num in sys.version_info[0:2]) +if '--home' in sys.argv: # --home install - pydir = 
'python' + pydir = 'python' + _pyversion else: - python_version = '.'.join(str(num) for num in sys.version_info[0:2]) - pydir = join('python' + python_version, 'site-packages') + pydir = join('python' + _pyversion, 'site-packages') try: data_files = [ - # common data - #[join('share', 'cubicweb', 'entities'), - # [join(entities_dir, filename) for filename in listdir(entities_dir)]], # server data [join('share', 'cubicweb', 'schemas'), - [join(schema_dir, filename) for filename in listdir(schema_dir)]], - #[join('share', 'cubicweb', 'sobjects'), - # [join(sobjects_dir, filename) for filename in listdir(sobjects_dir)]], + [join('schemas', filename) for filename in listdir('schemas')]], [join('share', 'cubicweb', 'migration'), - [join(server_migration_dir, filename) - for filename in listdir(server_migration_dir)]], + [join(_server_migration_dir, filename) + for filename in listdir(_server_migration_dir)]], # web data [join('share', 'cubicweb', 'cubes', 'shared', 'data'), - [join(data_dir, fname) for fname in listdir(data_dir) if not isdir(join(data_dir, fname))]], + [join(_data_dir, fname) for fname in listdir(_data_dir) + if not isdir(join(_data_dir, fname))]], [join('share', 'cubicweb', 'cubes', 'shared', 'data', 'timeline'), - [join(data_dir, 'timeline', fname) for fname in listdir(join(data_dir, 'timeline'))]], + [join(_data_dir, 'timeline', fname) for fname in listdir(join(_data_dir, 'timeline'))]], [join('share', 'cubicweb', 'cubes', 'shared', 'data', 'images'), - [join(data_dir, 'images', fname) for fname in listdir(join(data_dir, 'images'))]], + [join(_data_dir, 'images', fname) for fname in listdir(join(_data_dir, 'images'))]], [join('share', 'cubicweb', 'cubes', 'shared', 'wdoc'), - [join(wdoc_dir, fname) for fname in listdir(wdoc_dir) if not isdir(join(wdoc_dir, fname))]], + [join(_wdoc_dir, fname) for fname in listdir(_wdoc_dir) + if not isdir(join(_wdoc_dir, fname))]], [join('share', 'cubicweb', 'cubes', 'shared', 'wdoc', 'images'), - 
[join(wdocimages_dir, fname) for fname in listdir(wdocimages_dir)]], - # XXX: .pt install should be handled properly in a near future version + [join(_wdocimages_dir, fname) for fname in listdir(_wdocimages_dir)]], + [join('share', 'cubicweb', 'cubes', 'shared', 'i18n'), + [join(_i18n_dir, fname) for fname in listdir(_i18n_dir)]], + # XXX: drop .pt files [join('lib', pydir, 'cubicweb', 'web', 'views'), - [join(views_dir, fname) for fname in listdir(views_dir) if fname.endswith('.pt')]], - [join('share', 'cubicweb', 'cubes', 'shared', 'i18n'), - [join(i18n_dir, fname) for fname in listdir(i18n_dir)]], + [join(_views_dir, fname) for fname in listdir(_views_dir) + if fname.endswith('.pt')]], # skeleton ] except OSError: diff -r f3c2cb460ad9 -r d14bfd477c44 cwconfig.py --- a/cwconfig.py Thu Apr 08 12:42:47 2010 +0200 +++ b/cwconfig.py Thu Apr 08 14:11:49 2010 +0200 @@ -126,12 +126,11 @@ import sys import os import logging -import tempfile from smtplib import SMTP from threading import Lock -from os.path import exists, join, expanduser, abspath, normpath, basename, isdir +from os.path import (exists, join, expanduser, abspath, normpath, + basename, isdir, dirname) from warnings import warn - from logilab.common.decorators import cached, classproperty from logilab.common.deprecation import deprecated from logilab.common.logging_ext import set_log_methods, init_log @@ -179,6 +178,23 @@ % (directory, modes)) return modes[0] +def _find_prefix(start_path=CW_SOFTWARE_ROOT): + """Runs along the parent directories of *start_path* (default to cubicweb source directory) + looking for one containing a 'share/cubicweb' directory. + The first matching directory is assumed as the prefix installation of cubicweb + + Returns the matching prefix or None. 
+ """ + prefix = start_path + old_prefix = None + if not isdir(start_path): + prefix = dirname(start_path) + while not isdir(join(prefix, 'share', 'cubicweb')) and prefix != old_prefix: + old_prefix = prefix + prefix = dirname(prefix) + if isdir(join(prefix, 'share', 'cubicweb')): + return prefix + return sys.prefix # persistent options definition PERSISTENT_OPTIONS = ( @@ -251,6 +267,11 @@ CWDEV = exists(join(CW_SOFTWARE_ROOT, '.hg')) +try: + _INSTALL_PREFIX = os.environ['CW_INSTALL_PREFIX'] +except KeyError: + _INSTALL_PREFIX = _find_prefix() + class CubicWebNoAppConfiguration(ConfigurationMixIn): """base class for cubicweb configuration without a specific instance directory """ @@ -264,25 +285,16 @@ # debug mode debugmode = False - if os.environ.get('APYCOT_ROOT'): - mode = 'test' - # allow to test cubes within apycot using cubicweb not installed by - # apycot - if __file__.startswith(os.environ['APYCOT_ROOT']): - CUBES_DIR = '%(APYCOT_ROOT)s/local/share/cubicweb/cubes/' % os.environ - # create __init__ file - file(join(CUBES_DIR, '__init__.py'), 'w').close() - else: - CUBES_DIR = '/usr/share/cubicweb/cubes/' - elif (CWDEV and _forced_mode != 'system'): + + if (CWDEV and _forced_mode != 'system'): mode = 'user' - CUBES_DIR = abspath(normpath(join(CW_SOFTWARE_ROOT, '../cubes'))) + _CUBES_DIR = join(CW_SOFTWARE_ROOT, '../cubes') else: - if _forced_mode == 'user': - mode = 'user' - else: - mode = 'system' - CUBES_DIR = '/usr/share/cubicweb/cubes/' + mode = _forced_mode or 'system' + _CUBES_DIR = join(_INSTALL_PREFIX, 'share', 'cubicweb', 'cubes') + + CUBES_DIR = env_path('CW_CUBES_DIR', _CUBES_DIR, 'cubes', checkexists=False) + CUBES_PATH = os.environ.get('CW_CUBES_PATH', '').split(os.pathsep) options = ( ('log-threshold', @@ -344,7 +356,6 @@ }), ) # static and class methods used to get instance independant resources ## - @staticmethod def cubicweb_version(): """return installed cubicweb version""" @@ -383,21 +394,17 @@ % directory) continue for cube in 
os.listdir(directory): - if isdir(join(directory, cube)) and not cube == 'shared': + if cube in ('CVS', '.svn', 'shared', '.hg'): + continue + if isdir(join(directory, cube)): cubes.add(cube) return sorted(cubes) @classmethod def cubes_search_path(cls): """return the path of directories where cubes should be searched""" - path = [] - try: - for directory in os.environ['CW_CUBES_PATH'].split(os.pathsep): - directory = abspath(normpath(directory)) - if exists(directory) and not directory in path: - path.append(directory) - except KeyError: - pass + path = [abspath(normpath(directory)) for directory in cls.CUBES_PATH + if directory.strip() and exists(directory.strip())] if not cls.CUBES_DIR in path and exists(cls.CUBES_DIR): path.append(cls.CUBES_DIR) return path @@ -413,7 +420,7 @@ @classmethod def cube_dir(cls, cube): """return the cube directory for the given cube id, - raise ConfigurationError if it doesn't exists + raise `ConfigurationError` if it doesn't exists """ for directory in cls.cubes_search_path(): cubedir = join(directory, cube) @@ -431,10 +438,12 @@ """return the information module for the given cube""" cube = CW_MIGRATION_MAP.get(cube, cube) try: - return getattr(__import__('cubes.%s.__pkginfo__' % cube), cube).__pkginfo__ + parent = __import__('cubes.%s.__pkginfo__' % cube) + return getattr(parent, cube).__pkginfo__ except Exception, ex: - raise ConfigurationError('unable to find packaging information for ' - 'cube %s (%s: %s)' % (cube, ex.__class__.__name__, ex)) + raise ConfigurationError( + 'unable to find packaging information for cube %s (%s: %s)' + % (cube, ex.__class__.__name__, ex)) @classmethod def cube_version(cls, cube): @@ -446,14 +455,43 @@ return Version(version) @classmethod + def _cube_deps(cls, cube, key, oldkey): + """return cubicweb cubes used by the given cube""" + pkginfo = cls.cube_pkginfo(cube) + try: + # explicit __xxx_cubes__ attribute + deps = getattr(pkginfo, key) + except AttributeError: + # deduce cubes from generic 
__xxx__ attribute + try: + gendeps = getattr(pkginfo, key.replace('_cubes', '')) + except AttributeError: + # bw compat + if hasattr(pkginfo, oldkey): + warn('[3.8] %s is deprecated, use %s dict' % (oldkey, key), + DeprecationWarning) + deps = getattr(pkginfo, oldkey) + else: + deps = {} + else: + deps = dict( (x[len('cubicweb-'):], v) + for x, v in gendeps.iteritems() + if x.startswith('cubicweb-')) + if not isinstance(deps, dict): + deps = dict((key, None) for key in deps) + warn('[3.8] cube %s should define %s as a dict' % (cube, key), + DeprecationWarning) + return deps + + @classmethod def cube_dependencies(cls, cube): """return cubicweb cubes used by the given cube""" - return getattr(cls.cube_pkginfo(cube), '__use__', ()) + return cls._cube_deps(cube, '__depends_cubes__', '__use__') @classmethod def cube_recommends(cls, cube): """return cubicweb cubes recommended by the given cube""" - return getattr(cls.cube_pkginfo(cube), '__recommend__', ()) + return cls._cube_deps(cube, '__recommends_cubes__', '__recommend__') @classmethod def expand_cubes(cls, cubes, with_recommends=False): @@ -486,9 +524,10 @@ graph = {} for cube in cubes: cube = CW_MIGRATION_MAP.get(cube, cube) - deps = cls.cube_dependencies(cube) + \ - cls.cube_recommends(cube) - graph[cube] = set(dep for dep in deps if dep in cubes) + graph[cube] = set(dep for dep in cls.cube_dependencies(cube) + if dep in cubes) + graph[cube] |= set(dep for dep in cls.cube_recommends(cube) + if dep in cubes) cycles = get_cycles(graph) if cycles: cycles = '\n'.join(' -> '.join(cycle) for cycle in cycles) @@ -636,6 +675,7 @@ cw_rest_init() def adjust_sys_path(self): + # overriden in CubicWebConfiguration self.cls_adjust_sys_path() def init_log(self, logthreshold=None, debug=False, @@ -685,35 +725,24 @@ """ return None + class CubicWebConfiguration(CubicWebNoAppConfiguration): """base class for cubicweb server and web configurations""" - INSTANCES_DATA_DIR = None - if os.environ.get('APYCOT_ROOT'): - root = 
os.environ['APYCOT_ROOT'] - REGISTRY_DIR = '%s/etc/cubicweb.d/' % root - if not exists(REGISTRY_DIR): - os.makedirs(REGISTRY_DIR) - RUNTIME_DIR = tempfile.gettempdir() - # allow to test cubes within apycot using cubicweb not installed by - # apycot - if __file__.startswith(os.environ['APYCOT_ROOT']): - MIGRATION_DIR = '%s/local/share/cubicweb/migration/' % root + if CubicWebNoAppConfiguration.mode == 'user': + _INSTANCES_DIR = expanduser('~/etc/cubicweb.d/') + else: #mode = 'system' + if _INSTALL_PREFIX == '/usr': + _INSTANCES_DIR = '/etc/cubicweb.d/' else: - MIGRATION_DIR = '/usr/share/cubicweb/migration/' - else: - if CubicWebNoAppConfiguration.mode == 'user': - REGISTRY_DIR = expanduser('~/etc/cubicweb.d/') - RUNTIME_DIR = tempfile.gettempdir() - INSTANCES_DATA_DIR = REGISTRY_DIR - else: #mode = 'system' - REGISTRY_DIR = '/etc/cubicweb.d/' - RUNTIME_DIR = '/var/run/cubicweb/' - INSTANCES_DATA_DIR = '/var/lib/cubicweb/instances/' - if CWDEV: - MIGRATION_DIR = join(CW_SOFTWARE_ROOT, 'misc', 'migration') - else: - MIGRATION_DIR = '/usr/share/cubicweb/migration/' + _INSTANCES_DIR = join(_INSTALL_PREFIX, 'etc', 'cubicweb.d') + + if os.environ.get('APYCOT_ROOT'): + _cubes_init = join(CubicWebNoAppConfiguration.CUBES_DIR, '__init__.py') + if not exists(_cubes_init): + file(join(_cubes_init), 'w').close() + if not exists(_INSTANCES_DIR): + os.makedirs(_INSTANCES_DIR) # for some commands (creation...) 
we don't want to initialize gettext set_language = True @@ -757,25 +786,19 @@ ) @classmethod - def runtime_dir(cls): - """run time directory for pid file...""" - return env_path('CW_RUNTIME_DIR', cls.RUNTIME_DIR, 'run time') - - @classmethod - def registry_dir(cls): + def instances_dir(cls): """return the control directory""" - return env_path('CW_INSTANCES_DIR', cls.REGISTRY_DIR, 'registry') - - @classmethod - def instance_data_dir(cls): - """return the instance data directory""" - return env_path('CW_INSTANCES_DATA_DIR', cls.INSTANCES_DATA_DIR, - 'additional data') + return env_path('CW_INSTANCES_DIR', cls._INSTANCES_DIR, 'registry') @classmethod def migration_scripts_dir(cls): """cubicweb migration scripts directory""" - return env_path('CW_MIGRATION_DIR', cls.MIGRATION_DIR, 'migration') + if CWDEV: + return join(CW_SOFTWARE_ROOT, 'misc', 'migration') + mdir = join(_INSTALL_PREFIX, 'share', 'cubicweb', 'migration') + if not exists(mdir): + raise ConfigurationError('migration path %s doesn\'t exist' % mdir) + return mdir @classmethod def config_for(cls, appid, config=None): @@ -798,9 +821,10 @@ """return the home directory of the instance with the given instance id """ - home = join(cls.registry_dir(), appid) + home = join(cls.instances_dir(), appid) if not exists(home): - raise ConfigurationError('no such instance %s (check it exists with "cubicweb-ctl list")' % appid) + raise ConfigurationError('no such instance %s (check it exists with' + ' "cubicweb-ctl list")' % appid) return home MODES = ('common', 'repository', 'Any', 'web') @@ -823,7 +847,9 @@ def default_log_file(self): """return default path to the log file of the instance'server""" if self.mode == 'user': - basepath = join(tempfile.gettempdir(), '%s-%s' % (basename(self.appid), self.name)) + import tempfile + basepath = join(tempfile.gettempdir(), '%s-%s' % ( + basename(self.appid), self.name)) path = basepath + '.log' i = 1 while exists(path) and i < 100: # arbitrary limit to avoid infinite loop @@ 
-838,7 +864,13 @@ def default_pid_file(self): """return default path to the pid file of the instance'server""" - return join(self.runtime_dir(), '%s-%s.pid' % (self.appid, self.name)) + if self.mode == 'system': + # XXX not under _INSTALL_PREFIX, right? + rtdir = env_path('CW_RUNTIME_DIR', '/var/run/cubicweb/', 'run time') + else: + import tempfile + rtdir = env_path('CW_RUNTIME_DIR', tempfile.gettempdir(), 'run time') + return join(rtdir, '%s-%s.pid' % (self.appid, self.name)) # instance methods used to get instance specific resources ############# @@ -858,11 +890,17 @@ @property def apphome(self): - return join(self.registry_dir(), self.appid) + return join(self.instances_dir(), self.appid) @property def appdatahome(self): - return join(self.instance_data_dir(), self.appid) + if self.mode == 'system': + # XXX not under _INSTALL_PREFIX, right? + iddir = '/var/lib/cubicweb/instances/' + else: + iddir = self.instances_dir() + iddir = env_path('CW_INSTANCES_DATA_DIR', iddir, 'additional data') + return join(iddir, self.appid) def init_cubes(self, cubes): assert self._cubes is None, self._cubes @@ -927,7 +965,8 @@ if exists(sitefile) and not sitefile in self._site_loaded: self._load_site_cubicweb(sitefile) self._site_loaded.add(sitefile) - self.warning('[3.5] site_erudi.py is deprecated, should be renamed to site_cubicweb.py') + self.warning('[3.5] site_erudi.py is deprecated, should be ' + 'renamed to site_cubicweb.py') def _load_site_cubicweb(self, sitefile): # XXX extrapath argument to load_module_from_file only in lgc > 0.46 diff -r f3c2cb460ad9 -r d14bfd477c44 cwctl.py --- a/cwctl.py Thu Apr 08 12:42:47 2010 +0200 +++ b/cwctl.py Thu Apr 08 14:11:49 2010 +0200 @@ -13,6 +13,7 @@ # possible (for cubicweb-ctl reactivity, necessary for instance for usable bash # completion). So import locally in command helpers. 
import sys +from warnings import warn from os import remove, listdir, system, pathsep try: from os import kill, getpgid @@ -85,7 +86,7 @@ Instance used by another one should appears first in the file (one instance per line) """ - regdir = cwcfg.registry_dir() + regdir = cwcfg.instances_dir() _allinstances = list_instances(regdir) if isfile(join(regdir, 'startorder')): allinstances = [] @@ -168,84 +169,6 @@ # base commands ############################################################### -def version_strictly_lower(a, b): - from logilab.common.changelog import Version - if a: - a = Version(a) - if b: - b = Version(b) - return a < b - -def max_version(a, b): - from logilab.common.changelog import Version - return str(max(Version(a), Version(b))) - -class ConfigurationProblem(object): - """Each cube has its own list of dependencies on other cubes/versions. - - The ConfigurationProblem is used to record the loaded cubes, then to detect - inconsistencies in their dependencies. - - See configuration management on wikipedia for litterature. 
- """ - - def __init__(self): - self.cubes = {} - - def add_cube(self, name, info): - self.cubes[name] = info - - def solve(self): - self.warnings = [] - self.errors = [] - self.read_constraints() - for cube, versions in sorted(self.constraints.items()): - oper, version = None, None - # simplify constraints - if versions: - for constraint in versions: - op, ver = constraint - if oper is None: - oper = op - version = ver - elif op == '>=' and oper == '>=': - version = max_version(ver, version) - else: - print 'unable to handle this case', oper, version, op, ver - # "solve" constraint satisfaction problem - if cube not in self.cubes: - self.errors.append( ('add', cube, version) ) - elif versions: - lower_strict = version_strictly_lower(self.cubes[cube].version, version) - if oper in ('>=','='): - if lower_strict: - self.errors.append( ('update', cube, version) ) - else: - print 'unknown operator', oper - - def read_constraints(self): - self.constraints = {} - self.reverse_constraints = {} - for cube, info in self.cubes.items(): - if hasattr(info,'__depends_cubes__'): - use = info.__depends_cubes__ - if not isinstance(use, dict): - use = dict((key, None) for key in use) - self.warnings.append('cube %s should define __depends_cubes__ as a dict not a list') - else: - self.warnings.append('cube %s should define __depends_cubes__' % cube) - use = dict((key, None) for key in info.__use__) - for name, constraint in use.items(): - self.constraints.setdefault(name,set()) - if constraint: - try: - oper, version = constraint.split() - self.constraints[name].add( (oper, version) ) - except: - self.warnings.append('cube %s depends on %s but constraint badly formatted: %s' - % (cube, name, constraint)) - self.reverse_constraints.setdefault(name, set()).add(cube) - class ListCommand(Command): """List configurations, cubes and instances. 
@@ -262,6 +185,7 @@ """run the command with its specific arguments""" if args: raise BadCommandUsage('Too much arguments') + from cubicweb.migration import ConfigurationProblem print 'CubicWeb %s (%s mode)' % (cwcfg.cubicweb_version(), cwcfg.mode) print print 'Available configurations:' @@ -273,7 +197,7 @@ continue print ' ', line print - cfgpb = ConfigurationProblem() + cfgpb = ConfigurationProblem(cwcfg) try: cubesdir = pathsep.join(cwcfg.cubes_search_path()) namesize = max(len(x) for x in cwcfg.available_cubes()) @@ -284,26 +208,31 @@ else: print 'Available cubes (%s):' % cubesdir for cube in cwcfg.available_cubes(): - if cube in ('CVS', '.svn', 'shared', '.hg'): - continue try: tinfo = cwcfg.cube_pkginfo(cube) tversion = tinfo.version - cfgpb.add_cube(cube, tinfo) + cfgpb.add_cube(cube, tversion) except ConfigurationError: tinfo = None tversion = '[missing cube information]' print '* %s %s' % (cube.ljust(namesize), tversion) if self.config.verbose: - shortdesc = tinfo and (getattr(tinfo, 'short_desc', '') - or tinfo.__doc__) - if shortdesc: - print ' '+ ' \n'.join(shortdesc.splitlines()) + if tinfo: + descr = getattr(tinfo, 'description', '') + if not descr: + descr = getattr(tinfo, 'short_desc', '') + if descr: + warn('[3.8] short_desc is deprecated, update %s' + ' pkginfo' % cube, DeprecationWarning) + else: + descr = tinfo.__doc__ + if descr: + print ' '+ ' \n'.join(descr.splitlines()) modes = detect_available_modes(cwcfg.cube_dir(cube)) print ' available modes: %s' % ', '.join(modes) print try: - regdir = cwcfg.registry_dir() + regdir = cwcfg.instances_dir() except ConfigurationError, ex: print 'No instance available:', ex print @@ -611,7 +540,7 @@ actionverb = 'restarted' def run_args(self, args, askconfirm): - regdir = cwcfg.registry_dir() + regdir = cwcfg.instances_dir() if not isfile(join(regdir, 'startorder')) or len(args) <= 1: # no specific startorder super(RestartInstanceCommand, self).run_args(args, askconfirm) @@ -953,7 +882,7 @@ def run(self, 
args): """run the command with its specific arguments""" - regdir = cwcfg.registry_dir() + regdir = cwcfg.instances_dir() for appid in sorted(listdir(regdir)): print appid diff -r f3c2cb460ad9 -r d14bfd477c44 cwvreg.py --- a/cwvreg.py Thu Apr 08 12:42:47 2010 +0200 +++ b/cwvreg.py Thu Apr 08 14:11:49 2010 +0200 @@ -602,7 +602,7 @@ def solutions(self, req, rqlst, args): def type_from_eid(eid, req=req): return req.describe(eid)[0] - self.rqlhelper.compute_solutions(rqlst, {'eid': type_from_eid}, args) + return self.rqlhelper.compute_solutions(rqlst, {'eid': type_from_eid}, args) def parse(self, req, rql, args=None): rqlst = self.rqlhelper.parse(rql) diff -r f3c2cb460ad9 -r d14bfd477c44 dbapi.py --- a/dbapi.py Thu Apr 08 12:42:47 2010 +0200 +++ b/dbapi.py Thu Apr 08 14:11:49 2010 +0200 @@ -14,6 +14,7 @@ from logging import getLogger from time import time, clock from itertools import count +from warnings import warn from logilab.common.logging_ext import set_log_methods from logilab.common.decorators import monkeypatch @@ -282,9 +283,11 @@ if user: self.set_entity_cache(user) - def execute(self, *args, **kwargs): - """Session interface compatibility""" - return self.cursor.execute(*args, **kwargs) + def execute(self, rql, args=None, eid_key=None, build_descr=True): + if eid_key is not None: + warn('[3.8] eid_key is deprecated, you can safely remove this argument', + DeprecationWarning, stacklevel=2) + return self.cursor.execute(rql, args, build_descr=build_descr) set_log_methods(DBAPIRequest, getLogger('cubicweb.dbapi')) @@ -706,34 +709,42 @@ self._closed = True - def execute(self, operation, parameters=None, eid_key=None, build_descr=True): - """Prepare and execute a database operation (query or command). - Parameters may be provided as sequence or mapping and will be bound to - variables in the operation. Variables are specified in a - database-specific notation (see the module's paramstyle attribute for - details). 
+ def execute(self, rql, args=None, eid_key=None, build_descr=True): + """execute a rql query, return resulting rows and their description in + a :class:`~cubicweb.rset.ResultSet` object + + * `rql` should be an Unicode string or a plain ASCII string, containing + the rql query - A reference to the operation will be retained by the cursor. If the - same operation object is passed in again, then the cursor can optimize - its behavior. This is most effective for algorithms where the same - operation is used, but different parameters are bound to it (many - times). + * `args` the optional args dictionary associated to the query, with key + matching named substitution in `rql` + + * `build_descr` is a boolean flag indicating if the description should + be built on select queries (if false, the description will be en empty + list) + + on INSERT queries, there will be one row for each inserted entity, + containing its eid - For maximum efficiency when reusing an operation, it is best to use the - setinputsizes() method to specify the parameter types and sizes ahead - of time. It is legal for a parameter to not match the predefined - information; the implementation should compensate, possibly with a loss - of efficiency. + on SET queries, XXX describe + + DELETE queries returns no result. + + .. Note:: + to maximize the rql parsing/analyzing cache performance, you should + always use substitute arguments in queries, i.e. avoid query such as:: + + execute('Any X WHERE X eid 123') - The parameters may also be specified as list of tuples to e.g. insert - multiple rows in a single operation, but this kind of usage is - depreciated: executemany() should be used instead. + use:: - Return values are not defined by the DB-API, but this here it returns a - ResultSet object. 
+ execute('Any X WHERE X eid %(x)s', {'x': 123}) """ - self._res = rset = self._repo.execute(self._sessid, operation, - parameters, eid_key, build_descr) + if eid_key is not None: + warn('[3.8] eid_key is deprecated, you can safely remove this argument', + DeprecationWarning, stacklevel=2) + self._res = rset = self._repo.execute(self._sessid, rql, + args, build_descr) rset.req = self.req self._index = 0 return rset diff -r f3c2cb460ad9 -r d14bfd477c44 debian/control --- a/debian/control Thu Apr 08 12:42:47 2010 +0200 +++ b/debian/control Thu Apr 08 14:11:49 2010 +0200 @@ -83,7 +83,7 @@ Architecture: all XB-Python-Version: ${python:Versions} Depends: ${python:Depends}, cubicweb-common (= ${source:Version}), python-simplejson (>= 1.3) -Recommends: python-docutils, python-vobject, fckeditor, python-fyzz, python-pysixt, fop, python-imaging +Recommends: python-docutils, python-vobject, fckeditor, python-fyzz, python-imaging Description: web interface library for the CubicWeb framework CubicWeb is a semantic web application framework. . 
@@ -97,7 +97,7 @@ Package: cubicweb-common Architecture: all XB-Python-Version: ${python:Versions} -Depends: ${python:Depends}, graphviz, gettext, python-logilab-mtconverter (>= 0.6.0), python-logilab-common (>= 0.49.0), python-yams (>= 0.28.1), python-rql (>= 0.25.0), python-lxml +Depends: ${python:Depends}, graphviz, gettext, python-logilab-mtconverter (>= 0.6.0), python-logilab-common (>= 0.49.0), python-yams (>= 0.28.1), python-rql (>= 0.26.0), python-lxml Recommends: python-simpletal (>= 4.0), python-crypto Conflicts: cubicweb-core Replaces: cubicweb-core diff -r f3c2cb460ad9 -r d14bfd477c44 devtools/__init__.py --- a/devtools/__init__.py Thu Apr 08 12:42:47 2010 +0200 +++ b/devtools/__init__.py Thu Apr 08 14:11:49 2010 +0200 @@ -97,9 +97,6 @@ }), )) - if not os.environ.get('APYCOT_ROOT'): - REGISTRY_DIR = normpath(join(CW_SOFTWARE_ROOT, '../cubes')) - def __init__(self, appid, log_threshold=logging.CRITICAL+10): ServerConfiguration.__init__(self, appid) self.init_log(log_threshold, force=True) diff -r f3c2cb460ad9 -r d14bfd477c44 devtools/devctl.py --- a/devtools/devctl.py Thu Apr 08 12:42:47 2010 +0200 +++ b/devtools/devctl.py Thu Apr 08 14:11:49 2010 +0200 @@ -22,7 +22,8 @@ from cubicweb.__pkginfo__ import version as cubicwebversion from cubicweb import CW_SOFTWARE_ROOT as BASEDIR, BadCommandUsage -from cubicweb.toolsutils import Command, copy_skeleton, underline_title +from cubicweb.toolsutils import (SKEL_EXCLUDE, Command, + copy_skeleton, underline_title) from cubicweb.web.webconfig import WebConfiguration from cubicweb.server.serverconfig import ServerConfiguration @@ -440,12 +441,19 @@ """Create a new cube. - the name of the new cube + the name of the new cube. It should be a valid python module name. """ name = 'newcube' arguments = '' options = ( + ("layout", + {'short': 'L', 'type' : 'choice', 'metavar': '', + 'default': 'simple', 'choices': ('simple', 'full'), + 'help': 'cube layout. 
You\'ll get a minimal cube with the "simple" \ +layout, and a full featured cube with "full" layout.', + } + ), ("directory", {'short': 'd', 'type' : 'string', 'metavar': '', 'help': 'directory where the new cube should be created', @@ -475,14 +483,28 @@ 'help': 'cube author\'s web site', } ), + ("license", + {'short': 'l', 'type' : 'choice', 'metavar': '', + 'default': 'LGPL', 'choices': ('GPL', 'LGPL', ''), + 'help': 'cube license', + } + ), ) + LICENSES = { + 'LGPL': 'GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses', + 'GPL': 'GNU General Public License, v2.1 - http://www.gnu.org/licenses', + '': 'closed source' + } def run(self, args): + import re from logilab.common.shellutils import ASK if len(args) != 1: raise BadCommandUsage("exactly one argument (cube name) is expected") - cubename, = args + cubename = args[0] + if not re.match('[_A-Za-z][_A-Za-z0-9]*$', cubename): + raise BadCommandUsage("cube name should be a valid python module name") verbose = self.get('verbose') cubesdir = self.get('directory') if not cubesdir: @@ -533,8 +555,14 @@ 'author': self['author'], 'author-email': self['author-email'], 'author-web-site': self['author-web-site'], + 'license': self['license'], + 'long-license': self.LICENSES[self['license']], } - copy_skeleton(skeldir, cubedir, context) + exclude = SKEL_EXCLUDE + if self['layout'] == 'simple': + exclude += ('sobjects.py*', 'precreate.py*', 'realdb_test*', + 'cubes.*', 'external_resources*') + copy_skeleton(skeldir, cubedir, context, exclude=exclude) def _ask_for_dependancies(self): from logilab.common.shellutils import ASK diff -r f3c2cb460ad9 -r d14bfd477c44 devtools/repotest.py --- a/devtools/repotest.py Thu Apr 08 12:42:47 2010 +0200 +++ b/devtools/repotest.py Thu Apr 08 14:11:49 2010 +0200 @@ -233,8 +233,8 @@ self._dumb_sessions.append(s) return s - def execute(self, rql, args=None, eid_key=None, build_descr=True): - return self.o.execute(self.session, rql, args, eid_key, build_descr) + def 
execute(self, rql, args=None, build_descr=True): + return self.o.execute(self.session, rql, args, build_descr) def commit(self): self.session.commit() diff -r f3c2cb460ad9 -r d14bfd477c44 devtools/testlib.py --- a/devtools/testlib.py Thu Apr 08 12:42:47 2010 +0200 +++ b/devtools/testlib.py Thu Apr 08 14:11:49 2010 +0200 @@ -5,6 +5,8 @@ :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr :license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses """ +from __future__ import with_statement + __docformat__ = "restructuredtext en" import os @@ -13,6 +15,7 @@ from urllib import unquote from math import log from contextlib import contextmanager +from warnings import warn import simplejson @@ -30,6 +33,7 @@ from cubicweb.dbapi import repo_connect, ConnectionProperties, ProgrammingError from cubicweb.sobjects import notification from cubicweb.web import Redirect, application +from cubicweb.server.session import security_enabled from cubicweb.devtools import SYSTEM_ENTITIES, SYSTEM_RELATIONS, VIEW_VALIDATORS from cubicweb.devtools import fake, htmlparser @@ -292,7 +296,7 @@ upassword=password, **kwargs) req.execute('SET X in_group G WHERE X eid %%(x)s, G name IN(%s)' % ','.join(repr(g) for g in groups), - {'x': user.eid}, 'x') + {'x': user.eid}) user.clear_related_cache('in_group', 'subject') if commit: req.cnx.commit() @@ -333,8 +337,11 @@ """executes , builds a resultset, and returns a couple (rset, req) where req is a FakeRequest """ + if eidkey is not None: + warn('[3.8] eidkey is deprecated, you can safely remove this argument', + DeprecationWarning, stacklevel=2) req = req or self.request(rql=rql) - return self.cnx.cursor(req).execute(unicode(rql), args, eidkey) + return req.execute(unicode(rql), args) @nocoverage def commit(self): @@ -355,14 +362,14 @@ # # server side db api ####################################################### def sexecute(self, rql, args=None, eid_key=None): + if eid_key is not None: + warn('[3.8] eid_key is 
deprecated, you can safely remove this argument', + DeprecationWarning, stacklevel=2) self.session.set_pool() - return self.session.execute(rql, args, eid_key) + return self.session.execute(rql, args) # other utilities ######################################################### - def entity(self, rql, args=None, eidkey=None, req=None): - return self.execute(rql, args, eidkey, req=req).get_entity(0, 0) - @contextmanager def temporary_appobjects(self, *appobjects): self.vreg._loadedmods.setdefault(self.__module__, {}) @@ -686,29 +693,19 @@ # deprecated ############################################################### + @deprecated('[3.8] use self.execute(...).get_entity(0, 0)') + def entity(self, rql, args=None, eidkey=None, req=None): + if eidkey is not None: + warn('[3.8] eidkey is deprecated, you can safely remove this argument', + DeprecationWarning, stacklevel=2) + return self.execute(rql, args, req=req).get_entity(0, 0) + @deprecated('[3.6] use self.request().create_entity(...)') def add_entity(self, etype, req=None, **kwargs): if req is None: req = self.request() return req.create_entity(etype, **kwargs) - @deprecated('[3.4] use self.vreg["etypes"].etype_class(etype)(self.request())') - def etype_instance(self, etype, req=None): - req = req or self.request() - e = self.vreg['etypes'].etype_class(etype)(req) - e.eid = None - return e - - @nocoverage - @deprecated('[3.4] use req = self.request(); rset = req.execute()', - stacklevel=3) - def rset_and_req(self, rql, optional_args=None, args=None, eidkey=None): - """executes , builds a resultset, and returns a - couple (rset, req) where req is a FakeRequest - """ - return (self.execute(rql, args, eidkey), - self.request(rql=rql, **optional_args or {})) - # auto-populating test classes and utilities ################################### @@ -792,6 +789,10 @@ """this method populates the database with `how_many` entities of each possible type. 
It also inserts random relations between them """ + with security_enabled(self.session, read=False, write=False): + self._auto_populate(how_many) + + def _auto_populate(self, how_many): cu = self.cursor() self.custom_populate(how_many, cu) vreg = self.vreg diff -r f3c2cb460ad9 -r d14bfd477c44 entities/authobjs.py --- a/entities/authobjs.py Thu Apr 08 12:42:47 2010 +0200 +++ b/entities/authobjs.py Thu Apr 08 14:11:49 2010 +0200 @@ -96,7 +96,7 @@ try: return self._cw.execute( 'Any X WHERE X eid %(x)s, X owned_by U, U eid %(u)s', - {'x': eid, 'u': self.eid}, 'x') + {'x': eid, 'u': self.eid}) except Unauthorized: return False owns = cached(owns, keyarg=1) @@ -105,13 +105,11 @@ rql = 'Any P WHERE P is CWPermission, U eid %(u)s, U in_group G, '\ 'P name %(pname)s, P require_group G' kwargs = {'pname': pname, 'u': self.eid} - cachekey = None if contexteid is not None: rql += ', X require_permission P, X eid %(x)s' kwargs['x'] = contexteid - cachekey = 'x' try: - return self._cw.execute(rql, kwargs, cachekey) + return self._cw.execute(rql, kwargs) except Unauthorized: return False diff -r f3c2cb460ad9 -r d14bfd477c44 entities/lib.py --- a/entities/lib.py Thu Apr 08 12:42:47 2010 +0200 +++ b/entities/lib.py Thu Apr 08 14:11:49 2010 +0200 @@ -23,6 +23,7 @@ return address return '%s at %s' % (name, host.replace('.', ' dot ')) + class EmailAddress(AnyEntity): __regid__ = 'EmailAddress' fetch_attrs, fetch_order = fetch_config(['address', 'alias']) @@ -50,8 +51,10 @@ subjrels = self.e_schema.object_relations() if not ('sender' in subjrels and 'recipients' in subjrels): return - rql = 'DISTINCT Any X, S, D ORDERBY D DESC WHERE X sender Y or X recipients Y, X subject S, X date D, Y eid %(y)s' - rset = self._cw.execute(rql, {'y': self.eid}, 'y') + rset = self._cw.execute('DISTINCT Any X, S, D ORDERBY D DESC ' + 'WHERE X sender Y or X recipients Y, ' + 'X subject S, X date D, Y eid %(y)s', + {'y': self.eid}) if skipeids is None: skipeids = set() for i in xrange(len(rset)): @@ -131,7 
+134,7 @@ def touch(self): self._cw.execute('SET X timestamp %(t)s WHERE X eid %(x)s', - {'t': datetime.now(), 'x': self.eid}, 'x') + {'t': datetime.now(), 'x': self.eid}) def valid(self, date): if date: diff -r f3c2cb460ad9 -r d14bfd477c44 entities/test/unittest_base.py --- a/entities/test/unittest_base.py Thu Apr 08 12:42:47 2010 +0200 +++ b/entities/test/unittest_base.py Thu Apr 08 14:11:49 2010 +0200 @@ -69,18 +69,18 @@ class CWUserTC(BaseEntityTC): def test_complete(self): - e = self.entity('CWUser X WHERE X login "admin"') + e = self.execute('CWUser X WHERE X login "admin"').get_entity(0, 0) e.complete() def test_matching_groups(self): - e = self.entity('CWUser X WHERE X login "admin"') + e = self.execute('CWUser X WHERE X login "admin"').get_entity(0, 0) self.failUnless(e.matching_groups('managers')) self.failIf(e.matching_groups('xyz')) self.failUnless(e.matching_groups(('xyz', 'managers'))) self.failIf(e.matching_groups(('xyz', 'abcd'))) def test_dc_title_and_name(self): - e = self.entity('CWUser U WHERE U login "member"') + e = self.execute('CWUser U WHERE U login "member"').get_entity(0, 0) self.assertEquals(e.dc_title(), 'member') self.assertEquals(e.name(), 'member') e.set_attributes(firstname=u'bouah') @@ -91,7 +91,7 @@ self.assertEquals(e.name(), u'bouah lôt') def test_allowed_massmail_keys(self): - e = self.entity('CWUser U WHERE U login "member"') + e = self.execute('CWUser U WHERE U login "member"').get_entity(0, 0) # Bytes/Password attributes should be omited self.assertEquals(e.allowed_massmail_keys(), set(('surname', 'firstname', 'login', 'last_login_time', diff -r f3c2cb460ad9 -r d14bfd477c44 entities/test/unittest_wfobjs.py --- a/entities/test/unittest_wfobjs.py Thu Apr 08 12:42:47 2010 +0200 +++ b/entities/test/unittest_wfobjs.py Thu Apr 08 14:11:49 2010 +0200 @@ -96,7 +96,7 @@ self.assertEquals(e.latest_trinfo().comment, 'deactivate 2') def test_possible_transitions(self): - user = self.entity('CWUser X') + user = self.execute('CWUser 
X').get_entity(0, 0) trs = list(user.possible_transitions()) self.assertEquals(len(trs), 1) self.assertEquals(trs[0].name, u'deactivate') @@ -131,7 +131,7 @@ with security_enabled(self.session, write=False): ex = self.assertRaises(ValidationError, self.session.execute, 'SET X in_state S WHERE X eid %(x)s, S eid %(s)s', - {'x': self.user().eid, 's': s.eid}, 'x') + {'x': self.user().eid, 's': s.eid}) self.assertEquals(ex.errors, {'in_state-subject': "state doesn't belong to entity's workflow. " "You may want to set a custom workflow for this entity first."}) @@ -413,7 +413,7 @@ wf = add_wf(self, 'Company') wf.add_state('asleep', initial=True) self.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', - {'wf': wf.eid, 'x': self.member.eid}, 'x') + {'wf': wf.eid, 'x': self.member.eid}) ex = self.assertRaises(ValidationError, self.commit) self.assertEquals(ex.errors, {'custom_workflow-subject': 'workflow isn\'t a workflow for this type'}) diff -r f3c2cb460ad9 -r d14bfd477c44 entities/wfobjs.py --- a/entities/wfobjs.py Thu Apr 08 12:42:47 2010 +0200 +++ b/entities/wfobjs.py Thu Apr 08 14:11:49 2010 +0200 @@ -65,7 +65,7 @@ def state_by_name(self, statename): rset = self._cw.execute('Any S, SN WHERE S name SN, S name %(n)s, ' 'S state_of WF, WF eid %(wf)s', - {'n': statename, 'wf': self.eid}, 'wf') + {'n': statename, 'wf': self.eid}) if rset: return rset.get_entity(0, 0) return None @@ -73,7 +73,7 @@ def state_by_eid(self, eid): rset = self._cw.execute('Any S, SN WHERE S name SN, S eid %(s)s, ' 'S state_of WF, WF eid %(wf)s', - {'s': eid, 'wf': self.eid}, ('wf', 's')) + {'s': eid, 'wf': self.eid}) if rset: return rset.get_entity(0, 0) return None @@ -81,7 +81,7 @@ def transition_by_name(self, trname): rset = self._cw.execute('Any T, TN WHERE T name TN, T name %(n)s, ' 'T transition_of WF, WF eid %(wf)s', - {'n': trname, 'wf': self.eid}, 'wf') + {'n': trname, 'wf': self.eid}) if rset: return rset.get_entity(0, 0) return None @@ -89,7 +89,7 @@ def 
transition_by_eid(self, eid): rset = self._cw.execute('Any T, TN WHERE T name TN, T eid %(t)s, ' 'T transition_of WF, WF eid %(wf)s', - {'t': eid, 'wf': self.eid}, ('wf', 't')) + {'t': eid, 'wf': self.eid}) if rset: return rset.get_entity(0, 0) return None @@ -100,12 +100,12 @@ """add a state to this workflow""" state = self._cw.create_entity('State', name=unicode(name), **kwargs) self._cw.execute('SET S state_of WF WHERE S eid %(s)s, WF eid %(wf)s', - {'s': state.eid, 'wf': self.eid}, ('s', 'wf')) + {'s': state.eid, 'wf': self.eid}) if initial: assert not self.initial, "Initial state already defined as %s" % self.initial self._cw.execute('SET WF initial_state S ' 'WHERE S eid %(s)s, WF eid %(wf)s', - {'s': state.eid, 'wf': self.eid}, ('s', 'wf')) + {'s': state.eid, 'wf': self.eid}) return state def _add_transition(self, trtype, name, fromstates, @@ -113,7 +113,7 @@ tr = self._cw.create_entity(trtype, name=unicode(name), **kwargs) self._cw.execute('SET T transition_of WF ' 'WHERE T eid %(t)s, WF eid %(wf)s', - {'t': tr.eid, 'wf': self.eid}, ('t', 'wf')) + {'t': tr.eid, 'wf': self.eid}) assert fromstates, fromstates if not isinstance(fromstates, (tuple, list)): fromstates = (fromstates,) @@ -122,7 +122,7 @@ state = state.eid self._cw.execute('SET S allowed_transition T ' 'WHERE S eid %(s)s, T eid %(t)s', - {'s': state, 't': tr.eid}, ('s', 't')) + {'s': state, 't': tr.eid}) tr.set_permissions(requiredgroups, conditions, reset=False) return tr @@ -136,7 +136,7 @@ tostate = tostate.eid self._cw.execute('SET T destination_state S ' 'WHERE S eid %(s)s, T eid %(t)s', - {'t': tr.eid, 's': tostate}, ('s', 't')) + {'t': tr.eid, 's': tostate}) return tr def add_wftransition(self, name, subworkflow, fromstates, exitpoints=(), @@ -147,7 +147,7 @@ if hasattr(subworkflow, 'eid'): subworkflow = subworkflow.eid assert self._cw.execute('SET T subworkflow WF WHERE WF eid %(wf)s,T eid %(t)s', - {'t': tr.eid, 'wf': subworkflow}, ('wf', 't')) + {'t': tr.eid, 'wf': subworkflow}) for 
fromstate, tostate in exitpoints: tr.add_exit_point(fromstate, tostate) return tr @@ -159,11 +159,11 @@ if not hasattr(replacement, 'eid'): replacement = self.state_by_name(replacement) execute = self._cw.execute - execute('SET X in_state S WHERE S eid %(s)s', {'s': todelstate.eid}, 's') + execute('SET X in_state S WHERE S eid %(s)s', {'s': todelstate.eid}) execute('SET X from_state NS WHERE X to_state OS, OS eid %(os)s, NS eid %(ns)s', - {'os': todelstate.eid, 'ns': replacement.eid}, 's') + {'os': todelstate.eid, 'ns': replacement.eid}) execute('SET X to_state NS WHERE X to_state OS, OS eid %(os)s, NS eid %(ns)s', - {'os': todelstate.eid, 'ns': replacement.eid}, 's') + {'os': todelstate.eid, 'ns': replacement.eid}) todelstate.delete() @@ -227,13 +227,13 @@ """ if reset: self._cw.execute('DELETE T require_group G WHERE T eid %(x)s', - {'x': self.eid}, 'x') + {'x': self.eid}) self._cw.execute('DELETE T condition R WHERE T eid %(x)s', - {'x': self.eid}, 'x') + {'x': self.eid}) for gname in requiredgroups: rset = self._cw.execute('SET T require_group G ' 'WHERE T eid %(x)s, G name %(gn)s', - {'x': self.eid, 'gn': gname}, 'x') + {'x': self.eid, 'gn': gname}) assert rset, '%s is not a known group' % gname if isinstance(conditions, basestring): conditions = (conditions,) @@ -247,7 +247,7 @@ kwargs.setdefault('mainvars', u'X') self._cw.execute('INSERT RQLExpression X: X exprtype "ERQLExpression", ' 'X expression %(expr)s, X mainvars %(mainvars)s, ' - 'T condition X WHERE T eid %(x)s',kwargs, 'x') + 'T condition X WHERE T eid %(x)s', kwargs) # XXX clear caches? 
@deprecated('[3.6.1] use set_permission') @@ -299,15 +299,14 @@ if tostate is None: self._cw.execute('INSERT SubWorkflowExitPoint X: T subworkflow_exit X, ' 'X subworkflow_state FS WHERE T eid %(t)s, FS eid %(fs)s', - {'t': self.eid, 'fs': fromstate}, ('t', 'fs')) + {'t': self.eid, 'fs': fromstate}) else: if hasattr(tostate, 'eid'): tostate = tostate.eid self._cw.execute('INSERT SubWorkflowExitPoint X: T subworkflow_exit X, ' 'X subworkflow_state FS, X destination_state TS ' 'WHERE T eid %(t)s, FS eid %(fs)s, TS eid %(ts)s', - {'t': self.eid, 'fs': fromstate, 'ts': tostate}, - ('t', 'fs', 'ts')) + {'t': self.eid, 'fs': fromstate, 'ts': tostate}) def get_exit_point(self, entity, stateeid): """if state is an exit point, return its associated destination state""" @@ -469,7 +468,7 @@ 'T type TT, T type %(type)s, ' 'T name TN, T transition_of WF, WF eid %(wfeid)s', {'x': self.current_state.eid, 'type': type, - 'wfeid': self.current_workflow.eid}, 'x') + 'wfeid': self.current_workflow.eid}) for tr in rset.entities(): if tr.may_be_fired(self.eid): yield tr diff -r f3c2cb460ad9 -r d14bfd477c44 entity.py --- a/entity.py Thu Apr 08 12:42:47 2010 +0200 +++ b/entity.py Thu Apr 08 14:11:49 2010 +0200 @@ -500,7 +500,7 @@ continue rql = 'SET X %s V WHERE X eid %%(x)s, Y eid %%(y)s, Y %s V' % ( rschema.type, rschema.type) - execute(rql, {'x': self.eid, 'y': ceid}, ('x', 'y')) + execute(rql, {'x': self.eid, 'y': ceid}) self.clear_related_cache(rschema.type, 'subject') for rschema in self.e_schema.object_relations(): if rschema.meta: @@ -518,7 +518,7 @@ continue rql = 'SET V %s X WHERE X eid %%(x)s, Y eid %%(y)s, V %s Y' % ( rschema.type, rschema.type) - execute(rql, {'x': self.eid, 'y': ceid}, ('x', 'y')) + execute(rql, {'x': self.eid, 'y': ceid}) self.clear_related_cache(rschema.type, 'object') # data fetching methods ################################################### @@ -620,8 +620,7 @@ # if some outer join are included to fetch inlined relations rql = 'Any %s,%s %s' % (V, 
','.join(var for attr, var in selected), ','.join(rql)) - rset = self._cw.execute(rql, {'x': self.eid}, 'x', - build_descr=False)[0] + rset = self._cw.execute(rql, {'x': self.eid}, build_descr=False)[0] # handle attributes for i in xrange(1, lastattr): self[str(selected[i-1][0])] = rset[i] @@ -650,7 +649,7 @@ return None rql = "Any A WHERE X eid %%(x)s, X %s A" % name try: - rset = self._cw.execute(rql, {'x': self.eid}, 'x') + rset = self._cw.execute(rql, {'x': self.eid}) except Unauthorized: self[name] = value = None else: @@ -681,7 +680,7 @@ pass assert self.has_eid() rql = self.related_rql(rtype, role) - rset = self._cw.execute(rql, {'x': self.eid}, 'x') + rset = self._cw.execute(rql, {'x': self.eid}) self.set_related_cache(rtype, role, rset) return self.related(rtype, role, limit, entities) @@ -807,7 +806,7 @@ if limit is not None: before, after = rql.split(' WHERE ', 1) rql = '%s LIMIT %s WHERE %s' % (before, limit, after) - return self._cw.execute(rql, args, tuple(args)) + return self._cw.execute(rql, args) # relations cache handling ################################################ @@ -890,7 +889,7 @@ # and now update the database kwargs['x'] = self.eid self._cw.execute('SET %s WHERE X eid %%(x)s' % ','.join(relations), - kwargs, 'x') + kwargs) kwargs.pop('x') # update current local object _after_ the rql query to avoid # interferences between the query execution itself and the @@ -913,13 +912,13 @@ restr = 'X %s Y' % attr if values is None: self._cw.execute('DELETE %s WHERE X eid %%(x)s' % restr, - {'x': self.eid}, 'x') + {'x': self.eid}) continue if not isinstance(values, (tuple, list, set, frozenset)): values = (values,) self._cw.execute('SET %s WHERE X eid %%(x)s, Y eid IN (%s)' % ( restr, ','.join(str(r.eid) for r in values)), - {'x': self.eid}, 'x') + {'x': self.eid}) def delete(self, **kwargs): assert self.has_eid(), self.eid diff -r f3c2cb460ad9 -r d14bfd477c44 etwist/http.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/etwist/http.py Thu Apr 08 
14:11:49 2010 +0200 @@ -0,0 +1,71 @@ +"""twisted server for CubicWeb web instances + +:organization: Logilab +:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. +:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr +:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses +""" + +__docformat__ = "restructuredtext en" + +from cubicweb.web.http_headers import Headers + +class HTTPResponse(object): + """An object representing an HTTP Response to be sent to the client. + """ + def __init__(self, twisted_request, code=None, headers=None, stream=None): + self._headers_out = headers + self._twreq = twisted_request + self._stream = stream + self._code = code + + self._init_headers() + self._finalize() + + def _init_headers(self): + if self._headers_out is None: + return + + # initialize cookies + cookies = self._headers_out.getHeader('set-cookie') or [] + for cookie in cookies: + self._twreq.addCookie(cookie.name, cookie.value, cookie.expires, + cookie.domain, cookie.path, #TODO max-age + comment = cookie.comment, secure=cookie.secure) + self._headers_out.removeHeader('set-cookie') + + # initialize other headers + for k, v in self._headers_out.getAllRawHeaders(): + self._twreq.setHeader(k, v[0]) + + # add content-length if not present + if (self._headers_out.getHeader('content-length') is None + and self._stream is not None): + self._twreq.setHeader('content-length', len(self._stream)) + + + def _finalize(self): + if self._stream is not None: + self._twreq.write(str(self._stream)) + if self._code is not None: + self._twreq.setResponseCode(self._code) + self._twreq.finish() + + def __repr__(self): + return "<%s.%s code=%d>" % (self.__module__, self.__class__.__name__, self._code) + + +def not_modified_response(twisted_request, headers_in): + headers_out = Headers() + + for header in ( + # Required from sec 10.3.5: + 'date', 'etag', 'content-location', 'expires', + 'cache-control', 'vary', + # Others: + 'server', 
'proxy-authenticate', 'www-authenticate', 'warning'): + value = headers_in.getRawHeaders(header) + if value is not None: + headers_out.setRawHeaders(header, value) + return HTTPResponse(twisted_request=twisted_request, + headers=headers_out) diff -r f3c2cb460ad9 -r d14bfd477c44 etwist/request.py --- a/etwist/request.py Thu Apr 08 12:42:47 2010 +0200 +++ b/etwist/request.py Thu Apr 08 14:11:49 2010 +0200 @@ -9,22 +9,13 @@ from datetime import datetime -from twisted.web2 import http, http_headers +from twisted.web import http from cubicweb.web import DirectResponse from cubicweb.web.request import CubicWebRequestBase from cubicweb.web.httpcache import GMTOFFSET - -def cleanup_files(dct, encoding): - d = {} - for k, infos in dct.items(): - for (filename, mt, stream) in infos: - if filename: - # XXX: suppose that no file submitted <-> no filename - filename = unicode(filename, encoding) - mt = u'%s/%s' % (mt.mediaType, mt.mediaSubtype) - d[k] = (filename, mt, stream) - return d +from cubicweb.web.http_headers import Headers +from cubicweb.etwist.http import not_modified_response class CubicWebTwistedRequestAdapter(CubicWebRequestBase): @@ -32,10 +23,15 @@ self._twreq = req self._base_url = base_url super(CubicWebTwistedRequestAdapter, self).__init__(vreg, https, req.args) - self.form.update(cleanup_files(req.files, self.encoding)) - # prepare output headers - self.headers_out = http_headers.Headers() - self._headers = req.headers + for key, (name, stream) in req.files.iteritems(): + if name is None: + self.form[key] = (name, stream) + else: + self.form[key] = (unicode(name, self.encoding), stream) + # XXX can't we keep received_headers? 
+ self._headers_in = Headers() + for k, v in req.received_headers.iteritems(): + self._headers_in.addRawHeader(k, v) def base_url(self): """return the root url of the instance""" @@ -63,29 +59,8 @@ raise KeyError if the header is not set """ if raw: - return self._twreq.headers.getRawHeaders(header, [default])[0] - return self._twreq.headers.getHeader(header, default) - - def set_header(self, header, value, raw=True): - """set an output HTTP header""" - if raw: - # adding encoded header is important, else page content - # will be reconverted back to unicode and apart unefficiency, this - # may cause decoding problem (e.g. when downloading a file) - self.headers_out.setRawHeaders(header, [str(value)]) - else: - self.headers_out.setHeader(header, value) - - def add_header(self, header, value): - """add an output HTTP header""" - # adding encoded header is important, else page content - # will be reconverted back to unicode and apart unefficiency, this - # may cause decoding problem (e.g. when downloading a file) - self.headers_out.addRawHeader(header, str(value)) - - def remove_header(self, header): - """remove an output HTTP header""" - self.headers_out.removeHeader(header) + return self._headers_in.getRawHeaders(header, [default])[0] + return self._headers_in.getHeader(header, default) def _validate_cache(self): """raise a `DirectResponse` exception if a cached page along the way @@ -95,11 +70,32 @@ # Expires header seems to be required by IE7 self.add_header('Expires', 'Sat, 01 Jan 2000 00:00:00 GMT') return - try: - http.checkPreconditions(self._twreq, _PreResponse(self)) - except http.HTTPError, ex: - self.info('valid http cache, no actual rendering') - raise DirectResponse(ex.response) + + # when using both 'Last-Modified' and 'ETag' response headers + # (i.e. 
using respectively If-Modified-Since and If-None-Match request + # headers, see + # http://www.w3.org/Protocols/rfc2616/rfc2616-sec13.html#sec13.3.4 for + # reference + + cached_because_not_modified_since = False + + last_modified = self.headers_out.getHeader('last-modified') + if last_modified is not None: + cached_because_not_modified_since = (self._twreq.setLastModified(last_modified) + == http.CACHED) + + if not cached_because_not_modified_since: + return + + cached_because_etag_is_same = False + etag = self.headers_out.getRawHeaders('etag') + if etag is not None: + cached_because_etag_is_same = self._twreq.setETag(etag[0]) == http.CACHED + + if cached_because_etag_is_same: + response = not_modified_response(self._twreq, self._headers_in) + raise DirectResponse(response) + # Expires header seems to be required by IE7 self.add_header('Expires', 'Sat, 01 Jan 2000 00:00:00 GMT') @@ -120,9 +116,3 @@ # :/ twisted is returned a localized time stamp return datetime.fromtimestamp(mtime) + GMTOFFSET return None - - -class _PreResponse(object): - def __init__(self, request): - self.headers = request.headers_out - self.code = 200 diff -r f3c2cb460ad9 -r d14bfd477c44 etwist/server.py --- a/etwist/server.py Thu Apr 08 12:42:47 2010 +0200 +++ b/etwist/server.py Thu Apr 08 14:11:49 2010 +0200 @@ -14,19 +14,25 @@ from time import mktime from datetime import date, timedelta from urlparse import urlsplit, urlunsplit +from cgi import FieldStorage, parse_header from twisted.internet import reactor, task, threads from twisted.internet.defer import maybeDeferred -from twisted.web2 import channel, http, server, iweb -from twisted.web2 import static, resource, responsecode +from twisted.web import http, server +from twisted.web import static, resource +from twisted.web.server import NOT_DONE_YET + +from logilab.common.decorators import monkeypatch from cubicweb import ConfigurationError, CW_EVENT_MANAGER from cubicweb.web import (AuthenticationError, NotFound, Redirect, 
RemoteCallFailed, DirectResponse, StatusResponse, ExplicitLogin) + from cubicweb.web.application import CubicWebPublisher from cubicweb.etwist.request import CubicWebTwistedRequestAdapter +from cubicweb.etwist.http import HTTPResponse def daemonize(): # XXX unix specific @@ -67,8 +73,20 @@ return baseurl -class LongTimeExpiringFile(static.File): - """overrides static.File and sets a far futre ``Expires`` date +class ForbiddenDirectoryLister(resource.Resource): + def render(self, request): + return HTTPResponse(twisted_request=request, + code=http.FORBIDDEN, + stream='Access forbidden') + +class File(static.File): + """Prevent from listing directories""" + def directoryListing(self): + return ForbiddenDirectoryLister() + + +class LongTimeExpiringFile(File): + """overrides static.File and sets a far future ``Expires`` date on the resouce. versions handling is done by serving static files by different @@ -79,22 +97,19 @@ etc. """ - def renderHTTP(self, request): + def render(self, request): def setExpireHeader(response): - response = iweb.IResponse(response) # Don't provide additional resource information to error responses if response.code < 400: # the HTTP RFC recommands not going further than 1 year ahead expires = date.today() + timedelta(days=6*30) response.headers.setHeader('Expires', mktime(expires.timetuple())) return response - d = maybeDeferred(super(LongTimeExpiringFile, self).renderHTTP, request) + d = maybeDeferred(super(LongTimeExpiringFile, self).render, request) return d.addCallback(setExpireHeader) -class CubicWebRootResource(resource.PostableResource): - addSlash = False - +class CubicWebRootResource(resource.Resource): def __init__(self, config, debug=None): self.debugmode = debug self.config = config @@ -104,6 +119,7 @@ self.base_url = config['base-url'] self.https_url = config['https-url'] self.versioned_datadir = 'data%s' % config.instance_md5_version() + self.children = {} def init_publisher(self): config = self.config @@ -145,35 +161,35 @@ 
except select.error: return - def locateChild(self, request, segments): + def getChild(self, path, request): """Indicate which resource to use to process down the URL's path""" - if segments: - if segments[0] == 'https': - segments = segments[1:] - if len(segments) >= 2: - if segments[0] in (self.versioned_datadir, 'data', 'static'): - # Anything in data/, static/ is treated as static files - if segments[0] == 'static': - # instance static directory - datadir = self.config.static_directory - elif segments[1] == 'fckeditor': - fckeditordir = self.config.ext_resources['FCKEDITOR_PATH'] - return static.File(fckeditordir), segments[2:] - else: - # cube static data file - datadir = self.config.locate_resource(segments[1]) - if datadir is None: - return None, [] - self.debug('static file %s from %s', segments[-1], datadir) - if segments[0] == 'data': - return static.File(str(datadir)), segments[1:] - else: - return LongTimeExpiringFile(datadir), segments[1:] - elif segments[0] == 'fckeditor': - fckeditordir = self.config.ext_resources['FCKEDITOR_PATH'] - return static.File(fckeditordir), segments[1:] + pre_path = request.prePathURL() + # XXX testing pre_path[0] not enough? 
+ if any(s in pre_path + for s in (self.versioned_datadir, 'data', 'static')): + # Anything in data/, static/ is treated as static files + + if 'static' in pre_path: + # instance static directory + datadir = self.config.static_directory + elif 'fckeditor' in pre_path: + fckeditordir = self.config.ext_resources['FCKEDITOR_PATH'] + return File(fckeditordir) + else: + # cube static data file + datadir = self.config.locate_resource(path) + if datadir is None: + return self + self.info('static file %s from %s', path, datadir) + if 'data' in pre_path: + return File(os.path.join(datadir, path)) + else: + return LongTimeExpiringFile(datadir) + elif path == 'fckeditor': + fckeditordir = self.config.ext_resources['FCKEDITOR_PATH'] + return File(fckeditordir) # Otherwise we use this single resource - return self, () + return self def render(self, request): """Render a page from the root resource""" @@ -183,7 +199,8 @@ if self.config['profile']: # default profiler don't trace threads return self.render_request(request) else: - return threads.deferToThread(self.render_request, request) + deferred = threads.deferToThread(self.render_request, request) + return NOT_DONE_YET def render_request(self, request): origpath = request.path @@ -209,12 +226,12 @@ try: self.appli.connect(req) except AuthenticationError: - return self.request_auth(req) + return self.request_auth(request=req) except Redirect, ex: - return self.redirect(req, ex.location) + return self.redirect(request=req, location=ex.location) if https and req.cnx.anonymous_connection: # don't allow anonymous on https connection - return self.request_auth(req) + return self.request_auth(request=req) if self.url_rewriter is not None: # XXX should occur before authentication? 
try: @@ -231,234 +248,115 @@ except DirectResponse, ex: return ex.response except StatusResponse, ex: - return http.Response(stream=ex.content, code=ex.status, - headers=req.headers_out or None) + return HTTPResponse(stream=ex.content, code=ex.status, + twisted_request=req._twreq, + headers=req.headers_out) except RemoteCallFailed, ex: req.set_header('content-type', 'application/json') - return http.Response(stream=ex.dumps(), - code=responsecode.INTERNAL_SERVER_ERROR) + return HTTPResponse(twisted_request=req._twreq, code=http.INTERNAL_SERVER_ERROR, + stream=ex.dumps(), headers=req.headers_out) except NotFound: result = self.appli.notfound_content(req) - return http.Response(stream=result, code=responsecode.NOT_FOUND, - headers=req.headers_out or None) + return HTTPResponse(twisted_request=req._twreq, code=http.NOT_FOUND, + stream=result, headers=req.headers_out) + except ExplicitLogin: # must be before AuthenticationError - return self.request_auth(req) + return self.request_auth(request=req) except AuthenticationError, ex: if self.config['auth-mode'] == 'cookie' and getattr(ex, 'url', None): - return self.redirect(req, ex.url) + return self.redirect(request=req, location=ex.url) # in http we have to request auth to flush current http auth # information - return self.request_auth(req, loggedout=True) + return self.request_auth(request=req, loggedout=True) except Redirect, ex: - return self.redirect(req, ex.location) + return self.redirect(request=req, location=ex.location) # request may be referenced by "onetime callback", so clear its entity # cache to avoid memory usage req.drop_entity_cache() - return http.Response(stream=result, code=responsecode.OK, - headers=req.headers_out or None) - def redirect(self, req, location): - req.headers_out.setHeader('location', str(location)) - self.debug('redirecting to %s', location) - # 303 See other - return http.Response(code=303, headers=req.headers_out) + return HTTPResponse(twisted_request=req._twreq, code=http.OK, + 
stream=result, headers=req.headers_out) - def request_auth(self, req, loggedout=False): - if self.https_url and req.base_url() != self.https_url: - req.headers_out.setHeader('location', self.https_url + 'login') - return http.Response(code=303, headers=req.headers_out) + def redirect(self, request, location): + self.debug('redirecting to %s', str(location)) + request.headers_out.setHeader('location', str(location)) + # 303 See other + return HTTPResponse(twisted_request=request._twreq, code=303, + headers=request.headers_out) + + def request_auth(self, request, loggedout=False): + if self.https_url and request.base_url() != self.https_url: + return self.redirect(request, self.https_url + 'login') if self.config['auth-mode'] == 'http': - code = responsecode.UNAUTHORIZED + code = http.UNAUTHORIZED else: - code = responsecode.FORBIDDEN + code = http.FORBIDDEN if loggedout: - if req.https: - req._base_url = self.base_url - req.https = False - content = self.appli.loggedout_content(req) + if request.https: + request._base_url = self.base_url + request.https = False + content = self.appli.loggedout_content(request) else: - content = self.appli.need_login_content(req) - return http.Response(code, req.headers_out, content) + content = self.appli.need_login_content(request) + return HTTPResponse(twisted_request=request._twreq, + stream=content, code=code, + headers=request.headers_out) -from twisted.internet import defer -from twisted.web2 import fileupload +#TODO +# # XXX max upload size in the configuration -# XXX set max file size to 100Mo: put max upload size in the configuration -# line below for twisted >= 8.0, default param value for earlier version -resource.PostableResource.maxSize = 100*1024*1024 -def parsePOSTData(request, maxMem=100*1024, maxFields=1024, - maxSize=100*1024*1024): - if request.stream.length == 0: - return defer.succeed(None) +@monkeypatch(http.Request) +def requestReceived(self, command, path, version): + """Called by channel when all data has 
been received. - ctype = request.headers.getHeader('content-type') - - if ctype is None: - return defer.succeed(None) - - def updateArgs(data): - args = data - request.args.update(args) - - def updateArgsAndFiles(data): - args, files = data - request.args.update(args) - request.files.update(files) - - def error(f): - f.trap(fileupload.MimeFormatError) - raise http.HTTPError(responsecode.BAD_REQUEST) - - if ctype.mediaType == 'application' and ctype.mediaSubtype == 'x-www-form-urlencoded': - d = fileupload.parse_urlencoded(request.stream, keep_blank_values=True) - d.addCallbacks(updateArgs, error) - return d - elif ctype.mediaType == 'multipart' and ctype.mediaSubtype == 'form-data': - boundary = ctype.params.get('boundary') - if boundary is None: - return defer.fail(http.HTTPError( - http.StatusResponse(responsecode.BAD_REQUEST, - "Boundary not specified in Content-Type."))) - d = fileupload.parseMultipartFormData(request.stream, boundary, - maxMem, maxFields, maxSize) - d.addCallbacks(updateArgsAndFiles, error) - return d + This method is not intended for users. 
+ """ + self.content.seek(0,0) + self.args = {} + self.files = {} + self.stack = [] + self.method, self.uri = command, path + self.clientproto = version + x = self.uri.split('?', 1) + if len(x) == 1: + self.path = self.uri else: - raise http.HTTPError(responsecode.BAD_REQUEST) - -server.parsePOSTData = parsePOSTData + self.path, argstring = x + self.args = http.parse_qs(argstring, 1) + # cache the client and server information, we'll need this later to be + # serialized and sent with the request so CGIs will work remotely + self.client = self.channel.transport.getPeer() + self.host = self.channel.transport.getHost() + # Argument processing + ctype = self.getHeader('content-type') + if self.method == "POST" and ctype: + key, pdict = parse_header(ctype) + if key == 'application/x-www-form-urlencoded': + self.args.update(http.parse_qs(self.content.read(), 1)) + elif key == 'multipart/form-data': + self.content.seek(0,0) + form = FieldStorage(self.content, self.received_headers, + environ={'REQUEST_METHOD': 'POST'}, + keep_blank_values=1, + strict_parsing=1) + for key in form: + value = form[key] + if isinstance(value, list): + self.args[key] = [v.value for v in value] + elif value.filename: + if value.done != -1: # -1 is transfer has been interrupted + self.files[key] = (value.filename, value.file) + else: + self.files[key] = (None, None) + else: + self.args[key] = value.value + self.process() from logging import getLogger from cubicweb import set_log_methods -set_log_methods(CubicWebRootResource, getLogger('cubicweb.twisted')) - - -listiterator = type(iter([])) - -def _gc_debug(all=True): - import gc - from pprint import pprint - from cubicweb.appobject import AppObject - gc.collect() - count = 0 - acount = 0 - fcount = 0 - rcount = 0 - ccount = 0 - scount = 0 - ocount = {} - from rql.stmts import Union - from cubicweb.schema import CubicWebSchema - from cubicweb.rset import ResultSet - from cubicweb.dbapi import Connection, Cursor - from cubicweb.req import 
RequestSessionBase - from cubicweb.server.repository import Repository - from cubicweb.server.sources.native import NativeSQLSource - from cubicweb.server.session import Session - from cubicweb.devtools.testlib import CubicWebTC - from logilab.common.testlib import TestSuite - from optparse import Values - import types, weakref - for obj in gc.get_objects(): - if isinstance(obj, RequestSessionBase): - count += 1 - if isinstance(obj, Session): - print ' session', obj, referrers(obj, True) - elif isinstance(obj, AppObject): - acount += 1 - elif isinstance(obj, ResultSet): - rcount += 1 - #print ' rset', obj, referrers(obj) - elif isinstance(obj, Repository): - print ' REPO', obj, referrers(obj, True) - #elif isinstance(obj, NativeSQLSource): - # print ' SOURCe', obj, referrers(obj) - elif isinstance(obj, CubicWebTC): - print ' TC', obj, referrers(obj) - elif isinstance(obj, TestSuite): - print ' SUITE', obj, referrers(obj) - #elif isinstance(obj, Values): - # print ' values', '%#x' % id(obj), referrers(obj, True) - elif isinstance(obj, Connection): - ccount += 1 - #print ' cnx', obj, referrers(obj) - #elif isinstance(obj, Cursor): - # ccount += 1 - # print ' cursor', obj, referrers(obj) - elif isinstance(obj, file): - fcount += 1 - # print ' open file', file.name, file.fileno - elif isinstance(obj, CubicWebSchema): - scount += 1 - print ' schema', obj, referrers(obj) - elif not isinstance(obj, (type, tuple, dict, list, set, frozenset, - weakref.ref, weakref.WeakKeyDictionary, - listiterator, - property, classmethod, - types.ModuleType, types.MemberDescriptorType, - types.FunctionType, types.MethodType)): - try: - ocount[obj.__class__] += 1 - except KeyError: - ocount[obj.__class__] = 1 - except AttributeError: - pass - if count: - print ' NB REQUESTS/SESSIONS', count - if acount: - print ' NB APPOBJECTS', acount - if ccount: - print ' NB CONNECTIONS', ccount - if rcount: - print ' NB RSETS', rcount - if scount: - print ' NB SCHEMAS', scount - if fcount: - print ' NB 
FILES', fcount - if all: - ocount = sorted(ocount.items(), key=lambda x: x[1], reverse=True)[:20] - pprint(ocount) - if gc.garbage: - print 'UNREACHABLE', gc.garbage - -def referrers(obj, showobj=False): - try: - return sorted(set((type(x), showobj and x or getattr(x, '__name__', '%#x' % id(x))) - for x in _referrers(obj))) - except TypeError: - s = set() - unhashable = [] - for x in _referrers(obj): - try: - s.add(x) - except TypeError: - unhashable.append(x) - return sorted(s) + unhashable - -def _referrers(obj, seen=None, level=0): - import gc, types - from cubicweb.schema import CubicWebRelationSchema, CubicWebEntitySchema - interesting = [] - if seen is None: - seen = set() - for x in gc.get_referrers(obj): - if id(x) in seen: - continue - seen.add(id(x)) - if isinstance(x, types.FrameType): - continue - if isinstance(x, (CubicWebRelationSchema, CubicWebEntitySchema)): - continue - if isinstance(x, (list, tuple, set, dict, listiterator)): - if level >= 5: - pass - #interesting.append(x) - else: - interesting += _referrers(x, seen, level+1) - else: - interesting.append(x) - return interesting +LOGGER = getLogger('cubicweb.twisted') +set_log_methods(CubicWebRootResource, LOGGER) def run(config, debug): # create the site @@ -466,7 +364,7 @@ website = server.Site(root_resource) # serve it via standard HTTP on port set in the configuration port = config['port'] or 8080 - reactor.listenTCP(port, channel.HTTPFactory(website)) + reactor.listenTCP(port, website) logger = getLogger('cubicweb.twisted') if not debug: if sys.platform == 'win32': diff -r f3c2cb460ad9 -r d14bfd477c44 ext/xhtml2fo.py --- a/ext/xhtml2fo.py Thu Apr 08 12:42:47 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,137 +0,0 @@ -from xml.etree.ElementTree import QName -from pysixt.standard.xhtml_xslfo.transformer import XHTML2FOTransformer -from pysixt.utils.xslfo.standard import cm -from pysixt.utils.xslfo import SimplePageMaster -from pysixt.standard.xhtml_xslfo.default_styling import 
default_styles -from pysixt.standard.xhtml_xslfo import XHTML_NS - - -class ReportTransformer(XHTML2FOTransformer): - """ - Class transforming an XHTML input tree into a FO document - displaying reports (one report for each
- element in the input tree. - """ - - def __init__(self, section, - page_width=21.0, page_height=29.7, - margin_top=1.0, margin_bottom=1.0, - margin_left=1.0, margin_right=1.0, - header_footer_height=0.75, - standard_font_size=11.0, default_lang=u"fr" ): - """ - Initializes a transformer turning an XHTML input tree - containing
elements representing - main content sections into a FO output tree displaying the - reports. - - page_width: float - width of the page (in cm) - page_height: float - height of the page (in cm) - margin_top: float - top margin of the page (in cm) - margin_bottom: float - bottom margin of the page (in cm) - margin_left: float - left margin of the page (in cm) - margin_right: float - right margin of the page (in cm) - header_footer_height: float - height of the header or the footer of the - page that the page number (if any) will be - inserted in. - standard_font_size: float - standard size of the font (in pt) - default_lang: u"" - default language (used for hyphenation) - """ - self.section = section - self.page_width = page_width - self.page_height = page_height - - self.page_tmargin = margin_top - self.page_bmargin = margin_bottom - self.page_lmargin = margin_left - self.page_rmargin = margin_right - - self.hf_height = header_footer_height - - self.font_size = standard_font_size - self.lang = default_lang - - XHTML2FOTransformer.__init__(self) - - - def define_pagemasters(self): - """ - Defines the page masters for the FO output document. - """ - pm = SimplePageMaster(u"page-report") - pm.set_page_dims( self.page_width*cm, self.page_height*cm ) - pm.set_page_margins({u'top' : self.page_tmargin*cm, - u'bottom': self.page_bmargin*cm, - u'left' : self.page_lmargin*cm, - u'right' : self.page_rmargin*cm }) - pm.add_peripheral_region(u"end", self.hf_height) - dims = {} - dims[u"bottom"] = self.hf_height + 0.25 - pm.set_main_region_margins(dims) - return [pm] - - def _visit_report(self, in_elt, _out_elt, params): - """ - Specific visit function for the input
elements whose class is - "report". The _root_visit method of this class selects these input - elements and asks the process of these elements with this specific - visit function. - """ - - ps = self.create_pagesequence(u"page-report") - props = { u"force-page-count": u"no-force", - u"initial-page-number": u"1", - u"format": u"1", } - self._output_properties(ps, props) - - sc = self.create_staticcontent(ps, u"end") - sc_bl = self.create_block(sc) - attrs = { u"hyphenate": u"false", } - attrs[u"font-size"] = u"%.1fpt" % (self.font_size * 0.7) - attrs[u"language"] = self.lang - attrs[u"text-align"] = u"center" - self._output_properties(sc_bl, attrs) - sc_bl.text = u"Page" + u" " # ### Should be localised! - pn = self.create_pagenumber(sc_bl) - pn.tail = u"/" - self.create_pagenumbercitation( - sc_bl, u"last-block-of-report-%d" % params[u"context_pos"]) - - fl = self.create_flow(ps, u"body") - bl = self.create_block(fl) - - # Sets on the highest block element the properties of the XHTML body - # element. These properties (at the least the inheritable ones) will - # be inherited by all the future FO elements. - bodies = list(self.in_tree.getiterator(QName(XHTML_NS, u"body"))) - if len(bodies) > 0: - attrs = self._extract_properties([bodies[0]]) - else: - attrs = default_styles[u"body"].copy() - attrs[u"font-size"] = u"%.1fpt" % self.font_size - attrs[u"language"] = self.lang - self._output_properties(bl,attrs) - - # Processes the report content - self._copy_text(in_elt, bl) - self._process_nodes(in_elt.getchildren(), bl) - - # Inserts an empty block at the end of the report in order to be able - # to compute the last page number of this report. - last_bl = self.create_block(bl) - props = { u"keep-with-previous": u"always", } - props[u"id"] = u"last-block-of-report-%d" % params[u"context_pos"] - self._output_properties(last_bl,props) - - - def _root_visit(self): - """ - Visit function called when starting the process of the input tree. 
- """ - content = [ d for d in self.in_tree.getiterator(QName(XHTML_NS, u"div")) - if d.get(u"id") == self.section ] - # Asks the process of the report elements with a specific visit - # function - self._process_nodes(content, self.fo_root, - with_function=self._visit_report) - diff -r f3c2cb460ad9 -r d14bfd477c44 hooks/email.py --- a/hooks/email.py Thu Apr 08 12:42:47 2010 +0200 +++ b/hooks/email.py Thu Apr 08 14:11:49 2010 +0200 @@ -28,7 +28,7 @@ if self.condition(): self.session.execute( 'SET X %s Y WHERE X eid %%(x)s, Y eid %%(y)s' % self.rtype, - {'x': self.entity.eid, 'y': self.email.eid}, 'x') + {'x': self.entity.eid, 'y': self.email.eid}) class SetPrimaryEmailRelationOp(SetUseEmailRelationOp): diff -r f3c2cb460ad9 -r d14bfd477c44 hooks/integrity.py --- a/hooks/integrity.py Thu Apr 08 12:42:47 2010 +0200 +++ b/hooks/integrity.py Thu Apr 08 14:11:49 2010 +0200 @@ -77,7 +77,7 @@ continue if rtype in pendingrtypes: continue - if not session.execute(self.base_rql % rtype, {'x': eid}, 'x'): + if not session.execute(self.base_rql % rtype, {'x': eid}): etype = session.describe(eid)[0] _ = session._ msg = _('at least one relation %(rtype)s is required on ' @@ -304,7 +304,7 @@ # don't do anything if the entity is being created or deleted if not (eid in pendingeids or eid in neweids): etype = session.describe(eid)[0] - session.execute(self.base_rql % (etype, rtype), {'x': eid}, 'x') + session.execute(self.base_rql % (etype, rtype), {'x': eid}) class _DelayedDeleteSEntityOp(_DelayedDeleteOp): """delete orphan subject entity of a composite relation""" diff -r f3c2cb460ad9 -r d14bfd477c44 hooks/metadata.py --- a/hooks/metadata.py Thu Apr 08 12:42:47 2010 +0200 +++ b/hooks/metadata.py Thu Apr 08 14:11:49 2010 +0200 @@ -102,8 +102,7 @@ def precommit_event(self): self.session.execute('SET X owned_by U WHERE C owned_by U, C eid %(c)s,' 'NOT EXISTS(X owned_by U, X eid %(x)s)', - {'c': self.compositeeid, 'x': self.composedeid}, - ('c', 'x')) + {'c': self.compositeeid, 'x': 
self.composedeid}) class SyncCompositeOwner(MetaDataHook): diff -r f3c2cb460ad9 -r d14bfd477c44 hooks/notification.py --- a/hooks/notification.py Thu Apr 08 12:42:47 2010 +0200 +++ b/hooks/notification.py Thu Apr 08 14:11:49 2010 +0200 @@ -124,7 +124,7 @@ rqlsel.append(var) rqlrestr.append('X %s %s' % (attr, var)) rql = 'Any %s WHERE %s' % (','.join(rqlsel), ','.join(rqlrestr)) - rset = session.execute(rql, {'x': self.entity.eid}, 'x') + rset = session.execute(rql, {'x': self.entity.eid}) for i, attr in enumerate(attrs): oldvalue = rset[0][i] newvalue = self.entity[attr] diff -r f3c2cb460ad9 -r d14bfd477c44 hooks/syncschema.py --- a/hooks/syncschema.py Thu Apr 08 12:42:47 2010 +0200 +++ b/hooks/syncschema.py Thu Apr 08 14:11:49 2010 +0200 @@ -1004,7 +1004,7 @@ DropRelationTable(session, rschema.type) # if this is the last instance, drop associated relation type if lastrel and not self.eidto in pendings: - execute('DELETE CWRType X WHERE X eid %(x)s', {'x': self.eidto}, 'x') + execute('DELETE CWRType X WHERE X eid %(x)s', {'x': self.eidto}) MemSchemaRDefDel(session, (subjschema, rschema, objschema)) diff -r f3c2cb460ad9 -r d14bfd477c44 hooks/syncsession.py --- a/hooks/syncsession.py Thu Apr 08 12:42:47 2010 +0200 +++ b/hooks/syncsession.py Thu Apr 08 14:11:49 2010 +0200 @@ -36,7 +36,7 @@ no query should be emitted while comitting """ rql = 'Any N WHERE G eid %(x)s, G name N' - result = session.execute(rql, {'x': kwargs['geid']}, 'x', build_descr=False) + result = session.execute(rql, {'x': kwargs['geid']}, build_descr=False) hook.Operation.__init__(self, session, *args, **kwargs) self.group = result[0][0] @@ -216,7 +216,7 @@ if not session.describe(eidfrom)[0] == 'CWProperty': return key, value = session.execute('Any K,V WHERE P eid %(x)s,P pkey K,P value V', - {'x': eidfrom}, 'x')[0] + {'x': eidfrom})[0] if session.vreg.property_info(key)['sitewide']: qname = role_name('for_user', 'subject') msg = session._("site-wide property can't be set for user") @@ -234,7 
+234,7 @@ def __call__(self): session = self._cw key = session.execute('Any K WHERE P eid %(x)s, P pkey K', - {'x': self.eidfrom}, 'x')[0][0] + {'x': self.eidfrom})[0][0] session.transaction_data.setdefault('pendingrelations', []).append( (self.eidfrom, self.rtype, self.eidto)) for session_ in get_user_sessions(session.repo, self.eidto): diff -r f3c2cb460ad9 -r d14bfd477c44 hooks/test/unittest_syncschema.py --- a/hooks/test/unittest_syncschema.py Thu Apr 08 12:42:47 2010 +0200 +++ b/hooks/test/unittest_syncschema.py Thu Apr 08 14:11:49 2010 +0200 @@ -32,17 +32,17 @@ def _set_perms(self, eid): self.execute('SET X read_permission G WHERE X eid %(x)s, G is CWGroup', - {'x': eid}, 'x') + {'x': eid}) self.execute('SET X add_permission G WHERE X eid %(x)s, G is CWGroup, G name "managers"', - {'x': eid}, 'x') + {'x': eid}) self.execute('SET X delete_permission G WHERE X eid %(x)s, G is CWGroup, G name "owners"', - {'x': eid}, 'x') + {'x': eid}) def _set_attr_perms(self, eid): self.execute('SET X read_permission G WHERE X eid %(x)s, G is CWGroup', - {'x': eid}, 'x') + {'x': eid}) self.execute('SET X update_permission G WHERE X eid %(x)s, G is CWGroup, G name "managers"', - {'x': eid}, 'x') + {'x': eid}) def test_base(self): schema = self.repo.schema @@ -88,7 +88,7 @@ 'WHERE RT name "concerne2", E name "CWUser"')[0][0] self._set_perms(rdefeid) self.commit() - self.execute('DELETE CWRelation X WHERE X eid %(x)s', {'x': concerne2_rdef_eid}, 'x') + self.execute('DELETE CWRelation X WHERE X eid %(x)s', {'x': concerne2_rdef_eid}) self.commit() self.failUnless('concerne2' in schema['CWUser'].subject_relations()) self.failIf('concerne2' in schema['Societe2'].subject_relations()) @@ -248,7 +248,7 @@ attreid = self.execute('INSERT CWAttribute X: X cardinality "11", X defaultval "noname", X indexed TRUE, X relation_type RT, X from_entity E, X to_entity F ' 'WHERE RT name "messageid", E name "BaseTransition", F name "String"')[0][0] assert self.execute('SET X read_permission Y WHERE X 
eid %(x)s, Y name "managers"', - {'x': attreid}, 'x') + {'x': attreid}) self.commit() self.schema.rebuild_infered_relations() self.failUnless('Transition' in self.schema['messageid'].subjects()) @@ -299,10 +299,10 @@ if not getattr(cstr, 'eid', None): self.skip('start me alone') # bug in schema reloading, constraint's eid not restored self.execute('SET X value %(v)s WHERE X eid %(x)s', - {'x': cstr.eid, 'v': u"u'normal', u'auto', u'new'"}, 'x') + {'x': cstr.eid, 'v': u"u'normal', u'auto', u'new'"}) self.execute('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X ' 'WHERE CT name %(ct)s, EDEF eid %(x)s', - {'ct': 'SizeConstraint', 'value': u'max=10', 'x': rdef.eid}, 'x') + {'ct': 'SizeConstraint', 'value': u'max=10', 'x': rdef.eid}) self.commit() cstr = rdef.constraint_by_type('StaticVocabularyConstraint') self.assertEquals(cstr.values, (u'normal', u'auto', u'new')) diff -r f3c2cb460ad9 -r d14bfd477c44 i18n/en.po --- a/i18n/en.po Thu Apr 08 12:42:47 2010 +0200 +++ b/i18n/en.po Thu Apr 08 14:11:49 2010 +0200 @@ -368,9 +368,6 @@ msgid "Do you want to delete the following element(s) ?" msgstr "" -msgid "Download page as pdf" -msgstr "" - msgctxt "inlined:CWUser.use_email.subject" msgid "EmailAddress" msgstr "Email address" @@ -1578,12 +1575,6 @@ msgid "contentnavigation_metadata_description" msgstr "" -msgid "contentnavigation_pdfview" -msgstr "view page as pdf icon" - -msgid "contentnavigation_pdfview_description" -msgstr "" - msgid "contentnavigation_prevnext" msgstr "previous / next entity" diff -r f3c2cb460ad9 -r d14bfd477c44 i18n/es.po --- a/i18n/es.po Thu Apr 08 12:42:47 2010 +0200 +++ b/i18n/es.po Thu Apr 08 14:11:49 2010 +0200 @@ -376,9 +376,6 @@ msgid "Do you want to delete the following element(s) ?" 
msgstr "Desea suprimir el(los) elemento(s) siguiente(s)" -msgid "Download page as pdf" -msgstr "" - msgctxt "inlined:CWUser.use_email.subject" msgid "EmailAddress" msgstr "" @@ -1609,12 +1606,6 @@ msgid "contentnavigation_metadata_description" msgstr "" -msgid "contentnavigation_pdfview" -msgstr "" - -msgid "contentnavigation_pdfview_description" -msgstr "" - msgid "contentnavigation_prevnext" msgstr "Elemento anterior / siguiente" diff -r f3c2cb460ad9 -r d14bfd477c44 i18n/fr.po --- a/i18n/fr.po Thu Apr 08 12:42:47 2010 +0200 +++ b/i18n/fr.po Thu Apr 08 14:11:49 2010 +0200 @@ -387,9 +387,6 @@ msgid "Do you want to delete the following element(s) ?" msgstr "Voulez-vous supprimer le(s) élément(s) suivant(s) ?" -msgid "Download page as pdf" -msgstr "télécharger la page au format PDF" - msgctxt "inlined:CWUser.use_email.subject" msgid "EmailAddress" msgstr "Adresse électronique" @@ -1630,12 +1627,6 @@ msgid "contentnavigation_metadata_description" msgstr "" -msgid "contentnavigation_pdfview" -msgstr "icône pdf" - -msgid "contentnavigation_pdfview_description" -msgstr "" - msgid "contentnavigation_prevnext" msgstr "élément précedent / suivant" diff -r f3c2cb460ad9 -r d14bfd477c44 migration.py --- a/migration.py Thu Apr 08 12:42:47 2010 +0200 +++ b/migration.py Thu Apr 08 14:11:49 2010 +0200 @@ -16,6 +16,7 @@ from logilab.common.decorators import cached from logilab.common.configuration import REQUIRED, read_old_config from logilab.common.shellutils import ASK +from logilab.common.changelog import Version from cubicweb import ConfigurationError @@ -374,3 +375,75 @@ from logging import getLogger from cubicweb import set_log_methods set_log_methods(MigrationHelper, getLogger('cubicweb.migration')) + + +def version_strictly_lower(a, b): + if a: + a = Version(a) + if b: + b = Version(b) + return a < b + +def max_version(a, b): + return str(max(Version(a), Version(b))) + +class ConfigurationProblem(object): + """Each cube has its own list of dependencies on other 
cubes/versions. + + The ConfigurationProblem is used to record the loaded cubes, then to detect + inconsistencies in their dependencies. + + See configuration management on wikipedia for litterature. + """ + + def __init__(self, config): + self.cubes = {} + self.config = config + + def add_cube(self, name, version): + self.cubes[name] = version + + def solve(self): + self.warnings = [] + self.errors = [] + self.read_constraints() + for cube, versions in sorted(self.constraints.items()): + oper, version = None, None + # simplify constraints + if versions: + for constraint in versions: + op, ver = constraint + if oper is None: + oper = op + version = ver + elif op == '>=' and oper == '>=': + version = max_version(ver, version) + else: + print 'unable to handle this case', oper, version, op, ver + # "solve" constraint satisfaction problem + if cube not in self.cubes: + self.errors.append( ('add', cube, version) ) + elif versions: + lower_strict = version_strictly_lower(self.cubes[cube], version) + if oper in ('>=','='): + if lower_strict: + self.errors.append( ('update', cube, version) ) + else: + print 'unknown operator', oper + + def read_constraints(self): + self.constraints = {} + self.reverse_constraints = {} + for cube in self.cubes: + use = self.config.cube_dependencies(cube) + for name, constraint in use.iteritems(): + self.constraints.setdefault(name,set()) + if constraint: + try: + oper, version = constraint.split() + self.constraints[name].add( (oper, version) ) + except: + self.warnings.append( + 'cube %s depends on %s but constraint badly ' + 'formatted: %s' % (cube, name, constraint)) + self.reverse_constraints.setdefault(name, set()).add(cube) diff -r f3c2cb460ad9 -r d14bfd477c44 misc/migration/postcreate.py --- a/misc/migration/postcreate.py Thu Apr 08 12:42:47 2010 +0200 +++ b/misc/migration/postcreate.py Thu Apr 08 14:11:49 2010 +0200 @@ -43,7 +43,7 @@ # need this since we already have at least one user in the database (the default admin) for user in 
rql('Any X WHERE X is CWUser').entities(): rql('SET X in_state S WHERE X eid %(x)s, S eid %(s)s', - {'x': user.eid, 's': activated.eid}, 'x') + {'x': user.eid, 's': activated.eid}) # on interactive mode, ask for level 0 persistent options if interactive_mode: @@ -55,11 +55,12 @@ default = cfg.option_default(optname, optdict) # only record values differing from default if value != default: - rql('INSERT CWProperty X: X pkey %(k)s, X value %(v)s', {'k': key, 'v': value}) + rql('INSERT CWProperty X: X pkey %(k)s, X value %(v)s', + {'k': key, 'v': value}) # add PERM_USE_TEMPLATE_FORMAT permission from cubicweb.schema import PERM_USE_TEMPLATE_FORMAT usetmplperm = create_entity('CWPermission', name=PERM_USE_TEMPLATE_FORMAT, label=_('use template languages')) rql('SET X require_group G WHERE G name "managers", X eid %(x)s', - {'x': usetmplperm.eid}, 'x') + {'x': usetmplperm.eid}) diff -r f3c2cb460ad9 -r d14bfd477c44 pytestconf.py --- a/pytestconf.py Thu Apr 08 12:42:47 2010 +0200 +++ b/pytestconf.py Thu Apr 08 14:11:49 2010 +0200 @@ -5,8 +5,6 @@ from os.path import split, splitext from logilab.common.pytest import PyTester -from cubicweb.etwist.server import _gc_debug - class CustomPyTester(PyTester): def testfile(self, filename, batchmode=False): try: @@ -22,7 +20,6 @@ if getattr(cls, '__module__', None) != modname: continue clean_repo_test_cls(cls) - #_gc_debug() def clean_repo_test_cls(cls): if 'repo' in cls.__dict__: diff -r f3c2cb460ad9 -r d14bfd477c44 req.py --- a/req.py Thu Apr 08 12:42:47 2010 +0200 +++ b/req.py Thu Apr 08 14:11:49 2010 +0200 @@ -137,7 +137,6 @@ rql = 'INSERT %s X' % etype relations = [] restrictions = set() - cachekey = [] pending_relations = [] for attr, value in kwargs.items(): if isinstance(value, (tuple, list, set, frozenset)): @@ -157,7 +156,6 @@ restriction = '%s eid %%(%s)s' % (rvar, attr) if not restriction in restrictions: restrictions.add(restriction) - cachekey.append(attr) kwargs[attr] = value.eid else: # attribute relations.append('X 
%s %%(%s)s' % (attr, attr)) @@ -165,7 +163,7 @@ rql = '%s: %s' % (rql, ', '.join(relations)) if restrictions: rql = '%s WHERE %s' % (rql, ', '.join(restrictions)) - created = execute(rql, kwargs, cachekey).get_entity(0, 0) + created = execute(rql, kwargs).get_entity(0, 0) for attr, values in pending_relations: if attr.startswith('reverse_'): restr = 'Y %s X' % attr[len('reverse_'):] @@ -173,7 +171,7 @@ restr = 'X %s Y' % attr execute('SET %s WHERE X eid %%(x)s, Y eid IN (%s)' % ( restr, ','.join(str(r.eid) for r in values)), - {'x': created.eid}, 'x', build_descr=False) + {'x': created.eid}, build_descr=False) return created def ensure_ro_rql(self, rql): diff -r f3c2cb460ad9 -r d14bfd477c44 rqlrewrite.py --- a/rqlrewrite.py Thu Apr 08 12:42:47 2010 +0200 +++ b/rqlrewrite.py Thu Apr 08 14:11:49 2010 +0200 @@ -41,15 +41,15 @@ except KeyError: continue stinfo = var.stinfo - if stinfo.get('uidrels'): + if stinfo.get('uidrel') is not None: continue # eid specified, no need for additional type specification try: - typerels = rqlst.defined_vars[varname].stinfo.get('typerels') + typerel = rqlst.defined_vars[varname].stinfo.get('typerel') except KeyError: assert varname in rqlst.aliases continue - if newroot is rqlst and typerels: - mytyperel = iter(typerels).next() + if newroot is rqlst and typerel is not None: + mytyperel = typerel else: for vref in newroot.defined_vars[varname].references(): rel = vref.relation() @@ -80,7 +80,7 @@ # tree is not annotated yet, no scope set so add the restriction # to the root rel = newroot.add_type_restriction(var, possibletypes) - stinfo['typerels'] = frozenset((rel,)) + stinfo['typerel'] = rel stinfo['possibletypes'] = possibletypes diff -r f3c2cb460ad9 -r d14bfd477c44 rset.py --- a/rset.py Thu Apr 08 12:42:47 2010 +0200 +++ b/rset.py Thu Apr 08 14:11:49 2010 +0200 @@ -31,14 +31,12 @@ :type rql: str or unicode :ivar rql: the original RQL query string """ - def __init__(self, results, rql, args=None, description=(), cachekey=None, - 
rqlst=None): + def __init__(self, results, rql, args=None, description=(), rqlst=None): self.rows = results self.rowcount = results and len(results) or 0 # original query and arguments self.rql = rql self.args = args - self.cachekey = cachekey # entity types for each cell (same shape as rows) # maybe discarded if specified when the query has been executed self.description = description diff -r f3c2cb460ad9 -r d14bfd477c44 schema.py --- a/schema.py Thu Apr 08 12:42:47 2010 +0200 +++ b/schema.py Thu Apr 08 14:11:49 2010 +0200 @@ -705,14 +705,14 @@ if eidto is None: # checking constraint for an attribute relation restriction = 'S eid %(s)s, ' + self.restriction - args, ck = {'s': eidfrom}, 's' + args = {'s': eidfrom} else: restriction = 'S eid %(s)s, O eid %(o)s, ' + self.restriction - args, ck = {'s': eidfrom, 'o': eidto}, ('s', 'o') + args = {'s': eidfrom, 'o': eidto} rql = 'Any %s WHERE %s' % (self.mainvars, restriction) if self.distinct_query: rql = 'DISTINCT ' + rql - return session.execute(rql, args, ck, build_descr=False) + return session.execute(rql, args, build_descr=False) class RQLConstraint(RepoEnforcedRQLConstraintMixIn, RQLVocabularyConstraint): @@ -839,9 +839,8 @@ return False if keyarg is None: kwargs.setdefault('u', session.user.eid) - cachekey = kwargs.keys() try: - rset = session.execute(rql, kwargs, cachekey, build_descr=True) + rset = session.execute(rql, kwargs, build_descr=True) except NotImplementedError: self.critical('cant check rql expression, unsupported rql %s', rql) if self.eid is not None: diff -r f3c2cb460ad9 -r d14bfd477c44 selectors.py --- a/selectors.py Thu Apr 08 12:42:47 2010 +0200 +++ b/selectors.py Thu Apr 08 14:11:49 2010 +0200 @@ -1030,7 +1030,7 @@ def score(self, req, rset, row, col): try: return len(req.execute(self.rql, {'x': rset[row][col], - 'u': req.user.eid}, 'x')) + 'u': req.user.eid})) except Unauthorized: return 0 diff -r f3c2cb460ad9 -r d14bfd477c44 server/hook.py --- a/server/hook.py Thu Apr 08 12:42:47 2010 +0200 
+++ b/server/hook.py Thu Apr 08 14:11:49 2010 +0200 @@ -75,10 +75,7 @@ self.unregister(cls) def register(self, obj, **kwargs): - for event in obj.events: - if event not in ALL_HOOKS: - raise Exception('bad event %s on %s.%s' % ( - event, obj.__module__, obj.__name__)) + obj.check_events() super(HooksRegistry, self).register(obj, **kwargs) def call_hooks(self, event, session=None, **kwargs): @@ -199,10 +196,13 @@ # XXX deprecated enabled = True - @classproperty - def __registries__(cls): + @classmethod + def check_events(cls): try: - return ['%s_hooks' % ev for ev in cls.events] + for event in cls.events: + if event not in ALL_HOOKS: + raise Exception('bad event %s on %s.%s' % ( + event, cls.__module__, cls.__name__)) except AttributeError: raise except TypeError: @@ -210,6 +210,11 @@ cls.events, cls.__module__, cls.__name__)) @classproperty + def __registries__(cls): + cls.check_events() + return ['%s_hooks' % ev for ev in cls.events] + + @classproperty def __regid__(cls): warn('[3.6] %s.%s: please specify an id for your hook' % (cls.__module__, cls.__name__), DeprecationWarning) diff -r f3c2cb460ad9 -r d14bfd477c44 server/migractions.py --- a/server/migractions.py Thu Apr 08 12:42:47 2010 +0200 +++ b/server/migractions.py Thu Apr 08 14:11:49 2010 +0200 @@ -268,9 +268,9 @@ if self.session: self.session.set_pool() - def rqlexecall(self, rqliter, cachekey=None, ask_confirm=True): + def rqlexecall(self, rqliter, ask_confirm=True): for rql, kwargs in rqliter: - self.rqlexec(rql, kwargs, cachekey, ask_confirm=ask_confirm) + self.rqlexec(rql, kwargs, ask_confirm=ask_confirm) @cached def _create_context(self): @@ -361,14 +361,14 @@ # handle groups newgroups = list(erschema.get_groups(action)) for geid, gname in self.rqlexec('Any G, GN WHERE T %s G, G name GN, ' - 'T eid %%(x)s' % perm, {'x': teid}, 'x', + 'T eid %%(x)s' % perm, {'x': teid}, ask_confirm=False): if not gname in newgroups: if not confirm or self.confirm('Remove %s permission of %s to %s?' 
% (action, erschema, gname)): self.rqlexec('DELETE T %s G WHERE G eid %%(x)s, T eid %s' % (perm, teid), - {'x': geid}, 'x', ask_confirm=False) + {'x': geid}, ask_confirm=False) else: newgroups.remove(gname) for gname in newgroups: @@ -376,7 +376,7 @@ % (action, erschema, gname)): self.rqlexec('SET T %s G WHERE G eid %%(x)s, T eid %s' % (perm, teid), - {'x': gm[gname]}, 'x', ask_confirm=False) + {'x': gm[gname]}, ask_confirm=False) # handle rql expressions newexprs = dict((expr.expression, expr) for expr in erschema.get_rqlexprs(action)) for expreid, expression in self.rqlexec('Any E, EX WHERE T %s E, E expression EX, ' @@ -388,7 +388,7 @@ # deleting the relation will delete the expression entity self.rqlexec('DELETE T %s E WHERE E eid %%(x)s, T eid %s' % (perm, teid), - {'x': expreid}, 'x', ask_confirm=False) + {'x': expreid}, ask_confirm=False) else: newexprs.pop(expression) for expression in newexprs.values(): @@ -399,7 +399,7 @@ 'X expression %%(expr)s, X mainvars %%(vars)s, T %s X ' 'WHERE T eid %%(x)s' % perm, {'expr': expr, 'exprtype': exprtype, - 'vars': expression.mainvars, 'x': teid}, 'x', + 'vars': expression.mainvars, 'x': teid}, ask_confirm=False) def _synchronize_rschema(self, rtype, syncrdefs=True, syncperms=True, syncprops=True): @@ -524,14 +524,13 @@ newcstr = None if newcstr is None: self.rqlexec('DELETE X constrained_by C WHERE C eid %(x)s', - {'x': cstr.eid}, 'x', - ask_confirm=confirm) + {'x': cstr.eid}, ask_confirm=confirm) else: newconstraints.remove(newcstr) value = unicode(newcstr.serialize()) if value != unicode(cstr.serialize()): self.rqlexec('SET X value %(v)s WHERE X eid %(x)s', - {'x': cstr.eid, 'v': value}, 'x', + {'x': cstr.eid, 'v': value}, ask_confirm=confirm) # 2. 
add new constraints cstrtype_map = self.cstrtype_mapping() @@ -644,10 +643,10 @@ self.cmd_drop_relation_definition( str(fromtype), rschema.type, str(totype)) # execute post-remove files - for pack in reversed(removedcubes): - self.exec_event_script('postremove', self.config.cube_dir(pack)) + for cube in reversed(removedcubes): + self.exec_event_script('postremove', self.config.cube_dir(cube)) self.rqlexec('DELETE CWProperty X WHERE X pkey %(pk)s', - {'pk': u'system.version.'+pack}, ask_confirm=False) + {'pk': u'system.version.'+cube}, ask_confirm=False) self.commit() # schema migration actions ################################################ @@ -736,8 +735,8 @@ continue if instspschema.specializes() != eschema: self.rqlexec('SET D specializes P WHERE D eid %(d)s, P name %(pn)s', - {'d': instspschema.eid, - 'pn': eschema.type}, ask_confirm=confirm) + {'d': instspschema.eid, 'pn': eschema.type}, + ask_confirm=confirm) for rschema, tschemas, role in spschema.relation_definitions(True): for tschema in tschemas: if not tschema in instschema: @@ -1073,12 +1072,12 @@ for etype in wfof: rset = self.rqlexec( 'SET X workflow_of ET WHERE X eid %(x)s, ET name %(et)s', - {'x': wf.eid, 'et': etype}, 'x', ask_confirm=False) + {'x': wf.eid, 'et': etype}, ask_confirm=False) assert rset, 'unexistant entity type %s' % etype if default: self.rqlexec( 'SET ET default_workflow X WHERE X eid %(x)s, ET name %(et)s', - {'x': wf.eid, 'et': etype}, 'x', ask_confirm=False) + {'x': wf.eid, 'et': etype}, ask_confirm=False) if commit: self.commit() return wf @@ -1202,6 +1201,9 @@ def rqlexec(self, rql, kwargs=None, cachekey=None, build_descr=True, ask_confirm=True): """rql action""" + if cachekey is not None: + warn('[3.8] cachekey is deprecated, you can safely remove this argument', + DeprecationWarning, stacklevel=2) if not isinstance(rql, (tuple, list)): rql = ( (rql, kwargs), ) res = None @@ -1213,7 +1215,7 @@ msg = rql if not ask_confirm or self.confirm('Execute rql: %s ?' 
% msg): try: - res = execute(rql, kwargs, cachekey, build_descr=build_descr) + res = execute(rql, kwargs, build_descr=build_descr) except Exception, ex: if self.confirm('Error: %s\nabort?' % ex): raise diff -r f3c2cb460ad9 -r d14bfd477c44 server/msplanner.py --- a/server/msplanner.py Thu Apr 08 12:42:47 2010 +0200 +++ b/server/msplanner.py Thu Apr 08 14:11:49 2010 +0200 @@ -309,21 +309,24 @@ # find for each source which variable/solution are supported for varname, varobj in self.rqlst.defined_vars.items(): # if variable has an eid specified, we can get its source directly - # NOTE: use uidrels and not constnode to deal with "X eid IN(1,2,3,4)" - if varobj.stinfo['uidrels']: - vrels = varobj.stinfo['relations'] - varobj.stinfo['uidrels'] - for rel in varobj.stinfo['uidrels']: - for const in rel.children[1].get_nodes(Constant): - eid = const.eval(self.plan.args) - source = self._session.source_from_eid(eid) - if vrels and not any(source.support_relation(r.r_type) - for r in vrels): - self._set_source_for_term(self.system_source, varobj) - else: - self._set_source_for_term(source, varobj) + # NOTE: use uidrel and not constnode to deal with "X eid IN(1,2,3,4)" + if varobj.stinfo['uidrel'] is not None: + rel = varobj.stinfo['uidrel'] + hasrel = len(varobj.stinfo['relations']) > 1 + for const in rel.children[1].get_nodes(Constant): + eid = const.eval(self.plan.args) + source = self._session.source_from_eid(eid) + if (source is self.system_source + or (hasrel and + not any(source.support_relation(r.r_type) + for r in varobj.stinfo['relations'] + if not r is rel))): + self._set_source_for_term(self.system_source, varobj) + else: + self._set_source_for_term(source, varobj) continue rels = varobj.stinfo['relations'] - if not rels and not varobj.stinfo['typerels']: + if not rels and varobj.stinfo['typerel'] is None: # (rare) case where the variable has no type specified nor # relation accessed ex. 
"Any MAX(X)" self._set_source_for_term(self.system_source, varobj) @@ -700,7 +703,7 @@ for var in select.defined_vars.itervalues(): if not var in terms: stinfo = var.stinfo - for ovar, rtype in stinfo['attrvars']: + for ovar, rtype in stinfo.get('attrvars', ()): if ovar in terms: needsel.add(var.name) terms.append(var) @@ -778,20 +781,19 @@ # variable is refed by an outer scope and should be substituted # using an 'identity' relation (else we'll get a conflict of # temporary tables) - if rhsvar in terms and not lhsvar in terms: + if rhsvar in terms and not lhsvar in terms and lhsvar.scope is lhsvar.stmt: self._identity_substitute(rel, lhsvar, terms, needsel) - elif lhsvar in terms and not rhsvar in terms: + elif lhsvar in terms and not rhsvar in terms and rhsvar.scope is rhsvar.stmt: self._identity_substitute(rel, rhsvar, terms, needsel) def _identity_substitute(self, relation, var, terms, needsel): newvar = self._insert_identity_variable(relation.scope, var) - if newvar is not None: - # ensure relation is using '=' operator, else we rely on a - # sqlgenerator side effect (it won't insert an inequality operator - # in this case) - relation.children[1].operator = '=' - terms.append(newvar) - needsel.add(newvar.name) + # ensure relation is using '=' operator, else we rely on a + # sqlgenerator side effect (it won't insert an inequality operator + # in this case) + relation.children[1].operator = '=' + terms.append(newvar) + needsel.add(newvar.name) def _choose_term(self, sourceterms): """pick one term among terms supported by a source, which will be used @@ -1419,7 +1421,7 @@ return False if not var in terms or used_in_outer_scope(var, self.current_scope): return False - if any(v for v, _ in var.stinfo['attrvars'] if not v in terms): + if any(v for v, _ in var.stinfo.get('attrvars', ()) if not v in terms): return False return True diff -r f3c2cb460ad9 -r d14bfd477c44 server/mssteps.py --- a/server/mssteps.py Thu Apr 08 12:42:47 2010 +0200 +++ b/server/mssteps.py Thu 
Apr 08 14:11:49 2010 +0200 @@ -61,7 +61,7 @@ if not isinstance(vref, VariableRef): continue var = vref.variable - if var.stinfo['attrvars']: + if var.stinfo.get('attrvars'): for lhsvar, rtype in var.stinfo['attrvars']: if lhsvar.name in srqlst.defined_vars: key = '%s.%s' % (lhsvar.name, rtype) diff -r f3c2cb460ad9 -r d14bfd477c44 server/querier.py --- a/server/querier.py Thu Apr 08 12:42:47 2010 +0200 +++ b/server/querier.py Thu Apr 08 14:11:49 2010 +0200 @@ -326,16 +326,9 @@ varkwargs = {} if not session.transaction_data.get('security-rqlst-cache'): for var in rqlst.defined_vars.itervalues(): - for rel in var.stinfo['uidrels']: - const = rel.children[1].children[0] - try: - varkwargs[var.name] = typed_eid(const.eval(self.args)) - break - except AttributeError: - #from rql.nodes import Function - #assert isinstance(const, Function) - # X eid IN(...) - pass + if var.stinfo['constnode'] is not None: + eid = var.stinfo['constnode'].eval(self.args) + varkwargs[var.name] = typed_eid(eid) # dictionnary of variables restricted for security reason localchecks = {} restricted_vars = set() @@ -529,16 +522,22 @@ def set_schema(self, schema): self.schema = schema repo = self._repo + # rql st and solution cache. 
Don't bother using a Cache instance: we + # should have a limited number of queries in there, since there are no + # entries in this cache for user queries (which have no args) + self._rql_cache = {} + # rql cache key cache + self._rql_ck_cache = Cache(repo.config['rql-cache-size']) + # some cache usage stats + self.cache_hit, self.cache_miss = 0, 0 # rql parsing / analysing helper self.solutions = repo.vreg.solutions - self._rql_cache = Cache(repo.config['rql-cache-size']) - self.cache_hit, self.cache_miss = 0, 0 + rqlhelper = repo.vreg.rqlhelper + self._parse = rqlhelper.parse + self._annotate = rqlhelper.annotate # rql planner # note: don't use repo.sources, may not be built yet, and also "admin" # isn't an actual source - rqlhelper = repo.vreg.rqlhelper - self._parse = rqlhelper.parse - self._annotate = rqlhelper.annotate if len([uri for uri in repo.config.sources() if uri != 'admin']) < 2: from cubicweb.server.ssplanner import SSPlanner self._planner = SSPlanner(schema, rqlhelper) @@ -561,7 +560,7 @@ return InsertPlan(self, rqlst, args, session) return ExecutionPlan(self, rqlst, args, session) - def execute(self, session, rql, args=None, eid_key=None, build_descr=True): + def execute(self, session, rql, args=None, build_descr=True): """execute a rql query, return resulting rows and their description in a `ResultSet` object @@ -570,12 +569,6 @@ * `build_descr` is a boolean flag indicating if the description should be built on select queries (if false, the description will be en empty list) - * `eid_key` must be both a key in args and a substitution in the rql - query. It should be used to enhance cacheability of rql queries. - It may be a tuple for keys in args. - `eid_key` must be provided in cases where a eid substitution is provided - and resolves ambiguities in the possible solutions inferred for each - variable in the query. 
on INSERT queries, there will be one row with the eid of each inserted entity @@ -591,40 +584,33 @@ print '*'*80 print 'querier input', rql, args # parse the query and binds variables - if eid_key is not None: - if not isinstance(eid_key, (tuple, list)): - eid_key = (eid_key,) - cachekey = [rql] - for key in eid_key: - try: - etype = self._repo.type_from_eid(args[key], session) - except KeyError: - raise QueryError('bad cache key %s (no value)' % key) - except TypeError: - raise QueryError('bad cache key %s (value: %r)' % ( - key, args[key])) - except UnknownEid: - # we want queries such as "Any X WHERE X eid 9999" - # return an empty result instead of raising UnknownEid - return empty_rset(rql, args) - cachekey.append(etype) - # ensure eid is correctly typed in args - args[key] = typed_eid(args[key]) - cachekey = tuple(cachekey) - else: + try: cachekey = rql - try: + if args: + eidkeys = self._rql_ck_cache[rql] + if eidkeys: + try: + cachekey = self._repo.querier_cache_key(session, rql, + args, eidkeys) + except UnknownEid: + # we want queries such as "Any X WHERE X eid 9999" + # return an empty result instead of raising UnknownEid + return empty_rset(rql, args) rqlst = self._rql_cache[cachekey] self.cache_hit += 1 except KeyError: self.cache_miss += 1 rqlst = self.parse(rql) try: - self.solutions(session, rqlst, args) + eidkeys = self.solutions(session, rqlst, args) except UnknownEid: # we want queries such as "Any X WHERE X eid 9999" return an # empty result instead of raising UnknownEid return empty_rset(rql, args, rqlst) + self._rql_ck_cache[rql] = eidkeys + if eidkeys: + cachekey = self._repo.querier_cache_key(session, rql, args, + eidkeys) self._rql_cache[cachekey] = rqlst orig_rqlst = rqlst if rqlst.TYPE != 'select': @@ -684,7 +670,7 @@ # FIXME: get number of affected entities / relations on non # selection queries ? 
# return a result set object - return ResultSet(results, rql, args, descr, eid_key, orig_rqlst) + return ResultSet(results, rql, args, descr, orig_rqlst) from logging import getLogger from cubicweb import set_log_methods diff -r f3c2cb460ad9 -r d14bfd477c44 server/repository.py --- a/server/repository.py Thu Apr 08 12:42:47 2010 +0200 +++ b/server/repository.py Thu Apr 08 14:11:49 2010 +0200 @@ -33,7 +33,7 @@ from yams.schema import role_name from rql import RQLSyntaxError -from cubicweb import (CW_SOFTWARE_ROOT, CW_MIGRATION_MAP, +from cubicweb import (CW_SOFTWARE_ROOT, CW_MIGRATION_MAP, QueryError, UnknownEid, AuthenticationError, ExecutionError, ETypeNotSupportedBySources, MultiSourcesError, BadConnectionId, Unauthorized, ValidationError, @@ -76,12 +76,12 @@ with security_enabled(session, read=False): session.execute('DELETE X %s Y WHERE X eid %%(x)s, ' 'NOT Y eid %%(y)s' % rtype, - {'x': eidfrom, 'y': eidto}, 'x') + {'x': eidfrom, 'y': eidto}) if card[1] in '1?': with security_enabled(session, read=False): session.execute('DELETE X %sY WHERE Y eid %%(y)s, ' 'NOT X eid %%(x)s' % rtype, - {'x': eidfrom, 'y': eidto}, 'y') + {'x': eidfrom, 'y': eidto}) class Repository(object): @@ -408,7 +408,7 @@ """return a CWUser entity for user with the given eid""" cls = self.vreg['etypes'].etype_class('CWUser') rql = cls.fetch_rql(session.user, ['X eid %(x)s']) - rset = session.execute(rql, {'x': eid}, 'x') + rset = session.execute(rql, {'x': eid}) assert len(rset) == 1, rset cwuser = rset.get_entity(0, 0) # pylint: disable-msg=W0104 @@ -567,7 +567,7 @@ session.commit() return session.id - def execute(self, sessionid, rqlstring, args=None, eid_key=None, build_descr=True): + def execute(self, sessionid, rqlstring, args=None, build_descr=True): """execute a RQL query * rqlstring should be an unicode string or a plain ascii string @@ -578,7 +578,7 @@ session = self._get_session(sessionid, setpool=True) try: try: - return self.querier.execute(session, rqlstring, args, eid_key, + 
return self.querier.execute(session, rqlstring, args, build_descr) except (Unauthorized, RQLSyntaxError): raise @@ -836,6 +836,21 @@ """return the source for the given entity's eid""" return self.sources_by_uri[self.type_and_source_from_eid(eid, session)[1]] + def querier_cache_key(self, session, rql, args, eidkeys): + cachekey = [rql] + for key in sorted(eidkeys): + try: + etype = self.type_from_eid(args[key], session) + except KeyError: + raise QueryError('bad cache key %s (no value)' % key) + except TypeError: + raise QueryError('bad cache key %s (value: %r)' % ( + key, args[key])) + cachekey.append(etype) + # ensure eid is correctly typed in args + args[key] = typed_eid(args[key]) + return tuple(cachekey) + def eid2extid(self, source, eid, session=None): """get local id from an eid""" etype, uri, extid = self.type_and_source_from_eid(eid, session) @@ -901,7 +916,7 @@ else: # minimal meta-data session.execute('SET X is E WHERE X eid %(x)s, E name %(name)s', - {'x': entity.eid, 'name': entity.__regid__}, 'x') + {'x': entity.eid, 'name': entity.__regid__}) session.commit(reset_pool) return eid except: @@ -949,7 +964,7 @@ rql = 'DELETE X %s Y WHERE X eid %%(x)s' % rtype else: rql = 'DELETE Y %s X WHERE X eid %%(x)s' % rtype - session.execute(rql, {'x': eid}, 'x', build_descr=False) + session.execute(rql, {'x': eid}, build_descr=False) self.system_source.delete_info(session, entity, sourceuri, extid) def locate_relation_source(self, session, subject, rtype, object): diff -r f3c2cb460ad9 -r d14bfd477c44 server/rqlannotation.py --- a/server/rqlannotation.py Thu Apr 08 12:42:47 2010 +0200 +++ b/server/rqlannotation.py Thu Apr 08 14:11:49 2010 +0200 @@ -38,7 +38,7 @@ stinfo['invariant'] = False stinfo['principal'] = _select_main_var(stinfo['rhsrelations']) continue - if not stinfo['relations'] and not stinfo['typerels']: + if not stinfo['relations'] and stinfo['typerel'] is None: # Any X, Any MAX(X)... 
# those particular queries should be executed using the system # entities table unless there is some type restriction @@ -80,7 +80,7 @@ continue rschema = getrschema(rel.r_type) if rel.optional: - if rel in stinfo['optrelations']: + if rel in stinfo.get('optrelations', ()): # optional variable can't be invariant if this is the lhs # variable of an inlined relation if not rel in stinfo['rhsrelations'] and rschema.inlined: @@ -296,7 +296,7 @@ def compute(self, rqlst): # set domains for each variable for varname, var in rqlst.defined_vars.iteritems(): - if var.stinfo['uidrels'] or \ + if var.stinfo['uidrel'] is not None or \ self.eschema(rqlst.solutions[0][varname]).final: ptypes = var.stinfo['possibletypes'] else: @@ -339,7 +339,7 @@ def set_rel_constraint(self, term, rel, etypes_func): if isinstance(term, VariableRef) and self.is_ambiguous(term.variable): var = term.variable - if len(var.stinfo['relations'] - var.stinfo['typerels']) == 1 \ + if len(var.stinfo['relations']) == 1 \ or rel.sqlscope is var.sqlscope or rel.r_type == 'identity': self.restrict(var, frozenset(etypes_func())) try: @@ -356,7 +356,7 @@ if isinstance(other, VariableRef) and isinstance(other.variable, Variable): deambiguifier = other.variable if not var is self.deambification_map.get(deambiguifier): - if not var.stinfo['typerels']: + if var.stinfo['typerel'] is None: otheretypes = deambiguifier.stinfo['possibletypes'] elif not self.is_ambiguous(deambiguifier): otheretypes = self.varsols[deambiguifier] @@ -364,7 +364,7 @@ # we know variable won't be invariant, try to use # it to deambguify the current variable otheretypes = self.varsols[deambiguifier] - if not deambiguifier.stinfo['typerels']: + if deambiguifier.stinfo['typerel'] is None: # if deambiguifier has no type restriction using 'is', # don't record it deambiguifier = None diff -r f3c2cb460ad9 -r d14bfd477c44 server/session.py --- a/server/session.py Thu Apr 08 12:42:47 2010 +0200 +++ b/server/session.py Thu Apr 08 14:11:49 2010 +0200 @@ 
-13,6 +13,7 @@ import threading from time import time from uuid import uuid4 +from warnings import warn from logilab.common.deprecation import deprecated from rql.nodes import VariableRef, Function, ETYPE_PYOBJ_MAP, etype_from_pyobj @@ -642,8 +643,14 @@ return self.repo.source_from_eid(eid, self) def execute(self, rql, kwargs=None, eid_key=None, build_descr=True): - """db-api like method directly linked to the querier execute method""" - rset = self._execute(self, rql, kwargs, eid_key, build_descr) + """db-api like method directly linked to the querier execute method. + + See :meth:`cubicweb.dbapi.Cursor.execute` documentation. + """ + if eid_key is not None: + warn('[3.8] eid_key is deprecated, you can safely remove this argument', + DeprecationWarning, stacklevel=2) + rset = self._execute(self, rql, kwargs, build_descr) rset.req = self return rset diff -r f3c2cb460ad9 -r d14bfd477c44 server/sources/ldapuser.py --- a/server/sources/ldapuser.py Thu Apr 08 12:42:47 2010 +0200 +++ b/server/sources/ldapuser.py Thu Apr 08 14:11:49 2010 +0200 @@ -230,10 +230,10 @@ elif rset: if not execute('SET X address %(addr)s WHERE ' 'U primary_email X, U eid %(u)s', - {'addr': ldapemailaddr, 'u': eid}, 'u'): + {'addr': ldapemailaddr, 'u': eid}): execute('SET X address %(addr)s WHERE ' 'X eid %(x)s', - {'addr': ldapemailaddr, 'x': rset[0][0]}, 'x') + {'addr': ldapemailaddr, 'x': rset[0][0]}) else: # no email found, create it _insert_email(session, ldapemailaddr, eid) @@ -546,7 +546,7 @@ super(LDAPUserSource, self).after_entity_insertion(session, dn, entity) for group in self.user_default_groups: session.execute('SET X in_group G WHERE X eid %(x)s, G name %(group)s', - {'x': entity.eid, 'group': group}, 'x') + {'x': entity.eid, 'group': group}) # search for existant email first try: emailaddr = self._cache[dn][self.user_rev_attrs['email']] @@ -556,7 +556,7 @@ {'addr': emailaddr}) if rset: session.execute('SET U primary_email X WHERE U eid %(u)s, X eid %(x)s', - {'x': rset[0][0], 'u': 
entity.eid}, 'u') + {'x': rset[0][0], 'u': entity.eid}) else: # not found, create it _insert_email(session, emailaddr, entity.eid) @@ -571,7 +571,7 @@ def _insert_email(session, emailaddr, ueid): session.execute('INSERT EmailAddress X: X address %(addr)s, U primary_email X ' - 'WHERE U eid %(x)s', {'addr': emailaddr, 'x': ueid}, 'x') + 'WHERE U eid %(x)s', {'addr': emailaddr, 'x': ueid}) class GotDN(Exception): """exception used when a dn localizing the searched user has been found""" diff -r f3c2cb460ad9 -r d14bfd477c44 server/sources/pyrorql.py --- a/server/sources/pyrorql.py Thu Apr 08 12:42:47 2010 +0200 +++ b/server/sources/pyrorql.py Thu Apr 08 14:11:49 2010 +0200 @@ -286,7 +286,7 @@ session.set_shared_data('sources_error', msg % self.uri) return [] try: - rql, cachekey = RQL2RQL(self).generate(session, union, args) + rql = RQL2RQL(self).generate(session, union, args) except UnknownEid, ex: if server.DEBUG: print ' unknown eid', ex, 'no results' @@ -294,7 +294,7 @@ if server.DEBUG & server.DBG_RQL: print ' translated rql', rql try: - rset = cu.execute(rql, args, cachekey) + rset = cu.execute(rql, args) except Exception, ex: self.exception(str(ex)) msg = session._("error while querying source %s, some data may be missing") @@ -396,9 +396,8 @@ def generate(self, session, rqlst, args): self._session = session self.kwargs = args - self.cachekey = [] self.need_translation = False - return self.visit_union(rqlst), self.cachekey + return self.visit_union(rqlst) def visit_union(self, node): s = self._accept_children(node) @@ -547,7 +546,6 @@ # ensure we have not yet translated the value... 
if not key in self._const_var: self.kwargs[key] = self.eid2extid(self.kwargs[key]) - self.cachekey.append(key) self._const_var[key] = None return node.as_string() diff -r f3c2cb460ad9 -r d14bfd477c44 server/sources/rql2sql.py --- a/server/sources/rql2sql.py Thu Apr 08 12:42:47 2010 +0200 +++ b/server/sources/rql2sql.py Thu Apr 08 14:11:49 2010 +0200 @@ -87,7 +87,7 @@ modified = False for varname in tuple(unstable): var = select.defined_vars[varname] - if not var.stinfo['optrelations']: + if not var.stinfo.get('optrelations'): continue modified = True unstable.remove(varname) @@ -114,13 +114,13 @@ var.stinfo['relations'].remove(rel) newvar.stinfo['relations'].add(newrel) if rel.optional in ('left', 'both'): - newvar.stinfo['optrelations'].add(newrel) + newvar.add_optional_relation(newrel) for vref in newrel.children[1].iget_nodes(VariableRef): var = vref.variable var.stinfo['relations'].add(newrel) var.stinfo['rhsrelations'].add(newrel) if rel.optional in ('right', 'both'): - var.stinfo['optrelations'].add(newrel) + var.add_optional_relation(newrel) # extract subquery solutions mysolutions = [sol.copy() for sol in solutions] cleanup_solutions(newselect, mysolutions) @@ -888,7 +888,7 @@ condition = '%s=%s' % (lhssql, rhsconst.accept(self)) if relation.r_type != 'identity': condition = '(%s OR %s IS NULL)' % (condition, lhssql) - if not lhsvar.stinfo['optrelations']: + if not lhsvar.stinfo.get('optrelations'): return condition self.add_outer_join_condition(lhsvar, t1, condition) return @@ -987,7 +987,7 @@ sql = '%s%s' % (lhssql, rhssql) except AttributeError: sql = '%s%s' % (lhssql, rhssql) - if lhs.variable.stinfo['optrelations']: + if lhs.variable.stinfo.get('optrelations'): self.add_outer_join_condition(lhs.variable, table, sql) else: return sql @@ -1002,7 +1002,7 @@ lhsvar = lhs.variable me_is_principal = lhsvar.stinfo.get('principal') is rel if me_is_principal: - if not lhsvar.stinfo['typerels']: + if lhsvar.stinfo['typerel'] is None: # the variable is using the 
fti table, no join needed jointo = None elif not lhsvar.name in self._varmap: @@ -1135,7 +1135,7 @@ vtablename = '_' + variable.name self.add_table('entities AS %s' % vtablename, vtablename) sql = '%s.eid' % vtablename - if variable.stinfo['typerels']: + if variable.stinfo['typerel'] is not None: # add additional restriction on entities.type column pts = variable.stinfo['possibletypes'] if len(pts) == 1: @@ -1297,7 +1297,7 @@ tablealias = self._state.outer_tables[table] actualtables = self._state.actual_tables[-1] except KeyError: - for rel in var.stinfo['optrelations']: + for rel in var.stinfo.get('optrelations'): self.visit_relation(rel) assert self._state.outer_tables self.add_outer_join_condition(var, table, condition) diff -r f3c2cb460ad9 -r d14bfd477c44 server/test/unittest_ldapuser.py --- a/server/test/unittest_ldapuser.py Thu Apr 08 12:42:47 2010 +0200 +++ b/server/test/unittest_ldapuser.py Thu Apr 08 14:11:49 2010 +0200 @@ -176,7 +176,7 @@ rset = self.sexecute('Any U ORDERBY D DESC WHERE WF wf_info_for X,' 'WF creation_date D, WF from_state FS,' 'WF owned_by U?, X eid %(x)s', - {'x': adim.eid}, 'x') + {'x': adim.eid}) self.assertEquals(rset.rows, [[syt.eid]]) finally: # restore db state diff -r f3c2cb460ad9 -r d14bfd477c44 server/test/unittest_migractions.py --- a/server/test/unittest_migractions.py Thu Apr 08 12:42:47 2010 +0200 +++ b/server/test/unittest_migractions.py Thu Apr 08 14:11:49 2010 +0200 @@ -108,8 +108,8 @@ testdate = date(2005, 12, 13) eid1 = self.mh.rqlexec('INSERT Note N')[0][0] eid2 = self.mh.rqlexec('INSERT Note N: N mydate %(mydate)s', {'mydate' : testdate})[0][0] - d1 = self.mh.rqlexec('Any D WHERE X eid %(x)s, X mydate D', {'x': eid1}, 'x')[0][0] - d2 = self.mh.rqlexec('Any D WHERE X eid %(x)s, X mydate D', {'x': eid2}, 'x')[0][0] + d1 = self.mh.rqlexec('Any D WHERE X eid %(x)s, X mydate D', {'x': eid1})[0][0] + d2 = self.mh.rqlexec('Any D WHERE X eid %(x)s, X mydate D', {'x': eid2})[0][0] self.assertEquals(d1, date.today()) 
self.assertEquals(d2, testdate) self.mh.rollback() @@ -503,13 +503,13 @@ note = self.execute('INSERT Note X: X para "hip", X shortpara "hop", X newattr "momo"').get_entity(0, 0) aff = self.execute('INSERT Affaire X').get_entity(0, 0) self.failUnless(self.execute('SET X newnotinlined Y WHERE X eid %(x)s, Y eid %(y)s', - {'x': text.eid, 'y': aff.eid}, 'x')) + {'x': text.eid, 'y': aff.eid})) self.failUnless(self.execute('SET X newnotinlined Y WHERE X eid %(x)s, Y eid %(y)s', - {'x': note.eid, 'y': aff.eid}, 'x')) + {'x': note.eid, 'y': aff.eid})) self.failUnless(self.execute('SET X newinlined Y WHERE X eid %(x)s, Y eid %(y)s', - {'x': text.eid, 'y': aff.eid}, 'x')) + {'x': text.eid, 'y': aff.eid})) self.failUnless(self.execute('SET X newinlined Y WHERE X eid %(x)s, Y eid %(y)s', - {'x': note.eid, 'y': aff.eid}, 'x')) + {'x': note.eid, 'y': aff.eid})) # XXX remove specializes by ourselves, else tearDown fails when removing # Para because of Note inheritance. This could be fixed by putting the # MemSchemaCWETypeDel(session, name) operation in the diff -r f3c2cb460ad9 -r d14bfd477c44 server/test/unittest_multisources.py --- a/server/test/unittest_multisources.py Thu Apr 08 12:42:47 2010 +0200 +++ b/server/test/unittest_multisources.py Thu Apr 08 14:11:49 2010 +0200 @@ -109,7 +109,7 @@ self.assertEquals(metainf['type'], 'Card') self.assert_(metainf['extid']) etype = self.sexecute('Any ETN WHERE X is ET, ET name ETN, X eid %(x)s', - {'x': externent.eid}, 'x')[0][0] + {'x': externent.eid})[0][0] self.assertEquals(etype, 'Card') def test_order_limit_offset(self): @@ -129,7 +129,7 @@ self.sexecute('INSERT Affaire X: X ref "no readable card"')[0][0] aff1 = self.sexecute('INSERT Affaire X: X ref "card"')[0][0] # grant read access - self.sexecute('SET X owned_by U WHERE X eid %(x)s, U login "anon"', {'x': aff1}, 'x') + self.sexecute('SET X owned_by U WHERE X eid %(x)s, U login "anon"', {'x': aff1}) self.commit() cnx = self.login('anon') cu = cnx.cursor() @@ -139,8 +139,8 @@ def 
test_synchronization(self): cu = cnx2.cursor() - assert cu.execute('Any X WHERE X eid %(x)s', {'x': self.aff1}, 'x') - cu.execute('SET X ref "BLAH" WHERE X eid %(x)s', {'x': self.aff1}, 'x') + assert cu.execute('Any X WHERE X eid %(x)s', {'x': self.aff1}) + cu.execute('SET X ref "BLAH" WHERE X eid %(x)s', {'x': self.aff1}) aff2 = cu.execute('INSERT Affaire X: X ref "AFFREUX"')[0][0] cnx2.commit() try: @@ -155,20 +155,20 @@ self.failIf(rset) finally: # restore state - cu.execute('SET X ref "AFFREF" WHERE X eid %(x)s', {'x': self.aff1}, 'x') + cu.execute('SET X ref "AFFREF" WHERE X eid %(x)s', {'x': self.aff1}) cnx2.commit() def test_simplifiable_var(self): affeid = self.sexecute('Affaire X WHERE X ref "AFFREF"')[0][0] rset = self.sexecute('Any X,AA,AB WHERE E eid %(x)s, E in_state X, X name AA, X modification_date AB', - {'x': affeid}, 'x') + {'x': affeid}) self.assertEquals(len(rset), 1) self.assertEquals(rset[0][1], "pitetre") def test_simplifiable_var_2(self): affeid = self.sexecute('Affaire X WHERE X ref "AFFREF"')[0][0] rset = self.sexecute('Any E WHERE E eid %(x)s, E in_state S, NOT S name "moved"', - {'x': affeid, 'u': self.session.user.eid}, 'x') + {'x': affeid, 'u': self.session.user.eid}) self.assertEquals(len(rset), 1) def test_sort_func(self): @@ -216,7 +216,7 @@ rset = self.sexecute('Any X,Y WHERE X is Card, Y is Affaire, X title T, Y ref T') self.assertEquals(len(rset), 2, rset.rows) finally: - cu.execute('DELETE Card X WHERE X eid %(x)s', {'x': ec2}, 'x') + cu.execute('DELETE Card X WHERE X eid %(x)s', {'x': ec2}) cnx2.commit() def test_attr_unification_neq_1(self): @@ -258,15 +258,15 @@ userstate = self.session.user.in_state[0] states.remove((userstate.eid, userstate.name)) notstates = set(tuple(x) for x in self.sexecute('Any S,SN WHERE S is State, S name SN, NOT X in_state S, X eid %(x)s', - {'x': self.session.user.eid}, 'x')) + {'x': self.session.user.eid})) self.assertSetEquals(notstates, states) aff1 = self.sexecute('Any X WHERE X is Affaire, X 
ref "AFFREF"')[0][0] - aff1stateeid, aff1statename = self.sexecute('Any S,SN WHERE X eid %(x)s, X in_state S, S name SN', {'x': aff1}, 'x')[0] + aff1stateeid, aff1statename = self.sexecute('Any S,SN WHERE X eid %(x)s, X in_state S, S name SN', {'x': aff1})[0] self.assertEquals(aff1statename, 'pitetre') states.add((userstate.eid, userstate.name)) states.remove((aff1stateeid, aff1statename)) notstates = set(tuple(x) for x in self.sexecute('Any S,SN WHERE S is State, S name SN, NOT X in_state S, X eid %(x)s', - {'x': aff1}, 'x')) + {'x': aff1})) self.assertSetEquals(notstates, states) def test_absolute_url_base_url(self): diff -r f3c2cb460ad9 -r d14bfd477c44 server/test/unittest_querier.py --- a/server/test/unittest_querier.py Thu Apr 08 12:42:47 2010 +0200 +++ b/server/test/unittest_querier.py Thu Apr 08 14:11:49 2010 +0200 @@ -220,13 +220,13 @@ def test_typed_eid(self): # should return an empty result set - rset = self.execute('Any X WHERE X eid %(x)s', {'x': '1'}, 'x') + rset = self.execute('Any X WHERE X eid %(x)s', {'x': '1'}) self.assertIsInstance(rset[0][0], (int, long)) def test_bytes_storage(self): feid = self.execute('INSERT File X: X data_name "foo.pdf", X data_format "text/plain", X data %(data)s', {'data': Binary("xxx")})[0][0] - fdata = self.execute('Any D WHERE X data D, X eid %(x)s', {'x': feid}, 'x')[0][0] + fdata = self.execute('Any D WHERE X data D, X eid %(x)s', {'x': feid})[0][0] self.assertIsInstance(fdata, Binary) self.assertEquals(fdata.getvalue(), 'xxx') @@ -356,17 +356,17 @@ def test_select_outer_join_optimized(self): peid1 = self.execute("INSERT Personne X: X nom 'bidule'")[0][0] - rset = self.execute('Any X WHERE X eid %(x)s, P? connait X', {'x':peid1}, 'x') + rset = self.execute('Any X WHERE X eid %(x)s, P? 
connait X', {'x':peid1}) self.assertEquals(rset.rows, [[peid1]]) rset = self.execute('Any X WHERE X eid %(x)s, X require_permission P?', - {'x':peid1}, 'x') + {'x':peid1}) self.assertEquals(rset.rows, [[peid1]]) def test_select_left_outer_join(self): rset = self.execute('DISTINCT Any G WHERE U? in_group G') self.assertEquals(len(rset), 4) rset = self.execute('DISTINCT Any G WHERE U? in_group G, U eid %(x)s', - {'x': self.session.user.eid}, 'x') + {'x': self.session.user.eid}) self.assertEquals(len(rset), 4) def test_select_ambigous_outer_join(self): @@ -374,7 +374,7 @@ self.execute("INSERT Tag X: X name 'tagbis'")[0][0] geid = self.execute("CWGroup G WHERE G name 'users'")[0][0] self.execute("SET X tags Y WHERE X eid %(t)s, Y eid %(g)s", - {'g': geid, 't': teid}, 'g') + {'g': geid, 't': teid}) rset = self.execute("Any GN,TN ORDERBY GN WHERE T? tags G, T name TN, G name GN") self.failUnless(['users', 'tag'] in rset.rows) self.failUnless(['activated', None] in rset.rows) @@ -882,7 +882,7 @@ def test_insert_5bis(self): peid = self.execute("INSERT Personne X: X nom 'bidule'")[0][0] self.execute("INSERT Societe Y: Y nom 'toto', X travaille Y WHERE X eid %(x)s", - {'x': peid}, 'x') + {'x': peid}) rset = self.execute('Any X, Y WHERE X nom "bidule", Y nom "toto", X travaille Y') self.assert_(rset.rows) self.assertEquals(rset.description, [('Personne', 'Societe',)]) @@ -1000,17 +1000,17 @@ eid = self.execute("INSERT Folder T: T name 'toto'")[0][0] self.commit() # fill the cache - self.execute("Any X WHERE X eid %(x)s", {'x': eid}, 'x') + self.execute("Any X WHERE X eid %(x)s", {'x': eid}) self.execute("Any X WHERE X eid %s" %eid) - self.execute("Folder X WHERE X eid %(x)s", {'x': eid}, 'x') + self.execute("Folder X WHERE X eid %(x)s", {'x': eid}) self.execute("Folder X WHERE X eid %s" %eid) self.execute("DELETE Folder T WHERE T eid %s"%eid) self.commit() - rset = self.execute("Any X WHERE X eid %(x)s", {'x': eid}, 'x') + rset = self.execute("Any X WHERE X eid %(x)s", {'x': 
eid}) self.assertEquals(rset.rows, []) rset = self.execute("Any X WHERE X eid %s" %eid) self.assertEquals(rset.rows, []) - rset = self.execute("Folder X WHERE X eid %(x)s", {'x': eid}, 'x') + rset = self.execute("Folder X WHERE X eid %(x)s", {'x': eid}) self.assertEquals(rset.rows, []) rset = self.execute("Folder X WHERE X eid %s" %eid) self.assertEquals(rset.rows, []) @@ -1086,7 +1086,7 @@ def test_update_string_concat(self): beid = self.execute("INSERT Bookmark Y: Y title 'toto', Y path '/view'")[0][0] self.execute('SET X title XN + %(suffix)s WHERE X is Bookmark, X title XN', {'suffix': u'-moved'}) - newname = self.execute('Any XN WHERE X eid %(x)s, X title XN', {'x': beid}, 'x')[0][0] + newname = self.execute('Any XN WHERE X eid %(x)s, X title XN', {'x': beid})[0][0] self.assertEquals(newname, 'toto-moved') def test_update_query_error(self): @@ -1203,7 +1203,7 @@ 'creation_date': '2000/07/03 11:00'}) rset = self.execute('Any lower(N) ORDERBY LOWER(N) WHERE X is Tag, X name N,' 'X owned_by U, U eid %(x)s', - {'x':self.session.user.eid}, 'x') + {'x':self.session.user.eid}) self.assertEquals(rset.rows, [[u'\xe9name0']]) @@ -1286,7 +1286,7 @@ ueid = self.execute("INSERT CWUser X: X login 'bob', X upassword 'toto'")[0][0] self.execute("SET E in_group G, E firstname %(firstname)s, E surname %(surname)s " "WHERE E eid %(x)s, G name 'users'", - {'x':ueid, 'firstname': u'jean', 'surname': u'paul'}, 'x') + {'x':ueid, 'firstname': u'jean', 'surname': u'paul'}) def test_nonregr_u_owned_by_u(self): ueid = self.execute("INSERT CWUser X: X login 'bob', X upassword 'toto', X in_group G " diff -r f3c2cb460ad9 -r d14bfd477c44 server/test/unittest_repository.py --- a/server/test/unittest_repository.py Thu Apr 08 12:42:47 2010 +0200 +++ b/server/test/unittest_repository.py Thu Apr 08 14:11:49 2010 +0200 @@ -457,7 +457,7 @@ # our sqlite datetime adapter is ignore seconds fraction, so we have to # ensure update is done the next seconds time.sleep(1 - (ts.second - int(ts.second))) - 
self.execute('SET X nom "tata" WHERE X eid %(x)s', {'x': eidp}, 'x') + self.execute('SET X nom "tata" WHERE X eid %(x)s', {'x': eidp}) self.commit() self.assertEquals(len(self.execute('Personne X WHERE X has_text "tutu"')), 1) self.session.set_pool() diff -r f3c2cb460ad9 -r d14bfd477c44 server/test/unittest_rql2sql.py --- a/server/test/unittest_rql2sql.py Thu Apr 08 12:42:47 2010 +0200 +++ b/server/test/unittest_rql2sql.py Thu Apr 08 14:11:49 2010 +0200 @@ -1605,8 +1605,8 @@ class removeUnsusedSolutionsTC(TestCase): def test_invariant_not_varying(self): rqlst = mock_object(defined_vars={}) - rqlst.defined_vars['A'] = mock_object(scope=rqlst, stinfo={'optrelations':False}, _q_invariant=True) - rqlst.defined_vars['B'] = mock_object(scope=rqlst, stinfo={'optrelations':False}, _q_invariant=False) + rqlst.defined_vars['A'] = mock_object(scope=rqlst, stinfo={}, _q_invariant=True) + rqlst.defined_vars['B'] = mock_object(scope=rqlst, stinfo={}, _q_invariant=False) self.assertEquals(remove_unused_solutions(rqlst, [{'A': 'RugbyGroup', 'B': 'RugbyTeam'}, {'A': 'FootGroup', 'B': 'FootTeam'}], {}, None), ([{'A': 'RugbyGroup', 'B': 'RugbyTeam'}, @@ -1616,8 +1616,8 @@ def test_invariant_varying(self): rqlst = mock_object(defined_vars={}) - rqlst.defined_vars['A'] = mock_object(scope=rqlst, stinfo={'optrelations':False}, _q_invariant=True) - rqlst.defined_vars['B'] = mock_object(scope=rqlst, stinfo={'optrelations':False}, _q_invariant=False) + rqlst.defined_vars['A'] = mock_object(scope=rqlst, stinfo={}, _q_invariant=True) + rqlst.defined_vars['B'] = mock_object(scope=rqlst, stinfo={}, _q_invariant=False) self.assertEquals(remove_unused_solutions(rqlst, [{'A': 'RugbyGroup', 'B': 'RugbyTeam'}, {'A': 'FootGroup', 'B': 'RugbyTeam'}], {}, None), ([{'A': 'RugbyGroup', 'B': 'RugbyTeam'}], {}, set()) diff -r f3c2cb460ad9 -r d14bfd477c44 server/test/unittest_rqlannotation.py --- a/server/test/unittest_rqlannotation.py Thu Apr 08 12:42:47 2010 +0200 +++ 
b/server/test/unittest_rqlannotation.py Thu Apr 08 14:11:49 2010 +0200 @@ -100,6 +100,12 @@ self.assertEquals(rqlst.defined_vars['X']._q_invariant, False) self.assertEquals(rqlst.defined_vars['Y']._q_invariant, False) + def test_8(self): + # DISTINCT Any P WHERE P require_group %(g)s, NOT %(u)s has_group_permission P, P is CWPermission + rqlst = self._prepare('DISTINCT Any X WHERE A concerne X, NOT N migrated_from X, ' + 'X is Note, N eid 1') + self.assertEquals(rqlst.defined_vars['X']._q_invariant, False) + def test_diff_scope_identity_deamb(self): rqlst = self._prepare('Any X WHERE X concerne Y, Y is Note, EXISTS(Y identity Z, Z migrated_from N)') self.assertEquals(rqlst.defined_vars['Z']._q_invariant, True) diff -r f3c2cb460ad9 -r d14bfd477c44 server/test/unittest_security.py --- a/server/test/unittest_security.py Thu Apr 08 12:42:47 2010 +0200 +++ b/server/test/unittest_security.py Thu Apr 08 14:11:49 2010 +0200 @@ -197,7 +197,7 @@ # to actually get Unauthorized exception, try to delete a relation we can read self.restore_connection() eid = self.execute("INSERT Affaire X: X sujet 'pascool'")[0][0] - self.execute('SET X owned_by U WHERE X eid %(x)s, U login "iaminusersgrouponly"', {'x': eid}, 'x') + self.execute('SET X owned_by U WHERE X eid %(x)s, U login "iaminusersgrouponly"', {'x': eid}) self.execute("SET A concerne S WHERE A sujet 'pascool', S is Societe") self.commit() cnx = self.login('iaminusersgrouponly') @@ -214,7 +214,7 @@ cnx = self.login('user') cu = cnx.cursor() cu.execute('SET X upassword %(passwd)s WHERE X eid %(x)s', - {'x': ueid, 'passwd': 'newpwd'}, 'x') + {'x': ueid, 'passwd': 'newpwd'}) cnx.commit() cnx.close() cnx = self.login('user', password='newpwd') @@ -224,7 +224,7 @@ cnx = self.login('iaminusersgrouponly') cu = cnx.cursor() cu.execute('SET X upassword %(passwd)s WHERE X eid %(x)s', - {'x': ueid, 'passwd': 'newpwd'}, 'x') + {'x': ueid, 'passwd': 'newpwd'}) self.assertRaises(Unauthorized, cnx.commit) # read security test @@ -243,22 
+243,22 @@ cu = cnx.cursor() rset = cu.execute('Affaire X') self.assertEquals(rset.rows, []) - self.assertRaises(Unauthorized, cu.execute, 'Any X WHERE X eid %(x)s', {'x': eid}, 'x') + self.assertRaises(Unauthorized, cu.execute, 'Any X WHERE X eid %(x)s', {'x': eid}) # cache test - self.assertRaises(Unauthorized, cu.execute, 'Any X WHERE X eid %(x)s', {'x': eid}, 'x') + self.assertRaises(Unauthorized, cu.execute, 'Any X WHERE X eid %(x)s', {'x': eid}) aff2 = cu.execute("INSERT Affaire X: X sujet 'cool'")[0][0] soc1 = cu.execute("INSERT Societe X: X nom 'chouette'")[0][0] cu.execute("SET A concerne S WHERE A is Affaire, S is Societe") cnx.commit() - rset = cu.execute('Any X WHERE X eid %(x)s', {'x': aff2}, 'x') + rset = cu.execute('Any X WHERE X eid %(x)s', {'x': aff2}) self.assertEquals(rset.rows, [[aff2]]) # more cache test w/ NOT eid - rset = cu.execute('Affaire X WHERE NOT X eid %(x)s', {'x': eid}, 'x') + rset = cu.execute('Affaire X WHERE NOT X eid %(x)s', {'x': eid}) self.assertEquals(rset.rows, [[aff2]]) - rset = cu.execute('Affaire X WHERE NOT X eid %(x)s', {'x': aff2}, 'x') + rset = cu.execute('Affaire X WHERE NOT X eid %(x)s', {'x': aff2}) self.assertEquals(rset.rows, []) # test can't update an attribute of an entity that can't be readen - self.assertRaises(Unauthorized, cu.execute, 'SET X sujet "hacked" WHERE X eid %(x)s', {'x': eid}, 'x') + self.assertRaises(Unauthorized, cu.execute, 'SET X sujet "hacked" WHERE X eid %(x)s', {'x': eid}) def test_entity_created_in_transaction(self): @@ -270,7 +270,7 @@ cu = cnx.cursor() aff2 = cu.execute("INSERT Affaire X: X sujet 'cool'")[0][0] # entity created in transaction are readable *by eid* - self.failUnless(cu.execute('Any X WHERE X eid %(x)s', {'x':aff2}, 'x')) + self.failUnless(cu.execute('Any X WHERE X eid %(x)s', {'x':aff2})) # XXX would be nice if it worked rset = cu.execute("Affaire X WHERE X sujet 'cool'") self.assertEquals(len(rset), 0) @@ -281,18 +281,17 @@ def test_read_erqlexpr_has_text1(self): aff1 = 
self.execute("INSERT Affaire X: X sujet 'cool'")[0][0] card1 = self.execute("INSERT Card X: X title 'cool'")[0][0] - self.execute('SET X owned_by U WHERE X eid %(x)s, U login "iaminusersgrouponly"', {'x': card1}, 'x') + self.execute('SET X owned_by U WHERE X eid %(x)s, U login "iaminusersgrouponly"', {'x': card1}) self.commit() cnx = self.login('iaminusersgrouponly') cu = cnx.cursor() aff2 = cu.execute("INSERT Affaire X: X sujet 'cool'")[0][0] soc1 = cu.execute("INSERT Societe X: X nom 'chouette'")[0][0] - cu.execute("SET A concerne S WHERE A eid %(a)s, S eid %(s)s", {'a': aff2, 's': soc1}, - ('a', 's')) + cu.execute("SET A concerne S WHERE A eid %(a)s, S eid %(s)s", {'a': aff2, 's': soc1}) cnx.commit() - self.assertRaises(Unauthorized, cu.execute, 'Any X WHERE X eid %(x)s', {'x':aff1}, 'x') - self.failUnless(cu.execute('Any X WHERE X eid %(x)s', {'x':aff2}, 'x')) - self.failUnless(cu.execute('Any X WHERE X eid %(x)s', {'x':card1}, 'x')) + self.assertRaises(Unauthorized, cu.execute, 'Any X WHERE X eid %(x)s', {'x':aff1}) + self.failUnless(cu.execute('Any X WHERE X eid %(x)s', {'x':aff2})) + self.failUnless(cu.execute('Any X WHERE X eid %(x)s', {'x':card1})) rset = cu.execute("Any X WHERE X has_text 'cool'") self.assertEquals(sorted(eid for eid, in rset.rows), [card1, aff2]) @@ -347,7 +346,7 @@ # only managers should be able to edit the 'test' attribute of Personne entities eid = self.execute("INSERT Personne X: X nom 'bidule', X web 'http://www.debian.org', X test TRUE")[0][0] self.commit() - self.execute('SET X test FALSE WHERE X eid %(x)s', {'x': eid}, 'x') + self.execute('SET X test FALSE WHERE X eid %(x)s', {'x': eid}) self.commit() cnx = self.login('iaminusersgrouponly') cu = cnx.cursor() @@ -357,11 +356,11 @@ self.assertRaises(Unauthorized, cnx.commit) eid = cu.execute("INSERT Personne X: X nom 'bidule', X web 'http://www.debian.org'")[0][0] cnx.commit() - cu.execute('SET X test FALSE WHERE X eid %(x)s', {'x': eid}, 'x') + cu.execute('SET X test FALSE WHERE X 
eid %(x)s', {'x': eid}) self.assertRaises(Unauthorized, cnx.commit) - cu.execute('SET X test TRUE WHERE X eid %(x)s', {'x': eid}, 'x') + cu.execute('SET X test TRUE WHERE X eid %(x)s', {'x': eid}) self.assertRaises(Unauthorized, cnx.commit) - cu.execute('SET X web "http://www.logilab.org" WHERE X eid %(x)s', {'x': eid}, 'x') + cu.execute('SET X web "http://www.logilab.org" WHERE X eid %(x)s', {'x': eid}) cnx.commit() cnx.close() @@ -370,23 +369,23 @@ note = self.execute("INSERT Note X: X para 'bidule'").get_entity(0, 0) self.commit() note.fire_transition('markasdone') - self.execute('SET X para "truc" WHERE X eid %(x)s', {'x': note.eid}, 'x') + self.execute('SET X para "truc" WHERE X eid %(x)s', {'x': note.eid}) self.commit() cnx = self.login('iaminusersgrouponly') cu = cnx.cursor() - cu.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note.eid}, 'x') + cu.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note.eid}) self.assertRaises(Unauthorized, cnx.commit) note2 = cu.execute("INSERT Note X: X para 'bidule'").get_entity(0, 0) cnx.commit() note2.fire_transition('markasdone') cnx.commit() - self.assertEquals(len(cu.execute('Any X WHERE X in_state S, S name "todo", X eid %(x)s', {'x': note2.eid}, 'x')), + self.assertEquals(len(cu.execute('Any X WHERE X in_state S, S name "todo", X eid %(x)s', {'x': note2.eid})), 0) - cu.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note2.eid}, 'x') + cu.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note2.eid}) self.assertRaises(Unauthorized, cnx.commit) note2.fire_transition('redoit') cnx.commit() - cu.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note2.eid}, 'x') + cu.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note2.eid}) cnx.commit() def test_attribute_read_security(self): @@ -447,13 +446,13 @@ # should only be able to read the anonymous user, not another one origuser = self.adminsession.user self.assertRaises(Unauthorized, - cu.execute, 'CWUser X WHERE X eid %(x)s', 
{'x': origuser.eid}, 'x') + cu.execute, 'CWUser X WHERE X eid %(x)s', {'x': origuser.eid}) # nothing selected, nothing updated, no exception raised #self.assertRaises(Unauthorized, # cu.execute, 'SET X login "toto" WHERE X eid %(x)s', # {'x': self.user.eid}) - rset = cu.execute('CWUser X WHERE X eid %(x)s', {'x': anon.eid}, 'x') + rset = cu.execute('CWUser X WHERE X eid %(x)s', {'x': anon.eid}) self.assertEquals(rset.rows, [[anon.eid]]) # but can't modify it cu.execute('SET X login "toto" WHERE X eid %(x)s', {'x': anon.eid}) @@ -494,7 +493,7 @@ self.assertRaises(Unauthorized, cu.execute,'DELETE B bookmarked_by U') self.assertRaises(Unauthorized, cu.execute, 'SET B bookmarked_by U WHERE U eid %(x)s, B eid %(b)s', - {'x': anoneid, 'b': beid1}, 'x') + {'x': anoneid, 'b': beid1}) def test_ambigous_ordered(self): @@ -551,10 +550,10 @@ aff.clear_related_cache('wf_info_for', role='object') self.assertRaises(Unauthorized, self.execute, 'SET TI from_state S WHERE TI eid %(ti)s, S name "ben non"', - {'ti': trinfo.eid}, 'ti') + {'ti': trinfo.eid}) self.assertRaises(Unauthorized, self.execute, 'SET TI to_state S WHERE TI eid %(ti)s, S name "pitetre"', - {'ti': trinfo.eid}, 'ti') + {'ti': trinfo.eid}) if __name__ == '__main__': unittest_main() diff -r f3c2cb460ad9 -r d14bfd477c44 server/test/unittest_storage.py --- a/server/test/unittest_storage.py Thu Apr 08 12:42:47 2010 +0200 +++ b/server/test/unittest_storage.py Thu Apr 08 14:11:49 2010 +0200 @@ -131,14 +131,14 @@ ' (Any D, X WHERE X eid %(x)s, X data D)' ' UNION ' ' (Any D, X WHERE X eid %(x)s, X data D)' - ')', {'x': f1.eid}, 'x') + ')', {'x': f1.eid}) self.assertEquals(len(rset), 2) self.assertEquals(rset[0][0], f1.eid) self.assertEquals(rset[1][0], f1.eid) self.assertEquals(rset[0][1].getvalue(), 'the-data') self.assertEquals(rset[1][1].getvalue(), 'the-data') rset = self.execute('Any X,LENGTH(D) WHERE X eid %(x)s, X data D', - {'x': f1.eid}, 'x') + {'x': f1.eid}) self.assertEquals(len(rset), 1) 
self.assertEquals(rset[0][0], f1.eid) self.assertEquals(rset[0][1], len('the-data')) @@ -146,7 +146,7 @@ ' (Any D, X WHERE X eid %(x)s, X data D)' ' UNION ' ' (Any D, X WHERE X eid %(x)s, X data D)' - ')', {'x': f1.eid}, 'x') + ')', {'x': f1.eid}) self.assertEquals(len(rset), 2) self.assertEquals(rset[0][0], f1.eid) self.assertEquals(rset[1][0], f1.eid) @@ -154,7 +154,7 @@ self.assertEquals(rset[1][1], len('the-data')) ex = self.assertRaises(QueryError, self.execute, 'Any X,UPPER(D) WHERE X eid %(x)s, X data D', - {'x': f1.eid}, 'x') + {'x': f1.eid}) self.assertEquals(str(ex), 'UPPER can not be called on mapped attribute') @@ -177,7 +177,7 @@ {'d': Binary('some other data'), 'f': f1.eid}) self.assertEquals(f1.data.getvalue(), 'some other data') self.commit() - f2 = self.entity('Any F WHERE F eid %(f)s, F is File', {'f': f1.eid}) + f2 = self.execute('Any F WHERE F eid %(f)s, F is File', {'f': f1.eid}).get_entity(0, 0) self.assertEquals(f2.data.getvalue(), 'some other data') diff -r f3c2cb460ad9 -r d14bfd477c44 server/test/unittest_undo.py --- a/server/test/unittest_undo.py Thu Apr 08 12:42:47 2010 +0200 +++ b/server/test/unittest_undo.py Thu Apr 08 14:11:49 2010 +0200 @@ -144,8 +144,8 @@ undotxuuid = self.commit() self.assertEquals(undotxuuid, None) # undo not undoable self.assertEquals(errors, []) - self.failUnless(self.execute('Any X WHERE X eid %(x)s', {'x': toto.eid}, 'x')) - self.failUnless(self.execute('Any X WHERE X eid %(x)s', {'x': e.eid}, 'x')) + self.failUnless(self.execute('Any X WHERE X eid %(x)s', {'x': toto.eid})) + self.failUnless(self.execute('Any X WHERE X eid %(x)s', {'x': e.eid})) self.failUnless(self.execute('Any X WHERE X has_text "toto@logilab"')) self.assertEquals(toto.state, 'activated') self.assertEquals(toto.get_email(), 'toto@logilab.org') @@ -216,8 +216,8 @@ errors = self.cnx.undo_transaction(txuuid) self.commit() self.failIf(errors) - self.failIf(self.execute('Any X WHERE X eid %(x)s', {'x': c.eid}, 'x')) - self.failIf(self.execute('Any 
X WHERE X eid %(x)s', {'x': p.eid}, 'x')) + self.failIf(self.execute('Any X WHERE X eid %(x)s', {'x': c.eid})) + self.failIf(self.execute('Any X WHERE X eid %(x)s', {'x': p.eid})) self.failIf(self.execute('Any X,Y WHERE X fiche Y')) self.session.set_pool() for eid in (p.eid, c.eid): diff -r f3c2cb460ad9 -r d14bfd477c44 setup.py --- a/setup.py Thu Apr 08 12:42:47 2010 +0200 +++ b/setup.py Thu Apr 08 14:11:49 2010 +0200 @@ -24,38 +24,44 @@ import os import sys import shutil -from distutils.core import setup -from distutils.command import install_lib from os.path import isdir, exists, join, walk +try: + if os.environ.get('NO_SETUPTOOLS'): + raise ImportError() # do as there is no setuptools + from setuptools import setup + from setuptools.command import install_lib + USE_SETUPTOOLS = True +except ImportError: + from distutils.core import setup + from distutils.command import install_lib + USE_SETUPTOOLS = False + # import required features -from __pkginfo__ import modname, version, license, short_desc, long_desc, \ - web, author, author_email +from __pkginfo__ import modname, version, license, description, web, \ + author, author_email + +if exists('README'): + long_description = file('README').read() + # import optional features -try: - from __pkginfo__ import distname -except ImportError: - distname = modname -try: - from __pkginfo__ import scripts -except ImportError: - scripts = [] -try: - from __pkginfo__ import data_files -except ImportError: - data_files = None -try: - from __pkginfo__ import subpackage_of -except ImportError: - subpackage_of = None -try: - from __pkginfo__ import include_dirs -except ImportError: - include_dirs = [] -try: - from __pkginfo__ import ext_modules -except ImportError: - ext_modules = None +import __pkginfo__ +if USE_SETUPTOOLS: + requires = {} + for entry in ("__depends__", "__recommends__"): + requires.update(getattr(__pkginfo__, entry, {})) + install_requires = [("%s %s" % (d, v and v or "")).strip() + for d, v in 
requires.iteritems()] +else: + install_requires = [] + +distname = getattr(__pkginfo__, 'distname', modname) +scripts = getattr(__pkginfo__, 'scripts', ()) +include_dirs = getattr(__pkginfo__, 'include_dirs', ()) +data_files = getattr(__pkginfo__, 'data_files', None) +subpackage_of = getattr(__pkginfo__, 'subpackage_of', None) +ext_modules = getattr(__pkginfo__, 'ext_modules', None) + BASE_BLACKLIST = ('CVS', 'debian', 'dist', 'build', '__buildlog') IGNORED_EXTENSIONS = ('.pyc', '.pyo', '.elc') @@ -92,7 +98,8 @@ def export(from_dir, to_dir, blacklist=BASE_BLACKLIST, - ignore_ext=IGNORED_EXTENSIONS): + ignore_ext=IGNORED_EXTENSIONS, + verbose=True): """make a mirror of from_dir in to_dir, omitting directories and files listed in the black list """ @@ -111,7 +118,8 @@ continue src = '%s/%s' % (directory, filename) dest = to_dir + src[len(from_dir):] - print >> sys.stderr, src, '->', dest + if verbose: + print >> sys.stderr, src, '->', dest if os.path.isdir(src): if not exists(dest): os.mkdir(dest) @@ -154,29 +162,31 @@ base = modname for directory in include_dirs: dest = join(self.install_dir, base, directory) - export(directory, dest) + export(directory, dest, verbose=False) def install(**kwargs): """setup entry point""" + if not USE_SETUPTOOLS and '--install-layout=deb' in sys.argv and \ + sys.versioninfo < (2, 5, 4): + sys.argv.remove('--install-layout=deb') + print "W: remove '--install-layout=deb' option" if subpackage_of: package = subpackage_of + '.' 
+ modname kwargs['package_dir'] = {package : '.'} packages = [package] + get_packages(os.getcwd(), package) + if USE_SETUPTOOLS: + kwargs['namespace_packages'] = [subpackage_of] else: kwargs['package_dir'] = {modname : '.'} packages = [modname] + get_packages(os.getcwd(), modname) kwargs['packages'] = packages - return setup(name = distname, - version = version, - license =license, - description = short_desc, - long_description = long_desc, - author = author, - author_email = author_email, - url = web, - scripts = ensure_scripts(scripts), - data_files=data_files, + return setup(name=distname, version=version, license=license, url=web, + description=description, long_description=long_description, + author=author, author_email=author_email, + scripts=ensure_scripts(scripts), data_files=data_files, ext_modules=ext_modules, + install_requires=install_requires, + #dependency_links=["http://alain:alain@intranet.logilab.fr/~alain/"], cmdclass={'install_lib': MyInstallLib}, **kwargs ) diff -r f3c2cb460ad9 -r d14bfd477c44 skeleton/README.tmpl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/skeleton/README.tmpl Thu Apr 08 14:11:49 2010 +0200 @@ -0,0 +1,3 @@ +Summary +------- +%(longdesc)s diff -r f3c2cb460ad9 -r d14bfd477c44 skeleton/__pkginfo__.py.tmpl --- a/skeleton/__pkginfo__.py.tmpl Thu Apr 08 12:42:47 2010 +0200 +++ b/skeleton/__pkginfo__.py.tmpl Thu Apr 08 14:11:49 2010 +0200 @@ -7,15 +7,12 @@ numversion = (0, 1, 0) version = '.'.join(str(num) for num in numversion) -license = 'LCL' -copyright = '''Copyright (c) %(year)s %(author)s. 
-%(author-web-site)s -- mailto:%(author-email)s''' +license = '%(license)s' author = '%(author)s' author_email = '%(author-email)s' -short_desc = '%(shortdesc)s' -long_desc = '''%(longdesc)s''' +description = '%(shortdesc)s' web = 'http://www.cubicweb.org/project/%%s' %% distname @@ -43,8 +40,6 @@ # Note: here, you'll need to add subdirectories if you want # them to be included in the debian package -__depends_cubes__ = {} -__depends__ = {'cubicweb': '>= 3.6.0'} -__use__ = (%(dependancies)s) -__recommend__ = () +__depends__ = {'cubicweb': '>= %(version)s'} +__recommends__ = {} diff -r f3c2cb460ad9 -r d14bfd477c44 skeleton/data/cubes.CUBENAME.css --- a/skeleton/data/cubes.CUBENAME.css Thu Apr 08 12:42:47 2010 +0200 +++ b/skeleton/data/cubes.CUBENAME.css Thu Apr 08 14:11:49 2010 +0200 @@ -1,1 +1,1 @@ -/* template specific CSS */ +/* cube-specific CSS */ diff -r f3c2cb460ad9 -r d14bfd477c44 skeleton/data/cubes.CUBENAME.js --- a/skeleton/data/cubes.CUBENAME.js Thu Apr 08 12:42:47 2010 +0200 +++ b/skeleton/data/cubes.CUBENAME.js Thu Apr 08 14:11:49 2010 +0200 @@ -1,1 +1,1 @@ -// This contains template-specific javascript \ No newline at end of file +// This contains cube-specific javascript \ No newline at end of file diff -r f3c2cb460ad9 -r d14bfd477c44 skeleton/entities.py --- a/skeleton/entities.py Thu Apr 08 12:42:47 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,7 +0,0 @@ -"""this contains the cube-specific entities' classes - -:organization: Logilab -:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. 
-:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr -:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses -""" diff -r f3c2cb460ad9 -r d14bfd477c44 skeleton/entities.py.tmpl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/skeleton/entities.py.tmpl Thu Apr 08 14:11:49 2010 +0200 @@ -0,0 +1,7 @@ +"""%(distname)s entity's classes + +:organization: %(author)s +:copyright: %(year)s %(author)s +:contact: %(author-web-site)s -- mailto:%(author-email)s +:license: %(long-license)s +""" diff -r f3c2cb460ad9 -r d14bfd477c44 skeleton/hooks.py.tmpl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/skeleton/hooks.py.tmpl Thu Apr 08 14:11:49 2010 +0200 @@ -0,0 +1,7 @@ +"""%(distname)s specific hooks and operations + +:organization: %(author)s +:copyright: %(year)s %(author)s +:contact: %(author-web-site)s -- mailto:%(author-email)s +:license: %(long-license)s +""" diff -r f3c2cb460ad9 -r d14bfd477c44 skeleton/migration/postcreate.py --- a/skeleton/migration/postcreate.py Thu Apr 08 12:42:47 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,12 +0,0 @@ -# postcreate script. You could setup site properties or a workflow here for example -""" - -:organization: Logilab -:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. -:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr -:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses -""" - -# Example of site property change -#set_property('ui.site-title', "") - diff -r f3c2cb460ad9 -r d14bfd477c44 skeleton/migration/postcreate.py.tmpl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/skeleton/migration/postcreate.py.tmpl Thu Apr 08 14:11:49 2010 +0200 @@ -0,0 +1,14 @@ +"""%(distname)s postcreate script, executed at instance creation time or when +the cube is added to an existing instance. + +You could setup site properties or a workflow here for example. 
+ +:organization: %(author)s +:copyright: %(year)s %(author)s +:contact: %(author-web-site)s -- mailto:%(author-email)s +:license: %(long-license)s +""" + +# Example of site property change +#set_property('ui.site-title', "") + diff -r f3c2cb460ad9 -r d14bfd477c44 skeleton/migration/precreate.py --- a/skeleton/migration/precreate.py Thu Apr 08 12:42:47 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,10 +0,0 @@ -# Instructions here will be read before reading the schema -""" - -:organization: Logilab -:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. -:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr -:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses -""" -# You could create your own groups here, like in : -# create_entity('CWGroup', name=u'mygroup') diff -r f3c2cb460ad9 -r d14bfd477c44 skeleton/migration/precreate.py.tmpl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/skeleton/migration/precreate.py.tmpl Thu Apr 08 14:11:49 2010 +0200 @@ -0,0 +1,10 @@ +"""%(distname)s precreate script, executed at instance creation time or when +the cube is added to an existing instance, before the schema is serialized. + +This is typically to create groups referenced by the cube's schema. + +:organization: %(author)s +:copyright: %(year)s %(author)s +:contact: %(author-web-site)s -- mailto:%(author-email)s +:license: %(long-license)s +""" diff -r f3c2cb460ad9 -r d14bfd477c44 skeleton/schema.py --- a/skeleton/schema.py Thu Apr 08 12:42:47 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,8 +0,0 @@ -# cube's specific schema -""" - -:organization: Logilab -:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. 
-:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr -:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses -""" diff -r f3c2cb460ad9 -r d14bfd477c44 skeleton/schema.py.tmpl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/skeleton/schema.py.tmpl Thu Apr 08 14:11:49 2010 +0200 @@ -0,0 +1,7 @@ +"""%(distname)s schema + +:organization: %(author)s +:copyright: %(year)s %(author)s +:contact: %(author-web-site)s -- mailto:%(author-email)s +:license: %(long-license)s +""" diff -r f3c2cb460ad9 -r d14bfd477c44 skeleton/setup.py --- a/skeleton/setup.py Thu Apr 08 12:42:47 2010 +0200 +++ b/skeleton/setup.py Thu Apr 08 14:11:49 2010 +0200 @@ -1,14 +1,12 @@ #!/usr/bin/env python -""" +"""Generic Setup script, takes package info from __pkginfo__.py file :organization: Logilab :copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr :license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses """ -# pylint: disable-msg=W0404,W0622,W0704,W0613,W0152 -# Copyright (c) 2003-2010 LOGILAB S.A. (Paris, FRANCE). -# http://www.logilab.fr/ -- mailto:contact@logilab.fr +# pylint: disable-msg=W0142,W0403,W0404,W0613,W0622,W0622,W0704,R0904,C0103,E0611 # # This program is free software; you can redistribute it and/or modify it under # the terms of the GNU General Public License as published by the Free Software @@ -22,36 +20,178 @@ # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., # 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
-""" Generic Setup script, takes package info from __pkginfo__.py file """ + +import os +import sys +import shutil +from os.path import isdir, exists, join, walk -from distutils.core import setup +try: + if os.environ.get('NO_SETUPTOOLS'): + raise ImportError() # do as there is no setuptools + from setuptools import setup + from setuptools.command import install_lib + USE_SETUPTOOLS = True +except ImportError: + from distutils.core import setup + from distutils.command import install_lib + USE_SETUPTOOLS = False # import required features -from __pkginfo__ import distname, version, license, short_desc, long_desc, \ - web, author, author_email +from __pkginfo__ import modname, version, license, description, web, \ + author, author_email + +if exists('README'): + long_description = file('README').read() +else: + long_description = '' + # import optional features -try: - from __pkginfo__ import data_files -except ImportError: - data_files = None -try: - from __pkginfo__ import include_dirs -except ImportError: - include_dirs = [] +import __pkginfo__ +if USE_SETUPTOOLS: + requires = {} + for entry in ("__depends__", "__recommends__"): + requires.update(getattr(__pkginfo__, entry, {})) + install_requires = [("%s %s" % (d, v and v or "")).strip() + for d, v in requires.iteritems()] +else: + install_requires = [] + +distname = getattr(__pkginfo__, 'distname', modname) +scripts = getattr(__pkginfo__, 'scripts', ()) +include_dirs = getattr(__pkginfo__, 'include_dirs', ()) +data_files = getattr(__pkginfo__, 'data_files', None) +subpackage_of = getattr(__pkginfo__, 'subpackage_of', None) +ext_modules = getattr(__pkginfo__, 'ext_modules', None) + + +BASE_BLACKLIST = ('CVS', 'debian', 'dist', 'build', '__buildlog') +IGNORED_EXTENSIONS = ('.pyc', '.pyo', '.elc') + + +def ensure_scripts(linux_scripts): + """ + Creates the proper script names required for each platform + (taken from 4Suite) + """ + from distutils import util + if util.get_platform()[:3] == 'win': + scripts_ = 
[script + '.bat' for script in linux_scripts] + else: + scripts_ = linux_scripts + return scripts_ + + +def get_packages(directory, prefix): + """return a list of subpackages for the given directory + """ + result = [] + for package in os.listdir(directory): + absfile = join(directory, package) + if isdir(absfile): + if exists(join(absfile, '__init__.py')) or \ + package in ('test', 'tests'): + if prefix: + result.append('%s.%s' % (prefix, package)) + else: + result.append(package) + result += get_packages(absfile, result[-1]) + return result + +def export(from_dir, to_dir, + blacklist=BASE_BLACKLIST, + ignore_ext=IGNORED_EXTENSIONS, + verbose=True): + """make a mirror of from_dir in to_dir, omitting directories and files + listed in the black list + """ + def make_mirror(arg, directory, fnames): + """walk handler""" + for norecurs in blacklist: + try: + fnames.remove(norecurs) + except ValueError: + pass + for filename in fnames: + # don't include binary files + if filename[-4:] in ignore_ext: + continue + if filename[-1] == '~': + continue + src = '%s/%s' % (directory, filename) + dest = to_dir + src[len(from_dir):] + if verbose: + print >> sys.stderr, src, '->', dest + if os.path.isdir(src): + if not exists(dest): + os.mkdir(dest) + else: + if exists(dest): + os.remove(dest) + shutil.copy2(src, dest) + try: + os.mkdir(to_dir) + except OSError, ex: + # file exists ? 
+ import errno + if ex.errno != errno.EEXIST: + raise + walk(from_dir, make_mirror, None) + + +EMPTY_FILE = '"""generated file, don\'t modify or your data will be lost"""\n' + +class MyInstallLib(install_lib.install_lib): + """extend install_lib command to handle package __init__.py and + include_dirs variable if necessary + """ + def run(self): + """overridden from install_lib class""" + install_lib.install_lib.run(self) + # create Products.__init__.py if needed + if subpackage_of: + product_init = join(self.install_dir, subpackage_of, '__init__.py') + if not exists(product_init): + self.announce('creating %s' % product_init) + stream = open(product_init, 'w') + stream.write(EMPTY_FILE) + stream.close() + # manually install included directories if any + if include_dirs: + if subpackage_of: + base = join(subpackage_of, modname) + else: + base = modname + for directory in include_dirs: + dest = join(self.install_dir, base, directory) + export(directory, dest, verbose=False) def install(**kwargs): """setup entry point""" - #kwargs['distname'] = modname - return setup(name=distname, - version=version, - license=license, - description=short_desc, - long_description=long_desc, - author=author, - author_email=author_email, - url=web, - data_files=data_files, - **kwargs) + if not USE_SETUPTOOLS and '--install-layout=deb' in sys.argv and \ + sys.version_info < (2, 5, 4): + sys.argv.remove('--install-layout=deb') + print "W: remove '--install-layout=deb' option" + if subpackage_of: + package = subpackage_of + '.' 
+ modname + kwargs['package_dir'] = {package : '.'} + packages = [package] + get_packages(os.getcwd(), package) + if USE_SETUPTOOLS: + kwargs['namespace_packages'] = [subpackage_of] + else: + kwargs['package_dir'] = {modname : '.'} + packages = [modname] + get_packages(os.getcwd(), modname) + kwargs['packages'] = packages + return setup(name=distname, version=version, license=license, url=web, + description=description, long_description=long_description, + author=author, author_email=author_email, + scripts=ensure_scripts(scripts), data_files=data_files, + ext_modules=ext_modules, + install_requires=install_requires, + #dependency_links=["http://alain:alain@intranet.logilab.fr/~alain/"], + cmdclass={'install_lib': MyInstallLib}, + **kwargs + ) if __name__ == '__main__' : install() diff -r f3c2cb460ad9 -r d14bfd477c44 skeleton/sobjects.py.tmpl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/skeleton/sobjects.py.tmpl Thu Apr 08 14:11:49 2010 +0200 @@ -0,0 +1,7 @@ +"""%(distname)s repository side views, usually for notification + +:organization: %(author)s +:copyright: %(year)s %(author)s +:contact: %(author-web-site)s -- mailto:%(author-email)s +:license: %(long-license)s +""" diff -r f3c2cb460ad9 -r d14bfd477c44 skeleton/views.py --- a/skeleton/views.py Thu Apr 08 12:42:47 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,7 +0,0 @@ -"""cube-specific forms/views/actions/components - -:organization: Logilab -:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. 
-:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr -:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses -""" diff -r f3c2cb460ad9 -r d14bfd477c44 skeleton/views.py.tmpl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/skeleton/views.py.tmpl Thu Apr 08 14:11:49 2010 +0200 @@ -0,0 +1,7 @@ +"""%(distname)s views/forms/actions/components for web ui + +:organization: %(author)s +:copyright: %(year)s %(author)s +:contact: %(author-web-site)s -- mailto:%(author-email)s +:license: %(long-license)s +""" diff -r f3c2cb460ad9 -r d14bfd477c44 sobjects/test/unittest_supervising.py --- a/sobjects/test/unittest_supervising.py Thu Apr 08 12:42:47 2010 +0200 +++ b/sobjects/test/unittest_supervising.py Thu Apr 08 14:11:49 2010 +0200 @@ -30,9 +30,9 @@ # do some modification user = self.execute('INSERT CWUser X: X login "toto", X upassword "sosafe", X in_group G ' 'WHERE G name "users"').get_entity(0, 0) - self.execute('SET X last_login_time NOW WHERE X eid %(x)s', {'x': user.eid}, 'x') + self.execute('SET X last_login_time NOW WHERE X eid %(x)s', {'x': user.eid}) self.execute('DELETE Card B WHERE B title "une news !"') - self.execute('SET X bookmarked_by U WHERE X is Bookmark, U eid %(x)s', {'x': user.eid}, 'x') + self.execute('SET X bookmarked_by U WHERE X is Bookmark, U eid %(x)s', {'x': user.eid}) self.execute('SET X content "duh?" 
WHERE X is Comment') self.execute('DELETE X comments Y WHERE Y is Card, Y title "une autre news !"') # check only one supervision email operation @@ -91,7 +91,7 @@ def test_nonregr1(self): session = self.session # do some unlogged modification - self.execute('SET X last_login_time NOW WHERE X eid %(x)s', {'x': session.user.eid}, 'x') + self.execute('SET X last_login_time NOW WHERE X eid %(x)s', {'x': session.user.eid}) self.commit() # no crash diff -r f3c2cb460ad9 -r d14bfd477c44 test/data/cubes/file/__pkginfo__.py --- a/test/data/cubes/file/__pkginfo__.py Thu Apr 08 12:42:47 2010 +0200 +++ b/test/data/cubes/file/__pkginfo__.py Thu Apr 08 14:11:49 2010 +0200 @@ -13,48 +13,3 @@ numversion = (1, 4, 3) version = '.'.join(str(num) for num in numversion) -license = 'LGPL' -copyright = '''Copyright (c) 2003-2010 LOGILAB S.A. (Paris, FRANCE). -http://www.logilab.fr/ -- mailto:contact@logilab.fr''' - -author = "Logilab" -author_email = "contact@logilab.fr" -web = '' - -short_desc = "Raw file support for the CubicWeb framework" -long_desc = """CubicWeb is a entities / relations bases knowledge management system -developped at Logilab. -. -This package provides schema and views to store files and images in cubicweb -applications. -. 
-""" - -from os import listdir -from os.path import join - -CUBES_DIR = join('share', 'cubicweb', 'cubes') -try: - data_files = [ - [join(CUBES_DIR, 'file'), - [fname for fname in listdir('.') - if fname.endswith('.py') and fname != 'setup.py']], - [join(CUBES_DIR, 'file', 'data'), - [join('data', fname) for fname in listdir('data')]], - [join(CUBES_DIR, 'file', 'wdoc'), - [join('wdoc', fname) for fname in listdir('wdoc')]], - [join(CUBES_DIR, 'file', 'views'), - [join('views', fname) for fname in listdir('views') if fname.endswith('.py')]], - [join(CUBES_DIR, 'file', 'i18n'), - [join('i18n', fname) for fname in listdir('i18n')]], - [join(CUBES_DIR, 'file', 'migration'), - [join('migration', fname) for fname in listdir('migration')]], - ] -except OSError: - # we are in an installed directory - pass - - -cube_eid = 20320 -# used packages -__use__ = () diff -r f3c2cb460ad9 -r d14bfd477c44 test/unittest_cwconfig.py --- a/test/unittest_cwconfig.py Thu Apr 08 12:42:47 2010 +0200 +++ b/test/unittest_cwconfig.py Thu Apr 08 14:11:49 2010 +0200 @@ -7,13 +7,16 @@ """ import sys import os +import tempfile from os.path import dirname, join, abspath from logilab.common.modutils import cleanup_sys_modules -from logilab.common.testlib import TestCase, unittest_main +from logilab.common.testlib import (TestCase, unittest_main, + with_tempdir) from logilab.common.changelog import Version from cubicweb.devtools import ApptestConfiguration +from cubicweb.cwconfig import _find_prefix def unabsolutize(path): parts = path.split(os.sep) @@ -32,7 +35,7 @@ self.config._cubes = ('email', 'file') def tearDown(self): - os.environ.pop('CW_CUBES_PATH', None) + ApptestConfiguration.CUBES_PATH = [] def test_reorder_cubes(self): # jpl depends on email and file and comment @@ -52,7 +55,7 @@ def test_reorder_cubes_recommends(self): from cubes.comment import __pkginfo__ as comment_pkginfo - comment_pkginfo.__recommend__ = ('file',) + comment_pkginfo.__recommends_cubes__ = {'file': None} try: # email 
recommends comment # comment recommends file @@ -65,7 +68,7 @@ self.assertEquals(self.config.reorder_cubes(('comment', 'forge', 'email', 'file')), ('forge', 'email', 'comment', 'file')) finally: - comment_pkginfo.__use__ = () + comment_pkginfo.__recommends_cubes__ = {} # def test_vc_config(self): @@ -91,11 +94,11 @@ # make sure we don't import the email cube, but the stdlib email package import email self.assertNotEquals(dirname(email.__file__), self.config.CUBES_DIR) - os.environ['CW_CUBES_PATH'] = CUSTOM_CUBES_DIR + self.config.__class__.CUBES_PATH = [CUSTOM_CUBES_DIR] self.assertEquals(self.config.cubes_search_path(), [CUSTOM_CUBES_DIR, self.config.CUBES_DIR]) - os.environ['CW_CUBES_PATH'] = os.pathsep.join([ - CUSTOM_CUBES_DIR, self.config.CUBES_DIR, 'unexistant']) + self.config.__class__.CUBES_PATH = [CUSTOM_CUBES_DIR, + self.config.CUBES_DIR, 'unexistant'] # filter out unexistant and duplicates self.assertEquals(self.config.cubes_search_path(), [CUSTOM_CUBES_DIR, @@ -114,6 +117,91 @@ from cubes import file self.assertEquals(file.__path__, [join(CUSTOM_CUBES_DIR, 'file')]) +class FindPrefixTC(TestCase): + def make_dirs(self, *args): + path = join(tempfile.tempdir, *args) + if not os.path.exists(path): + os.makedirs(path) + return path + + def make_file(self, *args): + self.make_dirs(*args[: -1]) + file_path = join(tempfile.tempdir, *args) + file_obj = open(file_path, 'w') + file_obj.write('""" None """') + file_obj.close() + return file_path + + @with_tempdir + def test_samedir(self): + prefix = tempfile.tempdir + self.make_dirs('share', 'cubicweb') + self.assertEquals(_find_prefix(prefix), prefix) + + @with_tempdir + def test_samedir_filepath(self): + prefix = tempfile.tempdir + self.make_dirs('share', 'cubicweb') + file_path = self.make_file('bob.py') + self.assertEquals(_find_prefix(file_path), prefix) + + @with_tempdir + def test_dir_inside_prefix(self): + prefix = tempfile.tempdir + self.make_dirs('share', 'cubicweb') + dir_path = self.make_dirs('bob') + 
self.assertEquals(_find_prefix(dir_path), prefix) + + @with_tempdir + def test_file_in_dir_inside_prefix(self): + prefix = tempfile.tempdir + self.make_dirs('share', 'cubicweb') + file_path = self.make_file('bob', 'toto.py') + self.assertEquals(_find_prefix(file_path), prefix) + + @with_tempdir + def test_file_in_deeper_dir_inside_prefix(self): + prefix = tempfile.tempdir + self.make_dirs('share', 'cubicweb') + file_path = self.make_file('bob', 'pyves', 'alain', 'adim', 'syt', 'toto.py') + self.assertEquals(_find_prefix(file_path), prefix) + + @with_tempdir + def test_multiple_candidate_prefix(self): + self.make_dirs('share', 'cubicweb') + prefix = self.make_dirs('bob') + self.make_dirs('bob', 'share', 'cubicweb') + file_path = self.make_file('bob', 'pyves', 'alain', 'adim', 'syt', 'toto.py') + self.assertEquals(_find_prefix(file_path), prefix) + + @with_tempdir + def test_sister_candidate_prefix(self): + prefix = tempfile.tempdir + self.make_dirs('share', 'cubicweb') + self.make_dirs('bob', 'share', 'cubicweb') + file_path = self.make_file('bell', 'toto.py') + self.assertEquals(_find_prefix(file_path), prefix) + + @with_tempdir + def test_multiple_parent_candidate_prefix(self): + self.make_dirs('share', 'cubicweb') + prefix = self.make_dirs('share', 'cubicweb', 'bob') + self.make_dirs('share', 'cubicweb', 'bob', 'share', 'cubicweb') + file_path = self.make_file('share', 'cubicweb', 'bob', 'pyves', 'alain', 'adim', 'syt', 'toto.py') + self.assertEquals(_find_prefix(file_path), prefix) + + @with_tempdir + def test_upper_candidate_prefix(self): + prefix = tempfile.tempdir + self.make_dirs('share', 'cubicweb') + self.make_dirs('bell','bob', 'share', 'cubicweb') + file_path = self.make_file('bell', 'toto.py') + self.assertEquals(_find_prefix(file_path), prefix) + + @with_tempdir + def test_no_prefix(self): + prefix = tempfile.tempdir + self.assertEquals(_find_prefix(prefix), sys.prefix) if __name__ == '__main__': unittest_main() diff -r f3c2cb460ad9 -r d14bfd477c44 
test/unittest_cwctl.py --- a/test/unittest_cwctl.py Thu Apr 08 12:42:47 2010 +0200 +++ b/test/unittest_cwctl.py Thu Apr 08 14:11:49 2010 +0200 @@ -10,15 +10,8 @@ from cStringIO import StringIO from logilab.common.testlib import TestCase, unittest_main -if os.environ.get('APYCOT_ROOT'): - root = os.environ['APYCOT_ROOT'] - CUBES_DIR = '%s/local/share/cubicweb/cubes/' % root - os.environ['CW_CUBES_PATH'] = CUBES_DIR - REGISTRY_DIR = '%s/etc/cubicweb.d/' % root - os.environ['CW_INSTANCES_DIR'] = REGISTRY_DIR - from cubicweb.cwconfig import CubicWebConfiguration -CubicWebConfiguration.load_cwctl_plugins() +CubicWebConfiguration.load_cwctl_plugins() # XXX necessary? class CubicWebCtlTC(TestCase): def setUp(self): diff -r f3c2cb460ad9 -r d14bfd477c44 test/unittest_entity.py --- a/test/unittest_entity.py Thu Apr 08 12:42:47 2010 +0200 +++ b/test/unittest_entity.py Thu Apr 08 14:11:49 2010 +0200 @@ -44,8 +44,8 @@ p = req.create_entity('Personne', nom=u'toto') oe = req.create_entity('Note', type=u'x') self.execute('SET T ecrit_par U WHERE T eid %(t)s, U eid %(u)s', - {'t': oe.eid, 'u': p.eid}, ('t','u')) - self.execute('SET TAG tags X WHERE X eid %(x)s', {'x': oe.eid}, 'x') + {'t': oe.eid, 'u': p.eid}) + self.execute('SET TAG tags X WHERE X eid %(x)s', {'x': oe.eid}) e = req.create_entity('Note', type=u'z') e.copy_relations(oe.eid) self.assertEquals(len(e.ecrit_par), 1) @@ -60,7 +60,7 @@ oe = req.create_entity('Note', type=u'x') self.schema['ecrit_par'].rdef('Note', 'Personne').composite = 'subject' self.execute('SET T ecrit_par U WHERE T eid %(t)s, U eid %(u)s', - {'t': oe.eid, 'u': p.eid}, ('t','u')) + {'t': oe.eid, 'u': p.eid}) e = req.create_entity('Note', type=u'z') e.copy_relations(oe.eid) self.failIf(e.ecrit_par) @@ -69,12 +69,12 @@ def test_copy_with_composite(self): user = self.user() adeleid = self.execute('INSERT EmailAddress X: X address "toto@logilab.org", U use_email X WHERE U login "admin"')[0][0] - e = self.entity('Any X WHERE X eid %(x)s', {'x':user.eid}, 
'x') + e = self.execute('Any X WHERE X eid %(x)s', {'x': user.eid}).get_entity(0, 0) self.assertEquals(e.use_email[0].address, "toto@logilab.org") self.assertEquals(e.use_email[0].eid, adeleid) usereid = self.execute('INSERT CWUser X: X login "toto", X upassword "toto", X in_group G ' 'WHERE G name "users"')[0][0] - e = self.entity('Any X WHERE X eid %(x)s', {'x':usereid}, 'x') + e = self.execute('Any X WHERE X eid %(x)s', {'x': usereid}).get_entity(0, 0) e.copy_relations(user.eid) self.failIf(e.use_email) self.failIf(e.primary_email) @@ -87,14 +87,14 @@ user.fire_transition('deactivate') self.commit() eid2 = self.execute('INSERT CWUser X: X login "tutu", X upassword %(pwd)s', {'pwd': 'toto'})[0][0] - e = self.entity('Any X WHERE X eid %(x)s', {'x': eid2}, 'x') + e = self.execute('Any X WHERE X eid %(x)s', {'x': eid2}).get_entity(0, 0) e.copy_relations(user.eid) self.commit() e.clear_related_cache('in_state', 'subject') self.assertEquals(e.state, 'activated') def test_related_cache_both(self): - user = self.entity('Any X WHERE X eid %(x)s', {'x':self.user().eid}, 'x') + user = self.execute('Any X WHERE X eid %(x)s', {'x':self.user().eid}).get_entity(0, 0) adeleid = self.execute('INSERT EmailAddress X: X address "toto@logilab.org", U use_email X WHERE U login "admin"')[0][0] self.commit() self.assertEquals(user._related_cache, {}) @@ -235,7 +235,7 @@ #rql = email.unrelated_rql('use_email', 'Person', 'object')[0] #self.assertEquals(rql, '') self.login('anon') - email = self.execute('Any X WHERE X eid %(x)s', {'x': email.eid}, 'x').get_entity(0, 0) + email = self.execute('Any X WHERE X eid %(x)s', {'x': email.eid}).get_entity(0, 0) rql = email.unrelated_rql('use_email', 'CWUser', 'object')[0] self.assertEquals(rql, 'Any S,AA,AB,AC,AD ORDERBY AA ' 'WHERE NOT S use_email O, O eid %(x)s, S is CWUser, S login AA, S firstname AB, S surname AC, S modification_date AD, ' @@ -260,7 +260,7 @@ unrelated = [r[0] for r in e.unrelated('tags', 'Personne', 'subject')] 
self.failUnless(p.eid in unrelated) self.execute('SET X tags Y WHERE X is Tag, Y is Personne') - e = self.entity('Any X WHERE X is Tag') + e = self.execute('Any X WHERE X is Tag').get_entity(0, 0) unrelated = [r[0] for r in e.unrelated('tags', 'Personne', 'subject')] self.failIf(p.eid in unrelated) @@ -281,7 +281,7 @@ self.assertEquals([x.address for x in rset.entities()], [u'hop']) self.create_user('toto') self.login('toto') - email = self.execute('Any X WHERE X eid %(x)s', {'x': email.eid}, 'x').get_entity(0, 0) + email = self.execute('Any X WHERE X eid %(x)s', {'x': email.eid}).get_entity(0, 0) rset = email.unrelated('use_email', 'CWUser', 'object') self.assertEquals([x.login for x in rset.entities()], ['toto']) user = self.request().user @@ -291,7 +291,7 @@ rset = user.unrelated('use_email', 'EmailAddress', 'subject') self.assertEquals([x.address for x in rset.entities()], []) self.login('anon') - email = self.execute('Any X WHERE X eid %(x)s', {'x': email.eid}, 'x').get_entity(0, 0) + email = self.execute('Any X WHERE X eid %(x)s', {'x': email.eid}).get_entity(0, 0) rset = email.unrelated('use_email', 'CWUser', 'object') self.assertEquals([x.login for x in rset.entities()], []) user = self.request().user @@ -439,7 +439,7 @@ eid = session.execute( 'INSERT TrInfo X: X comment "zou", X wf_info_for U, X from_state S1, X to_state S2 ' 'WHERE U login "admin", S1 name "activated", S2 name "deactivated"')[0][0] - trinfo = self.entity('Any X WHERE X eid %(x)s', {'x': eid}, 'x') + trinfo = self.execute('Any X WHERE X eid %(x)s', {'x': eid}).get_entity(0, 0) trinfo.complete() self.failUnless(isinstance(trinfo['creation_date'], datetime)) self.failUnless(trinfo.relation_cached('from_state', 'subject')) @@ -449,9 +449,9 @@ def test_request_cache(self): req = self.request() - user = self.entity('CWUser X WHERE X login "admin"', req=req) + user = self.execute('CWUser X WHERE X login "admin"', req=req).get_entity(0, 0) state = user.in_state[0] - samestate = self.entity('State 
X WHERE X name "activated"', req=req) + samestate = self.execute('State X WHERE X name "activated"', req=req).get_entity(0, 0) self.failUnless(state is samestate) def test_rest_path(self): @@ -481,7 +481,7 @@ self.assertEquals(person.prenom, u'adrien') self.assertEquals(person.nom, u'di mascio') person.set_attributes(prenom=u'sylvain', nom=u'thénault') - person = self.entity('Personne P') # XXX retreival needed ? + person = self.execute('Personne P').get_entity(0, 0) # XXX retreival needed ? self.assertEquals(person.prenom, u'sylvain') self.assertEquals(person.nom, u'thénault') diff -r f3c2cb460ad9 -r d14bfd477c44 test/unittest_rset.py --- a/test/unittest_rset.py Thu Apr 08 12:42:47 2010 +0200 +++ b/test/unittest_rset.py Thu Apr 08 14:11:49 2010 +0200 @@ -212,7 +212,7 @@ def test_get_entity_simple(self): self.request().create_entity('CWUser', login=u'adim', upassword='adim', surname=u'di mascio', firstname=u'adrien') - e = self.entity('Any X,T WHERE X login "adim", X surname T') + e = self.execute('Any X,T WHERE X login "adim", X surname T').get_entity(0, 0) self.assertEquals(e['surname'], 'di mascio') self.assertRaises(KeyError, e.__getitem__, 'firstname') self.assertRaises(KeyError, e.__getitem__, 'creation_date') diff -r f3c2cb460ad9 -r d14bfd477c44 toolsutils.py --- a/toolsutils.py Thu Apr 08 12:42:47 2010 +0200 +++ b/toolsutils.py Thu Apr 08 14:11:49 2010 +0200 @@ -103,10 +103,9 @@ else: print 'no diff between %s and %s' % (appl_file, ref_file) - +SKEL_EXCLUDE = ('*.py[co]', '*.orig', '*~', '*_flymake.py') def copy_skeleton(skeldir, targetdir, context, - exclude=('*.py[co]', '*.orig', '*~', '*_flymake.py'), - askconfirm=False): + exclude=SKEL_EXCLUDE, askconfirm=False): import shutil from fnmatch import fnmatch skeldir = normpath(skeldir) @@ -184,7 +183,7 @@ config_file, ex) return config -def env_path(env_var, default, name): +def env_path(env_var, default, name, checkexists=True): """get a path specified in a variable or using the default value and return 
it. @@ -203,8 +202,8 @@ :raise `ConfigurationError`: if the returned path does not exist """ path = environ.get(env_var, default) - if not exists(path): - raise ConfigurationError('%s path %s doesn\'t exist' % (name, path)) + if checkexists and not exists(path): + raise ConfigurationError('%s directory %s doesn\'t exist' % (name, path)) return abspath(path) diff -r f3c2cb460ad9 -r d14bfd477c44 transaction.py --- a/transaction.py Thu Apr 08 12:42:47 2010 +0200 +++ b/transaction.py Thu Apr 08 14:11:49 2010 +0200 @@ -48,7 +48,7 @@ none if not found. """ return self.req.execute('Any X WHERE X eid %(x)s', - {'x': self.user_eid}, 'x').get_entity(0, 0) + {'x': self.user_eid}).get_entity(0, 0) def actions_list(self, public=True): """return an ordered list of action effectued during that transaction diff -r f3c2cb460ad9 -r d14bfd477c44 utils.py --- a/utils.py Thu Apr 08 12:42:47 2010 +0200 +++ b/utils.py Thu Apr 08 14:11:49 2010 +0200 @@ -309,35 +309,6 @@ self.body.getvalue()) -def _pdf_conversion_availability(): - try: - import pysixt - except ImportError: - return False - from subprocess import Popen, STDOUT - if not os.path.isfile('/usr/bin/fop'): - return False - try: - Popen(['/usr/bin/fop', '-q'], - stdout=open(os.devnull, 'w'), - stderr=STDOUT) - except OSError, e: - getLogger('cubicweb').info('fop not usable (%s)', e) - return False - return True - -def can_do_pdf_conversion(__answer_cache=[]): - """pdf conversion depends on - * pysixt (python package) - * fop 0.9x - - NOTE: actual check is done by _pdf_conversion_availability and - result is cached - """ - if not __answer_cache: # first time, not in cache - __answer_cache.append(_pdf_conversion_availability()) - return __answer_cache[0] - try: # may not be there if cubicweb-web not installed from simplejson import dumps, JSONEncoder diff -r f3c2cb460ad9 -r d14bfd477c44 web/application.py --- a/web/application.py Thu Apr 08 12:42:47 2010 +0200 +++ b/web/application.py Thu Apr 08 14:11:49 2010 +0200 @@ -190,7 +190,7 
@@ def _update_last_login_time(self, req): try: req.execute('SET X last_login_time NOW WHERE X eid %(x)s', - {'x' : req.user.eid}, 'x') + {'x' : req.user.eid}) req.cnx.commit() except (RepositoryError, Unauthorized): # ldap user are not writeable for instance diff -r f3c2cb460ad9 -r d14bfd477c44 web/component.py --- a/web/component.py Thu Apr 08 12:42:47 2010 +0200 +++ b/web/component.py Thu Apr 08 14:11:49 2010 +0200 @@ -169,7 +169,7 @@ rset = entity.related(self.rtype, role(self)) else: eid = self.cw_rset[row][col] - rset = self._cw.execute(self.rql(), {'x': eid}, 'x') + rset = self._cw.execute(self.rql(), {'x': eid}) if not rset.rowcount: return self.w(u'
' % self.div_class()) diff -r f3c2cb460ad9 -r d14bfd477c44 web/data/pdf_icon.gif Binary file web/data/pdf_icon.gif has changed diff -r f3c2cb460ad9 -r d14bfd477c44 web/facet.py --- a/web/facet.py Thu Apr 08 12:42:47 2010 +0200 +++ b/web/facet.py Thu Apr 08 14:11:49 2010 +0200 @@ -8,7 +8,6 @@ """ __docformat__ = "restructuredtext en" -from itertools import chain from copy import deepcopy from datetime import date, datetime, timedelta @@ -199,7 +198,7 @@ # add attribute variable to selection rqlst.add_selected(attrvar) # add is restriction if necessary - if not mainvar.stinfo['typerels']: + if mainvar.stinfo['typerel'] is None: etypes = frozenset(sol[mainvar.name] for sol in rqlst.solutions) rqlst.add_type_restriction(mainvar, etypes) return var @@ -228,12 +227,16 @@ for ovarname in linkedvars: vargraph[ovarname].remove(trvarname) # remove relation using this variable - for rel in chain(trvar.stinfo['relations'], trvar.stinfo['typerels']): + for rel in trvar.stinfo['relations']: if rel in removed: # already removed continue rqlst.remove_node(rel) removed.add(rel) + rel = trvar.stinfo['typerel'] + if rel is not None and not rel in removed: + rqlst.remove_node(rel) + removed.add(rel) # cleanup groupby clause if rqlst.groupby: for vref in rqlst.groupby[:]: @@ -329,9 +332,9 @@ def support_and(self): return False - def rqlexec(self, rql, args=None, cachekey=None): + def rqlexec(self, rql, args=None): try: - return self._cw.execute(rql, args, cachekey) + return self._cw.execute(rql, args) except Unauthorized: return [] @@ -372,7 +375,7 @@ if self.target_type is not None: rqlst.add_type_restriction(var, self.target_type) try: - rset = self.rqlexec(rqlst.as_string(), self.cw_rset.args, self.cw_rset.cachekey) + rset = self.rqlexec(rqlst.as_string(), self.cw_rset.args) except: self.exception('error while getting vocabulary for %s, rql: %s', self, rqlst.as_string()) @@ -461,7 +464,7 @@ newvar = _prepare_vocabulary_rqlst(rqlst, mainvar, self.rtype, self.role) _set_orderby(rqlst, 
newvar, self.sortasc, self.sortfunc) try: - rset = self.rqlexec(rqlst.as_string(), self.cw_rset.args, self.cw_rset.cachekey) + rset = self.rqlexec(rqlst.as_string(), self.cw_rset.args) except: self.exception('error while getting vocabulary for %s, rql: %s', self, rqlst.as_string()) diff -r f3c2cb460ad9 -r d14bfd477c44 web/formfields.py --- a/web/formfields.py Thu Apr 08 12:42:47 2010 +0200 +++ b/web/formfields.py Thu Apr 08 14:11:49 2010 +0200 @@ -589,8 +589,7 @@ # raise UnmodifiedField instead of returning None, since the later # will try to remove already attached file if any raise UnmodifiedField() - # skip browser submitted mime type - filename, _, stream = value + filename, stream = value # value is a 3-uple (filename, mimetype, stream) value = Binary(stream.read()) if not value.getvalue(): # usually an unexistant file diff -r f3c2cb460ad9 -r d14bfd477c44 web/http_headers.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/web/http_headers.py Thu Apr 08 14:11:49 2010 +0200 @@ -0,0 +1,1542 @@ +# This file has been extracted from the abandoned TwistedWeb2 project +# http://twistedmatrix.com/trac/wiki/TwistedWeb2 + + +from __future__ import generators + +import types, time +from calendar import timegm +import base64 +import re + +def dashCapitalize(s): + ''' Capitalize a string, making sure to treat - as a word seperator ''' + return '-'.join([ x.capitalize() for x in s.split('-')]) + +# datetime parsing and formatting +weekdayname = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'] +weekdayname_lower = [name.lower() for name in weekdayname] +monthname = [None, + 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', + 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'] +monthname_lower = [name and name.lower() for name in monthname] + +# HTTP Header parsing API + +header_case_mapping = {} + +def casemappingify(d): + global header_case_mapping + newd = dict([(key.lower(),key) for key in d.keys()]) + header_case_mapping.update(newd) + +def lowerify(d): + return 
dict([(key.lower(),value) for key,value in d.items()]) + + +class HeaderHandler(object): + """HeaderHandler manages header generating and parsing functions. + """ + HTTPParsers = {} + HTTPGenerators = {} + + def __init__(self, parsers=None, generators=None): + """ + @param parsers: A map of header names to parsing functions. + @type parsers: L{dict} + + @param generators: A map of header names to generating functions. + @type generators: L{dict} + """ + + if parsers: + self.HTTPParsers.update(parsers) + if generators: + self.HTTPGenerators.update(generators) + + def parse(self, name, header): + """ + Parse the given header based on its given name. + + @param name: The header name to parse. + @type name: C{str} + + @param header: A list of unparsed headers. + @type header: C{list} of C{str} + + @return: The return value is the parsed header representation, + it is dependent on the header. See the HTTP Headers document. + """ + parser = self.HTTPParsers.get(name, None) + if parser is None: + raise ValueError("No header parser for header '%s', either add one or use getHeaderRaw." % (name,)) + + try: + for p in parser: + # print "Parsing %s: %s(%s)" % (name, repr(p), repr(h)) + header = p(header) + # if isinstance(h, types.GeneratorType): + # h=list(h) + except ValueError,v: + # print v + header=None + + return header + + def generate(self, name, header): + """ + Generate the given header based on its given name. + + @param name: The header name to generate. + @type name: C{str} + + @param header: A parsed header, such as the output of + L{HeaderHandler}.parse. + + @return: C{list} of C{str} each representing a generated HTTP header. + """ + generator = self.HTTPGenerators.get(name, None) + + if generator is None: + # print self.generators + raise ValueError("No header generator for header '%s', either add one or use setHeaderRaw." 
% (name,)) + + for g in generator: + header = g(header) + + #self._raw_headers[name] = h + return header + + def updateParsers(self, parsers): + """Update en masse the parser maps. + + @param parsers: Map of header names to parser chains. + @type parsers: C{dict} + """ + casemappingify(parsers) + self.HTTPParsers.update(lowerify(parsers)) + + def addParser(self, name, value): + """Add an individual parser chain for the given header. + + @param name: Name of the header to add + @type name: C{str} + + @param value: The parser chain + @type value: C{str} + """ + self.updateParsers({name: value}) + + def updateGenerators(self, generators): + """Update en masse the generator maps. + + @param parsers: Map of header names to generator chains. + @type parsers: C{dict} + """ + casemappingify(generators) + self.HTTPGenerators.update(lowerify(generators)) + + def addGenerators(self, name, value): + """Add an individual generator chain for the given header. + + @param name: Name of the header to add + @type name: C{str} + + @param value: The generator chain + @type value: C{str} + """ + self.updateGenerators({name: value}) + + def update(self, parsers, generators): + """Conveniently update parsers and generators all at once. + """ + self.updateParsers(parsers) + self.updateGenerators(generators) + + +DefaultHTTPHandler = HeaderHandler() + + +## HTTP DateTime parser +def parseDateTime(dateString): + """Convert an HTTP date string (one of three formats) to seconds since epoch.""" + parts = dateString.split() + + if not parts[0][0:3].lower() in weekdayname_lower: + # Weekday is stupid. Might have been omitted. + try: + return parseDateTime("Sun, "+dateString) + except ValueError: + # Guess not. 
+ pass + + partlen = len(parts) + if (partlen == 5 or partlen == 6) and parts[1].isdigit(): + # 1st date format: Sun, 06 Nov 1994 08:49:37 GMT + # (Note: "GMT" is literal, not a variable timezone) + # (also handles without "GMT") + # This is the normal format + day = parts[1] + month = parts[2] + year = parts[3] + time = parts[4] + elif (partlen == 3 or partlen == 4) and parts[1].find('-') != -1: + # 2nd date format: Sunday, 06-Nov-94 08:49:37 GMT + # (Note: "GMT" is literal, not a variable timezone) + # (also handles without without "GMT") + # Two digit year, yucko. + day, month, year = parts[1].split('-') + time = parts[2] + year=int(year) + if year < 69: + year = year + 2000 + elif year < 100: + year = year + 1900 + elif len(parts) == 5: + # 3rd date format: Sun Nov 6 08:49:37 1994 + # ANSI C asctime() format. + day = parts[2] + month = parts[1] + year = parts[4] + time = parts[3] + else: + raise ValueError("Unknown datetime format %r" % dateString) + + day = int(day) + month = int(monthname_lower.index(month.lower())) + year = int(year) + hour, min, sec = map(int, time.split(':')) + return int(timegm((year, month, day, hour, min, sec))) + + +##### HTTP tokenizer +class Token(str): + __slots__=[] + tokens = {} + def __new__(self, char): + token = Token.tokens.get(char) + if token is None: + Token.tokens[char] = token = str.__new__(self, char) + return token + + def __repr__(self): + return "Token(%s)" % str.__repr__(self) + + +http_tokens = " \t\"()<>@,;:\\/[]?={}" +http_ctls = "\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x7f" + +def tokenize(header, foldCase=True): + """Tokenize a string according to normal HTTP header parsing rules. + + In particular: + - Whitespace is irrelevant and eaten next to special separator tokens. + Its existance (but not amount) is important between character strings. + - Quoted string support including embedded backslashes. 
+ - Case is insignificant (and thus lowercased), except in quoted strings. + (unless foldCase=False) + - Multiple headers are concatenated with ',' + + NOTE: not all headers can be parsed with this function. + + Takes a raw header value (list of strings), and + Returns a generator of strings and Token class instances. + """ + tokens=http_tokens + ctls=http_ctls + + string = ",".join(header) + list = [] + start = 0 + cur = 0 + quoted = False + qpair = False + inSpaces = -1 + qstring = None + + for x in string: + if quoted: + if qpair: + qpair = False + qstring = qstring+string[start:cur-1]+x + start = cur+1 + elif x == '\\': + qpair = True + elif x == '"': + quoted = False + yield qstring+string[start:cur] + qstring=None + start = cur+1 + elif x in tokens: + if start != cur: + if foldCase: + yield string[start:cur].lower() + else: + yield string[start:cur] + + start = cur+1 + if x == '"': + quoted = True + qstring = "" + inSpaces = False + elif x in " \t": + if inSpaces is False: + inSpaces = True + else: + inSpaces = -1 + yield Token(x) + elif x in ctls: + raise ValueError("Invalid control character: %d in header" % ord(x)) + else: + if inSpaces is True: + yield Token(' ') + inSpaces = False + + inSpaces = False + cur = cur+1 + + if qpair: + raise ValueError, "Missing character after '\\'" + if quoted: + raise ValueError, "Missing end quote" + + if start != cur: + if foldCase: + yield string[start:cur].lower() + else: + yield string[start:cur] + +def split(seq, delim): + """The same as str.split but works on arbitrary sequences. 
+ Too bad it's not builtin to python!""" + + cur = [] + for item in seq: + if item == delim: + yield cur + cur = [] + else: + cur.append(item) + yield cur + +# def find(seq, *args): +# """The same as seq.index but returns -1 if not found, instead +# Too bad it's not builtin to python!""" +# try: +# return seq.index(value, *args) +# except ValueError: +# return -1 + + +def filterTokens(seq): + """Filter out instances of Token, leaving only a list of strings. + + Used instead of a more specific parsing method (e.g. splitting on commas) + when only strings are expected, so as to be a little lenient. + + Apache does it this way and has some comments about broken clients which + forget commas (?), so I'm doing it the same way. It shouldn't + hurt anything, in any case. + """ + + l=[] + for x in seq: + if not isinstance(x, Token): + l.append(x) + return l + +##### parser utilities: +def checkSingleToken(tokens): + if len(tokens) != 1: + raise ValueError, "Expected single token, not %s." % (tokens,) + return tokens[0] + +def parseKeyValue(val): + if len(val) == 1: + return val[0],None + elif len(val) == 3 and val[1] == Token('='): + return val[0],val[2] + raise ValueError, "Expected key or key=value, but got %s." 
% (val,) + +def parseArgs(field): + args=split(field, Token(';')) + val = args.next() + args = [parseKeyValue(arg) for arg in args] + return val,args + +def listParser(fun): + """Return a function which applies 'fun' to every element in the + comma-separated list""" + def listParserHelper(tokens): + fields = split(tokens, Token(',')) + for field in fields: + if len(field) != 0: + yield fun(field) + + return listParserHelper + +def last(seq): + """Return seq[-1]""" + + return seq[-1] + +##### Generation utilities +def quoteString(s): + return '"%s"' % s.replace('\\', '\\\\').replace('"', '\\"') + +def listGenerator(fun): + """Return a function which applies 'fun' to every element in + the given list, then joins the result with generateList""" + def listGeneratorHelper(l): + return generateList([fun(e) for e in l]) + + return listGeneratorHelper + +def generateList(seq): + return ", ".join(seq) + +def singleHeader(item): + return [item] + +def generateKeyValues(kvs): + l = [] + # print kvs + for k,v in kvs: + if v is None: + l.append('%s' % k) + else: + l.append('%s=%s' % (k,v)) + return ";".join(l) + + +class MimeType(object): + def fromString(klass, mimeTypeString): + """Generate a MimeType object from the given string. 
+ + @param mimeTypeString: The mimetype to parse + + @return: L{MimeType} + """ + return DefaultHTTPHandler.parse('content-type', [mimeTypeString]) + + fromString = classmethod(fromString) + + def __init__(self, mediaType, mediaSubtype, params={}, **kwargs): + """ + @type mediaType: C{str} + + @type mediaSubtype: C{str} + + @type params: C{dict} + """ + self.mediaType = mediaType + self.mediaSubtype = mediaSubtype + self.params = dict(params) + + if kwargs: + self.params.update(kwargs) + + def __eq__(self, other): + if not isinstance(other, MimeType): return NotImplemented + return (self.mediaType == other.mediaType and + self.mediaSubtype == other.mediaSubtype and + self.params == other.params) + + def __ne__(self, other): + return not self.__eq__(other) + + def __repr__(self): + return "MimeType(%r, %r, %r)" % (self.mediaType, self.mediaSubtype, self.params) + + def __hash__(self): + return hash(self.mediaType)^hash(self.mediaSubtype)^hash(tuple(self.params.iteritems())) + +##### Specific header parsers. +def parseAccept(field): + type,args = parseArgs(field) + + if len(type) != 3 or type[1] != Token('/'): + raise ValueError, "MIME Type "+str(type)+" invalid." + + # okay, this spec is screwy. A 'q' parameter is used as the separator + # between MIME parameters and (as yet undefined) additional HTTP + # parameters. + + num = 0 + for arg in args: + if arg[0] == 'q': + mimeparams=tuple(args[0:num]) + params=args[num:] + break + num = num + 1 + else: + mimeparams=tuple(args) + params=[] + + # Default values for parameters: + qval = 1.0 + + # Parse accept parameters: + for param in params: + if param[0] =='q': + qval = float(param[1]) + else: + # Warn? ignored parameter. 
+ pass + + ret = MimeType(type[0],type[2],mimeparams),qval + return ret + +def parseAcceptQvalue(field): + type,args=parseArgs(field) + + type = checkSingleToken(type) + + qvalue = 1.0 # Default qvalue is 1 + for arg in args: + if arg[0] == 'q': + qvalue = float(arg[1]) + return type,qvalue + +def addDefaultCharset(charsets): + if charsets.get('*') is None and charsets.get('iso-8859-1') is None: + charsets['iso-8859-1'] = 1.0 + return charsets + +def addDefaultEncoding(encodings): + if encodings.get('*') is None and encodings.get('identity') is None: + # RFC doesn't specify a default value for identity, only that it + # "is acceptable" if not mentioned. Thus, give it a very low qvalue. + encodings['identity'] = .0001 + return encodings + + +def parseContentType(header): + # Case folding is disabled for this header, because of use of + # Content-Type: multipart/form-data; boundary=CaSeFuLsTuFf + # So, we need to explicitly .lower() the type/subtype and arg keys. + + type,args = parseArgs(header) + + if len(type) != 3 or type[1] != Token('/'): + raise ValueError, "MIME Type "+str(type)+" invalid." + + args = [(kv[0].lower(), kv[1]) for kv in args] + + return MimeType(type[0].lower(), type[2].lower(), tuple(args)) + +def parseContentMD5(header): + try: + return base64.decodestring(header) + except Exception,e: + raise ValueError(e) + +def parseContentRange(header): + """Parse a content-range header into (kind, start, end, realLength). + + realLength might be None if real length is not known ('*'). 
+ start and end might be None if start,end unspecified (for response code 416) + """ + kind, other = header.strip().split() + if kind.lower() != "bytes": + raise ValueError("a range of type %r is not supported") + startend, realLength = other.split("/") + if startend.strip() == '*': + start,end=None,None + else: + start, end = map(int, startend.split("-")) + if realLength == "*": + realLength = None + else: + realLength = int(realLength) + return (kind, start, end, realLength) + +def parseExpect(field): + type,args=parseArgs(field) + + type=parseKeyValue(type) + return (type[0], (lambda *args:args)(type[1], *args)) + +def parseExpires(header): + # """HTTP/1.1 clients and caches MUST treat other invalid date formats, + # especially including the value 0, as in the past (i.e., "already expired").""" + + try: + return parseDateTime(header) + except ValueError: + return 0 + +def parseIfModifiedSince(header): + # Ancient versions of netscape and *current* versions of MSIE send + # If-Modified-Since: Thu, 05 Aug 2004 12:57:27 GMT; length=123 + # which is blantantly RFC-violating and not documented anywhere + # except bug-trackers for web frameworks. + + # So, we'll just strip off everything after a ';'. + return parseDateTime(header.split(';', 1)[0]) + +def parseIfRange(headers): + try: + return ETag.parse(tokenize(headers)) + except ValueError: + return parseDateTime(last(headers)) + +def parseRange(range): + range = list(range) + if len(range) < 3 or range[1] != Token('='): + raise ValueError("Invalid range header format: %s" %(range,)) + + type=range[0] + if type != 'bytes': + raise ValueError("Unknown range unit: %s." 
% (type,)) + rangeset=split(range[2:], Token(',')) + ranges = [] + + for byterangespec in rangeset: + if len(byterangespec) != 1: + raise ValueError("Invalid range header format: %s" % (range,)) + start,end=byterangespec[0].split('-') + + if not start and not end: + raise ValueError("Invalid range header format: %s" % (range,)) + + if start: + start = int(start) + else: + start = None + + if end: + end = int(end) + else: + end = None + + if start and end and start > end: + raise ValueError("Invalid range header, start > end: %s" % (range,)) + ranges.append((start,end)) + return type,ranges + +def parseRetryAfter(header): + try: + # delta seconds + return time.time() + int(header) + except ValueError: + # or datetime + return parseDateTime(header) + +# WWW-Authenticate and Authorization + +def parseWWWAuthenticate(tokenized): + headers = [] + + tokenList = list(tokenized) + + while tokenList: + scheme = tokenList.pop(0) + challenge = {} + last = None + kvChallenge = False + + while tokenList: + token = tokenList.pop(0) + if token == Token('='): + kvChallenge = True + challenge[last] = tokenList.pop(0) + last = None + + elif token == Token(','): + if kvChallenge: + if len(tokenList) > 1 and tokenList[1] != Token('='): + break + + else: + break + + else: + last = token + + if last and scheme and not challenge and not kvChallenge: + challenge = last + last = None + + headers.append((scheme, challenge)) + + if last and last not in (Token('='), Token(',')): + if headers[-1] == (scheme, challenge): + scheme = last + challenge = {} + headers.append((scheme, challenge)) + + return headers + +def parseAuthorization(header): + scheme, rest = header.split(' ', 1) + # this header isn't tokenized because it may eat characters + # in the unquoted base64 encoded credentials + return scheme.lower(), rest + +#### Header generators +def generateAccept(accept): + mimeType,q = accept + + out="%s/%s"%(mimeType.mediaType, mimeType.mediaSubtype) + if mimeType.params: + 
out+=';'+generateKeyValues(mimeType.params.iteritems()) + + if q != 1.0: + out+=(';q=%.3f' % (q,)).rstrip('0').rstrip('.') + + return out + +def removeDefaultEncoding(seq): + for item in seq: + if item[0] != 'identity' or item[1] != .0001: + yield item + +def generateAcceptQvalue(keyvalue): + if keyvalue[1] == 1.0: + return "%s" % keyvalue[0:1] + else: + return ("%s;q=%.3f" % keyvalue).rstrip('0').rstrip('.') + +def parseCacheControl(kv): + k, v = parseKeyValue(kv) + if k == 'max-age' or k == 'min-fresh' or k == 's-maxage': + # Required integer argument + if v is None: + v = 0 + else: + v = int(v) + elif k == 'max-stale': + # Optional integer argument + if v is not None: + v = int(v) + elif k == 'private' or k == 'no-cache': + # Optional list argument + if v is not None: + v = [field.strip().lower() for field in v.split(',')] + return k, v + +def generateCacheControl((k, v)): + if v is None: + return str(k) + else: + if k == 'no-cache' or k == 'private': + # quoted list of values + v = quoteString(generateList( + [header_case_mapping.get(name) or dashCapitalize(name) for name in v])) + return '%s=%s' % (k,v) + +def generateContentRange(tup): + """tup is (type, start, end, len) + len can be None. 
+ """ + type, start, end, len = tup + if len == None: + len = '*' + else: + len = int(len) + if start == None and end == None: + startend = '*' + else: + startend = '%d-%d' % (start, end) + + return '%s %s/%s' % (type, startend, len) + +def generateDateTime(secSinceEpoch): + """Convert seconds since epoch to HTTP datetime string.""" + year, month, day, hh, mm, ss, wd, y, z = time.gmtime(secSinceEpoch) + s = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % ( + weekdayname[wd], + day, monthname[month], year, + hh, mm, ss) + return s + +def generateExpect(item): + if item[1][0] is None: + out = '%s' % (item[0],) + else: + out = '%s=%s' % (item[0], item[1][0]) + if len(item[1]) > 1: + out += ';'+generateKeyValues(item[1][1:]) + return out + +def generateRange(range): + def noneOr(s): + if s is None: + return '' + return s + + type,ranges=range + + if type != 'bytes': + raise ValueError("Unknown range unit: "+type+".") + + return (type+'='+ + ','.join(['%s-%s' % (noneOr(startend[0]), noneOr(startend[1])) + for startend in ranges])) + +def generateRetryAfter(when): + # always generate delta seconds format + return str(int(when - time.time())) + +def generateContentType(mimeType): + out="%s/%s"%(mimeType.mediaType, mimeType.mediaSubtype) + if mimeType.params: + out+=';'+generateKeyValues(mimeType.params.iteritems()) + return out + +def generateIfRange(dateOrETag): + if isinstance(dateOrETag, ETag): + return dateOrETag.generate() + else: + return generateDateTime(dateOrETag) + +# WWW-Authenticate and Authorization + +def generateWWWAuthenticate(headers): + _generated = [] + for seq in headers: + scheme, challenge = seq[0], seq[1] + + # If we're going to parse out to something other than a dict + # we need to be able to generate from something other than a dict + + try: + l = [] + for k,v in dict(challenge).iteritems(): + l.append("%s=%s" % (k, quoteString(v))) + + _generated.append("%s %s" % (scheme, ", ".join(l))) + except ValueError: + _generated.append("%s %s" % (scheme, 
challenge)) + + return _generated + +def generateAuthorization(seq): + return [' '.join(seq)] + + +#### +class ETag(object): + def __init__(self, tag, weak=False): + self.tag = str(tag) + self.weak = weak + + def match(self, other, strongCompare): + # Sec 13.3. + # The strong comparison function: in order to be considered equal, both + # validators MUST be identical in every way, and both MUST NOT be weak. + # + # The weak comparison function: in order to be considered equal, both + # validators MUST be identical in every way, but either or both of + # them MAY be tagged as "weak" without affecting the result. + + if not isinstance(other, ETag) or other.tag != self.tag: + return False + + if strongCompare and (other.weak or self.weak): + return False + return True + + def __eq__(self, other): + return isinstance(other, ETag) and other.tag == self.tag and other.weak == self.weak + + def __ne__(self, other): + return not self.__eq__(other) + + def __repr__(self): + return "Etag(%r, weak=%r)" % (self.tag, self.weak) + + def parse(tokens): + tokens=tuple(tokens) + if len(tokens) == 1 and not isinstance(tokens[0], Token): + return ETag(tokens[0]) + + if(len(tokens) == 3 and tokens[0] == "w" + and tokens[1] == Token('/')): + return ETag(tokens[2], weak=True) + + raise ValueError("Invalid ETag.") + + parse=staticmethod(parse) + + def generate(self): + if self.weak: + return 'W/'+quoteString(self.tag) + else: + return quoteString(self.tag) + +def parseStarOrETag(tokens): + tokens=tuple(tokens) + if tokens == ('*',): + return '*' + else: + return ETag.parse(tokens) + +def generateStarOrETag(etag): + if etag=='*': + return etag + else: + return etag.generate() + +#### Cookies. Blech! 
+class Cookie(object): + # __slots__ = ['name', 'value', 'path', 'domain', 'ports', 'expires', 'discard', 'secure', 'comment', 'commenturl', 'version'] + + def __init__(self, name, value, path=None, domain=None, ports=None, expires=None, discard=False, secure=False, comment=None, commenturl=None, version=0): + self.name=name + self.value=value + self.path=path + self.domain=domain + self.ports=ports + self.expires=expires + self.discard=discard + self.secure=secure + self.comment=comment + self.commenturl=commenturl + self.version=version + + def __repr__(self): + s="Cookie(%r=%r" % (self.name, self.value) + if self.path is not None: s+=", path=%r" % (self.path,) + if self.domain is not None: s+=", domain=%r" % (self.domain,) + if self.ports is not None: s+=", ports=%r" % (self.ports,) + if self.expires is not None: s+=", expires=%r" % (self.expires,) + if self.secure is not False: s+=", secure=%r" % (self.secure,) + if self.comment is not None: s+=", comment=%r" % (self.comment,) + if self.commenturl is not None: s+=", commenturl=%r" % (self.commenturl,) + if self.version != 0: s+=", version=%r" % (self.version,) + s+=")" + return s + + def __eq__(self, other): + return (isinstance(other, Cookie) and + other.path == self.path and + other.domain == self.domain and + other.ports == self.ports and + other.expires == self.expires and + other.secure == self.secure and + other.comment == self.comment and + other.commenturl == self.commenturl and + other.version == self.version) + + def __ne__(self, other): + return not self.__eq__(other) + + +def parseCookie(headers): + """Bleargh, the cookie spec sucks. + This surely needs interoperability testing. + There are two specs that are supported: + Version 0) http://wp.netscape.com/newsref/std/cookie_spec.html + Version 1) http://www.faqs.org/rfcs/rfc2965.html + """ + + cookies = [] + # There can't really be multiple cookie headers according to RFC, because + # if multiple headers are allowed, they must be joinable with ",". 
+ # Neither new RFC2965 cookies nor old netscape cookies are. + + header = ';'.join(headers) + if header[0:8].lower() == "$version": + # RFC2965 cookie + h=tokenize([header], foldCase=False) + r_cookies = split(h, Token(',')) + for r_cookie in r_cookies: + last_cookie = None + rr_cookies = split(r_cookie, Token(';')) + for cookie in rr_cookies: + nameval = tuple(split(cookie, Token('='))) + if len(nameval) == 2: + (name,), (value,) = nameval + else: + (name,), = nameval + value = None + + name=name.lower() + if name == '$version': + continue + if name[0] == '$': + if last_cookie is not None: + if name == '$path': + last_cookie.path=value + elif name == '$domain': + last_cookie.domain=value + elif name == '$port': + if value is None: + last_cookie.ports = () + else: + last_cookie.ports=tuple([int(s) for s in value.split(',')]) + else: + last_cookie = Cookie(name, value, version=1) + cookies.append(last_cookie) + else: + # Oldstyle cookies don't do quoted strings or anything sensible. + # All characters are valid for names except ';' and '=', and all + # characters are valid for values except ';'. Spaces are stripped, + # however. + r_cookies = header.split(';') + for r_cookie in r_cookies: + name,value = r_cookie.split('=', 1) + name=name.strip(' \t') + value=value.strip(' \t') + + cookies.append(Cookie(name, value)) + + return cookies + +cookie_validname = "[^"+re.escape(http_tokens+http_ctls)+"]*$" +cookie_validname_re = re.compile(cookie_validname) +cookie_validvalue = cookie_validname+'|"([^"]|\\\\")*"$' +cookie_validvalue_re = re.compile(cookie_validvalue) + +def generateCookie(cookies): + # There's a fundamental problem with the two cookie specifications. + # They both use the "Cookie" header, and the RFC Cookie header only allows + # one version to be specified. Thus, when you have a collection of V0 and + # V1 cookies, you have to either send them all as V0 or send them all as + # V1. + + # I choose to send them all as V1. 
+ + # You might think converting a V0 cookie to a V1 cookie would be lossless, + # but you'd be wrong. If you do the conversion, and a V0 parser tries to + # read the cookie, it will see a modified form of the cookie, in cases + # where quotes must be added to conform to proper V1 syntax. + # (as a real example: "Cookie: cartcontents=oid:94680,qty:1,auto:0,esp:y") + + # However, that is what we will do, anyways. It has a high probability of + # breaking applications that only handle oldstyle cookies, where some other + # application set a newstyle cookie that is applicable over for site + # (or host), AND where the oldstyle cookie uses a value which is invalid + # syntax in a newstyle cookie. + + # Also, the cookie name *cannot* be quoted in V1, so some cookies just + # cannot be converted at all. (e.g. "Cookie: phpAds_capAd[32]=2"). These + # are just dicarded during conversion. + + # As this is an unsolvable problem, I will pretend I can just say + # OH WELL, don't do that, or else upgrade your old applications to have + # newstyle cookie parsers. + + # I will note offhandedly that there are *many* sites which send V0 cookies + # that are not valid V1 cookie syntax. About 20% for my cookies file. + # However, they do not generally mix them with V1 cookies, so this isn't + # an issue, at least right now. I have not tested to see how many of those + # webapps support RFC2965 V1 cookies. I suspect not many. + + max_version = max([cookie.version for cookie in cookies]) + + if max_version == 0: + # no quoting or anything. + return ';'.join(["%s=%s" % (cookie.name, cookie.value) for cookie in cookies]) + else: + str_cookies = ['$Version="1"'] + for cookie in cookies: + if cookie.version == 0: + # Version 0 cookie: we make sure the name and value are valid + # V1 syntax. + + # If they are, we use them as is. This means in *most* cases, + # the cookie will look literally the same on output as it did + # on input. + # If it isn't a valid name, ignore the cookie. 
+ # If it isn't a valid value, quote it and hope for the best on + # the other side. + + if cookie_validname_re.match(cookie.name) is None: + continue + + value=cookie.value + if cookie_validvalue_re.match(cookie.value) is None: + value = quoteString(value) + + str_cookies.append("%s=%s" % (cookie.name, value)) + else: + # V1 cookie, nice and easy + str_cookies.append("%s=%s" % (cookie.name, quoteString(cookie.value))) + + if cookie.path: + str_cookies.append("$Path=%s" % quoteString(cookie.path)) + if cookie.domain: + str_cookies.append("$Domain=%s" % quoteString(cookie.domain)) + if cookie.ports is not None: + if len(cookie.ports) == 0: + str_cookies.append("$Port") + else: + str_cookies.append("$Port=%s" % quoteString(",".join([str(x) for x in cookie.ports]))) + return ';'.join(str_cookies) + +def parseSetCookie(headers): + setCookies = [] + for header in headers: + try: + parts = header.split(';') + l = [] + + for part in parts: + namevalue = part.split('=',1) + if len(namevalue) == 1: + name=namevalue[0] + value=None + else: + name,value=namevalue + value=value.strip(' \t') + + name=name.strip(' \t') + + l.append((name, value)) + + setCookies.append(makeCookieFromList(l, True)) + except ValueError: + # If we can't parse one Set-Cookie, ignore it, + # but not the rest of Set-Cookies. + pass + return setCookies + +def parseSetCookie2(toks): + outCookies = [] + for cookie in [[parseKeyValue(x) for x in split(y, Token(';'))] + for y in split(toks, Token(','))]: + try: + outCookies.append(makeCookieFromList(cookie, False)) + except ValueError: + # Again, if we can't handle one cookie -- ignore it. + pass + return outCookies + +def makeCookieFromList(tup, netscapeFormat): + name, value = tup[0] + if name is None or value is None: + raise ValueError("Cookie has missing name or value") + if name.startswith("$"): + raise ValueError("Invalid cookie name: %r, starts with '$'." 
% name) + cookie = Cookie(name, value) + hadMaxAge = False + + for name,value in tup[1:]: + name = name.lower() + + if value is None: + if name in ("discard", "secure"): + # Boolean attrs + value = True + elif name != "port": + # Can be either boolean or explicit + continue + + if name in ("comment", "commenturl", "discard", "domain", "path", "secure"): + # simple cases + setattr(cookie, name, value) + elif name == "expires" and not hadMaxAge: + if netscapeFormat and value[0] == '"' and value[-1] == '"': + value = value[1:-1] + cookie.expires = parseDateTime(value) + elif name == "max-age": + hadMaxAge = True + cookie.expires = int(value) + time.time() + elif name == "port": + if value is None: + cookie.ports = () + else: + if netscapeFormat and value[0] == '"' and value[-1] == '"': + value = value[1:-1] + cookie.ports = tuple([int(s) for s in value.split(',')]) + elif name == "version": + cookie.version = int(value) + + return cookie + + +def generateSetCookie(cookies): + setCookies = [] + for cookie in cookies: + out = ["%s=%s" % (cookie.name, cookie.value)] + if cookie.expires: + out.append("expires=%s" % generateDateTime(cookie.expires)) + if cookie.path: + out.append("path=%s" % cookie.path) + if cookie.domain: + out.append("domain=%s" % cookie.domain) + if cookie.secure: + out.append("secure") + + setCookies.append('; '.join(out)) + return setCookies + +def generateSetCookie2(cookies): + setCookies = [] + for cookie in cookies: + out = ["%s=%s" % (cookie.name, quoteString(cookie.value))] + if cookie.comment: + out.append("Comment=%s" % quoteString(cookie.comment)) + if cookie.commenturl: + out.append("CommentURL=%s" % quoteString(cookie.commenturl)) + if cookie.discard: + out.append("Discard") + if cookie.domain: + out.append("Domain=%s" % quoteString(cookie.domain)) + if cookie.expires: + out.append("Max-Age=%s" % (cookie.expires - time.time())) + if cookie.path: + out.append("Path=%s" % quoteString(cookie.path)) + if cookie.ports is not None: + if 
len(cookie.ports) == 0: + out.append("Port") + else: + out.append("Port=%s" % quoteString(",".join([str(x) for x in cookie.ports]))) + if cookie.secure: + out.append("Secure") + out.append('Version="1"') + setCookies.append('; '.join(out)) + return setCookies + +def parseDepth(depth): + if depth not in ("0", "1", "infinity"): + raise ValueError("Invalid depth header value: %s" % (depth,)) + return depth + +def parseOverWrite(overwrite): + if overwrite == "F": + return False + elif overwrite == "T": + return True + raise ValueError("Invalid overwrite header value: %s" % (overwrite,)) + +def generateOverWrite(overwrite): + if overwrite: + return "T" + else: + return "F" + +##### Random stuff that looks useful. +# def sortMimeQuality(s): +# def sorter(item1, item2): +# if item1[0] == '*': +# if item2[0] == '*': +# return 0 + + +# def sortQuality(s): +# def sorter(item1, item2): +# if item1[1] < item2[1]: +# return -1 +# if item1[1] < item2[1]: +# return 1 +# if item1[0] == item2[0]: +# return 0 + + +# def getMimeQuality(mimeType, accepts): +# type,args = parseArgs(mimeType) +# type=type.split(Token('/')) +# if len(type) != 2: +# raise ValueError, "MIME Type "+s+" invalid." + +# for accept in accepts: +# accept,acceptQual=accept +# acceptType=accept[0:1] +# acceptArgs=accept[2] + +# if ((acceptType == type or acceptType == (type[0],'*') or acceptType==('*','*')) and +# (args == acceptArgs or len(acceptArgs) == 0)): +# return acceptQual + +# def getQuality(type, accepts): +# qual = accepts.get(type) +# if qual is not None: +# return qual + +# return accepts.get('*') + +# Headers object +class __RecalcNeeded(object): + def __repr__(self): + return "" + +_RecalcNeeded = __RecalcNeeded() + +class Headers(object): + """This class stores the HTTP headers as both a parsed representation and + the raw string representation. 
It converts between the two on demand.""" + + def __init__(self, headers=None, rawHeaders=None, handler=DefaultHTTPHandler): + self._raw_headers = {} + self._headers = {} + self.handler = handler + if headers is not None: + for key, value in headers.iteritems(): + self.setHeader(key, value) + if rawHeaders is not None: + for key, value in rawHeaders.iteritems(): + self.setRawHeaders(key, value) + + def _setRawHeaders(self, headers): + self._raw_headers = headers + self._headers = {} + + def _toParsed(self, name): + r = self._raw_headers.get(name, None) + h = self.handler.parse(name, r) + if h is not None: + self._headers[name] = h + return h + + def _toRaw(self, name): + h = self._headers.get(name, None) + r = self.handler.generate(name, h) + if r is not None: + self._raw_headers[name] = r + return r + + def hasHeader(self, name): + """Does a header with the given name exist?""" + name=name.lower() + return self._raw_headers.has_key(name) + + def getRawHeaders(self, name, default=None): + """Returns a list of headers matching the given name as the raw string given.""" + + name=name.lower() + raw_header = self._raw_headers.get(name, default) + if raw_header is not _RecalcNeeded: + return raw_header + + return self._toRaw(name) + + def getHeader(self, name, default=None): + """Ret9urns the parsed representation of the given header. + The exact form of the return value depends on the header in question. + + If no parser for the header exists, raise ValueError. + + If the header doesn't exist, return default (or None if not specified) + """ + name=name.lower() + parsed = self._headers.get(name, default) + if parsed is not _RecalcNeeded: + return parsed + return self._toParsed(name) + + def setRawHeaders(self, name, value): + """Sets the raw representation of the given header. + Value should be a list of strings, each being one header of the + given name. 
+ """ + name=name.lower() + self._raw_headers[name] = value + self._headers[name] = _RecalcNeeded + + def setHeader(self, name, value): + """Sets the parsed representation of the given header. + Value should be a list of objects whose exact form depends + on the header in question. + """ + name=name.lower() + self._raw_headers[name] = _RecalcNeeded + self._headers[name] = value + + def addRawHeader(self, name, value): + """ + Add a raw value to a header that may or may not already exist. + If it exists, add it as a separate header to output; do not + replace anything. + """ + name=name.lower() + raw_header = self._raw_headers.get(name) + if raw_header is None: + # No header yet + raw_header = [] + self._raw_headers[name] = raw_header + elif raw_header is _RecalcNeeded: + raw_header = self._toRaw(name) + + raw_header.append(value) + self._headers[name] = _RecalcNeeded + + def removeHeader(self, name): + """Removes the header named.""" + + name=name.lower() + if self._raw_headers.has_key(name): + del self._raw_headers[name] + del self._headers[name] + + def __repr__(self): + return ''% (self._raw_headers, self._headers) + + def canonicalNameCaps(self, name): + """Return the name with the canonical capitalization, if known, + otherwise, Caps-After-Dashes""" + return header_case_mapping.get(name) or dashCapitalize(name) + + def getAllRawHeaders(self): + """Return an iterator of key,value pairs of all headers + contained in this object, as strings. The keys are capitalized + in canonical capitalization.""" + for k,v in self._raw_headers.iteritems(): + if v is _RecalcNeeded: + v = self._toRaw(k) + yield self.canonicalNameCaps(k), v + + def makeImmutable(self): + """Make this header set immutable. 
All mutating operations will + raise an exception.""" + self.setHeader = self.setRawHeaders = self.removeHeader = self._mutateRaise + + def _mutateRaise(self, *args): + raise AttributeError("This header object is immutable as the headers have already been sent.") + + +"""The following dicts are all mappings of header to list of operations + to perform. The first operation should generally be 'tokenize' if the + header can be parsed according to the normal tokenization rules. If + it cannot, generally the first thing you want to do is take only the + last instance of the header (in case it was sent multiple times, which + is strictly an error, but we're nice.). + """ + +iteritems = lambda x: x.iteritems() + + +parser_general_headers = { + 'Cache-Control':(tokenize, listParser(parseCacheControl), dict), + 'Connection':(tokenize,filterTokens), + 'Date':(last,parseDateTime), +# 'Pragma':tokenize +# 'Trailer':tokenize + 'Transfer-Encoding':(tokenize,filterTokens), +# 'Upgrade':tokenize +# 'Via':tokenize,stripComment +# 'Warning':tokenize +} + +generator_general_headers = { + 'Cache-Control':(iteritems, listGenerator(generateCacheControl), singleHeader), + 'Connection':(generateList,singleHeader), + 'Date':(generateDateTime,singleHeader), +# 'Pragma': +# 'Trailer': + 'Transfer-Encoding':(generateList,singleHeader), +# 'Upgrade': +# 'Via': +# 'Warning': +} + +parser_request_headers = { + 'Accept': (tokenize, listParser(parseAccept), dict), + 'Accept-Charset': (tokenize, listParser(parseAcceptQvalue), dict, addDefaultCharset), + 'Accept-Encoding':(tokenize, listParser(parseAcceptQvalue), dict, addDefaultEncoding), + 'Accept-Language':(tokenize, listParser(parseAcceptQvalue), dict), + 'Authorization': (last, parseAuthorization), + 'Cookie':(parseCookie,), + 'Expect':(tokenize, listParser(parseExpect), dict), + 'From':(last,), + 'Host':(last,), + 'If-Match':(tokenize, listParser(parseStarOrETag), list), + 'If-Modified-Since':(last, parseIfModifiedSince), + 
'If-None-Match':(tokenize, listParser(parseStarOrETag), list), + 'If-Range':(parseIfRange,), + 'If-Unmodified-Since':(last,parseDateTime), + 'Max-Forwards':(last,int), +# 'Proxy-Authorization':str, # what is "credentials" + 'Range':(tokenize, parseRange), + 'Referer':(last,str), # TODO: URI object? + 'TE':(tokenize, listParser(parseAcceptQvalue), dict), + 'User-Agent':(last,str), +} + +generator_request_headers = { + 'Accept': (iteritems,listGenerator(generateAccept),singleHeader), + 'Accept-Charset': (iteritems, listGenerator(generateAcceptQvalue),singleHeader), + 'Accept-Encoding': (iteritems, removeDefaultEncoding, listGenerator(generateAcceptQvalue),singleHeader), + 'Accept-Language': (iteritems, listGenerator(generateAcceptQvalue),singleHeader), + 'Authorization': (generateAuthorization,), # what is "credentials" + 'Cookie':(generateCookie,singleHeader), + 'Expect':(iteritems, listGenerator(generateExpect), singleHeader), + 'From':(str,singleHeader), + 'Host':(str,singleHeader), + 'If-Match':(listGenerator(generateStarOrETag), singleHeader), + 'If-Modified-Since':(generateDateTime,singleHeader), + 'If-None-Match':(listGenerator(generateStarOrETag), singleHeader), + 'If-Range':(generateIfRange, singleHeader), + 'If-Unmodified-Since':(generateDateTime,singleHeader), + 'Max-Forwards':(str, singleHeader), +# 'Proxy-Authorization':str, # what is "credentials" + 'Range':(generateRange,singleHeader), + 'Referer':(str,singleHeader), + 'TE': (iteritems, listGenerator(generateAcceptQvalue),singleHeader), + 'User-Agent':(str,singleHeader), +} + +parser_response_headers = { + 'Accept-Ranges':(tokenize, filterTokens), + 'Age':(last,int), + 'ETag':(tokenize, ETag.parse), + 'Location':(last,), # TODO: URI object? 
+# 'Proxy-Authenticate' + 'Retry-After':(last, parseRetryAfter), + 'Server':(last,), + 'Set-Cookie':(parseSetCookie,), + 'Set-Cookie2':(tokenize, parseSetCookie2), + 'Vary':(tokenize, filterTokens), + 'WWW-Authenticate': (lambda h: tokenize(h, foldCase=False), + parseWWWAuthenticate,) +} + +generator_response_headers = { + 'Accept-Ranges':(generateList, singleHeader), + 'Age':(str, singleHeader), + 'ETag':(ETag.generate, singleHeader), + 'Location':(str, singleHeader), +# 'Proxy-Authenticate' + 'Retry-After':(generateRetryAfter, singleHeader), + 'Server':(str, singleHeader), + 'Set-Cookie':(generateSetCookie,), + 'Set-Cookie2':(generateSetCookie2,), + 'Vary':(generateList, singleHeader), + 'WWW-Authenticate':(generateWWWAuthenticate,) +} + +parser_entity_headers = { + 'Allow':(lambda str:tokenize(str, foldCase=False), filterTokens), + 'Content-Encoding':(tokenize, filterTokens), + 'Content-Language':(tokenize, filterTokens), + 'Content-Length':(last, int), + 'Content-Location':(last,), # TODO: URI object? 
+ 'Content-MD5':(last, parseContentMD5), + 'Content-Range':(last, parseContentRange), + 'Content-Type':(lambda str:tokenize(str, foldCase=False), parseContentType), + 'Expires':(last, parseExpires), + 'Last-Modified':(last, parseDateTime), + } + +generator_entity_headers = { + 'Allow':(generateList, singleHeader), + 'Content-Encoding':(generateList, singleHeader), + 'Content-Language':(generateList, singleHeader), + 'Content-Length':(str, singleHeader), + 'Content-Location':(str, singleHeader), + 'Content-MD5':(base64.encodestring, lambda x: x.strip("\n"), singleHeader), + 'Content-Range':(generateContentRange, singleHeader), + 'Content-Type':(generateContentType, singleHeader), + 'Expires':(generateDateTime, singleHeader), + 'Last-Modified':(generateDateTime, singleHeader), + } + +DefaultHTTPHandler.updateParsers(parser_general_headers) +DefaultHTTPHandler.updateParsers(parser_request_headers) +DefaultHTTPHandler.updateParsers(parser_response_headers) +DefaultHTTPHandler.updateParsers(parser_entity_headers) + +DefaultHTTPHandler.updateGenerators(generator_general_headers) +DefaultHTTPHandler.updateGenerators(generator_request_headers) +DefaultHTTPHandler.updateGenerators(generator_response_headers) +DefaultHTTPHandler.updateGenerators(generator_entity_headers) + + +# casemappingify(DefaultHTTPParsers) +# casemappingify(DefaultHTTPGenerators) + +# lowerify(DefaultHTTPParsers) +# lowerify(DefaultHTTPGenerators) diff -r f3c2cb460ad9 -r d14bfd477c44 web/httpcache.py --- a/web/httpcache.py Thu Apr 08 12:42:47 2010 +0200 +++ b/web/httpcache.py Thu Apr 08 14:11:49 2010 +0200 @@ -131,8 +131,5 @@ # max-age=0 to actually force revalidation when needed viewmod.View.cache_max_age = 0 - -viewmod.EntityView.http_cache_manager = EntityHTTPCacheManager - viewmod.StartupView.http_cache_manager = MaxAgeHTTPCacheManager viewmod.StartupView.cache_max_age = 60*60*2 # stay in http cache for 2 hours by default diff -r f3c2cb460ad9 -r d14bfd477c44 web/request.py --- a/web/request.py Thu 
Apr 08 12:42:47 2010 +0200 +++ b/web/request.py Thu Apr 08 14:11:49 2010 +0200 @@ -31,6 +31,7 @@ from cubicweb.view import STRICT_DOCTYPE, TRANSITIONAL_DOCTYPE_NOEXT from cubicweb.web import (INTERNAL_FIELD_VALUE, LOGGER, NothingToEdit, RequestError, StatusResponse) +from cubicweb.web.http_headers import Headers _MARKER = object() @@ -88,6 +89,8 @@ self.pageid = None self.datadir_url = self._datadir_url() self._set_pageid() + # prepare output header + self.headers_out = Headers() def _set_pageid(self): """initialize self.pageid @@ -657,17 +660,26 @@ """ raise NotImplementedError() - def set_header(self, header, value): + def set_header(self, header, value, raw=True): """set an output HTTP header""" - raise NotImplementedError() + if raw: + # adding encoded header is important, else page content + # will be reconverted back to unicode and apart unefficiency, this + # may cause decoding problem (e.g. when downloading a file) + self.headers_out.setRawHeaders(header, [str(value)]) + else: + self.headers_out.setHeader(header, value) def add_header(self, header, value): """add an output HTTP header""" - raise NotImplementedError() + # adding encoded header is important, else page content + # will be reconverted back to unicode and apart unefficiency, this + # may cause decoding problem (e.g. 
when downloading a file) + self.headers_out.addRawHeader(header, str(value)) def remove_header(self, header): """remove an output HTTP header""" - raise NotImplementedError() + self.headers_out.removeHeader(header) def header_authorization(self): """returns a couple (auth-type, auth-value)""" diff -r f3c2cb460ad9 -r d14bfd477c44 web/test/data/sample1.pdf Binary file web/test/data/sample1.pdf has changed diff -r f3c2cb460ad9 -r d14bfd477c44 web/test/data/sample1.xml --- a/web/test/data/sample1.xml Thu Apr 08 12:42:47 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,138 +0,0 @@ - - - - - ] > - - - - - - - -Comet 0.2.0 (unset title) - - - - - - - - - - - - - - - - - - - - - - -
-
- - - \ No newline at end of file diff -r f3c2cb460ad9 -r d14bfd477c44 web/test/unittest_application.py --- a/web/test/unittest_application.py Thu Apr 08 12:42:47 2010 +0200 +++ b/web/test/unittest_application.py Thu Apr 08 14:11:49 2010 +0200 @@ -29,7 +29,7 @@ class MockCursor: def __init__(self): self.executed = [] - def execute(self, rql, args=None, cachekey=None): + def execute(self, rql, args=None, build_descr=False): args = args or {} self.executed.append(rql % args) diff -r f3c2cb460ad9 -r d14bfd477c44 web/test/unittest_pdf.py --- a/web/test/unittest_pdf.py Thu Apr 08 12:42:47 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,41 +0,0 @@ -import os.path as osp -from tempfile import NamedTemporaryFile -from subprocess import Popen as sub -from xml.etree.cElementTree import ElementTree, fromstring, tostring, dump - -from logilab.common.testlib import TestCase, unittest_main - -from cubicweb.utils import can_do_pdf_conversion -from cubicweb.ext.xhtml2fo import ReportTransformer - -DATADIR = osp.join(osp.dirname(__file__), 'data') - -class PDFTC(TestCase): - - def test_xhtml_to_fop_to_pdf(self): - if not can_do_pdf_conversion(): - self.skip('dependencies not available : check pysixt and fop') - xmltree = ElementTree() - xmltree.parse(osp.join(DATADIR, 'sample1.xml')) - foptree = ReportTransformer(u'contentmain').transform(xmltree) - # next - foptmp = NamedTemporaryFile() - foptree.write(foptmp) - foptmp.flush() - pdftmp = NamedTemporaryFile() - fopproc = sub(['/usr/bin/fop', foptmp.name, pdftmp.name]) - fopproc.wait() - del foptmp - if fopproc.returncode: - self.skip('fop returned status %s' % fopproc.returncode) - pdftmp.seek(0) # a bit superstitious - reference = open(osp.join(DATADIR, 'sample1.pdf'), 'r').read() - output = pdftmp.read() - # XXX almost equals due to ID, creation date, so it seems to fail - self.assertEquals( len(output), len(reference) ) - # cut begin & end 'cause they contain variyng data - self.assertTextEquals(output[150:1500], 
reference[150:1500]) - -if __name__ == '__main__': - unittest_main() - diff -r f3c2cb460ad9 -r d14bfd477c44 web/test/unittest_urlpublisher.py --- a/web/test/unittest_urlpublisher.py Thu Apr 08 12:42:47 2010 +0200 +++ b/web/test/unittest_urlpublisher.py Thu Apr 08 14:11:49 2010 +0200 @@ -26,7 +26,7 @@ req = self.request() b = req.create_entity('BlogEntry', title=u'hell\'o', content=u'blabla') c = req.create_entity('Tag', name=u'yo') # take care: Tag's name normalized to lower case - self.execute('SET C tags B WHERE C eid %(c)s, B eid %(b)s', {'c':c.eid, 'b':b.eid}, 'b') + self.execute('SET C tags B WHERE C eid %(c)s, B eid %(b)s', {'c':c.eid, 'b':b.eid}) def process(self, url): req = self.req = self.request() diff -r f3c2cb460ad9 -r d14bfd477c44 web/test/unittest_views_basecontrollers.py --- a/web/test/unittest_views_basecontrollers.py Thu Apr 08 12:42:47 2010 +0200 +++ b/web/test/unittest_views_basecontrollers.py Thu Apr 08 14:11:49 2010 +0200 @@ -71,7 +71,7 @@ 'in_group-subject:'+eid: groups, } path, params = self.expect_redirect_publish(req, 'edit') - e = self.execute('Any X WHERE X eid %(x)s', {'x': user.eid}, 'x').get_entity(0, 0) + e = self.execute('Any X WHERE X eid %(x)s', {'x': user.eid}).get_entity(0, 0) self.assertEquals(e.firstname, u'Sylvain') self.assertEquals(e.surname, u'Th\xe9nault') self.assertEquals(e.login, user.login) @@ -111,7 +111,7 @@ 'surname-subject:'+eid: u'Sylvain', } path, params = self.expect_redirect_publish(req, 'edit') - e = self.execute('Any X WHERE X eid %(x)s', {'x': user.eid}, 'x').get_entity(0, 0) + e = self.execute('Any X WHERE X eid %(x)s', {'x': user.eid}).get_entity(0, 0) self.assertEquals(e.login, user.login) self.assertEquals(e.firstname, u'Th\xe9nault') self.assertEquals(e.surname, u'Sylvain') @@ -346,7 +346,7 @@ self.assertIn('_cwmsgid', params) eid = req.create_entity('EmailAddress', address=u'hop@logilab.fr').eid self.execute('SET X use_email E WHERE E eid %(e)s, X eid %(x)s', - {'x': self.session.user.eid, 'e': eid}, 
'x') + {'x': self.session.user.eid, 'e': eid}) self.commit() req = req req.form = {'eid': u(eid), '__type:%s'%eid: 'EmailAddress', @@ -385,7 +385,7 @@ } try: path, params = self.expect_redirect_publish(req, 'edit') - e = self.execute('Any X WHERE X eid %(x)s', {'x': cwetypeeid}, 'x').get_entity(0, 0) + e = self.execute('Any X WHERE X eid %(x)s', {'x': cwetypeeid}).get_entity(0, 0) self.assertEquals(e.name, 'CWEType') self.assertEquals(sorted(g.eid for g in e.read_permission), groupeids) finally: @@ -407,7 +407,7 @@ path, params = self.expect_redirect_publish(req, 'edit') self.failUnless(path.startswith('blogentry/')) eid = path.split('/')[1] - e = self.execute('Any C, T WHERE C eid %(x)s, C content T', {'x': eid}, 'x').get_entity(0, 0) + e = self.execute('Any C, T WHERE C eid %(x)s, C content T', {'x': eid}).get_entity(0, 0) self.assertEquals(e.title, '"13:03:40"') self.assertEquals(e.content, '"13:03:43"') diff -r f3c2cb460ad9 -r d14bfd477c44 web/test/unittest_views_baseviews.py --- a/web/test/unittest_views_baseviews.py Thu Apr 08 12:42:47 2010 +0200 +++ b/web/test/unittest_views_baseviews.py Thu Apr 08 14:11:49 2010 +0200 @@ -90,7 +90,7 @@ req = self.request() e = req.create_entity("State", name=u'', description=u'loo"ong blabla') rset = req.execute('Any X, D, CD, NOW - CD WHERE X is State, X description D, X creation_date CD, X eid %(x)s', - {'x': e.eid}, 'x') + {'x': e.eid}) view = self.vreg['views'].select('table', req, rset=rset) return e, rset, view diff -r f3c2cb460ad9 -r d14bfd477c44 web/views/autoform.py --- a/web/views/autoform.py Thu Apr 08 12:42:47 2010 +0200 +++ b/web/views/autoform.py Thu Apr 08 14:11:49 2010 +0200 @@ -289,7 +289,7 @@ execute = req.execute for subj, rtype, obj in parse_relations_descr(rdefs): rql = 'DELETE X %s Y where X eid %%(x)s, Y eid %%(y)s' % rtype - execute(rql, {'x': subj, 'y': obj}, ('x', 'y')) + execute(rql, {'x': subj, 'y': obj}) req.set_message(req._('relations deleted')) def insert_relations(req, rdefs): @@ -297,7 
+297,7 @@ execute = req.execute for subj, rtype, obj in parse_relations_descr(rdefs): rql = 'SET X %s Y where X eid %%(x)s, Y eid %%(y)s' % rtype - execute(rql, {'x': subj, 'y': obj}, ('x', 'y')) + execute(rql, {'x': subj, 'y': obj}) class GenericRelationsWidget(fw.FieldWidget): diff -r f3c2cb460ad9 -r d14bfd477c44 web/views/basecomponents.py --- a/web/views/basecomponents.py Thu Apr 08 12:42:47 2010 +0200 +++ b/web/views/basecomponents.py Thu Apr 08 14:11:49 2010 +0200 @@ -2,7 +2,6 @@ * the rql input form * the logged user link -* pdf view link :organization: Logilab :copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. diff -r f3c2cb460ad9 -r d14bfd477c44 web/views/basecontrollers.py --- a/web/views/basecontrollers.py Thu Apr 08 12:42:47 2010 +0200 +++ b/web/views/basecontrollers.py Thu Apr 08 14:11:49 2010 +0200 @@ -22,10 +22,11 @@ from cubicweb.utils import CubicWebJsonEncoder from cubicweb.selectors import authenticated_user, match_form_params from cubicweb.mail import format_mail -from cubicweb.web import ExplicitLogin, Redirect, RemoteCallFailed, json_dumps +from cubicweb.web import ExplicitLogin, Redirect, RemoteCallFailed, DirectResponse, json_dumps from cubicweb.web.controller import Controller from cubicweb.web.views import vid_from_rset from cubicweb.web.views.formrenderers import FormRenderer + try: from cubicweb.web.facet import (FilterRQLBuilder, get_facet, prepare_facets_rqlst) @@ -179,7 +180,7 @@ else: rql = 'SET Y %s X WHERE X eid %%(x)s, Y eid %%(y)s' % rtype for teid in eids: - req.execute(rql, {'x': eid, 'y': typed_eid(teid)}, ('x', 'y')) + req.execute(rql, {'x': eid, 'y': typed_eid(teid)}) def _validation_error(req, ex): @@ -283,7 +284,7 @@ raise RemoteCallFailed(repr(exc)) try: result = func(*args) - except RemoteCallFailed: + except (RemoteCallFailed, DirectResponse): raise except Exception, ex: self.exception('an exception occured while calling js_%s(%s): %s', @@ -316,12 +317,12 @@ form['__action_%s' % action] = 
u'whatever' return form - def _exec(self, rql, args=None, eidkey=None, rocheck=True): + def _exec(self, rql, args=None, rocheck=True): """json mode: execute RQL and return resultset as json""" if rocheck: self._cw.ensure_ro_rql(rql) try: - return self._cw.execute(rql, args, eidkey) + return self._cw.execute(rql, args) except Exception, ex: self.exception("error in _exec(rql=%s): %s", rql, ex) return None diff -r f3c2cb460ad9 -r d14bfd477c44 web/views/basetemplates.py --- a/web/views/basetemplates.py Thu Apr 08 12:42:47 2010 +0200 +++ b/web/views/basetemplates.py Thu Apr 08 14:11:49 2010 +0200 @@ -14,7 +14,7 @@ from cubicweb.appobject import objectify_selector from cubicweb.selectors import match_kwargs from cubicweb.view import View, MainTemplate, NOINDEX, NOFOLLOW -from cubicweb.utils import UStringIO, can_do_pdf_conversion +from cubicweb.utils import UStringIO from cubicweb.schema import display_name from cubicweb.web import component, formfields as ff, formwidgets as fw from cubicweb.web.views import forms @@ -268,61 +268,6 @@ self.w(u'\n') self.w(u'\n') -if can_do_pdf_conversion(): - try: - from xml.etree.cElementTree import ElementTree - except ImportError: #python2.4 - from elementtree import ElementTree - from subprocess import Popen as sub - from StringIO import StringIO - from tempfile import NamedTemporaryFile - from cubicweb.ext.xhtml2fo import ReportTransformer - - - class PdfViewComponent(component.EntityVComponent): - __regid__ = 'pdfview' - - context = 'ctxtoolbar' - - def cell_call(self, row, col, view): - entity = self.cw_rset.get_entity(row, col) - url = entity.absolute_url(vid=view.__regid__, __template='pdf-main-template') - iconurl = self._cw.build_url('data/pdf_icon.gif') - label = self._cw._('Download page as pdf') - self.w(u'%s' % - (xml_escape(url), label, xml_escape(iconurl), label)) - - class PdfMainTemplate(TheMainTemplate): - __regid__ = 'pdf-main-template' - - def call(self, view): - """build the standard view, then when it's all done, 
convert xhtml to pdf - """ - super(PdfMainTemplate, self).call(view) - section = self._cw.form.pop('section', 'contentmain') - pdf = self.to_pdf(self._stream, section) - self._cw.set_content_type('application/pdf', filename='report.pdf') - self.binary = True - self.w = None - self.set_stream() - # pylint needs help - self.w(pdf) - - def to_pdf(self, stream, section): - # XXX see ticket/345282 - stream = stream.getvalue().replace(' ', ' ').encode('utf-8') - xmltree = ElementTree() - xmltree.parse(StringIO(stream)) - foptree = ReportTransformer(section).transform(xmltree) - foptmp = NamedTemporaryFile() - pdftmp = NamedTemporaryFile() - foptree.write(foptmp) - foptmp.flush() - fopproc = sub(['/usr/bin/fop', foptmp.name, pdftmp.name]) - fopproc.wait() - pdftmp.seek(0) - pdf = pdftmp.read() - return pdf # page parts templates ######################################################## diff -r f3c2cb460ad9 -r d14bfd477c44 web/views/bookmark.py --- a/web/views/bookmark.py Thu Apr 08 12:42:47 2010 +0200 +++ b/web/views/bookmark.py Thu Apr 08 14:11:49 2010 +0200 @@ -112,7 +112,7 @@ else: # we can't edit shared bookmarks we don't own bookmarksrql = 'Bookmark B WHERE B bookmarked_by U, B owned_by U, U eid %(x)s' - erset = req.execute(bookmarksrql, {'x': ueid}, 'x', + erset = req.execute(bookmarksrql, {'x': ueid}, build_descr=False) bookmarksrql %= {'x': ueid} if erset: diff -r f3c2cb460ad9 -r d14bfd477c44 web/views/editcontroller.py --- a/web/views/editcontroller.py Thu Apr 08 12:42:47 2010 +0200 +++ b/web/views/editcontroller.py Thu Apr 08 14:11:49 2010 +0200 @@ -249,13 +249,13 @@ rql = 'DELETE %s %s %s WHERE X eid %%(x)s, Y eid %%(y)s' % ( subjvar, rschema, objvar) for reid in origvalues.difference(values): - self.relations_rql.append((rql, {'x': eid, 'y': reid}, ('x', 'y'))) + self.relations_rql.append((rql, {'x': eid, 'y': reid})) seteids = values.difference(origvalues) if seteids: rql = 'SET %s %s %s WHERE X eid %%(x)s, Y eid %%(y)s' % ( subjvar, rschema, objvar) for reid 
in seteids: - self.relations_rql.append((rql, {'x': eid, 'y': reid}, ('x', 'y'))) + self.relations_rql.append((rql, {'x': eid, 'y': reid})) def delete_entities(self, eidtypes): """delete entities from the repository""" diff -r f3c2cb460ad9 -r d14bfd477c44 web/views/urlpublishing.py --- a/web/views/urlpublishing.py Thu Apr 08 12:42:47 2010 +0200 +++ b/web/views/urlpublishing.py Thu Apr 08 14:11:49 2010 +0200 @@ -130,8 +130,7 @@ if len(parts) != 1: raise PathDontMatch() try: - rset = req.execute('Any X WHERE X eid %(x)s', - {'x': typed_eid(parts[0])}, 'x') + rset = req.execute('Any X WHERE X eid %(x)s', {'x': typed_eid(parts[0])}) except ValueError: raise PathDontMatch() if rset.rowcount == 0: @@ -178,7 +177,7 @@ rql = u'Any X WHERE X is %s, X %s %%(x)s' % (etype, attrname) if attrname == 'eid': try: - rset = req.execute(rql, {'x': typed_eid(value)}, 'x') + rset = req.execute(rql, {'x': typed_eid(value)}) except (ValueError, TypeResolverException): # conflicting eid/type raise PathDontMatch() diff -r f3c2cb460ad9 -r d14bfd477c44 web/views/workflow.py --- a/web/views/workflow.py Thu Apr 08 12:42:47 2010 +0200 +++ b/web/views/workflow.py Thu Apr 08 14:11:49 2010 +0200 @@ -129,7 +129,7 @@ headers = (_('from_state'), _('to_state'), _('comment'), _('date')) rql = '%s %s, X eid %%(x)s' % (sel, rql) try: - rset = self._cw.execute(rql, {'x': eid}, 'x') + rset = self._cw.execute(rql, {'x': eid}) except Unauthorized: return if rset: diff -r f3c2cb460ad9 -r d14bfd477c44 wsgi/request.py --- a/wsgi/request.py Thu Apr 08 12:42:47 2010 +0200 +++ b/wsgi/request.py Thu Apr 08 14:11:49 2010 +0200 @@ -38,9 +38,9 @@ post, files = self.get_posted_data() super(CubicWebWsgiRequest, self).__init__(vreg, https, post) if files is not None: - for fdef in files.itervalues(): - fdef[0] = unicode(fdef[0], self.encoding) - self.form.update(files) + for key, (name, _, stream) in files.iteritems(): + name = unicode(name, self.encoding) + self.form[key] = (name, stream) # prepare output headers 
self.headers_out = {}