backport stable into oldstable. oldstable is now 3.8
author      Sylvain Thénault <sylvain.thenault@logilab.fr>
date        Mon, 19 Jul 2010 15:36:16 +0200
branch      oldstable
changeset   5993:50e1a6ad3e98
parent      5487:3ab2682a4b37
parent      5976:00b1b6b906cf
child       6018:f4d1d5d9ccbb
backport stable into oldstable. oldstable is now 3.8
cleanappl.sh
doc/book/en/devweb/form.rst
doc/book/en/devweb/views/editforms.rst
ext/xhtml2fo.py
schemaviewer.py
skeleton/entities.py
skeleton/migration/postcreate.py
skeleton/migration/precreate.py
skeleton/schema.py
skeleton/views.py
stdlib.txt
test/data/migration/depends.map
web/data/pdf_icon.gif
web/test/data/sample1.pdf
web/test/data/sample1.xml
web/test/unittest_pdf.py
--- a/.hgtags	Thu May 06 08:24:46 2010 +0200
+++ b/.hgtags	Mon Jul 19 15:36:16 2010 +0200
@@ -121,3 +121,19 @@
 fefeda65bb83dcc2d775255fe69fdee0e793d135 cubicweb-debian-version-3.7.4-1
 c476d106705ebdd9205d97e64cafa72707acabe7 cubicweb-version-3.7.5
 2d0982252e8d780ba964f293a0e691d48070db6d cubicweb-debian-version-3.7.5-1
+3c703f3245dc7696341ae1d66525554d9fa2d11d cubicweb-version-3.8.0
+24cc65ab2eca05729d66cef3de6f69bb7f9dfa35 cubicweb-debian-version-3.8.0-1
+1e074c6150fe00844160986852db364cc5992848 cubicweb-version-3.8.1
+eb972d125eefd0de2d0743e95c6e1f4e3e93e4c1 cubicweb-debian-version-3.8.1-1
+ef2e37d34013488a2018e73338fbbfbde5901c5c cubicweb-version-3.8.2
+2b962bb9eee8ee7156a12cf137428c292f8e3b35 cubicweb-debian-version-3.8.2-1
+7e6c6a2a272d0a95fd42248f3125e45185f0eef1 cubicweb-version-3.8.3
+1ccaa924786047be66b44f6dbc76e6631f56b04a cubicweb-debian-version-3.8.3-1
+d00d1fab42afec8607fc84d862becfd7f58850f1 cubicweb-version-3.8.4
+b7883287f40c853e8278edc3f24326f2c9549954 cubicweb-debian-version-3.8.4-1
+2de32c0c293ba451b231efe77d6027376af3a2a3 cubicweb-version-3.8.5
+5d05b08adeab1ea301e49ed8537e35ede6db92f6 cubicweb-debian-version-3.8.5-1
+1a24c62aefc5e57f61be3d04affd415288e81904 cubicweb-version-3.8.6
+607a90073911b6bb941a49b5ec0b0d2a9cd479af cubicweb-debian-version-3.8.6-1
+a1a334d934390043a4293a4ee42bdceb1343246e cubicweb-version-3.8.7
+1cccf88d6dfe42986e1091de4c364b7b5814c54f cubicweb-debian-version-3.8.7-1
--- a/MANIFEST.in	Thu May 06 08:24:46 2010 +0200
+++ b/MANIFEST.in	Mon Jul 19 15:36:16 2010 +0200
@@ -5,27 +5,26 @@
 include bin/cubicweb-*
 include man/cubicweb-ctl.1
 
-recursive-include doc *.txt *.zargo *.png *.html makefile *.rst
+recursive-include doc README makefile *.conf *.py *.rst *.txt *.html *.png *.svg *.zargo *.dia
 
-recursive-include misc *
-
-recursive-include web/data *
-recursive-include web/wdoc *.rst *.png *.xml ChangeLog*
+recursive-include misc *.py *.png *.display
 
 include web/views/*.pt
-
-recursive-include etwist *.xml *.html
+recursive-include web/data external_resources *.js *.css *.py *.png *.gif *.ico *.ttf
+recursive-include web/wdoc *.rst *.png *.xml ChangeLog*
 
 recursive-include i18n *.pot *.po
 recursive-include schemas *.py *.sql
 
-recursive-include entities/test/data *
-recursive-include sobjects/test/data *
-recursive-include server/test/data *
-recursive-include server/test sources*
-recursive-include web/test/data *.js *.css *.png *.gif *.jpg *.ico external_resources
-recursive-include devtools/test/data *
+recursive-include test/data bootstrap_cubes *.py *.sql
+recursive-include entities/test/data bootstrap_cubes *.py
+recursive-include sobjects/test/data bootstrap_cubes *.py
+recursive-include hooks/test/data bootstrap_cubes *.py
+recursive-include server/test/data bootstrap_cubes *.py source*
+recursive-include web/test/data bootstrap_cubes *.py
+recursive-include devtools/test/data bootstrap_cubes *.py *.txt
 
 recursive-include skeleton *.py *.css *.js *.po compat *.in *.tmpl
 
 prune misc/cwfs
+prune goa
--- a/README	Thu May 06 08:24:46 2010 +0200
+++ b/README	Mon Jul 19 15:36:16 2010 +0200
@@ -1,6 +1,15 @@
 CubicWeb semantic web framework
 ===============================
 
+CubicWeb is a entities / relations based knowledge management system
+developped at Logilab.
+
+This package contains:
+* a repository server
+* a RQL command line client to the repository
+* an adaptative modpython interface to the server
+* a bunch of other management tools
+
 Install
 -------
 
--- a/__init__.py	Thu May 06 08:24:46 2010 +0200
+++ b/__init__.py	Mon Jul 19 15:36:16 2010 +0200
@@ -21,6 +21,13 @@
 """
 __docformat__ = "restructuredtext en"
 
+# ignore the pygments UserWarnings
+import warnings
+warnings.filterwarnings('ignore', category=UserWarning,
+                        message='.*was already imported',
+                        module='.*pygments')
+
+
 import __builtin__
 # '_' is available in builtins to mark internationalized string but should
 # not be used to do the actual translation
--- a/__pkginfo__.py	Thu May 06 08:24:46 2010 +0200
+++ b/__pkginfo__.py	Mon Jul 19 15:36:16 2010 +0200
@@ -20,33 +20,17 @@
 software
 """
 
-distname = "cubicweb"
-modname = "cubicweb"
+modname = distname = "cubicweb"
 
-numversion = (3, 7, 5)
+numversion = (3, 8, 7)
 version = '.'.join(str(num) for num in numversion)
 
-license = 'LGPL'
-copyright = '''Copyright (c) 2003-2010 LOGILAB S.A. (Paris, FRANCE).
-http://www.logilab.fr/ -- mailto:contact@logilab.fr'''
-
+description = "a repository of entities / relations for knowledge management"
 author = "Logilab"
 author_email = "contact@logilab.fr"
-
-short_desc = "a repository of entities / relations for knowledge management"
-long_desc = """CubicWeb is a entities / relations based knowledge management system
-developped at Logilab.
-
-This package contains:
-* a repository server
-* a RQL command line client to the repository
-* an adaptative modpython interface to the server
-* a bunch of other management tools
-"""
-
 web = 'http://www.cubicweb.org'
 ftp = 'ftp://ftp.logilab.org/pub/cubicweb'
-pyversions = ['2.5', '2.6']
+license = 'LGPL'
 
 classifiers = [
            'Environment :: Web Environment',
@@ -55,6 +39,32 @@
            'Programming Language :: JavaScript',
 ]
 
+__depends__ = {
+    'logilab-common': '>= 0.50.2',
+    'logilab-mtconverter': '>= 0.6.0',
+    'rql': '>= 0.26.2',
+    'yams': '>= 0.28.1',
+    'docutils': '>= 0.6',
+    #gettext                    # for xgettext, msgcat, etc...
+    # web dependancies
+    'simplejson': '>= 2.0.9',
+    'lxml': '',
+    'Twisted': '',
+    # XXX graphviz
+    # server dependencies
+    'logilab-database': '>= 1.0.5',
+    'pysqlite': '>= 2.5.5', # XXX install pysqlite2
+    }
+
+__recommends__ = {
+    'Pyro': '>= 3.9.1',
+    'PIL': '',                  # for captcha
+    'pycrypto': '',             # for crypto extensions
+    'fyzz': '>= 0.1.0',         # for sparql
+    'vobject': '>= 0.6.0',      # for ical view
+    #'Products.FCKeditor':'',
+    #'SimpleTAL':'>= 4.1.6',
+    }
 
 import sys
 from os import listdir, environ
@@ -65,57 +75,53 @@
            if not s.endswith('.bat')]
 include_dirs = [join('test', 'data'),
                 join('server', 'test', 'data'),
+                join('hooks', 'test', 'data'),
                 join('web', 'test', 'data'),
                 join('devtools', 'test', 'data'),
                 'schemas', 'skeleton']
 
 
-entities_dir = 'entities'
-schema_dir = 'schemas'
-sobjects_dir = 'sobjects'
-server_migration_dir = join('misc', 'migration')
-data_dir = join('web', 'data')
-wdoc_dir = join('web', 'wdoc')
-wdocimages_dir = join(wdoc_dir, 'images')
-views_dir = join('web', 'views')
-i18n_dir = 'i18n'
+_server_migration_dir = join('misc', 'migration')
+_data_dir = join('web', 'data')
+_wdoc_dir = join('web', 'wdoc')
+_wdocimages_dir = join(_wdoc_dir, 'images')
+_views_dir = join('web', 'views')
+_i18n_dir = 'i18n'
 
-if environ.get('APYCOT_ROOT'):
+_pyversion = '.'.join(str(num) for num in sys.version_info[0:2])
+if '--home' in sys.argv:
     # --home install
-    pydir = 'python'
+    pydir = 'python' + _pyversion
 else:
-    python_version = '.'.join(str(num) for num in sys.version_info[0:2])
-    pydir = join('python' + python_version, 'site-packages')
+    pydir = join('python' + _pyversion, 'site-packages')
 
 try:
     data_files = [
-        # common data
-        #[join('share', 'cubicweb', 'entities'),
-        # [join(entities_dir, filename) for filename in listdir(entities_dir)]],
         # server data
         [join('share', 'cubicweb', 'schemas'),
-         [join(schema_dir, filename) for filename in listdir(schema_dir)]],
-        #[join('share', 'cubicweb', 'sobjects'),
-        # [join(sobjects_dir, filename) for filename in listdir(sobjects_dir)]],
+         [join('schemas', filename) for filename in listdir('schemas')]],
         [join('share', 'cubicweb', 'migration'),
-         [join(server_migration_dir, filename)
-          for filename in listdir(server_migration_dir)]],
+         [join(_server_migration_dir, filename)
+          for filename in listdir(_server_migration_dir)]],
         # web data
         [join('share', 'cubicweb', 'cubes', 'shared', 'data'),
-         [join(data_dir, fname) for fname in listdir(data_dir) if not isdir(join(data_dir, fname))]],
+         [join(_data_dir, fname) for fname in listdir(_data_dir)
+          if not isdir(join(_data_dir, fname))]],
         [join('share', 'cubicweb', 'cubes', 'shared', 'data', 'timeline'),
-         [join(data_dir, 'timeline', fname) for fname in listdir(join(data_dir, 'timeline'))]],
+         [join(_data_dir, 'timeline', fname) for fname in listdir(join(_data_dir, 'timeline'))]],
         [join('share', 'cubicweb', 'cubes', 'shared', 'data', 'images'),
-         [join(data_dir, 'images', fname) for fname in listdir(join(data_dir, 'images'))]],
+         [join(_data_dir, 'images', fname) for fname in listdir(join(_data_dir, 'images'))]],
         [join('share', 'cubicweb', 'cubes', 'shared', 'wdoc'),
-         [join(wdoc_dir, fname) for fname in listdir(wdoc_dir) if not isdir(join(wdoc_dir, fname))]],
+         [join(_wdoc_dir, fname) for fname in listdir(_wdoc_dir)
+          if not isdir(join(_wdoc_dir, fname))]],
         [join('share', 'cubicweb', 'cubes', 'shared', 'wdoc', 'images'),
-         [join(wdocimages_dir, fname) for fname in listdir(wdocimages_dir)]],
-        # XXX: .pt install should be handled properly in a near future version
+         [join(_wdocimages_dir, fname) for fname in listdir(_wdocimages_dir)]],
+        [join('share', 'cubicweb', 'cubes', 'shared', 'i18n'),
+         [join(_i18n_dir, fname) for fname in listdir(_i18n_dir)]],
+        # XXX: drop .pt files
         [join('lib', pydir, 'cubicweb', 'web', 'views'),
-         [join(views_dir, fname) for fname in listdir(views_dir) if fname.endswith('.pt')]],
-        [join('share', 'cubicweb', 'cubes', 'shared', 'i18n'),
-         [join(i18n_dir, fname) for fname in listdir(i18n_dir)]],
+         [join(_views_dir, fname) for fname in listdir(_views_dir)
+          if fname.endswith('.pt')]],
         # skeleton
         ]
 except OSError:
--- a/_exceptions.py	Thu May 06 08:24:46 2010 +0200
+++ b/_exceptions.py	Mon Jul 19 15:36:16 2010 +0200
@@ -65,9 +65,6 @@
     """raised when when an attempt to establish a connection failed do to wrong
     connection information (login / password or other authentication token)
     """
-    def __init__(self, *args, **kwargs):
-        super(AuthenticationError, self).__init__(*args)
-        self.__dict__.update(kwargs)
 
 class BadConnectionId(ConnectionError):
     """raised when a bad connection id is given"""
--- a/appobject.py	Thu May 06 08:24:46 2010 +0200
+++ b/appobject.py	Mon Jul 19 15:36:16 2010 +0200
@@ -111,14 +111,13 @@
     def __rand__(self, other):
         return AndSelector(other, self)
     def __iand__(self, other):
-        raise NotImplementedError('cant use inplace & (binary and)')
-
+        return AndSelector(self, other)
     def __or__(self, other):
         return OrSelector(self, other)
     def __ror__(self, other):
         return OrSelector(other, self)
     def __ior__(self, other):
-        raise NotImplementedError('cant use inplace | (binary or)')
+        return OrSelector(self, other)
 
     def __invert__(self):
         return NotSelector(self)
@@ -205,7 +204,7 @@
         return int(not score)
 
     def __str__(self):
-        return 'NOT(%s)' % super(NotSelector, self).__str__()
+        return 'NOT(%s)' % self.selector
 
 
 class yes(Selector):
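For context on the `__iand__`/`__ior__` change above: in-place `&=` and `|=` on selectors no longer raise, they build the same combined selectors as the binary operators. A minimal standalone sketch (simplified stand-ins, not the real cubicweb classes):

```python
class Selector(object):
    def __and__(self, other):
        return AndSelector(self, other)
    __iand__ = __and__              # sel &= other  ->  AndSelector(sel, other)
    def __or__(self, other):
        return OrSelector(self, other)
    __ior__ = __or__                # sel |= other  ->  OrSelector(sel, other)

class AndSelector(Selector):
    def __init__(self, *selectors):
        self.selectors = selectors

class OrSelector(AndSelector):
    pass

sel = Selector()
sel &= Selector()                   # used to raise NotImplementedError
assert isinstance(sel, AndSelector)
```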
--- a/bin/cubicweb-ctl.bat	Thu May 06 08:24:46 2010 +0200
+++ b/bin/cubicweb-ctl.bat	Mon Jul 19 15:36:16 2010 +0200
@@ -8,8 +8,8 @@
 """
 # -------------------- Python section --------------------
 import sys
-from os.path import join, dirname
-sys.path.insert(0, join(dirname(__file__), '..', '..'))
+from os.path import join, dirname, normpath
+sys.path.insert(0, normpath(join(dirname(__file__), '..', '..')))
 from cubicweb.cwctl import run
 run(sys.argv[1:])
 
--- a/cleanappl.sh	Thu May 06 08:24:46 2010 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,9 +0,0 @@
-#!/bin/sh
-
-rm -f web/test/tmpdb*
-rm -f web/tali18n.py
-
-rm -f applications/*/test/tmpdb*
-rm -f applications/*/tali18n.py
-rm -f applications/*/i18n/*_full.po
-rm -f applications/*/data/Schema.dot
--- a/cwconfig.py	Thu May 06 08:24:46 2010 +0200
+++ b/cwconfig.py	Mon Jul 19 15:36:16 2010 +0200
@@ -131,18 +131,18 @@
 
    Directory where pid files will be written
 """
+
 __docformat__ = "restructuredtext en"
 _ = unicode
 
 import sys
 import os
 import logging
-import tempfile
 from smtplib import SMTP
 from threading import Lock
-from os.path import exists, join, expanduser, abspath, normpath, basename, isdir
+from os.path import (exists, join, expanduser, abspath, normpath,
+                     basename, isdir, dirname)
 from warnings import warn
-
 from logilab.common.decorators import cached, classproperty
 from logilab.common.deprecation import deprecated
 from logilab.common.logging_ext import set_log_methods, init_log
@@ -190,6 +190,23 @@
                                  % (directory, modes))
     return modes[0]
 
+def _find_prefix(start_path=CW_SOFTWARE_ROOT):
+    """Runs along the parent directories of *start_path* (default to cubicweb source directory)
+    looking for one containing a 'share/cubicweb' directory.
+    The first matching directory is assumed as the prefix installation of cubicweb
+
+    Returns the matching prefix or None.
+    """
+    prefix = start_path
+    old_prefix = None
+    if not isdir(start_path):
+        prefix = dirname(start_path)
+    while not isdir(join(prefix, 'share', 'cubicweb')) and prefix != old_prefix:
+        old_prefix = prefix
+        prefix = dirname(prefix)
+    if isdir(join(prefix, 'share', 'cubicweb')):
+        return prefix
+    return sys.prefix
 
 # persistent options definition
 PERSISTENT_OPTIONS = (
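The `_find_prefix` helper added above can be exercised on its own; a standalone sketch of the same parent-directory walk (the start path below is illustrative):

```python
import sys
from os.path import dirname, isdir, join

def find_prefix(start_path):
    # walk up the parent directories until one contains share/cubicweb
    prefix, old_prefix = start_path, None
    if not isdir(start_path):
        prefix = dirname(start_path)
    while not isdir(join(prefix, 'share', 'cubicweb')) and prefix != old_prefix:
        old_prefix, prefix = prefix, dirname(prefix)
    if isdir(join(prefix, 'share', 'cubicweb')):
        return prefix
    return sys.prefix               # fall back to the interpreter prefix

print(find_prefix('/usr/local/lib/python2.6/site-packages/cubicweb'))
```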
@@ -262,6 +279,11 @@
 
 CWDEV = exists(join(CW_SOFTWARE_ROOT, '.hg'))
 
+try:
+    _INSTALL_PREFIX = os.environ['CW_INSTALL_PREFIX']
+except KeyError:
+    _INSTALL_PREFIX = _find_prefix()
+
 class CubicWebNoAppConfiguration(ConfigurationMixIn):
     """base class for cubicweb configuration without a specific instance directory
     """
@@ -270,58 +292,51 @@
     name = None
     # log messages format (see logging module documentation for available keys)
     log_format = '%(asctime)s - (%(name)s) %(levelname)s: %(message)s'
-    # nor remove appobjects based on unused interface
+    # the format below can be useful to debug multi thread issues:
+    # log_format = '%(asctime)s - [%(threadName)s] (%(name)s) %(levelname)s: %(message)s'
+    # nor remove appobjects based on unused interface [???]
     cleanup_interface_sobjects = True
     # debug mode
     debugmode = False
 
-    if os.environ.get('APYCOT_ROOT'):
-        mode = 'test'
-        # allow to test cubes within apycot using cubicweb not installed by
-        # apycot
-        if __file__.startswith(os.environ['APYCOT_ROOT']):
-            CUBES_DIR = '%(APYCOT_ROOT)s/local/share/cubicweb/cubes/' % os.environ
-            # create __init__ file
-            file(join(CUBES_DIR, '__init__.py'), 'w').close()
-        else:
-            CUBES_DIR = '/usr/share/cubicweb/cubes/'
-    elif (CWDEV and _forced_mode != 'system'):
+
+    if (CWDEV and _forced_mode != 'system'):
         mode = 'user'
-        CUBES_DIR = abspath(normpath(join(CW_SOFTWARE_ROOT, '../cubes')))
+        _CUBES_DIR = join(CW_SOFTWARE_ROOT, '../cubes')
     else:
-        if _forced_mode == 'user':
-            mode = 'user'
-        else:
-            mode = 'system'
-        CUBES_DIR = '/usr/share/cubicweb/cubes/'
+        mode = _forced_mode or 'system'
+        _CUBES_DIR = join(_INSTALL_PREFIX, 'share', 'cubicweb', 'cubes')
+
+    CUBES_DIR = env_path('CW_CUBES_DIR', _CUBES_DIR, 'cubes', checkexists=False)
+    CUBES_PATH = os.environ.get('CW_CUBES_PATH', '').split(os.pathsep)
 
     options = (
        ('log-threshold',
          {'type' : 'string', # XXX use a dedicated type?
           'default': 'WARNING',
           'help': 'server\'s log level',
-          'group': 'main', 'inputlevel': 1,
+          'group': 'main', 'level': 1,
           }),
         # pyro options
         ('pyro-instance-id',
          {'type' : 'string',
           'default': Method('default_instance_id'),
           'help': 'identifier of the CubicWeb instance in the Pyro name server',
-          'group': 'pyro', 'inputlevel': 1,
+          'group': 'pyro', 'level': 1,
           }),
         ('pyro-ns-host',
          {'type' : 'string',
           'default': '',
           'help': 'Pyro name server\'s host. If not set, will be detected by a \
 broadcast query. It may contains port information using <host>:<port> notation.',
-          'group': 'pyro', 'inputlevel': 1,
+          'group': 'pyro', 'level': 1,
           }),
         ('pyro-ns-group',
          {'type' : 'string',
           'default': 'cubicweb',
           'help': 'Pyro name server\'s group where the repository will be \
 registered.',
-          'group': 'pyro', 'inputlevel': 1,
+          'group': 'pyro', 'level': 1,
           }),
         # common configuration options which are potentially required as soon as
         # you're using "base" application objects (ie to really server/web
@@ -330,13 +345,13 @@
          {'type' : 'string',
           'default': None,
           'help': 'web server root url',
-          'group': 'main', 'inputlevel': 1,
+          'group': 'main', 'level': 1,
           }),
         ('allow-email-login',
          {'type' : 'yn',
           'default': False,
           'help': 'allow users to login with their primary email if set',
-          'group': 'main', 'inputlevel': 2,
+          'group': 'main', 'level': 2,
           }),
         ('use-request-subdomain',
          {'type' : 'yn',
@@ -344,18 +359,17 @@
           'help': ('if set, base-url subdomain is replaced by the request\'s '
                    'host, to help managing sites with several subdomains in a '
                    'single cubicweb instance'),
-          'group': 'main', 'inputlevel': 1,
+          'group': 'main', 'level': 1,
           }),
         ('mangle-emails',
          {'type' : 'yn',
           'default': False,
           'help': "don't display actual email addresses but mangle them if \
 this option is set to yes",
-          'group': 'email', 'inputlevel': 3,
+          'group': 'email', 'level': 3,
           }),
         )
     # static and class methods used to get instance independant resources ##
-
     @staticmethod
     def cubicweb_version():
         """return installed cubicweb version"""
@@ -387,28 +401,28 @@
 
     @classmethod
     def available_cubes(cls):
+        import re
         cubes = set()
         for directory in cls.cubes_search_path():
             if not exists(directory):
                 cls.error('unexistant directory in cubes search path: %s'
-                           % directory)
+                          % directory)
                 continue
             for cube in os.listdir(directory):
-                if isdir(join(directory, cube)) and not cube == 'shared':
+                if cube == 'shared':
+                    continue
+                if not re.match('[_A-Za-z][_A-Za-z0-9]*$', cube):
+                    continue # skip invalid python package name
+                cubedir = join(directory, cube)
+                if isdir(cubedir) and exists(join(cubedir, '__init__.py')):
                     cubes.add(cube)
         return sorted(cubes)
 
     @classmethod
     def cubes_search_path(cls):
         """return the path of directories where cubes should be searched"""
-        path = []
-        try:
-            for directory in os.environ['CW_CUBES_PATH'].split(os.pathsep):
-                directory = abspath(normpath(directory))
-                if exists(directory) and not directory in path:
-                    path.append(directory)
-        except KeyError:
-            pass
+        path = [abspath(normpath(directory)) for directory in cls.CUBES_PATH
+                if directory.strip() and exists(directory.strip())]
         if not cls.CUBES_DIR in path and exists(cls.CUBES_DIR):
             path.append(cls.CUBES_DIR)
         return path
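The rewritten `cubes_search_path` now takes extra lookup directories from the `CW_CUBES_PATH` environment variable (split on `os.pathsep`), drops blank or non-existent entries and appends `CUBES_DIR` last. A rough standalone sketch of that behaviour (paths are illustrative):

```python
import os
from os.path import abspath, exists, normpath

def cubes_search_path(cubes_dir, env=os.environ):
    raw = env.get('CW_CUBES_PATH', '').split(os.pathsep)
    path = [abspath(normpath(d)) for d in raw
            if d.strip() and exists(d.strip())]
    if cubes_dir not in path and exists(cubes_dir):
        path.append(cubes_dir)
    return path

print(cubes_search_path('/usr/share/cubicweb/cubes',
                        {'CW_CUBES_PATH': '/srv/cubes:/does/not/exist'}))
```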
@@ -424,7 +438,7 @@
     @classmethod
     def cube_dir(cls, cube):
         """return the cube directory for the given cube id,
-        raise ConfigurationError if it doesn't exists
+        raise `ConfigurationError` if it doesn't exists
         """
         for directory in cls.cubes_search_path():
             cubedir = join(directory, cube)
@@ -442,10 +456,12 @@
         """return the information module for the given cube"""
         cube = CW_MIGRATION_MAP.get(cube, cube)
         try:
-            return getattr(__import__('cubes.%s.__pkginfo__' % cube), cube).__pkginfo__
+            parent = __import__('cubes.%s.__pkginfo__' % cube)
+            return getattr(parent, cube).__pkginfo__
         except Exception, ex:
-            raise ConfigurationError('unable to find packaging information for '
-                                     'cube %s (%s: %s)' % (cube, ex.__class__.__name__, ex))
+            raise ConfigurationError(
+                'unable to find packaging information for cube %s (%s: %s)'
+                % (cube, ex.__class__.__name__, ex))
 
     @classmethod
     def cube_version(cls, cube):
@@ -457,14 +473,43 @@
         return Version(version)
 
     @classmethod
+    def _cube_deps(cls, cube, key, oldkey):
+        """return cubicweb cubes used by the given cube"""
+        pkginfo = cls.cube_pkginfo(cube)
+        try:
+            # explicit __xxx_cubes__ attribute
+            deps = getattr(pkginfo, key)
+        except AttributeError:
+            # deduce cubes from generic __xxx__ attribute
+            try:
+                gendeps = getattr(pkginfo, key.replace('_cubes', ''))
+            except AttributeError:
+                # bw compat
+                if hasattr(pkginfo, oldkey):
+                    warn('[3.8] cube %s: %s is deprecated, use %s dict'
+                         % (cube, oldkey, key), DeprecationWarning)
+                    deps = getattr(pkginfo, oldkey)
+                else:
+                    deps = {}
+            else:
+                deps = dict( (x[len('cubicweb-'):], v)
+                             for x, v in gendeps.iteritems()
+                             if x.startswith('cubicweb-'))
+        if not isinstance(deps, dict):
+            deps = dict((key, None) for key in deps)
+            warn('[3.8] cube %s should define %s as a dict' % (cube, key),
+                 DeprecationWarning)
+        return deps
+
+    @classmethod
     def cube_dependencies(cls, cube):
         """return cubicweb cubes used by the given cube"""
-        return getattr(cls.cube_pkginfo(cube), '__use__', ())
+        return cls._cube_deps(cube, '__depends_cubes__', '__use__')
 
     @classmethod
     def cube_recommends(cls, cube):
         """return cubicweb cubes recommended by the given cube"""
-        return getattr(cls.cube_pkginfo(cube), '__recommend__', ())
+        return cls._cube_deps(cube, '__recommends_cubes__', '__recommend__')
 
     @classmethod
     def expand_cubes(cls, cubes, with_recommends=False):
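`_cube_deps` above lets a cube declare its dependencies through the generic `__depends__` dict (as `__pkginfo__.py` itself now does) instead of the deprecated `__use__` list; `cubicweb-` prefixed keys become cube dependencies. An illustrative extract from a hypothetical cube's `__pkginfo__.py`, followed by the same key filtering the helper applies:

```python
# hypothetical cube __pkginfo__.py
__depends__ = {
    'cubicweb': '>= 3.8.0',
    'cubicweb-comment': '>= 1.6.0',   # dependency on the 'comment' cube
    'lxml': '',
}

# same derivation as in _cube_deps: keep only 'cubicweb-' prefixed entries
deps = dict((name[len('cubicweb-'):], version)
            for name, version in __depends__.items()
            if name.startswith('cubicweb-'))
assert deps == {'comment': '>= 1.6.0'}
```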
@@ -493,31 +538,19 @@
         """reorder cubes from the top level cubes to inner dependencies
         cubes
         """
-        from logilab.common.graph import get_cycles
+        from logilab.common.graph import ordered_nodes, UnorderableGraph
         graph = {}
         for cube in cubes:
             cube = CW_MIGRATION_MAP.get(cube, cube)
-            deps = cls.cube_dependencies(cube) + \
-                   cls.cube_recommends(cube)
-            graph[cube] = set(dep for dep in deps if dep in cubes)
-        cycles = get_cycles(graph)
-        if cycles:
-            cycles = '\n'.join(' -> '.join(cycle) for cycle in cycles)
+            graph[cube] = set(dep for dep in cls.cube_dependencies(cube)
+                              if dep in cubes)
+            graph[cube] |= set(dep for dep in cls.cube_recommends(cube)
+                               if dep in cubes)
+        try:
+            return ordered_nodes(graph)
+        except UnorderableGraph, ex:
             raise ConfigurationError('cycles in cubes dependencies: %s'
-                                     % cycles)
-        cubes = []
-        while graph:
-            # sorted to get predictable results
-            for cube, deps in sorted(graph.items()):
-                if not deps:
-                    cubes.append(cube)
-                    del graph[cube]
-                    for deps in graph.itervalues():
-                        try:
-                            deps.remove(cube)
-                        except KeyError:
-                            continue
-        return tuple(reversed(cubes))
+                                     % ex.cycles)
 
     @classmethod
     def cls_adjust_sys_path(cls):
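`reorder_cubes` now delegates the topological ordering to `logilab.common.graph.ordered_nodes` and simply builds the dependency graph. A small sketch of the graph it hands over (cube names are made up):

```python
cubes = ['blog', 'comment', 'tag']
dependencies = {'blog': ['comment', 'tag'], 'comment': [], 'tag': []}
recommends = {'blog': [], 'comment': [], 'tag': []}

graph = {}
for cube in cubes:
    graph[cube] = set(dep for dep in dependencies[cube] if dep in cubes)
    graph[cube] |= set(dep for dep in recommends[cube] if dep in cubes)

# ordered_nodes(graph) then yields cubes from the top level down to their
# dependencies, and raises UnorderableGraph when the graph contains cycles,
# which reorder_cubes turns into a ConfigurationError.
print(graph)
```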
@@ -636,6 +669,7 @@
         self.adjust_sys_path()
         self.load_defaults()
         self.translations = {}
+        self._site_loaded = set()
         # don't register ReStructured Text directives by simple import, avoid pb
         # with eg sphinx.
         # XXX should be done properly with a function from cw.uicfg
@@ -647,6 +681,7 @@
             cw_rest_init()
 
     def adjust_sys_path(self):
+        # overriden in CubicWebConfiguration
         self.cls_adjust_sys_path()
 
     def init_log(self, logthreshold=None, debug=False,
@@ -658,7 +693,16 @@
             else:
                 logthreshold = self['log-threshold']
         self.debugmode = debug
-        init_log(debug, syslog, logthreshold, logfile, self.log_format)
+        if sys.platform == 'win32':
+            # no logrotate on win32, so use logging rotation facilities
+            # for now, hard code weekly rotation every sunday, and 52 weeks kept
+            # idea: make this configurable?
+            init_log(debug, syslog, logthreshold, logfile, self.log_format,
+                     rotation_parameters={'when': 'W6', # every sunday
+                                          'interval': 1,
+                                          'backupCount': 52})
+        else:
+            init_log(debug, syslog, logthreshold, logfile, self.log_format)
         # configure simpleTal logger
         logging.getLogger('simpleTAL').setLevel(logging.ERROR)
 
@@ -668,6 +712,34 @@
         """
         return []
 
+    apphome = None
+
+    def load_site_cubicweb(self, paths=None):
+        """load instance's specific site_cubicweb file"""
+        if paths is None:
+            paths = self.cubes_path()
+            if self.apphome is not None:
+                paths = [self.apphome] + paths
+        for path in reversed(paths):
+            sitefile = join(path, 'site_cubicweb.py')
+            if exists(sitefile) and not sitefile in self._site_loaded:
+                self._load_site_cubicweb(sitefile)
+                self._site_loaded.add(sitefile)
+            else:
+                sitefile = join(path, 'site_erudi.py')
+                if exists(sitefile) and not sitefile in self._site_loaded:
+                    self._load_site_cubicweb(sitefile)
+                    self._site_loaded.add(sitefile)
+                    self.warning('[3.5] site_erudi.py is deprecated, should be '
+                                 'renamed to site_cubicweb.py')
+
+    def _load_site_cubicweb(self, sitefile):
+        # XXX extrapath argument to load_module_from_file only in lgc > 0.50.2
+        from logilab.common.modutils import load_module_from_modpath, modpath_from_file
+        module = load_module_from_modpath(modpath_from_file(sitefile, self.extrapath))
+        self.info('%s loaded', sitefile)
+        return module
+
     def eproperty_definitions(self):
         cfg = self.persistent_options_configuration()
         for section, options in cfg.options_by_section():
@@ -696,35 +768,24 @@
         """
         return None
 
+
 class CubicWebConfiguration(CubicWebNoAppConfiguration):
     """base class for cubicweb server and web configurations"""
 
-    INSTANCES_DATA_DIR = None
-    if os.environ.get('APYCOT_ROOT'):
-        root = os.environ['APYCOT_ROOT']
-        REGISTRY_DIR = '%s/etc/cubicweb.d/' % root
-        if not exists(REGISTRY_DIR):
-            os.makedirs(REGISTRY_DIR)
-        RUNTIME_DIR = tempfile.gettempdir()
-        # allow to test cubes within apycot using cubicweb not installed by
-        # apycot
-        if __file__.startswith(os.environ['APYCOT_ROOT']):
-            MIGRATION_DIR = '%s/local/share/cubicweb/migration/' % root
+    if CubicWebNoAppConfiguration.mode == 'user':
+        _INSTANCES_DIR = expanduser('~/etc/cubicweb.d/')
+    else: #mode = 'system'
+        if _INSTALL_PREFIX == '/usr':
+            _INSTANCES_DIR = '/etc/cubicweb.d/'
         else:
-            MIGRATION_DIR = '/usr/share/cubicweb/migration/'
-    else:
-        if CubicWebNoAppConfiguration.mode == 'user':
-            REGISTRY_DIR = expanduser('~/etc/cubicweb.d/')
-            RUNTIME_DIR = tempfile.gettempdir()
-            INSTANCES_DATA_DIR = REGISTRY_DIR
-        else: #mode = 'system'
-            REGISTRY_DIR = '/etc/cubicweb.d/'
-            RUNTIME_DIR = '/var/run/cubicweb/'
-            INSTANCES_DATA_DIR = '/var/lib/cubicweb/instances/'
-        if CWDEV:
-            MIGRATION_DIR = join(CW_SOFTWARE_ROOT, 'misc', 'migration')
-        else:
-            MIGRATION_DIR = '/usr/share/cubicweb/migration/'
+            _INSTANCES_DIR = join(_INSTALL_PREFIX, 'etc', 'cubicweb.d')
+
+    if os.environ.get('APYCOT_ROOT'):
+        _cubes_init = join(CubicWebNoAppConfiguration.CUBES_DIR, '__init__.py')
+        if not exists(_cubes_init):
+            file(join(_cubes_init), 'w').close()
+        if not exists(_INSTANCES_DIR):
+            os.makedirs(_INSTANCES_DIR)
 
     # for some commands (creation...) we don't want to initialize gettext
     set_language = True
@@ -736,57 +797,51 @@
          {'type' : 'string',
           'default': Method('default_log_file'),
           'help': 'file where output logs should be written',
-          'group': 'main', 'inputlevel': 2,
+          'group': 'main', 'level': 2,
           }),
         # email configuration
         ('smtp-host',
          {'type' : 'string',
           'default': 'mail',
           'help': 'hostname of the SMTP mail server',
-          'group': 'email', 'inputlevel': 1,
+          'group': 'email', 'level': 1,
           }),
         ('smtp-port',
          {'type' : 'int',
           'default': 25,
           'help': 'listening port of the SMTP mail server',
-          'group': 'email', 'inputlevel': 1,
+          'group': 'email', 'level': 1,
           }),
         ('sender-name',
          {'type' : 'string',
           'default': Method('default_instance_id'),
           'help': 'name used as HELO name for outgoing emails from the \
 repository.',
-          'group': 'email', 'inputlevel': 2,
+          'group': 'email', 'level': 2,
           }),
         ('sender-addr',
          {'type' : 'string',
           'default': 'cubicweb@mydomain.com',
           'help': 'email address used as HELO address for outgoing emails from \
 the repository',
-          'group': 'email', 'inputlevel': 1,
+          'group': 'email', 'level': 1,
           }),
         )
 
     @classmethod
-    def runtime_dir(cls):
-        """run time directory for pid file..."""
-        return env_path('CW_RUNTIME_DIR', cls.RUNTIME_DIR, 'run time')
-
-    @classmethod
-    def registry_dir(cls):
+    def instances_dir(cls):
         """return the control directory"""
-        return env_path('CW_INSTANCES_DIR', cls.REGISTRY_DIR, 'registry')
-
-    @classmethod
-    def instance_data_dir(cls):
-        """return the instance data directory"""
-        return env_path('CW_INSTANCES_DATA_DIR', cls.INSTANCES_DATA_DIR,
-                        'additional data')
+        return env_path('CW_INSTANCES_DIR', cls._INSTANCES_DIR, 'registry')
 
     @classmethod
     def migration_scripts_dir(cls):
         """cubicweb migration scripts directory"""
-        return env_path('CW_MIGRATION_DIR', cls.MIGRATION_DIR, 'migration')
+        if CWDEV:
+            return join(CW_SOFTWARE_ROOT, 'misc', 'migration')
+        mdir = join(_INSTALL_PREFIX, 'share', 'cubicweb', 'migration')
+        if not exists(mdir):
+            raise ConfigurationError('migration path %s doesn\'t exist' % mdir)
+        return mdir
 
     @classmethod
     def config_for(cls, appid, config=None):
@@ -809,9 +864,10 @@
         """return the home directory of the instance with the given
         instance id
         """
-        home = join(cls.registry_dir(), appid)
+        home = join(cls.instances_dir(), appid)
         if not exists(home):
-            raise ConfigurationError('no such instance %s (check it exists with "cubicweb-ctl list")' % appid)
+            raise ConfigurationError('no such instance %s (check it exists with'
+                                     ' "cubicweb-ctl list")' % appid)
         return home
 
     MODES = ('common', 'repository', 'Any', 'web')
@@ -834,7 +890,9 @@
     def default_log_file(self):
         """return default path to the log file of the instance'server"""
         if self.mode == 'user':
-            basepath = join(tempfile.gettempdir(), '%s-%s' % (basename(self.appid), self.name))
+            import tempfile
+            basepath = join(tempfile.gettempdir(), '%s-%s' % (
+                basename(self.appid), self.name))
             path = basepath + '.log'
             i = 1
             while exists(path) and i < 100: # arbitrary limit to avoid infinite loop
@@ -849,7 +907,13 @@
 
     def default_pid_file(self):
         """return default path to the pid file of the instance'server"""
-        return join(self.runtime_dir(), '%s-%s.pid' % (self.appid, self.name))
+        if self.mode == 'system':
+            # XXX not under _INSTALL_PREFIX, right?
+            rtdir = env_path('CW_RUNTIME_DIR', '/var/run/cubicweb/', 'run time')
+        else:
+            import tempfile
+            rtdir = env_path('CW_RUNTIME_DIR', tempfile.gettempdir(), 'run time')
+        return join(rtdir, '%s-%s.pid' % (self.appid, self.name))
 
     # instance methods used to get instance specific resources #############
 
@@ -857,7 +921,6 @@
         self.appid = appid
         CubicWebNoAppConfiguration.__init__(self)
         self._cubes = None
-        self._site_loaded = set()
         self.load_file_configuration(self.main_config_file())
 
     def adjust_sys_path(self):
@@ -869,11 +932,17 @@
 
     @property
     def apphome(self):
-        return join(self.registry_dir(), self.appid)
+        return join(self.instances_dir(), self.appid)
 
     @property
     def appdatahome(self):
-        return join(self.instance_data_dir(), self.appid)
+        if self.mode == 'system':
+            # XXX not under _INSTALL_PREFIX, right?
+            iddir = '/var/lib/cubicweb/instances/'
+        else:
+            iddir = self.instances_dir()
+        iddir = env_path('CW_INSTANCES_DATA_DIR', iddir, 'additional data')
+        return join(iddir, self.appid)
 
     def init_cubes(self, cubes):
         assert self._cubes is None, self._cubes
@@ -919,39 +988,12 @@
 
     @cached
     def instance_md5_version(self):
-        import md5
+        import hashlib
         infos = []
         for pkg in self.cubes():
             version = self.cube_version(pkg)
             infos.append('%s-%s' % (pkg, version))
-        return md5.new(';'.join(infos)).hexdigest()
-
-    def load_site_cubicweb(self):
-        """load instance's specific site_cubicweb file"""
-        for path in reversed([self.apphome] + self.cubes_path()):
-            sitefile = join(path, 'site_cubicweb.py')
-            if exists(sitefile) and not sitefile in self._site_loaded:
-                self._load_site_cubicweb(sitefile)
-                self._site_loaded.add(sitefile)
-            else:
-                sitefile = join(path, 'site_erudi.py')
-                if exists(sitefile) and not sitefile in self._site_loaded:
-                    self._load_site_cubicweb(sitefile)
-                    self._site_loaded.add(sitefile)
-                    self.warning('[3.5] site_erudi.py is deprecated, should be renamed to site_cubicweb.py')
-
-    def _load_site_cubicweb(self, sitefile):
-        # XXX extrapath argument to load_module_from_file only in lgc > 0.46
-        from logilab.common.modutils import load_module_from_modpath, modpath_from_file
-        def load_module_from_file(filepath, path=None, use_sys=1, extrapath=None):
-            return load_module_from_modpath(modpath_from_file(filepath, extrapath),
-                                            path, use_sys)
-        module = load_module_from_file(sitefile, extrapath=self.extrapath)
-        self.info('%s loaded', sitefile)
-        # cube specific options
-        if getattr(module, 'options', None):
-            self.register_options(module.options)
-            self.load_defaults()
+        return hashlib.md5(';'.join(infos)).hexdigest()
 
     def load_configuration(self):
         """load instance's configuration files"""
@@ -960,6 +1002,13 @@
             # init gettext
             self._set_language()
 
+    def _load_site_cubicweb(self, sitefile):
+        # overriden to register cube specific options
+        mod = super(CubicWebConfiguration, self)._load_site_cubicweb(sitefile)
+        if getattr(mod, 'options', None):
+            self.register_options(mod.options)
+            self.load_defaults()
+
     def init_log(self, logthreshold=None, debug=False, force=False):
         """init the log service"""
         if not force and hasattr(self, '_logging_initialized'):
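The override above registers cube-specific options found in a `site_cubicweb.py`. A hypothetical cube-level `site_cubicweb.py` that would be picked up (option name and default are made up; note the `level` key replacing the old `inputlevel`):

```python
# hypothetical <cube>/site_cubicweb.py
options = (
    ('my-cube-option',
     {'type': 'string',
      'default': 'some value',
      'help': 'example cube specific option',
      'group': 'main', 'level': 2,
      }),
)
```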
@@ -1054,7 +1103,8 @@
             SMTP_LOCK.release()
         return True
 
-set_log_methods(CubicWebConfiguration, logging.getLogger('cubicweb.configuration'))
+set_log_methods(CubicWebNoAppConfiguration,
+                logging.getLogger('cubicweb.configuration'))
 
 # alias to get a configuration instance from an instance id
 instance_configuration = CubicWebConfiguration.config_for
@@ -1123,7 +1173,7 @@
         def as_sql(self, backend, args):
             raise NotImplementedError('source only callback')
 
-        def source_execute(self, source, value):
+        def source_execute(self, source, session, value):
             fpath = source.binary_to_str(value)
             try:
                 return Binary(fpath)
--- a/cwctl.py	Thu May 06 08:24:46 2010 +0200
+++ b/cwctl.py	Mon Jul 19 15:36:16 2010 +0200
@@ -26,6 +26,7 @@
 # possible (for cubicweb-ctl reactivity, necessary for instance for usable bash
 # completion). So import locally in command helpers.
 import sys
+from warnings import warn
 from os import remove, listdir, system, pathsep
 try:
     from os import kill, getpgid
@@ -98,7 +99,7 @@
         Instance used by another one should appears first in the file (one
         instance per line)
         """
-        regdir = cwcfg.registry_dir()
+        regdir = cwcfg.instances_dir()
         _allinstances = list_instances(regdir)
         if isfile(join(regdir, 'startorder')):
             allinstances = []
@@ -132,29 +133,33 @@
         self.run_args(args, askconfirm)
 
     def run_args(self, args, askconfirm):
+        status = 0
         for appid in args:
             if askconfirm:
                 print '*'*72
                 if not ASK.confirm('%s instance %r ?' % (self.name, appid)):
                     continue
-            self.run_arg(appid)
+            status = max(status, self.run_arg(appid))
+        sys.exit(status)
 
     def run_arg(self, appid):
         cmdmeth = getattr(self, '%s_instance' % self.name)
         try:
-            cmdmeth(appid)
+            status = cmdmeth(appid)
         except (KeyboardInterrupt, SystemExit):
             print >> sys.stderr, '%s aborted' % self.name
-            sys.exit(2) # specific error code
+            return 2 # specific error code
         except (ExecutionError, ConfigurationError), ex:
             print >> sys.stderr, 'instance %s not %s: %s' % (
                 appid, self.actionverb, ex)
+            status = 4
         except Exception, ex:
             import traceback
             traceback.print_exc()
             print >> sys.stderr, 'instance %s not %s: %s' % (
                 appid, self.actionverb, ex)
-
+            status = 8
+        return status
 
 class InstanceCommandFork(InstanceCommand):
     """Same as `InstanceCommand`, but command is forked in a new environment
@@ -181,86 +186,6 @@
 
 # base commands ###############################################################
 
-def version_strictly_lower(a, b):
-    from logilab.common.changelog import Version
-    if a:
-        a = Version(a)
-    if b:
-        b = Version(b)
-    return a < b
-
-def max_version(a, b):
-    from logilab.common.changelog import Version
-    return str(max(Version(a), Version(b)))
-
-class ConfigurationProblem(object):
-    """Each cube has its own list of dependencies on other cubes/versions.
-
-    The ConfigurationProblem is used to record the loaded cubes, then to detect
-    inconsistencies in their dependencies.
-
-    See configuration management on wikipedia for litterature.
-    """
-
-    def __init__(self):
-        self.cubes = {}
-
-    def add_cube(self, name, info):
-        self.cubes[name] = info
-
-    def solve(self):
-        self.warnings = []
-        self.errors = []
-        self.read_constraints()
-        for cube, versions in sorted(self.constraints.items()):
-            oper, version = None, None
-            # simplify constraints
-            if versions:
-                for constraint in versions:
-                    op, ver = constraint
-                    if oper is None:
-                        oper = op
-                        version = ver
-                    elif op == '>=' and oper == '>=':
-                        version = max_version(ver, version)
-                    else:
-                        print 'unable to handle this case', oper, version, op, ver
-            # "solve" constraint satisfaction problem
-            if cube not in self.cubes:
-                self.errors.append( ('add', cube, version) )
-            elif versions:
-                lower_strict = version_strictly_lower(self.cubes[cube].version, version)
-                if oper in ('>=','='):
-                    if lower_strict:
-                        self.errors.append( ('update', cube, version) )
-                else:
-                    print 'unknown operator', oper
-
-    def read_constraints(self):
-        self.constraints = {}
-        self.reverse_constraints = {}
-        for cube, info in self.cubes.items():
-            if hasattr(info,'__depends_cubes__'):
-                use = info.__depends_cubes__
-                if not isinstance(use, dict):
-                    use = dict((key, None) for key in use)
-                    self.warnings.append('cube %s should define __depends_cubes__ as a dict not a list')
-            elif hasattr(info, '__use__'):
-                self.warnings.append('cube %s should define __depends_cubes__' % cube)
-                use = dict((key, None) for key in info.__use__)
-            else:
-                continue
-            for name, constraint in use.items():
-                self.constraints.setdefault(name,set())
-                if constraint:
-                    try:
-                        oper, version = constraint.split()
-                        self.constraints[name].add( (oper, version) )
-                    except:
-                        self.warnings.append('cube %s depends on %s but constraint badly formatted: %s'
-                                             % (cube, name, constraint))
-                self.reverse_constraints.setdefault(name, set()).add(cube)
-
 class ListCommand(Command):
     """List configurations, cubes and instances.
 
@@ -276,7 +201,8 @@
     def run(self, args):
         """run the command with its specific arguments"""
         if args:
-            raise BadCommandUsage('Too much arguments')
+            raise BadCommandUsage('Too many arguments')
+        from cubicweb.migration import ConfigurationProblem
         print 'CubicWeb %s (%s mode)' % (cwcfg.cubicweb_version(), cwcfg.mode)
         print
         print 'Available configurations:'
@@ -288,7 +214,7 @@
                     continue
                 print '   ', line
         print
-        cfgpb = ConfigurationProblem()
+        cfgpb = ConfigurationProblem(cwcfg)
         try:
             cubesdir = pathsep.join(cwcfg.cubes_search_path())
             namesize = max(len(x) for x in cwcfg.available_cubes())
@@ -299,26 +225,31 @@
         else:
             print 'Available cubes (%s):' % cubesdir
             for cube in cwcfg.available_cubes():
-                if cube in ('CVS', '.svn', 'shared', '.hg'):
-                    continue
                 try:
                     tinfo = cwcfg.cube_pkginfo(cube)
                     tversion = tinfo.version
-                    cfgpb.add_cube(cube, tinfo)
+                    cfgpb.add_cube(cube, tversion)
                 except ConfigurationError:
                     tinfo = None
                     tversion = '[missing cube information]'
                 print '* %s %s' % (cube.ljust(namesize), tversion)
                 if self.config.verbose:
-                    shortdesc = tinfo and (getattr(tinfo, 'short_desc', '')
-                                           or tinfo.__doc__)
-                    if shortdesc:
-                        print '    '+ '    \n'.join(shortdesc.splitlines())
+                    if tinfo:
+                        descr = getattr(tinfo, 'description', '')
+                        if not descr:
+                            descr = getattr(tinfo, 'short_desc', '')
+                            if descr:
+                                warn('[3.8] short_desc is deprecated, update %s'
+                                     ' pkginfo' % cube, DeprecationWarning)
+                            else:
+                                descr = tinfo.__doc__
+                        if descr:
+                            print '    '+ '    \n'.join(descr.splitlines())
                     modes = detect_available_modes(cwcfg.cube_dir(cube))
                     print '    available modes: %s' % ', '.join(modes)
         print
         try:
-            regdir = cwcfg.registry_dir()
+            regdir = cwcfg.instances_dir()
         except ConfigurationError, ex:
             print 'No instance available:', ex
             print
@@ -354,7 +285,7 @@
                     print 'is not installed, but required by %s' % ' '.join(cfgpb.reverse_constraints[cube])
                 else:
                     print '* cube %s version %s is installed, but version %s is required by (%s)' % (
-                        cube, cfgpb.cubes[cube].version, version, ', '.join(cfgpb.reverse_constraints[cube]))
+                        cube, cfgpb.cubes[cube], version, ', '.join(cfgpb.reverse_constraints[cube]))
 
 class CreateInstanceCommand(Command):
     """Create an instance from a cube. This is an unified
@@ -423,7 +354,7 @@
         helper.bootstrap(cubes, self.config.config_level)
         # input for cubes specific options
         for section in set(sect.lower() for sect, opt, optdict in config.all_options()
-                           if optdict.get('inputlevel') <= self.config.config_level):
+                           if optdict.get('level') <= self.config.config_level):
             if section not in ('main', 'email', 'pyro'):
                 print '\n' + underline_title('%s options' % section)
                 config.input_config(section, self.config.config_level)
@@ -626,7 +557,7 @@
     actionverb = 'restarted'
 
     def run_args(self, args, askconfirm):
-        regdir = cwcfg.registry_dir()
+        regdir = cwcfg.instances_dir()
         if not isfile(join(regdir, 'startorder')) or len(args) <= 1:
             # no specific startorder
             super(RestartInstanceCommand, self).run_args(args, askconfirm)
@@ -680,6 +611,7 @@
     @staticmethod
     def status_instance(appid):
         """print running status information for an instance"""
+        status = 0
         for mode in cwcfg.possible_configurations(appid):
             config = cwcfg.config_for(appid, mode)
             print '[%s-%s]' % (appid, mode),
@@ -690,6 +622,7 @@
                 continue
             if not exists(pidf):
                 print "doesn't seem to be running"
+                status = 1
                 continue
             pid = int(open(pidf).read().strip())
             # trick to guess whether or not the process is running
@@ -697,9 +630,10 @@
                 getpgid(pid)
             except OSError:
                 print "should be running with pid %s but the process can not be found" % pid
+                status = 1
                 continue
             print "running with pid %s" % (pid)
-
+        return status
 
 class UpgradeInstanceCommand(InstanceCommandFork):
     """Upgrade an instance after cubicweb and/or component(s) upgrade.
@@ -797,7 +731,9 @@
         if cubicwebversion > applcubicwebversion:
             toupgrade.append(('cubicweb', applcubicwebversion, cubicwebversion))
         if not self.config.fs_only and not toupgrade:
-            print '-> no software migration needed for instance %s.' % appid
+            print '-> no data migration needed for instance %s.' % appid
+            self.i18nupgrade(config)
+            mih.shutdown()
             return
         for cube, fromversion, toversion in toupgrade:
             print '-> migration needed from %s to %s for %s' % (fromversion, toversion, cube)
@@ -808,6 +744,22 @@
         mih.migrate(vcconf, reversed(toupgrade), self.config)
         # rewrite main configuration file
         mih.rewrite_configuration()
+        mih.shutdown()
+        # handle i18n upgrade
+        if not self.i18nupgrade(config):
+            return
+        print
+        print '-> instance migrated.'
+        if not (CWDEV or self.config.nostartstop):
+            # restart instance through fork to get a proper environment, avoid
+            # uicfg pb (and probably gettext catalogs, to check...)
+            forkcmd = '%s start %s' % (sys.argv[0], appid)
+            status = system(forkcmd)
+            if status:
+                print '%s exited with status %s' % (forkcmd, status)
+        print
+
+    def i18nupgrade(self, config):
         # handle i18n upgrade:
         # * install new languages
         # * recompile catalogs
@@ -819,21 +771,10 @@
         if errors:
             print '\n'.join(errors)
             if not ASK.confirm('Error while compiling message catalogs, '
-                               'continue anyway ?'):
+                               'continue anyway?'):
                 print '-> migration not completed.'
-                return
-        mih.shutdown()
-        print
-        print '-> instance migrated.'
-        if not (CWDEV or self.config.nostartstop):
-            # restart instance through fork to get a proper environment, avoid
-            # uicfg pb (and probably gettext catalogs, to check...)
-            forkcmd = '%s start %s' % (sys.argv[0], appid)
-            status = system(forkcmd)
-            if status:
-                print '%s exited with status %s' % (forkcmd, status)
-        print
-
+                return False
+        return True
 
 class ShellCommand(Command):
     """Run an interactive migration shell on an instance. This is a python shell
@@ -968,7 +909,7 @@
 
     def run(self, args):
         """run the command with its specific arguments"""
-        regdir = cwcfg.registry_dir()
+        regdir = cwcfg.instances_dir()
         for appid in sorted(listdir(regdir)):
             print appid
 
--- a/cwvreg.py	Thu May 06 08:24:46 2010 +0200
+++ b/cwvreg.py	Mon Jul 19 15:36:16 2010 +0200
@@ -20,7 +20,7 @@
 The `VRegistry`
 ---------------
 
-The `VRegistry` can be seen as a two levels dictionary. It contains
+The `VRegistry` can be seen as a two-level dictionary. It contains
 all dynamically loaded objects (subclasses of :ref:`appobject`) to
 build a |cubicweb| application. Basically:
 
@@ -49,12 +49,12 @@
 .. index::
    vregistry: registration_callback
 
-On startup |cubicweb| loads application objects defined in its library
+On startup, |cubicweb| loads application objects defined in its library
 and in cubes used by the instance. Application objects from the
 library are loaded first, then those provided by cubes are loaded in
 dependency order (e.g. if your cube depends on an other, objects from
-the dependency will be loaded first). Cube's modules or packages where
-appobject are looked for is explained in :ref:`cubelayout`.
+the dependency will be loaded first). The layout of the modules or packages
+in a cube  is explained in :ref:`cubelayout`.
 
 For each module:
 
@@ -144,20 +144,22 @@
 
   - else, the higher the score, the better the object suits the context
 
-* the object with the higher score is selected.
+* the object with the highest score is selected.
 
 .. Note::
 
-  When no score is higher than the others, an exception is raised in development
+  When no single object has the highest score, an exception is raised in development
   mode to let you know that the engine was not able to identify the view to
   apply. This error is silenced in production mode and one of the objects with
-  the higher score is picked.
+  the highest score is picked.
 
-  In such cases you would need to review your design and make sure your selectors
-  or appobjects are properly defined.
+  In such cases you would need to review your design and make sure
+  your selectors or appobjects are properly defined. Such an error is
+  typically caused by either forgetting to change the __regid__ in a
+  derived class, or by having copy-pasted some code.
 
-For instance, if you are selecting the primary (eg `__regid__ =
-'primary'`) view (eg `__registry__ = 'views'`) for a result set
+For instance, if you are selecting the primary (`__regid__ =
+'primary'`) view (`__registry__ = 'views'`) for a result set
 containing a `Card` entity, two objects will probably be selectable:
 
 * the default primary view (`__select__ = implements('Any')`), meaning
@@ -167,9 +169,8 @@
   meaning that the object is selectable for Card entities
 
 Other primary views specific to other entity types won't be selectable in this
-case. Among selectable objects, the implements selector will return a higher
-score than the second view since it's more specific, so it will be selected as
-expected.
+case. Among selectable objects, the `implements('Card')` selector will return a higher
+score since it's more specific, so the correct view will be selected as expected.
 
 .. _SelectionAPI:
 
@@ -179,7 +180,7 @@
 Here is the selection API you'll get on every registry. Some of them, as the
 'etypes' registry, containing entity classes, extend it. In those methods,
 `*args, **kwargs` is what we call the **context**. Those arguments are given to
-selectors that will inspect there content and return a score accordingly.
+selectors that will inspect their content and return a score accordingly.
 
 .. automethod:: cubicweb.vregistry.Registry.select
 
@@ -189,6 +190,7 @@
 
 .. automethod:: cubicweb.vregistry.Registry.object_by_id
 """
+
 __docformat__ = "restructuredtext en"
 _ = unicode
 
@@ -401,6 +403,10 @@
 
 
 class ActionsRegistry(CWRegistry):
+    def poss_visible_objects(self, *args, **kwargs):
+        """return an ordered list of possible actions"""
+        return sorted(self.possible_objects(*args, **kwargs),
+                      key=lambda x: x.order)
 
     def possible_actions(self, req, rset=None, **kwargs):
         if rset is None:
@@ -516,7 +522,7 @@
                     cpath = cfg.build_vregistry_cube_path([cfg.cube_dir(cube)])
                     cleanup_sys_modules(cpath)
         self.reset()
-        self.register_objects(path, force_reload)
+        self.register_objects(path)
         CW_EVENT_MANAGER.emit('after-registry-reload')
 
     def _set_schema(self, schema):
@@ -561,10 +567,11 @@
         if ifaces:
             self._needs_iface[obj] = ifaces
 
-    def register_objects(self, path, force_reload=False):
-        """overriden to remove objects requiring a missing interface"""
+    def register_objects(self, path):
+        """overriden to give cubicweb's extrapath (eg cubes package's __path__)
+        """
         super(CubicWebVRegistry, self).register_objects(
-            path, force_reload, self.config.extrapath)
+            path, self.config.extrapath)
 
     def initialization_completed(self):
         """cw specific code once vreg initialization is completed:
@@ -616,7 +623,7 @@
     def solutions(self, req, rqlst, args):
         def type_from_eid(eid, req=req):
             return req.describe(eid)[0]
-        self.rqlhelper.compute_solutions(rqlst, {'eid': type_from_eid}, args)
+        return self.rqlhelper.compute_solutions(rqlst, {'eid': type_from_eid}, args)
 
     def parse(self, req, rql, args=None):
         rqlst = self.rqlhelper.parse(rql)
--- a/dbapi.py	Thu May 06 08:24:46 2010 +0200
+++ b/dbapi.py	Mon Jul 19 15:36:16 2010 +0200
@@ -20,19 +20,23 @@
 Take a look at http://www.python.org/peps/pep-0249.html
 
 (most parts of this document are reported here in docstrings)
+"""
 
-"""
 __docformat__ = "restructuredtext en"
 
+from threading import currentThread
 from logging import getLogger
 from time import time, clock
 from itertools import count
+from warnings import warn
+from os.path import join
 
 from logilab.common.logging_ext import set_log_methods
 from logilab.common.decorators import monkeypatch
 from logilab.common.deprecation import deprecated
 
-from cubicweb import ETYPE_NAME_MAP, ConnectionError, cwvreg, cwconfig
+from cubicweb import ETYPE_NAME_MAP, ConnectionError, AuthenticationError,\
+     cwvreg, cwconfig
 from cubicweb.req import RequestSessionBase
 
 
@@ -206,10 +210,34 @@
     cnx = repo_connect(repo, login, cnxprops=cnxprops, **kwargs)
     return repo, cnx
 
+class _NeedAuthAccessMock(object):
+    def __getattribute__(self, attr):
+        raise AuthenticationError()
+    def __nonzero__(self):
+        return False
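+
+# Illustrative behaviour of the mock above (a sketch, not part of the API):
+# when a DBAPIRequest has no session yet, any attribute access on its `cnx` or
+# `user` raises AuthenticationError, while boolean tests simply evaluate to
+# False:
+#
+#   req = DBAPIRequest(vreg)   # no session given
+#   bool(req.cnx)              # False, thanks to __nonzero__
+#   req.user.login             # raises AuthenticationError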
+
+class DBAPISession(object):
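+    """Holds a dbapi connection (`cnx`) together with the user's login,
+    authentication info, and a `data` dictionary for session-scoped storage.
+
+    Illustrative usage (a sketch mirroring what `Connection.request()` does
+    below)::
+
+      session = DBAPISession(cnx, login=u'admin')
+      req = DBAPIRequest(cnx.vreg, session)
+    """
+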
+    def __init__(self, cnx, login=None, authinfo=None):
+        self.cnx = cnx
+        self.data = {}
+        self.login = login
+        self.authinfo = authinfo
+        # dbapi session identifier is the same as the first connection
+        # identifier, but may later differ in case of auto-reconnection as done
+        # by the web authentication manager (in cw.web.views.authentication)
+        if cnx is not None:
+            self.sessionid = cnx.sessionid
+        else:
+            self.sessionid = None
+
+    @property
+    def anonymous_session(self):
+        return not self.cnx or self.cnx.anonymous_connection
+
 
 class DBAPIRequest(RequestSessionBase):
 
-    def __init__(self, vreg, cnx=None):
+    def __init__(self, vreg, session=None):
         super(DBAPIRequest, self).__init__(vreg)
         try:
             # no vreg or config which doesn't handle translations
@@ -219,12 +247,13 @@
         self.set_default_language(vreg)
         # cache entities built during the request
         self._eid_cache = {}
-        # these args are initialized after a connection is
-        # established
-        self.cnx = None   # connection associated to the request
-        self._user = None # request's user, set at authentication
-        if cnx is not None:
-            self.set_connection(cnx)
+        if session is not None:
+            self.set_session(session)
+        else:
+            # these args are initialized after a connection is
+            # established
+            self.session = None
+            self.cnx = self.user = _NeedAuthAccessMock()
 
     def base_url(self):
         return self.vreg.config['base-url']
@@ -232,13 +261,25 @@
     def from_controller(self):
         return 'view'
 
-    def set_connection(self, cnx, user=None):
+    def set_session(self, session, user=None):
         """method called by the session handler when the user is authenticated
         or an anonymous connection is open
         """
-        self.cnx = cnx
-        self.cursor = cnx.cursor(self)
-        self.set_user(user)
+        self.session = session
+        if session.cnx:
+            self.cnx = session.cnx
+            self.execute = session.cnx.cursor(self).execute
+            if user is None:
+                user = self.cnx.user(self, {'lang': self.lang})
+        if user is not None:
+            self.user = user
+            self.set_entity_cache(user)
+
+    def execute(self, *args, **kwargs):
+        """overriden when session is set. By default raise authentication error
+        so authentication is requested.
+        """
+        raise AuthenticationError()
 
     def set_default_language(self, vreg):
         try:
@@ -256,14 +297,6 @@
             self.pgettext = lambda x, y: y
         self.debug('request default language: %s', self.lang)
 
-    def describe(self, eid):
-        """return a tuple (type, sourceuri, extid) for the entity with id <eid>"""
-        return self.cnx.describe(eid)
-
-    def source_defs(self):
-        """return the definition of sources used by the repository."""
-        return self.cnx.source_defs()
-
     # entities cache management ###############################################
 
     def entity_cache(self, eid):
@@ -283,24 +316,6 @@
 
     # low level session data management #######################################
 
-    def session_data(self):
-        """return a dictionnary containing session data"""
-        return self.cnx.session_data()
-
-    def get_session_data(self, key, default=None, pop=False):
-        """return value associated to `key` in session data"""
-        if self.cnx is None:
-            return default # before the connection has been established
-        return self.cnx.get_session_data(key, default, pop)
-
-    def set_session_data(self, key, value):
-        """set value associated to `key` in session data"""
-        return self.cnx.set_session_data(key, value)
-
-    def del_session_data(self, key):
-        """remove value associated to `key` in session data"""
-        return self.cnx.del_session_data(key)
-
     def get_shared_data(self, key, default=None, pop=False):
         """return value associated to `key` in shared data"""
         return self.cnx.get_shared_data(key, default, pop)
@@ -317,26 +332,39 @@
 
     # server session compat layer #############################################
 
+    def describe(self, eid):
+        """return a tuple (type, sourceuri, extid) for the entity with id <eid>"""
+        return self.cnx.describe(eid)
+
+    def source_defs(self):
+        """return the definition of sources used by the repository."""
+        return self.cnx.source_defs()
+
     def hijack_user(self, user):
         """return a fake request/session using specified user"""
         req = DBAPIRequest(self.vreg)
-        req.set_connection(self.cnx, user)
+        req.set_session(self.session, user)
         return req
 
-    @property
-    def user(self):
-        if self._user is None and self.cnx:
-            self.set_user(self.cnx.user(self, {'lang': self.lang}))
-        return self._user
+    @deprecated('[3.8] use direct access to req.session.data dictionary')
+    def session_data(self):
+        """return a dictionnary containing session data"""
+        return self.session.data
 
-    def set_user(self, user):
-        self._user = user
-        if user:
-            self.set_entity_cache(user)
+    @deprecated('[3.8] use direct access to req.session.data dictionary')
+    def get_session_data(self, key, default=None, pop=False):
+        if pop:
+            return self.session.data.pop(key, default)
+        return self.session.data.get(key, default)
 
-    def execute(self, *args, **kwargs):
-        """Session interface compatibility"""
-        return self.cursor.execute(*args, **kwargs)
+    @deprecated('[3.8] use direct access to req.session.data dictionary')
+    def set_session_data(self, key, value):
+        self.session.data[key] = value
+
+    @deprecated('[3.8] use direct access to req.session.data dictionary')
+    def del_session_data(self, key):
+        self.session.data.pop(key, None)
+
 
 set_log_methods(DBAPIRequest, getLogger('cubicweb.dbapi'))
 
@@ -351,68 +379,113 @@
     etc.
     """
 
-# module level objects ########################################################
+
+# cursor / connection objects ##################################################
+
+class Cursor(object):
+    """These objects represent a database cursor, which is used to manage the
+    context of a fetch operation. Cursors created from the same connection are
+    not isolated, i.e., any changes done to the database by a cursor are
+    immediately visible to the other cursors. Cursors created from different
+    connections are isolated.
+    """
+
+    def __init__(self, connection, repo, req=None):
+        """This read-only attribute return a reference to the Connection
+        object on which the cursor was created.
+        """
+        self.connection = connection
+        """optionnal issuing request instance"""
+        self.req = req
+        self._repo = repo
+        self._sessid = connection.sessionid
+
+    def close(self):
+        """no effect"""
+        pass
+
+    def _txid(self):
+        return self.connection._txid(self)
+
+    def execute(self, rql, args=None, eid_key=None, build_descr=True):
+        """execute a rql query, return resulting rows and their description in
+        a :class:`~cubicweb.rset.ResultSet` object
+
+        * `rql` should be a Unicode string or a plain ASCII string, containing
+          the rql query
+
+        * `args` is the optional dictionary of arguments for the query, with
+          keys matching the named substitutions in `rql`
+
+        * `build_descr` is a boolean flag indicating if the description should
+          be built on select queries (if false, the description will be an empty
+          list)
+
+        on INSERT queries, there will be one row for each inserted entity,
+        containing its eid
+
+        on SET queries, XXX describe
+
+        DELETE queries return no result.
+
+        .. Note::
+          to maximize the rql parsing/analyzing cache performance, you should
+          always use substitution arguments in queries, i.e. avoid queries such as::
+
+            execute('Any X WHERE X eid 123')
+
+          use::
+
+            execute('Any X WHERE X eid %(x)s', {'x': 123})
+        """
+        if eid_key is not None:
+            warn('[3.8] eid_key is deprecated, you can safely remove this argument',
+                 DeprecationWarning, stacklevel=2)
+        # XXX use named argument for build_descr in case repo is < 3.8
+        rset = self._repo.execute(self._sessid, rql, args,
+                                  build_descr=build_descr, **self._txid())
+        rset.req = self.req
+        return rset
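+
+# Minimal end-to-end usage sketch of this cursor API (illustrative only: the
+# instance id, login and password are placeholders; `connect` is the
+# module-level helper defined earlier in this file):
+#
+#   from cubicweb.dbapi import connect
+#
+#   cnx = connect(database='myinstance', login='admin', password='gingkow')
+#   rset = cnx.cursor().execute('Any X WHERE X is CWUser, X login %(l)s',
+#                               {'l': u'admin'})
+#   print rset.rows
+#   cnx.close()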
 
 
-apilevel = '2.0'
-
-"""Integer constant stating the level of thread safety the interface supports.
-Possible values are:
-
-                0     Threads may not share the module.
-                1     Threads may share the module, but not connections.
-                2     Threads may share the module and connections.
-                3     Threads may share the module, connections and
-                      cursors.
-
-Sharing in the above context means that two threads may use a resource without
-wrapping it using a mutex semaphore to implement resource locking. Note that
-you cannot always make external resources thread safe by managing access using
-a mutex: the resource may rely on global variables or other external sources
-that are beyond your control.
-"""
-threadsafety = 1
+class LogCursor(Cursor):
+    """override the standard cursor to log executed queries"""
 
-"""String constant stating the type of parameter marker formatting expected by
-the interface. Possible values are :
+    def execute(self, operation, parameters=None, eid_key=None, build_descr=True):
+        """override the standard cursor to log executed queries"""
+        if eid_key is not None:
+            warn('[3.8] eid_key is deprecated, you can safely remove this argument',
+                 DeprecationWarning, stacklevel=2)
+        tstart, cstart = time(), clock()
+        rset = Cursor.execute(self, operation, parameters, build_descr=build_descr)
+        self.connection.executed_queries.append((operation, parameters,
+                                                 time() - tstart, clock() - cstart))
+        return rset
 
-                'qmark'         Question mark style,
-                                e.g. '...WHERE name=?'
-                'numeric'       Numeric, positional style,
-                                e.g. '...WHERE name=:1'
-                'named'         Named style,
-                                e.g. '...WHERE name=:name'
-                'format'        ANSI C printf format codes,
-                                e.g. '...WHERE name=%s'
-                'pyformat'      Python extended format codes,
-                                e.g. '...WHERE name=%(name)s'
-"""
-paramstyle = 'pyformat'
-
-
-# connection object ###########################################################
 
 class Connection(object):
     """DB-API 2.0 compatible Connection object for CubicWeb
     """
     # make exceptions available through the connection object
     ProgrammingError = ProgrammingError
+    # attributes that may be overridden per connection instance
+    anonymous_connection = False
+    cursor_class = Cursor
+    vreg = None
+    _closed = None
 
     def __init__(self, repo, cnxid, cnxprops=None):
         self._repo = repo
         self.sessionid = cnxid
         self._close_on_del = getattr(cnxprops, 'close_on_del', True)
         self._cnxtype = getattr(cnxprops, 'cnxtype', 'pyro')
-        self._closed = None
         if cnxprops and cnxprops.log_queries:
             self.executed_queries = []
             self.cursor_class = LogCursor
-        else:
-            self.cursor_class = Cursor
-        self.anonymous_connection = False
-        self.vreg = None
-        # session's data
-        self.data = {}
+        if self._cnxtype == 'pyro':
+            # check client/server compat
+            if self._repo.get_versions()['cubicweb'] < (3, 8, 6):
+                self._txid = lambda cursor=None: {}
 
     def __repr__(self):
         if self.anonymous_connection:
@@ -429,30 +502,12 @@
             self.rollback()
             return False #propagate the exception
 
-    def request(self):
-        return DBAPIRequest(self.vreg, self)
-
-    def session_data(self):
-        """return a dictionnary containing session data"""
-        return self.data
+    def _txid(self, cursor=None): # XXX could now handle various isolation levels!
+        # return a dict as bw compat trick
+        return {'txid': currentThread().getName()}
 
-    def get_session_data(self, key, default=None, pop=False):
-        """return value associated to `key` in session data"""
-        if pop:
-            return self.data.pop(key, default)
-        else:
-            return self.data.get(key, default)
-
-    def set_session_data(self, key, value):
-        """set value associated to `key` in session data"""
-        self.data[key] = value
-
-    def del_session_data(self, key):
-        """remove value associated to `key` in session data"""
-        try:
-            del self.data[key]
-        except KeyError:
-            pass
+    def request(self):
+        return DBAPIRequest(self.vreg, DBAPISession(self))
 
     def check(self):
         """raise `BadConnectionId` if the connection is no more valid"""
@@ -493,8 +548,7 @@
             raise ProgrammingError('Closed connection')
         return self._repo.get_schema()
 
-    def load_appobjects(self, cubes=_MARKER, subpath=None, expand=True,
-                        force_reload=None):
+    def load_appobjects(self, cubes=_MARKER, subpath=None, expand=True):
         config = self.vreg.config
         if cubes is _MARKER:
             cubes = self._repo.get_cubes()
@@ -512,21 +566,13 @@
         if 'views' in subpath:
             esubpath = list(subpath)
             esubpath.remove('views')
-            esubpath.append('web/views')
-        cubes = reversed([config.cube_dir(p) for p in cubes])
-        vpath = config.build_vregistry_path(cubes, evobjpath=esubpath,
+            esubpath.append(join('web', 'views'))
+        cubespath = [config.cube_dir(p) for p in cubes]
+        config.load_site_cubicweb(cubespath)
+        vpath = config.build_vregistry_path(reversed(cubespath),
+                                            evobjpath=esubpath,
                                             tvobjpath=subpath)
-        self.vreg.register_objects(vpath, force_reload)
-        if self._cnxtype == 'inmemory':
-            # should reinit hooks manager as well
-            hm, config = self._repo.hm, self._repo.config
-            hm.set_schema(hm.schema) # reset structure
-            hm.register_system_hooks(config)
-            # instance specific hooks
-            if self._repo.config.instance_hooks:
-                hm.register_hooks(config.load_hooks(self.vreg))
-
-    load_vobjects = deprecated()(load_appobjects)
+        self.vreg.register_objects(vpath)
 
     def use_web_compatible_requests(self, baseurl, sitetitle=None):
         """monkey patch DBAPIRequest to fake a cw.web.request, so you should
@@ -574,9 +620,13 @@
         if req is None:
             req = self.request()
         rset = req.eid_rset(eid, 'CWUser')
-        user = self.vreg['etypes'].etype_class('CWUser')(req, rset, row=0,
-                                                         groups=groups,
-                                                         properties=properties)
+        if self.vreg is not None and 'etypes' in self.vreg:
+            user = self.vreg['etypes'].etype_class('CWUser')(req, rset, row=0,
+                                                             groups=groups,
+                                                             properties=properties)
+        else:
+            from cubicweb.entity import Entity
+            user = Entity(req, rset, row=0)
         user['login'] = login # cache login
         return user
 
@@ -591,7 +641,7 @@
     def describe(self, eid):
         if self._closed is not None:
             raise ProgrammingError('Closed connection')
-        return self._repo.describe(self.sessionid, eid)
+        return self._repo.describe(self.sessionid, eid, **self._txid())
 
     def close(self):
         """Close the connection now (rather than whenever __del__ is called).
@@ -604,7 +654,7 @@
         """
         if self._closed:
             raise ProgrammingError('Connection is already closed')
-        self._repo.close(self.sessionid)
+        self._repo.close(self.sessionid, **self._txid())
         del self._repo # necessary for proper garbage collection
         self._closed = 1
 
@@ -618,7 +668,7 @@
         """
         if not self._closed is None:
             raise ProgrammingError('Connection is already closed')
-        return self._repo.commit(self.sessionid)
+        return self._repo.commit(self.sessionid, **self._txid())
 
     def rollback(self):
         """This method is optional since not all databases provide transaction
@@ -631,13 +681,14 @@
         """
         if not self._closed is None:
             raise ProgrammingError('Connection is already closed')
-        self._repo.rollback(self.sessionid)
+        self._repo.rollback(self.sessionid, **self._txid())
 
     def cursor(self, req=None):
-        """Return a new Cursor Object using the connection.  If the database
-        does not provide a direct cursor concept, the module will have to
-        emulate cursors using other means to the extent needed by this
-        specification.
+        """Return a new Cursor Object using the connection.
+
+        On a pyro connection, if you intend to use the ORM abilities, call the
+        load_appobjects method before getting a cursor.
         """
         if self._closed is not None:
             raise ProgrammingError('Can\'t get cursor on closed connection')
@@ -670,6 +721,7 @@
           only searched in 'public' actions, unless a `public` argument is given
           and set to false.
         """
+        actionfilters.update(self._txid())
         txinfos = self._repo.undoable_transactions(self.sessionid, ueid,
                                                    **actionfilters)
         if req is None:
@@ -685,7 +737,8 @@
         allowed (eg not in managers group and the transaction doesn't belong to
         him).
         """
-        txinfo = self._repo.transaction_info(self.sessionid, txuuid)
+        txinfo = self._repo.transaction_info(self.sessionid, txuuid,
+                                             **self._txid())
         if req is None:
             req = self.request()
         txinfo.req = req
@@ -701,7 +754,8 @@
         session's user is not allowed (eg not in managers group and the
         transaction doesn't belong to him).
         """
-        return self._repo.transaction_actions(self.sessionid, txuuid, public)
+        return self._repo.transaction_actions(self.sessionid, txuuid, public,
+                                              **self._txid())
 
     def undo_transaction(self, txuuid):
         """Undo the given transaction. Return potential restoration errors.
@@ -710,208 +764,5 @@
         allowed (eg not in managers group and the transaction doesn't belong to
         him).
         """
-        return self._repo.undo_transaction(self.sessionid, txuuid)
-
-
-# cursor object ###############################################################
-
-class Cursor(object):
-    """This represents a database cursor, which is used to manage the
-    context of a fetch operation. Cursors created from the same connection are
-    not isolated, i.e., any changes done to the database by a cursor are
-    immediately visible by the other cursors. Cursors created from different
-    connections can or can not be isolated, depending on how the transaction
-    support is implemented (see also the connection's rollback() and commit()
-    methods.)
-    """
-
-    def __init__(self, connection, repo, req=None):
-        # This read-only attribute returns a reference to the Connection
-        # object on which the cursor was created.
-        self.connection = connection
-        # optionnal issuing request instance
-        self.req = req
-
-        # This read/write attribute specifies the number of rows to fetch at a
-        # time with fetchmany(). It defaults to 1 meaning to fetch a single row
-        # at a time.
-        # Implementations must observe this value with respect to the fetchmany()
-        # method, but are free to interact with the database a single row at a
-        # time. It may also be used in the implementation of executemany().
-        self.arraysize = 1
-
-        self._repo = repo
-        self._sessid = connection.sessionid
-        self._res = None
-        self._closed = None
-        self._index = 0
-
-    def close(self):
-        """Close the cursor now (rather than whenever __del__ is called).  The
-        cursor will be unusable from this point forward; an Error (or subclass)
-        exception will be raised if any operation is attempted with the cursor.
-        """
-        self._closed = True
-
-
-    def execute(self, operation, parameters=None, eid_key=None, build_descr=True):
-        """Prepare and execute a database operation (query or command).
-        Parameters may be provided as sequence or mapping and will be bound to
-        variables in the operation.  Variables are specified in a
-        database-specific notation (see the module's paramstyle attribute for
-        details).
-
-        A reference to the operation will be retained by the cursor.  If the
-        same operation object is passed in again, then the cursor can optimize
-        its behavior.  This is most effective for algorithms where the same
-        operation is used, but different parameters are bound to it (many
-        times).
-
-        For maximum efficiency when reusing an operation, it is best to use the
-        setinputsizes() method to specify the parameter types and sizes ahead
-        of time.  It is legal for a parameter to not match the predefined
-        information; the implementation should compensate, possibly with a loss
-        of efficiency.
-
-        The parameters may also be specified as list of tuples to e.g. insert
-        multiple rows in a single operation, but this kind of usage is
-        depreciated: executemany() should be used instead.
-
-        Return values are not defined by the DB-API, but this here it returns a
-        ResultSet object.
-        """
-        self._res = rset = self._repo.execute(self._sessid, operation,
-                                              parameters, eid_key, build_descr)
-        rset.req = self.req
-        self._index = 0
-        return rset
-
-
-    def executemany(self, operation, seq_of_parameters):
-        """Prepare a database operation (query or command) and then execute it
-        against all parameter sequences or mappings found in the sequence
-        seq_of_parameters.
-
-        Modules are free to implement this method using multiple calls to the
-        execute() method or by using array operations to have the database
-        process the sequence as a whole in one call.
-
-        Use of this method for an operation which produces one or more result
-        sets constitutes undefined behavior, and the implementation is
-        permitted (but not required) to raise an exception when it detects that
-        a result set has been created by an invocation of the operation.
-
-        The same comments as for execute() also apply accordingly to this
-        method.
-
-        Return values are not defined.
-        """
-        for parameters in seq_of_parameters:
-            self.execute(operation, parameters)
-            if self._res.rows is not None:
-                self._res = None
-                raise ProgrammingError('Operation returned a result set')
-
-
-    def fetchone(self):
-        """Fetch the next row of a query result set, returning a single
-        sequence, or None when no more data is available.
-
-        An Error (or subclass) exception is raised if the previous call to
-        execute*() did not produce any result set or no call was issued yet.
-        """
-        if self._res is None:
-            raise ProgrammingError('No result set')
-        row = self._res.rows[self._index]
-        self._index += 1
-        return row
-
-
-    def fetchmany(self, size=None):
-        """Fetch the next set of rows of a query result, returning a sequence
-        of sequences (e.g. a list of tuples). An empty sequence is returned
-        when no more rows are available.
-
-        The number of rows to fetch per call is specified by the parameter.  If
-        it is not given, the cursor's arraysize determines the number of rows
-        to be fetched. The method should try to fetch as many rows as indicated
-        by the size parameter. If this is not possible due to the specified
-        number of rows not being available, fewer rows may be returned.
-
-        An Error (or subclass) exception is raised if the previous call to
-        execute*() did not produce any result set or no call was issued yet.
-
-        Note there are performance considerations involved with the size
-        parameter.  For optimal performance, it is usually best to use the
-        arraysize attribute.  If the size parameter is used, then it is best
-        for it to retain the same value from one fetchmany() call to the next.
-        """
-        if self._res is None:
-            raise ProgrammingError('No result set')
-        if size is None:
-            size = self.arraysize
-        rows = self._res.rows[self._index:self._index + size]
-        self._index += size
-        return rows
-
-
-    def fetchall(self):
-        """Fetch all (remaining) rows of a query result, returning them as a
-        sequence of sequences (e.g. a list of tuples).  Note that the cursor's
-        arraysize attribute can affect the performance of this operation.
-
-        An Error (or subclass) exception is raised if the previous call to
-        execute*() did not produce any result set or no call was issued yet.
-        """
-        if self._res is None:
-            raise ProgrammingError('No result set')
-        if not self._res.rows:
-            return []
-        rows = self._res.rows[self._index:]
-        self._index = len(self._res)
-        return rows
-
-
-    def setinputsizes(self, sizes):
-        """This can be used before a call to execute*() to predefine memory
-        areas for the operation's parameters.
-
-        sizes is specified as a sequence -- one item for each input parameter.
-        The item should be a Type Object that corresponds to the input that
-        will be used, or it should be an integer specifying the maximum length
-        of a string parameter.  If the item is None, then no predefined memory
-        area will be reserved for that column (this is useful to avoid
-        predefined areas for large inputs).
-
-        This method would be used before the execute*() method is invoked.
-
-        Implementations are free to have this method do nothing and users are
-        free to not use it.
-        """
-        pass
-
-
-    def setoutputsize(self, size, column=None):
-        """Set a column buffer size for fetches of large columns (e.g. LONGs,
-        BLOBs, etc.).  The column is specified as an index into the result
-        sequence.  Not specifying the column will set the default size for all
-        large columns in the cursor.
-
-        This method would be used before the execute*() method is invoked.
-
-        Implementations are free to have this method do nothing and users are
-        free to not use it.
-        """
-        pass
-
-
-class LogCursor(Cursor):
-    """override the standard cursor to log executed queries"""
-
-    def execute(self, operation, parameters=None, eid_key=None, build_descr=True):
-        """override the standard cursor to log executed queries"""
-        tstart, cstart = time(), clock()
-        rset = Cursor.execute(self, operation, parameters, eid_key, build_descr)
-        self.connection.executed_queries.append((operation, parameters,
-                                                 time() - tstart, clock() - cstart))
-        return rset
+        return self._repo.undo_transaction(self.sessionid, txuuid,
+                                           **self._txid())
--- a/debian.hardy/rules	Thu May 06 08:24:46 2010 +0200
+++ b/debian.hardy/rules	Mon Jul 19 15:36:16 2010 +0200
@@ -14,7 +14,7 @@
 	# and I can't get pbuilder find them in its chroot :(
 	# cd doc && make
 	# FIXME cleanup and use sphinx-build as build-depends ?
-	python setup.py build
+	NO_SETUPTOOLS=1 python setup.py build
 	touch build-stamp
 
 clean: 
@@ -33,7 +33,7 @@
 	dh_clean
 	dh_installdirs
 
-	python setup.py -q install --no-compile --prefix=debian/tmp/usr
+	NO_SETUPTOOLS=1 python setup.py -q install --no-compile --prefix=debian/tmp/usr
 
 	# Put all the python library and data in cubicweb-common
 	# and scripts in cubicweb-server
--- a/debian/changelog	Thu May 06 08:24:46 2010 +0200
+++ b/debian/changelog	Mon Jul 19 15:36:16 2010 +0200
@@ -1,3 +1,51 @@
+cubicweb (3.8.7-1) unstable; urgency=low
+
+  * new upstream release
+
+ -- Julien Jehannet <julien.jehannet@logilab.fr>  Wed, 07 Jul 2010 11:42:02 +0200
+
+cubicweb (3.8.6-1) unstable; urgency=low
+
+  * new upstream release
+
+ -- Sylvain Thénault <sylvain.thenault@logilab.fr>  Fri, 02 Jul 2010 00:39:36 +0200
+
+cubicweb (3.8.5-1) unstable; urgency=low
+
+  * new upstream release
+
+ -- Sylvain Thénault <sylvain.thenault@logilab.fr>  Mon, 21 Jun 2010 10:42:01 +0200
+
+cubicweb (3.8.4-1) unstable; urgency=low
+
+  * new upstream release
+
+ -- Sylvain Thénault <sylvain.thenault@logilab.fr>  Fri, 11 Jun 2010 11:36:50 +0200
+
+cubicweb (3.8.3-1) unstable; urgency=low
+
+  * new upstream release
+
+ -- Sylvain Thénault <sylvain.thenault@logilab.fr>  Mon, 07 Jun 2010 09:19:50 +0200
+
+cubicweb (3.8.2-1) unstable; urgency=low
+
+  * new upstream release
+
+ -- Sylvain Thénault <sylvain.thenault@logilab.fr>  Tue, 18 May 2010 14:59:07 +0200
+
+cubicweb (3.8.1-1) unstable; urgency=low
+
+  * new upstream release
+
+ -- Sylvain Thénault <sylvain.thenault@logilab.fr>  Mon, 26 Apr 2010 17:11:36 +0200
+
+cubicweb (3.8.0-1) unstable; urgency=low
+
+  * new upstream release
+
+ -- Sylvain Thénault <sylvain.thenault@logilab.fr>  Tue, 20 Apr 2010 16:31:44 +0200
+
 cubicweb (3.7.5-1) unstable; urgency=low
 
   * new upstream release on the 3.7 branch
@@ -38,7 +86,7 @@
 
  -- Sylvain Thénault <sylvain.thenault@logilab.fr>  Tue, 16 Mar 2010 17:55:37 +0100
 
- cubicweb (3.6.3-1) unstable; urgency=low
+cubicweb (3.6.3-1) unstable; urgency=low
 
   * remove postgresql-contrib from cubicweb dependency (using tsearch
     which is included with postgres >= 8.3)
--- a/debian/control	Thu May 06 08:24:46 2010 +0200
+++ b/debian/control	Mon Jul 19 15:36:16 2010 +0200
@@ -33,7 +33,7 @@
 Conflicts: cubicweb-multisources
 Replaces: cubicweb-multisources
 Provides: cubicweb-multisources
-Depends: ${python:Depends}, cubicweb-common (= ${source:Version}), cubicweb-ctl (= ${source:Version}), python-logilab-database (>= 1.0.2), cubicweb-postgresql-support | cubicweb-mysql-support | python-pysqlite2
+Depends: ${python:Depends}, cubicweb-common (= ${source:Version}), cubicweb-ctl (= ${source:Version}), python-logilab-database (>= 1.0.5), cubicweb-postgresql-support | cubicweb-mysql-support | python-pysqlite2
 Recommends: pyro, cubicweb-documentation (= ${source:Version})
 Description: server part of the CubicWeb framework
  CubicWeb is a semantic web application framework.
@@ -68,7 +68,7 @@
 Architecture: all
 XB-Python-Version: ${python:Versions}
 Provides: cubicweb-web-frontend
-Depends: ${python:Depends}, cubicweb-web (= ${source:Version}), cubicweb-ctl (= ${source:Version}), python-twisted-web2
+Depends: ${python:Depends}, cubicweb-web (= ${source:Version}), cubicweb-ctl (= ${source:Version}), python-twisted-web
 Recommends: pyro, cubicweb-documentation (= ${source:Version})
 Description: twisted-based web interface for the CubicWeb framework
  CubicWeb is a semantic web application framework.
@@ -83,7 +83,7 @@
 Architecture: all
 XB-Python-Version: ${python:Versions}
 Depends: ${python:Depends}, cubicweb-common (= ${source:Version}), python-simplejson (>= 1.3)
-Recommends: python-docutils, python-vobject, fckeditor, python-fyzz, python-pysixt, fop, python-imaging
+Recommends: python-docutils, python-vobject, fckeditor, python-fyzz, python-imaging
 Description: web interface library for the CubicWeb framework
  CubicWeb is a semantic web application framework.
  .
@@ -97,7 +97,7 @@
 Package: cubicweb-common
 Architecture: all
 XB-Python-Version: ${python:Versions}
-Depends: ${python:Depends}, graphviz, gettext, python-logilab-mtconverter (>= 0.6.0), python-logilab-common (>= 0.49.0), python-yams (>= 0.28.1), python-rql (>= 0.25.0), python-lxml
+Depends: ${python:Depends}, graphviz, gettext, python-logilab-mtconverter (>= 0.6.0), python-logilab-common (>= 0.50.2), python-yams (>= 0.29.0), python-rql (>= 0.26.3), python-lxml
 Recommends: python-simpletal (>= 4.0), python-crypto
 Conflicts: cubicweb-core
 Replaces: cubicweb-core
--- a/debian/cubicweb-ctl.logrotate	Thu May 06 08:24:46 2010 +0200
+++ b/debian/cubicweb-ctl.logrotate	Mon Jul 19 15:36:16 2010 +0200
@@ -9,9 +9,9 @@
         sharedscripts
         postrotate
            if [ -x /usr/sbin/invoke-rc.d ]; then \
-              invoke-rc.d cubicweb reload > /dev/null; \
+              invoke-rc.d cubicweb reload > /dev/null 2>&1; \
            else \
-              /etc/init.d/cubicweb reload > /dev/null; \
+              /etc/init.d/cubicweb reload > /dev/null 2>&1; \
            fi; \
         endscript
 }
--- a/debian/rules	Thu May 06 08:24:46 2010 +0200
+++ b/debian/rules	Mon Jul 19 15:36:16 2010 +0200
@@ -14,7 +14,7 @@
 	# and I can't get pbuilder find them in its chroot :(
 	# cd doc && make
 	# FIXME cleanup and use sphinx-build as build-depends ?
-	python setup.py build
+	NO_SETUPTOOLS=1 python setup.py build
 	touch build-stamp
 
 clean:
@@ -34,7 +34,7 @@
 	dh_installdirs
 
 	#python setup.py install_lib --no-compile --install-dir=debian/cubicweb-common/usr/lib/python2.4/site-packages/
-	python setup.py -q install --no-compile --prefix=debian/tmp/usr
+	NO_SETUPTOOLS=1 python setup.py -q install --no-compile --prefix=debian/tmp/usr
 
 	# Put all the python library and data in cubicweb-common
 	# and scripts in cubicweb-server
--- a/devtools/__init__.py	Thu May 06 08:24:46 2010 +0200
+++ b/devtools/__init__.py	Mon Jul 19 15:36:16 2010 +0200
@@ -100,19 +100,16 @@
          {'type' : 'string',
           'default': None,
           'help': 'login of the CubicWeb user account to use for anonymous user (if you want to allow anonymous)',
-          'group': 'main', 'inputlevel': 1,
+          'group': 'main', 'level': 1,
           }),
         ('anonymous-password',
          {'type' : 'string',
           'default': None,
           'help': 'password of the CubicWeb user account matching login',
-          'group': 'main', 'inputlevel': 1,
+          'group': 'main', 'level': 1,
           }),
         ))
 
-    if not os.environ.get('APYCOT_ROOT'):
-        REGISTRY_DIR = normpath(join(CW_SOFTWARE_ROOT, '../cubes'))
-
     def __init__(self, appid, log_threshold=logging.CRITICAL+10):
         ServerConfiguration.__init__(self, appid)
         self.init_log(log_threshold, force=True)
@@ -252,7 +249,7 @@
     """initialize a fresh sqlserver databse used for testing purpose"""
     if config.init_repository:
         from cubicweb.server import init_repository
-        init_repository(config, interactive=False, drop=True, vreg=vreg)
+        init_repository(config, interactive=False, drop=True)
 
 ### sqlite test database handling ##############################################
 
@@ -281,7 +278,6 @@
 def init_test_database_sqlite(config):
     """initialize a fresh sqlite databse used for testing purpose"""
     # remove database file if it exists
-    dbfile = config.sources()['system']['db-name']
     if not reset_test_database_sqlite(config):
         # initialize the database
         import shutil
--- a/devtools/devctl.py	Thu May 06 08:24:46 2010 +0200
+++ b/devtools/devctl.py	Mon Jul 19 15:36:16 2010 +0200
@@ -35,7 +35,8 @@
 
 from cubicweb.__pkginfo__ import version as cubicwebversion
 from cubicweb import CW_SOFTWARE_ROOT as BASEDIR, BadCommandUsage
-from cubicweb.toolsutils import Command, copy_skeleton, underline_title
+from cubicweb.toolsutils import (SKEL_EXCLUDE, Command,
+                                 copy_skeleton, underline_title)
 from cubicweb.web.webconfig import WebConfiguration
 from cubicweb.server.serverconfig import ServerConfiguration
 
@@ -57,6 +58,7 @@
         if cubes:
             self._cubes = self.reorder_cubes(
                 self.expand_cubes(cubes, with_recommends=True))
+            self.load_site_cubicweb()
         else:
             self._cubes = ()
 
@@ -272,7 +274,7 @@
     def run(self, args):
         """run the command with its specific arguments"""
         if args:
-            raise BadCommandUsage('Too much arguments')
+            raise BadCommandUsage('Too many arguments')
         import shutil
         import tempfile
         import yams
@@ -350,23 +352,23 @@
 
 def update_cubes_catalogs(cubes):
     for cubedir in cubes:
-        toedit = []
         if not isdir(cubedir):
             print '-> ignoring %s that is not a directory.' % cubedir
             continue
         try:
-            toedit += update_cube_catalogs(cubedir)
+            toedit = update_cube_catalogs(cubedir)
         except Exception:
             import traceback
             traceback.print_exc()
             print '-> error while updating catalogs for cube', cubedir
         else:
             # instructions for the next steps
-            print '-> regenerated .po catalogs for cube %s.' % cubedir
-            print '\nYou can now edit the following files:'
-            print '* ' + '\n* '.join(toedit)
-            print ('When you are done, run "cubicweb-ctl i18ninstance '
-                   '<yourinstance>" to see changes in your instances.')
+            if toedit:
+                print '-> regenerated .po catalogs for cube %s.' % cubedir
+                print '\nYou can now edit the following files:'
+                print '* ' + '\n* '.join(toedit)
+                print ('When you are done, run "cubicweb-ctl i18ninstance '
+                       '<yourinstance>" to see changes in your instances.')
 
 def update_cube_catalogs(cubedir):
     import shutil
@@ -374,7 +376,6 @@
     from logilab.common.fileutils import ensure_fs_mode
     from logilab.common.shellutils import find, rm
     from cubicweb.i18n import extract_from_tal, execute
-    toedit = []
     cube = basename(normpath(cubedir))
     tempdir = tempfile.mkdtemp()
     print underline_title('Updating i18n catalogs for cube %s' % cube)
@@ -419,8 +420,14 @@
     print '-> merging %i .pot files:' % len(potfiles)
     execute('msgcat -o %s %s' % (potfile,
                                  ' '.join('"%s"' % f for f in potfiles)))
+    if not exists(potfile):
+        print 'no message catalog for cube', cube, 'nothing to translate'
+        # cleanup
+        rm(tempdir)
+        return ()
     print '-> merging main pot file with existing translations:'
     chdir('i18n')
+    toedit = []
     for lang in LANGS:
         print '-> language', lang
         cubepo = '%s.po' % lang
@@ -454,12 +461,19 @@
     """Create a new cube.
 
     <cubename>
-      the name of the new cube
+      the name of the new cube. It should be a valid python module name.
     """
     name = 'newcube'
     arguments = '<cubename>'
 
     options = (
+        ("layout",
+         {'short': 'L', 'type' : 'choice', 'metavar': '<cube layout>',
+          'default': 'simple', 'choices': ('simple', 'full'),
+          'help': 'cube layout. You\'ll get a minimal cube with the "simple" \
+layout, and a full featured cube with "full" layout.',
+          }
+         ),
         ("directory",
          {'short': 'd', 'type' : 'string', 'metavar': '<cubes directory>',
           'help': 'directory where the new cube should be created',
@@ -489,14 +503,54 @@
           'help': 'cube author\'s web site',
           }
          ),
+        ("license",
+         {'short': 'l', 'type' : 'choice', 'metavar': '<license>',
+          'default': 'LGPL', 'choices': ('GPL', 'LGPL', ''),
+          'help': 'cube license',
+          }
+         ),
         )
 
+    LICENSES = {
+        'LGPL': '''\
+# This program is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with this program. If not, see <http://www.gnu.org/licenses/>.
+''',
+
+        'GPL': '''\
+# This program is free software: you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 2.1 of the License, or (at your option) any later
+# version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# this program. If not, see <http://www.gnu.org/licenses/>.
+''',
+        '': '# INSERT LICENSE HERE'
+        }
 
     def run(self, args):
+        import re
         from logilab.common.shellutils import ASK
         if len(args) != 1:
             raise BadCommandUsage("exactly one argument (cube name) is expected")
-        cubename, = args
+        cubename = args[0]
+        if not re.match('[_A-Za-z][_A-Za-z0-9]*$', cubename):
+            raise BadCommandUsage("cube name should be a valid python module name")
         verbose = self.get('verbose')
         cubesdir = self.get('directory')
         if not cubesdir:
@@ -515,7 +569,7 @@
         if exists(cubedir):
             self.fail("%s already exists !" % (cubedir))
         skeldir = join(BASEDIR, 'skeleton')
-        default_name = 'cubicweb-%s' % cubename.lower()
+        default_name = 'cubicweb-%s' % cubename.lower().replace('_', '-')
         if verbose:
             distname = raw_input('Debian name for your cube ? [%s]): ' % default_name).strip()
             if not distname:
@@ -525,41 +579,49 @@
                     distname = 'cubicweb-' + distname
         else:
             distname = default_name
-
+        if not re.match('[a-z][-a-z0-9]*$', distname):
+            raise BadCommandUsage("cube distname should be a valid debian package name")
         longdesc = shortdesc = raw_input('Enter a short description for your cube: ')
         if verbose:
             longdesc = raw_input('Enter a long description (leave empty to reuse the short one): ')
-        dependencies = {}
+        dependencies = {'cubicweb': '>= %s' % cubicwebversion}
         if verbose:
-            dependencies = self._ask_for_dependencies()
+            dependencies.update(self._ask_for_dependencies())
         context = {'cubename' : cubename,
                    'distname' : distname,
                    'shortdesc' : shortdesc,
                    'longdesc' : longdesc or shortdesc,
-                   'dependencies' : dict((dep, None) for dep in dependencies),
+                   'dependencies' : dependencies,
                    'version'  : cubicwebversion,
                    'year'  : str(datetime.now().year),
                    'author': self['author'],
                    'author-email': self['author-email'],
                    'author-web-site': self['author-web-site'],
+                   'license': self['license'],
+                   'long-license': self.LICENSES[self['license']],
                    }
-        copy_skeleton(skeldir, cubedir, context)
+        exclude = SKEL_EXCLUDE
+        if self['layout'] == 'simple':
+            exclude += ('sobjects.py*', 'precreate.py*', 'realdb_test*',
+                        'cubes.*', 'external_resources*')
+        copy_skeleton(skeldir, cubedir, context, exclude=exclude)
 
     def _ask_for_dependencies(self):
         from logilab.common.shellutils import ASK
         from logilab.common.textutils import splitstrip
-        includes = []
-        for stdtype in ServerConfiguration.available_cubes():
-            answer = ASK.ask("Depends on cube %s? " % stdtype,
+        depcubes = []
+        for cube in ServerConfiguration.available_cubes():
+            answer = ASK.ask("Depends on cube %s? " % cube,
                              ('N','y','skip','type'), 'N')
             if answer == 'y':
-                includes.append(stdtype)
+                depcubes.append(cube)
             if answer == 'type':
-                includes = splitstrip(raw_input('type dependencies: '))
+                depcubes = splitstrip(raw_input('type dependencies: '))
                 break
             elif answer == 'skip':
                 break
-        return includes
+        return dict(('cubicweb-' + cube, ServerConfiguration.cube_version(cube))
+                    for cube in depcubes)
 
 
 class ExamineLogCommand(Command):
@@ -588,7 +650,7 @@
             except OSError, ex:
                 raise BadCommandUsage("can't open rql log file %s: %s"
                                       % (filepath, ex))
-            for lineno, line in enumerate(file):
+            for lineno, line in enumerate(stream):
                 if not ' WHERE ' in line:
                     continue
                 try:
--- a/devtools/fill.py	Thu May 06 08:24:46 2010 +0200
+++ b/devtools/fill.py	Mon Jul 19 15:36:16 2010 +0200
@@ -16,9 +16,8 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""This modules defines func / methods for creating test repositories
+"""This modules defines func / methods for creating test repositories"""
 
-"""
 __docformat__ = "restructuredtext en"
 
 from random import randint, choice
--- a/devtools/repotest.py	Thu May 06 08:24:46 2010 +0200
+++ b/devtools/repotest.py	Mon Jul 19 15:36:16 2010 +0200
@@ -246,8 +246,8 @@
         self._dumb_sessions.append(s)
         return s
 
-    def execute(self, rql, args=None, eid_key=None, build_descr=True):
-        return self.o.execute(self.session, rql, args, eid_key, build_descr)
+    def execute(self, rql, args=None, build_descr=True):
+        return self.o.execute(self.session, rql, args, build_descr)
 
     def commit(self):
         self.session.commit()
--- a/devtools/testlib.py	Thu May 06 08:24:46 2010 +0200
+++ b/devtools/testlib.py	Mon Jul 19 15:36:16 2010 +0200
@@ -15,9 +15,10 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""this module contains base classes and utilities for cubicweb tests
+"""this module contains base classes and utilities for cubicweb tests"""
 
-"""
+from __future__ import with_statement
+
 __docformat__ = "restructuredtext en"
 
 import os
@@ -26,6 +27,7 @@
 from urllib import unquote
 from math import log
 from contextlib import contextmanager
+from warnings import warn
 
 import yams.schema
 
@@ -38,9 +40,10 @@
 
 from cubicweb import ValidationError, NoSelectableObject, AuthenticationError
 from cubicweb import cwconfig, devtools, web, server
-from cubicweb.dbapi import repo_connect, ConnectionProperties, ProgrammingError
+from cubicweb.dbapi import ProgrammingError, DBAPISession, repo_connect
 from cubicweb.sobjects import notification
 from cubicweb.web import Redirect, application
+from cubicweb.server.session import security_enabled
 from cubicweb.devtools import SYSTEM_ENTITIES, SYSTEM_RELATIONS, VIEW_VALIDATORS
 from cubicweb.devtools import fake, htmlparser
 from cubicweb.utils import json
@@ -79,6 +82,9 @@
 
 
 def refresh_repo(repo, resetschema=False, resetvreg=False):
+    for pool in repo.pools:
+        pool.close(True)
+    repo.system_source.shutdown()
     devtools.reset_test_database(repo.config)
     for pool in repo.pools:
         pool.reconnect()
@@ -211,7 +217,10 @@
         if not 'repo' in cls.__dict__:
             cls._build_repo()
         else:
-            cls.cnx.rollback()
+            try:
+                cls.cnx.rollback()
+            except ProgrammingError:
+                pass
             cls._refresh_repo()
 
     @classmethod
@@ -220,11 +229,10 @@
         cls.init_config(cls.config)
         cls.repo.hm.call_hooks('server_startup', repo=cls.repo)
         cls.vreg = cls.repo.vreg
-        cls._orig_cnx = cls.cnx
+        cls.websession = DBAPISession(cls.cnx, cls.admlogin,
+                                      {'password': cls.admpassword})
+        cls._orig_cnx = (cls.cnx, cls.websession)
         cls.config.repository = lambda x=None: cls.repo
-        # necessary for authentication tests
-        cls.cnx.login = cls.admlogin
-        cls.cnx.authinfo = {'password': cls.admpassword}
 
     @classmethod
     def _refresh_repo(cls):
@@ -247,7 +255,7 @@
     @property
     def adminsession(self):
         """return current server side session (using default manager account)"""
-        return self.repo._sessions[self._orig_cnx.sessionid]
+        return self.repo._sessions[self._orig_cnx[0].sessionid]
 
     def set_option(self, optname, value):
         self.config.global_set_option(optname, value)
@@ -274,6 +282,8 @@
         MAILBOX[:] = [] # reset mailbox
 
     def tearDown(self):
+        if not self.cnx._closed:
+            self.cnx.rollback()
         for cnx in self._cnxs:
             if not cnx._closed:
                 cnx.close()
@@ -297,12 +307,12 @@
         if password is None:
             password = login.encode('utf8')
         if req is None:
-            req = self._orig_cnx.request()
+            req = self._orig_cnx[0].request()
         user = req.create_entity('CWUser', login=unicode(login),
                                  upassword=password, **kwargs)
         req.execute('SET X in_group G WHERE X eid %%(x)s, G name IN(%s)'
                     % ','.join(repr(g) for g in groups),
-                    {'x': user.eid}, 'x')
+                    {'x': user.eid})
         user.clear_related_cache('in_group', 'subject')
         if commit:
             req.cnx.commit()
@@ -315,22 +325,22 @@
         else:
             if not kwargs:
                 kwargs['password'] = str(login)
-            self.cnx = repo_connect(self.repo, unicode(login),
-                                    cnxprops=ConnectionProperties('inmemory'),
-                                    **kwargs)
+            self.cnx = repo_connect(self.repo, unicode(login), **kwargs)
+            self.websession = DBAPISession(self.cnx)
             self._cnxs.append(self.cnx)
         if login == self.vreg.config.anonymous_user()[0]:
             self.cnx.anonymous_connection = True
         return self.cnx
 
     def restore_connection(self):
-        if not self.cnx is self._orig_cnx:
+        if not self.cnx is self._orig_cnx[0]:
+            if not self.cnx._closed:
+                self.cnx.close()
             try:
-                self.cnx.close()
                 self._cnxs.remove(self.cnx)
-            except ProgrammingError:
-                pass # already closed
-        self.cnx = self._orig_cnx
+            except ValueError:
+                pass
+        self.cnx, self.websession = self._orig_cnx
 
     # db api ##################################################################
 
@@ -343,8 +353,11 @@
         """executes <rql>, builds a resultset, and returns a couple (rset, req)
         where req is a FakeRequest
         """
+        if eidkey is not None:
+            warn('[3.8] eidkey is deprecated, you can safely remove this argument',
+                 DeprecationWarning, stacklevel=2)
         req = req or self.request(rql=rql)
-        return self.cnx.cursor(req).execute(unicode(rql), args, eidkey)
+        return req.execute(unicode(rql), args)
 
     @nocoverage
     def commit(self):
@@ -358,21 +371,21 @@
         try:
             self.cnx.rollback()
         except ProgrammingError:
-            pass
+            pass # connection closed
         finally:
             self.session.set_pool() # ensure pool still set after commit
 
     # # server side db api #######################################################
 
     def sexecute(self, rql, args=None, eid_key=None):
+        if eid_key is not None:
+            warn('[3.8] eid_key is deprecated, you can safely remove this argument',
+                 DeprecationWarning, stacklevel=2)
         self.session.set_pool()
-        return self.session.execute(rql, args, eid_key)
+        return self.session.execute(rql, args)
 
     # other utilities #########################################################
 
-    def entity(self, rql, args=None, eidkey=None, req=None):
-        return self.execute(rql, args, eidkey, req=req).get_entity(0, 0)
-
     @contextmanager
     def temporary_appobjects(self, *appobjects):
         self.vreg._loadedmods.setdefault(self.__module__, {})
@@ -489,7 +502,7 @@
     def request(self, *args, **kwargs):
         """return a web ui request"""
         req = self.requestcls(self.vreg, form=kwargs)
-        req.set_connection(self.cnx)
+        req.set_session(self.websession)
         return req
 
     def remote_call(self, fname, *args):
@@ -505,7 +518,7 @@
 
     def ctrl_publish(self, req, ctrl='edit'):
         """call the publish method of the edit controller"""
-        ctrl = self.vreg['controllers'].select(ctrl, req)
+        ctrl = self.vreg['controllers'].select(ctrl, req, appli=self.app)
         try:
             result = ctrl.publish()
             req.cnx.commit()
@@ -545,27 +558,31 @@
         self.set_option('auth-mode', authmode)
         self.set_option('anonymous-user', anonuser)
         req = self.request()
-        origcnx = req.cnx
-        req.cnx = None
+        origsession = req.session
+        req.session = req.cnx = None
+        del req.execute # get back to class implementation
         sh = self.app.session_handler
         authm = sh.session_manager.authmanager
         authm.anoninfo = self.vreg.config.anonymous_user()
+        authm.anoninfo = authm.anoninfo[0], {'password': authm.anoninfo[1]}
         # not properly cleaned between tests
         self.open_sessions = sh.session_manager._sessions = {}
-        return req, origcnx
+        return req, origsession
 
-    def assertAuthSuccess(self, req, origcnx, nbsessions=1):
+    def assertAuthSuccess(self, req, origsession, nbsessions=1):
         sh = self.app.session_handler
         path, params = self.expect_redirect(lambda x: self.app.connect(x), req)
-        cnx = req.cnx
+        session = req.session
         self.assertEquals(len(self.open_sessions), nbsessions, self.open_sessions)
-        self.assertEquals(cnx.login, origcnx.login)
-        self.assertEquals(cnx.anonymous_connection, False)
+        self.assertEquals(session.login, origsession.login)
+        self.assertEquals(session.anonymous_session, False)
         self.assertEquals(path, 'view')
-        self.assertEquals(params, {'__message': 'welcome %s !' % cnx.user().login})
+        self.assertEquals(params, {'__message': 'welcome %s !' % req.user.login})
 
     def assertAuthFailure(self, req, nbsessions=0):
-        self.assertRaises(AuthenticationError, self.app.connect, req)
+        self.app.connect(req)
+        self.assertIsInstance(req.session, DBAPISession)
+        self.assertEquals(req.session.cnx, None)
         self.assertEquals(req.cnx, None)
         self.assertEquals(len(self.open_sessions), nbsessions)
         clear_cache(req, 'get_authorization')
@@ -696,29 +713,19 @@
 
     # deprecated ###############################################################
 
+    @deprecated('[3.8] use self.execute(...).get_entity(0, 0)')
+    def entity(self, rql, args=None, eidkey=None, req=None):
+        if eidkey is not None:
+            warn('[3.8] eidkey is deprecated, you can safely remove this argument',
+                 DeprecationWarning, stacklevel=2)
+        return self.execute(rql, args, req=req).get_entity(0, 0)
+
     @deprecated('[3.6] use self.request().create_entity(...)')
     def add_entity(self, etype, req=None, **kwargs):
         if req is None:
             req = self.request()
         return req.create_entity(etype, **kwargs)
 
-    @deprecated('[3.4] use self.vreg["etypes"].etype_class(etype)(self.request())')
-    def etype_instance(self, etype, req=None):
-        req = req or self.request()
-        e = self.vreg['etypes'].etype_class(etype)(req)
-        e.eid = None
-        return e
-
-    @nocoverage
-    @deprecated('[3.4] use req = self.request(); rset = req.execute()',
-                stacklevel=3)
-    def rset_and_req(self, rql, optional_args=None, args=None, eidkey=None):
-        """executes <rql>, builds a resultset, and returns a
-        couple (rset, req) where req is a FakeRequest
-        """
-        return (self.execute(rql, args, eidkey),
-                self.request(rql=rql, **optional_args or {}))
-
 
 # auto-populating test classes and utilities ###################################
 
@@ -802,6 +809,10 @@
         """this method populates the database with `how_many` entities
         of each possible type. It also inserts random relations between them
         """
+        with security_enabled(self.session, read=False, write=False):
+            self._auto_populate(how_many)
+
+    def _auto_populate(self, how_many):
         cu = self.cursor()
         self.custom_populate(how_many, cu)
         vreg = self.vreg
--- a/doc/book/en/annexes/faq.rst	Thu May 06 08:24:46 2010 +0200
+++ b/doc/book/en/annexes/faq.rst	Mon Jul 19 15:36:16 2010 +0200
@@ -463,6 +463,13 @@
 
 You can find additional information in the section :ref:`securitymodel`.
 
+Is it possible to bypass security from the UI (web front) part?
+----------------------------------------------------------------
+
+No.
+
+Only Hooks/Operations can do that.
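+
+For instance, a repository-side hook may run a query with security checks
+disabled using the `security_enabled` context manager. The sketch below is
+purely illustrative (the hook name, regid and query are hypothetical):
+
+.. sourcecode:: python
+
+ from cubicweb.server import hook
+ from cubicweb.server.session import security_enabled
+
+ class SetOwnerHook(hook.Hook):
+     """hypothetical hook running a query the connected user may not be
+     allowed to run directly from the web UI"""
+     __regid__ = 'myapp.set_owner'
+     events = ('after_add_entity',)
+
+     def __call__(self):
+         # self._cw is the repository session here, not a web request
+         with security_enabled(self._cw, read=False, write=False):
+             self._cw.execute('SET X owned_by U WHERE X eid %(x)s, U login "admin"',
+                              {'x': self.entity.eid})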
+
 Can PostgreSQL and CubicWeb authentication work with kerberos ?
 ----------------------------------------------------------------
 
--- a/doc/book/en/annexes/index.rst	Thu May 06 08:24:46 2010 +0200
+++ b/doc/book/en/annexes/index.rst	Mon Jul 19 15:36:16 2010 +0200
@@ -17,11 +17,3 @@
    rql/index
    mercurial
    depends
-
-(X)HTML tricks to apply
------------------------
-
-Some web browser (Firefox for example) are not happy with empty `<div>`
-(by empty we mean that there is no content in the tag, but there
-could be attributes), so we should always use `<div></div>` even if
-it is empty and not use `<div/>`.
--- a/doc/book/en/devrepo/cubes/available-cubes.rst	Thu May 06 08:24:46 2010 +0200
+++ b/doc/book/en/devrepo/cubes/available-cubes.rst	Mon Jul 19 15:36:16 2010 +0200
@@ -2,8 +2,8 @@
 Available cubes
 ---------------
 
-An instance is based on several basic cubes. In the set of available
-basic cubes we can find for example :
+An instance is made of several basic cubes. In the set of available
+basic cubes we can find for example:
 
 Base entity types
 ~~~~~~~~~~~~~~~~~
@@ -22,14 +22,14 @@
 
 Classification
 ~~~~~~~~~~~~~~
-* folder_: Folder (to organize things but grouping them in folders)
+* folder_: Folder (to organize things by grouping them in folders)
 * keyword_: Keyword (to define classification schemes)
 * tag_: Tag (to tag anything)
 
 Other features
 ~~~~~~~~~~~~~~
 * basket_: Basket (like a shopping cart)
-* blog_: a blogging system uxing Blog and BlogEntry entity types
+* blog_: a blogging system using Blog and BlogEntry entity types
 * comment_: system to attach comment threads to entities)
 * email_: archiving management for emails (`Email`, `Emailpart`,
   `Emailthread`), trigger action in cubicweb through email
@@ -55,8 +55,9 @@
 .. _task: http://www.cubicweb.org/project/cubicweb-task
 .. _zone: http://www.cubicweb.org/project/cubicweb-zone
 
-To declare the use of a component, once installed, add the name of the component
-to the variable `__use__` in the file `__pkginfo__.py` of your own component.
+To declare the use of a cube, once installed, add the name of the cube
+and its dependency relation in the `__depends_cubes__` dictionary
+defined in the file `__pkginfo__.py` of your own cube.
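+
+A hypothetical excerpt of such a file, assuming a cube that reuses the
+`comment` and `tag` cubes, could look like:
+
+.. sourcecode:: python
+
+ # __pkginfo__.py (hypothetical excerpt)
+ __depends_cubes__ = {'comment': None,      # any version will do
+                      'tag': '>= 1.0.0'}    # require at least 1.0.0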
 
 .. note::
   The listed cubes above are available as debian-packages on `CubicWeb's forge`_.
--- a/doc/book/en/devrepo/cubes/cc-newcube.rst	Thu May 06 08:24:46 2010 +0200
+++ b/doc/book/en/devrepo/cubes/cc-newcube.rst	Mon Jul 19 15:36:16 2010 +0200
@@ -14,15 +14,15 @@
   hg ci
 
 If all went well, you should see the cube you just created in the list
-returned by ``cubicweb-ctl list`` in the section *Available cubes*,
-and if it is not the case please refer to :ref:`ConfigurationEnv`.
+returned by ``cubicweb-ctl list`` in the *Available cubes* section.
+If not, please refer to :ref:`ConfigurationEnv`.
 
 To reuse an existing cube, add it to the list named
-``__depends_cubes__`` and defined in :file:`__pkginfo__.py`.  This
-variable is used for the instance packaging (dependencies handled by
-system utility tools such as APT) and the usable cubes at the time the
-base is created (import_erschema('MyCube') will not properly work
-otherwise).
+``__depends_cubes__`` which is defined in :file:`__pkginfo__.py`.
+This variable is used for the instance packaging (dependencies handled
+by system utility tools such as APT) and to find used cubes when the
+database for the instance is created (import_erschema('MyCube') will
+not properly work otherwise).
 
 .. note::
 
--- a/doc/book/en/devrepo/cubes/layout.rst	Thu May 06 08:24:46 2010 +0200
+++ b/doc/book/en/devrepo/cubes/layout.rst	Mon Jul 19 15:36:16 2010 +0200
@@ -70,6 +70,7 @@
   |-- entities.py
   |-- hooks.py
   `-- views/
+      |-- __init__.py
       |-- forms.py
       |-- primary.py
       `-- widgets.py
@@ -78,14 +79,14 @@
 where :
 
 * ``schema`` contains the schema definition (server side only)
-* ``entities`` contains the entities definition (server side and web interface)
+* ``entities`` contains the entity definitions (server side and web interface)
 * ``hooks`` contains hooks and/or views notifications (server side only)
 * ``views`` contains the web interface components (web interface only)
 * ``test`` contains tests related to the cube (not installed)
 * ``i18n`` contains message catalogs for supported languages (server side and
   web interface)
-* ``data`` contains data files for static content (images, css, javascripts)
-  ...(web interface only)
+* ``data`` contains data files for static content (images, css,
+  javascript code)...(web interface only)
 * ``migration`` contains initialization files for new instances (``postcreate.py``)
   and a file containing dependencies of the component depending on the version
   (``depends.map``)
@@ -102,10 +103,12 @@
 The :file:`__init__.py` and :file:`site_cubicweb.py` files
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
+.. XXX WRITEME
+
 The :file:`__pkginfo__.py` file
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-It contains metadata describing your cubes, mostly useful for
+It contains metadata describing your cube, mostly useful for
 packaging.
 
 
--- a/doc/book/en/devrepo/profiling.rst	Thu May 06 08:24:46 2010 +0200
+++ b/doc/book/en/devrepo/profiling.rst	Mon Jul 19 15:36:16 2010 +0200
@@ -26,7 +26,7 @@
 
 .. sourcecode:: sh
 
-    $ cubicweb-ctl exlog < ~/myapp-rql.log
+    $ cubicweb-ctl exlog ~/myapp-rql.log
     0.07 50 Any A WHERE X eid %(x)s, X firstname A {}
     0.05 50 Any A WHERE X eid %(x)s, X lastname A {}
     0.01 1 Any X,AA ORDERBY AA DESC WHERE E eid %(x)s, E employees X, X modification_date AA {}
--- a/doc/book/en/devrepo/vreg.rst	Thu May 06 08:24:46 2010 +0200
+++ b/doc/book/en/devrepo/vreg.rst	Mon Jul 19 15:36:16 2010 +0200
@@ -1,8 +1,8 @@
 The VRegistry, selectors and application objects
 ================================================
 
-This chapter talks about core concepts of the |cubicweb| framework,
-that make it different from other frameworks (and maybe not easy to
+This chapter deals with some of the core concepts of the |cubicweb| framework
+which make it different from other frameworks (and maybe not easy to
 grasp at a first glance). To be able to do advanced development with
 |cubicweb| you need a good understanding of what is explained below.
 
--- a/doc/book/en/devweb/controllers.rst	Thu May 06 08:24:46 2010 +0200
+++ b/doc/book/en/devweb/controllers.rst	Mon Jul 19 15:36:16 2010 +0200
@@ -36,16 +36,16 @@
   operations in response to a form being submitted; it works in close
   association with the Forms, to which it delegates some of the work
 
-* the Form validator controller provides form validation from Ajax
+* the ``Form validator controller`` provides form validation from Ajax
   context, using the Edit controller, to implement the classic form
-  handling loop (user edits, hits 'submit/apply', validation occurs
+  handling loop (user edits, hits `submit/apply`, validation occurs
   server-side by way of the Form validator controller, and the UI is
   decorated with failure information, either global or per-field,
   until it is valid)
 
 `Other`:
 
-* the SendMail controller (web/views/basecontrollers.py) is reponsible
+* the ``SendMail controller`` (web/views/basecontrollers.py) is responsible
   for outgoing email notifications
 
 * the MailBugReport controller (web/views/basecontrollers.py) allows
@@ -57,14 +57,16 @@
 All controllers (should) live in the 'controllers' namespace within
 the global registry.
 
-API
-+++
+Concrete controllers
+++++++++++++++++++++
 
 Most API details should be resolved by source code inspection, as the
-various controllers have differing goals.
+various controllers have differing goals. See for instance the
+:ref:`edit_controller` chapter.
 
-`web/controller.py` contains the top-level abstract Controller class and
-its (NotImplemented) entry point `publish(rset=None)` method.
+:mod:`cubicweb.web.controller` contains the top-level abstract
+Controller class and its unimplemented entry point
+`publish(rset=None)` method.
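+
+As an illustration, a minimal concrete controller only needs a `__regid__`
+(which also determines the url path it answers to) and a `publish`
+method. The controller below is a hypothetical sketch, not part of CubicWeb:
+
+.. sourcecode:: python
+
+ from cubicweb.web.controller import Controller
+
+ class HelloController(Controller):
+     """hypothetical controller answering at the 'hello' path"""
+     __regid__ = 'hello'
+
+     def publish(self, rset=None):
+         # the returned string is used as the http response body
+         return u'<h1>hello %s</h1>' % self._cw.user.login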
 
 A handful of helpers are also provided there:
 
@@ -77,125 +79,3 @@
   implementations dealing with HTTP (thus, for instance, not the
   SendMail controller) may very well call this in their publication
   process.
-
-
-.. _edit_controller:
-
-The `edit controller`
-+++++++++++++++++++++
-
-It can be found in (:mod:`cubicweb.web.views.editcontroller`).
-
-Editing control
-~~~~~~~~~~~~~~~~
-
-.. XXX this look obsolete
-
-The parameters related to entities to edit are specified as follows ::
-
-  <field name>:<entity eid>
-
-where entity eid could be a letter in case of an entity to create. We
-name those parameters as *qualified*.
-
-1. Retrieval of entities to edit by looking for the forms parameters
-   starting by `eid:` and also having a parameter `__type` associated
-   (also *qualified* by eid)
-
-2. For all the attributes and the relations of an entity to edit:
-
-   1. search for a parameter `edits-<relation name>` or `edito-<relation name>`
-      qualified in the case of a relation where the entity is object
-   2. if found, the value returned is considered as the initial value
-      for this relaiton and we then look for the new value(s)  in the parameter
-      <relation name> (qualified)
-   3. if the value returned is different from the initial value, an database update
-      request is done
-
-3. For each entity to edit:
-
-   1. if a qualified parameter `__linkto` is specified, its value has to be
-      a string (or a list of string) such as: ::
-
-        <relation type>:<eids>:<target>
-
-      where <target> is either `subject` or `object` and each eid could be
-      separated from the others by a `_`. Target specifies if the *edited entity*
-      is subject or object of the relation and each relation specified will
-      be inserted.
-
-    2. if a qualified parameter `__clone_eid` is specified for an entity, the
-       relations of the specified entity passed as value of this parameter are
-       copied on the edited entity.
-
-    3. if a qualified parameter `__delete` is specified, its value must be
-       a string or a list of string such as follows: ::
-
-          <ssubjects eids>:<relation type>:<objects eids>
-
-       where each eid subject or object can be seperated from the other
-       by `_`. Each relation specified will be deleted.
-
-    4. if a qualified parameter `__insert` is specified, its value should
-       follow the same pattern as `__delete`, but each relation specified is
-       inserted.
-
-4. If the parameters `__insert` and/or `__delete` are found not qualified,
-   they are interpreted as explained above (independantly from the number
-   of entities edited).
-
-5. If no entity is edited but the form contains the parameters `__linkto`
-   and `eid`, this one is interpreted by using the value specified for `eid`
-   to designate the entity on which to add the relations.
-
-
-.. note::
-
-   * If the parameter `__action_delete` is found, all the entities specified
-     as to be edited will be deleted.
-
-   * If the parameter `__action_cancel` is found, no action is completed.
-
-   * If the parameter `__action_apply` is found, the editing is
-     applied normally but the redirection is done on the form (see
-     :ref:`RedirectionControl`).
-
-   * The parameter `__method` is also supported as for the main template
-
-   * If no entity is found to be edited and if there is no parameter
-     `__action_delete`, `__action_cancel`, `__linkto`, `__delete` or
-     `__insert`, an error is raised.
-
-   * Using the parameter `__message` in the form will allow to use its value
-     as a message to provide the user once the editing is completed.
-
-
-.. _RedirectionControl:
-
-Redirection control
-~~~~~~~~~~~~~~~~~~~
-Once editing is completed, there is still an issue left: where should we go
-now? If nothing is specified, the controller will do his job but it does not
-mean we will be happy with the result. We can control that by using the
-following parameters:
-
-* `__redirectpath`: path of the URL (relative to the root URL of the site,
-  no form parameters
-
-* `__redirectparams`: forms parameters to add to the path
-
-* `__redirectrql`: redirection RQL request
-
-* `__redirectvid`: redirection view identifier
-
-* `__errorurl`: initial form URL, used for redirecting in case a validation
-  error is raised during editing. If this one is not specified, an error page
-  is displayed instead of going back to the form (which is, if necessary,
-  responsible for displaying the errors)
-
-* `__form_id`: initial view form identifier, used if `__action_apply` is
-  found
-
-In general we use either `__redirectpath` and `__redirectparams` or
-`__redirectrql` and `__redirectvid`.
-
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/book/en/devweb/edition/dissection.rst	Mon Jul 19 15:36:16 2010 +0200
@@ -0,0 +1,369 @@
+
+.. _form_dissection:
+
+Dissection of a form
+--------------------
+
+This is done (again) with a vanilla instance of the `tracker`_
+cube. We will populate the database with a bunch of entities and see
+what kind of job the automatic entity form does.
+
+.. _`tracker`: http://www.cubicweb.org/project/cubicweb-tracker
+
+Patching the session object
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+In order to play interactively with web side application objects, we
+have to cheat a bit: we will decorate the session object with some
+missing artifacts that should belong to a web request object. With
+that we can instantiate and render forms interactively.
+
+The function below does the minimum needed to go through this
+exercise. Some attributes or methods may be missing for other
+purposes. It is nevertheless not complicated to enhance it if the
+need arises.
+
+.. sourcecode:: python
+
+ def monkey_patch_session(session):
+     """ useful to use the cw shell session object
+     with web appobjects, which expect more than a plain
+     data repository session
+     """
+     # for autoform selection
+     session.json_request = False
+     session.url = lambda: u'http://perdu.com'
+     session.session = session
+     session.form = {}
+     session.list_form_param = lambda *args: []
+     # for render
+     session.use_fckeditor = lambda: False
+     session._ressources = []
+     session.add_js = session.add_css = lambda *args: session._ressources.append(args)
+     session.external_resource = lambda x:{}
+     session._tabcount = 0
+     def next_tabindex():
+         session._tabcount += 1
+         return session._tabcount
+     session.next_tabindex = next_tabindex
+     return session
+
+Populating the database
+~~~~~~~~~~~~~~~~~~~~~~~
+
+We should start by setting up a bit of context: a project with two
+unpublished versions, and a ticket linked to the project and the first
+version.
+
+.. sourcecode:: python
+
+ >>> p = rql('INSERT Project P: P name "cubicweb"')
+ >>> for num in ('0.1.0', '0.2.0'):
+ ...  rql('INSERT Version V: V num "%s", V version_of P WHERE P eid %%(p)s' % num, {'p': p[0][0]})
+ ...
+ <resultset 'INSERT Version V: V num "0.1.0", V version_of P WHERE P eid %(p)s' (1 rows): [765L] (('Version',))>
+ <resultset 'INSERT Version V: V num "0.2.0", V version_of P WHERE P eid %(p)s' (1 rows): [766L] (('Version',))>
+ >>> t = rql('INSERT Ticket T: T title "let us write more doc", T done_in V, '
+             'T concerns P WHERE V num "0.1.0", P eid %(p)s', {'p': p[0][0]})
+ >>> commit()
+
+Now let's see what the edition form builds for us.
+
+.. sourcecode:: python
+
+ >>> monkey_patch_session(session)
+ >>> form = session.vreg['forms'].select('edition', session, rset=rql('Ticket T'))
+ >>> html = form.render()
+
+This creates an automatic entity form. The ``.render()`` call yields
+an html (unicode) string. The html output is shown below (with
+internal fieldset omitted).
+
+Looking at the html output
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The form envelope
+''''''''''''''''''
+
+.. sourcecode:: html
+
+ <div class="iformTitle"><span>main informations</span></div>
+ <div class="formBody">
+  <form action="http://crater:9999/validateform" method="post" enctype="application/x-www-form-urlencoded"
+        id="entityForm" onsubmit="return freezeFormButtons(&#39;entityForm&#39;);"
+        class="entityForm" cubicweb:target="eformframe">
+    <div id="progress">validating...</div>
+    <fieldset>
+      <input name="__form_id" type="hidden" value="edition" />
+      <input name="__errorurl" type="hidden" value="http://perdu.com#entityForm" />
+      <input name="__domid" type="hidden" value="entityForm" />
+      <input name="__type:763" type="hidden" value="Ticket" />
+      <input name="eid" type="hidden" value="763" />
+      <input name="__maineid" type="hidden" value="763" />
+      <input name="_cw_edited_fields:763" type="hidden"
+             value="concerns-subject,done_in-subject,priority-subject,type-subject,title-subject,description-subject,__type,_cw_generic_field" />
+      ...
+    </fieldset>
+   </form>
+ </div>
+
+The main fieldset encloses a set of hidden fields containing various
+metadata, that will be used by the `edit controller` to process it
+back correctly.
+
+The `freezeFormButtons(...)` javascript callback defined on the
+``onsubmit`` event of the form element prevents accidental multiple
+clicks in a row.
+
+The ``action`` of the form is mapped to the ``validateform`` controller
+(situated in :mod:`cubicweb.web.views.basecontrollers`).
+
+A full explanation of the validation loop is given in
+:ref:`validation_process`.
+
+.. _attributes_section:
+
+The attributes section
+''''''''''''''''''''''
+
+We can have a look at some of the inner nodes of the form. Some fields
+are omitted as they are redundant for our purposes.
+
+.. sourcecode:: html
+
+      <fieldset class="default">
+        <table class="attributeForm">
+          <tr class="title_subject_row">
+            <th class="labelCol"><label class="required" for="title-subject:763">title</label></th>
+            <td>
+              <input id="title-subject:763" maxlength="128" name="title-subject:763" size="45"
+                     tabindex="1" type="text" value="let us write more doc" />
+            </td>
+          </tr>
+          ... (description field omitted) ...
+          <tr class="priority_subject_row">
+            <th class="labelCol"><label class="required" for="priority-subject:763">priority</label></th>
+            <td>
+              <select id="priority-subject:763" name="priority-subject:763" size="1" tabindex="4">
+                <option value="important">important</option>
+                <option selected="selected" value="normal">normal</option>
+                <option value="minor">minor</option>
+              </select>
+              <div class="helper">importance</div>
+            </td>
+          </tr>
+          ... (type field omitted) ...
+          <tr class="concerns_subject_row">
+            <th class="labelCol"><label class="required" for="concerns-subject:763">concerns</label></th>
+            <td>
+              <select id="concerns-subject:763" name="concerns-subject:763" size="1" tabindex="6">
+                <option selected="selected" value="760">Foo</option>
+              </select>
+            </td>
+          </tr>
+          <tr class="done_in_subject_row">
+            <th class="labelCol"><label for="done_in-subject:763">done in</label></th>
+            <td>
+              <select id="done_in-subject:763" name="done_in-subject:763" size="1" tabindex="7">
+                <option value="__cubicweb_internal_field__"></option>
+                <option selected="selected" value="761">Foo 0.1.0</option>
+                <option value="762">Foo 0.2.0</option>
+              </select>
+              <div class="helper">version in which this ticket will be / has been  done</div>
+            </td>
+          </tr>
+        </table>
+      </fieldset>
+
+
+Note that the whole form layout has been computed by the form
+renderer: it is the renderer which produces the table
+structure. The html structure of each field, on the other hand, is
+emitted by its associated widget.
+
+While it is called the `attributes` section of the form, it actually
+contains attributes and *mandatory relations*. For each field, we
+observe:
+
+* a dedicated row with a specific class, such as ``title_subject_row``
+  (responsibility of the form renderer)
+
+* an html widget (input, select, ...) with:
+
+  * an id built from the ``rtype-role:eid`` pattern
+
+  * a name built from the same pattern
+
+  * possible values or preselected options
+
+The relations section
+'''''''''''''''''''''
+
+.. sourcecode:: html
+
+      <fieldset class="This ticket :">
+        <legend>This ticket :</legend>
+        <table class="attributeForm">
+          <tr class="_cw_generic_field_None_row">
+            <td colspan="2">
+              <table id="relatedEntities">
+                <tr><th>&#160;</th><td>&#160;</td></tr>
+                <tr id="relationSelectorRow_763" class="separator">
+                  <th class="labelCol">
+                    <select id="relationSelector_763" tabindex="8"
+                            onchange="javascript:showMatchingSelect(this.options[this.selectedIndex].value,763);">
+                      <option value="">select a relation</option>
+                      <option value="appeared_in_subject">appeared in</option>
+                      <option value="custom_workflow_subject">custom workflow</option>
+                      <option value="depends_on_object">dependency of</option>
+                      <option value="depends_on_subject">depends on</option>
+                      <option value="identical_to_subject">identical to</option>
+                      <option value="see_also_subject">see also</option>
+                    </select>
+                  </th>
+                  <td id="unrelatedDivs_763"></td>
+                </tr>
+              </table>
+            </td>
+          </tr>
+        </table>
+      </fieldset>
+
+The optional relations are grouped into a drop-down combo
+box. Selection of an item triggers a javascript function which will:
+
+* show already related entities in the div of id `relatedEntities`
+  using a two-column layout, with an action to allow deletion of
+  individual relations (there are none in this example)
+
+* provide a relation selector in the div of id `relationSelector_EID`
+  to allow the user to set up relations and trigger dynamic action on
+  the last div
+
+* fill the div of id `unrelatedDivs_EID` with a dynamically computed
+  selection widget allowing direct selection of an unrelated (but
+  relatable) entity or a switch towards the `search mode` of
+  |cubicweb| which allows full browsing and selection of an entity
+  using a dedicated action situated in the left column boxes.
+
+
+The buttons zone
+''''''''''''''''
+
+Finally comes the buttons zone.
+
+.. sourcecode:: html
+
+      <table width="100%">
+        <tbody>
+          <tr>
+            <td align="center">
+              <button class="validateButton" tabindex="9" type="submit" value="validate">
+                <img alt="OK_ICON" src="http://myapp/datafd8b5d92771209ede1018a8d5da46a37/ok.png" />
+                validate
+              </button>
+            </td>
+            <td style="align: right; width: 50%;">
+              <button class="validateButton"
+                      onclick="postForm(&#39;__action_apply&#39;, &#39;button_apply&#39;, &#39;entityForm&#39;)"
+                      tabindex="10" type="button" value="apply">
+                <img alt="APPLY_ICON" src="http://myapp/datafd8b5d92771209ede1018a8d5da46a37/plus.png" />
+                apply
+              </button>
+              <button class="validateButton"
+                      onclick="postForm(&#39;__action_cancel&#39;, &#39;button_cancel&#39;, &#39;entityForm&#39;)"
+                      tabindex="11" type="button" value="cancel">
+                <img alt="CANCEL_ICON" src="http://myapp/datafd8b5d92771209ede1018a8d5da46a37/cancel.png" />
+                cancel
+              </button>
+            </td>
+          </tr>
+        </tbody>
+      </table>
+
+The most notable artifacts here are the ``postForm(...)`` calls
+defined on click events on these buttons. This function basically
+submits the form.
+
+.. _validation_process:
+
+The form validation process
+---------------------------
+
+Preparation
+~~~~~~~~~~~
+
+After the (html) document is loaded, the ``setFormsTarget`` javascript
+function dynamically transforms the DOM as follows. For all forms of
+the DOM, it:
+
+* sets the ``target`` attribute where there is a ``cubicweb:target``
+  attribute (with the same value)
+
+* appends an empty `IFRAME` element at the end
+
+Let us have a look again at the form element. We have omitted some
+irrelevant attributes.
+
+.. sourcecode:: html
+
+  <form action="http://crater:9999/validateform" method="post"
+        enctype="application/x-www-form-urlencoded"
+        id="entityForm" cubicweb:target="eformframe"
+        target="eformframe">
+  ...
+  </form>
+
+Validation loop
+~~~~~~~~~~~~~~~
+
+On form submission, the form.action is invoked. Basically, the
+``validateform`` controller is called and its output lands in the
+specified ``target``, the iframe that was previously prepared.
+
+Hence, the main page is not replaced, only the iframe contents. The
+``validateform`` controller only outputs a tiny javascript fragment
+which is then immediately executed.
+
+.. sourcecode:: html
+
+ <iframe width="0px" height="0px" name="eformframe" id="eformframe" src="javascript: void(0)">
+   <script type="text/javascript">
+     window.parent.handleFormValidationResponse('entityForm', null, null,
+                                                [false, [2164, {"name-subject": "required field"}], null],
+                                                null);
+   </script>
+ </iframe>
+
+The ``window.parent`` part ensures the javascript function is called
+on the right context (that is: the form element). We will describe its
+parameters:
+
+* first comes the form id (`entityForm`)
+
+* then two optional callbacks for the success and failure case
+
+* an array containing:
+
+  * a boolean which indicates status (success or failure), and then, on error:
+
+    * an array structured as ``[eid, {'rtype-role': 'error msg'}, ...]``
+
+  * on success:
+
+    * an url (string) representing the next thing to jump to
+
+Given the array structure described above, it is quite simple to
+manipulate the DOM to show the errors at appropriate places.
+
+Explanation
+~~~~~~~~~~~
+
+This mechanism may seem a bit overcomplicated but we have to deal with
+two realities:
+
+* in the (strict) XHTML world, there are no iframes (hence the dynamic
+  inclusion, tolerated by Firefox)
+
+* no (or not all) browser(s) support file input field handling through
+  ajax.
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/book/en/devweb/edition/editcontroller.rst	Mon Jul 19 15:36:16 2010 +0200
@@ -0,0 +1,113 @@
+.. _edit_controller:
+
+The `edit controller`
+---------------------
+
+It can be found in (:mod:`cubicweb.web.views.editcontroller`). This
+controller processes data received from an html form to create or
+update entities.
+
+Edition handling
+~~~~~~~~~~~~~~~~
+
+The parameters related to entities to edit are specified as follows
+(first seen in :ref:`attributes_section`)::
+
+  <rtype-role>:<entity eid>
+
+where entity eid could be a letter in case of an entity to create. We
+name those parameters as *qualified*.
+
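+For instance, ``title-subject:763`` (as seen in the dissected form of the
+previous chapter) designates the `title` attribute, as subject, of the
+entity with eid 763.
+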
+* Retrieval of entities to edit is done by using the form parameters
+  `eid` and `__type`
+
+* For all the attributes and the relations of an entity to edit
+  (attributes and relations are handled a bit differently but these
+  details are not very relevant here):
+
+   * using the ``rtype``, ``role`` and ``__type`` information, fetch
+     an appropriate field instance
+
+   * check if the field has been modified (if not, proceed to the next
+     relation)
+
+   * build an rql expression to update the entity
+
+At the end, all rql expressions are executed.
+
+* For each entity to edit:
+
+   * if a qualified parameter `__linkto` is specified, its value has
+     to be a string (or a list of strings) such as: ::
+
+        <relation type>:<eids>:<target>
+
+     where <target> is either `subject` or `object` and each eid could
+     be separated from the others by a `_`. Target specifies if the
+     *edited entity* is subject or object of the relation and each
+     relation specified will be inserted.
+
+    * if a qualified parameter `__clone_eid` is specified for an entity, the
+      relations of the specified entity passed as value of this parameter are
+      copied on the edited entity.
+
+    * if a qualified parameter `__delete` is specified, its value must be
+      a string or a list of strings such as follows: ::
+
+          <subjects eids>:<relation type>:<objects eids>
+
+      where each subject or object eid can be separated from the others
+      by `_`. Each specified relation will be deleted.
+
+
+* If no entity is edited but the form contains the parameters `__linkto`
+  and `eid`, `__linkto` is interpreted by using the value specified for `eid`
+  to designate the entity on which to add the relations.
+
+.. note::
+
+   * if the parameter `__action_delete` is found, all the entities specified
+     as to be edited will be deleted.
+
+   * if the parameter `__action_cancel` is found, no action is completed.
+
+   * if the parameter `__action_apply` is found, the editing is
+     applied normally but the redirection is done on the form (see
+     :ref:`RedirectionControl`).
+
+   * if no entity is found to be edited and if there is no parameter
+     `__action_delete`, `__action_cancel`, `__linkto`, `__delete` or
+     `__insert`, an error is raised.
+
+   * using the parameter `__message` in the form will allow its value to be
+     displayed to the user once the editing is completed.
+
+
+.. _RedirectionControl:
+
+Redirection control
+~~~~~~~~~~~~~~~~~~~
+Once editing is completed, there is still an issue left: where should we go
+now? If nothing is specified, the controller will do its job but it does not
+mean we will be happy with the result. We can control that by using the
+following parameters:
+
+* `__redirectpath`: path of the URL (relative to the root URL of the site,
+  without form parameters)
+
+* `__redirectparams`: forms parameters to add to the path
+
+* `__redirectrql`: redirection RQL request
+
+* `__redirectvid`: redirection view identifier
+
+* `__errorurl`: initial form URL, used for redirecting in case a validation
+  error is raised during editing. If this one is not specified, an error page
+  is displayed instead of going back to the form (which is, if necessary,
+  responsible for displaying the errors)
+
+* `__form_id`: initial view form identifier, used if `__action_apply` is
+  found
+
+In general we use either `__redirectpath` and `__redirectparams` or
+`__redirectrql` and `__redirectvid`.
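+
+For instance, a hypothetical sketch setting these parameters as hidden fields
+of an edition form, so that a successful edition redirects to the edited
+project's tickets tab:
+
+.. sourcecode:: python
+
+ # hypothetical view code; 'project/%s' and 'tab=tickets' are made-up values
+ form = self._cw.vreg['forms'].select('edition', self._cw, rset=rset)
+ form.add_hidden('__redirectpath', 'project/%s' % entity.eid)
+ form.add_hidden('__redirectparams', 'tab=tickets')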
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/book/en/devweb/edition/examples.rst	Mon Jul 19 15:36:16 2010 +0200
@@ -0,0 +1,231 @@
+Examples
+--------
+
+(Automatic) Entity form
+~~~~~~~~~~~~~~~~~~~~~~~
+
+Looking at some cubes available on the `cubicweb forge`_ we find some
+with form manipulation. The following example comes from the
+`conference`_ cube. It extends the change state form for the case
+where a ``Talk`` entity is getting into ``submitted`` state. The goal
+is to select reviewers for the submitted talk.
+
+.. _`cubicweb forge`: http://www.cubicweb.org/view?rql=Any+P+ORDERBY+N+WHERE+P+name+LIKE+%22cubicweb-%25%22%2C+P+is+Project%2C+P+name+N
+.. _`conference`: http://www.cubicweb.org/project/cubicweb-conference
+
+.. sourcecode:: python
+
+ from cubicweb.web import formfields as ff, formwidgets as fwdgs
+ class SendToReviewerStatusChangeView(ChangeStateFormView):
+     __select__ = (ChangeStateFormView.__select__ &
+                   implements('Talk') &
+                   rql_condition('X in_state S, S name "submitted"'))
+
+     def get_form(self, entity, transition, **kwargs):
+         form = super(SendToReviewerStatusChangeView, self).get_form(entity, transition, **kwargs)
+         relation = ff.RelationField(name='reviews', role='object',
+                                     eidparam=True,
+                                     label=_('select reviewers'),
+                                     widget=fwdgs.Select(multiple=True))
+         form.append_field(relation)
+         return form
+
+Simple extension of a form can be done from within the `FormView`
+wrapping the form. FormView instances have a handy ``get_form`` method
+that returns the form to be rendered. Here we add a ``RelationField``
+to the base state change form.
+
+One notable point is the ``eidparam`` argument: it tells both the
+field and the ``edit controller`` that the field is linked to a
+specific entity.
+
+It is hence entirely possible to add ad-hoc fields that will be
+processed by some specialized instance of the edit controller.
+
+
+Ad-hoc fields form
+~~~~~~~~~~~~~~~~~~
+
+We want to define a form doing something else than editing an entity. The idea is
+to propose a form to send an email to entities in a resultset which implements
+:class:`IEmailable`.  Let's take a simplified version of what you'll find in
+:mod:`cubicweb.web.views.massmailing`.
+
+Here is the source code:
+
+.. sourcecode:: python
+
+    def sender_value(form):
+	return '%s <%s>' % (form._cw.user.dc_title(), form._cw.user.get_email())
+
+    def recipient_choices(form, field):
+	return [(e.get_email(), e.eid)
+                 for e in form.cw_rset.entities()
+		 if e.get_email()]
+
+    def recipient_value(form):
+	return [e.eid for e in form.cw_rset.entities()
+                if e.get_email()]
+
+    class MassMailingForm(forms.FieldsForm):
+	__regid__ = 'massmailing'
+
+	needs_js = ('cubicweb.widgets.js',)
+	domid = 'sendmail'
+	action = 'sendmail'
+
+	sender = ff.StringField(widget=TextInput({'disabled': 'disabled'}),
+				label=_('From:'),
+				value=sender_value)
+
+	recipient = ff.StringField(widget=CheckBox(),
+	                           label=_('Recipients:'),
+				   choices=recipient_choices,
+				   value=recipient_value)
+
+	subject = ff.StringField(label=_('Subject:'), max_length=256)
+
+	mailbody = ff.StringField(widget=AjaxWidget(wdgtype='TemplateTextField',
+						    inputid='mailbody'))
+
+	form_buttons = [ImgButton('sendbutton', "javascript: $('#sendmail').submit()",
+				  _('send email'), 'SEND_EMAIL_ICON'),
+			ImgButton('cancelbutton', "javascript: history.back()",
+				  stdmsgs.BUTTON_CANCEL, 'CANCEL_EMAIL_ICON')]
+
+Let's detail what's going on up there. Our form will hold four fields:
+
+* a sender field, which is disabled and will simply contain the user's name and
+  email
+
+* a recipients field, which will be displayed as a list of users in the context
+  result set with checkboxes, so the user can still choose who will receive the
+  mailing by checking or unchecking them. By default all of them will be checked
+  since the field's value returns a list containing the same eids as those
+  returned by the vocabulary function.
+
+* a subject field, limited to 256 characters (hence we know a
+  :class:`~cubicweb.web.formwidgets.TextInput` will be used, as explained in
+  :class:`~cubicweb.web.formfields.StringField`)
+
+* a mailbody field. This field uses an ajax widget, defined in `cubicweb.widgets.js`,
+  whose definition won't be shown here. Notice though that we declare that this
+  form needs this javascript file by using `needs_js`
+
+Last but not least, we add two button controls: one to post the form using
+javascript (`$('#sendmail')` being the jQuery call to get the element with DOM id
+set to 'sendmail', which is our form DOM id as specified by its `domid`
+attribute), another to cancel the form, which will go back to the previous page
+using another javascript call. We also specify an image to use as the button
+icon, as a resource identifier (see :ref:`external_resources`) given as last
+argument to :class:`cubicweb.web.formwidgets.ImgButton`.
+
+To see this form, we still have to wrap it in a view. This is pretty simple:
+
+.. sourcecode:: python
+
+    class MassMailingFormView(form.FormViewMixIn, EntityView):
+	__regid__ = 'massmailing'
+	__select__ = implements(IEmailable) & authenticated_user()
+
+	def call(self):
+	    form = self._cw.vreg['forms'].select('massmailing', self._cw,
+	                                         rset=self.cw_rset)
+	    self.w(form.render())
+
+As you see, we simply define a view with a proper selector so it only applies to a
+result set containing :class:`IEmailable` entities, and so that only authenticated
+users can use it. Then in the `call()` method for this view we
+simply select the above form and write what its `.render()` method returns.
+
+When this form is submitted, a controller with id 'sendmail' will be called (as
+specified using `action`). This controller will be responsible for actually sending
+the mail to the specified recipients.
+
+Here is what it looks like:
+
+.. sourcecode:: python
+
+   class SendMailController(Controller):
+       __regid__ = 'sendmail'
+       __select__ = (authenticated_user() &
+                     match_form_params('recipient', 'mailbody', 'subject'))
+
+       def publish(self, rset=None):
+           body = self._cw.form['mailbody']
+           subject = self._cw.form['subject']
+           eids = self._cw.form['recipient']
+           # eids may be a string if only one recipient was specified
+           if isinstance(eids, basestring):
+               rset = self._cw.execute('Any X WHERE X eid %(x)s', {'x': eids})
+           else:
+               rset = self._cw.execute('Any X WHERE X eid in (%s)' % (','.join(eids)))
+           recipients = list(rset.entities())
+           msg = format_mail({'email' : self._cw.user.get_email(),
+                              'name' : self._cw.user.dc_title()},
+                             recipients, body, subject)
+           if not self._cw.vreg.config.sendmails([(msg, recipients)]):
+               msg = self._cw._('could not connect to the SMTP server')
+           else:
+               msg = self._cw._('emails successfully sent')
+           raise Redirect(self._cw.build_url(__message=msg))
+
+
+The entry point of a controller is the `publish` method. In that case we simply get
+back post values from the request's `form` attribute, get user instances according
+to the eids found in the 'recipient' form value, and send email after calling
+:func:`format_mail` to get a proper email message. Whether or not the email could
+be sent, we then redirect to the index page with a proper message to inform the
+user.
+
+Also notice that our controller has a selector that denies access to
+anonymous users (we don't want our instance to be used as a spam
+relay), but also checks that the expected parameters are specified in
+the form. That avoids some defensive programming later (though it's not enough
+to handle all possible error cases).
+
+To conclude our example, suppose we wish a different form layout and that the existing
+renderers are not satisfactory (we would check that first of course :). We would then
+have to define our own renderer:
+
+.. sourcecode:: python
+
+    class MassMailingFormRenderer(formrenderers.FormRenderer):
+        __regid__ = 'massmailing'
+
+        def _render_fields(self, fields, w, form):
+            w(u'<table class="headersform">')
+            for field in fields:
+                if field.name == 'mailbody':
+                    w(u'</table>')
+                    w(u'<div id="toolbar">')
+                    w(u'<ul>')
+                    for button in form.form_buttons:
+                        w(u'<li>%s</li>' % button.render(form))
+                    w(u'</ul>')
+                    w(u'</div>')
+                    w(u'<div>')
+                    w(field.render(form, self))
+                    w(u'</div>')
+                else:
+                    w(u'<tr>')
+                    w(u'<td class="hlabel">%s</td>' %
+                      self.render_label(form, field))
+                    w(u'<td class="hvalue">')
+                    w(field.render(form, self))
+                    w(u'</td></tr>')
+
+        def render_buttons(self, w, form):
+            pass
+
+We simply override the `_render_fields` and `render_buttons` methods of the base form renderer
+to arrange fields as we desire: here we'll first have a two-column table with the label and
+value of the sender, recipients and subject fields (form order respected), then the form controls,
+then a div containing the textarea for the email's content.
+
+To bind this renderer to our form, we should add to our form definition above:
+
+.. sourcecode:: python
+
+    form_renderer_id = 'massmailing'
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/book/en/devweb/edition/form.rst	Mon Jul 19 15:36:16 2010 +0200
@@ -0,0 +1,223 @@
+HTML form construction
+----------------------
+
+CubicWeb provides the somewhat usual form / field / widget / renderer abstraction
+to provide generic building blocks which will greatly help you in building forms
+properly integrated with CubicWeb (coherent display, error handling, etc...),
+while keeping things as flexible as possible.
+
+A ``form`` basically only holds a set of ``fields``, and has to be bound to a
+``renderer`` which is responsible to layout them. Each field is bound to a
+``widget`` that will be used to fill in value(s) for that field (at form
+generation time) and 'decode' (fetch and give a proper Python type to) values
+sent back by the browser.
+
+The ``field`` should be used according to the type of what you want to edit.
+E.g. if you want to edit some date, you'll have to use the
+:class:`cubicweb.web.formfields.DateField`. Then you can choose among multiple
+widgets to edit it, for instance :class:`cubicweb.web.formwidgets.TextInput` (a
+bare text field), :class:`~cubicweb.web.formwidgets.DateTimePicker` (a simple
+calendar) or even :class:`~cubicweb.web.formwidgets.JQueryDatePicker` (the JQuery
+calendar).  You can of course also write your own widget.
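+
+For instance, here is a minimal sketch of a field declaration using the jQuery
+calendar (the form and field names below are hypothetical):
+
+.. sourcecode:: python
+
+ from cubicweb.web import formfields as ff, formwidgets as fwdgs
+ from cubicweb.web.views import forms
+
+ class DeadlineForm(forms.FieldsForm):
+     __regid__ = 'deadline'
+     # edit a date with the jQuery calendar rather than a bare text input
+     deadline = ff.DateField(label=_('Deadline:'),
+                             widget=fwdgs.JQueryDatePicker())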
+
+Exploring the available forms
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+A small excursion into a |cubicweb| shell is the quickest way to
+discover available forms (or application objects in general).
+
+.. sourcecode:: python
+
+ >>> from pprint import pprint
+ >>> pprint( session.vreg['forms'] )
+ {'base': [<class 'cubicweb.web.views.forms.FieldsForm'>,
+           <class 'cubicweb.web.views.forms.EntityFieldsForm'>],
+  'changestate': [<class 'cubicweb.web.views.workflow.ChangeStateForm'>,
+                  <class 'cubes.tracker.views.forms.VersionChangeStateForm'>],
+  'composite': [<class 'cubicweb.web.views.forms.CompositeForm'>,
+                <class 'cubicweb.web.views.forms.CompositeEntityForm'>],
+  'deleteconf': [<class 'cubicweb.web.views.editforms.DeleteConfForm'>],
+  'edition': [<class 'cubicweb.web.views.autoform.AutomaticEntityForm'>,
+              <class 'cubicweb.web.views.workflow.TransitionEditionForm'>,
+              <class 'cubicweb.web.views.workflow.StateEditionForm'>],
+  'logform': [<class 'cubicweb.web.views.basetemplates.LogForm'>],
+  'massmailing': [<class 'cubicweb.web.views.massmailing.MassMailingForm'>],
+  'muledit': [<class 'cubicweb.web.views.editforms.TableEditForm'>],
+  'sparql': [<class 'cubicweb.web.views.sparql.SparqlForm'>]}
+
+
+The two most important form families here (for all practical purposes)
+are `base` and `edition`. Most of the time one wants alterations of
+the AutomaticEntityForm (from the `edition` category).
+
+The Automatic Entity Form
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. automodule:: cubicweb.web.views.autoform
+
+Anatomy of a choices function
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Let's have a look at the `ticket_done_in_choices` function given to
+the `choices` parameter of the relation tag that is applied to the
+('Ticket', 'done_in', '*') relation definition, as it is both typical
+and sophisticated enough. This is a code snippet from the `tracker`_
+cube.
+
+.. _`tracker`: http://www.cubicweb.org/project/cubicweb-tracker
+
+The ``Ticket`` entity type can be related to a ``Project`` and a
+``Version``, respectively through the ``concerns`` and ``done_in``
+relations. When a user is about to edit a ticket, we want to fill the
+combo box for the ``done_in`` relation with values pertinent with
+respect to the context. The important context here is:
+
+* creation or modification (we cannot fetch values the same way in
+  either case)
+
+* ``__linkto`` url parameter given in a creation context
+
+.. sourcecode:: python
+
+    from cubicweb.web import formfields
+
+    def ticket_done_in_choices(form, field):
+        entity = form.edited_entity
+        # first see if its specified by __linkto form parameters
+        linkedto = formfields.relvoc_linkedto(entity, 'done_in', 'subject')
+        if linkedto:
+            return linkedto
+        # it isn't, get initial values
+        vocab = formfields.relvoc_init(entity, 'done_in', 'subject')
+        veid = None
+        # try to fetch the (already or pending) related version and project
+        if not entity.has_eid():
+            peids = entity.linked_to('concerns', 'subject')
+            peid = peids and peids[0]
+        else:
+            peid = entity.project.eid
+            veid = entity.done_in and entity.done_in[0].eid
+        if peid:
+            # we can complete the vocabulary with relevant values
+            rschema = form._cw.vreg.schema['done_in'].rdef('Ticket', 'Version')
+            rset = form._cw.execute(
+                'Any V, VN ORDERBY version_sort_value(VN) '
+                'WHERE V version_of P, P eid %(p)s, V num VN, '
+                'V in_state ST, NOT ST name "published"', {'p': peid})
+            vocab += [(v.view('combobox'), v.eid) for v in rset.entities()
+                      if rschema.has_perm(form._cw, 'add', toeid=v.eid)
+                      and v.eid != veid]
+        return vocab
+
+The first thing we have to do is fetch potential values from the
+``__linkto`` url parameter that is often found in entity creation
+contexts (the creation action provides such a parameter with a
+predetermined value; for instance in this case, ticket creation could
+occur in the context of a `Version` entity). The
+:mod:`cubicweb.web.formfields` module provides a ``relvoc_linkedto``
+utility function that gets a list suitably filled with vocabulary
+values.
+
+.. sourcecode:: python
+
+        linkedto = formfields.relvoc_linkedto(entity, 'done_in', 'subject')
+        if linkedto:
+            return linkedto
+
+Then, if no ``__linkto`` argument was given, we must prepare the
+vocabulary with an initial empty value (because `done_in` is not
+mandatory, we must allow the user to not select a version) and already
+linked values. This is done with the ``relvoc_init`` function.
+
+.. sourcecode:: python
+
+        vocab = formfields.relvoc_init(entity, 'done_in', 'subject')
+
+But then, we have to give more: if the ticket is related to a project,
+we should provide all the unpublished versions of this project
+(`Version` and `Project` can be related through the `version_of`
+relation). Conversely, if we do not yet know the project, it would not
+make sense to propose all existing versions as it could potentially
+lead to inconsistencies. Even if these will be caught by some
+RQLConstraint, it is wise not to tempt the user with error-inducing
+candidate values.
+
+The "ticket is related to a project" part must be decomposed as:
+
+* this is a new ticket which is created in the context of a project
+
+* this is an already existing ticket, linked to a project (through the
+  `concerns` relation)
+
+* there is no related project (quite unlikely given the cardinality of
+  the `concerns` relation, so it can only mean that we are creating a
+  new ticket, and a project is about to be selected but there is no
+  ``__linkto`` argument)
+
+.. note::
+
+   the last situation could happen in several ways, but of course in a
+   polished application, the paths to ticket creation should be
+   controlled so as to avoid a suboptimal end-user experience
+
+Hence, we try to fetch the related project.
+
+.. sourcecode:: python
+
+        veid = None
+        if not entity.has_eid():
+            peids = entity.linked_to('concerns', 'subject')
+            peid = peids and peids[0]
+        else:
+            peid = entity.project.eid
+            veid = entity.done_in and entity.done_in[0].eid
+
+We distinguish between entity creation and entity modification using
+the ``Entity.has_eid()`` method, which returns `False` on creation. At
+creation time the only way to get a project is through the
+``__linkto`` parameter. Notice that we also fetch the version the
+ticket is `done_in`, if any, for later use.
+
+.. note::
+
+  the implementation above assumes that if there is a ``__linkto``
+  parameter, it is only about a project. While it makes sense most of
+  the time, it is not an absolute. Depending on how an entity creation
+  action url is built, several outcomes could be possible
+  there.
+
+If the ticket is already linked to a project, fetching it is
+trivial. Then we add the relevant version to the initial vocabulary.
+
+.. sourcecode:: python
+
+        if peid:
+            rschema = form._cw.vreg.schema['done_in'].rdef('Ticket', 'Version')
+            rset = form._cw.execute(
+                'Any V, VN ORDERBY version_sort_value(VN) '
+                'WHERE V version_of P, P eid %(p)s, V num VN, '
+                'V in_state ST, NOT ST name "published"', {'p': peid})
+            vocab += [(v.view('combobox'), v.eid) for v in rset.entities()
+                      if rschema.has_perm(form._cw, 'add', toeid=v.eid)
+                      and v.eid != veid]
+
+.. warning::
+
+   we have to defend ourselves against the lack of a project eid. Given
+   the cardinality of the `concerns` relation, there *must* be a
+   project, but this rule can only be enforced at validation time,
+   which will of course only happen after form submission.
+
+Here, given a project eid, we complete the vocabulary with all
+unpublished versions defined in the project (sorted by number) for
+which the current user is allowed to establish the relation.
+
+APIs
+~~~~
+
+.. automodule:: cubicweb.web.formfields
+.. automodule:: cubicweb.web.formwidgets
+.. automodule:: cubicweb.web.views.forms
+.. automodule:: cubicweb.web.views.formrenderers
+
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/book/en/devweb/edition/index.rst	Mon Jul 19 15:36:16 2010 +0200
@@ -0,0 +1,15 @@
+Edition control
+===============
+
+This chapter covers the editing capabilities of |cubicweb|. It
+explains html Form construction, the Edit Controller and their
+interactions.
+
+
+.. toctree::
+   :maxdepth: 2
+
+   form
+   dissection
+   editcontroller
+   examples
--- a/doc/book/en/devweb/form.rst	Thu May 06 08:24:46 2010 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,216 +0,0 @@
-HTML form construction
-----------------------
-
-CubicWeb provides the somewhat usual form / field / widget / renderer abstraction
-to provide generic building blocks which will greatly help you in building forms
-properly integrated with CubicWeb (coherent display, error handling, etc...),
-while keeping things as flexible as possible.
-
-A **form** basically only holds a set of **fields**, and has te be bound to a
-**renderer** which is responsible to layout them. Each field is bound to a
-**widget** that will be used to fill in value(s) for that field (at form
-generation time) and 'decode' (fetch and give a proper Python type to) values
-sent back by the browser.
-
-The **field** should be used according to the type of what you want to edit.
-E.g. if you want to edit some date, you'll have to use the
-:class:`cubicweb.web.formfields.DateField`. Then you can choose among multiple
-widgets to edit it, for instance :class:`cubicweb.web.formwidgets.TextInput` (a
-bare text field), :class:`~cubicweb.web.formwidgets.DateTimePicker` (a simple
-calendar) or even :class:`~cubicweb.web.formwidgets.JQueryDatePicker` (the JQuery
-calendar).  You can of course also write your own widget.
-
-
-.. automodule:: cubicweb.web.views.autoform
-
-
-Example of bare fields form
-~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-We want to define a form doing something else than editing an entity. The idea is
-to propose a form to send an email to entities in a resultset which implements
-:class:`IEmailable`.  Let's take a simplified version of what you'll find in
-:mod:`cubicweb.web.views.massmailing`.
-
-Here is the source code:
-
-.. sourcecode:: python
-
-    def sender_value(form):
-	return '%s <%s>' % (form._cw.user.dc_title(), form._cw.user.get_email())
-
-    def recipient_choices(form, field):
-	return [(e.get_email(), e.eid) for e in form.cw_rset.entities()
-		 if e.get_email()]
-
-    def recipient_value(form):
-	return [e.eid for e in form.cw_rset.entities() if e.get_email()]
-
-    class MassMailingForm(forms.FieldsForm):
-	__regid__ = 'massmailing'
-
-	needs_js = ('cubicweb.widgets.js',)
-	domid = 'sendmail'
-	action = 'sendmail'
-
-	sender = ff.StringField(widget=TextInput({'disabled': 'disabled'}),
-				label=_('From:'),
-				value=sender_value)
-
-	recipient = ff.StringField(widget=CheckBox(),
-	                           label=_('Recipients:'),
-				   choices=recipient_choices,
-				   value=recipients_value)
-
-	subject = ff.StringField(label=_('Subject:'), max_length=256)
-
-	mailbody = ff.StringField(widget=AjaxWidget(wdgtype='TemplateTextField',
-						    inputid='mailbody'))
-
-	form_buttons = [ImgButton('sendbutton', "javascript: $('#sendmail').submit()",
-				  _('send email'), 'SEND_EMAIL_ICON'),
-			ImgButton('cancelbutton', "javascript: history.back()",
-				  stdmsgs.BUTTON_CANCEL, 'CANCEL_EMAIL_ICON')]
-
-Let's detail what's going on up there. Our form will hold four fields:
-
-* a sender field, which is disabled and will simply contains the user's name and
-  email
-
-* a recipients field, which will be displayed as a list of users in the context
-  result set with checkboxes so user can still choose who will receive his mailing
-  by checking or not the checkboxes. By default all of them will be checked since
-  field's value return a list containing same eids as those returned by the
-  vocabulary function.
-
-* a subject field, limited to 256 characters (hence we know a
-  :class:`~cubicweb.web.formwidgets.TextInput` will be used, as explained in
-  :class:`~cubicweb.web.formfields.StringField`)
-
-* a mailbody field. This field use an ajax widget, defined in `cubicweb.widgets.js`,
-  and whose definition won't be shown here. Notice though that we tell this form
-  need this javascript file by using `needs_js`
-
-Last but not least, we add two buttons control: one to post the form using
-javascript (`$('#sendmail')` being the jQuery call to get the element with DOM id
-set to 'sendmail', which is our form DOM id as specified by its `domid`
-attribute), another to cancel the form which will go back to the previous page
-using another javascript call. Also we specify an image to use as button icon as a
-resource identifier (see :ref:`external_resources`) given as last argument to
-:class:`cubicweb.web.formwidgets.ImgButton`.
-
-To see this form, we still have to wrap it in a view. This is pretty simple:
-
-.. sourcecode:: python
-
-    class MassMailingFormView(form.FormViewMixIn, EntityView):
-	__regid__ = 'massmailing'
-	__select__ = implements(IEmailable) & authenticated_user()
-
-	def call(self):
-	    form = self._cw.vreg['forms'].select('massmailing', self._cw,
-	                                         rset=self.cw_rset)
-	    self.w(form.render())
-
-As you see, we simply define a view with proper selector so it only apply to a
-result set containing :class:`IEmailable` entities, and so that only users in the
-managers or users group can use it. Then in the `call()` method for this view we
-simply select the above form and write what its `.render()` method returns.
-
-When this form is submitted, a controller with id 'sendmail' will be called (as
-specified using `action`). This controller will be responsible to actually send
-the mail to specified recipients.
-
-Here is what it looks like:
-
-.. sourcecode:: python
-
-   class SendMailController(Controller):
-       __regid__ = 'sendmail'
-       __select__ = authenticated_user() & match_form_params('recipient', 'mailbody', 'subject')
-
-       def publish(self, rset=None):
-           body = self._cw.form['mailbody']
-           subject = self._cw.form['subject']
-           eids = self._cw.form['recipient']
-           # eids may be a string if only one recipient was specified
-           if isinstance(eids, basestring):
-               rset = self._cw.execute('Any X WHERE X eid %(x)s', {'x': eids})
-           else:
-               rset = self._cw.execute('Any X WHERE X eid in (%s)' % (','.join(eids)))
-           recipients = list(rset.entities())
-           msg = format_mail({'email' : self._cw.user.get_email(),
-                              'name' : self._cw.user.dc_title()},
-                             recipients, body, subject)
-           if not self._cw.vreg.config.sendmails([(msg, recipients]):
-               msg = self._cw._('could not connect to the SMTP server')
-           else:
-               msg = self._cw._('emails successfully sent')
-           raise Redirect(self._cw.build_url(__message=msg))
-
-
-The entry point of a controller is the publish method. In that case we simply get
-back post values in request's `form` attribute, get user instances according
-to eids found in the 'recipient' form value, and send email after calling
-:func:`format_mail` to get a proper email message. If we can't send email or
-if we successfully sent email, we redirect to the index page with proper message
-to inform the user.
-
-Also notice that our controller has a selector that deny access to it to
-anonymous users (we don't want our instance to be used as a spam relay), but also
-check expected parameters are specified in forms. That avoids later defensive
-programming (though it's not enough to handle all possible error cases).
-
-To conclude our example, suppose we wish a different form layout and that existent
-renderers are not satisfying (we would check that first of course :). We would then
-have to define our own renderer:
-
-.. sourcecode:: python
-
-    class MassMailingFormRenderer(formrenderers.FormRenderer):
-        __regid__ = 'massmailing'
-
-        def _render_fields(self, fields, w, form):
-            w(u'<table class="headersform">')
-            for field in fields:
-                if field.name == 'mailbody':
-                    w(u'</table>')
-                    w(u'<div id="toolbar">')
-                    w(u'<ul>')
-                    for button in form.form_buttons:
-                        w(u'<li>%s</li>' % button.render(form))
-                    w(u'</ul>')
-                    w(u'</div>')
-                    w(u'<div>')
-                    w(field.render(form, self))
-                    w(u'</div>')
-                else:
-                    w(u'<tr>')
-                    w(u'<td class="hlabel">%s</td>' % self.render_label(form, field))
-                    w(u'<td class="hvalue">')
-                    w(field.render(form, self))
-                    w(u'</td></tr>')
-
-        def render_buttons(self, w, form):
-            pass
-
-We simply override the `_render_fields` and `render_buttons` method of the base form renderer
-to arrange fields as we desire it: here we'll have first a two columns table with label and
-value of the sender, recipients and subject field (form order respected), then form controls,
-then a div containing the textarea for the email's content.
-
-To bind this renderer to our form, we should add to our form definition above:
-
-.. sourcecode:: python
-
-    form_renderer_id = 'massmailing'
-
-API
-~~~
-
-.. automodule:: cubicweb.web.formfields
-.. automodule:: cubicweb.web.formwidgets
-.. automodule:: cubicweb.web.views.forms
-.. automodule:: cubicweb.web.views.formrenderers
-
-.. Example of entity fields form
--- a/doc/book/en/devweb/index.rst	Thu May 06 08:24:46 2010 +0200
+++ b/doc/book/en/devweb/index.rst	Mon Jul 19 15:36:16 2010 +0200
@@ -14,7 +14,7 @@
    rtags
    js
    css
-   form
+   edition/index
    facets
    internationalization
 ..   property
--- a/doc/book/en/devweb/js.rst	Thu May 06 08:24:46 2010 +0200
+++ b/doc/book/en/devweb/js.rst	Mon Jul 19 15:36:16 2010 +0200
@@ -22,8 +22,8 @@
 
 .. XXX external_resources variable (which needs love)
 
-CubicWeb javascript API
-~~~~~~~~~~~~~~~~~~~~~~~
+Server-side Javascript API
+~~~~~~~~~~~~~~~~~~~~~~~~~~
 
 Javascript resources are typically loaded on demand, from views. The
 request object (available as self._cw from most application objects,
@@ -39,8 +39,8 @@
   snippet inline in the html headers. This is quite useful for setting
   up early jQuery(document).ready(...) initialisations.
 
-CubicWeb javascript events
-~~~~~~~~~~~~~~~~~~~~~~~~~~
+Javascript events
+~~~~~~~~~~~~~~~~~
 
 * ``server-response``: this event is triggered on HTTP responses (both
   standard and ajax). The two following extra parameters are passed
@@ -53,8 +53,8 @@
     ajax request, otherwise the document itself for standard HTTP
     requests.
 
-Important AJAX APIS
-~~~~~~~~~~~~~~~~~~~
+Important Javascript AJAX APIs
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
 * `asyncRemoteExec` and `remoteExec` are the base building blocks for
   doing arbitrary async (resp. sync) communications with the server
@@ -72,10 +72,10 @@
 A simple example with asyncRemoteExec
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-In the python side, we have to extend the BaseController class. The
-@jsonize decorator ensures that the `return value` of the method is
-encoded as JSON data. By construction, the JSonController inputs
-everything in JSON format.
+On the Python side, we have to extend the ``BaseController``
+class. The ``@jsonize`` decorator ensures that the return value of the
+method is encoded as JSON data. By construction, the JSonController
+inputs everything in JSON format.
 
 .. sourcecode: python
 
@@ -225,13 +225,13 @@
 `http://myinstance/json?`). The actual JSonController method name is
 encoded in the `params` dictionary using the `fname` key.
 
-A more real-life example from CubicWeb
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+A more real-life example
+~~~~~~~~~~~~~~~~~~~~~~~~
 
-A frequent use case of Web 2 applications is the delayed (or
-on-demand) loading of pieces of the DOM. This is typically achieved
-using some preparation of the initial DOM nodes, jQuery event handling
-and proper use of loadxhtml.
+A frequent need of Web 2.0 applications is the delayed (or demand
+driven) loading of pieces of the DOM. This is typically achieved using
+some preparation of the initial DOM nodes, jQuery event handling and
+proper use of loadxhtml.
 
 We present here a skeletal version of the mecanism used in CubicWeb
 and available in web/views/tabs.py, in the `LazyViewMixin` class.
@@ -317,9 +317,6 @@
     }
 
 
-
-
-.. XXX reloadComponent
 .. XXX userCallback / user_callback
 
 Javascript library: overview
--- a/doc/book/en/devweb/request.rst	Thu May 06 08:24:46 2010 +0200
+++ b/doc/book/en/devweb/request.rst	Mon Jul 19 15:36:16 2010 +0200
@@ -4,20 +4,28 @@
 Overview
 ````````
 
-A request instance is created when an HTTP request is sent to the web server.
-It contains informations such as form parameters, user authenticated, etc.
+A request instance is created when an HTTP request is sent to the web
+server. It contains information such as form parameters, the
+authenticated user, etc. It is a ubiquitous object, used throughout
+the framework and applications.
 
-**Globally, a request represents a user query, either through HTTP or not
-(we also talk about RQL queries on the server side for example).**
+**A request represents a user query, either through HTTP or not (we
+also talk about RQL queries on the server side for example).**
+
+Here is a non-exhaustive list of attributes and methods available on
+request objects (grouped by category):
 
-An instance of `Request` has the following attributes:
+* `Browser control`:
 
-* `user`, instance of `cubicweb.common.utils.User` corresponding to the authenticated
-  user
-* `form`, dictionary containing the values of a web form
-* `encoding`, character encoding to use in the response
+  * `ie_browser`: tells if the browser belongs to the Internet Explorer
+    family
+  * `xhtml_browser`: tells if the browser is able to properly handle
+    XHTML (at the HTTP content_type level)
 
-But also:
+* `User and identification`:
+
+  * `user`, instance of `cubicweb.common.utils.User` corresponding to
+    the authenticated user
 
 * `Session data handling`
 
@@ -27,6 +35,36 @@
   * `set_session_data(key, value)`, assign a value to a key
   * `del_session_data(key)`,  suppress the value associated to a key
 
+* `Edition` (utilities for edition control; a short usage sketch follows this list):
+
+  * `cancel_edition`: resets error url and cleans up pending operations
+  * `create_entity`: utility to create an entity (from an etype,
+    attributes and relation values)
+  * `datadir_url`: returns the url to the merged external resources
+    (|cubicweb|'s `web/data` directory plus all `data` directories of
+    used cubes)
+  * `edited_eids`: returns the list of eids of entities that are
+    edited under the current http request
+  * `eid_rset(eid)`: utility which returns a result set from an eid
+  * `entity_from_eid(eid)`: returns an entity instance from the given eid
+  * `encoding`: the character encoding of the current HTTP request,
+    also used for the response
+  * `ensure_ro_rql(rql)`: ensures some rql query is a data request
+  * `etype_rset`
+  * `form`: dictionary containing the values of a web form
+  * `next_tabindex()`: returns a monotonically growing integer used to
+    build the html tab index of forms
+
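+For illustration, here is a minimal sketch using a few of the edition
+utilities listed above, as seen from a view or controller where
+``self._cw`` is the request object (the ``BlogEntry`` entity type and
+its attributes are just an example):
+
+.. sourcecode:: python
+
+    # create an entity, then get it back in two different ways
+    entity = self._cw.create_entity('BlogEntry', title=u'Hello',
+                                    content=u'my first entry')
+    rset = self._cw.eid_rset(entity.eid)         # one-row result set
+    same = self._cw.entity_from_eid(entity.eid)  # entity instance again
+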
+* `HTTP`
+
+  * `authmode`: returns a string describing the authentication mode
+    (http, cookie, ...)
+  * `lang`: returns the user agent's (browser) language as carried by
+    the http request
+  * `demote_to_html()`: in the context of an XHTML-compliant browser,
+    this will force emission of the response as an HTML document
+    (using http content negotiation)
+
 *  `Cookies handling`
 
   * `get_cookie()`, returns a dictionary containing the value of the header
@@ -39,10 +77,28 @@
 
 * `URL handling`
 
+  * `build_url(__vid, *args, **kwargs)`: return an absolute URL using
+    params dictionary key/values as URL parameters. Values are
+    automatically URL quoted, and the publishing method to use may be
+    specified or will be guessed.
+  * `build_url_params(**kwargs)`: returns a properly prepared (quoted,
+    separators, ...) string from the given parameters
   * `url()`, returns the full URL of the HTTP request
   * `base_url()`, returns the root URL of the web application
   * `relative_path()`, returns the relative path of the request
 
+* `Web resource (.css, .js files, etc.) handling` (a short usage sketch follows this list):
+
+  * `add_css(cssfiles)`: adds the given list of css resources to the current
+    html headers
+  * `add_js(jsfiles)`: adds the given list of javascript resources to the
+    current html headers
+  * `add_onload(jscode)`: injects the given jscode fragment (a unicode
+    string) into the current html headers, wrapped inside a
+    document.ready(...) or another ajax-friendly one-time trigger event
+  * `add_header(header, values)`: adds the header/value pair to the
+    current html headers
+
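+For illustration, a minimal sketch of how a view could use these
+helpers (the resource file names below are just examples):
+
+.. sourcecode:: python
+
+    self._cw.add_css('cubes.myapp.css')             # extra stylesheet
+    self._cw.add_js(('jquery.js', 'mywidgets.js'))  # extra javascript files
+    self._cw.add_onload(u'$("#mydiv").hide();')     # run once, at page load
+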
 * `And more...`
 
   * `set_content_type(content_type, filename=None)`, adds the header HTTP
--- a/doc/book/en/devweb/rtags.rst	Thu May 06 08:24:46 2010 +0200
+++ b/doc/book/en/devweb/rtags.rst	Mon Jul 19 15:36:16 2010 +0200
@@ -9,8 +9,8 @@
 
 .. _uicfg:
 
-The ``uicfg`` module
-~~~~~~~~~~~~~~~~~~~~
+The uicfg module
+~~~~~~~~~~~~~~~~
 
 .. note::
 
--- a/doc/book/en/devweb/views/editforms.rst	Thu May 06 08:24:46 2010 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,6 +0,0 @@
-Standard forms
---------------
-
- (:mod:`cubicweb.web.views.editforms`)
-
-XXX feed me
--- a/doc/book/en/intro/concepts.rst	Thu May 06 08:24:46 2010 +0200
+++ b/doc/book/en/intro/concepts.rst	Mon Jul 19 15:36:16 2010 +0200
@@ -173,15 +173,15 @@
 achieved by dynamic objects (`application objects` or `appobjects`) stored in a
 two-levels registry (the `vregistry`). Each object is affected to a registry with
 an identifier in this registry. You may have more than one object sharing an
-identifier in the same registry, At runtime, appobjects are selected in a
-registry according to the context. Selection is done by comparing *score*
+identifier in the same registry. At runtime, appobjects are selected in a
+registry according to the context. Selection is done by comparing the *score*
 returned by each appobject's *selector*.
 
 Application objects are stored in the vregistry using a two-level hierarchy :
 
   object's `__registry__` : object's `__regid__` : [list of app objects]
 
-E.g. the `vregistry` contains several (sub-)registries which hold a
+In other words, the `vregistry` contains several (sub-)registries which hold a
 list of appobjects associated to an identifier.
 
 The base class of appobjects is :class:`cubicweb.appobject.AppObject`.
@@ -189,15 +189,15 @@
 Selectors
 ~~~~~~~~~
 
-Each appobject has a selector, that is used to compute how well the object fits a
-given context. The better the object fits the context, the higher the score. They
-are the glue that tie appobjects to the data model. Using them appropriately is
+Each appobject has a selector that is used to compute how well the object fits a
+given context. The better the object fits the context, the higher the score. Scores
+are the glue that ties appobjects to the data model. Using them appropriately is
 an essential part of the construction of well behaved cubes.
 
 |cubicweb| provides a set of basic selectors that may be parametrized.  Also,
 selectors can be combined with the `~` unary operator (negation) and the binary
 operators `&` and `|` (respectivly 'and' and 'or') to build more complex
-selector. Of course complex selector may be combined too. Last but not least, you
+selectors. Of course complex selectors may be combined too. Last but not least, you
 can write your own selectors.
 
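+For illustration only, combining built-in selectors could look like this
+(`implements` and `authenticated_user` are standard selectors; the
+`BlogEntry` entity type and the view itself are just examples):
+
+.. sourcecode:: python
+
+    from cubicweb.view import EntityView
+    from cubicweb.selectors import implements, authenticated_user
+
+    class MyBlogEntryView(EntityView):
+        __regid__ = 'myview'
+        # selected for BlogEntry result sets, but only for logged-in users
+        __select__ = implements('BlogEntry') & authenticated_user()
+
+        def cell_call(self, row, col):
+            self.w(self.cw_rset.get_entity(row, col).dc_title())
+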
 The `vregistry`
@@ -339,5 +339,5 @@
 cubicweb application.
 
 .. note::
-   RQL queries executed in hooks and operations are *unsafe* by default, e.g. the
+   RQL queries executed in hooks and operations are *unsafe* by default, i.e. the
    read and write security is deactivated unless explicitly asked.
--- a/doc/book/en/tutorials/advanced/index.rst	Thu May 06 08:24:46 2010 +0200
+++ b/doc/book/en/tutorials/advanced/index.rst	Mon Jul 19 15:36:16 2010 +0200
@@ -8,12 +8,12 @@
 
 * basically a photo gallery
 
-* photo stored onto the fs and displayed dynamically through a web interface
+* photos stored on the file system and displayed dynamically through a web interface
 
 * navigation through folder (album), tags, geographical zone, people on the
   picture... using facets
 
-* advanced security (eg not everyone can see everything). More on this later.
+* advanced security (not everyone can see everything). More on this later.
 
 
 Cube creation and schema definition
@@ -24,7 +24,7 @@
 Step 1: creating a new cube for my web site
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-One note about my development environment: I wanted to use packaged
+One note about my development environment: I wanted to use the packaged
 version of CubicWeb and cubes while keeping my cube in my user
 directory, let's say `~src/cubes`.  I achieve this by setting the
 following environment variables::
@@ -43,10 +43,10 @@
 Step 2: pick building blocks into existing cubes
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-Almost everything I want represent in my web-site is somewhat already modelized in
-some cube that I'll extend for my need. So I'll pick the following cubes:
+Almost everything I want to handle in my web site is somehow already modeled in
+existing cubes that I'll extend for my needs. So I'll pick the following cubes:
 
-* `folder`, containing `Folder` entity type, which will be used as
+* `folder`, containing the `Folder` entity type, which will be used as
   both 'album' and a way to map file system folders. Entities are
   added to a given folder using the `filed_under` relation.
 
@@ -62,7 +62,7 @@
 * `comment`, providing a full commenting system allowing one to comment entity types
   supporting the `comments` relation by adding a `Comment` entity.
 
-* `tag`, providing a full tagging system as a easy and powerful way to classify
+* `tag`, providing a full tagging system as an easy and powerful way to classify
   entities supporting the `tags` relation by linking the to `Tag` entities. This
   will allows navigation into a large number of picture.
 
@@ -131,20 +131,20 @@
   picture.
 
 This schema will probably have to evolve as time goes (for security handling at
-least), but since the possibility to make schema evolving is one of CubicWeb
-feature (and goal), we won't worry and see that later when needed.
+least), but since letting a schema evolve easily is one of CubicWeb's
+features (and goals), we won't worry about it now and will come back to it when needed.
 
 
 Step 4: creating the instance
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-Now that I've a schema, I want to create an instance so I can start To
-create an instance using this new 'sytweb' cube, I run::
+Now that I have a schema, I want to create an instance. To
+do so using this new 'sytweb' cube, I run::
 
   cubicweb-ctl create sytweb sytweb_instance
 
-hint : if you get an error while the database is initialized, you can
-avoid having to reanswer to questions by runing ::
+Hint: if you get an error while the database is initialized, you can
+avoid having to answer the questions again by running::
 
    cubicweb-ctl db-create sytweb_instance
 
@@ -161,7 +161,7 @@
 Security, testing and migration
 -------------------------------
 
-This post will cover various topics:
+This part will cover various topics:
 
 * configuring security
 * migrating existing instance
@@ -174,10 +174,10 @@
   - ``authenticated``, only authenticated users can see it
   - ``restricted``, only a subset of authenticated users can see it
 * managers (e.g. me) can see everything
-* only authenticated user can see people
-* everyone can  see classifier entities, eg tag and zone
+* only authenticated users can see people
+* everyone can see classifier entities, such as tag and zone
 
-Also, unless explicity specified, visibility of an image should be the same as
+Also, unless explicitly specified, the visibility of an image should be the same as
 its parent folder, as well as visibility of a comment should be the same as the
 commented entity. If there is no parent entity, the default visibility is
 ``authenticated``.
@@ -198,19 +198,19 @@
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
 In schema, you can grant access according to groups, or to some RQL expressions:
-users get access it the expression return some results. To implements the read
-security defined earlier, groups are not enough, we'll need RQL expression. Here
+users get access if the expression returns some results. To implement the read
+security defined earlier, groups are not enough; we'll need RQL expressions. Here
 is the idea:
 
-* add a `visibility` attribute on folder, image and comment, which may be one of
+* add a `visibility` attribute on Folder, Image and Comment, which may be one of
   the value explained above
 
-* add a `may_be_read_by` relation from folder, image and comment to users,
+* add a `may_be_read_by` relation from Folder, Image and Comment to users,
   which will define who can see the entity
 
 * security propagation will be done in hook.
 
-So the first thing to do is to modify my cube'schema.py to define those
+So the first thing to do is to modify my cube's schema.py to define those
 relations:
 
 .. sourcecode:: python
@@ -319,9 +319,9 @@
 system. Hooks are triggered on database event such as addition of new
 entity or relation.
 
-The trick part of the requirement is in *unless explicitly specified*, notably
-because when the entity addition hook is added, we don't know yet its 'parent'
-entity (eg folder of an image, image commented by a comment). To handle such things,
+The tricky part of the requirement is in *unless explicitly specified*, notably
+because when the entity is added, we don't yet know its 'parent'
+entity (e.g. Folder of an Image, Image commented by a Comment). To handle such things,
 CubicWeb provides `Operation`, which allow to schedule things to do at commit time.
 
 In our case we will:
@@ -508,9 +508,11 @@
 It's not complete, but show most things you'll want to do in tests: adding some
 content, creating users and connecting as them in the test, etc...
 
-To run it type: ::
+To run it, type:
 
-    [syt@scorpius test]$ pytest unittest_sytweb.py
+.. sourcecode:: bash
+
+    $ pytest unittest_sytweb.py
     ========================  unittest_sytweb.py  ========================
     -> creating tables [....................]
     -> inserting default user and default groups.
@@ -524,9 +526,11 @@
 
 
 The first execution is taking time, since it creates a sqlite database for the
-test instance. The second one will be much quicker: ::
+test instance. The second one will be much quicker:
 
-    [syt@scorpius test]$ pytest unittest_sytweb.py
+.. sourcecode:: bash
+    
+    $ pytest unittest_sytweb.py
     ========================  unittest_sytweb.py  ========================
     .
     ----------------------------------------------------------------------
@@ -537,12 +541,11 @@
 If you do some changes in your schema, you'll have to force regeneration of that
 database. You do that by removing the tmpdb files before running the test: ::
 
-    [syt@scorpius test]$ rm tmpdb*
+    $ rm tmpdb*
 
 
 .. Note::
-  pytest is a very convenient utilities to control test execution, from the `logilab-common`_
-  package
+  pytest is a very convenient utility used to control test execution. It is
+  available from the `logilab-common`_ package.
 
 .. _`logilab-common`: http://www.logilab.org/project/logilab-common
 
@@ -551,7 +554,7 @@
 Step 4: writing the migration script and migrating the instance
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-Prior to those changes, Iv'e created an instance, feeded it with some data, so I
+Prior to those changes, I created an instance and fed it with some data, so I
 don't want to create a new one, but to migrate the existing one. Let's see how to
 do that.
 
@@ -573,12 +576,12 @@
 * update the instance's schema by adding our two new relations and update the
   underlying database tables accordingly (the two first instructions)
 
-* update schema's permissions definition (the later instruction)
+* update the schema's permission definitions (the last instruction)
 
 
 To migrate my instance I simply type::
 
-   [syt@scorpius ~]$ cubicweb-ctl upgrade sytweb
+   cubicweb-ctl upgrade sytweb
 
 I'll then be asked some questions to do the migration step by step. You should say
 YES when it asks if a backup of your database should be done, so you can get back
--- a/doc/book/en/tutorials/base/blog-in-five-minutes.rst	Thu May 06 08:24:46 2010 +0200
+++ b/doc/book/en/tutorials/base/blog-in-five-minutes.rst	Mon Jul 19 15:36:16 2010 +0200
@@ -29,9 +29,9 @@
 Instance parameters
 ~~~~~~~~~~~~~~~~~~~
 
-If the database installation failed, you'd like to change some instance parameters, for example, the database host or the user name. These informations can be edited in the `source` file located in the /etc/cubicweb.d/myblog directory.
+If you would like to change some instance parameters, for example, the database host or the user name, edit the `source` file located in the /etc/cubicweb.d/myblog directory.
 
-Then relaunch the database creation:
+Then relaunch the database creation::
 
      cubicweb-ctl db-create myblog
 
--- a/doc/book/en/tutorials/base/conclusion.rst	Thu May 06 08:24:46 2010 +0200
+++ b/doc/book/en/tutorials/base/conclusion.rst	Mon Jul 19 15:36:16 2010 +0200
@@ -3,13 +3,11 @@
 What's next?
 ------------
 
-We demonstrated how from a straight out of the box *CubicWeb* installation, you
-can build your web application based on a data model. It's all already there:
-views, templates, permissions, etc. The step forward is now for you to customize
-according to your needs.
+In this chapter, we have seen how you can, right after the installation of *CubicWeb*, build a web application in five minutes by defining a data model. Everything is there already: views, templates, permissions, etc.
 
-Many features are available to extend your application, for example: RSS channel
-integration (:ref:`XmlAndRss`), hooks (:ref:`hooks`), support of sources such as
-Google App Engine (:ref:`GoogleAppEngineSource`) and lots of others to discover
-through our book.
+The next step is to change the design and learn about the many features available to customize and extend your application: RSS channels (:ref:`XmlAndRss`), events (:ref:`hooks`), support of sources such as
+Google App Engine (:ref:`GoogleAppEngineSource`), etc.
 
+You will find more `tutorials and howtos`_ in the blog published on the CubicWeb.org website.
+
+.. _`tutorials and howtos`: http://www.cubicweb.org/view?rql=Any+X+ORDERBY+D+DESC+WHERE+X+is+BlogEntry%2C+T+tags+X%2C+T+name+IN+%28%22tutorial%22%2C+%22howto%22%29%2C+X+creation_date+D
--- a/doc/book/en/tutorials/base/create-cube.rst	Thu May 06 08:24:46 2010 +0200
+++ b/doc/book/en/tutorials/base/create-cube.rst	Mon Jul 19 15:36:16 2010 +0200
@@ -30,7 +30,7 @@
 
 Customize the views of your data: how and which part of your data are showed.
 
-Note: views don't concern the look'n'feel or design of the site. For that, you should use CSS instead, and default CSS or your new cube are located in 'blog/data/'.
+.. note:: views do not define the look'n'feel and design of your application. For that, you will use CSS and the files located in 'blog/data/'.
 
 
 5. :ref:`DefineEntities`
@@ -396,6 +396,7 @@
 want to add a ``category`` attribute in the ``Blog`` data type. This is called a migration.
 
 The required steps are:
+
 1. modify the file ``schema.py``. The ``Blog`` class looks now like this:
 
 .. sourcecode:: python
@@ -405,7 +406,11 @@
    description = String()
    category = String(required=True, vocabulary=(_('Professional'), _('Personal')), default='Personal')
 
-2. stop your ``blogdemo`` instance
+2. stop your ``blogdemo`` instance:
+
+.. sourcecode:: bash
+
+  cubicweb-ctl stop blogdemo
 
 3. start the cubicweb shell for your instance by running the following command:
 
@@ -413,15 +418,21 @@
 
   cubicweb-ctl shell blogdemo
 
-4. in the shell, execute:
+4. at the cubicweb shell prompt, execute:
 
 .. sourcecode:: python
 
  add_attribute('Blog', 'category')
 
-5. you can restart your instance, modify a blog entity and check that the new attribute
+5. restart your instance:
+   
+.. sourcecode:: bash
+
+  cubicweb-ctl start blogdemo
+
+6. modify a blog entity and check that the new attribute
 ``category`` has been added.
 
-Of course, you may also want to add relations, entity types, ... See :ref:`migration`
+Of course, you may also want to add relations, entity types, etc. See :ref:`migration`
 for a list of all available migration commands.
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/stdlib.txt	Mon Jul 19 15:36:16 2010 +0200
@@ -0,0 +1,18 @@
+addressbook
+basket
+blog
+book
+calendar
+comment
+company
+email
+file
+folder
+i18ncontent
+keyword
+link
+mailinglist
+person
+tag
+timeseries
+vcsfile
--- a/entities/authobjs.py	Thu May 06 08:24:46 2010 +0200
+++ b/entities/authobjs.py	Mon Jul 19 15:36:16 2010 +0200
@@ -109,7 +109,7 @@
         try:
             return self._cw.execute(
                 'Any X WHERE X eid %(x)s, X owned_by U, U eid %(u)s',
-                {'x': eid, 'u': self.eid}, 'x')
+                {'x': eid, 'u': self.eid})
         except Unauthorized:
             return False
     owns = cached(owns, keyarg=1)
@@ -118,13 +118,11 @@
         rql = 'Any P WHERE P is CWPermission, U eid %(u)s, U in_group G, '\
               'P name %(pname)s, P require_group G'
         kwargs = {'pname': pname, 'u': self.eid}
-        cachekey = None
         if contexteid is not None:
             rql += ', X require_permission P, X eid %(x)s'
             kwargs['x'] = contexteid
-            cachekey = 'x'
         try:
-            return self._cw.execute(rql, kwargs, cachekey)
+            return self._cw.execute(rql, kwargs)
         except Unauthorized:
             return False
 
--- a/entities/lib.py	Thu May 06 08:24:46 2010 +0200
+++ b/entities/lib.py	Mon Jul 19 15:36:16 2010 +0200
@@ -36,6 +36,7 @@
         return address
     return '%s at %s' % (name, host.replace('.', ' dot '))
 
+
 class EmailAddress(AnyEntity):
     __regid__ = 'EmailAddress'
     fetch_attrs, fetch_order = fetch_config(['address', 'alias'])
@@ -63,8 +64,10 @@
         subjrels = self.e_schema.object_relations()
         if not ('sender' in subjrels and 'recipients' in subjrels):
             return
-        rql = 'DISTINCT Any X, S, D ORDERBY D DESC WHERE X sender Y or X recipients Y, X subject S, X date D, Y eid %(y)s'
-        rset = self._cw.execute(rql, {'y': self.eid}, 'y')
+        rset = self._cw.execute('DISTINCT Any X, S, D ORDERBY D DESC '
+                                'WHERE X sender Y or X recipients Y, '
+                                'X subject S, X date D, Y eid %(y)s',
+                                {'y': self.eid})
         if skipeids is None:
             skipeids = set()
         for i in xrange(len(rset)):
@@ -144,7 +147,7 @@
 
     def touch(self):
         self._cw.execute('SET X timestamp %(t)s WHERE X eid %(x)s',
-                         {'t': datetime.now(), 'x': self.eid}, 'x')
+                         {'t': datetime.now(), 'x': self.eid})
 
     def valid(self, date):
         if date:
--- a/entities/schemaobjs.py	Thu May 06 08:24:46 2010 +0200
+++ b/entities/schemaobjs.py	Mon Jul 19 15:36:16 2010 +0200
@@ -135,6 +135,9 @@
     def otype(self):
         return self.to_entity[0]
 
+    def yams_schema(self):
+        rschema = self._cw.vreg.schema.rschema(self.rtype.name)
+        return rschema.rdefs[(self.stype.name, self.otype.name)]
 
 class CWAttribute(CWRelation):
     __regid__ = 'CWAttribute'
@@ -175,6 +178,9 @@
     fetch_attrs, fetch_order = fetch_config(['exprtype', 'mainvars', 'expression'])
 
     def dc_title(self):
+        return self.expression or u''
+
+    def dc_long_title(self):
         return '%s(%s)' % (self.exprtype, self.expression or u'')
 
     @property
--- a/entities/test/unittest_base.py	Thu May 06 08:24:46 2010 +0200
+++ b/entities/test/unittest_base.py	Mon Jul 19 15:36:16 2010 +0200
@@ -82,18 +82,18 @@
 class CWUserTC(BaseEntityTC):
 
     def test_complete(self):
-        e = self.entity('CWUser X WHERE X login "admin"')
+        e = self.execute('CWUser X WHERE X login "admin"').get_entity(0, 0)
         e.complete()
 
     def test_matching_groups(self):
-        e = self.entity('CWUser X WHERE X login "admin"')
+        e = self.execute('CWUser X WHERE X login "admin"').get_entity(0, 0)
         self.failUnless(e.matching_groups('managers'))
         self.failIf(e.matching_groups('xyz'))
         self.failUnless(e.matching_groups(('xyz', 'managers')))
         self.failIf(e.matching_groups(('xyz', 'abcd')))
 
     def test_dc_title_and_name(self):
-        e = self.entity('CWUser U WHERE U login "member"')
+        e = self.execute('CWUser U WHERE U login "member"').get_entity(0, 0)
         self.assertEquals(e.dc_title(), 'member')
         self.assertEquals(e.name(), 'member')
         e.set_attributes(firstname=u'bouah')
@@ -104,7 +104,7 @@
         self.assertEquals(e.name(), u'bouah lôt')
 
     def test_allowed_massmail_keys(self):
-        e = self.entity('CWUser U WHERE U login "member"')
+        e = self.execute('CWUser U WHERE U login "member"').get_entity(0, 0)
         # Bytes/Password attributes should be omited
         self.assertEquals(e.allowed_massmail_keys(),
                           set(('surname', 'firstname', 'login', 'last_login_time',
--- a/entities/test/unittest_wfobjs.py	Thu May 06 08:24:46 2010 +0200
+++ b/entities/test/unittest_wfobjs.py	Mon Jul 19 15:36:16 2010 +0200
@@ -113,7 +113,7 @@
         self.assertEquals(e.latest_trinfo().comment, 'deactivate 2')
 
     def test_possible_transitions(self):
-        user = self.entity('CWUser X')
+        user = self.execute('CWUser X').get_entity(0, 0)
         trs = list(user.possible_transitions())
         self.assertEquals(len(trs), 1)
         self.assertEquals(trs[0].name, u'deactivate')
@@ -148,7 +148,7 @@
         with security_enabled(self.session, write=False):
             ex = self.assertRaises(ValidationError, self.session.execute,
                                'SET X in_state S WHERE X eid %(x)s, S eid %(s)s',
-                               {'x': self.user().eid, 's': s.eid}, 'x')
+                               {'x': self.user().eid, 's': s.eid})
             self.assertEquals(ex.errors, {'in_state-subject': "state doesn't belong to entity's workflow. "
                                       "You may want to set a custom workflow for this entity first."})
 
@@ -430,9 +430,9 @@
         wf = add_wf(self, 'Company')
         wf.add_state('asleep', initial=True)
         self.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s',
-                     {'wf': wf.eid, 'x': self.member.eid}, 'x')
+                     {'wf': wf.eid, 'x': self.member.eid})
         ex = self.assertRaises(ValidationError, self.commit)
-        self.assertEquals(ex.errors, {'custom_workflow-subject': 'workflow isn\'t a workflow for this type'})
+        self.assertEquals(ex.errors, {'custom_workflow-subject': u"workflow isn't a workflow for this type"})
 
     def test_del_custom_wf(self):
         """member in some state shared by the new workflow, nothing has to be
--- a/entities/wfobjs.py	Thu May 06 08:24:46 2010 +0200
+++ b/entities/wfobjs.py	Mon Jul 19 15:36:16 2010 +0200
@@ -78,7 +78,7 @@
     def state_by_name(self, statename):
         rset = self._cw.execute('Any S, SN WHERE S name SN, S name %(n)s, '
                                 'S state_of WF, WF eid %(wf)s',
-                                {'n': statename, 'wf': self.eid}, 'wf')
+                                {'n': statename, 'wf': self.eid})
         if rset:
             return rset.get_entity(0, 0)
         return None
@@ -86,7 +86,7 @@
     def state_by_eid(self, eid):
         rset = self._cw.execute('Any S, SN WHERE S name SN, S eid %(s)s, '
                                 'S state_of WF, WF eid %(wf)s',
-                                {'s': eid, 'wf': self.eid}, ('wf', 's'))
+                                {'s': eid, 'wf': self.eid})
         if rset:
             return rset.get_entity(0, 0)
         return None
@@ -94,7 +94,7 @@
     def transition_by_name(self, trname):
         rset = self._cw.execute('Any T, TN WHERE T name TN, T name %(n)s, '
                                 'T transition_of WF, WF eid %(wf)s',
-                                {'n': trname, 'wf': self.eid}, 'wf')
+                                {'n': trname, 'wf': self.eid})
         if rset:
             return rset.get_entity(0, 0)
         return None
@@ -102,7 +102,7 @@
     def transition_by_eid(self, eid):
         rset = self._cw.execute('Any T, TN WHERE T name TN, T eid %(t)s, '
                                 'T transition_of WF, WF eid %(wf)s',
-                                {'t': eid, 'wf': self.eid}, ('wf', 't'))
+                                {'t': eid, 'wf': self.eid})
         if rset:
             return rset.get_entity(0, 0)
         return None
@@ -113,12 +113,12 @@
         """add a state to this workflow"""
         state = self._cw.create_entity('State', name=unicode(name), **kwargs)
         self._cw.execute('SET S state_of WF WHERE S eid %(s)s, WF eid %(wf)s',
-                         {'s': state.eid, 'wf': self.eid}, ('s', 'wf'))
+                         {'s': state.eid, 'wf': self.eid})
         if initial:
             assert not self.initial, "Initial state already defined as %s" % self.initial
             self._cw.execute('SET WF initial_state S '
                              'WHERE S eid %(s)s, WF eid %(wf)s',
-                             {'s': state.eid, 'wf': self.eid}, ('s', 'wf'))
+                             {'s': state.eid, 'wf': self.eid})
         return state
 
     def _add_transition(self, trtype, name, fromstates,
@@ -126,7 +126,7 @@
         tr = self._cw.create_entity(trtype, name=unicode(name), **kwargs)
         self._cw.execute('SET T transition_of WF '
                          'WHERE T eid %(t)s, WF eid %(wf)s',
-                         {'t': tr.eid, 'wf': self.eid}, ('t', 'wf'))
+                         {'t': tr.eid, 'wf': self.eid})
         assert fromstates, fromstates
         if not isinstance(fromstates, (tuple, list)):
             fromstates = (fromstates,)
@@ -135,7 +135,7 @@
                 state = state.eid
             self._cw.execute('SET S allowed_transition T '
                              'WHERE S eid %(s)s, T eid %(t)s',
-                             {'s': state, 't': tr.eid}, ('s', 't'))
+                             {'s': state, 't': tr.eid})
         tr.set_permissions(requiredgroups, conditions, reset=False)
         return tr
 
@@ -149,7 +149,7 @@
                 tostate = tostate.eid
             self._cw.execute('SET T destination_state S '
                              'WHERE S eid %(s)s, T eid %(t)s',
-                             {'t': tr.eid, 's': tostate}, ('s', 't'))
+                             {'t': tr.eid, 's': tostate})
         return tr
 
     def add_wftransition(self, name, subworkflow, fromstates, exitpoints=(),
@@ -160,7 +160,7 @@
         if hasattr(subworkflow, 'eid'):
             subworkflow = subworkflow.eid
         assert self._cw.execute('SET T subworkflow WF WHERE WF eid %(wf)s,T eid %(t)s',
-                                {'t': tr.eid, 'wf': subworkflow}, ('wf', 't'))
+                                {'t': tr.eid, 'wf': subworkflow})
         for fromstate, tostate in exitpoints:
             tr.add_exit_point(fromstate, tostate)
         return tr
@@ -172,11 +172,11 @@
         if not hasattr(replacement, 'eid'):
             replacement = self.state_by_name(replacement)
         execute = self._cw.execute
-        execute('SET X in_state S WHERE S eid %(s)s', {'s': todelstate.eid}, 's')
+        execute('SET X in_state S WHERE S eid %(s)s', {'s': todelstate.eid})
         execute('SET X from_state NS WHERE X to_state OS, OS eid %(os)s, NS eid %(ns)s',
-                {'os': todelstate.eid, 'ns': replacement.eid}, 's')
+                {'os': todelstate.eid, 'ns': replacement.eid})
         execute('SET X to_state NS WHERE X to_state OS, OS eid %(os)s, NS eid %(ns)s',
-                {'os': todelstate.eid, 'ns': replacement.eid}, 's')
+                {'os': todelstate.eid, 'ns': replacement.eid})
         todelstate.delete()
 
 
@@ -187,7 +187,7 @@
     fired by the logged user
     """
     __regid__ = 'BaseTransition'
-    fetch_attrs, fetch_order = fetch_config(['name'])
+    fetch_attrs, fetch_order = fetch_config(['name', 'type'])
 
     def __init__(self, *args, **kwargs):
         if self.__regid__ == 'BaseTransition':
@@ -240,13 +240,13 @@
         """
         if reset:
             self._cw.execute('DELETE T require_group G WHERE T eid %(x)s',
-                             {'x': self.eid}, 'x')
+                             {'x': self.eid})
             self._cw.execute('DELETE T condition R WHERE T eid %(x)s',
-                             {'x': self.eid}, 'x')
+                             {'x': self.eid})
         for gname in requiredgroups:
             rset = self._cw.execute('SET T require_group G '
                                     'WHERE T eid %(x)s, G name %(gn)s',
-                                    {'x': self.eid, 'gn': gname}, 'x')
+                                    {'x': self.eid, 'gn': gname})
             assert rset, '%s is not a known group' % gname
         if isinstance(conditions, basestring):
             conditions = (conditions,)
@@ -260,7 +260,7 @@
             kwargs.setdefault('mainvars', u'X')
             self._cw.execute('INSERT RQLExpression X: X exprtype "ERQLExpression", '
                              'X expression %(expr)s, X mainvars %(mainvars)s, '
-                             'T condition X WHERE T eid %(x)s',kwargs, 'x')
+                             'T condition X WHERE T eid %(x)s', kwargs)
         # XXX clear caches?
 
     @deprecated('[3.6.1] use set_permission')
@@ -312,15 +312,14 @@
         if tostate is None:
             self._cw.execute('INSERT SubWorkflowExitPoint X: T subworkflow_exit X, '
                              'X subworkflow_state FS WHERE T eid %(t)s, FS eid %(fs)s',
-                             {'t': self.eid, 'fs': fromstate}, ('t', 'fs'))
+                             {'t': self.eid, 'fs': fromstate})
         else:
             if hasattr(tostate, 'eid'):
                 tostate = tostate.eid
             self._cw.execute('INSERT SubWorkflowExitPoint X: T subworkflow_exit X, '
                              'X subworkflow_state FS, X destination_state TS '
                              'WHERE T eid %(t)s, FS eid %(fs)s, TS eid %(ts)s',
-                             {'t': self.eid, 'fs': fromstate, 'ts': tostate},
-                             ('t', 'fs', 'ts'))
+                             {'t': self.eid, 'fs': fromstate, 'ts': tostate})
 
     def get_exit_point(self, entity, stateeid):
         """if state is an exit point, return its associated destination state"""
@@ -482,7 +481,7 @@
             'T type TT, T type %(type)s, '
             'T name TN, T transition_of WF, WF eid %(wfeid)s',
             {'x': self.current_state.eid, 'type': type,
-             'wfeid': self.current_workflow.eid}, 'x')
+             'wfeid': self.current_workflow.eid})
         for tr in rset.entities():
             if tr.may_be_fired(self.eid):
                 yield tr
--- a/entity.py	Thu May 06 08:24:46 2010 +0200
+++ b/entity.py	Mon Jul 19 15:36:16 2010 +0200
@@ -15,9 +15,8 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""Base class for entity objects manipulated in clients
+"""Base class for entity objects manipulated in clients"""
 
-"""
 __docformat__ = "restructuredtext en"
 
 from warnings import warn
@@ -432,12 +431,12 @@
     def has_perm(self, action):
         return self.e_schema.has_perm(self._cw, action, eid=self.eid)
 
-    def view(self, __vid, __registry='views', **kwargs):
+    def view(self, __vid, __registry='views', w=None, **kwargs):
         """shortcut to apply a view on this entity"""
         view = self._cw.vreg[__registry].select(__vid, self._cw, rset=self.cw_rset,
                                                 row=self.cw_row, col=self.cw_col,
                                                 **kwargs)
-        return view.render(row=self.cw_row, col=self.cw_col, **kwargs)
+        return view.render(row=self.cw_row, col=self.cw_col, w=w, **kwargs)
 
     def absolute_url(self, *args, **kwargs):
         """return an absolute url to view this entity"""
@@ -574,7 +573,7 @@
                 continue
             rql = 'SET X %s V WHERE X eid %%(x)s, Y eid %%(y)s, Y %s V' % (
                 rschema.type, rschema.type)
-            execute(rql, {'x': self.eid, 'y': ceid}, ('x', 'y'))
+            execute(rql, {'x': self.eid, 'y': ceid})
             self.clear_related_cache(rschema.type, 'subject')
         for rschema in self.e_schema.object_relations():
             if rschema.meta:
@@ -592,7 +591,7 @@
                 continue
             rql = 'SET V %s X WHERE X eid %%(x)s, Y eid %%(y)s, V %s Y' % (
                 rschema.type, rschema.type)
-            execute(rql, {'x': self.eid, 'y': ceid}, ('x', 'y'))
+            execute(rql, {'x': self.eid, 'y': ceid})
             self.clear_related_cache(rschema.type, 'object')
 
     # data fetching methods ###################################################
@@ -694,8 +693,7 @@
             # if some outer join are included to fetch inlined relations
             rql = 'Any %s,%s %s' % (V, ','.join(var for attr, var in selected),
                                     ','.join(rql))
-            rset = self._cw.execute(rql, {'x': self.eid}, 'x',
-                                    build_descr=False)[0]
+            rset = self._cw.execute(rql, {'x': self.eid}, build_descr=False)[0]
             # handle attributes
             for i in xrange(1, lastattr):
                 self[str(selected[i-1][0])] = rset[i]
@@ -724,7 +722,7 @@
                 return None
             rql = "Any A WHERE X eid %%(x)s, X %s A" % name
             try:
-                rset = self._cw.execute(rql, {'x': self.eid}, 'x')
+                rset = self._cw.execute(rql, {'x': self.eid})
             except Unauthorized:
                 self[name] = value = None
             else:
@@ -753,9 +751,12 @@
             return self.related_cache(rtype, role, entities, limit)
         except KeyError:
             pass
-        assert self.has_eid()
+        if not self.has_eid():
+            if entities:
+                return []
+            return self.empty_rset()
         rql = self.related_rql(rtype, role)
-        rset = self._cw.execute(rql, {'x': self.eid}, 'x')
+        rset = self._cw.execute(rql, {'x': self.eid})
         self.set_related_cache(rtype, role, rset)
         return self.related(rtype, role, limit, entities)
 
@@ -881,7 +882,7 @@
         if limit is not None:
             before, after = rql.split(' WHERE ', 1)
             rql = '%s LIMIT %s WHERE %s' % (before, limit, after)
-        return self._cw.execute(rql, args, tuple(args))
+        return self._cw.execute(rql, args)
 
     # relations cache handling ################################################
 
@@ -964,7 +965,7 @@
         # and now update the database
         kwargs['x'] = self.eid
         self._cw.execute('SET %s WHERE X eid %%(x)s' % ','.join(relations),
-                         kwargs, 'x')
+                         kwargs)
         kwargs.pop('x')
         # update current local object _after_ the rql query to avoid
         # interferences between the query execution itself and the
@@ -987,13 +988,13 @@
                 restr = 'X %s Y' % attr
             if values is None:
                 self._cw.execute('DELETE %s WHERE X eid %%(x)s' % restr,
-                                 {'x': self.eid}, 'x')
+                                 {'x': self.eid})
                 continue
             if not isinstance(values, (tuple, list, set, frozenset)):
                 values = (values,)
             self._cw.execute('SET %s WHERE X eid %%(x)s, Y eid IN (%s)' % (
                 restr, ','.join(str(r.eid) for r in values)),
-                             {'x': self.eid}, 'x')
+                             {'x': self.eid})
 
     def delete(self, **kwargs):
         assert self.has_eid(), self.eid
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/etwist/http.py	Mon Jul 19 15:36:16 2010 +0200
@@ -0,0 +1,72 @@
+"""twisted server for CubicWeb web instances
+
+:organization: Logilab
+:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
+"""
+
+__docformat__ = "restructuredtext en"
+
+from cubicweb.web.http_headers import Headers
+
+class HTTPResponse(object):
+    """An object representing an HTTP Response to be sent to the client.
+    """
+    def __init__(self, twisted_request, code=None, headers=None, stream=None):
+        self._headers_out = headers
+        self._twreq = twisted_request
+        self._stream = stream
+        self._code = code
+
+        self._init_headers()
+        self._finalize()
+
+    def _init_headers(self):
+        if self._headers_out is None:
+            return
+
+        # initialize cookies
+        cookies = self._headers_out.getHeader('set-cookie') or []
+        for cookie in cookies:
+            self._twreq.addCookie(cookie.name, cookie.value, cookie.expires,
+                                  cookie.domain, cookie.path, #TODO max-age
+                                  comment = cookie.comment, secure=cookie.secure)
+        self._headers_out.removeHeader('set-cookie')
+
+        # initialize other headers
+        for k, v in self._headers_out.getAllRawHeaders():
+            self._twreq.setHeader(k, v[0])
+
+        # add content-length if not present
+        if (self._headers_out.getHeader('content-length') is None
+            and self._stream is not None):
+            self._twreq.setHeader('content-length', len(self._stream))
+
+
+    def _finalize(self):
+        # we must set code before writing anything, else it's too late
+        if self._code is not None:
+            self._twreq.setResponseCode(self._code)
+        if self._stream is not None:
+            self._twreq.write(str(self._stream))
+        self._twreq.finish()
+
+    def __repr__(self):
+        return "<%s.%s code=%d>" % (self.__module__, self.__class__.__name__, self._code)
+
+
+def not_modified_response(twisted_request, headers_in):
+    headers_out = Headers()
+
+    for header in (
+        # Required from sec 10.3.5:
+        'date', 'etag', 'content-location', 'expires',
+        'cache-control', 'vary',
+        # Others:
+        'server', 'proxy-authenticate', 'www-authenticate', 'warning'):
+        value = headers_in.getRawHeaders(header)
+        if value is not None:
+            headers_out.setRawHeaders(header, value)
+    return HTTPResponse(twisted_request=twisted_request,
+                        headers=headers_out)
--- a/etwist/request.py	Thu May 06 08:24:46 2010 +0200
+++ b/etwist/request.py	Mon Jul 19 15:36:16 2010 +0200
@@ -22,22 +22,13 @@
 
 from datetime import datetime
 
-from twisted.web2 import http, http_headers
+from twisted.web import http
 
 from cubicweb.web import DirectResponse
 from cubicweb.web.request import CubicWebRequestBase
 from cubicweb.web.httpcache import GMTOFFSET
-
-def cleanup_files(dct, encoding):
-    d = {}
-    for k, infos in dct.items():
-        for (filename, mt, stream) in infos:
-            if filename:
-                # XXX: suppose that no file submitted <-> no filename
-                filename = unicode(filename, encoding)
-                mt = u'%s/%s' % (mt.mediaType, mt.mediaSubtype)
-                d[k] = (filename, mt, stream)
-    return d
+from cubicweb.web.http_headers import Headers
+from cubicweb.etwist.http import not_modified_response
 
 
 class CubicWebTwistedRequestAdapter(CubicWebRequestBase):
@@ -45,10 +36,15 @@
         self._twreq = req
         self._base_url = base_url
         super(CubicWebTwistedRequestAdapter, self).__init__(vreg, https, req.args)
-        self.form.update(cleanup_files(req.files, self.encoding))
-        # prepare output headers
-        self.headers_out = http_headers.Headers()
-        self._headers = req.headers
+        for key, (name, stream) in req.files.iteritems():
+            if name is None:
+                self.form[key] = (name, stream)
+            else:
+                self.form[key] = (unicode(name, self.encoding), stream)
+        # XXX can't we keep received_headers?
+        self._headers_in = Headers()
+        for k, v in req.received_headers.iteritems():
+            self._headers_in.addRawHeader(k, v)
 
     def base_url(self):
         """return the root url of the instance"""
@@ -76,29 +72,8 @@
         raise KeyError if the header is not set
         """
         if raw:
-            return self._twreq.headers.getRawHeaders(header, [default])[0]
-        return self._twreq.headers.getHeader(header, default)
-
-    def set_header(self, header, value, raw=True):
-        """set an output HTTP header"""
-        if raw:
-            # adding encoded header is important, else page content
-            # will be reconverted back to unicode and apart unefficiency, this
-            # may cause decoding problem (e.g. when downloading a file)
-            self.headers_out.setRawHeaders(header, [str(value)])
-        else:
-            self.headers_out.setHeader(header, value)
-
-    def add_header(self, header, value):
-        """add an output HTTP header"""
-        # adding encoded header is important, else page content
-        # will be reconverted back to unicode and apart unefficiency, this
-        # may cause decoding problem (e.g. when downloading a file)
-        self.headers_out.addRawHeader(header, str(value))
-
-    def remove_header(self, header):
-        """remove an output HTTP header"""
-        self.headers_out.removeHeader(header)
+            return self._headers_in.getRawHeaders(header, [default])[0]
+        return self._headers_in.getHeader(header, default)
 
     def _validate_cache(self):
         """raise a `DirectResponse` exception if a cached page along the way
@@ -108,11 +83,22 @@
             # Expires header seems to be required by IE7
             self.add_header('Expires', 'Sat, 01 Jan 2000 00:00:00 GMT')
             return
-        try:
-            http.checkPreconditions(self._twreq, _PreResponse(self))
-        except http.HTTPError, ex:
-            self.info('valid http cache, no actual rendering')
-            raise DirectResponse(ex.response)
+        # handle both the 'Last-Modified' and 'ETag' response headers (checked
+        # against the If-Modified-Since and If-None-Match request headers
+        # respectively); see
+        # http://www.w3.org/Protocols/rfc2616/rfc2616-sec13.html#sec13.3.4
+        # for reference
+        last_modified = self.headers_out.getHeader('last-modified')
+        if last_modified is not None:
+            status = self._twreq.setLastModified(last_modified)
+            if status != http.CACHED:
+                return
+        etag = self.headers_out.getRawHeaders('etag')
+        if etag is not None:
+            status = self._twreq.setETag(etag[0])
+            if status == http.CACHED:
+                response = not_modified_response(self._twreq, self._headers_in)
+                raise DirectResponse(response)
         # Expires header seems to be required by IE7
         self.add_header('Expires', 'Sat, 01 Jan 2000 00:00:00 GMT')
 
@@ -133,9 +119,3 @@
             # :/ twisted is returned a localized time stamp
             return datetime.fromtimestamp(mtime) + GMTOFFSET
         return None
-
-
-class _PreResponse(object):
-    def __init__(self, request):
-        self.headers = request.headers_out
-        self.code = 200
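
Note: the rewritten _validate_cache above leans on twisted.web's own
conditional-GET support: Request.setLastModified() and Request.setETag()
return twisted.web.http.CACHED when the matching If-Modified-Since /
If-None-Match request header hits. A standalone sketch of that decision,
with an invented helper name and the same short-circuit order as the patch:

    from twisted.web import http

    def is_cache_hit(twreq, last_modified=None, etag=None):
        # a Last-Modified miss short-circuits the ETag check, as in
        # _validate_cache above
        if last_modified is not None:
            if twreq.setLastModified(last_modified) != http.CACHED:
                return False
        if etag is not None:
            return twreq.setETag(etag) == http.CACHED
        return False
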
--- a/etwist/server.py	Thu May 06 08:24:46 2010 +0200
+++ b/etwist/server.py	Mon Jul 19 15:36:16 2010 +0200
@@ -24,46 +24,31 @@
 import os
 import select
 import errno
+import traceback
+import threading
+from os.path import join
 from time import mktime
 from datetime import date, timedelta
 from urlparse import urlsplit, urlunsplit
+from cgi import FieldStorage, parse_header
+from cStringIO import StringIO
 
 from twisted.internet import reactor, task, threads
 from twisted.internet.defer import maybeDeferred
-from twisted.web2 import channel, http, server, iweb
-from twisted.web2 import static, resource, responsecode
+from twisted.web import http, server
+from twisted.web import static, resource
+from twisted.web.server import NOT_DONE_YET
 
-from cubicweb import ConfigurationError, CW_EVENT_MANAGER
-from cubicweb.web import (AuthenticationError, NotFound, Redirect,
-                          RemoteCallFailed, DirectResponse, StatusResponse,
-                          ExplicitLogin)
-from cubicweb.web.application import CubicWebPublisher
-
-from cubicweb.etwist.request import CubicWebTwistedRequestAdapter
+from cubicweb.web import dumps
 
-def daemonize():
-    # XXX unix specific
-    # XXX factorize w/ code in cw.server.server and cw.server.serverctl
-    # (start-repository command)
-    # See http://www.erlenstar.demon.co.uk/unix/faq_toc.html#TOC16
-    if os.fork():   # launch child and...
-        return 1
-    os.setsid()
-    if os.fork():   # launch child again.
-        return 1
-    # move to the root to avoit mount pb
-    os.chdir('/')
-    # set paranoid umask
-    os.umask(077)
-    null = os.open('/dev/null', os.O_RDWR)
-    for i in range(3):
-        try:
-            os.dup2(null, i)
-        except OSError, e:
-            if e.errno != errno.EBADF:
-                raise
-    os.close(null)
-    return None
+from logilab.common.decorators import monkeypatch
+
+from cubicweb import AuthenticationError, ConfigurationError, CW_EVENT_MANAGER
+from cubicweb.web import Redirect, DirectResponse, StatusResponse, LogOut
+from cubicweb.web.application import CubicWebPublisher
+from cubicweb.web.http_headers import generateDateTime
+from cubicweb.etwist.request import CubicWebTwistedRequestAdapter
+from cubicweb.etwist.http import HTTPResponse
 
 def start_task(interval, func):
     lc = task.LoopingCall(func)
@@ -80,8 +65,20 @@
     return baseurl
 
 
-class LongTimeExpiringFile(static.File):
-    """overrides static.File and sets a far futre ``Expires`` date
+class ForbiddenDirectoryLister(resource.Resource):
+    def render(self, request):
+        return HTTPResponse(twisted_request=request,
+                            code=http.FORBIDDEN,
+                            stream='Access forbidden')
+
+class File(static.File):
+    """Prevent from listing directories"""
+    def directoryListing(self):
+        return ForbiddenDirectoryLister()
+
+
+class LongTimeExpiringFile(File):
+    """overrides static.File and sets a far future ``Expires`` date
     on the resouce.
 
     versions handling is done by serving static files by different
@@ -92,22 +89,16 @@
       etc.
 
     """
-    def renderHTTP(self, request):
-        def setExpireHeader(response):
-            response = iweb.IResponse(response)
-            # Don't provide additional resource information to error responses
-            if response.code < 400:
-                # the HTTP RFC recommands not going further than 1 year ahead
-                expires = date.today() + timedelta(days=6*30)
-                response.headers.setHeader('Expires', mktime(expires.timetuple()))
-            return response
-        d = maybeDeferred(super(LongTimeExpiringFile, self).renderHTTP, request)
-        return d.addCallback(setExpireHeader)
+    def render(self, request):
+        # XXX: Don't provide additional resource information to error responses
+        #
+        # the HTTP RFC recommends not going further than 1 year ahead
+        expires = date.today() + timedelta(days=6*30)
+        request.setHeader('Expires', generateDateTime(mktime(expires.timetuple())))
+        return File.render(self, request)
 
 
-class CubicWebRootResource(resource.PostableResource):
-    addSlash = False
-
+class CubicWebRootResource(resource.Resource):
     def __init__(self, config, debug=None):
         self.debugmode = debug
         self.config = config
@@ -116,7 +107,11 @@
         self.appli = CubicWebPublisher(config, debug=self.debugmode)
         self.base_url = config['base-url']
         self.https_url = config['https-url']
-        self.versioned_datadir = 'data%s' % config.instance_md5_version()
+        self.children = {}
+        self.static_directories = set(('data%s' % config.instance_md5_version(),
+                                       'data', 'static', 'fckeditor'))
+        global MAX_POST_LENGTH
+        MAX_POST_LENGTH = config['max-post-length']
 
     def init_publisher(self):
         config = self.config
@@ -156,35 +151,38 @@
         except select.error:
             return
 
-    def locateChild(self, request, segments):
+    def getChild(self, path, request):
         """Indicate which resource to use to process down the URL's path"""
-        if segments:
-            if segments[0] == 'https':
-                segments = segments[1:]
-            if len(segments) >= 2:
-                if segments[0] in (self.versioned_datadir, 'data', 'static'):
-                    # Anything in data/, static/ is treated as static files
-                    if segments[0] == 'static':
-                        # instance static directory
-                        datadir = self.config.static_directory
-                    elif segments[1] == 'fckeditor':
-                        fckeditordir = self.config.ext_resources['FCKEDITOR_PATH']
-                        return static.File(fckeditordir), segments[2:]
-                    else:
-                        # cube static data file
-                        datadir = self.config.locate_resource(segments[1])
-                        if datadir is None:
-                            return None, []
-                    self.debug('static file %s from %s', segments[-1], datadir)
-                    if segments[0] == 'data':
-                        return static.File(str(datadir)), segments[1:]
-                    else:
-                        return LongTimeExpiringFile(datadir), segments[1:]
-                elif segments[0] == 'fckeditor':
-                    fckeditordir = self.config.ext_resources['FCKEDITOR_PATH']
-                    return static.File(fckeditordir), segments[1:]
+        pre_path = request.path.split('/')[1:]
+        if pre_path[0] == 'https':
+            pre_path.pop(0)
+        directory = pre_path[0]
+        # Anything in data/, static/, fckeditor/ and the generated versioned
+        # data directory is treated as static files
+        if directory in self.static_directories:
+            # note that fckeditor may appear as the root directory or as a
+            # data subdirectory
+            if directory == 'static':
+                return File(self.config.static_directory)
+            if directory == 'fckeditor':
+                return File(self.config.ext_resources['FCKEDITOR_PATH'])
+            if directory != 'data':
+                # versioned directory, use a specific file class with http
+                # cache headers so files are cached for a very long time
+                cls = LongTimeExpiringFile
+            else:
+                cls = File
+            if path == 'fckeditor':
+                return cls(self.config.ext_resources['FCKEDITOR_PATH'])
+            if path == directory: # recurse
+                return self
+            datadir = self.config.locate_resource(path)
+            if datadir is None:
+                return self # recurse
+            self.debug('static file %s from %s', path, datadir)
+            return cls(join(datadir, path))
         # Otherwise we use this single resource
-        return self, ()
+        return self
 
     def render(self, request):
         """Render a page from the root resource"""
@@ -194,9 +192,24 @@
         if self.config['profile']: # default profiler don't trace threads
             return self.render_request(request)
         else:
-            return threads.deferToThread(self.render_request, request)
+            deferred = threads.deferToThread(self.render_request, request)
+            return NOT_DONE_YET
 
     def render_request(self, request):
+        try:
+            # processing HUGE files (hundreds of megabytes) in
+            # http.processReceived blocks the processing of other HTTP requests
+            # because of the clumsy & slow parsing algorithm of cgi.FieldStorage,
+            # so we defer that part to the cubicweb thread
+            request.process_multipart()
+            return self._render_request(request)
+        except:
+            errorstream = StringIO()
+            traceback.print_exc(file=errorstream)
+            return HTTPResponse(stream='<pre>%s</pre>' % errorstream.getvalue(),
+                                code=500, twisted_request=request)
+
+    def _render_request(self, request):
         origpath = request.path
         host = request.host
         # dual http/https access handling: expect a rewrite rule to prepend
@@ -219,13 +232,11 @@
             req.set_header('WWW-Authenticate', [('Basic', {'realm' : realm })], raw=False)
         try:
             self.appli.connect(req)
-        except AuthenticationError:
-            return self.request_auth(req)
         except Redirect, ex:
-            return self.redirect(req, ex.location)
-        if https and req.cnx.anonymous_connection:
+            return self.redirect(request=req, location=ex.location)
+        if https and req.session.anonymous_session:
             # don't allow anonymous on https connection
-            return self.request_auth(req)
+            return self.request_auth(request=req)
         if self.url_rewriter is not None:
             # XXX should occur before authentication?
             try:
@@ -242,234 +253,146 @@
         except DirectResponse, ex:
             return ex.response
         except StatusResponse, ex:
-            return http.Response(stream=ex.content, code=ex.status,
-                                 headers=req.headers_out or None)
-        except RemoteCallFailed, ex:
-            req.set_header('content-type', 'application/json')
-            return http.Response(stream=ex.dumps(),
-                                 code=responsecode.INTERNAL_SERVER_ERROR)
-        except NotFound:
-            result = self.appli.notfound_content(req)
-            return http.Response(stream=result, code=responsecode.NOT_FOUND,
-                                 headers=req.headers_out or None)
-        except ExplicitLogin:  # must be before AuthenticationError
-            return self.request_auth(req)
-        except AuthenticationError, ex:
-            if self.config['auth-mode'] == 'cookie' and getattr(ex, 'url', None):
-                return self.redirect(req, ex.url)
+            return HTTPResponse(stream=ex.content, code=ex.status,
+                                twisted_request=req._twreq,
+                                headers=req.headers_out)
+        except AuthenticationError:
+            return self.request_auth(request=req)
+        except LogOut, ex:
+            if self.config['auth-mode'] == 'cookie' and ex.url:
+                return self.redirect(request=req, location=ex.url)
             # in http we have to request auth to flush current http auth
             # information
-            return self.request_auth(req, loggedout=True)
+            return self.request_auth(request=req, loggedout=True)
         except Redirect, ex:
-            return self.redirect(req, ex.location)
+            return self.redirect(request=req, location=ex.location)
         # request may be referenced by "onetime callback", so clear its entity
         # cache to avoid memory usage
         req.drop_entity_cache()
-        return http.Response(stream=result, code=responsecode.OK,
-                             headers=req.headers_out or None)
-
-    def redirect(self, req, location):
-        req.headers_out.setHeader('location', str(location))
-        self.debug('redirecting to %s', location)
-        # 303 See other
-        return http.Response(code=303, headers=req.headers_out)
+        return HTTPResponse(twisted_request=req._twreq, code=http.OK,
+                            stream=result, headers=req.headers_out)
 
-    def request_auth(self, req, loggedout=False):
-        if self.https_url and req.base_url() != self.https_url:
-            req.headers_out.setHeader('location', self.https_url + 'login')
-            return http.Response(code=303, headers=req.headers_out)
+    def redirect(self, request, location):
+        self.debug('redirecting to %s', str(location))
+        request.headers_out.setHeader('location', str(location))
+        # 303 See other
+        return HTTPResponse(twisted_request=request._twreq, code=303,
+                            headers=request.headers_out)
+
+    def request_auth(self, request, loggedout=False):
+        if self.https_url and request.base_url() != self.https_url:
+            return self.redirect(request, self.https_url + 'login')
         if self.config['auth-mode'] == 'http':
-            code = responsecode.UNAUTHORIZED
+            code = http.UNAUTHORIZED
         else:
-            code = responsecode.FORBIDDEN
+            code = http.FORBIDDEN
         if loggedout:
-            if req.https:
-                req._base_url =  self.base_url
-                req.https = False
-            content = self.appli.loggedout_content(req)
+            if request.https:
+                request._base_url =  self.base_url
+                request.https = False
+            content = self.appli.loggedout_content(request)
         else:
-            content = self.appli.need_login_content(req)
-        return http.Response(code, req.headers_out, content)
+            content = self.appli.need_login_content(request)
+        return HTTPResponse(twisted_request=request._twreq,
+                            stream=content, code=code,
+                            headers=request.headers_out)
 
-from twisted.internet import defer
-from twisted.web2 import fileupload
 
-# XXX set max file size to 200MB: put max upload size in the configuration
-# line below for twisted >= 8.0, default param value for earlier version
-resource.PostableResource.maxSize = 200*1024*1024
-def parsePOSTData(request, maxMem=100*1024, maxFields=1024,
-                  maxSize=200*1024*1024):
-    if request.stream.length == 0:
-        return defer.succeed(None)
+JSON_PATHS = set(('json',))
+FRAME_POST_PATHS = set(('validateform',))
 
-    ctype = request.headers.getHeader('content-type')
-
-    if ctype is None:
-        return defer.succeed(None)
-
-    def updateArgs(data):
-        args = data
-        request.args.update(args)
+orig_gotLength = http.Request.gotLength
+@monkeypatch(http.Request)
+def gotLength(self, length):
+    orig_gotLength(self, length)
+    if length > MAX_POST_LENGTH: # length is 0 on GET
+        path = self.channel._path.split('?', 1)[0].rstrip('/').rsplit('/', 1)[-1]
+        self.clientproto = 'HTTP/1.1' # not yet initialized
+        self.channel.persistent = 0   # force connection close on cleanup
+        self.setResponseCode(http.BAD_REQUEST)
+        if path in JSON_PATHS: # XXX better json path detection
+            self.setHeader('content-type',"application/json")
+            body = dumps({'reason': 'request max size exceeded'})
+        elif path in FRAME_POST_PATHS: # XXX better frame post path detection
+            self.setHeader('content-type',"text/html")
+            body = ('<script type="text/javascript">'
+                    'window.parent.handleFormValidationResponse(null, null, null, %s, null);'
+                    '</script>' % dumps( (False, 'request max size exceeded', None) ))
+        else:
+            self.setHeader('content-type',"text/html")
+            body = ("<html><head><title>Processing Failed</title></head><body>"
+                    "<b>request max size exceeded</b></body></html>")
+        self.setHeader('content-length', str(len(body)))
+        self.write(body)
+        # see request.finish(). Done here since we otherwise get an error due
+        # to the request not being fully initialized
+        self.finished = 1
+        if not self.queued:
+            self._cleanup()
+        for d in self.notifications:
+            d.callback(None)
+        self.notifications = []
 
-    def updateArgsAndFiles(data):
-        args, files = data
-        request.args.update(args)
-        request.files.update(files)
-
-    def error(f):
-        f.trap(fileupload.MimeFormatError)
-        raise http.HTTPError(responsecode.BAD_REQUEST)
+@monkeypatch(http.Request)
+def requestReceived(self, command, path, version):
+    """Called by channel when all data has been received.
 
-    if ctype.mediaType == 'application' and ctype.mediaSubtype == 'x-www-form-urlencoded':
-        d = fileupload.parse_urlencoded(request.stream, keep_blank_values=True)
-        d.addCallbacks(updateArgs, error)
-        return d
-    elif ctype.mediaType == 'multipart' and ctype.mediaSubtype == 'form-data':
-        boundary = ctype.params.get('boundary')
-        if boundary is None:
-            return defer.fail(http.HTTPError(
-                http.StatusResponse(responsecode.BAD_REQUEST,
-                                    "Boundary not specified in Content-Type.")))
-        d = fileupload.parseMultipartFormData(request.stream, boundary,
-                                              maxMem, maxFields, maxSize)
-        d.addCallbacks(updateArgsAndFiles, error)
-        return d
+    This method is not intended for users.
+    """
+    self.content.seek(0, 0)
+    self.args = {}
+    self.files = {}
+    self.stack = []
+    self.method, self.uri = command, path
+    self.clientproto = version
+    x = self.uri.split('?', 1)
+    if len(x) == 1:
+        self.path = self.uri
     else:
-        raise http.HTTPError(responsecode.BAD_REQUEST)
+        self.path, argstring = x
+        self.args = http.parse_qs(argstring, 1)
+    # cache the client and server information, we'll need this later to be
+    # serialized and sent with the request so CGIs will work remotely
+    self.client = self.channel.transport.getPeer()
+    self.host = self.channel.transport.getHost()
+    # Argument processing
+    ctype = self.getHeader('content-type')
+    self._do_process_multipart = False
+    if self.method == "POST" and ctype:
+        key, pdict = parse_header(ctype)
+        if key == 'application/x-www-form-urlencoded':
+            self.args.update(http.parse_qs(self.content.read(), 1))
+        elif key == 'multipart/form-data':
+            # defer this as it can be extremely time consuming
+            # with big files
+            self._do_process_multipart = True
+    self.process()
+
 
-server.parsePOSTData = parsePOSTData
-
+@monkeypatch(http.Request)
+def process_multipart(self):
+    if not self._do_process_multipart:
+        return
+    form = FieldStorage(self.content, self.received_headers,
+                        environ={'REQUEST_METHOD': 'POST'},
+                        keep_blank_values=1,
+                        strict_parsing=1)
+    for key in form:
+        value = form[key]
+        if isinstance(value, list):
+            self.args[key] = [v.value for v in value]
+        elif value.filename:
+            if value.done != -1: # -1 means the transfer has been interrupted
+                self.files[key] = (value.filename, value.file)
+            else:
+                self.files[key] = (None, None)
+        else:
+            self.args[key] = value.value
 
 from logging import getLogger
 from cubicweb import set_log_methods
-set_log_methods(CubicWebRootResource, getLogger('cubicweb.twisted'))
-
-
-listiterator = type(iter([]))
-
-def _gc_debug(all=True):
-    import gc
-    from pprint import pprint
-    from cubicweb.appobject import AppObject
-    gc.collect()
-    count = 0
-    acount = 0
-    fcount = 0
-    rcount = 0
-    ccount = 0
-    scount = 0
-    ocount = {}
-    from rql.stmts import Union
-    from cubicweb.schema import CubicWebSchema
-    from cubicweb.rset import ResultSet
-    from cubicweb.dbapi import Connection, Cursor
-    from cubicweb.req import RequestSessionBase
-    from cubicweb.server.repository import Repository
-    from cubicweb.server.sources.native import NativeSQLSource
-    from cubicweb.server.session import Session
-    from cubicweb.devtools.testlib import CubicWebTC
-    from logilab.common.testlib import TestSuite
-    from optparse import Values
-    import types, weakref
-    for obj in gc.get_objects():
-        if isinstance(obj, RequestSessionBase):
-            count += 1
-            if isinstance(obj, Session):
-                print '   session', obj, referrers(obj, True)
-        elif isinstance(obj, AppObject):
-            acount += 1
-        elif isinstance(obj, ResultSet):
-            rcount += 1
-            #print '   rset', obj, referrers(obj)
-        elif isinstance(obj, Repository):
-            print '   REPO', obj, referrers(obj, True)
-        #elif isinstance(obj, NativeSQLSource):
-        #    print '   SOURCe', obj, referrers(obj)
-        elif isinstance(obj, CubicWebTC):
-            print '   TC', obj, referrers(obj)
-        elif isinstance(obj, TestSuite):
-            print '   SUITE', obj, referrers(obj)
-        #elif isinstance(obj, Values):
-        #    print '   values', '%#x' % id(obj), referrers(obj, True)
-        elif isinstance(obj, Connection):
-            ccount += 1
-            #print '   cnx', obj, referrers(obj)
-        #elif isinstance(obj, Cursor):
-        #    ccount += 1
-        #    print '   cursor', obj, referrers(obj)
-        elif isinstance(obj, file):
-            fcount += 1
-        #    print '   open file', file.name, file.fileno
-        elif isinstance(obj, CubicWebSchema):
-            scount += 1
-            print '   schema', obj, referrers(obj)
-        elif not isinstance(obj, (type, tuple, dict, list, set, frozenset,
-                                  weakref.ref, weakref.WeakKeyDictionary,
-                                  listiterator,
-                                  property, classmethod,
-                                  types.ModuleType, types.MemberDescriptorType,
-                                  types.FunctionType, types.MethodType)):
-            try:
-                ocount[obj.__class__] += 1
-            except KeyError:
-                ocount[obj.__class__] = 1
-            except AttributeError:
-                pass
-    if count:
-        print ' NB REQUESTS/SESSIONS', count
-    if acount:
-        print ' NB APPOBJECTS', acount
-    if ccount:
-        print ' NB CONNECTIONS', ccount
-    if rcount:
-        print ' NB RSETS', rcount
-    if scount:
-        print ' NB SCHEMAS', scount
-    if fcount:
-        print ' NB FILES', fcount
-    if all:
-        ocount = sorted(ocount.items(), key=lambda x: x[1], reverse=True)[:20]
-        pprint(ocount)
-    if gc.garbage:
-        print 'UNREACHABLE', gc.garbage
-
-def referrers(obj, showobj=False):
-    try:
-        return sorted(set((type(x), showobj and x or getattr(x, '__name__', '%#x' % id(x)))
-                          for x in _referrers(obj)))
-    except TypeError:
-        s = set()
-        unhashable = []
-        for x in _referrers(obj):
-            try:
-                s.add(x)
-            except TypeError:
-                unhashable.append(x)
-        return sorted(s) + unhashable
-
-def _referrers(obj, seen=None, level=0):
-    import gc, types
-    from cubicweb.schema import CubicWebRelationSchema, CubicWebEntitySchema
-    interesting = []
-    if seen is None:
-        seen = set()
-    for x in gc.get_referrers(obj):
-        if id(x) in seen:
-            continue
-        seen.add(id(x))
-        if isinstance(x, types.FrameType):
-            continue
-        if isinstance(x, (CubicWebRelationSchema, CubicWebEntitySchema)):
-            continue
-        if isinstance(x, (list, tuple, set, dict, listiterator)):
-            if level >= 5:
-                pass
-                #interesting.append(x)
-            else:
-                interesting += _referrers(x, seen, level+1)
-        else:
-            interesting.append(x)
-    return interesting
+LOGGER = getLogger('cubicweb.twisted')
+set_log_methods(CubicWebRootResource, LOGGER)
 
 def run(config, debug):
     # create the site
@@ -477,22 +400,16 @@
     website = server.Site(root_resource)
     # serve it via standard HTTP on port set in the configuration
     port = config['port'] or 8080
-    reactor.listenTCP(port, channel.HTTPFactory(website))
+    reactor.listenTCP(port, website)
     logger = getLogger('cubicweb.twisted')
     if not debug:
         if sys.platform == 'win32':
             raise ConfigurationError("Under windows, you must use the service management "
                                      "commands (e.g : 'net start my_instance)'")
+        from logilab.common.daemon import daemonize
         print 'instance starting in the background'
-        if daemonize():
+        if daemonize(config['pid-file']):
             return # child process
-        if config['pid-file']:
-            # ensure the directory where the pid-file should be set exists (for
-            # instance /var/run/cubicweb may be deleted on computer restart)
-            piddir = os.path.dirname(config['pid-file'])
-            if not os.path.exists(piddir):
-                os.makedirs(piddir)
-            file(config['pid-file'], 'w').write(str(os.getpid()))
     root_resource.init_publisher() # before changing uid
     if config['uid'] is not None:
         try:
@@ -503,8 +420,11 @@
         os.setuid(uid)
     root_resource.start_service()
     logger.info('instance started on %s', root_resource.base_url)
+    # avoid an annoying warning if not in the main thread
+    signals = threading.currentThread().getName() == 'MainThread'
     if config['profile']:
         import cProfile
-        cProfile.runctx('reactor.run()', globals(), locals(), config['profile'])
+        cProfile.runctx('reactor.run(installSignalHandlers=%s)' % signals,
+                        globals(), locals(), config['profile'])
     else:
-        reactor.run()
+        reactor.run(installSignalHandlers=signals)
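
Note: the gotLength/requestReceived monkeypatches above move request-size
enforcement and multipart parsing out of twisted's default code path. A
reduced sketch of the size guard alone, with the limit hard-coded instead of
read from the 'max-post-length' option and the early-finish bookkeeping left
out (illustration only):

    from twisted.web import http

    MAX_POST_LENGTH = 100 * 1024 * 1024  # normally taken from the configuration

    def reject_if_too_large(request, length):
        """answer 400 up front, before the request body is even read"""
        if length <= MAX_POST_LENGTH:  # length is 0 on GET
            return False
        body = ("<html><head><title>Processing Failed</title></head><body>"
                "<b>request max size exceeded</b></body></html>")
        request.setResponseCode(http.BAD_REQUEST)
        request.setHeader('content-type', 'text/html')
        request.setHeader('content-length', str(len(body)))
        request.write(body)
        return True
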
--- a/etwist/service.py	Thu May 06 08:24:46 2010 +0200
+++ b/etwist/service.py	Mon Jul 19 15:36:16 2010 +0200
@@ -26,8 +26,7 @@
     sys.exit(3)
 
 
-from cubicweb.etwist.server import (CubicWebRootResource, reactor, server,
-                                    parsePOSTData, channel)
+from cubicweb.etwist.server import (CubicWebRootResource, reactor, server)
 
 import logging
 from logging import getLogger, handlers
@@ -39,7 +38,7 @@
     for var in env_vars:
         if var not in env:
             raise Exception('The environment variables %s must be set.' % \
-                            ', '.join(env_vars))            
+                            ', '.join(env_vars))
     if not env.get('USERNAME'):
         env['USERNAME'] = 'cubicweb'
 
@@ -53,7 +52,6 @@
         cwcfg.load_cwctl_plugins()
         logger = getLogger('cubicweb')
         set_log_methods(CubicWebRootResource, logger)
-        server.parsePOSTData = parsePOSTData
 
     def SvcStop(self):
         self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING)
@@ -73,12 +71,14 @@
             _check_env(os.environ)
             # create the site
             config = cwcfg.config_for(self.instance)
+            config.init_log(force=True)
+            logger.info('starting cubicweb instance %s ', self.instance)
             root_resource = CubicWebRootResource(config, False)
             website = server.Site(root_resource)
             # serve it via standard HTTP on port set in the configuration
             port = config['port'] or 8080
             logger.info('listening on port %s' % port)
-            reactor.listenTCP(port, channel.HTTPFactory(website))
+            reactor.listenTCP(port, website)
             root_resource.init_publisher()
             root_resource.start_service()
             logger.info('instance started on %s', root_resource.base_url)
--- a/etwist/twconfig.py	Thu May 06 08:24:46 2010 +0200
+++ b/etwist/twconfig.py	Mon Jul 19 15:36:16 2010 +0200
@@ -39,49 +39,55 @@
 
     options = merge_options((
         # ctl configuration
+        ('port',
+         {'type' : 'int',
+          'default': None,
+          'help': 'http server port number (default to 8080)',
+          'group': 'web', 'level': 0,
+          }),
+        ('max-post-length',
+         {'type' : 'bytes',
+          'default': '100MB',
+          'help': 'maximum length of HTTP request. Defaults to 100 MB.',
+          'group': 'web', 'level': 1,
+          }),
+        ('profile',
+         {'type' : 'string',
+          'default': None,
+          'help': 'profile code and use the specified file to store stats if this option is set',
+          'group': 'web', 'level': 3,
+          }),
         ('host',
          {'type' : 'string',
           'default': None,
           'help': 'host name if not correctly detectable through gethostname',
-          'group': 'main', 'inputlevel': 1,
-          }),
-        ('port',
-         {'type' : 'int',
-          'default': None,
-          'help': 'http server port number (default to 8080)',
-          'group': 'main', 'inputlevel': 0,
+          'group': 'main', 'level': 1,
           }),
         ('pid-file',
          {'type' : 'string',
           'default': Method('default_pid_file'),
           'help': 'repository\'s pid file',
-          'group': 'main', 'inputlevel': 2,
+          'group': 'main', 'level': 2,
           }),
         ('uid',
          {'type' : 'string',
           'default': None,
           'help': 'if this option is set, use the specified user to start \
 the repository rather than the user running the command',
-          'group': 'main', 'inputlevel': WebConfiguration.mode == 'system'
+          'group': 'main', 'level': WebConfiguration.mode == 'system'
           }),
         ('session-time',
          {'type' : 'time',
           'default': '30min',
           'help': 'session expiration time, default to 30 minutes',
-          'group': 'main', 'inputlevel': 1,
-          }),
-        ('profile',
-         {'type' : 'string',
-          'default': None,
-          'help': 'profile code and use the specified file to store stats if this option is set',
-          'group': 'main', 'inputlevel': 3,
+          'group': 'main', 'level': 1,
           }),
         ('pyro-server',
          {'type' : 'yn',
           # pyro is only a recommends by default, so don't activate it here
           'default': False,
           'help': 'run a pyro server',
-          'group': 'main', 'inputlevel': 1,
+          'group': 'main', 'level': 1,
           }),
         ) + WebConfiguration.options)
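
Note: the new 'max-post-length' option lands in the instance configuration
next to the other web options; assuming the usual mapping of option groups to
uppercase sections, an all-in-one.conf excerpt would look like this (value
shown is the default):

    [WEB]
    # maximum length of HTTP request. Defaults to 100 MB.
    max-post-length=100MB
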
 
--- a/etwist/twctl.py	Thu May 06 08:24:46 2010 +0200
+++ b/etwist/twctl.py	Mon Jul 19 15:36:16 2010 +0200
@@ -40,6 +40,9 @@
     cmdname = 'stop'
     cfgname = 'twisted'
 
+    def poststop(self):
+        pass
+
 
 try:
     from cubicweb.server import serverctl
--- a/ext/xhtml2fo.py	Thu May 06 08:24:46 2010 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,154 +0,0 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-#
-# This file is part of CubicWeb.
-#
-# CubicWeb is free software: you can redistribute it and/or modify it under the
-# terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option)
-# any later version.
-#
-# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-from xml.etree.ElementTree import QName
-from pysixt.standard.xhtml_xslfo.transformer import XHTML2FOTransformer
-from pysixt.utils.xslfo.standard import cm
-from pysixt.utils.xslfo import SimplePageMaster
-from pysixt.standard.xhtml_xslfo.default_styling import default_styles
-from pysixt.standard.xhtml_xslfo import XHTML_NS
-
-
-class ReportTransformer(XHTML2FOTransformer):
-    """
-    Class transforming an XHTML input tree into a FO document
-    displaying reports (one report for each <div class="contentmain">
-    element in the input tree.
-    """
-
-    def __init__(self, section,
-                 page_width=21.0, page_height=29.7,
-                 margin_top=1.0, margin_bottom=1.0,
-                 margin_left=1.0, margin_right=1.0,
-                 header_footer_height=0.75,
-                 standard_font_size=11.0, default_lang=u"fr" ):
-        """
-        Initializes a transformer turning an XHTML input tree
-        containing <div class="contentmain"> elements representing
-        main content sections into a FO output tree displaying the
-        reports.
-
-        page_width: float - width of the page (in cm)
-        page_height: float - height of the page (in cm)
-        margin_top: float - top margin of the page (in cm)
-        margin_bottom: float - bottom margin of the page (in cm)
-        margin_left: float - left margin of the page (in cm)
-        margin_right: float - right margin of the page (in cm)
-        header_footer_height: float - height of the header or the footer of the
-                              page that the page number (if any) will be
-                              inserted in.
-        standard_font_size: float - standard size of the font (in pt)
-        default_lang: u"" - default language (used for hyphenation)
-        """
-        self.section = section
-        self.page_width = page_width
-        self.page_height = page_height
-
-        self.page_tmargin = margin_top
-        self.page_bmargin = margin_bottom
-        self.page_lmargin = margin_left
-        self.page_rmargin = margin_right
-
-        self.hf_height = header_footer_height
-
-        self.font_size = standard_font_size
-        self.lang = default_lang
-
-        XHTML2FOTransformer.__init__(self)
-
-
-    def define_pagemasters(self):
-        """
-        Defines the page masters for the FO output document.
-        """
-        pm = SimplePageMaster(u"page-report")
-        pm.set_page_dims( self.page_width*cm, self.page_height*cm )
-        pm.set_page_margins({u'top'   : self.page_tmargin*cm,
-                             u'bottom': self.page_bmargin*cm,
-                             u'left'  : self.page_lmargin*cm,
-                             u'right' : self.page_rmargin*cm })
-        pm.add_peripheral_region(u"end", self.hf_height)
-        dims = {}
-        dims[u"bottom"] = self.hf_height + 0.25
-        pm.set_main_region_margins(dims)
-        return [pm]
-
-    def _visit_report(self, in_elt, _out_elt, params):
-        """
-        Specific visit function for the input <div> elements whose class is
-        "report". The _root_visit method of this class selects these input
-        elements and asks the process of these elements with this specific
-        visit function.
-        """
-
-        ps = self.create_pagesequence(u"page-report")
-        props = { u"force-page-count": u"no-force",
-                  u"initial-page-number": u"1",
-                  u"format": u"1", }
-        self._output_properties(ps, props)
-
-        sc = self.create_staticcontent(ps, u"end")
-        sc_bl = self.create_block(sc)
-        attrs = { u"hyphenate": u"false", }
-        attrs[u"font-size"] = u"%.1fpt" % (self.font_size * 0.7)
-        attrs[u"language"] = self.lang
-        attrs[u"text-align"] = u"center"
-        self._output_properties(sc_bl, attrs)
-        sc_bl.text = u"Page" + u" " # ### Should be localised!
-        pn = self.create_pagenumber(sc_bl)
-        pn.tail = u"/"
-        self.create_pagenumbercitation(
-            sc_bl, u"last-block-of-report-%d" % params[u"context_pos"])
-
-        fl = self.create_flow(ps, u"body")
-        bl = self.create_block(fl)
-
-        # Sets on the highest block element the properties of the XHTML body
-        # element. These properties (at the least the inheritable ones) will
-        # be inherited by all the future FO elements.
-        bodies = list(self.in_tree.getiterator(QName(XHTML_NS, u"body")))
-        if len(bodies) > 0:
-            attrs = self._extract_properties([bodies[0]])
-        else:
-            attrs = default_styles[u"body"].copy()
-        attrs[u"font-size"] = u"%.1fpt" % self.font_size
-        attrs[u"language"] = self.lang
-        self._output_properties(bl,attrs)
-
-        # Processes the report content
-        self._copy_text(in_elt, bl)
-        self._process_nodes(in_elt.getchildren(), bl)
-
-        # Inserts an empty block at the end of the report in order to be able
-        # to compute the last page number of this report.
-        last_bl = self.create_block(bl)
-        props = { u"keep-with-previous": u"always", }
-        props[u"id"] = u"last-block-of-report-%d" % params[u"context_pos"]
-        self._output_properties(last_bl,props)
-
-
-    def _root_visit(self):
-        """
-        Visit function called when starting the process of the input tree.
-        """
-        content = [ d for d in self.in_tree.getiterator(QName(XHTML_NS, u"div"))
-                    if d.get(u"id") == self.section ]
-        # Asks the process of the report elements with a specific visit
-        # function
-        self._process_nodes(content, self.fo_root,
-                            with_function=self._visit_report)
-
--- a/goa/goaconfig.py	Thu May 06 08:24:46 2010 +0200
+++ b/goa/goaconfig.py	Mon Jul 19 15:36:16 2010 +0200
@@ -51,25 +51,25 @@
          {'type' : 'csv',
           'default': [],
           'help': 'list of db model based cubes used by the instance.',
-          'group': 'main', 'inputlevel': 1,
+          'group': 'main', 'level': 1,
           }),
         ('included-yams-cubes',
          {'type' : 'csv',
           'default': [],
           'help': 'list of yams based cubes used by the instance.',
-          'group': 'main', 'inputlevel': 1,
+          'group': 'main', 'level': 1,
           }),
         ('use-google-auth',
          {'type' : 'yn',
           'default': True,
           'help': 'does this instance rely on google authentication service or not.',
-          'group': 'main', 'inputlevel': 1,
+          'group': 'main', 'level': 1,
           }),
         ('schema-type',
          {'type' : 'choice', 'choices': ('yams', 'dbmodel'),
           'default': 'yams',
           'help': 'does this instance is defining its schema using yams or db model.',
-          'group': 'main', 'inputlevel': 1,
+          'group': 'main', 'level': 1,
           }),
         # overriden options
         ('query-log-file',
@@ -78,7 +78,7 @@
           'help': 'web instance query log file: DON\'T SET A VALUE HERE WHEN '
           'UPLOADING YOUR INSTANCE. This should only be used to analyse '
           'queries issued by your instance in the development environment.',
-          'group': 'main', 'inputlevel': 2,
+          'group': 'main', 'level': 2,
           }),
         ('anonymous-user',
          {'type' : 'string',
@@ -87,7 +87,7 @@
           '(if you want to allow anonymous). This option will be ignored if '
           'use-google-auth option is set (in which case you should control '
           'anonymous access using the app.yaml file)',
-          'group': 'main', 'inputlevel': 1,
+          'group': 'main', 'level': 1,
           }),
 
         ) + WebConfiguration.options + ServerConfiguration.options)
--- a/hooks/email.py	Thu May 06 08:24:46 2010 +0200
+++ b/hooks/email.py	Mon Jul 19 15:36:16 2010 +0200
@@ -41,7 +41,7 @@
         if self.condition():
             self.session.execute(
                 'SET X %s Y WHERE X eid %%(x)s, Y eid %%(y)s' % self.rtype,
-                {'x': self.entity.eid, 'y': self.email.eid}, 'x')
+                {'x': self.entity.eid, 'y': self.email.eid})
 
 
 class SetPrimaryEmailRelationOp(SetUseEmailRelationOp):
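
Note: the hooks hunks above and below repeatedly drop the third positional
argument from session.execute; with this backport the eid substitutions are
taken from the arguments dict alone, so the extra cache-key argument is no
longer needed. Before/after, in isolation:

    # old style: eid arguments declared explicitly as a cache key
    session.execute('Any X WHERE X eid %(x)s', {'x': eid}, 'x')
    # new style: the arguments dict is enough
    session.execute('Any X WHERE X eid %(x)s', {'x': eid})
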
--- a/hooks/integrity.py	Thu May 06 08:24:46 2010 +0200
+++ b/hooks/integrity.py	Mon Jul 19 15:36:16 2010 +0200
@@ -90,7 +90,7 @@
                 continue
             if rtype in pendingrtypes:
                 continue
-            if not session.execute(self.base_rql % rtype, {'x': eid}, 'x'):
+            if not session.execute(self.base_rql % rtype, {'x': eid}):
                 etype = session.describe(eid)[0]
                 _ = session._
                 msg = _('at least one relation %(rtype)s is required on '
@@ -136,10 +136,10 @@
             if rdef.role_cardinality(role) in '1+':
                 if role == 'subject':
                     set_operation(self._cw, '_cwisrel', (eid, rschema.type),
-                                  _CheckSRelationOp)
+                                  _CheckSRelationOp, list)
                 else:
                     set_operation(self._cw, '_cwiorel', (eid, rschema.type),
-                                  _CheckORelationOp)
+                                  _CheckORelationOp, list)
 
     def before_delete_relation(self):
         rtype = self.rtype
@@ -153,35 +153,37 @@
         card = session.schema_rproperty(rtype, eidfrom, eidto, 'cardinality')
         if card[0] in '1+' and not session.deleted_in_transaction(eidfrom):
             set_operation(self._cw, '_cwisrel', (eidfrom, rtype),
-                          _CheckSRelationOp)
+                          _CheckSRelationOp, list)
         if card[1] in '1+' and not session.deleted_in_transaction(eidto):
             set_operation(self._cw, '_cwiorel', (eidto, rtype),
-                          _CheckORelationOp)
+                          _CheckORelationOp, list)
 
 
 class _CheckConstraintsOp(hook.LateOperation):
-    """check a new relation satisfy its constraints
-    """
+    """ check a new relation satisfy its constraints """
+
     def precommit_event(self):
-        eidfrom, rtype, eidto = self.rdef
-        # first check related entities have not been deleted in the same
-        # transaction
-        if self.session.deleted_in_transaction(eidfrom):
-            return
-        if self.session.deleted_in_transaction(eidto):
-            return
-        for constraint in self.constraints:
-            # XXX
-            # * lock RQLConstraint as well?
-            # * use a constraint id to use per constraint lock and avoid
-            #   unnecessary commit serialization ?
-            if isinstance(constraint, RQLUniqueConstraint):
-                _acquire_unique_cstr_lock(self.session)
-            try:
-                constraint.repo_check(self.session, eidfrom, rtype, eidto)
-            except NotImplementedError:
-                self.critical('can\'t check constraint %s, not supported',
-                              constraint)
+        session = self.session
+        for values in session.transaction_data.pop('check_constraints_op'):
+            eidfrom, rtype, eidto, constraints = values
+            # first check related entities have not been deleted in the same
+            # transaction
+            if session.deleted_in_transaction(eidfrom):
+                return
+            if session.deleted_in_transaction(eidto):
+                return
+            for constraint in constraints:
+                # XXX
+                # * lock RQLConstraint as well?
+                # * use a constraint id to use per constraint lock and avoid
+                #   unnecessary commit serialization ?
+                if isinstance(constraint, RQLUniqueConstraint):
+                    _acquire_unique_cstr_lock(session)
+                try:
+                    constraint.repo_check(session, eidfrom, rtype, eidto)
+                except NotImplementedError:
+                    self.critical('can\'t check constraint %s, not supported',
+                                  constraint)
 
     def commit_event(self):
         pass
@@ -201,8 +203,9 @@
         constraints = self._cw.schema_rproperty(self.rtype, self.eidfrom, self.eidto,
                                                 'constraints')
         if constraints:
-            _CheckConstraintsOp(self._cw, constraints=constraints,
-                               rdef=(self.eidfrom, self.rtype, self.eidto))
+            hook.set_operation(self._cw, 'check_constraints_op',
+                               (self.eidfrom, self.rtype, self.eidto, tuple(constraints)),
+                               _CheckConstraintsOp, list)
 
 
 class CheckAttributeConstraintHook(IntegrityHook):
@@ -221,8 +224,9 @@
                 constraints = [c for c in eschema.rdef(attr).constraints
                                if isinstance(c, (RQLUniqueConstraint, RQLConstraint))]
                 if constraints:
-                    _CheckConstraintsOp(self._cw, constraints=constraints,
-                                        rdef=(self.entity.eid, attr, None))
+                    hook.set_operation(self._cw, 'check_constraints_op',
+                                       (self.entity.eid, attr, None, tuple(constraints)),
+                                       _CheckConstraintsOp, list)
 
 
 class CheckUniqueHook(IntegrityHook):
@@ -317,7 +321,7 @@
             # don't do anything if the entity is being created or deleted
             if not (eid in pendingeids or eid in neweids):
                 etype = session.describe(eid)[0]
-                session.execute(self.base_rql % (etype, rtype), {'x': eid}, 'x')
+                session.execute(self.base_rql % (etype, rtype), {'x': eid})
 
 class _DelayedDeleteSEntityOp(_DelayedDeleteOp):
     """delete orphan subject entity of a composite relation"""
--- a/hooks/metadata.py	Thu May 06 08:24:46 2010 +0200
+++ b/hooks/metadata.py	Mon Jul 19 15:36:16 2010 +0200
@@ -15,12 +15,10 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""Core hooks: set generic metadata
+"""Core hooks: set generic metadata"""
 
-"""
 __docformat__ = "restructuredtext en"
 
-
 from datetime import datetime
 
 from cubicweb.selectors import implements
@@ -69,11 +67,13 @@
 
     def precommit_event(self):
         session = self.session
-        if session.deleted_in_transaction(self.entity.eid):
-            # entity have been created and deleted in the same transaction
-            return
-        if not self.entity.created_by:
-            session.add_relation(self.entity.eid, 'created_by', session.user.eid)
+        for eid in session.transaction_data.pop('set_creator_op'):
+            if session.deleted_in_transaction(eid):
+                # entity has been created and deleted in the same transaction
+                continue
+            entity = session.entity_from_eid(eid)
+            if not entity.created_by:
+                session.add_relation(eid, 'created_by', session.user.eid)
 
 
 class SetIsHook(MetaDataHook):
@@ -108,15 +108,14 @@
     def __call__(self):
         if not self._cw.is_internal_session:
             self._cw.add_relation(self.entity.eid, 'owned_by', self._cw.user.eid)
-            _SetCreatorOp(self._cw, entity=self.entity)
-
+            hook.set_operation(self._cw, 'set_creator_op', self.entity.eid, _SetCreatorOp)
 
 class _SyncOwnersOp(hook.Operation):
     def precommit_event(self):
-        self.session.execute('SET X owned_by U WHERE C owned_by U, C eid %(c)s,'
-                             'NOT EXISTS(X owned_by U, X eid %(x)s)',
-                             {'c': self.compositeeid, 'x': self.composedeid},
-                             ('c', 'x'))
+        for compositeeid, composedeid in self.session.transaction_data.pop('sync_owners_op'):
+            self.session.execute('SET X owned_by U WHERE C owned_by U, C eid %(c)s,'
+                                 'NOT EXISTS(X owned_by U, X eid %(x)s)',
+                                 {'c': compositeeid, 'x': composedeid})
 
 
 class SyncCompositeOwner(MetaDataHook):
@@ -133,9 +132,9 @@
         eidfrom, eidto = self.eidfrom, self.eidto
         composite = self._cw.schema_rproperty(self.rtype, eidfrom, eidto, 'composite')
         if composite == 'subject':
-            _SyncOwnersOp(self._cw, compositeeid=eidfrom, composedeid=eidto)
+            hook.set_operation(self._cw, 'sync_owners_op', (eidfrom, eidto), _SyncOwnersOp)
         elif composite == 'object':
-            _SyncOwnersOp(self._cw, compositeeid=eidto, composedeid=eidfrom)
+            hook.set_operation(self._cw, 'sync_owners_op', (eidto, eidfrom), _SyncOwnersOp)
 
 
 class FixUserOwnershipHook(MetaDataHook):
@@ -159,18 +158,10 @@
         rtype = self.rtype
         session = self._cw
         ftcontainer = session.vreg.schema.rschema(rtype).fulltext_container
-        if self.event == 'after_add_relation':
-            if ftcontainer == 'subject':
-                session.repo.system_source.index_entity(
-                    session, session.entity_from_eid(self.eidfrom))
-            elif ftcontainer == 'object':
-                session.repo.system_source.index_entity(
-                    session, session.entity_from_eid(self.eidto))
-        # after delete relation
-        elif ftcontainer == 'subject':
+        if ftcontainer == 'subject':
             session.repo.system_source.index_entity(
-                session, entity=session.entity_from_eid(self.eidfrom))
+                session, session.entity_from_eid(self.eidfrom))
         elif ftcontainer == 'object':
             session.repo.system_source.index_entity(
-                session, entity=session.entity_from_eid(self.eidto))
+                session, session.entity_from_eid(self.eidto))
 
--- a/hooks/notification.py	Thu May 06 08:24:46 2010 +0200
+++ b/hooks/notification.py	Mon Jul 19 15:36:16 2010 +0200
@@ -137,7 +137,7 @@
             rqlsel.append(var)
             rqlrestr.append('X %s %s' % (attr, var))
         rql = 'Any %s WHERE %s' % (','.join(rqlsel), ','.join(rqlrestr))
-        rset = session.execute(rql, {'x': self.entity.eid}, 'x')
+        rset = session.execute(rql, {'x': self.entity.eid})
         for i, attr in enumerate(attrs):
             oldvalue = rset[0][i]
             newvalue = self.entity[attr]
@@ -188,7 +188,7 @@
         except:
             # may raise an error during deletion process, for instance due to
             # missing required relation
-            title = '#%s' % eid
+            title = '#%s' % self.entity.eid
         self._cw.transaction_data.setdefault('pendingchanges', []).append(
-            ('delete_entity', (self.entity.eid, str(self.entity.e_schema), title)))
+            ('delete_entity', (self.entity.eid, self.entity.__regid__, title)))
         return True
--- a/hooks/security.py	Thu May 06 08:24:46 2010 +0200
+++ b/hooks/security.py	Mon Jul 19 15:36:16 2010 +0200
@@ -17,8 +17,8 @@
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """Security hooks: check permissions to add/delete/update entities according to
 the user connected to a session
+"""
 
-"""
 __docformat__ = "restructuredtext en"
 
 from cubicweb import Unauthorized
@@ -26,7 +26,7 @@
 from cubicweb.server import BEFORE_ADD_RELATIONS, ON_COMMIT_ADD_RELATIONS, hook
 
 
-def check_entity_attributes(session, entity, editedattrs=None):
+def check_entity_attributes(session, entity, editedattrs=None, creation=False):
     eid = entity.eid
     eschema = entity.e_schema
     # .skip_security_attributes is there to bypass security for attributes
@@ -43,6 +43,8 @@
         rdef = eschema.rdef(attr)
         if rdef.final: # non final relation are checked by other hooks
             # add/delete should be equivalent (XXX: unify them into 'update' ?)
+            if creation and not rdef.permissions.get('update'):
+                continue
             rdef.check_perm(session, 'update', eid=eid)
     # don't update dontcheck until everything went fine: see usage in
     # after_update_entity, where if we got an Unauthorized at hook time, we will
@@ -53,8 +55,13 @@
 class _CheckEntityPermissionOp(hook.LateOperation):
     def precommit_event(self):
         #print 'CheckEntityPermissionOp', self.session.user, self.entity, self.action
-        self.entity.check_perm(self.action)
-        check_entity_attributes(self.session, self.entity, self.editedattrs)
+        session = self.session
+        for values in session.transaction_data.pop('check_entity_perm_op'):
+            entity = session.entity_from_eid(values[0])
+            action = values[1]
+            entity.check_perm(action)
+            check_entity_attributes(session, entity, values[2:],
+                                    creation=self.creation)
 
     def commit_event(self):
         pass
@@ -62,10 +69,12 @@
 
 class _CheckRelationPermissionOp(hook.LateOperation):
     def precommit_event(self):
-        rdef = self.rschema.rdef(self.session.describe(self.eidfrom)[0],
-                                 self.session.describe(self.eidto)[0])
-        rdef.check_perm(self.session, self.action,
-                        fromeid=self.eidfrom, toeid=self.eidto)
+        session = self.session
+        for args in session.transaction_data.pop('check_relation_perm_op'):
+            action, rschema, eidfrom, eidto = args
+            rdef = rschema.rdef(session.describe(eidfrom)[0],
+                                session.describe(eidto)[0])
+            rdef.check_perm(session, action, fromeid=eidfrom, toeid=eidto)
 
     def commit_event(self):
         pass
@@ -89,9 +98,9 @@
     events = ('after_add_entity',)
 
     def __call__(self):
-        _CheckEntityPermissionOp(self._cw, entity=self.entity,
-                                 editedattrs=tuple(self.entity.edited_attributes),
-                                 action='add')
+        hook.set_operation(self._cw, 'check_entity_perm_op',
+                           (self.entity.eid, 'add') + tuple(self.entity.edited_attributes),
+                           _CheckEntityPermissionOp, creation=True)
 
 
 class AfterUpdateEntitySecurityHook(SecurityHook):
@@ -108,9 +117,9 @@
             # save back editedattrs in case the entity is reedited later in the
             # same transaction, which will lead to edited_attributes being
             # overwritten
-            _CheckEntityPermissionOp(self._cw, entity=self.entity,
-                                     editedattrs=tuple(self.entity.edited_attributes),
-                                     action='update')
+            hook.set_operation(self._cw, 'check_entity_perm_op',
+                               (self.entity.eid, 'update') + tuple(self.entity.edited_attributes),
+                               _CheckEntityPermissionOp, creation=False)
 
 
 class BeforeDelEntitySecurityHook(SecurityHook):
@@ -147,10 +156,9 @@
                 return
             rschema = self._cw.repo.schema[self.rtype]
             if self.rtype in ON_COMMIT_ADD_RELATIONS:
-                _CheckRelationPermissionOp(self._cw, action='add',
-                                           rschema=rschema,
-                                           eidfrom=self.eidfrom,
-                                           eidto=self.eidto)
+                hook.set_operation(self._cw, 'check_relation_perm_op',
+                                   ('add', rschema, self.eidfrom, self.eidto),
+                                   _CheckRelationPermissionOp)
             else:
                 rdef = rschema.rdef(self._cw.describe(self.eidfrom)[0],
                                     self._cw.describe(self.eidto)[0])
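
Note on hooks/security.py: the hooks no longer create one operation instance per edited entity or relation; they push their data through hook.set_operation under a transaction_data key, and a single late operation pops and processes the whole batch in precommit_event. A minimal sketch of the accumulation side, written as an illustrative stand-in for the real helper (only transaction_data and the operation classes above are taken from the diff):

    def set_operation(session, datakey, value, opcls, **opkwargs):
        # illustrative: create the operation once per transaction, then only accumulate
        if datakey not in session.transaction_data:
            opcls(session, **opkwargs)
            session.transaction_data[datakey] = set()
        session.transaction_data[datakey].add(value)

The matching consumer is visible above: _CheckEntityPermissionOp.precommit_event pops 'check_entity_perm_op' and checks each recorded (eid, action, attrs...) tuple in turn.
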
--- a/hooks/syncschema.py	Thu May 06 08:24:46 2010 +0200
+++ b/hooks/syncschema.py	Mon Jul 19 15:36:16 2010 +0200
@@ -21,8 +21,8 @@
 - perform physical update on the source when necessary
 
 checking for schema consistency is done in hooks.py
+"""
 
-"""
 __docformat__ = "restructuredtext en"
 
 from copy import copy
@@ -83,7 +83,7 @@
     table = SQL_PREFIX + etype
     column = SQL_PREFIX + rtype
     try:
-        session.system_sql(str('ALTER TABLE %s ADD COLUMN %s integer'
+        session.system_sql(str('ALTER TABLE %s ADD %s integer'
                                % (table, column)), rollback_on_failure=False)
         session.info('added column %s to table %s', column, table)
     except:
@@ -144,15 +144,17 @@
     table = column = None # make pylint happy
     def precommit_event(self):
         session, table, column = self.session, self.table, self.column
+        source = session.repo.system_source
         # drop index if any
-        session.pool.source('system').drop_index(session, table, column)
-        try:
+        source.drop_index(session, table, column)
+        if source.dbhelper.alter_column_support:
             session.system_sql('ALTER TABLE %s DROP COLUMN %s'
                                % (table, column), rollback_on_failure=False)
             self.info('dropped column %s from table %s', column, table)
-        except Exception, ex:
+        else:
             # not supported by sqlite for instance
-            self.error('error while altering table %s: %s', table, ex)
+            self.error('dropping column not supported by the backend, handle '
+                       'it yourself (%s.%s)', table, column)
 
 
 # base operations for in-memory schema synchronization  ########################
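
Note on the DropColumn hunk above: instead of issuing ALTER TABLE ... DROP COLUMN and swallowing the failure, the operation now asks the system source's dbhelper whether the backend supports dropping columns at all (sqlite does not). A hedged sketch of the guard, using only names visible in the hunk:

    def drop_column(session, table, column):
        source = session.repo.system_source
        source.drop_index(session, table, column)   # drop index if any
        if source.dbhelper.alter_column_support:
            session.system_sql('ALTER TABLE %s DROP COLUMN %s' % (table, column),
                               rollback_on_failure=False)
        else:
            # sqlite and friends: keep the column and log, as the operation above does
            pass
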
@@ -249,12 +251,11 @@
             return
         session = self.session
         if 'fulltext_container' in self.values:
-            ftiupdates = session.transaction_data.setdefault(
-                'fti_update_etypes', set())
             for subjtype, objtype in rschema.rdefs:
-                ftiupdates.add(subjtype)
-                ftiupdates.add(objtype)
-            UpdateFTIndexOp(session)
+                hook.set_operation(session, 'fti_update_etypes', subjtype,
+                                   UpdateFTIndexOp)
+                hook.set_operation(session, 'fti_update_etypes', objtype,
+                                   UpdateFTIndexOp)
         if not 'inlined' in self.values:
             return # nothing to do
         inlined = self.values['inlined']
@@ -283,9 +284,10 @@
                 sqlexec('INSERT INTO %s_relation SELECT %s, %s FROM %s WHERE NOT %s IS NULL'
                         % (rtype, eidcolumn, column, table, column))
             # drop existant columns
+            #if session.repo.system_source.dbhelper.alter_column_support:
             for etype in rschema.subjects():
                 DropColumn(session, table=SQL_PREFIX + str(etype),
-                             column=SQL_PREFIX + rtype)
+                           column=SQL_PREFIX + rtype)
         else:
             for etype in rschema.subjects():
                 try:
@@ -363,7 +365,7 @@
         sysource = session.pool.source('system')
         attrtype = y2sql.type_from_constraints(
             sysource.dbhelper, rdef.object, rdef.constraints)
-        # XXX should be moved somehow into lgc.adbh: sqlite doesn't support to
+        # XXX should be moved somehow into lgdb: sqlite doesn't support to
         # add a new column with UNIQUE, it should be added after the ALTER TABLE
         # using ADD INDEX
         if sysource.dbdriver == 'sqlite' and 'UNIQUE' in attrtype:
@@ -376,7 +378,7 @@
         table = SQL_PREFIX + rdef.subject
         column = SQL_PREFIX + rdef.name
         try:
-            session.system_sql(str('ALTER TABLE %s ADD COLUMN %s %s'
+            session.system_sql(str('ALTER TABLE %s ADD %s %s'
                                    % (table, column, attrtype)),
                                rollback_on_failure=False)
             self.info('added column %s to table %s', table, column)
@@ -502,23 +504,21 @@
             else:
                 sysource.drop_index(session, table, column)
         if 'cardinality' in self.values and self.rschema.final:
-            adbh = session.pool.source('system').dbhelper
-            if not adbh.alter_column_support:
+            syssource = session.pool.source('system')
+            if not syssource.dbhelper.alter_column_support:
                 # not supported (and NOT NULL not set by yams in that case, so
-                # no worry)
+                # no worry) XXX (syt) then should we set NOT NULL below ??
                 return
             atype = self.rschema.objects(etype)[0]
             constraints = self.rschema.rdef(etype, atype).constraints
-            coltype = y2sql.type_from_constraints(adbh, atype, constraints,
+            coltype = y2sql.type_from_constraints(syssource.dbhelper, atype, constraints,
                                                   creating=False)
             # XXX check self.values['cardinality'][0] actually changed?
-            notnull = self.values['cardinality'][0] != '1'
-            sql = adbh.sql_set_null_allowed(table, column, coltype, notnull)
-            session.system_sql(sql)
+            syssource.set_null_allowed(self.session, table, column, coltype,
+                                       self.values['cardinality'][0] != '1')
         if 'fulltextindexed' in self.values:
-            UpdateFTIndexOp(session)
-            session.transaction_data.setdefault(
-                'fti_update_etypes', set()).add(etype)
+            hook.set_operation(session, 'fti_update_etypes', etype,
+                               UpdateFTIndexOp)
 
 
 class SourceDbCWConstraintAdd(hook.Operation):
@@ -544,15 +544,14 @@
         # alter the physical schema on size constraint changes
         if newcstr.type() == 'SizeConstraint' and (
             oldcstr is None or oldcstr.max != newcstr.max):
-            adbh = self.session.pool.source('system').dbhelper
+            syssource = self.session.pool.source('system')
             card = rtype.rdef(subjtype, objtype).cardinality
-            coltype = y2sql.type_from_constraints(adbh, objtype, [newcstr],
-                                                  creating=False)
-            sql = adbh.sql_change_col_type(table, column, coltype, card != '1')
+            coltype = y2sql.type_from_constraints(syssource.dbhelper, objtype,
+                                                  [newcstr], creating=False)
             try:
-                session.system_sql(sql, rollback_on_failure=False)
-                self.info('altered column %s of table %s: now VARCHAR(%s)',
-                          column, table, newcstr.max)
+                syssource.change_col_type(session, table, column, coltype, card[0] != '1')
+                self.info('altered column %s of table %s: now %s',
+                          column, table, coltype)
             except Exception, ex:
                 # not supported by sqlite for instance
                 self.error('error while altering table %s: %s', table, ex)
@@ -567,16 +566,19 @@
 
     def precommit_event(self):
         cstrtype = self.cstr.type()
-        table = SQL_PREFIX + str(self.subjtype)
-        column = SQL_PREFIX + str(self.rtype)
+        table = SQL_PREFIX + str(self.rdef.subject)
+        column = SQL_PREFIX + str(self.rdef.rtype)
         # alter the physical schema on size/unique constraint changes
         if cstrtype == 'SizeConstraint':
+            syssource = self.session.pool.source('system')
+            coltype = y2sql.type_from_constraints(syssource.dbhelper,
+                                                  self.rdef.object, [],
+                                                  creating=False)
             try:
-                self.session.system_sql('ALTER TABLE %s ALTER COLUMN %s TYPE TEXT'
-                                        % (table, column),
-                                        rollback_on_failure=False)
-                self.info('altered column %s of table %s: now TEXT',
-                          column, table)
+                syssource.change_col_type(self.session, table, column, coltype,
+                                          self.rdef.cardinality[0] != '1')
+                self.info('altered column %s of table %s: now %s',
+                          column, table, coltype)
             except Exception, ex:
                 # not supported by sqlite for instance
                 self.error('error while altering table %s: %s', table, ex)
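
Note on the two constraint hunks above: both delegate the physical change to the system source, with y2sql.type_from_constraints recomputing the column type from the remaining constraints and change_col_type applying it. Roughly, adding or growing a SizeConstraint yields a bounded varchar while dropping it falls back to an unbounded text type; the helper below is a simplified stand-in for that mapping, not the real y2sql function:

    def column_type_for(atype, constraints):
        # simplified stand-in for y2sql.type_from_constraints (String case only)
        if atype != 'String':
            raise NotImplementedError(atype)
        for cstr in constraints:
            maxsize = getattr(cstr, 'max', None)
            if maxsize:                      # a SizeConstraint is present
                return 'varchar(%s)' % maxsize
        return 'text'                        # no size limit left
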
@@ -1017,7 +1019,7 @@
             DropRelationTable(session, rschema.type)
         # if this is the last instance, drop associated relation type
         if lastrel and not self.eidto in pendings:
-            execute('DELETE CWRType X WHERE X eid %(x)s', {'x': self.eidto}, 'x')
+            execute('DELETE CWRType X WHERE X eid %(x)s', {'x': self.eidto})
         MemSchemaRDefDel(session, (subjschema, rschema, objschema))
 
 
@@ -1107,8 +1109,7 @@
         except IndexError:
             self._cw.critical('constraint type no more accessible')
         else:
-            SourceDbCWConstraintDel(self._cw, cstr=cstr,
-                                    subjtype=rdef.subject, rtype=rdef.rtype)
+            SourceDbCWConstraintDel(self._cw, rdef=rdef, cstr=cstr)
             MemSchemaCWConstraintDel(self._cw, rdef=rdef, cstr=cstr)
 
 
@@ -1164,7 +1165,7 @@
     def postcommit_event(self):
         session = self.session
         source = session.repo.system_source
-        to_reindex = session.transaction_data.get('fti_update_etypes', ())
+        to_reindex = session.transaction_data.pop('fti_update_etypes', ())
         self.info('%i etypes need full text indexed reindexation',
                   len(to_reindex))
         schema = self.session.repo.vreg.schema
@@ -1177,7 +1178,7 @@
                 source.fti_unindex_entity(session, entity.eid)
                 for container in entity.fti_containers():
                     if still_fti or container is not entity:
-                        source.fti_unindex_entity(session, entity.eid)
+                        source.fti_unindex_entity(session, container.eid)
                         source.fti_index_entity(session, container)
         if len(to_reindex):
             # Transaction have already been committed
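
Note on the last two hunks of this file: UpdateFTIndexOp now pops 'fti_update_etypes' (so the set cannot leak into a later operation of the same transaction) and the inner loop unindexes the fulltext container's eid instead of unindexing the entity twice. A simplified, hedged sketch of the corrected loop (the still_fti test above is omitted; iteration over entities of a type is assumed, the fti_* calls come from the hunk):

    def reindex_etype(session, source, etype):
        for entity in session.execute('Any X WHERE X is %s' % etype).entities():
            source.fti_unindex_entity(session, entity.eid)
            for container in entity.fti_containers():
                source.fti_unindex_entity(session, container.eid)
                source.fti_index_entity(session, container)
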
--- a/hooks/syncsession.py	Thu May 06 08:24:46 2010 +0200
+++ b/hooks/syncsession.py	Mon Jul 19 15:36:16 2010 +0200
@@ -49,7 +49,7 @@
         no query should be emitted while comitting
         """
         rql = 'Any N WHERE G eid %(x)s, G name N'
-        result = session.execute(rql, {'x': kwargs['geid']}, 'x', build_descr=False)
+        result = session.execute(rql, {'x': kwargs['geid']}, build_descr=False)
         hook.Operation.__init__(self, session, *args, **kwargs)
         self.group = result[0][0]
 
@@ -229,7 +229,7 @@
         if not session.describe(eidfrom)[0] == 'CWProperty':
             return
         key, value = session.execute('Any K,V WHERE P eid %(x)s,P pkey K,P value V',
-                                     {'x': eidfrom}, 'x')[0]
+                                     {'x': eidfrom})[0]
         if session.vreg.property_info(key)['sitewide']:
             qname = role_name('for_user', 'subject')
             msg = session._("site-wide property can't be set for user")
@@ -247,7 +247,7 @@
     def __call__(self):
         session = self._cw
         key = session.execute('Any K WHERE P eid %(x)s, P pkey K',
-                              {'x': self.eidfrom}, 'x')[0][0]
+                              {'x': self.eidfrom})[0][0]
         session.transaction_data.setdefault('pendingrelations', []).append(
             (self.eidfrom, self.rtype, self.eidto))
         for session_ in get_user_sessions(session.repo, self.eidto):
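
Note on a change that recurs throughout this changeset, in the hooks above and the tests below: execute() no longer takes the positional cache-key argument that flagged which substitution held an eid. A before/after sketch, with the query taken from the calls above:

    # 3.7 style: the trailing 'x' told the querier that %(x)s is an eid
    rset = session.execute('Any X WHERE X eid %(x)s', {'x': eid}, 'x')

    # 3.8 style: eid substitutions are handled without the extra argument
    rset = session.execute('Any X WHERE X eid %(x)s', {'x': eid})
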
--- a/hooks/test/unittest_bookmarks.py	Thu May 06 08:24:46 2010 +0200
+++ b/hooks/test/unittest_bookmarks.py	Mon Jul 19 15:36:16 2010 +0200
@@ -15,9 +15,6 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""
-
-"""
 from logilab.common.testlib import unittest_main
 from cubicweb.devtools.testlib import CubicWebTC
 
@@ -31,10 +28,10 @@
         self.commit()
         self.execute('DELETE X bookmarked_by U WHERE U login "admin"')
         self.commit()
-        self.failUnless(self.execute('Any X WHERE X eid %(x)s', {'x': beid}, 'x'))
+        self.failUnless(self.execute('Any X WHERE X eid %(x)s', {'x': beid}))
         self.execute('DELETE X bookmarked_by U WHERE U login "anon"')
         self.commit()
-        self.failIf(self.execute('Any X WHERE X eid %(x)s', {'x': beid}, 'x'))
+        self.failIf(self.execute('Any X WHERE X eid %(x)s', {'x': beid}))
 
 if __name__ == '__main__':
     unittest_main()
--- a/hooks/test/unittest_syncschema.py	Thu May 06 08:24:46 2010 +0200
+++ b/hooks/test/unittest_syncschema.py	Mon Jul 19 15:36:16 2010 +0200
@@ -49,17 +49,17 @@
 
     def _set_perms(self, eid):
         self.execute('SET X read_permission G WHERE X eid %(x)s, G is CWGroup',
-                     {'x': eid}, 'x')
+                     {'x': eid})
         self.execute('SET X add_permission G WHERE X eid %(x)s, G is CWGroup, G name "managers"',
-                     {'x': eid}, 'x')
+                     {'x': eid})
         self.execute('SET X delete_permission G WHERE X eid %(x)s, G is CWGroup, G name "owners"',
-                     {'x': eid}, 'x')
+                     {'x': eid})
 
     def _set_attr_perms(self, eid):
         self.execute('SET X read_permission G WHERE X eid %(x)s, G is CWGroup',
-                     {'x': eid}, 'x')
+                     {'x': eid})
         self.execute('SET X update_permission G WHERE X eid %(x)s, G is CWGroup, G name "managers"',
-                     {'x': eid}, 'x')
+                     {'x': eid})
 
     def test_base(self):
         schema = self.repo.schema
@@ -105,7 +105,7 @@
                                'WHERE RT name "concerne2", E name "CWUser"')[0][0]
         self._set_perms(rdefeid)
         self.commit()
-        self.execute('DELETE CWRelation X WHERE X eid %(x)s', {'x': concerne2_rdef_eid}, 'x')
+        self.execute('DELETE CWRelation X WHERE X eid %(x)s', {'x': concerne2_rdef_eid})
         self.commit()
         self.failUnless('concerne2' in schema['CWUser'].subject_relations())
         self.failIf('concerne2' in schema['Societe2'].subject_relations())
@@ -265,7 +265,7 @@
         attreid = self.execute('INSERT CWAttribute X: X cardinality "11", X defaultval "noname", X indexed TRUE, X relation_type RT, X from_entity E, X to_entity F '
                                'WHERE RT name "messageid", E name "BaseTransition", F name "String"')[0][0]
         assert self.execute('SET X read_permission Y WHERE X eid %(x)s, Y name "managers"',
-                     {'x': attreid}, 'x')
+                     {'x': attreid})
         self.commit()
         self.schema.rebuild_infered_relations()
         self.failUnless('Transition' in self.schema['messageid'].subjects())
@@ -316,10 +316,10 @@
         if not getattr(cstr, 'eid', None):
             self.skip('start me alone') # bug in schema reloading, constraint's eid not restored
         self.execute('SET X value %(v)s WHERE X eid %(x)s',
-                     {'x': cstr.eid, 'v': u"u'normal', u'auto', u'new'"}, 'x')
+                     {'x': cstr.eid, 'v': u"u'normal', u'auto', u'new'"})
         self.execute('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X '
                      'WHERE CT name %(ct)s, EDEF eid %(x)s',
-                     {'ct': 'SizeConstraint', 'value': u'max=10', 'x': rdef.eid}, 'x')
+                     {'ct': 'SizeConstraint', 'value': u'max=10', 'x': rdef.eid})
         self.commit()
         cstr = rdef.constraint_by_type('StaticVocabularyConstraint')
         self.assertEquals(cstr.values, (u'normal', u'auto', u'new'))
--- a/hooks/workflow.py	Thu May 06 08:24:46 2010 +0200
+++ b/hooks/workflow.py	Mon Jul 19 15:36:16 2010 +0200
@@ -146,7 +146,7 @@
 
 class WorkflowHook(hook.Hook):
     __abstract__ = True
-    category = 'worfklow'
+    category = 'workflow'
 
 
 class SetInitialStateHook(WorkflowHook):
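
Note on the one-line fix above: it is not only cosmetic. Hooks are grouped by their category attribute, and callers that enable or disable whole categories match on that string, so the misspelt 'worfklow' escaped any filtering on 'workflow'. A minimal illustration of category-based filtering in plain Python (not the cubicweb registry API):

    class Hook(object):
        category = None

    class WorkflowHook(Hook):
        category = 'workflow'

    def active_hooks(hooks, disabled):
        # keep only the hooks whose category has not been disabled
        return [h for h in hooks if h.category not in disabled]

    # with the old typo, disabling 'workflow' would have left these hooks enabled
    assert active_hooks([WorkflowHook], disabled={'workflow'}) == []
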
--- a/i18n/en.po	Thu May 06 08:24:46 2010 +0200
+++ b/i18n/en.po	Mon Jul 19 15:36:16 2010 +0200
@@ -1,11 +1,11 @@
 # cubicweb i18n catalog
-# Copyright 2003-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# Copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # Logilab <contact@logilab.fr>
 msgid ""
 msgstr ""
 "Project-Id-Version: 2.0\n"
 "POT-Creation-Date: 2006-01-12 17:35+CET\n"
-"PO-Revision-Date: 2009-09-17 11:53+0200\n"
+"PO-Revision-Date: 2010-05-16 18:58+0200\n"
 "Last-Translator: Sylvain Thenault <sylvain.thenault@logilab.fr>\n"
 "Language-Team: English <devel@logilab.fr.org>\n"
 "MIME-Version: 1.0\n"
@@ -30,6 +30,9 @@
 msgid "  from state %(fromstate)s to state %(tostate)s\n"
 msgstr ""
 
+msgid " :"
+msgstr ":"
+
 #, python-format
 msgid "%(attr)s set to %(newvalue)s"
 msgstr ""
@@ -51,6 +54,10 @@
 msgstr ""
 
 #, python-format
+msgid "%(value)r doesn't match the %(regexp)r regular expression"
+msgstr ""
+
+#, python-format
 msgid "%d days"
 msgstr ""
 
@@ -165,14 +172,15 @@
 msgstr "1 0..1"
 
 #, python-format
+msgid "<%s not specified>"
+msgstr ""
+
+#, python-format
 msgid ""
 "<div>This schema of the data model <em>excludes</em> the meta-data, but you "
 "can also display a <a href=\"%s\">complete schema with meta-data</a>.</div>"
 msgstr ""
 
-msgid "<no value>"
-msgstr ""
-
 msgid "?*"
 msgstr "0..1 0..n"
 
@@ -188,13 +196,13 @@
 msgid "AND"
 msgstr ""
 
-msgid "Add permissions"
-msgstr ""
-
 msgid "Any"
 msgstr ""
 
-msgid "Attributes"
+msgid "Attributes permissions:"
+msgstr ""
+
+msgid "Attributes with non default permissions:"
 msgstr ""
 
 # schema pot file, generated on 2009-09-16 16:46:55
@@ -221,6 +229,9 @@
 msgid "BoundConstraint"
 msgstr "bound constraint"
 
+msgid "BoundaryConstraint"
+msgstr ""
+
 msgid "Browse by category"
 msgstr ""
 
@@ -362,13 +373,10 @@
 msgid "Decimal_plural"
 msgstr "Decimal numbers"
 
-msgid "Delete permissions"
-msgstr ""
-
 msgid "Do you want to delete the following element(s) ?"
 msgstr ""
 
-msgid "Download page as pdf"
+msgid "Download schema as OWL"
 msgstr ""
 
 msgctxt "inlined:CWUser.use_email.subject"
@@ -384,6 +392,9 @@
 msgid "Entities"
 msgstr ""
 
+msgid "Entity types"
+msgstr ""
+
 msgid "ExternalUri"
 msgstr "External Uri"
 
@@ -411,6 +422,9 @@
 msgid "Help"
 msgstr ""
 
+msgid "Index"
+msgstr ""
+
 msgid "Instance"
 msgstr ""
 
@@ -504,15 +518,27 @@
 msgid "No result matching query"
 msgstr ""
 
+msgid "Non exhaustive list of views that may apply to entities of this type"
+msgstr ""
+
 msgid "OR"
 msgstr ""
 
+msgid "Parent classes:"
+msgstr ""
+
 msgid "Password"
 msgstr "Password"
 
 msgid "Password_plural"
 msgstr "Passwords"
 
+msgid "Permissions for entity types"
+msgstr ""
+
+msgid "Permissions for relations"
+msgstr ""
+
 msgid "Please note that this is only a shallow copy"
 msgstr ""
 
@@ -531,9 +557,6 @@
 msgid "RQLVocabularyConstraint"
 msgstr "RQL vocabulary constraint"
 
-msgid "Read permissions"
-msgstr ""
-
 msgid "Recipients:"
 msgstr ""
 
@@ -543,6 +566,9 @@
 msgid "Registry's content"
 msgstr ""
 
+msgid "Relation types"
+msgstr ""
+
 msgid "Relations"
 msgstr ""
 
@@ -580,6 +606,9 @@
 msgid "String_plural"
 msgstr "Strings"
 
+msgid "Sub-classes:"
+msgstr ""
+
 msgid "SubWorkflowExitPoint"
 msgstr "Subworkflow exit-point"
 
@@ -606,6 +635,9 @@
 msgid "The view %s could not be found"
 msgstr ""
 
+msgid "There is no default workflow"
+msgstr ""
+
 msgid "This BaseTransition"
 msgstr "This abstract transition"
 
@@ -672,6 +704,9 @@
 msgid "This WorkflowTransition"
 msgstr "This workflow-transition"
 
+msgid "This entity type permissions:"
+msgstr ""
+
 msgid "Time"
 msgstr "Time"
 
@@ -696,9 +731,6 @@
 msgid "Unreachable objects"
 msgstr ""
 
-msgid "Update permissions"
-msgstr ""
-
 msgid "Used by:"
 msgstr ""
 
@@ -776,183 +808,12 @@
 msgid "abstract base class for transitions"
 msgstr ""
 
-msgid "access type"
-msgstr ""
-
 msgid "action(s) on this selection"
 msgstr ""
 
 msgid "actions"
 msgstr ""
 
-msgid "actions_about"
-msgstr ""
-
-msgid "actions_about_description"
-msgstr ""
-
-msgid "actions_addentity"
-msgstr "add an entity of this type"
-
-msgid "actions_addentity_description"
-msgstr ""
-
-msgid "actions_addrelated"
-msgstr ""
-
-msgid "actions_addrelated_description"
-msgstr ""
-
-msgid "actions_cancel"
-msgstr "cancel the selection"
-
-msgid "actions_cancel_description"
-msgstr ""
-
-msgid "actions_changelog"
-msgstr ""
-
-msgid "actions_changelog_description"
-msgstr ""
-
-msgid "actions_copy"
-msgstr "copy"
-
-msgid "actions_copy_description"
-msgstr ""
-
-msgid "actions_delete"
-msgstr "delete"
-
-msgid "actions_delete_description"
-msgstr ""
-
-msgid "actions_download_as_owl"
-msgstr "download as owl"
-
-msgid "actions_download_as_owl_description"
-msgstr ""
-
-msgid "actions_edit"
-msgstr "modify"
-
-msgid "actions_edit_description"
-msgstr ""
-
-msgid "actions_embed"
-msgstr "embed"
-
-msgid "actions_embed_description"
-msgstr ""
-
-msgid "actions_entitiesoftype"
-msgstr ""
-
-msgid "actions_entitiesoftype_description"
-msgstr ""
-
-msgid "actions_follow"
-msgstr "follow"
-
-msgid "actions_follow_description"
-msgstr ""
-
-msgid "actions_help"
-msgstr ""
-
-msgid "actions_help_description"
-msgstr ""
-
-msgid "actions_logout"
-msgstr "logout"
-
-msgid "actions_logout_description"
-msgstr ""
-
-msgid "actions_manage"
-msgstr "manage"
-
-msgid "actions_manage_description"
-msgstr ""
-
-msgid "actions_managepermission"
-msgstr "manage permissions"
-
-msgid "actions_managepermission_description"
-msgstr ""
-
-msgid "actions_muledit"
-msgstr "modify all"
-
-msgid "actions_muledit_description"
-msgstr ""
-
-msgid "actions_myinfos"
-msgstr "my profile"
-
-msgid "actions_myinfos_description"
-msgstr ""
-
-msgid "actions_myprefs"
-msgstr "my preferences"
-
-msgid "actions_myprefs_description"
-msgstr ""
-
-msgid "actions_poweredby"
-msgstr ""
-
-msgid "actions_poweredby_description"
-msgstr ""
-
-msgid "actions_prefs"
-msgstr "preferences"
-
-msgid "actions_prefs_description"
-msgstr ""
-
-msgid "actions_schema"
-msgstr "schema"
-
-msgid "actions_schema_description"
-msgstr ""
-
-msgid "actions_select"
-msgstr "select"
-
-msgid "actions_select_description"
-msgstr ""
-
-msgid "actions_sendemail"
-msgstr "send email"
-
-msgid "actions_sendemail_description"
-msgstr ""
-
-msgid "actions_siteconfig"
-msgstr "site configuration"
-
-msgid "actions_siteconfig_description"
-msgstr ""
-
-msgid "actions_siteinfo"
-msgstr "site information"
-
-msgid "actions_siteinfo_description"
-msgstr ""
-
-msgid "actions_view"
-msgstr "view"
-
-msgid "actions_view_description"
-msgstr ""
-
-msgid "actions_workflow"
-msgstr "see workflow"
-
-msgid "actions_workflow_description"
-msgstr ""
-
 msgid "activate"
 msgstr ""
 
@@ -1198,9 +1059,6 @@
 msgid "attribute"
 msgstr ""
 
-msgid "attributes with modified permissions:"
-msgstr ""
-
 msgid "august"
 msgstr ""
 
@@ -1517,9 +1375,6 @@
 msgid "condition"
 msgstr "condition"
 
-msgid "condition:"
-msgstr "condtion:"
-
 msgctxt "RQLExpression"
 msgid "condition_object"
 msgstr "condition of"
@@ -1527,6 +1382,9 @@
 msgid "condition_object"
 msgstr "condition of"
 
+msgid "conditions"
+msgstr ""
+
 msgid "config mode"
 msgstr ""
 
@@ -1563,6 +1421,9 @@
 msgid "constraints applying on this relation"
 msgstr ""
 
+msgid "content type"
+msgstr ""
+
 msgid "contentnavigation"
 msgstr "contextual components"
 
@@ -1578,12 +1439,6 @@
 msgid "contentnavigation_metadata_description"
 msgstr ""
 
-msgid "contentnavigation_pdfview"
-msgstr "view page as pdf icon"
-
-msgid "contentnavigation_pdfview_description"
-msgstr ""
-
 msgid "contentnavigation_prevnext"
 msgstr "previous / next entity"
 
@@ -1768,6 +1623,9 @@
 msgid "creation"
 msgstr ""
 
+msgid "creation date"
+msgstr ""
+
 msgid "creation time of an entity"
 msgstr ""
 
@@ -1803,18 +1661,33 @@
 msgid "custom_workflow_object"
 msgstr "custom workflow of"
 
-msgid "cwetype-schema-image"
-msgstr "schema"
-
-msgid "cwetype-schema-permissions"
+msgid "cwetype-box"
+msgstr "\"box\" view"
+
+msgid "cwetype-description"
+msgstr "description"
+
+msgid "cwetype-permissions"
 msgstr "permissions"
 
-msgid "cwetype-schema-text"
-msgstr "description"
+msgid "cwetype-views"
+msgstr "views"
 
 msgid "cwetype-workflow"
 msgstr "workflow"
 
+msgid "cwgroup-main"
+msgstr "description"
+
+msgid "cwgroup-permissions"
+msgstr "permissions"
+
+msgid "cwrtype-description"
+msgstr "description"
+
+msgid "cwrtype-permissions"
+msgstr "permissions"
+
 msgid "cwuri"
 msgstr "internal uri"
 
@@ -1842,6 +1715,9 @@
 msgid "default user workflow"
 msgstr ""
 
+msgid "default value"
+msgstr ""
+
 msgid "default workflow for an entity type"
 msgstr ""
 
@@ -2068,18 +1944,12 @@
 msgid "detach attached file"
 msgstr ""
 
-msgid "display order of the action"
-msgstr ""
-
 msgid "display order of the box"
 msgstr ""
 
 msgid "display order of the component"
 msgstr ""
 
-msgid "display the action or not"
-msgstr ""
-
 msgid "display the box or not"
 msgstr ""
 
@@ -2285,6 +2155,9 @@
 msgid "final"
 msgstr ""
 
+msgid "first name"
+msgstr ""
+
 msgid "firstname"
 msgstr ""
 
@@ -2398,10 +2271,6 @@
 msgid "granted to groups"
 msgstr ""
 
-#, python-format
-msgid "graphical representation of %s"
-msgstr ""
-
 msgid "graphical representation of the instance'schema"
 msgstr ""
 
@@ -2425,9 +2294,6 @@
 msgid "groups to which the permission is granted"
 msgstr ""
 
-msgid "groups:"
-msgstr ""
-
 msgid "guests"
 msgstr ""
 
@@ -2466,6 +2332,18 @@
 msgid "i18n_login_popup"
 msgstr "login"
 
+msgid "i18ncard_*"
+msgstr "0..n"
+
+msgid "i18ncard_+"
+msgstr "1..n"
+
+msgid "i18ncard_1"
+msgstr "1"
+
+msgid "i18ncard_?"
+msgstr "0..1"
+
 msgid "i18nprevnext_next"
 msgstr "next"
 
@@ -2501,12 +2379,6 @@
 msgid "image"
 msgstr ""
 
-msgid "in memory entity schema"
-msgstr ""
-
-msgid "in memory relation schema"
-msgstr ""
-
 msgid "in_group"
 msgstr "in group"
 
@@ -2537,9 +2409,6 @@
 msgid "incorrect value (%(value)s) for type \"%(type)s\""
 msgstr ""
 
-msgid "index"
-msgstr ""
-
 msgid "index this attribute's value in the plain text index"
 msgstr ""
 
@@ -2609,9 +2478,19 @@
 msgid "invalid action %r"
 msgstr ""
 
+#, python-format
+msgid "invalid value %(value)s, it must be one of %(choices)s"
+msgstr ""
+
 msgid "is"
 msgstr ""
 
+msgid "is object of:"
+msgstr ""
+
+msgid "is subject of:"
+msgstr ""
+
 msgid ""
 "is the subject/object entity of the relation composed of the other ? This "
 "implies that when the composite is deleted, composants are also deleted."
@@ -2659,6 +2538,12 @@
 msgid "last connection date"
 msgstr ""
 
+msgid "last login time"
+msgstr ""
+
+msgid "last name"
+msgstr ""
+
 msgid "last usage"
 msgstr ""
 
@@ -2715,6 +2600,9 @@
 msgid "log in"
 msgstr ""
 
+msgid "log out first"
+msgstr ""
+
 msgid "login"
 msgstr ""
 
@@ -2803,6 +2691,9 @@
 msgid "more actions"
 msgstr ""
 
+msgid "more info about this workflow"
+msgstr ""
+
 msgid "multiple edit"
 msgstr ""
 
@@ -2944,8 +2835,8 @@
 msgid "object"
 msgstr ""
 
-msgid "object_plural:"
-msgstr "objects:"
+msgid "object type"
+msgstr ""
 
 msgid "october"
 msgstr ""
@@ -2971,6 +2862,9 @@
 msgid "opened web sessions"
 msgstr ""
 
+msgid "options"
+msgstr ""
+
 msgid "order"
 msgstr ""
 
@@ -3026,10 +2920,7 @@
 msgid "permission"
 msgstr ""
 
-msgid "permissions for entities"
-msgstr ""
-
-msgid "permissions for relations"
+msgid "permissions"
 msgstr ""
 
 msgid "permissions for this entity"
@@ -3106,11 +2997,17 @@
 msgid "project"
 msgstr ""
 
+msgid "rdef-description"
+msgstr "description"
+
+msgid "rdef-permissions"
+msgstr "permissions"
+
 msgid "read"
 msgstr ""
 
 msgid "read_perm"
-msgstr "read perm"
+msgstr "read permission"
 
 msgid "read_permission"
 msgstr "can be read by"
@@ -3147,6 +3044,9 @@
 msgid "related entity has no workflow set"
 msgstr ""
 
+msgid "relation"
+msgstr ""
+
 #, python-format
 msgid "relation %(relname)s of %(ent)s"
 msgstr ""
@@ -3175,9 +3075,6 @@
 msgid "relation_type_object"
 msgstr "relation definitions"
 
-msgid "relations"
-msgstr ""
-
 msgid "relations deleted"
 msgstr ""
 
@@ -3219,6 +3116,9 @@
 msgid "require_permission_object"
 msgstr "required by"
 
+msgid "required"
+msgstr ""
+
 msgid "required attribute"
 msgstr ""
 
@@ -3262,11 +3162,17 @@
 msgid "schema's permissions definitions"
 msgstr ""
 
-msgid "schema-image"
-msgstr "schema"
-
-msgid "schema-text"
-msgstr "description"
+msgid "schema-diagram"
+msgstr "diagram"
+
+msgid "schema-entity-types"
+msgstr "entities"
+
+msgid "schema-relation-types"
+msgstr "relations"
+
+msgid "schema-security"
+msgstr "permissions"
 
 msgid "search"
 msgstr ""
@@ -3453,12 +3359,12 @@
 msgid "subject"
 msgstr ""
 
+msgid "subject type"
+msgstr ""
+
 msgid "subject/object cardinality"
 msgstr ""
 
-msgid "subject_plural:"
-msgstr "subjects:"
-
 msgid "subworkflow"
 msgstr ""
 
@@ -3857,6 +3763,9 @@
 msgid "used to grant a permission to a group"
 msgstr ""
 
+msgid "user"
+msgstr ""
+
 #, python-format
 msgid ""
 "user %s has made the following change(s):\n"
@@ -3895,6 +3804,26 @@
 msgid "value associated to this key is not editable manually"
 msgstr ""
 
+#, python-format
+msgid "value must be %(op)s %(boundary)s"
+msgstr ""
+
+#, python-format
+msgid "value must be <= %(boundary)s"
+msgstr ""
+
+#, python-format
+msgid "value must be >= %(boundary)s"
+msgstr ""
+
+#, python-format
+msgid "value should have maximum size of %s"
+msgstr ""
+
+#, python-format
+msgid "value should have minimum size of %s"
+msgstr ""
+
 msgid "vcard"
 msgstr ""
 
@@ -3913,15 +3842,18 @@
 msgid "view history"
 msgstr ""
 
+msgid "view identifier"
+msgstr ""
+
+msgid "view title"
+msgstr ""
+
 msgid "view workflow"
 msgstr ""
 
 msgid "view_index"
 msgstr "index"
 
-msgid "views"
-msgstr ""
-
 msgid "visible"
 msgstr ""
 
@@ -3944,6 +3876,12 @@
 msgid "wf_info_for_object"
 msgstr "workflow history"
 
+msgid "wf_tab_info"
+msgstr ""
+
+msgid "wfgraph"
+msgstr ""
+
 msgid ""
 "when multiple addresses are equivalent (such as python-projects@logilab.org "
 "and python-projects@lists.logilab.org), set this to indicate which is the "
--- a/i18n/es.po	Thu May 06 08:24:46 2010 +0200
+++ b/i18n/es.po	Mon Jul 19 15:36:16 2010 +0200
@@ -1,10 +1,10 @@
 # cubicweb i18n catalog
-# Copyright 2003-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# Copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # Logilab <contact@logilab.fr>
 msgid ""
 msgstr ""
 "Project-Id-Version: cubicweb 2.46.0\n"
-"PO-Revision-Date: 2008-11-27 07:59+0100\n"
+"PO-Revision-Date: 2010-11-27 07:59+0100\n"
 "Last-Translator: Celso Flores<jcelsoflores@gmail.com>\n"
 "Language-Team: es <contact@logilab.fr>\n"
 "MIME-Version: 1.0\n"
@@ -35,6 +35,9 @@
 msgid "  from state %(fromstate)s to state %(tostate)s\n"
 msgstr "  del estado %(fromstate)s hacia el estado %(tostate)s\n"
 
+msgid " :"
+msgstr ""
+
 #, python-format
 msgid "%(attr)s set to %(newvalue)s"
 msgstr ""
@@ -56,6 +59,10 @@
 msgstr "%(subject)s %(etype)s #%(eid)s (%(login)s)"
 
 #, python-format
+msgid "%(value)r doesn't match the %(regexp)r regular expression"
+msgstr ""
+
+#, python-format
 msgid "%d days"
 msgstr "%d días"
 
@@ -170,6 +177,10 @@
 msgstr "1 0..1"
 
 #, python-format
+msgid "<%s not specified>"
+msgstr ""
+
+#, python-format
 msgid ""
 "<div>This schema of the data model <em>excludes</em> the meta-data, but you "
 "can also display a <a href=\"%s\">complete schema with meta-data</a>.</div>"
@@ -178,9 +189,6 @@
 "pero se puede ver a un <a href=\"%s\">modelo completo con meta-datos</a>.</"
 "div>"
 
-msgid "<no value>"
-msgstr ""
-
 msgid "?*"
 msgstr "0..1 0..n"
 
@@ -196,14 +204,14 @@
 msgid "AND"
 msgstr "Y"
 
-msgid "Add permissions"
-msgstr "Añadir autorizaciónes"
-
 msgid "Any"
 msgstr "Cualquiera"
 
-msgid "Attributes"
-msgstr "Atributos"
+msgid "Attributes permissions:"
+msgstr ""
+
+msgid "Attributes with non default permissions:"
+msgstr ""
 
 # schema pot file, generated on 2009-09-16 16:46:55
 #
@@ -229,6 +237,9 @@
 msgid "BoundConstraint"
 msgstr ""
 
+msgid "BoundaryConstraint"
+msgstr ""
+
 msgid "Browse by category"
 msgstr "Busca por categoría"
 
@@ -370,13 +381,10 @@
 msgid "Decimal_plural"
 msgstr "Decimales"
 
-msgid "Delete permissions"
-msgstr "Autorización de suprimir"
-
 msgid "Do you want to delete the following element(s) ?"
 msgstr "Desea suprimir el(los) elemento(s) siguiente(s)"
 
-msgid "Download page as pdf"
+msgid "Download schema as OWL"
 msgstr ""
 
 msgctxt "inlined:CWUser.use_email.subject"
@@ -392,6 +400,9 @@
 msgid "Entities"
 msgstr "Entidades"
 
+msgid "Entity types"
+msgstr ""
+
 msgid "ExternalUri"
 msgstr ""
 
@@ -419,6 +430,9 @@
 msgid "Help"
 msgstr ""
 
+msgid "Index"
+msgstr ""
+
 msgid "Instance"
 msgstr ""
 
@@ -512,15 +526,27 @@
 msgid "No result matching query"
 msgstr "Ningún resultado corresponde a su búsqueda"
 
+msgid "Non exhaustive list of views that may apply to entities of this type"
+msgstr ""
+
 msgid "OR"
 msgstr "O"
 
+msgid "Parent classes:"
+msgstr ""
+
 msgid "Password"
 msgstr "Contraseña"
 
 msgid "Password_plural"
 msgstr "Contraseñas"
 
+msgid "Permissions for entity types"
+msgstr ""
+
+msgid "Permissions for relations"
+msgstr ""
+
 msgid "Please note that this is only a shallow copy"
 msgstr "Recuerde que no es más que una copia superficial"
 
@@ -539,9 +565,6 @@
 msgid "RQLVocabularyConstraint"
 msgstr ""
 
-msgid "Read permissions"
-msgstr "Autorización de leer"
-
 msgid "Recipients:"
 msgstr "Destinatarios"
 
@@ -551,6 +574,9 @@
 msgid "Registry's content"
 msgstr ""
 
+msgid "Relation types"
+msgstr ""
+
 msgid "Relations"
 msgstr "Relaciones"
 
@@ -588,6 +614,9 @@
 msgid "String_plural"
 msgstr "Cadenas de caracteres"
 
+msgid "Sub-classes:"
+msgstr ""
+
 msgid "SubWorkflowExitPoint"
 msgstr ""
 
@@ -614,6 +643,9 @@
 msgid "The view %s could not be found"
 msgstr "La vista %s no ha podido ser encontrada"
 
+msgid "There is no default workflow"
+msgstr ""
+
 msgid "This BaseTransition"
 msgstr ""
 
@@ -680,6 +712,9 @@
 msgid "This WorkflowTransition"
 msgstr ""
 
+msgid "This entity type permissions:"
+msgstr ""
+
 msgid "Time"
 msgstr "Hora"
 
@@ -704,9 +739,6 @@
 msgid "Unreachable objects"
 msgstr ""
 
-msgid "Update permissions"
-msgstr "Autorización de modificar"
-
 msgid "Used by:"
 msgstr "Utilizado por :"
 
@@ -799,183 +831,12 @@
 msgid "abstract base class for transitions"
 msgstr ""
 
-msgid "access type"
-msgstr "Tipo de Acceso"
-
 msgid "action(s) on this selection"
 msgstr "acción(es) en esta selección"
 
 msgid "actions"
 msgstr "acciones"
 
-msgid "actions_about"
-msgstr ""
-
-msgid "actions_about_description"
-msgstr ""
-
-msgid "actions_addentity"
-msgstr "agregar una entidad de este tipo"
-
-msgid "actions_addentity_description"
-msgstr ""
-
-msgid "actions_addrelated"
-msgstr ""
-
-msgid "actions_addrelated_description"
-msgstr ""
-
-msgid "actions_cancel"
-msgstr "Anular"
-
-msgid "actions_cancel_description"
-msgstr ""
-
-msgid "actions_changelog"
-msgstr ""
-
-msgid "actions_changelog_description"
-msgstr ""
-
-msgid "actions_copy"
-msgstr "Copiar"
-
-msgid "actions_copy_description"
-msgstr ""
-
-msgid "actions_delete"
-msgstr "Eliminar"
-
-msgid "actions_delete_description"
-msgstr ""
-
-msgid "actions_download_as_owl"
-msgstr "Download como OWL"
-
-msgid "actions_download_as_owl_description"
-msgstr ""
-
-msgid "actions_edit"
-msgstr "Modificar"
-
-msgid "actions_edit_description"
-msgstr ""
-
-msgid "actions_embed"
-msgstr "Embarcar"
-
-msgid "actions_embed_description"
-msgstr ""
-
-msgid "actions_entitiesoftype"
-msgstr ""
-
-msgid "actions_entitiesoftype_description"
-msgstr ""
-
-msgid "actions_follow"
-msgstr "Seguir"
-
-msgid "actions_follow_description"
-msgstr ""
-
-msgid "actions_help"
-msgstr ""
-
-msgid "actions_help_description"
-msgstr ""
-
-msgid "actions_logout"
-msgstr "Desconectarse"
-
-msgid "actions_logout_description"
-msgstr ""
-
-msgid "actions_manage"
-msgstr "Administración del sitio"
-
-msgid "actions_manage_description"
-msgstr ""
-
-msgid "actions_managepermission"
-msgstr "Administración de autorizaciónes"
-
-msgid "actions_managepermission_description"
-msgstr ""
-
-msgid "actions_muledit"
-msgstr "Edición múltiple"
-
-msgid "actions_muledit_description"
-msgstr ""
-
-msgid "actions_myinfos"
-msgstr "Información personal"
-
-msgid "actions_myinfos_description"
-msgstr ""
-
-msgid "actions_myprefs"
-msgstr "Preferencias del usuario"
-
-msgid "actions_myprefs_description"
-msgstr ""
-
-msgid "actions_poweredby"
-msgstr ""
-
-msgid "actions_poweredby_description"
-msgstr ""
-
-msgid "actions_prefs"
-msgstr "Preferencias"
-
-msgid "actions_prefs_description"
-msgstr ""
-
-msgid "actions_schema"
-msgstr "Ver el esquema"
-
-msgid "actions_schema_description"
-msgstr ""
-
-msgid "actions_select"
-msgstr "Seleccionar"
-
-msgid "actions_select_description"
-msgstr ""
-
-msgid "actions_sendemail"
-msgstr "Enviar un email"
-
-msgid "actions_sendemail_description"
-msgstr ""
-
-msgid "actions_siteconfig"
-msgstr "Configuración del sitio"
-
-msgid "actions_siteconfig_description"
-msgstr ""
-
-msgid "actions_siteinfo"
-msgstr ""
-
-msgid "actions_siteinfo_description"
-msgstr ""
-
-msgid "actions_view"
-msgstr "Ver"
-
-msgid "actions_view_description"
-msgstr ""
-
-msgid "actions_workflow"
-msgstr "Ver el workflow"
-
-msgid "actions_workflow_description"
-msgstr ""
-
 msgid "activate"
 msgstr "Activar"
 
@@ -1223,9 +1084,6 @@
 msgid "attribute"
 msgstr "Atributo"
 
-msgid "attributes with modified permissions:"
-msgstr "atributos con autorizaciónes modificadas:"
-
 msgid "august"
 msgstr "Agosto"
 
@@ -1548,9 +1406,6 @@
 msgid "condition"
 msgstr ""
 
-msgid "condition:"
-msgstr "condición:"
-
 msgctxt "RQLExpression"
 msgid "condition_object"
 msgstr ""
@@ -1558,6 +1413,9 @@
 msgid "condition_object"
 msgstr "condición de"
 
+msgid "conditions"
+msgstr ""
+
 msgid "config mode"
 msgstr ""
 
@@ -1594,6 +1452,9 @@
 msgid "constraints applying on this relation"
 msgstr "Restricciones que se aplican a esta relación"
 
+msgid "content type"
+msgstr ""
+
 msgid "contentnavigation"
 msgstr "Componentes contextuales"
 
@@ -1609,12 +1470,6 @@
 msgid "contentnavigation_metadata_description"
 msgstr ""
 
-msgid "contentnavigation_pdfview"
-msgstr ""
-
-msgid "contentnavigation_pdfview_description"
-msgstr ""
-
 msgid "contentnavigation_prevnext"
 msgstr "Elemento anterior / siguiente"
 
@@ -1809,6 +1664,9 @@
 msgid "creation"
 msgstr "Creación"
 
+msgid "creation date"
+msgstr ""
+
 msgid "creation time of an entity"
 msgstr "Fecha de creación de una entidad"
 
@@ -1844,17 +1702,32 @@
 msgid "custom_workflow_object"
 msgstr ""
 
-msgid "cwetype-schema-image"
-msgstr "Esquema"
-
-msgid "cwetype-schema-permissions"
-msgstr "Autorizaciónes"
-
-msgid "cwetype-schema-text"
-msgstr "Modelo de datos"
+msgid "cwetype-box"
+msgstr ""
+
+msgid "cwetype-description"
+msgstr ""
+
+msgid "cwetype-permissions"
+msgstr ""
+
+msgid "cwetype-views"
+msgstr ""
 
 msgid "cwetype-workflow"
-msgstr "Workflow"
+msgstr ""
+
+msgid "cwgroup-main"
+msgstr ""
+
+msgid "cwgroup-permissions"
+msgstr ""
+
+msgid "cwrtype-description"
+msgstr ""
+
+msgid "cwrtype-permissions"
+msgstr ""
 
 msgid "cwuri"
 msgstr ""
@@ -1883,6 +1756,9 @@
 msgid "default user workflow"
 msgstr ""
 
+msgid "default value"
+msgstr ""
+
 msgid "default workflow for an entity type"
 msgstr ""
 
@@ -2111,18 +1987,12 @@
 msgid "detach attached file"
 msgstr "soltar el archivo existente"
 
-msgid "display order of the action"
-msgstr "Orden de aparición de la acción"
-
 msgid "display order of the box"
 msgstr "Orden de aparición de la caja"
 
 msgid "display order of the component"
 msgstr "Orden de aparición del componente"
 
-msgid "display the action or not"
-msgstr "Mostrar la acción o no"
-
 msgid "display the box or not"
 msgstr "Mostrar la caja o no"
 
@@ -2335,6 +2205,9 @@
 msgid "final"
 msgstr ""
 
+msgid "first name"
+msgstr ""
+
 msgid "firstname"
 msgstr "Nombre"
 
@@ -2448,10 +2321,6 @@
 msgid "granted to groups"
 msgstr "Otorgado a los grupos"
 
-#, python-format
-msgid "graphical representation of %s"
-msgstr ""
-
 msgid "graphical representation of the instance'schema"
 msgstr ""
 
@@ -2475,9 +2344,6 @@
 msgid "groups to which the permission is granted"
 msgstr "Grupos quienes tienen otorgada esta autorización"
 
-msgid "groups:"
-msgstr "Grupos :"
-
 msgid "guests"
 msgstr "Invitados"
 
@@ -2522,6 +2388,18 @@
 msgid "i18n_login_popup"
 msgstr "Identificarse"
 
+msgid "i18ncard_*"
+msgstr ""
+
+msgid "i18ncard_+"
+msgstr ""
+
+msgid "i18ncard_1"
+msgstr ""
+
+msgid "i18ncard_?"
+msgstr ""
+
 msgid "i18nprevnext_next"
 msgstr "Siguiente"
 
@@ -2559,12 +2437,6 @@
 msgid "image"
 msgstr "Imagen"
 
-msgid "in memory entity schema"
-msgstr "Esquema de la entidad en memoria"
-
-msgid "in memory relation schema"
-msgstr "Esquema de la relación en memoria"
-
 msgid "in_group"
 msgstr "En el grupo"
 
@@ -2595,9 +2467,6 @@
 msgid "incorrect value (%(value)s) for type \"%(type)s\""
 msgstr "valor %(value)s incorrecto para el tipo \"%(type)s\""
 
-msgid "index"
-msgstr "Indice"
-
 msgid "index this attribute's value in the plain text index"
 msgstr "Indexar el valor de este atributo en el índice de texto simple"
 
@@ -2668,9 +2537,19 @@
 msgid "invalid action %r"
 msgstr "Acción %r invalida"
 
+#, python-format
+msgid "invalid value %(value)s, it must be one of %(choices)s"
+msgstr ""
+
 msgid "is"
 msgstr "es"
 
+msgid "is object of:"
+msgstr "es objeto de"
+
+msgid "is subject of:"
+msgstr "es sujeto de"
+
 msgid ""
 "is the subject/object entity of the relation composed of the other ? This "
 "implies that when the composite is deleted, composants are also deleted."
@@ -2722,6 +2601,12 @@
 msgid "last connection date"
 msgstr "Ultima fecha de conexión"
 
+msgid "last login time"
+msgstr ""
+
+msgid "last name"
+msgstr ""
+
 msgid "last usage"
 msgstr ""
 
@@ -2783,6 +2668,9 @@
 msgid "log in"
 msgstr "Identificarse"
 
+msgid "log out first"
+msgstr ""
+
 msgid "login"
 msgstr "Clave de acesso"
 
@@ -2871,6 +2759,9 @@
 msgid "more actions"
 msgstr "mas acciones"
 
+msgid "more info about this workflow"
+msgstr ""
+
 msgid "multiple edit"
 msgstr "Edicion multiple"
 
@@ -3018,8 +2909,8 @@
 msgid "object"
 msgstr "objeto"
 
-msgid "object_plural:"
-msgstr "objetos:"
+msgid "object type"
+msgstr ""
 
 msgid "october"
 msgstr "octubre"
@@ -3045,6 +2936,9 @@
 msgid "opened web sessions"
 msgstr ""
 
+msgid "options"
+msgstr ""
+
 msgid "order"
 msgstr "orden"
 
@@ -3099,11 +2993,8 @@
 msgid "permission"
 msgstr "Permiso"
 
-msgid "permissions for entities"
-msgstr "autorizaciónes para entidades"
-
-msgid "permissions for relations"
-msgstr "autorizaciónes para relaciones"
+msgid "permissions"
+msgstr ""
 
 msgid "permissions for this entity"
 msgstr "Permisos para esta entidad"
@@ -3179,6 +3070,12 @@
 msgid "project"
 msgstr "Proyecto"
 
+msgid "rdef-description"
+msgstr ""
+
+msgid "rdef-permissions"
+msgstr ""
+
 msgid "read"
 msgstr "Lectura"
 
@@ -3220,6 +3117,9 @@
 msgid "related entity has no workflow set"
 msgstr ""
 
+msgid "relation"
+msgstr ""
+
 #, python-format
 msgid "relation %(relname)s of %(ent)s"
 msgstr "relación %(relname)s de %(ent)s"
@@ -3248,9 +3148,6 @@
 msgid "relation_type_object"
 msgstr "Definición"
 
-msgid "relations"
-msgstr "relaciones"
-
 msgid "relations deleted"
 msgstr "Relaciones eliminadas"
 
@@ -3292,6 +3189,9 @@
 msgid "require_permission_object"
 msgstr "Requerido por autorización"
 
+msgid "required"
+msgstr ""
+
 msgid "required attribute"
 msgstr "Atributo requerido"
 
@@ -3339,11 +3239,17 @@
 msgid "schema's permissions definitions"
 msgstr "definiciones de permisos del esquema"
 
-msgid "schema-image"
-msgstr "esquema imagen"
-
-msgid "schema-text"
-msgstr "esquema text"
+msgid "schema-diagram"
+msgstr ""
+
+msgid "schema-entity-types"
+msgstr ""
+
+msgid "schema-relation-types"
+msgstr ""
+
+msgid "schema-security"
+msgstr ""
 
 msgid "search"
 msgstr "buscar"
@@ -3534,12 +3440,12 @@
 msgid "subject"
 msgstr "sujeto"
 
+msgid "subject type"
+msgstr ""
+
 msgid "subject/object cardinality"
 msgstr "cardinalidad sujeto/objeto"
 
-msgid "subject_plural:"
-msgstr "sujetos:"
-
 msgid "subworkflow"
 msgstr ""
 
@@ -3942,6 +3848,9 @@
 msgid "used to grant a permission to a group"
 msgstr "utilizado para otorgar permisos a un grupo"
 
+msgid "user"
+msgstr ""
+
 #, python-format
 msgid ""
 "user %s has made the following change(s):\n"
@@ -3982,6 +3891,26 @@
 msgid "value associated to this key is not editable manually"
 msgstr "el valor asociado a este elemento no es editable manualmente"
 
+#, python-format
+msgid "value must be %(op)s %(boundary)s"
+msgstr ""
+
+#, python-format
+msgid "value must be <= %(boundary)s"
+msgstr ""
+
+#, python-format
+msgid "value must be >= %(boundary)s"
+msgstr ""
+
+#, python-format
+msgid "value should have maximum size of %s"
+msgstr ""
+
+#, python-format
+msgid "value should have minimum size of %s"
+msgstr ""
+
 msgid "vcard"
 msgstr "vcard"
 
@@ -4000,15 +3929,18 @@
 msgid "view history"
 msgstr ""
 
+msgid "view identifier"
+msgstr ""
+
+msgid "view title"
+msgstr ""
+
 msgid "view workflow"
 msgstr "ver workflow"
 
 msgid "view_index"
 msgstr ""
 
-msgid "views"
-msgstr "vistas"
-
 msgid "visible"
 msgstr "visible"
 
@@ -4031,6 +3963,12 @@
 msgid "wf_info_for_object"
 msgstr "historial de transiciones"
 
+msgid "wf_tab_info"
+msgstr ""
+
+msgid "wfgraph"
+msgstr ""
+
 msgid ""
 "when multiple addresses are equivalent (such as python-projects@logilab.org "
 "and python-projects@lists.logilab.org), set this to indicate which is the "
@@ -4101,345 +4039,5 @@
 msgid "you should probably delete that property"
 msgstr "deberia probablamente suprimir esta propriedad"
 
-#~ msgid "%(fmt1)s, or without time: %(fmt2)s"
-#~ msgstr "%(fmt1)s, o bien sin especificar horario: %(fmt2)s"
-
-#~ msgid "%s results matching query"
-#~ msgstr "%s resultados de la demanda"
-
-#~ msgid "Application"
-#~ msgstr "Aplicación"
-
-#~ msgid "Debug level set to %s"
-#~ msgstr "Nivel de debug puesto a %s"
-
-#~ msgid "Environment"
-#~ msgstr "Ambiente"
-
-#~ msgid "No query has been executed"
-#~ msgstr "Ninguna búsqueda ha sido ejecutada"
-
-#~ msgid "Request"
-#~ msgstr "Petición"
-
-#~ msgid "Server"
-#~ msgstr "Servidor"
-
-#~ msgid "There is no workflow defined for this entity."
-#~ msgstr "No hay workflow para este entidad"
-
-#~ msgid "Unable to find anything named \"%s\" in the schema !"
-#~ msgstr "No encontramos el nombre \"%s\" en el esquema"
-
-#~ msgid "You are now connected to %s"
-#~ msgstr "Usted esta conectado a %s"
-
-#~ msgid ""
-#~ "You have no access to this view or it's not applyable to current data"
-#~ msgstr "No tiene acceso a esta vista o No es aplicable a los datos actuales"
-
-#~ msgid "__msg state changed"
-#~ msgstr "El estado a cambiado"
-
-#~ msgid "account state"
-#~ msgstr "Estado de la Cuenta"
-
-#~ msgid "add CWRType add_permission RQLExpression subject"
-#~ msgstr "Expresión RQL de agregación"
-
-#~ msgid "add CWRType delete_permission RQLExpression subject"
-#~ msgstr "Expresión RQL de eliminación"
-
-#~ msgid "add CWRType read_permission RQLExpression subject"
-#~ msgstr "Expresión RQL de lectura"
-
-#~ msgid "add State state_of CWEType object"
-#~ msgstr "Estado"
-
-#~ msgid "add Transition transition_of CWEType object"
-#~ msgstr "Transición"
-
-#~ msgid "add a Bookmark"
-#~ msgstr "Agregar un Favorito"
-
-#~ msgid "add a CWAttribute"
-#~ msgstr "Agregar un tipo de relación"
-
-#~ msgid "add a CWCache"
-#~ msgstr "Agregar un cache"
-
-#~ msgid "add a CWConstraint"
-#~ msgstr "Agregar una Restricción"
-
-#~ msgid "add a CWConstraintType"
-#~ msgstr "Agregar un tipo de Restricción"
-
-#~ msgid "add a CWEType"
-#~ msgstr "Agregar un tipo de entidad"
-
-#~ msgid "add a CWGroup"
-#~ msgstr "Agregar un grupo de usuarios"
-
-#~ msgid "add a CWPermission"
-#~ msgstr "Agregar una autorización"
-
-#~ msgid "add a CWProperty"
-#~ msgstr "Agregar una propiedad"
-
-#~ msgid "add a CWRType"
-#~ msgstr "Agregar un tipo de relación"
-
-#~ msgid "add a CWRelation"
-#~ msgstr "Agregar una relación"
-
-#~ msgid "add a CWUser"
-#~ msgstr "Agregar un usuario"
-
-#~ msgid "add a EmailAddress"
-#~ msgstr "Agregar un email"
-
-#~ msgid "add a RQLExpression"
-#~ msgstr "Agregar una expresión rql"
-
-#~ msgid "add a State"
-#~ msgstr "Agregar un estado"
-
-#~ msgid "add a TrInfo"
-#~ msgstr "Agregar una información de transición"
-
-#~ msgid "add a Transition"
-#~ msgstr "Agregar una transición"
-
-#~ msgid "add relation"
-#~ msgstr "Agregar una relación"
-
-#~ msgid ""
-#~ "added relation %(rtype)s from %(frometype)s #%(fromeid)s to %(toetype)s #%"
-#~ "(toeid)s"
-#~ msgstr ""
-#~ "Relación agregada %(rtype)s de %(frometype)s #%(fromeid)s hacia %(toetype)"
-#~ "s #%(toeid)s"
-
-#~ msgid "allowed transition from this state"
-#~ msgstr "transición autorizada desde este estado"
-
-#~ msgid "button_reset"
-#~ msgstr "Cancelar los cambios"
-
-#~ msgid "canonical"
-#~ msgstr "canónico"
-
-#~ msgid "comment:"
-#~ msgstr "Comentario:"
-
-#~ msgid "copy edition"
-#~ msgstr "Edición de una copia"
-
-#~ msgid ""
-#~ "core relation giving to a group the permission to add an entity or "
-#~ "relation type"
-#~ msgstr ""
-#~ "Relación sistema que otorga a un grupo la autorización de agregar una "
-#~ "entidad o una relación"
-
-#~ msgid ""
-#~ "core relation giving to a group the permission to delete an entity or "
-#~ "relation type"
-#~ msgstr ""
-#~ "Relación sistema que otorga a un grupo la autorización de eliminar una "
-#~ "entidad o relación"
-
-#~ msgid ""
-#~ "core relation giving to a group the permission to read an entity or "
-#~ "relation type"
-#~ msgstr ""
-#~ "Relación sistema que otorga a un grupo la autorización de leer una "
-#~ "entidad o una relación "
-
-#~ msgid ""
-#~ "core relation giving to a group the permission to update an entity type"
-#~ msgstr ""
-#~ "Relación sistema que otorga a un grupo la autorización de actualizar una "
-#~ "entidad"
-
-#~ msgid ""
-#~ "creating RQLExpression (CWRType %(linkto)s add_permission RQLExpression)"
-#~ msgstr ""
-#~ "Creación de una expresión RQL para la autorización de agregar relaciones %"
-#~ "(linkto)s"
-
-#~ msgid ""
-#~ "creating RQLExpression (CWRType %(linkto)s delete_permission "
-#~ "RQLExpression)"
-#~ msgstr ""
-#~ "creación de una expresión RQL para autorizar la eliminación de relaciones "
-#~ "%(linkto)s"
-
-#~ msgid ""
-#~ "creating RQLExpression (CWRType %(linkto)s read_permission RQLExpression)"
-#~ msgstr ""
-#~ "Creación de una expresión RQL para autorizar la lectura de relaciones %"
-#~ "(linkto)s"
-
-#~ msgid "creating State (State state_of CWEType %(linkto)s)"
-#~ msgstr "Creación de un estado por el tipo %(linkto)s"
-
-#~ msgid "creating Transition (Transition transition_of CWEType %(linkto)s)"
-#~ msgstr "Creación de una transición para el tipo %(linkto)s"
-
-#~ msgid "currently attached file: %s"
-#~ msgstr "archivo adjunto: %s"
-
-#~ msgid ""
-#~ "deleted relation %(rtype)s from %(frometype)s #%(fromeid)s to %(toetype)s "
-#~ "#%(toeid)s"
-#~ msgstr ""
-#~ "Eliminación de la relación %(rtype)s de %(frometype)s #%(fromeid)s hacia %"
-#~ "(toetype)s #%(toeid)s"
-
-#~ msgid "detach attached file %s"
-#~ msgstr "Quitar archivo adjunto %s"
-
-#~ msgid "element copied"
-#~ msgstr "Elemento copiado"
-
-#~ msgid "element created"
-#~ msgstr "Elemento creado"
-
-#~ msgid "element edited"
-#~ msgstr "Elemento editado"
-
-#~ msgid "entity types which may use this state"
-#~ msgstr "Tipo de entidades que pueden utilizar este estado"
-
-#~ msgid "entity types which may use this transition"
-#~ msgstr "Entidades que pueden utilizar esta transición"
-
-#~ msgid "groups allowed to add entities/relations of this type"
-#~ msgstr "Grupos autorizados a agregar entidades/relaciones de este tipo"
-
-#~ msgid "groups allowed to delete entities/relations of this type"
-#~ msgstr "Grupos autorizados a eliminar entidades/relaciones de este tipo"
-
-#~ msgid "groups allowed to read entities/relations of this type"
-#~ msgstr "Grupos autorizados a leer entidades/relaciones de este tipo"
-
-#~ msgid "groups allowed to update entities of this type"
-#~ msgstr "Grupos autorizados a actualizar entidades de este tipo"
-
-#~ msgid "home"
-#~ msgstr "Inicio"
-
-#~ msgid "initial state for entities of this type"
-#~ msgstr "Estado inicial para las entidades de este tipo"
-
-#~ msgid "invalid date"
-#~ msgstr "Esta fecha no es válida"
-
-#~ msgid "link a state to one or more entity type"
-#~ msgstr "liga un estado a una o mas entidades"
-
-#~ msgid "link a transition to one or more entity type"
-#~ msgstr "liga una transición a una o mas tipos de entidad"
-
-#~ msgid "link to each item in"
-#~ msgstr "ligar hacia cada elemento en"
-
-#~ msgid "loading"
-#~ msgstr "Cargando"
-
-#~ msgid "nothing to edit"
-#~ msgstr "nada que editar"
-
-#~ msgid "remove this Bookmark"
-#~ msgstr "Eliminar este Favorito"
-
-#~ msgid "remove this CWAttribute"
-#~ msgstr "Eliminar este atributo"
-
-#~ msgid "remove this CWCache"
-#~ msgstr "Eliminar esta cache de aplicación"
-
-#~ msgid "remove this CWConstraint"
-#~ msgstr "Eliminar esta restricción"
-
-#~ msgid "remove this CWConstraintType"
-#~ msgstr "Eliminar este tipo de restricción"
-
-#~ msgid "remove this CWEType"
-#~ msgstr "Eliminar este tipo de entidad"
-
-#~ msgid "remove this CWGroup"
-#~ msgstr "Eliminar este grupo"
-
-#~ msgid "remove this CWPermission"
-#~ msgstr "Eliminar este permiso"
-
-#~ msgid "remove this CWProperty"
-#~ msgstr "Eliminar esta propiedad"
-
-#~ msgid "remove this CWRType"
-#~ msgstr "Eliminar esta definición de relación"
-
-#~ msgid "remove this CWRelation"
-#~ msgstr "Eliminar esta relación"
-
-#~ msgid "remove this CWUser"
-#~ msgstr "Eliminar este usuario"
-
-#~ msgid "remove this EmailAddress"
-#~ msgstr "Eliminar este correo electronico"
-
-#~ msgid "remove this RQLExpression"
-#~ msgstr "Eliminar esta expresión RQL"
-
-#~ msgid "remove this State"
-#~ msgstr "Eliminar este estado"
-
-#~ msgid "remove this TrInfo"
-#~ msgstr "Eliminar información de esta transición"
-
-#~ msgid "remove this Transition"
-#~ msgstr "Eliminar esta transición"
-
-#~ msgid "rql expression allowing to add entities/relations of this type"
-#~ msgstr "expresion RQL permitiendo agregar entidades/relaciones de este tipo"
-
-#~ msgid "rql expression allowing to delete entities/relations of this type"
-#~ msgstr ""
-#~ "expresion RQL permitiendo eliminar entidades/relaciones de este tipo"
-
-#~ msgid "rql expression allowing to read entities/relations of this type"
-#~ msgstr "expresion RQL permitiendo leer entidades/relaciones de este tipo"
-
-#~ msgid "rql expression allowing to update entities of this type"
-#~ msgstr "expresion RQL permitiendo actualizar entidades de este tipo"
-
-#~ msgid "server debug information"
-#~ msgstr "server debug information"
-
-#~ msgid ""
-#~ "use to define a transition from one or multiple states to a destination "
-#~ "states in workflow's definitions."
-#~ msgstr ""
-#~ "utilizado para definir una transición desde uno o multiples estados hacia "
-#~ "uno o varios estados destino en las definiciones del workflow"
-
-#~ msgid ""
-#~ "user for which this property is applying. If this relation is not set, "
-#~ "the property is considered as a global property"
-#~ msgstr ""
-#~ "usuario para el cual aplica esta propiedad. Si no se establece esta "
-#~ "relación, la propiedad es considerada como una propiedad global."
-
-#~ msgid ""
-#~ "when multiple addresses are equivalent (such as python-projects@logilab."
-#~ "org and python-projects@lists.logilab.org), set this to true on one of "
-#~ "them which is the preferred form."
-#~ msgstr ""
-#~ "cuando multiples direcciones de correo son equivalentes (como python-"
-#~ "projects@logilab.org y python-projects@lists.logilab.org), establecer "
-#~ "esto como verdadero en una de ellas es la forma preferida "
-
-#~ msgid "workflow for %s"
-#~ msgstr "workflow para %s"
+#~ msgid "schema-image"
+#~ msgstr "esquema imagen"
--- a/i18n/fr.po	Thu May 06 08:24:46 2010 +0200
+++ b/i18n/fr.po	Mon Jul 19 15:36:16 2010 +0200
@@ -1,10 +1,10 @@
 # cubicweb i18n catalog
-# Copyright 2003-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# Copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # Logilab <contact@logilab.fr>
 msgid ""
 msgstr ""
 "Project-Id-Version: cubicweb 2.46.0\n"
-"PO-Revision-Date: 2010-01-15 09:35+0100\n"
+"PO-Revision-Date: 2010-05-16 18:59+0200\n"
 "Last-Translator: Logilab Team <contact@logilab.fr>\n"
 "Language-Team: fr <contact@logilab.fr>\n"
 "MIME-Version: 1.0\n"
@@ -35,6 +35,9 @@
 msgid "  from state %(fromstate)s to state %(tostate)s\n"
 msgstr "  de l'état %(fromstate)s vers l'état %(tostate)s\n"
 
+msgid " :"
+msgstr ""
+
 #, python-format
 msgid "%(attr)s set to %(newvalue)s"
 msgstr "%(attr)s modifié à %(newvalue)s"
@@ -56,6 +59,10 @@
 msgstr "%(subject)s %(etype)s #%(eid)s (%(login)s)"
 
 #, python-format
+msgid "%(value)r doesn't match the %(regexp)r regular expression"
+msgstr "%(value)r ne correspond pas à l'expression régulière %(regexp)r"
+
+#, python-format
 msgid "%d days"
 msgstr "%d jours"
 
@@ -170,6 +177,10 @@
 msgstr "1 0..1"
 
 #, python-format
+msgid "<%s not specified>"
+msgstr "<%s non spécifié>"
+
+#, python-format
 msgid ""
 "<div>This schema of the data model <em>excludes</em> the meta-data, but you "
 "can also display a <a href=\"%s\">complete schema with meta-data</a>.</div>"
@@ -177,9 +188,6 @@
 "<div>Ce schéma du modèle de données <em>exclue</em> les méta-données, mais "
 "vous pouvez afficher un <a href=\"%s\">schéma complet</a>.</div>"
 
-msgid "<no value>"
-msgstr "<non spécifié>"
-
 msgid "?*"
 msgstr "0..1 0..n"
 
@@ -195,14 +203,14 @@
 msgid "AND"
 msgstr "ET"
 
-msgid "Add permissions"
-msgstr "Permissions d'ajouter"
-
 msgid "Any"
 msgstr "N'importe"
 
-msgid "Attributes"
-msgstr "Attributs"
+msgid "Attributes permissions:"
+msgstr "Permissions des attributs"
+
+msgid "Attributes with non default permissions:"
+msgstr "Attributs ayant des permissions non-standard"
 
 # schema pot file, generated on 2009-09-16 16:46:55
 #
@@ -228,6 +236,9 @@
 msgid "BoundConstraint"
 msgstr "contrainte de bornes"
 
+msgid "BoundaryConstraint"
+msgstr "contrainte de bornes"
+
 msgid "Browse by category"
 msgstr "Naviguer par catégorie"
 
@@ -381,14 +392,11 @@
 msgid "Decimal_plural"
 msgstr "Nombres décimaux"
 
-msgid "Delete permissions"
-msgstr "Permissions de supprimer"
-
 msgid "Do you want to delete the following element(s) ?"
 msgstr "Voulez-vous supprimer le(s) élément(s) suivant(s) ?"
 
-msgid "Download page as pdf"
-msgstr "télécharger la page au format PDF"
+msgid "Download schema as OWL"
+msgstr "Télécharger le schéma au format OWL"
 
 msgctxt "inlined:CWUser.use_email.subject"
 msgid "EmailAddress"
@@ -403,6 +411,9 @@
 msgid "Entities"
 msgstr "entités"
 
+msgid "Entity types"
+msgstr "Types d'entités"
+
 msgid "ExternalUri"
 msgstr "Uri externe"
 
@@ -430,6 +441,9 @@
 msgid "Help"
 msgstr "Aide"
 
+msgid "Index"
+msgstr "Index"
+
 msgid "Instance"
 msgstr "Instance"
 
@@ -523,15 +537,27 @@
 msgid "No result matching query"
 msgstr "aucun résultat"
 
+msgid "Non exhaustive list of views that may apply to entities of this type"
+msgstr "Liste non exhausite des vues s'appliquant à ce type d'entité"
+
 msgid "OR"
 msgstr "OU"
 
+msgid "Parent classes:"
+msgstr "Classes parentes :"
+
 msgid "Password"
 msgstr "Mot de passe"
 
 msgid "Password_plural"
 msgstr "Mots de passe"
 
+msgid "Permissions for entity types"
+msgstr "Permissions pour les types d'entités"
+
+msgid "Permissions for relations"
+msgstr "Permissions pour les relations"
+
 msgid "Please note that this is only a shallow copy"
 msgstr "Attention, cela n'effectue qu'une copie de surface"
 
@@ -550,9 +576,6 @@
 msgid "RQLVocabularyConstraint"
 msgstr "contrainte rql de vocabulaire"
 
-msgid "Read permissions"
-msgstr "Permissions de lire"
-
 msgid "Recipients:"
 msgstr "Destinataires :"
 
@@ -562,6 +585,9 @@
 msgid "Registry's content"
 msgstr "Contenu du registre"
 
+msgid "Relation types"
+msgstr "Types de relation"
+
 msgid "Relations"
 msgstr "Relations"
 
@@ -599,6 +625,9 @@
 msgid "String_plural"
 msgstr "Chaînes de caractères"
 
+msgid "Sub-classes:"
+msgstr "Classes filles :"
+
 msgid "SubWorkflowExitPoint"
 msgstr "Sortie de sous-workflow"
 
@@ -625,6 +654,9 @@
 msgid "The view %s could not be found"
 msgstr "La vue %s est introuvable"
 
+msgid "There is no default workflow"
+msgstr "Ce type d'entité n'a pas de workflow par défault"
+
 msgid "This BaseTransition"
 msgstr "Cette transition abstraite"
 
@@ -691,6 +723,9 @@
 msgid "This WorkflowTransition"
 msgstr "Cette transition workflow"
 
+msgid "This entity type permissions:"
+msgstr "Permissions pour ce type d'endité"
+
 msgid "Time"
 msgstr "Heure"
 
@@ -715,9 +750,6 @@
 msgid "Unreachable objects"
 msgstr "Objets inacessible"
 
-msgid "Update permissions"
-msgstr "Permissions de modifier"
-
 msgid "Used by:"
 msgstr "Utilisé par :"
 
@@ -816,183 +848,12 @@
 msgid "abstract base class for transitions"
 msgstr "classe de base abstraite pour les transitions"
 
-msgid "access type"
-msgstr "type d'accès"
-
 msgid "action(s) on this selection"
 msgstr "action(s) sur cette sélection"
 
 msgid "actions"
 msgstr "actions"
 
-msgid "actions_about"
-msgstr "à propos"
-
-msgid "actions_about_description"
-msgstr ""
-
-msgid "actions_addentity"
-msgstr "ajouter une entité de ce type"
-
-msgid "actions_addentity_description"
-msgstr ""
-
-msgid "actions_addrelated"
-msgstr "menu ajouter"
-
-msgid "actions_addrelated_description"
-msgstr ""
-
-msgid "actions_cancel"
-msgstr "annuler la sélection"
-
-msgid "actions_cancel_description"
-msgstr ""
-
-msgid "actions_changelog"
-msgstr "changements récents"
-
-msgid "actions_changelog_description"
-msgstr ""
-
-msgid "actions_copy"
-msgstr "copier"
-
-msgid "actions_copy_description"
-msgstr ""
-
-msgid "actions_delete"
-msgstr "supprimer"
-
-msgid "actions_delete_description"
-msgstr ""
-
-msgid "actions_download_as_owl"
-msgstr "télécharger en owl"
-
-msgid "actions_download_as_owl_description"
-msgstr ""
-
-msgid "actions_edit"
-msgstr "modifier"
-
-msgid "actions_edit_description"
-msgstr ""
-
-msgid "actions_embed"
-msgstr "embarquer"
-
-msgid "actions_embed_description"
-msgstr ""
-
-msgid "actions_entitiesoftype"
-msgstr "voir les entités de ce type"
-
-msgid "actions_entitiesoftype_description"
-msgstr ""
-
-msgid "actions_follow"
-msgstr "suivre"
-
-msgid "actions_follow_description"
-msgstr ""
-
-msgid "actions_help"
-msgstr "aide"
-
-msgid "actions_help_description"
-msgstr ""
-
-msgid "actions_logout"
-msgstr "se déconnecter"
-
-msgid "actions_logout_description"
-msgstr ""
-
-msgid "actions_manage"
-msgstr "gestion du site"
-
-msgid "actions_manage_description"
-msgstr ""
-
-msgid "actions_managepermission"
-msgstr "gestion des permissions"
-
-msgid "actions_managepermission_description"
-msgstr ""
-
-msgid "actions_muledit"
-msgstr "édition multiple"
-
-msgid "actions_muledit_description"
-msgstr ""
-
-msgid "actions_myinfos"
-msgstr "informations personnelles"
-
-msgid "actions_myinfos_description"
-msgstr ""
-
-msgid "actions_myprefs"
-msgstr "préférences utilisateur"
-
-msgid "actions_myprefs_description"
-msgstr ""
-
-msgid "actions_poweredby"
-msgstr "powered by"
-
-msgid "actions_poweredby_description"
-msgstr ""
-
-msgid "actions_prefs"
-msgstr "préférences"
-
-msgid "actions_prefs_description"
-msgstr ""
-
-msgid "actions_schema"
-msgstr "voir le schéma"
-
-msgid "actions_schema_description"
-msgstr ""
-
-msgid "actions_select"
-msgstr "sélectionner"
-
-msgid "actions_select_description"
-msgstr ""
-
-msgid "actions_sendemail"
-msgstr "envoyer un email"
-
-msgid "actions_sendemail_description"
-msgstr ""
-
-msgid "actions_siteconfig"
-msgstr "configuration du site"
-
-msgid "actions_siteconfig_description"
-msgstr ""
-
-msgid "actions_siteinfo"
-msgstr "information sur ce site"
-
-msgid "actions_siteinfo_description"
-msgstr ""
-
-msgid "actions_view"
-msgstr "voir"
-
-msgid "actions_view_description"
-msgstr ""
-
-msgid "actions_workflow"
-msgstr "voir le workflow"
-
-msgid "actions_workflow_description"
-msgstr ""
-
 msgid "activate"
 msgstr "activer"
 
@@ -1242,9 +1103,6 @@
 msgid "attribute"
 msgstr "attribut"
 
-msgid "attributes with modified permissions:"
-msgstr "attributs ayant des permissions modifiées :"
-
 msgid "august"
 msgstr "août"
 
@@ -1568,9 +1426,6 @@
 msgid "condition"
 msgstr "condition"
 
-msgid "condition:"
-msgstr "condition :"
-
 msgctxt "RQLExpression"
 msgid "condition_object"
 msgstr "condition de"
@@ -1578,6 +1433,9 @@
 msgid "condition_object"
 msgstr "condition de"
 
+msgid "conditions"
+msgstr "conditions"
+
 msgid "config mode"
 msgstr "mode de configuration"
 
@@ -1614,6 +1472,9 @@
 msgid "constraints applying on this relation"
 msgstr "contraintes s'appliquant à cette relation"
 
+msgid "content type"
+msgstr "type MIME"
+
 msgid "contentnavigation"
 msgstr "composants contextuels"
 
@@ -1630,12 +1491,6 @@
 msgid "contentnavigation_metadata_description"
 msgstr ""
 
-msgid "contentnavigation_pdfview"
-msgstr "icône pdf"
-
-msgid "contentnavigation_pdfview_description"
-msgstr ""
-
 msgid "contentnavigation_prevnext"
 msgstr "élément précedent / suivant"
 
@@ -1832,6 +1687,9 @@
 msgid "creation"
 msgstr "création"
 
+msgid "creation date"
+msgstr "date de création"
+
 msgid "creation time of an entity"
 msgstr "date de création d'une entité"
 
@@ -1867,18 +1725,33 @@
 msgid "custom_workflow_object"
 msgstr "workflow de"
 
-msgid "cwetype-schema-image"
-msgstr "schéma"
-
-msgid "cwetype-schema-permissions"
+msgid "cwetype-box"
+msgstr "vue \"boîte\""
+
+msgid "cwetype-description"
+msgstr "description"
+
+msgid "cwetype-permissions"
 msgstr "permissions"
 
-msgid "cwetype-schema-text"
-msgstr "description"
+msgid "cwetype-views"
+msgstr "vues"
 
 msgid "cwetype-workflow"
 msgstr "workflow"
 
+msgid "cwgroup-main"
+msgstr "description"
+
+msgid "cwgroup-permissions"
+msgstr "permissions"
+
+msgid "cwrtype-description"
+msgstr "description"
+
+msgid "cwrtype-permissions"
+msgstr "permissions"
+
 msgid "cwuri"
 msgstr "uri interne"
 
@@ -1906,6 +1779,9 @@
 msgid "default user workflow"
 msgstr "workflow par défaut des utilisateurs"
 
+msgid "default value"
+msgstr "valeur par défaut"
+
 msgid "default workflow for an entity type"
 msgstr "workflow par défaut pour un type d'entité"
 
@@ -2146,18 +2022,12 @@
 msgid "detach attached file"
 msgstr "détacher le fichier existant"
 
-msgid "display order of the action"
-msgstr "ordre d'affichage de l'action"
-
 msgid "display order of the box"
 msgstr "ordre d'affichage de la boîte"
 
 msgid "display order of the component"
 msgstr "ordre d'affichage du composant"
 
-msgid "display the action or not"
-msgstr "afficher l'action ou non"
-
 msgid "display the box or not"
 msgstr "afficher la boîte ou non"
 
@@ -2369,6 +2239,9 @@
 msgid "final"
 msgstr "final"
 
+msgid "first name"
+msgstr "prénom"
+
 msgid "firstname"
 msgstr "prénom"
 
@@ -2387,7 +2260,7 @@
 msgstr "suivez ce lien pour plus d'information sur ce %s"
 
 msgid "follow this link if javascript is deactivated"
-msgstr ""
+msgstr "suivez ce lien si javascript est désactivé"
 
 msgid "for_user"
 msgstr "pour l'utilisateur"
@@ -2484,10 +2357,6 @@
 msgid "granted to groups"
 msgstr "accordée aux groupes"
 
-#, python-format
-msgid "graphical representation of %s"
-msgstr "représentation graphique de %s"
-
 msgid "graphical representation of the instance'schema"
 msgstr "représentation graphique du schéma de l'instance"
 
@@ -2512,9 +2381,6 @@
 msgid "groups to which the permission is granted"
 msgstr "groupes auquels cette permission est donnée"
 
-msgid "groups:"
-msgstr "groupes :"
-
 msgid "guests"
 msgstr "invités"
 
@@ -2559,6 +2425,18 @@
 msgid "i18n_login_popup"
 msgstr "s'authentifier"
 
+msgid "i18ncard_*"
+msgstr "0..n"
+
+msgid "i18ncard_+"
+msgstr "1..n"
+
+msgid "i18ncard_1"
+msgstr "1"
+
+msgid "i18ncard_?"
+msgstr "0..1"
+
 msgid "i18nprevnext_next"
 msgstr "suivant"
 
@@ -2596,12 +2474,6 @@
 msgid "image"
 msgstr "image"
 
-msgid "in memory entity schema"
-msgstr "schéma de l'entité en mémoire"
-
-msgid "in memory relation schema"
-msgstr "schéma de la relation en mémoire"
-
 msgid "in_group"
 msgstr "dans le groupe"
 
@@ -2632,9 +2504,6 @@
 msgid "incorrect value (%(value)s) for type \"%(type)s\""
 msgstr "valeur %(value)s incorrecte pour le type \"%(type)s\""
 
-msgid "index"
-msgstr "index"
-
 msgid "index this attribute's value in the plain text index"
 msgstr "indexer la valeur de cet attribut dans l'index plein texte"
 
@@ -2705,9 +2574,19 @@
 msgid "invalid action %r"
 msgstr "action %r invalide"
 
+#, python-format
+msgid "invalid value %(value)s, it must be one of %(choices)s"
+msgstr "valeur %(value)s incorrect, doit être parmi %(choices)s"
+
 msgid "is"
 msgstr "de type"
 
+msgid "is object of:"
+msgstr "est object de"
+
+msgid "is subject of:"
+msgstr "est sujet de"
+
 msgid ""
 "is the subject/object entity of the relation composed of the other ? This "
 "implies that when the composite is deleted, composants are also deleted."
@@ -2760,6 +2639,12 @@
 msgid "last connection date"
 msgstr "dernière date de connexion"
 
+msgid "last login time"
+msgstr "dernière date de connexion"
+
+msgid "last name"
+msgstr "nom"
+
 msgid "last usage"
 msgstr "dernier usage"
 
@@ -2821,6 +2706,9 @@
 msgid "log in"
 msgstr "s'identifier"
 
+msgid "log out first"
+msgstr "déconnecter vous d'abord"
+
 msgid "login"
 msgstr "identifiant"
 
@@ -2909,6 +2797,9 @@
 msgid "more actions"
 msgstr "plus d'actions"
 
+msgid "more info about this workflow"
+msgstr "plus d'information sur ce workflow"
+
 msgid "multiple edit"
 msgstr "édition multiple"
 
@@ -3052,8 +2943,8 @@
 msgid "object"
 msgstr "objet"
 
-msgid "object_plural:"
-msgstr "objets :"
+msgid "object type"
+msgstr "type de l'objet"
 
 msgid "october"
 msgstr "octobre"
@@ -3079,6 +2970,9 @@
 msgid "opened web sessions"
 msgstr "sessions web ouvertes"
 
+msgid "options"
+msgstr "options"
+
 msgid "order"
 msgstr "ordre"
 
@@ -3135,11 +3029,8 @@
 msgid "permission"
 msgstr "permission"
 
-msgid "permissions for entities"
-msgstr "permissions pour les entités"
-
-msgid "permissions for relations"
-msgstr "permissions pour les relations"
+msgid "permissions"
+msgstr "permissions"
 
 msgid "permissions for this entity"
 msgstr "permissions pour cette entité"
@@ -3215,6 +3106,12 @@
 msgid "project"
 msgstr "projet"
 
+msgid "rdef-description"
+msgstr "description"
+
+msgid "rdef-permissions"
+msgstr "permissions"
+
 msgid "read"
 msgstr "lecture"
 
@@ -3256,6 +3153,9 @@
 msgid "related entity has no workflow set"
 msgstr "l'entité lié n'a pas de workflow"
 
+msgid "relation"
+msgstr "relation"
+
 #, python-format
 msgid "relation %(relname)s of %(ent)s"
 msgstr "relation %(relname)s de %(ent)s"
@@ -3284,9 +3184,6 @@
 msgid "relation_type_object"
 msgstr "définition"
 
-msgid "relations"
-msgstr "relations"
-
 msgid "relations deleted"
 msgstr "relations supprimées"
 
@@ -3328,6 +3225,9 @@
 msgid "require_permission_object"
 msgstr "permission of"
 
+msgid "required"
+msgstr "requis"
+
 msgid "required attribute"
 msgstr "attribut requis"
 
@@ -3376,11 +3276,17 @@
 msgid "schema's permissions definitions"
 msgstr "permissions définies dans le schéma"
 
-msgid "schema-image"
-msgstr "schéma"
-
-msgid "schema-text"
-msgstr "description"
+msgid "schema-diagram"
+msgstr "diagramme"
+
+msgid "schema-entity-types"
+msgstr "types d'entités"
+
+msgid "schema-relation-types"
+msgstr "types de relations"
+
+msgid "schema-security"
+msgstr "permissions"
 
 msgid "search"
 msgstr "rechercher"
@@ -3573,12 +3479,12 @@
 msgid "subject"
 msgstr "sujet"
 
+msgid "subject type"
+msgstr "type du sujet"
+
 msgid "subject/object cardinality"
 msgstr "cardinalité sujet/objet"
 
-msgid "subject_plural:"
-msgstr "sujets :"
-
 msgid "subworkflow"
 msgstr "sous-workflow"
 
@@ -3985,6 +3891,9 @@
 msgid "used to grant a permission to a group"
 msgstr "utiliser pour donner une permission à un groupe"
 
+msgid "user"
+msgstr "utilisateur"
+
 #, python-format
 msgid ""
 "user %s has made the following change(s):\n"
@@ -4025,6 +3934,26 @@
 msgid "value associated to this key is not editable manually"
 msgstr "la valeur associée à cette clé n'est pas éditable manuellement"
 
+#, python-format
+msgid "value must be %(op)s %(boundary)s"
+msgstr "la valeur doit être %(op)s %(boundary)s"
+
+#, python-format
+msgid "value must be <= %(boundary)s"
+msgstr "la valeur doit être <= %(boundary)s"
+
+#, python-format
+msgid "value must be >= %(boundary)s"
+msgstr "la valeur doit être >= %(boundary)s"
+
+#, python-format
+msgid "value should have maximum size of %s"
+msgstr "la valeur doit être de taille %s au maximum"
+
+#, python-format
+msgid "value should have minimum size of %s"
+msgstr "la valeur doit être de taille %s au minimum"
+
 msgid "vcard"
 msgstr "vcard"
 
@@ -4043,15 +3972,18 @@
 msgid "view history"
 msgstr "voir l'historique"
 
+msgid "view identifier"
+msgstr "identifiant"
+
+msgid "view title"
+msgstr "titre"
+
 msgid "view workflow"
 msgstr "voir les états possibles"
 
 msgid "view_index"
 msgstr "accueil"
 
-msgid "views"
-msgstr "vues"
-
 msgid "visible"
 msgstr "visible"
 
@@ -4075,6 +4007,12 @@
 msgid "wf_info_for_object"
 msgstr "historique des transitions"
 
+msgid "wf_tab_info"
+msgstr "description"
+
+msgid "wfgraph"
+msgstr "image du workflow"
+
 msgid ""
 "when multiple addresses are equivalent (such as python-projects@logilab.org "
 "and python-projects@lists.logilab.org), set this to indicate which is the "
--- a/mail.py	Thu May 06 08:24:46 2010 +0200
+++ b/mail.py	Mon Jul 19 15:36:16 2010 +0200
@@ -15,9 +15,8 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""Common utilies to format / semd emails.
+"""Common utilies to format / semd emails."""
 
-"""
 __docformat__ = "restructuredtext en"
 
 from base64 import b64encode, b64decode
@@ -85,8 +84,11 @@
     assert type(content) is unicode, repr(content)
     msg = MIMEText(content.encode('UTF-8'), 'plain', 'UTF-8')
     # safety: keep only the first newline
-    subject = subject.splitlines()[0]
-    msg['Subject'] = header(subject)
+    try:
+        subject = subject.splitlines()[0]
+        msg['Subject'] = header(subject)
+    except IndexError:
+        pass # no subject
     if uinfo.get('email'):
         email = uinfo['email']
     elif config and config['sender-addr']:
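A note on the mail.py hunk above: subject.splitlines()[0] raises IndexError when the
subject is empty, because splitting an empty string yields an empty list, so the new
try/except simply skips the Subject header in that case. A minimal sketch of the failure
mode (interpreter session, not part of the patch):

    >>> u''.splitlines()
    []
    >>> u''.splitlines()[0]
    Traceback (most recent call last):
      ...
    IndexError: list index out of range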
--- a/man/cubicweb-ctl.1	Thu May 06 08:24:46 2010 +0200
+++ b/man/cubicweb-ctl.1	Mon Jul 19 15:36:16 2010 +0200
@@ -1,4 +1,4 @@
-.TH cubicweb\-ctl 1 "2008-01-14" cubicweb\-ctl
+.TH cubicweb\-ctl 1 "2010-01-14" cubicweb\-ctl
 .SH NAME
 .B cubicweb\-ctl
 \- cubicweb control utility
@@ -23,5 +23,5 @@
 for more information about a specific command.
 
 .SH COPYRIGHT 
-Copyright (c) 2003-2008 LOGILAB S.A. (Paris, FRANCE).
+Copyright (c) 2003-2010 LOGILAB S.A. (Paris, FRANCE).
 http://www.logilab.fr/ -- mailto:contact@logilab.fr
--- a/md5crypt.py	Thu May 06 08:24:46 2010 +0200
+++ b/md5crypt.py	Mon Jul 19 15:36:16 2010 +0200
@@ -1,24 +1,3 @@
-#########################################################
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-#
-# This file is part of CubicWeb.
-#
-# CubicWeb is free software: you can redistribute it and/or modify it under the
-# terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option)
-# any later version.
-#
-# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""
-XXX clarify this header
-"""
 # md5crypt.py
 #
 # 0423.2000 by michal wallace http://www.sabren.com/
@@ -36,6 +15,7 @@
 # * remove usage of the string module
 # * don't include the magic string in the output string
 #   for true crypt.crypt compatibility
+# * use hashlib module instead of md5
 #########################################################
 """md5crypt.py - Provides interoperable MD5-based crypt() function
 
@@ -61,7 +41,7 @@
 MAGIC = '$1$'                        # Magic string
 ITOA64 = "./0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
 
-import md5
+import hashlib as md5
 
 def to64 (v, n):
     ret = ''
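The md5crypt.py change swaps the long-deprecated md5 module for hashlib while keeping the
`md5` name, so untouched call sites such as md5.md5(...) keep working because hashlib also
exposes an md5() constructor. A minimal sketch (not part of the patch):

    import hashlib as md5
    import hashlib

    # the aliased module still provides an md5() constructor
    assert md5.md5('abc').hexdigest() == hashlib.md5('abc').hexdigest()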
--- a/migration.py	Thu May 06 08:24:46 2010 +0200
+++ b/migration.py	Mon Jul 19 15:36:16 2010 +0200
@@ -29,6 +29,7 @@
 from logilab.common.decorators import cached
 from logilab.common.configuration import REQUIRED, read_old_config
 from logilab.common.shellutils import ASK
+from logilab.common.changelog import Version
 
 from cubicweb import ConfigurationError
 
@@ -319,7 +320,7 @@
         """a configuration option has been renamed"""
         self._option_changes.append(('renamed', oldname, newname))
 
-    def cmd_option_group_change(self, option, oldgroup, newgroup):
+    def cmd_option_group_changed(self, option, oldgroup, newgroup):
         """a configuration option has been moved in another group"""
         self._option_changes.append(('moved', option, oldgroup, newgroup))
 
@@ -387,3 +388,75 @@
 from logging import getLogger
 from cubicweb import set_log_methods
 set_log_methods(MigrationHelper, getLogger('cubicweb.migration'))
+
+
+def version_strictly_lower(a, b):
+    if a:
+        a = Version(a)
+    if b:
+        b = Version(b)
+    return a < b
+
+def max_version(a, b):
+    return str(max(Version(a), Version(b)))
+
+class ConfigurationProblem(object):
+    """Each cube has its own list of dependencies on other cubes/versions.
+
+    The ConfigurationProblem is used to record the loaded cubes, then to detect
+    inconsistencies in their dependencies.
+
+    See configuration management on Wikipedia for literature.
+    """
+
+    def __init__(self, config):
+        self.cubes = {}
+        self.config = config
+
+    def add_cube(self, name, version):
+        self.cubes[name] = version
+
+    def solve(self):
+        self.warnings = []
+        self.errors = []
+        self.read_constraints()
+        for cube, versions in sorted(self.constraints.items()):
+            oper, version = None, None
+            # simplify constraints
+            if versions:
+                for constraint in versions:
+                    op, ver = constraint
+                    if oper is None:
+                        oper = op
+                        version = ver
+                    elif op == '>=' and oper == '>=':
+                        version = max_version(ver, version)
+                    else:
+                        print 'unable to handle this case', oper, version, op, ver
+            # "solve" constraint satisfaction problem
+            if cube not in self.cubes:
+                self.errors.append( ('add', cube, version) )
+            elif versions:
+                lower_strict = version_strictly_lower(self.cubes[cube], version)
+                if oper in ('>=','='):
+                    if lower_strict:
+                        self.errors.append( ('update', cube, version) )
+                else:
+                    print 'unknown operator', oper
+
+    def read_constraints(self):
+        self.constraints = {}
+        self.reverse_constraints = {}
+        for cube in self.cubes:
+            use = self.config.cube_dependencies(cube)
+            for name, constraint in use.iteritems():
+                self.constraints.setdefault(name,set())
+                if constraint:
+                    try:
+                        oper, version = constraint.split()
+                        self.constraints[name].add( (oper, version) )
+                    except:
+                        self.warnings.append(
+                            'cube %s depends on %s but constraint badly '
+                            'formatted: %s' % (cube, name, constraint))
+                self.reverse_constraints.setdefault(name, set()).add(cube)
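To illustrate the ConfigurationProblem helper added above, here is a hypothetical usage
sketch; `config` stands for an instance configuration whose cube_dependencies() returns
mappings such as {'comment': '>= 1.5.0'}, and the cube names and versions are made up:

    pb = ConfigurationProblem(config)
    pb.add_cube('blog', '1.8.0')
    pb.add_cube('comment', '1.4.0')
    pb.solve()
    # pb.errors would then contain e.g. [('update', 'comment', '1.5.0')]
    # and pb.warnings any badly formatted dependency constraints
    print pb.errors, pb.warnings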
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/misc/cmp_schema.py	Mon Jul 19 15:36:16 2010 +0200
@@ -0,0 +1,24 @@
+"""This module compare the Schema on the file system to the one in the database"""
+
+from cStringIO import StringIO
+from cubicweb.web.schemaviewer import SchemaViewer
+from logilab.common.ureports import TextWriter
+import difflib
+
+viewer = SchemaViewer()
+layout_db = viewer.visit_schema(schema, display_relations=True)
+layout_fs = viewer.visit_schema(fsschema, display_relations=True)
+writer = TextWriter()
+stream_db = StringIO()
+stream_fs = StringIO()
+writer.format(layout_db, stream=stream_db)
+writer.format(layout_fs, stream=stream_fs)
+
+stream_db.seek(0)
+stream_fs.seek(0)
+db = stream_db.getvalue().splitlines()
+fs = stream_fs.getvalue().splitlines()
+open('db_schema.txt', 'w').write(stream_db.getvalue())
+open('fs_schema.txt', 'w').write(stream_fs.getvalue())
+#for diff in difflib.ndiff(fs, db):
+#    print diff
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/misc/migration/3.8.1_Any.py	Mon Jul 19 15:36:16 2010 +0200
@@ -0,0 +1,2 @@
+rql('SET X name "BoundaryConstraint" '
+    'WHERE X is CWConstraintType, X name "BoundConstraint"')
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/misc/migration/3.8.3_Any.py	Mon Jul 19 15:36:16 2010 +0200
@@ -0,0 +1,3 @@
+if 'same_as' in schema:
+    sync_schema_props_perms('same_as', syncperms=False)
+sync_schema_props_perms('Bookmark', syncperms=False)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/misc/migration/3.8.3_common.py	Mon Jul 19 15:36:16 2010 +0200
@@ -0,0 +1,4 @@
+option_group_changed('port', 'main', 'web')
+option_group_changed('query-log-file', 'main', 'web')
+option_group_changed('profile', 'main', 'web')
+option_group_changed('max-post-length', 'main', 'web')
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/misc/migration/3.8.5_Any.py	Mon Jul 19 15:36:16 2010 +0200
@@ -0,0 +1,59 @@
+def migrate_varchar_to_nvarchar():
+    dbdriver  = config.sources()['system']['db-driver']
+    if dbdriver != "sqlserver2005":
+        return
+
+    introspection_sql = """\
+SELECT table_schema, table_name, column_name, is_nullable, character_maximum_length
+FROM information_schema.columns
+WHERE data_type = 'VARCHAR' and table_name <> 'SYSDIAGRAMS'
+"""
+    has_index_sql = """\
+SELECT i.name AS index_name,
+       i.type_desc,
+       i.is_unique,
+       i.is_unique_constraint
+FROM sys.indexes AS i, sys.index_columns as j, sys.columns as k
+WHERE is_hypothetical = 0 AND i.index_id <> 0
+AND i.object_id = j.object_id
+AND i.index_id = j.index_id
+AND i.object_id = OBJECT_ID('%(table)s')
+AND k.name = '%(col)s'
+AND k.object_id=i.object_id
+AND j.column_id = k.column_id;"""
+
+    generated_statements = []
+    for schema, table, column, is_nullable, length in sql(introspection_sql, ask_confirm=False):
+        qualified_table = '[%s].[%s]' % (schema, table)
+        rset = sql(has_index_sql % {'table': qualified_table, 'col':column},
+                   ask_confirm = False)
+        drops = []
+        creates = []
+        for idx_name, idx_type, idx_unique, is_unique_constraint in rset:
+            if is_unique_constraint:
+                drops.append('ALTER TABLE %s DROP CONSTRAINT %s' % (qualified_table, idx_name))
+                creates.append('ALTER TABLE %s ADD CONSTRAINT %s UNIQUE (%s)' % (qualified_table, idx_name, column))
+            else:
+                drops.append('DROP INDEX %s ON %s' % (idx_name, qualified_table))
+                if idx_unique:
+                    unique = 'UNIQUE'
+                else:
+                    unique = ''
+                creates.append('CREATE %s %s INDEX %s ON %s(%s)' % (unique, idx_type, idx_name, qualified_table, column))
+
+        if length == -1:
+            length = 'max'
+        if is_nullable == 'YES':
+            not_null = 'NULL'
+        else:
+            not_null = 'NOT NULL'
+        alter_sql = 'ALTER TABLE %s ALTER COLUMN %s NVARCHAR(%s) %s' % (qualified_table, column, length, not_null)
+        generated_statements+= drops + [alter_sql] + creates
+
+
+    for statement in generated_statements:
+        print statement
+        sql(statement, ask_confirm=False)
+    commit()
+
+migrate_varchar_to_nvarchar()
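For readers unfamiliar with the SQL Server dialect targeted by 3.8.5_Any.py: for each
VARCHAR column the script drops the indexes or unique constraints covering it, alters the
column to NVARCHAR, then recreates them. With hypothetical table and column names, the
statements it prints and executes would look roughly like:

    DROP INDEX cw_title_idx ON [dbo].[cw_Bookmark]
    ALTER TABLE [dbo].[cw_Bookmark] ALTER COLUMN cw_title NVARCHAR(128) NULL
    CREATE NONCLUSTERED INDEX cw_title_idx ON [dbo].[cw_Bookmark](cw_title)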
--- a/misc/migration/postcreate.py	Thu May 06 08:24:46 2010 +0200
+++ b/misc/migration/postcreate.py	Mon Jul 19 15:36:16 2010 +0200
@@ -56,7 +56,7 @@
 # need this since we already have at least one user in the database (the default admin)
 for user in rql('Any X WHERE X is CWUser').entities():
     rql('SET X in_state S WHERE X eid %(x)s, S eid %(s)s',
-        {'x': user.eid, 's': activated.eid}, 'x')
+        {'x': user.eid, 's': activated.eid})
 
 # on interactive mode, ask for level 0 persistent options
 if interactive_mode:
@@ -68,11 +68,12 @@
             default = cfg.option_default(optname, optdict)
             # only record values differing from default
             if value != default:
-                rql('INSERT CWProperty X: X pkey %(k)s, X value %(v)s', {'k': key, 'v': value})
+                rql('INSERT CWProperty X: X pkey %(k)s, X value %(v)s',
+                    {'k': key, 'v': value})
 
 # add PERM_USE_TEMPLATE_FORMAT permission
 from cubicweb.schema import PERM_USE_TEMPLATE_FORMAT
 usetmplperm = create_entity('CWPermission', name=PERM_USE_TEMPLATE_FORMAT,
                             label=_('use template languages'))
 rql('SET X require_group G WHERE G name "managers", X eid %(x)s',
-    {'x': usetmplperm.eid}, 'x')
+    {'x': usetmplperm.eid})
--- a/pytestconf.py	Thu May 06 08:24:46 2010 +0200
+++ b/pytestconf.py	Mon Jul 19 15:36:16 2010 +0200
@@ -22,8 +22,6 @@
 from os.path import split, splitext
 from logilab.common.pytest import PyTester
 
-from cubicweb.etwist.server import _gc_debug
-
 class CustomPyTester(PyTester):
     def testfile(self, filename, batchmode=False):
         try:
@@ -39,11 +37,10 @@
                 if getattr(cls, '__module__', None) != modname:
                     continue
                 clean_repo_test_cls(cls)
-            #_gc_debug()
 
 def clean_repo_test_cls(cls):
     if 'repo' in cls.__dict__:
-        if not cls.repo._shutting_down:
+        if not cls.repo.shutting_down:
             cls.repo.shutdown()
         del cls.repo
     for clsattr in ('cnx', '_orig_cnx', 'config', '_config', 'vreg', 'schema'):
--- a/req.py	Thu May 06 08:24:46 2010 +0200
+++ b/req.py	Mon Jul 19 15:36:16 2010 +0200
@@ -106,10 +106,7 @@
         return rset
 
     def empty_rset(self):
-        """return a result set for the given eid without doing actual query
-        (we have the eid, we can suppose it exists and user has access to the
-        entity)
-        """
+        """ return a guaranteed empty result """
         rset = ResultSet([], 'Any X WHERE X eid -1')
         rset.req = self
         return rset
@@ -292,7 +289,7 @@
     # formating methods #######################################################
 
     def view(self, __vid, rset=None, __fallback_oid=None, __registry='views',
-             initargs=None, **kwargs):
+             initargs=None, w=None, **kwargs):
         """Select object with the given id (`__oid`) then render it.  If the
         object isn't selectable, try to select fallback object if
         `__fallback_oid` is specified.
@@ -310,15 +307,17 @@
         try:
             view =  self.vreg[__registry].select(__vid, self, rset=rset, **initargs)
         except RegistryException:
+            if __fallback_oid is None:
+                raise
             view =  self.vreg[__registry].select(__fallback_oid, self,
                                                  rset=rset, **initargs)
-        return view.render(**kwargs)
+        return view.render(w=w, **kwargs)
 
     def format_date(self, date, date_format=None, time=False):
         """return a string for a date time according to instance's
         configuration
         """
-        if date:
+        if date is not None:
             if date_format is None:
                 if time:
                     date_format = self.property_value('ui.datetime-format')
@@ -331,7 +330,7 @@
         """return a string for a time according to instance's
         configuration
         """
-        if time:
+        if time is not None:
             return ustrftime(time, self.property_value('ui.time-format'))
         return u''
 
--- a/rqlrewrite.py	Thu May 06 08:24:46 2010 +0200
+++ b/rqlrewrite.py	Mon Jul 19 15:36:16 2010 +0200
@@ -54,15 +54,15 @@
         except KeyError:
             continue
         stinfo = var.stinfo
-        if stinfo.get('uidrels'):
+        if stinfo.get('uidrel') is not None:
             continue # eid specified, no need for additional type specification
         try:
-            typerels = rqlst.defined_vars[varname].stinfo.get('typerels')
+            typerel = rqlst.defined_vars[varname].stinfo.get('typerel')
         except KeyError:
             assert varname in rqlst.aliases
             continue
-        if newroot is rqlst and typerels:
-            mytyperel = iter(typerels).next()
+        if newroot is rqlst and typerel is not None:
+            mytyperel = typerel
         else:
             for vref in newroot.defined_vars[varname].references():
                 rel = vref.relation()
@@ -93,7 +93,7 @@
                 # tree is not annotated yet, no scope set so add the restriction
                 # to the root
                 rel = newroot.add_type_restriction(var, possibletypes)
-            stinfo['typerels'] = frozenset((rel,))
+            stinfo['typerel'] = rel
             stinfo['possibletypes'] = possibletypes
 
 
@@ -155,7 +155,7 @@
         snippets: (varmap, list of rql expression)
                   with varmap a *tuple* (select var, snippet var)
         """
-        self.select = self.insert_scope = select
+        self.select = select
         self.solutions = solutions
         self.kwargs = kwargs
         self.u_varname = None
@@ -163,6 +163,7 @@
         self.exists_snippet = {}
         self.pending_keys = []
         self.existingvars = existingvars
+        self._insert_scope = None
         # we have to annotate the rqlst before inserting snippets, even though
         # we'll have to redo it latter
         self.annotate(select)
@@ -249,15 +250,19 @@
 
     def _insert_snippet(self, varmap, parent, new):
         if new is not None:
+            if self._insert_scope is None:
+                insert_scope = self.varinfo.get('stinfo', {}).get('scope', self.select)
+            else:
+                insert_scope = self._insert_scope
             if self.varinfo.get('stinfo', {}).get('optrelations'):
                 assert parent is None
-                self.insert_scope = self.snippet_subquery(varmap, new)
+                self._insert_scope = self.snippet_subquery(varmap, new)
                 self.insert_pending()
-                self.insert_scope = self.select
+                self._insert_scope = None
                 return
             new = n.Exists(new)
             if parent is None:
-                self.insert_scope.add_restriction(new)
+                insert_scope.add_restriction(new)
             else:
                 grandpa = parent.parent
                 or_ = n.Or(parent, new)
@@ -274,9 +279,9 @@
                         self._cleanup_inserted(new)
                     raise
                 else:
-                    self.insert_scope = new
+                    self._insert_scope = new
                     self.insert_pending()
-                    self.insert_scope = self.select
+                    self._insert_scope = None
             return new
         self.insert_pending()
 
@@ -348,7 +353,7 @@
         if need_null_test:
             snippetrqlst = n.Or(
                 n.make_relation(subselectvar, 'is', (None, None), n.Constant,
-                                operator='IS'),
+                                operator='='),
                 snippetrqlst)
         subselect.add_restriction(snippetrqlst)
         if self.u_varname:
--- a/rset.py	Thu May 06 08:24:46 2010 +0200
+++ b/rset.py	Mon Jul 19 15:36:16 2010 +0200
@@ -15,9 +15,8 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""The `ResultSet` class which is returned as result of an rql query
+"""The `ResultSet` class which is returned as result of an rql query"""
 
-"""
 __docformat__ = "restructuredtext en"
 
 from logilab.common.decorators import cached, clear_cache, copy_cache
@@ -45,14 +44,13 @@
     :type rql: str or unicode
     :param rql: the original RQL query string
     """
-    def __init__(self, results, rql, args=None, description=(), cachekey=None,
-                 rqlst=None):
+
+    def __init__(self, results, rql, args=None, description=(), rqlst=None):
         self.rows = results
         self.rowcount = results and len(results) or 0
         # original query and arguments
         self.rql = rql
         self.args = args
-        self.cachekey = cachekey
         # entity types for each cell (same shape as rows)
         # maybe discarded if specified when the query has been executed
         self.description = description
@@ -477,7 +475,10 @@
                 if role == 'subject':
                     rschema = eschema.subjrels[attr]
                     if rschema.final:
-                        entity[attr] = rowvalues[outerselidx]
+                        if attr == 'eid':
+                            entity.eid = rowvalues[outerselidx]
+                        else:
+                            entity[attr] = rowvalues[outerselidx]
                         continue
                 else:
                     rschema = eschema.objrels[attr]
@@ -599,7 +600,11 @@
         if rel is not None:
             index = rel.children[0].root_selection_index()
             if index is not None and self.rows[row][index]:
-                return self.get_entity(row, index), rel.r_type
+                try:
+                    entity = self.get_entity(row, index)
+                    return entity, rel.r_type
+                except NotAnEntity, exc:
+                    return None, None
         return None, None
 
     @cached
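The rset.py hunks drop the `cachekey` argument from ResultSet (and the postcreate.py and
schema.py hunks correspondingly drop the extra cache-key argument from rql()/execute()
call sites). A minimal construction sketch under the new signature, with made-up data:

    from cubicweb.rset import ResultSet

    rset = ResultSet([[123]], 'Any X WHERE X eid 123',
                     description=[('CWUser',)])
    # no cachekey positional or keyword argument any more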
--- a/rtags.py	Thu May 06 08:24:46 2010 +0200
+++ b/rtags.py	Mon Jul 19 15:36:16 2010 +0200
@@ -82,16 +82,14 @@
         self._tagdefs.clear()
 
     def _get_keys(self, stype, rtype, otype, tagged):
-        keys = [('*', rtype, '*', tagged),
-                ('*', rtype, otype, tagged),
-                (stype, rtype, '*', tagged),
-                (stype, rtype, otype, tagged)]
-        if stype == '*' or otype == '*':
-            keys.remove( ('*', rtype, '*', tagged) )
-            if stype == '*':
-                keys.remove( ('*', rtype, otype, tagged) )
-            if otype == '*':
-                keys.remove( (stype, rtype, '*', tagged) )
+        keys = []
+        if '*' not in (stype, otype):
+            keys.append(('*', rtype, '*', tagged))
+        if '*' != stype:
+            keys.append(('*', rtype, otype, tagged))
+        if '*' != otype:
+            keys.append((stype, rtype, '*', tagged))
+        keys.append((stype, rtype, otype, tagged))
         return keys
 
     def init(self, schema, check=True):
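The rewritten _get_keys builds the lookup keys from most generic to most specific instead
of building all four and pruning. With hypothetical arguments, it would now return:

    # _get_keys('CWUser', 'in_group', 'CWGroup', 'subject') ->
    #   [('*', 'in_group', '*', 'subject'),
    #    ('*', 'in_group', 'CWGroup', 'subject'),
    #    ('CWUser', 'in_group', '*', 'subject'),
    #    ('CWUser', 'in_group', 'CWGroup', 'subject')]
    # _get_keys('*', 'in_group', 'CWGroup', 'subject') ->
    #   [('*', 'in_group', '*', 'subject'),
    #    ('*', 'in_group', 'CWGroup', 'subject')]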
--- a/schema.py	Thu May 06 08:24:46 2010 +0200
+++ b/schema.py	Mon Jul 19 15:36:16 2010 +0200
@@ -174,7 +174,7 @@
         mainvars.append('U')
     if not mainvars:
         raise Exception('unable to guess selection variables')
-    return ','.join(mainvars)
+    return ','.join(sorted(mainvars))
 
 def split_expression(rqlstring):
     for expr in rqlstring.split(','):
@@ -471,10 +471,14 @@
             assert action in ('read', 'add', 'delete')
             if 'fromeid' in kwargs:
                 subjtype = session.describe(kwargs['fromeid'])[0]
+            elif 'frometype' in kwargs:
+                subjtype = kwargs.pop('frometype')
             else:
                 subjtype = None
             if 'toeid' in kwargs:
                 objtype = session.describe(kwargs['toeid'])[0]
+            elif 'toetype' in kwargs:
+                objtype = kwargs.pop('toetype')
             else:
                 objtype = None
         if objtype and subjtype:
@@ -628,13 +632,13 @@
         # start with a comma for bw compat, see below
         return ';' + self.mainvars + ';' + self.restriction
 
+    @classmethod
     def deserialize(cls, value):
         # XXX < 3.5.10 bw compat
         if not value.startswith(';'):
             return cls(value)
         _, mainvars, restriction = value.split(';', 2)
         return cls(restriction, mainvars)
-    deserialize = classmethod(deserialize)
 
     def check(self, entity, rtype, value):
         """return true if the value satisfy the constraint, else false"""
@@ -718,14 +722,14 @@
         if eidto is None:
             # checking constraint for an attribute relation
             restriction = 'S eid %(s)s, ' + self.restriction
-            args, ck = {'s': eidfrom}, 's'
+            args = {'s': eidfrom}
         else:
             restriction = 'S eid %(s)s, O eid %(o)s, ' + self.restriction
-            args, ck = {'s': eidfrom, 'o': eidto}, ('s', 'o')
+            args = {'s': eidfrom, 'o': eidto}
         rql = 'Any %s WHERE %s' % (self.mainvars,  restriction)
         if self.distinct_query:
             rql = 'DISTINCT ' + rql
-        return session.execute(rql, args, ck, build_descr=False)
+        return session.execute(rql, args, build_descr=False)
 
 
 class RQLConstraint(RepoEnforcedRQLConstraintMixIn, RQLVocabularyConstraint):
@@ -845,16 +849,13 @@
             except KeyError:
                 pass
         rql, has_perm_defs, keyarg = self.transform_has_permission()
-        if creating:
-            # when creating an entity, consider has_*_permission satisfied
-            if has_perm_defs:
-                return True
-            return False
+        # when creating an entity, expression related to X satisfied
+        if creating and 'X' in self.rqlst.defined_vars:
+            return True
         if keyarg is None:
             kwargs.setdefault('u', session.user.eid)
-            cachekey = kwargs.keys()
             try:
-                rset = session.execute(rql, kwargs, cachekey, build_descr=True)
+                rset = session.execute(rql, kwargs, build_descr=True)
             except NotImplementedError:
                 self.critical('cant check rql expression, unsupported rql %s', rql)
                 if self.eid is not None:
@@ -867,6 +868,11 @@
                 if self.eid is not None:
                     session.local_perm_cache[key] = False
                 return False
+            except Unauthorized, ex:
+                self.debug('unauthorized %s: %s', rql, str(ex))
+                if self.eid is not None:
+                    session.local_perm_cache[key] = False
+                return False
         else:
             rset = session.eid_rset(kwargs[keyarg])
         # if no special has_*_permission relation in the rql expression, just
@@ -985,8 +991,8 @@
 
 class workflowable_definition(ybo.metadefinition):
     """extends default EntityType's metaclass to add workflow relations
-    (i.e. in_state and wf_info_for).
-    This is the default metaclass for WorkflowableEntityType
+    (i.e. in_state, wf_info_for and custom_workflow). This is the default
+    metaclass for WorkflowableEntityType.
     """
     def __new__(mcs, name, bases, classdict):
         abstract = classdict.pop('__abstract__', False)
@@ -996,23 +1002,33 @@
             make_workflowable(cls)
         return cls
 
+class WorkflowableEntityType(ybo.EntityType):
+    """Use this base class instead of :class:`EntityType` to have workflow
+    relations (i.e. `in_state`, `wf_info_for` and `custom_workflow`) on your
+    entity type.
+    """
+    __metaclass__ = workflowable_definition
+    __abstract__ = True
+
+
 def make_workflowable(cls, in_state_descr=None):
+    """Adds workflow relations as :class:`WorkflowableEntityType`, but usable on
+    existing classes which are not using that base class.
+    """
     existing_rels = set(rdef.name for rdef in cls.__relations__)
     # let relation types defined in cw.schemas.workflow carrying
     # cardinality, constraints and other relation definition properties
+    etype = getattr(cls, 'name', cls.__name__)
     if 'custom_workflow' not in existing_rels:
-        rdef = ybo.SubjectRelation('Workflow')
-        yams_add_relation(cls.__relations__, rdef, 'custom_workflow')
+        rdef = ybo.RelationDefinition(etype, 'custom_workflow', 'Workflow')
+        yams_add_relation(cls.__relations__, rdef)
     if 'in_state' not in existing_rels:
-        rdef = ybo.SubjectRelation('State', description=in_state_descr)
-        yams_add_relation(cls.__relations__, rdef, 'in_state')
+        rdef = ybo.RelationDefinition(etype, 'in_state', 'State',
+                                      description=in_state_descr)
+        yams_add_relation(cls.__relations__, rdef)
     if 'wf_info_for' not in existing_rels:
-        rdef = ybo.ObjectRelation('TrInfo')
-        yams_add_relation(cls.__relations__, rdef, 'wf_info_for')
-
-class WorkflowableEntityType(ybo.EntityType):
-    __metaclass__ = workflowable_definition
-    __abstract__ = True
+        rdef = ybo.RelationDefinition('TrInfo', 'wf_info_for', etype)
+        yams_add_relation(cls.__relations__, rdef)
 
 
 # schema loading ##############################################################
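To make the new WorkflowableEntityType base class concrete, a minimal schema sketch;
`Ticket` and its attribute are made up for illustration:

    from yams.buildobjs import String
    from cubicweb.schema import WorkflowableEntityType

    class Ticket(WorkflowableEntityType):
        """automatically gets in_state, wf_info_for and custom_workflow"""
        title = String(required=True, maxsize=128)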
--- a/schemas/Bookmark.py	Thu May 06 08:24:46 2010 +0200
+++ b/schemas/Bookmark.py	Mon Jul 19 15:36:16 2010 +0200
@@ -34,7 +34,7 @@
         }
 
     title = String(required=True, maxsize=128, internationalizable=True)
-    path  = String(maxsize=512, required=True,
+    path  = String(maxsize=2048, required=True,
                    description=_("relative url of the bookmarked page"))
 
     bookmarked_by = SubjectRelation('CWUser',
--- a/schemas/base.py	Thu May 06 08:24:46 2010 +0200
+++ b/schemas/base.py	Mon Jul 19 15:36:16 2010 +0200
@@ -176,7 +176,8 @@
     name = String(required=True, indexed=True, internationalizable=True, maxsize=100,
                   description=_('name or identifier of the permission'))
     label = String(required=True, internationalizable=True, maxsize=100,
-                   description=_('distinct label to distinguate between other permission entity of the same name'))
+                   description=_('distinct label to distinguate between other '
+                                 'permission entity of the same name'))
     require_group = SubjectRelation('CWGroup',
                                     description=_('groups to which the permission is granted'))
 
@@ -210,7 +211,7 @@
         'add':    ('managers', 'users'),
         'delete': ('managers', 'owners'),
         }
-    cardinality = '*1'
+    cardinality = '**'
     symmetric = True
     # NOTE: the 'object = ExternalUri' declaration will still be mandatory
     #       in the cube's schema.
--- a/schemas/bootstrap.py	Thu May 06 08:24:46 2010 +0200
+++ b/schemas/bootstrap.py	Mon Jul 19 15:36:16 2010 +0200
@@ -16,8 +16,8 @@
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """core CubicWeb schema necessary for bootstrapping the actual instance's schema
+"""
 
-"""
 __docformat__ = "restructuredtext en"
 _ = unicode
 
--- a/schemaviewer.py	Thu May 06 08:24:46 2010 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,242 +0,0 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-#
-# This file is part of CubicWeb.
-#
-# CubicWeb is free software: you can redistribute it and/or modify it under the
-# terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option)
-# any later version.
-#
-# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""an helper class to display CubicWeb schema using ureports
-
-"""
-__docformat__ = "restructuredtext en"
-_ = unicode
-
-from logilab.common.ureports import Section, Title, Table, Link, Span, Text
-
-from yams.schema2dot import CARD_MAP
-from yams.schema import RelationDefinitionSchema
-
-I18NSTRINGS = [_('read'), _('add'), _('delete'), _('update'), _('order')]
-
-
-class SchemaViewer(object):
-    """return an ureport layout for some part of a schema"""
-    def __init__(self, req=None, encoding=None):
-        self.req = req
-        if req is not None:
-            self.req.add_css('cubicweb.schema.css')
-            self._possible_views = req.vreg['views'].possible_views
-            if not encoding:
-                encoding = req.encoding
-        else:
-            self._possible_views = lambda x: ()
-        self.encoding = encoding
-
-    def format_acls(self, schema, access_types):
-        """return a layout displaying access control lists"""
-        data = [self.req._('access type'), self.req._('groups')]
-        for access_type in access_types:
-            data.append(self.req._(access_type))
-            acls = [Link(self.req.build_url('cwgroup/%s' % group), self.req._(group))
-                    for group in schema.get_groups(access_type)]
-            acls += (Text(rqlexp.expression) for rqlexp in schema.get_rqlexprs(access_type))
-            acls = [n for _n in acls for n in (_n, Text(', '))][:-1]
-            data.append(Span(children=acls))
-        return Section(children=(Table(cols=2, cheaders=1, rheaders=1, children=data),),
-                       klass='acl')
-
-
-    def visit_schema(self, schema, display_relations=0, skiptypes=()):
-        """get a layout for a whole schema"""
-        title = Title(self.req._('Schema %s') % schema.name,
-                      klass='titleUnderline')
-        layout = Section(children=(title,))
-        esection = Section(children=(Title(self.req._('Entities'),
-                                           klass='titleUnderline'),))
-        layout.append(esection)
-        eschemas = [eschema for eschema in schema.entities()
-                    if not (eschema.final or eschema in skiptypes)]
-        for eschema in sorted(eschemas):
-            esection.append(self.visit_entityschema(eschema, skiptypes))
-        if display_relations:
-            title = Title(self.req._('Relations'), klass='titleUnderline')
-            rsection = Section(children=(title,))
-            layout.append(rsection)
-            relations = [rschema for rschema in schema.relations()
-                         if not (rschema.final or rschema.type in skiptypes)]
-            keys = [(rschema.type, rschema) for rschema in relations]
-            for key, rschema in sorted(keys):
-                relstr = self.visit_relationschema(rschema)
-                rsection.append(relstr)
-        return layout
-
-    def _entity_attributes_data(self, eschema):
-        _ = self.req._
-        data = [_('attribute'), _('type'), _('default'), _('constraints')]
-        for rschema, aschema in eschema.attribute_definitions():
-            rdef = eschema.rdef(rschema)
-            if not rdef.may_have_permission('read', self.req):
-                continue
-            aname = rschema.type
-            if aname == 'eid':
-                continue
-            data.append('%s (%s)' % (aname, _(aname)))
-            data.append(_(aschema.type))
-            defaultval = eschema.default(aname)
-            if defaultval is not None:
-                default = self.to_string(defaultval)
-            elif rdef.cardinality[0] == '1':
-                default = _('required field')
-            else:
-                default = ''
-            data.append(default)
-            constraints = rschema.rproperty(eschema.type, aschema.type,
-                                            'constraints')
-            data.append(', '.join(str(constr) for constr in constraints))
-        return data
-
-    def eschema_link_url(self, eschema):
-        return self.req.build_url('cwetype/%s' % eschema)
-
-    def rschema_link_url(self, rschema):
-        return self.req.build_url('cwrtype/%s' % rschema)
-
-    def possible_views(self, etype):
-        rset = self.req.etype_rset(etype)
-        return [v for v in self._possible_views(self.req, rset)
-                if v.category != 'startupview']
-
-    def stereotype(self, name):
-        return Span((' <<%s>>' % name,), klass='stereotype')
-
-    def visit_entityschema(self, eschema, skiptypes=()):
-        """get a layout for an entity schema"""
-        etype = eschema.type
-        layout = Section(children=' ', klass='clear')
-        layout.append(Link(etype,'&#160;' , id=etype)) # anchor
-        title = Link(self.eschema_link_url(eschema), etype)
-        boxchild = [Section(children=(title, ' (%s)'% eschema.display_name(self.req)), klass='title')]
-        table = Table(cols=4, rheaders=1, klass='listing',
-                      children=self._entity_attributes_data(eschema))
-        boxchild.append(Section(children=(table,), klass='body'))
-        data = []
-        data.append(Section(children=boxchild, klass='box'))
-        data.append(Section(children='', klass='vl'))
-        data.append(Section(children='', klass='hl'))
-        t_vars = []
-        rels = []
-        first = True
-        for rschema, targetschemas, role in eschema.relation_definitions():
-            if rschema.type in skiptypes:
-                continue
-            rschemaurl = self.rschema_link_url(rschema)
-            for oeschema in targetschemas:
-                rdef = rschema.role_rdef(eschema, oeschema, role)
-                if not rdef.may_have_permission('read', self.req):
-                    continue
-                label = rschema.type
-                if role == 'subject':
-                    cards = rschema.rproperty(eschema, oeschema, 'cardinality')
-                else:
-                    cards = rschema.rproperty(oeschema, eschema, 'cardinality')
-                    cards = cards[::-1]
-                label = '%s %s (%s) %s' % (CARD_MAP[cards[1]], label,
-                                           display_name(self.req, label, role),
-                                           CARD_MAP[cards[0]])
-                rlink = Link(rschemaurl, label)
-                elink = Link(self.eschema_link_url(oeschema), oeschema.type)
-                if first:
-                    t_vars.append(Section(children=(elink,), klass='firstvar'))
-                    rels.append(Section(children=(rlink,), klass='firstrel'))
-                    first = False
-                else:
-                    t_vars.append(Section(children=(elink,), klass='var'))
-                    rels.append(Section(children=(rlink,), klass='rel'))
-        data.append(Section(children=rels, klass='rels'))
-        data.append(Section(children=t_vars, klass='vars'))
-        layout.append(Section(children=data, klass='entityAttributes'))
-        if eschema.final: # stop here for final entities
-            return layout
-        _ = self.req._
-        if self.req.user.matching_groups('managers'):
-            # layout.append(self.format_acls(eschema, ('read', 'add', 'delete', 'update')))
-            # possible views for this entity type
-            views = [_(view.title) for view in self.possible_views(etype)]
-            layout.append(Section(children=(Table(cols=1, rheaders=1,
-                                                  children=[_('views')]+views),),
-                                  klass='views'))
-        return layout
-
-    def visit_relationschema(self, rschema, title=True):
-        """get a layout for a relation schema"""
-        _ = self.req._
-        if title:
-            title = Link(self.rschema_link_url(rschema), rschema.type)
-            stereotypes = []
-            if rschema.meta:
-                stereotypes.append('meta')
-            if rschema.symmetric:
-                stereotypes.append('symmetric')
-            if rschema.inlined:
-                stereotypes.append('inlined')
-            title = Section(children=(title, ' (%s)'%rschema.display_name(self.req)), klass='title')
-            if stereotypes:
-                title.append(self.stereotype(','.join(stereotypes)))
-            layout = Section(children=(title,), klass='schema')
-        else:
-            layout = Section(klass='schema')
-        data = [_('from'), _('to')]
-        schema = rschema.schema
-        rschema_objects = rschema.objects()
-        if rschema_objects:
-            # might be empty
-            properties = [p for p in RelationDefinitionSchema.rproperty_defs(rschema_objects[0])
-                          if not p in ('cardinality', 'composite', 'eid')]
-        else:
-            properties = []
-        data += [_(prop) for prop in properties]
-        cols = len(data)
-        done = set()
-        for subjtype, objtypes in rschema.associations():
-            for objtype in objtypes:
-                if (subjtype, objtype) in done:
-                    continue
-                done.add((subjtype, objtype))
-                if rschema.symmetric:
-                    done.add((objtype, subjtype))
-                data.append(Link(self.eschema_link_url(schema[subjtype]), subjtype))
-                data.append(Link(self.eschema_link_url(schema[objtype]), objtype))
-                rdef = rschema.rdef(subjtype, objtype)
-                for prop in properties:
-                    val = getattr(rdef, prop)
-                    if val is None:
-                        val = ''
-                    elif isinstance(val, (list, tuple)):
-                        val = ', '.join(str(v) for v in val)
-                    elif val and isinstance(val, basestring):
-                        val = _(val)
-                    else:
-                        val = str(val)
-                    data.append(Text(val))
-        table = Table(cols=cols, rheaders=1, children=data, klass='listing')
-        layout.append(Section(children=(table,), klass='relationDefinition'))
-        #if self.req.user.matching_groups('managers'):
-        #    layout.append(self.format_acls(rschema, ('read', 'add', 'delete')))
-        layout.append(Section(children='', klass='clear'))
-        return layout
-
-    def to_string(self, value):
-        """used to converte arbitrary values to encoded string"""
-        if isinstance(value, unicode):
-            return value.encode(self.encoding, 'replace')
-        return str(value)
--- a/selectors.py	Thu May 06 08:24:46 2010 +0200
+++ b/selectors.py	Mon Jul 19 15:36:16 2010 +0200
@@ -111,12 +111,12 @@
 
     class UserLink(component.Component):
 	'''if the user is the anonymous user, build a link to login else a link
-	to the connected user object with a loggout link
+	to the connected user object with a logout link
 	'''
 	__regid__ = 'loggeduserlink'
 
 	def call(self):
-	    if self._cw.cnx.anonymous_connection:
+	    if self._cw.session.anonymous_session:
 		# display login link
 		...
 	    else:
@@ -191,6 +191,7 @@
 
 .. |cubicweb| replace:: *CubicWeb*
 """
+
 __docformat__ = "restructuredtext en"
 
 import logging
@@ -623,28 +624,35 @@
         return rset and self.match_expected(len(rset.rows[0])) or 0
 
 
-@objectify_selector
-@lltrace
-def paginated_rset(cls, req, rset=None, **kwargs):
-    """Return 1 for result set with more rows than a page size.
+class paginated_rset(Selector):
+    """Return 1 or more for result set with more rows than one or more page
+    size.  You can specify expected number of pages to the initializer (default
+    to one), and you'll get that number of pages as score if the result set is
+    big enough.
 
     Page size is searched in (respecting order):
     * a `page_size` argument
     * a `page_size` form parameter
     * the :ref:`navigation.page-size` property
     """
-    if rset is None:
-        return 0
-    page_size = kwargs.get('page_size')
-    if page_size is None:
-        page_size = req.form.get('page_size')
+    def __init__(self, nbpages=1):
+        assert nbpages > 0
+        self.nbpages = nbpages
+
+    @lltrace
+    def __call__(self, cls, req, rset=None, **kwargs):
+        if rset is None:
+            return 0
+        page_size = kwargs.get('page_size')
         if page_size is None:
-            page_size = req.property_value('navigation.page-size')
-        else:
-            page_size = int(page_size)
-    if rset.rowcount <= page_size:
-        return 0
-    return 1
+            page_size = req.form.get('page_size')
+            if page_size is None:
+                page_size = req.property_value('navigation.page-size')
+            else:
+                page_size = int(page_size)
+        if rset.rowcount <= (page_size*self.nbpages):
+            return 0
+        return self.nbpages
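A minimal usage sketch of the new class-based selector (the view name, regid and
base class below are illustrative, not part of this changeset)::

    from cubicweb.selectors import paginated_rset
    from cubicweb.view import EntityView

    class BigResultSetView(EntityView):
        """hypothetical view selectable only when the result set spans
        more than two pages; its score will then be 2"""
        __regid__ = 'bigrsetview'
        __select__ = EntityView.__select__ & paginated_rset(nbpages=2)

        def call(self):
            self.w(u'<p>%s rows</p>' % len(self.cw_rset))

`paginated_rset()` without argument keeps the former behaviour: score 1 as soon
as the result set does not fit on a single page.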
 
 
 @objectify_selector
@@ -689,6 +697,16 @@
         return rset and self.match_expected(len(rset.column_types(col))) or 0
 
 
+@objectify_selector
+def logged_user_in_rset(cls, req, rset=None, row=None, col=0, **kwargs):
+    """Return positive score if the result set at the specified row / col
+    contains the eid of the logged user.
+    """
+    if rset is None:
+        return 0
+    return req.user.eid == rset[row or 0][col]
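For instance, a component meant to show up only on the logged-in user's own
CWUser entity could combine it with other selectors (sketch; the regid and
component class are invented)::

    from cubicweb.selectors import implements, logged_user_in_rset
    from cubicweb.web import component

    class UserProfileBox(component.Component):
        """hypothetical component rendered only when the entity in the
        result set is the user currently logged in"""
        __regid__ = 'userprofilebox'
        __select__ = implements('CWUser') & logged_user_in_rset()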
+
+
 # entity selectors #############################################################
 
 class non_final_entity(EClassSelector):
@@ -764,7 +782,7 @@
 
     * `action`, a relation schema action (e.g. one of 'read', 'add', 'delete',
       default to 'read') which must be granted to the user, else a 0 score will
-      be returned
+      be returned. Give None if you don't want any permission checking.
 
     * `strict`, boolean (default to False) telling what to do when the user has
       not globally the permission for the action (eg the action is not granted
@@ -822,11 +840,14 @@
         if self.target_etype is not None:
             try:
                 rdef = rschema.role_rdef(eschema, self.target_etype, self.role)
-                if not rdef.may_have_permission(self.action, req):
-                    return 0
             except KeyError:
                 return 0
-        else:
+            if self.action and not rdef.may_have_permission(self.action, req):
+                return 0
+            teschema = req.vreg.schema.eschema(self.target_etype)
+            if not teschema.may_have_permission('read', req):
+                return 0
+        elif self.action:
             return rschema.may_have_permission(self.action, req, eschema, self.role)
         return 1
 
@@ -834,13 +855,19 @@
         rschema = self._get_rschema(entity)
         if rschema is None:
             return 0 # relation not supported
+        if self.action:
+            if self.target_etype is not None:
+                rschema = rschema.role_rdef(entity.e_schema, self.target_etype, self.role)
+            if self.role == 'subject':
+                if not rschema.has_perm(entity._cw, self.action, fromeid=entity.eid):
+                    return 0
+            elif not rschema.has_perm(entity._cw, self.action, toeid=entity.eid):
+                return 0
         if self.target_etype is not None:
-            rschema = rschema.role_rdef(entity.e_schema, self.target_etype, self.role)
-        if self.role == 'subject':
-            if not rschema.has_perm(entity._cw, 'add', fromeid=entity.eid):
+            req = entity._cw
+            teschema = req.vreg.schema.eschema(self.target_etype)
+            if not teschema.may_have_permission('read', req):
                 return 0
-        elif not rschema.has_perm(entity._cw, 'add', toeid=entity.eid):
-            return 0
         return 1
 
 
@@ -1044,7 +1071,7 @@
     def score(self, req, rset, row, col):
         try:
             return len(req.execute(self.rql, {'x': rset[row][col],
-                                              'u': req.user.eid}, 'x'))
+                                              'u': req.user.eid}))
         except Unauthorized:
             return 0
 
@@ -1052,12 +1079,24 @@
 
 @objectify_selector
 @lltrace
+def no_cnx(cls, req, **kwargs):
+    """Return 1 if the web session has no connection set. This occurs when
+    anonymous access is not allowed and user isn't authenticated.
+
+    May only be used on the web side, not on the data repository side.
+    """
+    if not req.cnx:
+        return 1
+    return 0
+
+@objectify_selector
+@lltrace
 def authenticated_user(cls, req, **kwargs):
     """Return 1 if the user is authenticated (e.g. not the anonymous user).
 
     May only be used on the web side, not on the data repository side.
     """
-    if req.cnx.anonymous_connection:
+    if req.session.anonymous_session:
         return 0
     return 1
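On the web side those two selectors are complementary; a sketch of two
hypothetical startup views, one for sessions without a repository connection
(anonymous access disabled, nobody logged in yet), the other for authenticated
users::

    from cubicweb.selectors import no_cnx, authenticated_user
    from cubicweb.view import StartupView

    class LoginPromptView(StartupView):
        """hypothetical view shown while there is no repository connection"""
        __regid__ = 'loginprompt'
        __select__ = no_cnx()

        def call(self):
            self.w(u'<p>please log in</p>')

    class WelcomeView(StartupView):
        """hypothetical counterpart for authenticated users"""
        __regid__ = 'welcome'
        __select__ = authenticated_user()

        def call(self):
            self.w(u'<p>hello %s</p>' % self._cw.user.login)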
 
--- a/server/__init__.py	Thu May 06 08:24:46 2010 +0200
+++ b/server/__init__.py	Mon Jul 19 15:36:16 2010 +0200
@@ -180,6 +180,7 @@
                         {'name': unicode(group)})
     create_user(session, login, pwd, 'managers')
     session.commit()
+    repo.shutdown()
     # reloging using the admin user
     config._cubes = None # avoid assertion error
     repo, cnx = in_memory_cnx(config, login, password=pwd)
--- a/server/checkintegrity.py	Thu May 06 08:24:46 2010 +0200
+++ b/server/checkintegrity.py	Mon Jul 19 15:36:16 2010 +0200
@@ -88,32 +88,40 @@
     else:
         yield eschema
 
-def reindex_entities(schema, session, withpb=True):
+def reindex_entities(schema, session, withpb=True, etypes=None):
     """reindex all entities in the repository"""
     # deactivate modification_date hook since we don't want them
     # to be updated due to the reindexation
     repo = session.repo
     cursor = session.pool['system']
-    if not repo.system_source.dbhelper.has_fti_table(cursor):
+    dbhelper = session.repo.system_source.dbhelper
+    if not dbhelper.has_fti_table(cursor):
         print 'no text index table'
         dbhelper.init_fti(cursor)
     repo.system_source.do_fti = True  # ensure full-text indexation is activated
-    etypes = set()
-    for eschema in schema.entities():
-        if eschema.final:
-            continue
-        indexable_attrs = tuple(eschema.indexable_attributes()) # generator
-        if not indexable_attrs:
-            continue
-        for container in etype_fti_containers(eschema):
-            etypes.add(container)
-    print 'Reindexing entities of type %s' % \
-          ', '.join(sorted(str(e) for e in etypes))
     if withpb:
         pb = ProgressBar(len(etypes) + 1)
-    # first monkey patch Entity.check to disable validation
-    # clear fti table first
-    session.system_sql('DELETE FROM %s' % session.repo.system_source.dbhelper.fti_table)
+    if etypes is None:
+        print 'Reindexing entities'
+        etypes = set()
+        for eschema in schema.entities():
+            if eschema.final:
+                continue
+            indexable_attrs = tuple(eschema.indexable_attributes()) # generator
+            if not indexable_attrs:
+                continue
+            for container in etype_fti_containers(eschema):
+                etypes.add(container)
+        # clear fti table first
+        session.system_sql('DELETE FROM %s' % dbhelper.fti_table)
+    else:
+        print 'Reindexing entities of type %s' % \
+              ', '.join(sorted(str(e) for e in etypes))
+        # clear fti table first. Use subquery for sql compatibility
+        session.system_sql("DELETE FROM %s WHERE EXISTS(SELECT 1 FROM ENTITIES "
+                           "WHERE eid=%s AND type IN (%s))" % (
+                               dbhelper.fti_table, dbhelper.fti_uid_attr,
+                               ','.join("'%s'" % etype for etype in etypes)))
     if withpb:
         pb.update()
     # reindex entities by generating rql queries which set all indexable
@@ -130,7 +138,7 @@
     """check serialized schema"""
     print 'Checking serialized schema'
     unique_constraints = ('SizeConstraint', 'FormatConstraint',
-                          'VocabularyConstraint', 'RQLConstraint',
+                          'VocabularyConstraint',
                           'RQLVocabularyConstraint')
     rql = ('Any COUNT(X),RN,SN,ON,CTN GROUPBY RN,SN,ON,CTN ORDERBY 1 '
            'WHERE X is CWConstraint, R constrained_by X, '
@@ -142,6 +150,8 @@
         if cstrname in unique_constraints:
             print "ERROR: got %s %r constraints on relation %s.%s.%s" % (
                 count, cstrname, sn, rn, on)
+            if fix:
+                print 'dunno how to fix, do it yourself'
 
 
 
--- a/server/hook.py	Thu May 06 08:24:46 2010 +0200
+++ b/server/hook.py	Mon Jul 19 15:36:16 2010 +0200
@@ -46,8 +46,8 @@
 `timestamp` attributes, but *their `_cw` attribute is None*.
 
 Session hooks (eg session_open, session_close) have no special attribute.
+"""
 
-"""
 from __future__ import with_statement
 
 __docformat__ = "restructuredtext en"
@@ -122,6 +122,7 @@
 _MARKER = object()
 def entity_oldnewvalue(entity, attr):
     """returns the couple (old attr value, new attr value)
+
     NOTE: will only work in a before_update_entity hook
     """
     # get new value and remove from local dict to force a db query to
@@ -130,6 +131,8 @@
     oldvalue = getattr(entity, attr)
     if newvalue is not _MARKER:
         entity[attr] = newvalue
+    else:
+        newvalue = oldvalue
     return oldvalue, newvalue
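With this fallback, a `before_update_entity` hook can compare old and new
values without special-casing attributes that are not part of the current
edition; a sketch (the regid, entity type and attribute are illustrative)::

    from cubicweb.selectors import implements
    from cubicweb.server import hook

    class TitleChangedHook(hook.Hook):
        """hypothetical hook reacting to changes of a 'title' attribute"""
        __regid__ = 'mycube.title_changed'
        __select__ = hook.Hook.__select__ & implements('Card')
        events = ('before_update_entity',)

        def __call__(self):
            old, new = hook.entity_oldnewvalue(self.entity, 'title')
            # thanks to the fallback, old == new when 'title' is untouched
            if old != new:
                self.warning('title changed from %r to %r', old, new)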
 
 
@@ -304,9 +307,9 @@
             assert self.rtype in self.object_relations
             meid, seid = self.eidto, self.eidfrom
         self._cw.execute(
-            'SET E %s P WHERE X %s P, X eid %%(x)s, E eid %%(e)s, NOT E %s P'\
+            'SET E %s P WHERE X %s P, X eid %%(x)s, E eid %%(e)s, NOT E %s P'
             % (self.main_rtype, self.main_rtype, self.main_rtype),
-            {'x': meid, 'e': seid}, ('x', 'e'))
+            {'x': meid, 'e': seid})
 
 
 class PropagateSubjectRelationAddHook(Hook):
@@ -326,12 +329,12 @@
             if rel in eschema.subjrels:
                 execute('SET R %s P WHERE X eid %%(x)s, P eid %%(p)s, '
                         'X %s R, NOT R %s P' % (self.rtype, rel, self.rtype),
-                        {'x': self.eidfrom, 'p': self.eidto}, 'x')
+                        {'x': self.eidfrom, 'p': self.eidto})
         for rel in self.object_relations:
             if rel in eschema.objrels:
                 execute('SET R %s P WHERE X eid %%(x)s, P eid %%(p)s, '
                         'R %s X, NOT R %s P' % (self.rtype, rel, self.rtype),
-                        {'x': self.eidfrom, 'p': self.eidto}, 'x')
+                        {'x': self.eidfrom, 'p': self.eidto})
 
 
 class PropagateSubjectRelationDelHook(Hook):
@@ -351,12 +354,12 @@
             if rel in eschema.subjrels:
                 execute('DELETE R %s P WHERE X eid %%(x)s, P eid %%(p)s, '
                         'X %s R' % (self.rtype, rel),
-                        {'x': self.eidfrom, 'p': self.eidto}, 'x')
+                        {'x': self.eidfrom, 'p': self.eidto})
         for rel in self.object_relations:
             if rel in eschema.objrels:
                 execute('DELETE R %s P WHERE X eid %%(x)s, P eid %%(p)s, '
                         'R %s X' % (self.rtype, rel),
-                        {'x': self.eidfrom, 'p': self.eidto}, 'x')
+                        {'x': self.eidfrom, 'p': self.eidto})
 
 
 # abstract classes for operation ###############################################
@@ -366,10 +369,10 @@
     commit / rollback transations. Possible events are:
 
     precommit:
-      the pool is preparing to commit. You shouldn't do anything things which
-      has to be reverted if the commit fail at this point, but you can freely
+      the pool is preparing to commit. You shouldn't do anything which
+      has to be reverted if the commit fails at this point, but you can freely
       do any heavy computation or raise an exception if the commit can't go.
-      You can add some new operation during this phase but their precommit
+      You can add some new operations during this phase but their precommit
       event won't be triggered
 
     commit:
@@ -388,6 +391,12 @@
        * a commit event failed, all operations which are not been triggered for
          commit are rollbacked
 
+    postcommit:
+      The transaction is over. All the ORM entities are
+      invalid. If you need to work on the database, you need to start
+      a new transaction, for instance using a new internal_session,
+      which you will need to commit (and close!).
+
     order of operations may be important, and is controlled according to
     the insert_index's method output
     """
@@ -473,23 +482,27 @@
 
 set_log_methods(Operation, getLogger('cubicweb.session'))
 
+def _container_add(container, value):
+    {set: set.add, list: list.append}[container.__class__](container, value)
 
-def set_operation(session, datakey, value, opcls, **opkwargs):
+def set_operation(session, datakey, value, opcls, containercls=set, **opkwargs):
     """Search for session.transaction_data[`datakey`] (expected to be a set):
 
     * if found, simply append `value`
 
-    * else, initialize it to set([`value`]) and instantiate the given `opcls`
-      operation class with additional keyword arguments.
+    * else, initialize it to containercls([`value`]) and instantiate the given
+      `opcls` operation class with additional keyword arguments. `containercls`
+      is a set by default. Give `list` if you want to keep arrival ordering.
 
     You should use this instead of creating one operation for each `value`,
     since handling operations becomes costly on massive data import.
     """
     try:
-        session.transaction_data[datakey].add(value)
+        _container_add(session.transaction_data[datakey], value)
     except KeyError:
         opcls(session, **opkwargs)
-        session.transaction_data[datakey] = set((value,))
+        session.transaction_data[datakey] = containercls()
+        _container_add(session.transaction_data[datakey], value)
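A sketch of a hook using the extended helper to accumulate eids in arrival
order and process them once per transaction (all names below are invented)::

    from cubicweb.server import hook
    from cubicweb.server.hook import Operation, set_operation

    class ProcessPendingOp(Operation):
        """hypothetical operation consuming the accumulated eids"""

        def precommit_event(self):
            # the list keeps the order in which hooks pushed the eids
            for eid in self.session.transaction_data.pop('pending-eids', ()):
                process_pending(self.session, eid)  # invented helper

    class QueuePendingHook(hook.Hook):
        """hypothetical hook queueing every added entity"""
        __regid__ = 'mycube.queue_pending'
        events = ('after_add_entity',)

        def __call__(self):
            # a single ProcessPendingOp per transaction, however many entities
            set_operation(self._cw, 'pending-eids', self.entity.eid,
                          ProcessPendingOp, containercls=list)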
 
 
 class LateOperation(Operation):
--- a/server/migractions.py	Thu May 06 08:24:46 2010 +0200
+++ b/server/migractions.py	Mon Jul 19 15:36:16 2010 +0200
@@ -25,9 +25,8 @@
 The following data actions are supported for now:
 * add an entity
 * execute raw RQL queries
-
+"""
 
-"""
 from __future__ import with_statement
 
 __docformat__ = "restructuredtext en"
@@ -281,9 +280,9 @@
         if self.session:
             self.session.set_pool()
 
-    def rqlexecall(self, rqliter, cachekey=None, ask_confirm=True):
+    def rqlexecall(self, rqliter, ask_confirm=True):
         for rql, kwargs in rqliter:
-            self.rqlexec(rql, kwargs, cachekey, ask_confirm=ask_confirm)
+            self.rqlexec(rql, kwargs, ask_confirm=ask_confirm)
 
     @cached
     def _create_context(self):
@@ -374,14 +373,14 @@
             # handle groups
             newgroups = list(erschema.get_groups(action))
             for geid, gname in self.rqlexec('Any G, GN WHERE T %s G, G name GN, '
-                                            'T eid %%(x)s' % perm, {'x': teid}, 'x',
+                                            'T eid %%(x)s' % perm, {'x': teid},
                                             ask_confirm=False):
                 if not gname in newgroups:
                     if not confirm or self.confirm('Remove %s permission of %s to %s?'
                                                    % (action, erschema, gname)):
                         self.rqlexec('DELETE T %s G WHERE G eid %%(x)s, T eid %s'
                                      % (perm, teid),
-                                     {'x': geid}, 'x', ask_confirm=False)
+                                     {'x': geid}, ask_confirm=False)
                 else:
                     newgroups.remove(gname)
             for gname in newgroups:
@@ -389,7 +388,7 @@
                                                % (action, erschema, gname)):
                     self.rqlexec('SET T %s G WHERE G eid %%(x)s, T eid %s'
                                  % (perm, teid),
-                                 {'x': gm[gname]}, 'x', ask_confirm=False)
+                                 {'x': gm[gname]}, ask_confirm=False)
             # handle rql expressions
             newexprs = dict((expr.expression, expr) for expr in erschema.get_rqlexprs(action))
             for expreid, expression in self.rqlexec('Any E, EX WHERE T %s E, E expression EX, '
@@ -401,7 +400,7 @@
                         # deleting the relation will delete the expression entity
                         self.rqlexec('DELETE T %s E WHERE E eid %%(x)s, T eid %s'
                                      % (perm, teid),
-                                     {'x': expreid}, 'x', ask_confirm=False)
+                                     {'x': expreid}, ask_confirm=False)
                 else:
                     newexprs.pop(expression)
             for expression in newexprs.values():
@@ -412,10 +411,11 @@
                                  'X expression %%(expr)s, X mainvars %%(vars)s, T %s X '
                                  'WHERE T eid %%(x)s' % perm,
                                  {'expr': expr, 'exprtype': exprtype,
-                                  'vars': expression.mainvars, 'x': teid}, 'x',
+                                  'vars': expression.mainvars, 'x': teid},
                                  ask_confirm=False)
 
-    def _synchronize_rschema(self, rtype, syncrdefs=True, syncperms=True, syncprops=True):
+    def _synchronize_rschema(self, rtype, syncrdefs=True,
+                             syncperms=True, syncprops=True):
         """synchronize properties of the persistent relation schema against its
         current definition:
 
@@ -447,7 +447,8 @@
                                               syncprops=syncprops,
                                               syncperms=syncperms)
 
-    def _synchronize_eschema(self, etype, syncperms=True):
+    def _synchronize_eschema(self, etype, syncrdefs=True,
+                             syncperms=True, syncprops=True):
         """synchronize properties of the persistent entity schema against
         its current definition:
 
@@ -464,40 +465,43 @@
         try:
             eschema = self.fs_schema.eschema(etype)
         except KeyError:
-            return
-        repospschema = repoeschema.specializes()
-        espschema = eschema.specializes()
-        if repospschema and not espschema:
-            self.rqlexec('DELETE X specializes Y WHERE X is CWEType, X name %(x)s',
-                         {'x': str(repoeschema)}, ask_confirm=False)
-        elif not repospschema and espschema:
-            self.rqlexec('SET X specializes Y WHERE X is CWEType, X name %(x)s, '
-                         'Y is CWEType, Y name %(y)s',
-                         {'x': str(repoeschema), 'y': str(espschema)},
-                         ask_confirm=False)
-        self.rqlexecall(ss.updateeschema2rql(eschema, repoeschema.eid),
-                        ask_confirm=self.verbosity >= 2)
-        for rschema, targettypes, role in eschema.relation_definitions(True):
-            if rschema in VIRTUAL_RTYPES:
-                continue
-            if role == 'subject':
-                if not rschema in repoeschema.subject_relations():
-                    continue
-                subjtypes, objtypes = [etype], targettypes
-            else: # role == 'object'
-                if not rschema in repoeschema.object_relations():
-                    continue
-                subjtypes, objtypes = targettypes, [etype]
-            self._synchronize_rschema(rschema, syncperms=syncperms,
-                                      syncrdefs=False)
-            reporschema = self.repo.schema.rschema(rschema)
-            for subj in subjtypes:
-                for obj in objtypes:
-                    if (subj, obj) not in reporschema.rdefs:
-                        continue
-                    self._synchronize_rdef_schema(subj, rschema, obj)
+            return # XXX somewhat unexpected, no?...
+        if syncprops:
+            repospschema = repoeschema.specializes()
+            espschema = eschema.specializes()
+            if repospschema and not espschema:
+                self.rqlexec('DELETE X specializes Y WHERE X is CWEType, X name %(x)s',
+                             {'x': str(repoeschema)}, ask_confirm=False)
+            elif not repospschema and espschema:
+                self.rqlexec('SET X specializes Y WHERE X is CWEType, X name %(x)s, '
+                             'Y is CWEType, Y name %(y)s',
+                             {'x': str(repoeschema), 'y': str(espschema)},
+                             ask_confirm=False)
+            self.rqlexecall(ss.updateeschema2rql(eschema, repoeschema.eid),
+                            ask_confirm=self.verbosity >= 2)
         if syncperms:
             self._synchronize_permissions(eschema, repoeschema.eid)
+        if syncrdefs:
+            for rschema, targettypes, role in eschema.relation_definitions(True):
+                if rschema in VIRTUAL_RTYPES:
+                    continue
+                if role == 'subject':
+                    if not rschema in repoeschema.subject_relations():
+                        continue
+                    subjtypes, objtypes = [etype], targettypes
+                else: # role == 'object'
+                    if not rschema in repoeschema.object_relations():
+                        continue
+                    subjtypes, objtypes = targettypes, [etype]
+                self._synchronize_rschema(rschema, syncrdefs=False,
+                                          syncprops=syncprops, syncperms=syncperms)
+                reporschema = self.repo.schema.rschema(rschema)
+                for subj in subjtypes:
+                    for obj in objtypes:
+                        if (subj, obj) not in reporschema.rdefs:
+                            continue
+                        self._synchronize_rdef_schema(subj, rschema, obj,
+                                                      syncprops=syncprops, syncperms=syncperms)
 
     def _synchronize_rdef_schema(self, subjtype, rtype, objtype,
                                  syncperms=True, syncprops=True):
@@ -537,14 +541,13 @@
                     newcstr = None
                 if newcstr is None:
                     self.rqlexec('DELETE X constrained_by C WHERE C eid %(x)s',
-                                 {'x': cstr.eid}, 'x',
-                                 ask_confirm=confirm)
+                                 {'x': cstr.eid}, ask_confirm=confirm)
                 else:
                     newconstraints.remove(newcstr)
                     value = unicode(newcstr.serialize())
                     if value != unicode(cstr.serialize()):
                         self.rqlexec('SET X value %(v)s WHERE X eid %(x)s',
-                                     {'x': cstr.eid, 'v': value}, 'x',
+                                     {'x': cstr.eid, 'v': value},
                                      ask_confirm=confirm)
             # 2. add new constraints
             cstrtype_map = self.cstrtype_mapping()
@@ -657,10 +660,10 @@
                         self.cmd_drop_relation_definition(
                             str(fromtype), rschema.type, str(totype))
         # execute post-remove files
-        for pack in reversed(removedcubes):
-            self.exec_event_script('postremove', self.config.cube_dir(pack))
+        for cube in reversed(removedcubes):
+            self.exec_event_script('postremove', self.config.cube_dir(cube))
             self.rqlexec('DELETE CWProperty X WHERE X pkey %(pk)s',
-                         {'pk': u'system.version.'+pack}, ask_confirm=False)
+                         {'pk': u'system.version.'+cube}, ask_confirm=False)
             self.commit()
 
     # schema migration actions ################################################
@@ -756,8 +759,8 @@
                 continue
             if instspschema.specializes() != eschema:
                 self.rqlexec('SET D specializes P WHERE D eid %(d)s, P name %(pn)s',
-                             {'d': instspschema.eid,
-                              'pn': eschema.type}, ask_confirm=confirm)
+                             {'d': instspschema.eid, 'pn': eschema.type},
+                             ask_confirm=confirm)
                 for rschema, tschemas, role in spschema.relation_definitions(True):
                     for tschema in tschemas:
                         if not tschema in instschema:
@@ -977,32 +980,23 @@
         if ertype is not None:
             if isinstance(ertype, (tuple, list)):
                 assert len(ertype) == 3, 'not a relation definition'
-                assert syncprops, 'can\'t update permission for a relation definition'
                 self._synchronize_rdef_schema(ertype[0], ertype[1], ertype[2],
                                               syncperms=syncperms,
                                               syncprops=syncprops)
             else:
                 erschema = self.repo.schema[ertype]
                 if isinstance(erschema, CubicWebRelationSchema):
-                    self._synchronize_rschema(erschema, syncperms=syncperms,
-                                              syncprops=syncprops,
-                                              syncrdefs=syncrdefs)
-                elif syncprops:
-                    self._synchronize_eschema(erschema, syncperms=syncperms)
+                    self._synchronize_rschema(erschema, syncrdefs=syncrdefs,
+                                              syncperms=syncperms,
+                                              syncprops=syncprops)
                 else:
-                    self._synchronize_permissions(self.fs_schema[ertype], erschema.eid)
+                    self._synchronize_eschema(erschema, syncrdefs=syncrdefs,
+                                              syncperms=syncperms,
+                                              syncprops=syncprops)
         else:
             for etype in self.repo.schema.entities():
-                if syncprops:
-                    self._synchronize_eschema(etype, syncperms=syncperms)
-                else:
-                    try:
-                        fseschema = self.fs_schema[etype]
-                    except KeyError:
-                        # entity type in the repository schema but not anymore
-                        # on the fs schema
-                        continue
-                    self._synchronize_permissions(fseschema, etype.eid)
+                self._synchronize_eschema(etype, syncrdefs=syncrdefs,
+                                          syncprops=syncprops, syncperms=syncperms)
         if commit:
             self.commit()
 
@@ -1099,12 +1093,12 @@
         for etype in wfof:
             rset = self.rqlexec(
                 'SET X workflow_of ET WHERE X eid %(x)s, ET name %(et)s',
-                {'x': wf.eid, 'et': etype}, 'x', ask_confirm=False)
+                {'x': wf.eid, 'et': etype}, ask_confirm=False)
             assert rset, 'unexistant entity type %s' % etype
             if default:
                 self.rqlexec(
                     'SET ET default_workflow X WHERE X eid %(x)s, ET name %(et)s',
-                    {'x': wf.eid, 'et': etype}, 'x', ask_confirm=False)
+                    {'x': wf.eid, 'et': etype}, ask_confirm=False)
         if commit:
             self.commit()
         return wf
@@ -1193,6 +1187,27 @@
             return session
         return self.cnx.request()
 
+    def cmd_storage_changed(self, etype, attribute):
+        """migrate entities to a custom storage. The new storage is expected to
+        be set, it will be temporarily removed for the migration.
+        """
+        from logilab.common.shellutils import ProgressBar
+        source = self.repo.system_source
+        storage = source.storage(etype, attribute)
+        source.unset_storage(etype, attribute)
+        rset = self.rqlexec('Any X WHERE X is %s' % etype, ask_confirm=False)
+        pb = ProgressBar(len(rset))
+        for entity in rset.entities():
+            # fill cache. Do not fetch that attribute using the global rql query
+            # since we may exhaust memory doing that....
+            getattr(entity, attribute)
+            storage.migrate_entity(entity, attribute)
+            # remove from entity cache to avoid memory exhaustion
+            del entity[attribute]
+            pb.update()
+        print
+        source.set_storage(etype, attribute, storage)
+
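Like the other `cmd_`-prefixed methods, this one is exposed without its prefix
in migration scripts; a sketch assuming a `File.data` attribute already mapped
to its new storage at server startup (e.g. through the storage declaration in
the cube's server-side code)::

    # in a cube migration script, once the new storage for File.data is in place
    storage_changed('File', 'data')
    commit()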
     def cmd_create_entity(self, etype, commit=False, **kwargs):
         """add a new entity of the given type"""
         entity = self._cw.create_entity(etype, **kwargs)
@@ -1200,6 +1215,13 @@
             self.commit()
         return entity
 
+    def cmd_reindex_entities(self, etypes=None):
+        """force reindexaction of entities of the given types or of all
+        indexable entity types
+        """
+        from cubicweb.server.checkintegrity import reindex_entities
+        reindex_entities(self.repo.schema, self.session, etypes=etypes)
+
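Again available without the `cmd_` prefix from migration scripts (the entity
type names are just examples)::

    # rebuild the full-text index for a couple of entity types only
    reindex_entities(('Card', 'File'))
    # or for every indexable entity type
    reindex_entities()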
     @deprecated('[3.5] use create_entity', stacklevel=3)
     def cmd_add_entity(self, etype, *args, **kwargs):
         """add a new entity of the given type"""
@@ -1228,6 +1250,9 @@
     def rqlexec(self, rql, kwargs=None, cachekey=None, build_descr=True,
                 ask_confirm=True):
         """rql action"""
+        if cachekey is not None:
+            warn('[3.8] cachekey is deprecated, you can safely remove this argument',
+                 DeprecationWarning, stacklevel=2)
         if not isinstance(rql, (tuple, list)):
             rql = ( (rql, kwargs), )
         res = None
@@ -1239,7 +1264,7 @@
                 msg = rql
             if not ask_confirm or self.confirm('Execute rql: %s ?' % msg):
                 try:
-                    res = execute(rql, kwargs, cachekey, build_descr=build_descr)
+                    res = execute(rql, kwargs, build_descr=build_descr)
                 except Exception, ex:
                     if self.confirm('Error: %s\nabort?' % ex):
                         raise
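Concretely, migration scripts (and any code going through `rqlexec`) can simply
drop the former cache key argument; a sketch assuming the usual `rql` shortcut
and an `eid` variable holding some entity's eid::

    # before 3.8, a cache key had to be passed along with the arguments
    rql('SET X in_state S WHERE X eid %(x)s, S name %(s)s',
        {'x': eid, 's': u'done'}, 'x')
    # from 3.8 on, simply omit it (passing it only triggers a DeprecationWarning)
    rql('SET X in_state S WHERE X eid %(x)s, S name %(s)s',
        {'x': eid, 's': u'done'})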
--- a/server/msplanner.py	Thu May 06 08:24:46 2010 +0200
+++ b/server/msplanner.py	Mon Jul 19 15:36:16 2010 +0200
@@ -95,7 +95,8 @@
 from logilab.common.decorators import cached
 
 from rql.stmts import Union, Select
-from rql.nodes import VariableRef, Comparison, Relation, Constant, Variable
+from rql.nodes import (VariableRef, Comparison, Relation, Constant, Variable,
+                       Not, Exists)
 
 from cubicweb import server
 from cubicweb.utils import make_uid
@@ -109,6 +110,40 @@
 # str() Constant.value to ensure generated table name won't be unicode
 Constant._ms_table_key = lambda x: str(x.value)
 
+def ms_scope(term):
+    rel = None
+    scope = term.scope
+    if isinstance(term, Variable) and len(term.stinfo['relations']) == 1:
+        rel = iter(term.stinfo['relations']).next().relation()
+    elif isinstance(term, Constant):
+        rel = term.relation()
+    elif isinstance(term, Relation):
+        rel = term
+    if rel is not None and (
+        rel.r_type != 'identity' and rel.scope is scope
+        and isinstance(rel.parent, Exists) and rel.parent.neged(strict=True)):
+        return scope.parent.scope
+    return scope
+
+def need_intersect(select, getrschema):
+    for rel in select.iget_nodes(Relation):
+        if isinstance(rel.parent, Exists) and rel.parent.neged(strict=True) and not rel.is_types_restriction():
+            rschema = getrschema(rel.r_type)
+            if not rschema.final:
+                # if one of the relation's variable is ambiguous but not
+                # invariant, an intersection will be necessary
+                for vref in rel.get_nodes(VariableRef):
+                    var = vref.variable
+                    if (var.valuable_references() == 1
+                        and len(var.stinfo['possibletypes']) > 1):
+                        return True
+    return False
+
+def neged_relation(rel):
+    parent = rel.parent
+    return isinstance(parent, Not) or (isinstance(parent, Exists) and
+                                       isinstance(parent.parent, Not))
+
 def need_source_access_relation(vargraph):
     if not vargraph:
         return False
@@ -195,7 +230,7 @@
     """return true if the variable is used in an outer scope of the given scope
     """
     for rel in var.stinfo['relations']:
-        rscope = rel.scope
+        rscope = ms_scope(rel)
         if not rscope is scope and is_ancestor(scope, rscope):
             return True
     return False
@@ -322,21 +357,24 @@
         # find for each source which variable/solution are supported
         for varname, varobj in self.rqlst.defined_vars.items():
             # if variable has an eid specified, we can get its source directly
-            # NOTE: use uidrels and not constnode to deal with "X eid IN(1,2,3,4)"
-            if varobj.stinfo['uidrels']:
-                vrels = varobj.stinfo['relations'] - varobj.stinfo['uidrels']
-                for rel in varobj.stinfo['uidrels']:
-                    for const in rel.children[1].get_nodes(Constant):
-                        eid = const.eval(self.plan.args)
-                        source = self._session.source_from_eid(eid)
-                        if vrels and not any(source.support_relation(r.r_type)
-                                             for r in vrels):
-                            self._set_source_for_term(self.system_source, varobj)
-                        else:
-                            self._set_source_for_term(source, varobj)
+            # NOTE: use uidrel and not constnode to deal with "X eid IN(1,2,3,4)"
+            if varobj.stinfo['uidrel'] is not None:
+                rel = varobj.stinfo['uidrel']
+                hasrel = len(varobj.stinfo['relations']) > 1
+                for const in rel.children[1].get_nodes(Constant):
+                    eid = const.eval(self.plan.args)
+                    source = self._session.source_from_eid(eid)
+                    if (source is self.system_source
+                        or (hasrel and
+                            not any(source.support_relation(r.r_type)
+                                    for r in varobj.stinfo['relations']
+                                    if not r is rel))):
+                        self._set_source_for_term(self.system_source, varobj)
+                    else:
+                        self._set_source_for_term(source, varobj)
                 continue
             rels = varobj.stinfo['relations']
-            if not rels and not varobj.stinfo['typerels']:
+            if not rels and varobj.stinfo['typerel'] is None:
                 # (rare) case where the variable has no type specified nor
                 # relation accessed ex. "Any MAX(X)"
                 self._set_source_for_term(self.system_source, varobj)
@@ -375,9 +413,9 @@
             elif not self._sourcesterms:
                 self._set_source_for_term(source, const)
             elif source in self._sourcesterms:
-                source_scopes = frozenset(t.scope for t in self._sourcesterms[source])
+                source_scopes = frozenset(ms_scope(t) for t in self._sourcesterms[source])
                 for const in vconsts:
-                    if const.scope in source_scopes:
+                    if ms_scope(const) in source_scopes:
                         self._set_source_for_term(source, const)
                         # if system source is used, add every rewritten constant
                         # to its supported terms even when associated entity
@@ -502,12 +540,15 @@
     def _remove_sources_until_stable(self, term, termssources):
         sourcesterms = self._sourcesterms
         for oterm, rel in self._linkedterms.get(term, ()):
-            if not term.scope is oterm.scope and rel.scope.neged(strict=True):
+            tscope = ms_scope(term)
+            otscope = ms_scope(oterm)
+            rscope = ms_scope(rel)
+            if not tscope is otscope and rscope.neged(strict=True):
                 # can't get information from relation inside a NOT exists
                 # where terms don't belong to the same scope
                 continue
             need_ancestor_scope = False
-            if not (term.scope is rel.scope and oterm.scope is rel.scope):
+            if not (tscope is rscope and otscope is rscope):
                 if rel.ored():
                     continue
                 if rel.ored(traverse_scope=True):
@@ -515,7 +556,7 @@
                     # propagate from parent scope to child scope, nothing else
                     need_ancestor_scope = True
             relsources = self._repo.rel_type_sources(rel.r_type)
-            if rel.neged(strict=True) and (
+            if neged_relation(rel) and (
                 len(relsources) < 2
                 or not isinstance(oterm, Variable)
                 or oterm.valuable_references() != 1
@@ -529,9 +570,9 @@
                 # Y)
                 continue
             # compute invalid sources for terms and remove them
-            if not need_ancestor_scope or is_ancestor(term.scope, oterm.scope):
+            if not need_ancestor_scope or is_ancestor(tscope, otscope):
                 self._remove_term_sources(term, rel, oterm, termssources)
-            if not need_ancestor_scope or is_ancestor(oterm.scope, term.scope):
+            if not need_ancestor_scope or is_ancestor(otscope, tscope):
                 self._remove_term_sources(oterm, rel, term, termssources)
 
     def _remove_term_sources(self, term, rel, oterm, termssources):
@@ -690,7 +731,7 @@
                     sourceterms.clear()
                     sources = [source]
                 else:
-                    scope = term.scope
+                    scope = ms_scope(term)
                     # find which sources support the same term and solutions
                     sources = self._expand_sources(source, term, solindices)
                     # no try to get as much terms as possible
@@ -721,7 +762,7 @@
                     for var in select.defined_vars.itervalues():
                         if not var in terms:
                             stinfo = var.stinfo
-                            for ovar, rtype in stinfo['attrvars']:
+                            for ovar, rtype in stinfo.get('attrvars', ()):
                                 if ovar in terms:
                                     needsel.add(var.name)
                                     terms.append(var)
@@ -776,7 +817,7 @@
                             # `terms`, eg cross relations)
                             for c in vconsts:
                                 rel = c.relation()
-                                if rel is None or not (rel in terms or rel.neged(strict=True)):
+                                if rel is None or not (rel in terms or neged_relation(rel)):
                                     final = False
                                     break
                             break
@@ -799,20 +840,19 @@
             # variable is refed by an outer scope and should be substituted
             # using an 'identity' relation (else we'll get a conflict of
             # temporary tables)
-            if rhsvar in terms and not lhsvar in terms:
+            if rhsvar in terms and not lhsvar in terms and ms_scope(lhsvar) is lhsvar.stmt:
                 self._identity_substitute(rel, lhsvar, terms, needsel)
-            elif lhsvar in terms and not rhsvar in terms:
+            elif lhsvar in terms and not rhsvar in terms and ms_scope(rhsvar) is rhsvar.stmt:
                 self._identity_substitute(rel, rhsvar, terms, needsel)
 
     def _identity_substitute(self, relation, var, terms, needsel):
-        newvar = self._insert_identity_variable(relation.scope, var)
-        if newvar is not None:
-            # ensure relation is using '=' operator, else we rely on a
-            # sqlgenerator side effect (it won't insert an inequality operator
-            # in this case)
-            relation.children[1].operator = '='
-            terms.append(newvar)
-            needsel.add(newvar.name)
+        newvar = self._insert_identity_variable(ms_scope(relation), var)
+        # ensure relation is using '=' operator, else we rely on a
+        # sqlgenerator side effect (it won't insert an inequality operator
+        # in this case)
+        relation.children[1].operator = '='
+        terms.append(newvar)
+        needsel.add(newvar.name)
 
     def _choose_term(self, sourceterms):
         """pick one term among terms supported by a source, which will be used
@@ -822,14 +862,14 @@
         if len(self._sourcesterms) > 1:
             # priority to variable from subscopes
             for term in sourceterms:
-                if not term.scope is self.rqlst:
+                if not ms_scope(term) is self.rqlst:
                     if isinstance(term, Variable):
                         return term, sourceterms.pop(term)
                     secondchoice = term
         else:
             # priority to variable from outer scope
             for term in sourceterms:
-                if term.scope is self.rqlst:
+                if ms_scope(term) is self.rqlst:
                     if isinstance(term, Variable):
                         return term, sourceterms.pop(term)
                     secondchoice = term
@@ -879,7 +919,7 @@
         # term has to belong to the same scope if there is more
         # than the system source remaining
         if len(sourcesterms) > 1 and not scope is self.rqlst:
-            candidates = (t for t in sourceterms.keys() if scope is t.scope)
+            candidates = (t for t in sourceterms.keys() if scope is ms_scope(t))
         else:
             candidates = sourceterms #.iterkeys()
         # we only want one unlinked term in each generated query
@@ -1198,9 +1238,10 @@
             step = AggrStep(plan, selection, select, atemptable, temptable)
             step.children = steps
         elif len(steps) > 1:
-            if select.need_intersect or any(select.need_intersect
-                                            for step in steps
-                                            for select in step.union.children):
+            getrschema = self.schema.rschema
+            if need_intersect(select, getrschema) or any(need_intersect(select, getrschema)
+                                                         for step in steps
+                                                         for select in step.union.children):
                 if temptable:
                     step = IntersectFetchStep(plan) # XXX not implemented
                 else:
@@ -1440,7 +1481,7 @@
             return False
         if not var in terms or used_in_outer_scope(var, self.current_scope):
             return False
-        if any(v for v, _ in var.stinfo['attrvars'] if not v in terms):
+        if any(v for v, _ in var.stinfo.get('attrvars', ()) if not v in terms):
             return False
         return True
 
--- a/server/mssteps.py	Thu May 06 08:24:46 2010 +0200
+++ b/server/mssteps.py	Mon Jul 19 15:36:16 2010 +0200
@@ -21,8 +21,8 @@
 * get data from the parent plan, the latest step, temporary table...
 * each step has is own members (this is not necessarily bad, but a bit messy
   for now)
+"""
 
-"""
 __docformat__ = "restructuredtext en"
 
 from rql.nodes import VariableRef, Variable, Function
@@ -37,11 +37,11 @@
     for select in union.children:
         if keepgroup:
             having, orderby = select.having, select.orderby
-            select.having, select.orderby = None, None
+            select.having, select.orderby = (), ()
             clauses.append( (having, orderby) )
         else:
             groupby, having, orderby = select.groupby, select.having, select.orderby
-            select.groupby, select.having, select.orderby = None, None, None
+            select.groupby, select.having, select.orderby = (), (), ()
             clauses.append( (groupby, having, orderby) )
     return clauses
 
@@ -74,7 +74,7 @@
             if not isinstance(vref, VariableRef):
                 continue
             var = vref.variable
-            if var.stinfo['attrvars']:
+            if var.stinfo.get('attrvars'):
                 for lhsvar, rtype in var.stinfo['attrvars']:
                     if lhsvar.name in srqlst.defined_vars:
                         key = '%s.%s' % (lhsvar.name, rtype)
--- a/server/pool.py	Thu May 06 08:24:46 2010 +0200
+++ b/server/pool.py	Mon Jul 19 15:36:16 2010 +0200
@@ -19,9 +19,8 @@
 connections pools, each of them dealing with a set of connections on each source
 used by the repository. A connections pools (`ConnectionsPool`) is an
 abstraction for a group of connection to each source.
-
+"""
 
-"""
 __docformat__ = "restructuredtext en"
 
 import sys
@@ -68,6 +67,9 @@
                 cnx.rollback()
             except:
                 source.critical('rollback error', exc_info=sys.exc_info())
+                # error on rollback, the connection is much probably in a really
+                # bad state. Replace it by a new one.
+                self.reconnect(source)
 
     def close(self, i_know_what_i_do=False):
         """close all connections in the pool"""
--- a/server/querier.py	Thu May 06 08:24:46 2010 +0200
+++ b/server/querier.py	Mon Jul 19 15:36:16 2010 +0200
@@ -17,8 +17,8 @@
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """Helper classes to execute RQL queries on a set of sources, performing
 security checking and data aggregation.
+"""
 
-"""
 from __future__ import with_statement
 
 __docformat__ = "restructuredtext en"
@@ -29,7 +29,7 @@
 from logilab.common.compat import any
 from rql import RQLSyntaxError
 from rql.stmts import Union, Select
-from rql.nodes import Relation, VariableRef, Constant, SubQuery
+from rql.nodes import Relation, VariableRef, Constant, SubQuery, Exists, Not
 
 from cubicweb import Unauthorized, QueryError, UnknownEid, typed_eid
 from cubicweb import server
@@ -112,7 +112,16 @@
                 ex = Unauthorized('read', solution[varname])
                 ex.var = varname
                 raise ex
-            localchecks[varname] = erqlexprs
+            # don't insert security on variable only referenced by 'NOT X relation Y' or
+            # 'NOT EXISTS(X relation Y)'
+            varinfo = rqlst.defined_vars[varname].stinfo
+            if varinfo['selected'] or (
+                len([r for r in varinfo['relations']
+                     if (not schema.rschema(r.r_type).final
+                         and ((isinstance(r.parent, Exists) and r.parent.neged(strict=True))
+                              or isinstance(r.parent, Not)))])
+                != len(varinfo['relations'])):
+                localchecks[varname] = erqlexprs
     return localchecks
 
 def add_noinvariant(noinvariant, restricted, select, nbtrees):
@@ -269,6 +278,7 @@
                 # transform in subquery when len(localchecks)>1 and groups
                 if nbtrees > 1 and (select.orderby or select.groupby or
                                     select.having or select.has_aggregat or
+                                    select.distinct or
                                     select.limit or select.offset):
                     newselect = Select()
                     # only select variables in subqueries
@@ -303,6 +313,7 @@
                         select.offset = 0
                     myunion = Union()
                     newselect.set_with([SubQuery(aliases, myunion)], check=False)
+                    newselect.distinct = select.distinct
                     solutions = [sol.copy() for sol in select.solutions]
                     cleanup_solutions(newselect, solutions)
                     newselect.set_possible_types(solutions)
@@ -353,16 +364,9 @@
         varkwargs = {}
         if not session.transaction_data.get('security-rqlst-cache'):
             for var in rqlst.defined_vars.itervalues():
-                for rel in var.stinfo['uidrels']:
-                    const = rel.children[1].children[0]
-                    try:
-                        varkwargs[var.name] = typed_eid(const.eval(self.args))
-                        break
-                    except AttributeError:
-                        #from rql.nodes import Function
-                        #assert isinstance(const, Function)
-                        # X eid IN(...)
-                        pass
+                if var.stinfo['constnode'] is not None:
+                    eid = var.stinfo['constnode'].eval(self.args)
+                    varkwargs[var.name] = typed_eid(eid)
         # dictionnary of variables restricted for security reason
         localchecks = {}
         restricted_vars = set()
@@ -424,7 +428,7 @@
         # list of new or updated entities definition (utils.Entity)
         self.e_defs = [[]]
         # list of new relation definition (3-uple (from_eid, r_type, to_eid)
-        self.r_defs = []
+        self.r_defs = set()
         # indexes to track entity definitions bound to relation definitions
         self._r_subj_index = {}
         self._r_obj_index = {}
@@ -437,7 +441,7 @@
 
     def add_relation_def(self, rdef):
         """add an relation definition to build"""
-        self.r_defs.append(rdef)
+        self.r_defs.add(rdef)
         if not isinstance(rdef[0], int):
             self._r_subj_index.setdefault(rdef[0], []).append(rdef)
         if not isinstance(rdef[2], int):
@@ -463,9 +467,9 @@
         for i, row in enumerate(self.e_defs[:]):
             self.e_defs[i][colidx] = edefs[0]
             samplerow = self.e_defs[i]
-            for edef in edefs[1:]:
+            for edef_ in edefs[1:]:
                 row = samplerow[:]
-                row[colidx] = edef
+                row[colidx] = edef_
                 self.e_defs.append(row)
         # now, see if this entity def is referenced as subject in some relation
         # definition
@@ -474,8 +478,8 @@
                 expanded = self._expanded(rdef)
                 result = []
                 for exp_rdef in expanded:
-                    for edef in edefs:
-                        result.append( (edef, exp_rdef[1], exp_rdef[2]) )
+                    for edef_ in edefs:
+                        result.append( (edef_, exp_rdef[1], exp_rdef[2]) )
                 self._expanded_r_defs[rdef] = result
         # and finally, see if this entity def is referenced as object in some
         # relation definition
@@ -484,8 +488,8 @@
                 expanded = self._expanded(rdef)
                 result = []
                 for exp_rdef in expanded:
-                    for edef in edefs:
-                        result.append( (exp_rdef[0], exp_rdef[1], edef) )
+                    for edef_ in edefs:
+                        result.append( (exp_rdef[0], exp_rdef[1], edef_) )
                 self._expanded_r_defs[rdef] = result
 
     def _expanded(self, rdef):
@@ -556,16 +560,22 @@
     def set_schema(self, schema):
         self.schema = schema
         repo = self._repo
+        # rql st and solution cache. Don't bother using a Cache instance: we
+        # should have a limited number of queries in there, since there are no
+        # entries in this cache for user queries (which have no args)
+        self._rql_cache = {}
+        # rql cache key cache
+        self._rql_ck_cache = Cache(repo.config['rql-cache-size'])
+        # some cache usage stats
+        self.cache_hit, self.cache_miss = 0, 0
         # rql parsing / analysing helper
         self.solutions = repo.vreg.solutions
-        self._rql_cache = Cache(repo.config['rql-cache-size'])
-        self.cache_hit, self.cache_miss = 0, 0
+        rqlhelper = repo.vreg.rqlhelper
+        self._parse = rqlhelper.parse
+        self._annotate = rqlhelper.annotate
         # rql planner
         # note: don't use repo.sources, may not be built yet, and also "admin"
         #       isn't an actual source
-        rqlhelper = repo.vreg.rqlhelper
-        self._parse = rqlhelper.parse
-        self._annotate = rqlhelper.annotate
         if len([uri for uri in repo.config.sources() if uri != 'admin']) < 2:
             from cubicweb.server.ssplanner import SSPlanner
             self._planner = SSPlanner(schema, rqlhelper)
@@ -588,7 +598,7 @@
             return InsertPlan(self, rqlst, args, session)
         return ExecutionPlan(self, rqlst, args, session)
 
-    def execute(self, session, rql, args=None, eid_key=None, build_descr=True):
+    def execute(self, session, rql, args=None, build_descr=True):
         """execute a rql query, return resulting rows and their description in
         a `ResultSet` object
 
@@ -597,12 +607,6 @@
         * `build_descr` is a boolean flag indicating if the description should
           be built on select queries (if false, the description will be an empty
           list)
-        * `eid_key` must be both a key in args and a substitution in the rql
-          query. It should be used to enhance cacheability of rql queries.
-          It may be a tuple for keys in args.
-          `eid_key` must be provided in cases where a eid substitution is provided
-          and resolves ambiguities in the possible solutions inferred for each
-          variable in the query.
 
         on INSERT queries, there will be one row with the eid of each inserted
         entity
@@ -618,40 +622,33 @@
                 print '*'*80
             print 'querier input', rql, args
         # parse the query and binds variables
-        if eid_key is not None:
-            if not isinstance(eid_key, (tuple, list)):
-                eid_key = (eid_key,)
-            cachekey = [rql]
-            for key in eid_key:
-                try:
-                    etype = self._repo.type_from_eid(args[key], session)
-                except KeyError:
-                    raise QueryError('bad cache key %s (no value)' % key)
-                except TypeError:
-                    raise QueryError('bad cache key %s (value: %r)' % (
-                        key, args[key]))
-                except UnknownEid:
-                    # we want queries such as "Any X WHERE X eid 9999"
-                    # return an empty result instead of raising UnknownEid
-                    return empty_rset(rql, args)
-                cachekey.append(etype)
-                # ensure eid is correctly typed in args
-                args[key] = typed_eid(args[key])
-            cachekey = tuple(cachekey)
-        else:
+        try:
             cachekey = rql
-        try:
+            if args:
+                eidkeys = self._rql_ck_cache[rql]
+                if eidkeys:
+                    try:
+                        cachekey = self._repo.querier_cache_key(session, rql,
+                                                                args, eidkeys)
+                    except UnknownEid:
+                        # we want queries such as "Any X WHERE X eid 9999"
+                        # return an empty result instead of raising UnknownEid
+                        return empty_rset(rql, args)
             rqlst = self._rql_cache[cachekey]
             self.cache_hit += 1
         except KeyError:
             self.cache_miss += 1
             rqlst = self.parse(rql)
             try:
-                self.solutions(session, rqlst, args)
+                eidkeys = self.solutions(session, rqlst, args)
             except UnknownEid:
                 # we want queries such as "Any X WHERE X eid 9999" return an
                 # empty result instead of raising UnknownEid
                 return empty_rset(rql, args, rqlst)
+            self._rql_ck_cache[rql] = eidkeys
+            if eidkeys:
+                cachekey = self._repo.querier_cache_key(session, rql, args,
+                                                        eidkeys)
             self._rql_cache[cachekey] = rqlst
         orig_rqlst = rqlst
         if rqlst.TYPE != 'select':
@@ -711,7 +708,7 @@
             # FIXME: get number of affected entities / relations on non
             # selection queries ?
         # return a result set object
-        return ResultSet(results, rql, args, descr, eid_key, orig_rqlst)
+        return ResultSet(results, rql, args, descr, orig_rqlst)
 
 from logging import getLogger
 from cubicweb import set_log_methods
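
The hunks above replace the explicit `eid_key` argument with a two-level cache: `_rql_ck_cache` remembers, for each RQL string, which argument names are used as eids (as returned by `solutions()`), while `_rql_cache` maps the derived cache key (the RQL string extended with the entity type of each such argument, as built by the `querier_cache_key` method added to server/repository.py below) to the parsed syntax tree. A minimal sketch of that flow, using simplified stand-alone names rather than the actual classes:

def cache_key(rql, args, eidkeys, type_from_eid):
    # the rql string plus the entity type of each eid argument,
    # sorted so equivalent calls build the same key
    key = [rql]
    for name in sorted(eidkeys):
        key.append(type_from_eid(args[name]))
    return tuple(key)

class MiniQuerier(object):
    def __init__(self, parse_and_solve, type_from_eid):
        self._solve = parse_and_solve   # (rql, args) -> (rqlst, eid arg names)
        self._type_from_eid = type_from_eid
        self._rql_ck_cache = {}         # rql -> eid argument names
        self._rql_cache = {}            # cache key -> parsed rql syntax tree

    def lookup(self, rql, args):
        ckey = rql
        if args and self._rql_ck_cache.get(rql):
            ckey = cache_key(rql, args, self._rql_ck_cache[rql],
                             self._type_from_eid)
        try:
            return self._rql_cache[ckey]            # cache hit
        except KeyError:                            # miss: parse and record
            rqlst, eidkeys = self._solve(rql, args)
            self._rql_ck_cache[rql] = eidkeys
            if eidkeys:
                ckey = cache_key(rql, args, eidkeys, self._type_from_eid)
            self._rql_cache[ckey] = rqlst
            return rqlst
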
--- a/server/repository.py	Thu May 06 08:24:46 2010 +0200
+++ b/server/repository.py	Mon Jul 19 15:36:16 2010 +0200
@@ -25,14 +25,14 @@
   point to a cubicweb instance.
 * handles session management
 * provides method for pyro registration, to call if pyro is enabled
-
+"""
 
-"""
 from __future__ import with_statement
 
 __docformat__ = "restructuredtext en"
 
 import sys
+import threading
 import Queue
 from os.path import join
 from datetime import datetime
@@ -46,7 +46,7 @@
 from yams.schema import role_name
 from rql import RQLSyntaxError
 
-from cubicweb import (CW_SOFTWARE_ROOT, CW_MIGRATION_MAP,
+from cubicweb import (CW_SOFTWARE_ROOT, CW_MIGRATION_MAP, QueryError,
                       UnknownEid, AuthenticationError, ExecutionError,
                       ETypeNotSupportedBySources, MultiSourcesError,
                       BadConnectionId, Unauthorized, ValidationError,
@@ -89,12 +89,12 @@
             with security_enabled(session, read=False):
                 session.execute('DELETE X %s Y WHERE X eid %%(x)s, '
                                 'NOT Y eid %%(y)s' % rtype,
-                                {'x': eidfrom, 'y': eidto}, 'x')
+                                {'x': eidfrom, 'y': eidto})
     if card[1] in '1?':
         with security_enabled(session, read=False):
-            session.execute('DELETE X %sY WHERE Y eid %%(y)s, '
+            session.execute('DELETE X %s Y WHERE Y eid %%(y)s, '
                             'NOT X eid %%(x)s' % rtype,
-                            {'x': eidfrom, 'y': eidto}, 'y')
+                            {'x': eidfrom, 'y': eidto})
 
 
 class Repository(object):
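
Note on the `'DELETE X %sY'` fix above: the missing space made the string formatting glue the relation type to the variable name, producing invalid RQL. With a hypothetical relation type:

>>> rtype = 'in_group'   # hypothetical relation type
>>> 'DELETE X %sY WHERE Y eid %%(y)s, NOT X eid %%(x)s' % rtype
'DELETE X in_groupY WHERE Y eid %(y)s, NOT X eid %(x)s'
>>> 'DELETE X %s Y WHERE Y eid %%(y)s, NOT X eid %%(x)s' % rtype
'DELETE X in_group Y WHERE Y eid %(y)s, NOT X eid %(x)s'

The first form produces `X in_groupY` with no object variable, which is not valid RQL; the second is what the cardinality handling intended.
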
@@ -125,6 +125,8 @@
         # sources
         self.sources = []
         self.sources_by_uri = {}
+        # shutdown flag
+        self.shutting_down = False
         # FIXME: store additional sources info in the system database ?
         # FIXME: sources should be ordered (add_entity priority)
         for uri, source_config in config.sources().items():
@@ -215,7 +217,6 @@
         for i in xrange(config['connections-pool-size']):
             self.pools.append(pool.ConnectionsPool(self.sources))
             self._available_pools.put_nowait(self.pools[-1])
-        self._shutting_down = False
         if config.quick_start:
             config.init_cubes(self.get_cubes())
         self.hm = hook.HooksManager(self.vreg)
@@ -315,7 +316,6 @@
     def pinfo(self):
         # XXX: session.pool is accessed from a local storage, would be interesting
         #      to see if there is a pool set in any thread specific data)
-        import threading
         return '%s: %s (%s)' % (self._available_pools.qsize(),
                                 ','.join(session.user.login for session in self._sessions.values()
                                          if session.pool),
@@ -324,8 +324,9 @@
         """called on server stop event to properly close opened sessions and
         connections
         """
-        assert not self._shutting_down, 'already shutting down'
-        self._shutting_down = True
+        assert not self.shutting_down, 'already shutting down'
+        self.shutting_down = True
+        self.system_source.shutdown()
         if isinstance(self._looping_tasks, tuple): # if tasks have been started
             for looptask in self._looping_tasks:
                 self.info('canceling task %s...', looptask.name)
@@ -351,7 +352,7 @@
             pyro_unregister(self.config)
         hits, misses = self.querier.cache_hit, self.querier.cache_miss
         try:
-            self.info('rqlt st cache hit/miss: %s/%s (%s%% hits)', hits, misses,
+            self.info('rql st cache hit/miss: %s/%s (%s%% hits)', hits, misses,
                       (hits * 100) / (hits + misses))
             hits, misses = self.system_source.cache_hit, self.system_source.cache_miss
             self.info('sql cache hit/miss: %s/%s (%s%% hits)', hits, misses,
@@ -362,28 +363,6 @@
         except ZeroDivisionError:
             pass
 
-    def stats(self): # XXX restrict to managers session?
-        import threading
-        results = {}
-        querier = self.querier
-        source = self.system_source
-        for size, maxsize, hits, misses, title in (
-            (len(querier._rql_cache), self.config['rql-cache-size'],
-            querier.cache_hit, querier.cache_miss, 'rqlt_st'),
-            (len(source._cache), self.config['rql-cache-size'],
-            source.cache_hit, source.cache_miss, 'sql'),
-            ):
-            results['%s_cache_size' % title] =  '%s / %s' % (size, maxsize)
-            results['%s_cache_hit' % title] =  hits
-            results['%s_cache_miss' % title] = misses
-            results['%s_cache_hit_percent' % title] = (hits * 100) / (hits + misses)
-        results['sql_no_cache'] = self.system_source.no_cache
-        results['nb_open_sessions'] = len(self._sessions)
-        results['nb_active_threads'] = threading.activeCount()
-        results['looping_tasks'] = ', '.join(str(t) for t in self._looping_tasks)
-        results['available_pools'] = self._available_pools.qsize()
-        return results
-
     def _login_from_email(self, login):
         session = self.internal_session()
         try:
@@ -421,7 +400,7 @@
         """return a CWUser entity for user with the given eid"""
         cls = self.vreg['etypes'].etype_class('CWUser')
         rql = cls.fetch_rql(session.user, ['X eid %(x)s'])
-        rset = session.execute(rql, {'x': eid}, 'x')
+        rset = session.execute(rql, {'x': eid})
         assert len(rset) == 1, rset
         cwuser = rset.get_entity(0, 0)
         # pylint: disable-msg=W0104
@@ -433,6 +412,28 @@
 
     # public (dbapi) interface ################################################
 
+    def stats(self): # XXX restrict to managers session?
+        results = {}
+        querier = self.querier
+        source = self.system_source
+        for size, maxsize, hits, misses, title in (
+            (len(querier._rql_cache), self.config['rql-cache-size'],
+            querier.cache_hit, querier.cache_miss, 'rqlt_st'),
+            (len(source._cache), self.config['rql-cache-size'],
+            source.cache_hit, source.cache_miss, 'sql'),
+            ):
+            results['%s_cache_size' % title] =  '%s / %s' % (size, maxsize)
+            results['%s_cache_hit' % title] =  hits
+            results['%s_cache_miss' % title] = misses
+            results['%s_cache_hit_percent' % title] = (hits * 100) / (hits + misses)
+        results['sql_no_cache'] = self.system_source.no_cache
+        results['nb_open_sessions'] = len(self._sessions)
+        results['nb_active_threads'] = threading.activeCount()
+        results['looping_tasks'] = ', '.join(str(t) for t in self._looping_tasks)
+        results['available_pools'] = self._available_pools.qsize()
+        results['threads'] = ', '.join(sorted(str(t) for t in threading.enumerate()))
+        return results
+
     def get_schema(self):
         """return the instance schema. This is a public method, not
         requiring a session id
@@ -504,9 +505,10 @@
         """return a result set containing system wide properties"""
         session = self.internal_session()
         try:
-            return session.execute('Any K,V WHERE P is CWProperty,'
-                                   'P pkey K, P value V, NOT P for_user U',
-                                   build_descr=False)
+            # don't use session.execute, we don't want rset.req set
+            return self.querier.execute(session, 'Any K,V WHERE P is CWProperty,'
+                                        'P pkey K, P value V, NOT P for_user U',
+                                        build_descr=False)
         finally:
             session.close()
 
@@ -573,14 +575,15 @@
         user._cw = user.cw_rset.req = session
         user.clear_related_cache()
         self._sessions[session.id] = session
-        self.info('opened %s', session)
+        self.info('opened session %s for user %s', session.id, login)
         self.hm.call_hooks('session_open', session)
         # commit session at this point in case write operation has been done
         # during `session_open` hooks
         session.commit()
         return session.id
 
-    def execute(self, sessionid, rqlstring, args=None, eid_key=None, build_descr=True):
+    def execute(self, sessionid, rqlstring, args=None, build_descr=True,
+                txid=None):
         """execute a RQL query
 
         * rqlstring should be a unicode string or a plain ascii string
@@ -588,11 +591,16 @@
         * build_descr is a flag indicating if the description should be
           built on select queries
         """
-        session = self._get_session(sessionid, setpool=True)
+        session = self._get_session(sessionid, setpool=True, txid=txid)
         try:
             try:
-                return self.querier.execute(session, rqlstring, args, eid_key,
+                rset = self.querier.execute(session, rqlstring, args,
                                             build_descr)
+                # NOTE: the web front end will (re)build it when needed,
+                #       e.g. in facets
+                #       Zeroed to avoid useless overhead with pyro
+                rset._rqlst = None
+                return rset
             except (Unauthorized, RQLSyntaxError):
                 raise
             except ValidationError, ex:
@@ -611,9 +619,9 @@
         finally:
             session.reset_pool()
 
-    def describe(self, sessionid, eid):
+    def describe(self, sessionid, eid, txid=None):
         """return a tuple (type, source, extid) for the entity with id <eid>"""
-        session = self._get_session(sessionid, setpool=True)
+        session = self._get_session(sessionid, setpool=True, txid=txid)
         try:
             return self.type_and_source_from_eid(eid, session)
         finally:
@@ -639,32 +647,36 @@
         session = self._get_session(sessionid, setpool=False)
         session.set_shared_data(key, value, querydata)
 
-    def commit(self, sessionid):
+    def commit(self, sessionid, txid=None):
         """commit transaction for the session with the given id"""
         self.debug('begin commit for session %s', sessionid)
         try:
-            return self._get_session(sessionid).commit()
+            session = self._get_session(sessionid)
+            session.set_tx_data(txid)
+            return session.commit()
         except (ValidationError, Unauthorized):
             raise
         except:
             self.exception('unexpected error')
             raise
 
-    def rollback(self, sessionid):
+    def rollback(self, sessionid, txid=None):
         """commit transaction for the session with the given id"""
         self.debug('begin rollback for session %s', sessionid)
         try:
-            self._get_session(sessionid).rollback()
+            session = self._get_session(sessionid)
+            session.set_tx_data(txid)
+            session.rollback()
         except:
             self.exception('unexpected error')
             raise
 
-    def close(self, sessionid, checkshuttingdown=True):
+    def close(self, sessionid, txid=None, checkshuttingdown=True):
         """close the session with the given id"""
-        session = self._get_session(sessionid, setpool=True,
+        session = self._get_session(sessionid, setpool=True, txid=txid,
                                     checkshuttingdown=checkshuttingdown)
         # operations uncommitted before close are rolled back before the hook is called
-        session.rollback()
+        session.rollback(reset_pool=False)
         self.hm.call_hooks('session_close', session)
         # commit session at this point in case write operation has been done
         # during `session_close` hooks
@@ -695,34 +707,35 @@
         for prop, value in props.items():
             session.change_property(prop, value)
 
-    def undoable_transactions(self, sessionid, ueid=None, **actionfilters):
+    def undoable_transactions(self, sessionid, ueid=None, txid=None,
+                              **actionfilters):
         """See :class:`cubicweb.dbapi.Connection.undoable_transactions`"""
-        session = self._get_session(sessionid, setpool=True)
+        session = self._get_session(sessionid, setpool=True, txid=txid)
         try:
             return self.system_source.undoable_transactions(session, ueid,
                                                             **actionfilters)
         finally:
             session.reset_pool()
 
-    def transaction_info(self, sessionid, txuuid):
+    def transaction_info(self, sessionid, txuuid, txid=None):
         """See :class:`cubicweb.dbapi.Connection.transaction_info`"""
-        session = self._get_session(sessionid, setpool=True)
+        session = self._get_session(sessionid, setpool=True, txid=txid)
         try:
             return self.system_source.tx_info(session, txuuid)
         finally:
             session.reset_pool()
 
-    def transaction_actions(self, sessionid, txuuid, public=True):
+    def transaction_actions(self, sessionid, txuuid, public=True, txid=None):
         """See :class:`cubicweb.dbapi.Connection.transaction_actions`"""
-        session = self._get_session(sessionid, setpool=True)
+        session = self._get_session(sessionid, setpool=True, txid=txid)
         try:
             return self.system_source.tx_actions(session, txuuid, public)
         finally:
             session.reset_pool()
 
-    def undo_transaction(self, sessionid, txuuid):
+    def undo_transaction(self, sessionid, txuuid, txid=None):
         """See :class:`cubicweb.dbapi.Connection.undo_transaction`"""
-        session = self._get_session(sessionid, setpool=True)
+        session = self._get_session(sessionid, setpool=True, txid=txid)
         try:
             return self.system_source.undo_transaction(session, txuuid)
         finally:
@@ -785,15 +798,17 @@
         session.set_pool()
         return session
 
-    def _get_session(self, sessionid, setpool=False, checkshuttingdown=True):
+    def _get_session(self, sessionid, setpool=False, txid=None,
+                     checkshuttingdown=True):
         """return the user associated to the given session identifier"""
-        if checkshuttingdown and self._shutting_down:
+        if checkshuttingdown and self.shutting_down:
             raise Exception('Repository is shutting down')
         try:
             session = self._sessions[sessionid]
         except KeyError:
             raise BadConnectionId('No such session %s' % sessionid)
         if setpool:
+            session.set_tx_data(txid) # must be done before set_pool
             session.set_pool()
         return session
 
@@ -849,6 +864,21 @@
         """return the source for the given entity's eid"""
         return self.sources_by_uri[self.type_and_source_from_eid(eid, session)[1]]
 
+    def querier_cache_key(self, session, rql, args, eidkeys):
+        cachekey = [rql]
+        for key in sorted(eidkeys):
+            try:
+                etype = self.type_from_eid(args[key], session)
+            except KeyError:
+                raise QueryError('bad cache key %s (no value)' % key)
+            except TypeError:
+                raise QueryError('bad cache key %s (value: %r)' % (
+                    key, args[key]))
+            cachekey.append(etype)
+            # ensure eid is correctly typed in args
+            args[key] = typed_eid(args[key])
+        return tuple(cachekey)
+
     def eid2extid(self, source, eid, session=None):
         """get local id from an eid"""
         etype, uri, extid = self.type_and_source_from_eid(eid, session)
@@ -873,6 +903,7 @@
         if eid is not None:
             self._extid_cache[cachekey] = eid
             self._type_source_cache[eid] = (etype, source.uri, extid)
+            # XXX used with extlite (eg vcsfile), probably not needed anymore
             if recreate:
                 entity = source.before_entity_insertion(session, extid, etype, eid)
                 entity._cw_recreating = True
@@ -901,10 +932,8 @@
             self._extid_cache[cachekey] = eid
             self._type_source_cache[eid] = (etype, source.uri, extid)
             entity = source.before_entity_insertion(session, extid, etype, eid)
-            if not hasattr(entity, 'edited_attributes'):
-                entity.edited_attributes = set()
+            entity.edited_attributes = set(entity)
             if source.should_call_hooks:
-                entity.edited_attributes = set(entity)
                 self.hm.call_hooks('before_add_entity', session, entity=entity)
             # XXX call add_info with complete=False ?
             self.add_info(session, entity, source, extid)
@@ -914,7 +943,7 @@
             else:
                 # minimal meta-data
                 session.execute('SET X is E WHERE X eid %(x)s, E name %(name)s',
-                                {'x': entity.eid, 'name': entity.__regid__}, 'x')
+                                {'x': entity.eid, 'name': entity.__regid__})
             session.commit(reset_pool)
             return eid
         except:
@@ -962,7 +991,7 @@
                     rql = 'DELETE X %s Y WHERE X eid %%(x)s' % rtype
                 else:
                     rql = 'DELETE Y %s X WHERE X eid %%(x)s' % rtype
-                session.execute(rql, {'x': eid}, 'x', build_descr=False)
+                session.execute(rql, {'x': eid}, build_descr=False)
         self.system_source.delete_info(session, entity, sourceuri, extid)
 
     def locate_relation_source(self, session, subject, rtype, object):
@@ -1085,8 +1114,6 @@
         orig_edited_attributes = getattr(entity, 'edited_attributes', None)
         entity.edited_attributes = edited_attributes
         try:
-            if session.is_hook_category_activated('integrity'):
-                entity.check()
             only_inline_rels, need_fti_update = True, False
             relations = []
             source = self.source_from_eid(entity.eid, session)
@@ -1117,6 +1144,8 @@
                                   eidfrom=entity.eid, rtype=attr, eidto=value)
                 if not only_inline_rels:
                     hm.call_hooks('before_update_entity', session, entity=entity)
+            if session.is_hook_category_activated('integrity'):
+                entity.check()
             source.update_entity(session, entity)
             self.system_source.update_info(session, entity, need_fti_update)
             if source.should_call_hooks:
--- a/server/rqlannotation.py	Thu May 06 08:24:46 2010 +0200
+++ b/server/rqlannotation.py	Mon Jul 19 15:36:16 2010 +0200
@@ -24,7 +24,7 @@
 from logilab.common.compat import any
 
 from rql import BadRQLQuery
-from rql.nodes import Relation, VariableRef, Constant, Variable, Or
+from rql.nodes import Relation, VariableRef, Constant, Variable, Or, Exists
 from rql.utils import common_parent
 
 def _annotate_select(annotator, rqlst):
@@ -36,7 +36,7 @@
     has_text_query = False
     need_distinct = rqlst.distinct
     for rel in rqlst.iget_nodes(Relation):
-        if getrschema(rel.r_type).symmetric and not rel.neged(strict=True):
+        if getrschema(rel.r_type).symmetric and not isinstance(rel.parent, Exists):
             for vref in rel.iget_nodes(VariableRef):
                 stinfo = vref.variable.stinfo
                 if not stinfo['constnode'] and stinfo['selected']:
@@ -51,7 +51,7 @@
             stinfo['invariant'] = False
             stinfo['principal'] = _select_main_var(stinfo['rhsrelations'])
             continue
-        if not stinfo['relations'] and not stinfo['typerels']:
+        if not stinfo['relations'] and stinfo['typerel'] is None:
             # Any X, Any MAX(X)...
             # those particular queries should be executed using the system
             # entities table unless there is some type restriction
@@ -93,7 +93,7 @@
                 continue
             rschema = getrschema(rel.r_type)
             if rel.optional:
-                if rel in stinfo['optrelations']:
+                if rel in stinfo.get('optrelations', ()):
                     # optional variable can't be invariant if this is the lhs
                     # variable of an inlined relation
                     if not rel in stinfo['rhsrelations'] and rschema.inlined:
@@ -135,7 +135,7 @@
             # priority should be given to relations which are not in inner queries
             # (eg exists)
             try:
-                stinfo['principal'] = _select_principal(var.sqlscope, joins)
+                stinfo['principal'] = _select_principal(var.scope, joins)
             except CantSelectPrincipal:
                 stinfo['invariant'] = False
     rqlst.need_distinct = need_distinct
@@ -146,7 +146,7 @@
 class CantSelectPrincipal(Exception):
     """raised when no 'principal' variable can be found"""
 
-def _select_principal(sqlscope, relations, _sort=lambda x:x):
+def _select_principal(scope, relations, _sort=lambda x:x):
     """given a list of rqlst relations, select one which will be used to
     represent an invariant variable (e.g. using one extremity of the relation
     instead of the variable's type table
@@ -161,7 +161,7 @@
             continue
         if rel.ored(traverse_scope=True):
             ored_rels.add(rel)
-        elif rel.sqlscope is sqlscope:
+        elif rel.scope is scope:
             return rel
         elif not rel.neged(traverse_scope=True):
             diffscope_rels.add(rel)
@@ -175,12 +175,12 @@
                     ored_rels.discard(rel1)
                     ored_rels.discard(rel2)
     for rel in _sort(ored_rels):
-        if rel.sqlscope is sqlscope:
+        if rel.scope is scope:
             return rel
         diffscope_rels.add(rel)
     # if DISTINCT query, can use variable from a different scope as principal
     # since introduced duplicates will be removed
-    if sqlscope.stmt.distinct and diffscope_rels:
+    if scope.stmt.distinct and diffscope_rels:
         return iter(_sort(diffscope_rels)).next()
     # XXX  could use a relation for a different scope if it can't generate
     # duplicates, so we would have to check cardinality
@@ -197,7 +197,7 @@
         if rel.operator() not in ('=', 'IS') \
                or not isinstance(rel.children[1].children[0], VariableRef):
             continue
-        if rel.sqlscope is rel.stmt:
+        if rel.scope is rel.stmt:
             return rel
         principal = rel
     if principal is None:
@@ -220,23 +220,6 @@
                     var._q_invariant = True
             else:
                 var._q_invariant = False
-        for rel in select.iget_nodes(Relation):
-            if rel.neged(strict=True) and not rel.is_types_restriction():
-                rschema = getrschema(rel.r_type)
-                if not rschema.final:
-                    # if one of the relation's variable is ambiguous but not
-                    # invariant, an intersection will be necessary
-                    for vref in rel.get_nodes(VariableRef):
-                        var = vref.variable
-                        if (not var._q_invariant and var.valuable_references() == 1
-                            and len(var.stinfo['possibletypes']) > 1):
-                            select.need_intersect = True
-                            break
-                    else:
-                        continue
-                    break
-        else:
-            select.need_intersect = False
 
 
 class SQLGenAnnotator(object):
@@ -270,7 +253,7 @@
     def is_ambiguous(self, var):
         # ignore has_text relation
         if len([rel for rel in var.stinfo['relations']
-                if rel.sqlscope is var.sqlscope and rel.r_type == 'has_text']) == 1:
+                if rel.scope is var.scope and rel.r_type == 'has_text']) == 1:
             return False
         try:
             data = var.stmt._deamb_data
@@ -309,7 +292,7 @@
     def compute(self, rqlst):
         # set domains for each variable
         for varname, var in rqlst.defined_vars.iteritems():
-            if var.stinfo['uidrels'] or \
+            if var.stinfo['uidrel'] is not None or \
                    self.eschema(rqlst.solutions[0][varname]).final:
                 ptypes = var.stinfo['possibletypes']
             else:
@@ -352,8 +335,8 @@
     def set_rel_constraint(self, term, rel, etypes_func):
         if isinstance(term, VariableRef) and self.is_ambiguous(term.variable):
             var = term.variable
-            if len(var.stinfo['relations'] - var.stinfo['typerels']) == 1 \
-                   or rel.sqlscope is var.sqlscope or rel.r_type == 'identity':
+            if len(var.stinfo['relations']) == 1 \
+                   or rel.scope is var.scope or rel.r_type == 'identity':
                 self.restrict(var, frozenset(etypes_func()))
                 try:
                     self.maydeambrels[var].add(rel)
@@ -369,7 +352,7 @@
         if isinstance(other, VariableRef) and isinstance(other.variable, Variable):
             deambiguifier = other.variable
             if not var is self.deambification_map.get(deambiguifier):
-                if not var.stinfo['typerels']:
+                if var.stinfo['typerel'] is None:
                     otheretypes = deambiguifier.stinfo['possibletypes']
                 elif not self.is_ambiguous(deambiguifier):
                     otheretypes = self.varsols[deambiguifier]
@@ -377,7 +360,7 @@
                     # we know variable won't be invariant, try to use
                     # it to deambiguify the current variable
                     otheretypes = self.varsols[deambiguifier]
-            if not deambiguifier.stinfo['typerels']:
+            if deambiguifier.stinfo['typerel'] is None:
                 # if deambiguifier has no type restriction using 'is',
                 # don't record it
                 deambiguifier = None
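
The rqlannotation changes above follow an rql library API change: `var.stinfo` now exposes a single node (or None) under 'typerel' and 'uidrel' where older versions exposed the sets 'typerels'/'uidrels', and syntax tree nodes expose `scope` instead of `sqlscope`. A hypothetical accessor, only to illustrate the shape of the two APIs (not part of the patch):

def type_relation(var):
    stinfo = var.stinfo
    if 'typerel' in stinfo:
        # newer rql: a single relation node, or None
        return stinfo['typerel']
    # older rql: a (possibly empty) set of relation nodes
    for rel in stinfo.get('typerels', ()):
        return rel
    return None
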
--- a/server/schemaserial.py	Thu May 06 08:24:46 2010 +0200
+++ b/server/schemaserial.py	Mon Jul 19 15:36:16 2010 +0200
@@ -65,7 +65,10 @@
 
 def cstrtype_mapping(cursor):
     """cached constraint types mapping"""
-    return dict(cursor.execute('Any T, X WHERE X is CWConstraintType, X name T'))
+    map = dict(cursor.execute('Any T, X WHERE X is CWConstraintType, X name T'))
+    if not 'BoundConstraint' in map:
+        map['BoundConstraint'] = map['BoundaryConstraint']
+    return map
 
 # schema / perms deserialization ##############################################
 
@@ -253,10 +256,13 @@
     cstrtypemap = {}
     rql = 'INSERT CWConstraintType X: X name %(ct)s'
     for cstrtype in CONSTRAINTS:
+        if cstrtype == 'BoundConstraint':
+            continue # XXX deprecated in yams 0.29 / cw 3.8.1
         cstrtypemap[cstrtype] = execute(rql, {'ct': unicode(cstrtype)},
                                         build_descr=False)[0][0]
         if pb is not None:
             pb.update()
+    cstrtypemap['BoundConstraint'] = cstrtypemap['BoundaryConstraint']
     # serialize relations
     for rschema in schema.relations():
         # skip virtual relations such as eid, has_text and identity
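
The two schemaserial hunks deal with the rename of the `BoundConstraint` constraint type to `BoundaryConstraint` (yams 0.29): deserialization aliases the old name onto the new type's eid, and serialization no longer inserts the deprecated name but still registers the alias. A toy illustration of that aliasing, with made-up eids:

cstrtypemap = {'SizeConstraint': 1001, 'BoundaryConstraint': 1002}  # as read from the db
if 'BoundConstraint' not in cstrtypemap:
    cstrtypemap['BoundConstraint'] = cstrtypemap['BoundaryConstraint']
assert cstrtypemap['BoundConstraint'] == cstrtypemap['BoundaryConstraint'] == 1002
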
--- a/server/server.py	Thu May 06 08:24:46 2010 +0200
+++ b/server/server.py	Mon Jul 19 15:36:16 2010 +0200
@@ -15,9 +15,8 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""Pyro RQL server
+"""Pyro RQL server"""
 
-"""
 __docformat__ = "restructuredtext en"
 
 import os
@@ -83,7 +82,6 @@
         self.quiting = None
         # event queue
         self.events = []
-        # start repository looping tasks
 
     def add_event(self, event):
         """add an event to the loop"""
@@ -103,6 +101,7 @@
 
     def run(self, req_timeout=5.0):
         """enter the service loop"""
+        # start repository looping tasks
         self.repo.start_looping_tasks()
         while self.quiting is None:
             try:
@@ -130,35 +129,7 @@
         signal.signal(signal.SIGINT, lambda x, y, s=self: s.quit())
         signal.signal(signal.SIGTERM, lambda x, y, s=self: s.quit())
 
-    def daemonize(self, pid_file=None):
-        """daemonize the process"""
-        # fork so the parent can exist
-        if (os.fork()):
-            return -1
-        # deconnect from tty and create a new session
-        os.setsid()
-        # fork again so the parent, (the session group leader), can exit.
-        # as a non-session group leader, we can never regain a controlling
-        # terminal.
-        if (os.fork()):
-            return -1
-        # move to the root to avoit mount pb
-        os.chdir('/')
-        # set paranoid umask
-        os.umask(077)
-        if pid_file is not None:
-            # write pid in a file
-            f = open(pid_file, 'w')
-            f.write(str(os.getpid()))
-            f.close()
-        # filter warnings
-        warnings.filterwarnings('ignore')
-        # close standard descriptors
-        sys.stdin.close()
-        sys.stdout.close()
-        sys.stderr.close()
-
 from logging import getLogger
 from cubicweb import set_log_methods
 LOGGER = getLogger('cubicweb.reposerver')
-set_log_methods(CubicWebConfiguration, LOGGER)
+set_log_methods(RepositoryServer, LOGGER)
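
RepositoryServer no longer daemonizes itself; serverctl (below) now calls `daemonize` from logilab.common.daemon with the pid file instead. For reference, the removed method boiled down to the classic double fork; a condensed sketch of the deleted code (not the logilab implementation):

import os
import sys
import warnings

def daemonize_sketch(pid_file=None):
    if os.fork():       # first fork: the calling (parent) process returns
        return -1
    os.setsid()         # detach from the controlling terminal
    if os.fork():       # second fork: never a session group leader again
        return -1
    os.chdir('/')       # do not pin any mount point
    os.umask(077)       # paranoid umask (Python 2 octal literal)
    if pid_file is not None:
        pidf = open(pid_file, 'w')
        pidf.write(str(os.getpid()))
        pidf.close()
    warnings.filterwarnings('ignore')
    sys.stdin.close()
    sys.stdout.close()
    sys.stderr.close()
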
--- a/server/serverconfig.py	Thu May 06 08:24:46 2010 +0200
+++ b/server/serverconfig.py	Mon Jul 19 15:36:16 2010 +0200
@@ -36,12 +36,12 @@
                'default': 'admin',
                'help': "cubicweb manager account's login "
                '(this user will be created)',
-               'inputlevel': 0,
+               'level': 0,
                }),
     ('password', {'type' : 'password',
                   'default': REQUIRED,
                   'help': "cubicweb manager account's password",
-                  'inputlevel': 0,
+                  'level': 0,
                   }),
     )
 
@@ -106,39 +106,39 @@
          {'type' : 'string',
           'default': None,
           'help': 'host name if not correctly detectable through gethostname',
-          'group': 'main', 'inputlevel': 1,
+          'group': 'main', 'level': 1,
           }),
         ('pid-file',
          {'type' : 'string',
           'default': Method('default_pid_file'),
           'help': 'repository\'s pid file',
-          'group': 'main', 'inputlevel': 2,
+          'group': 'main', 'level': 2,
           }),
         ('uid',
          {'type' : 'string',
           'default': None,
           'help': 'if this option is set, use the specified user to start \
 the repository rather than the user running the command',
-          'group': 'main', 'inputlevel': (CubicWebConfiguration.mode == 'installed') and 0 or 1,
+          'group': 'main', 'level': (CubicWebConfiguration.mode == 'installed') and 0 or 1,
           }),
         ('session-time',
          {'type' : 'time',
           'default': '30min',
           'help': 'session expiration time, default to 30 minutes',
-          'group': 'main', 'inputlevel': 3,
+          'group': 'main', 'level': 3,
           }),
         ('connections-pool-size',
          {'type' : 'int',
           'default': 4,
           'help': 'size of the connections pools. Each source supporting multiple \
 connections will have this number of opened connections.',
-          'group': 'main', 'inputlevel': 3,
+          'group': 'main', 'level': 3,
           }),
         ('rql-cache-size',
          {'type' : 'int',
           'default': 300,
           'help': 'size of the parsed rql cache.',
-          'group': 'main', 'inputlevel': 3,
+          'group': 'main', 'level': 3,
           }),
         ('undo-support',
          {'type' : 'string', 'default': '',
@@ -146,20 +146,20 @@
 [C]reate [U]pdate [D]elete entities / [A]dd [R]emove relation. Leave it empty \
 for no undo support, set it to CUDAR for full undo support, or to DR to \
 support undoing of deletions only.',
-          'group': 'main', 'inputlevel': 3,
+          'group': 'main', 'level': 3,
           }),
         ('keep-transaction-lifetime',
          {'type' : 'int', 'default': 7,
           'help': 'number of days during which transaction records should be \
 kept (hence undoable).',
-          'group': 'main', 'inputlevel': 3,
+          'group': 'main', 'level': 3,
           }),
         ('multi-sources-etypes',
          {'type' : 'csv', 'default': (),
           'help': 'defines which entity types from this repository are used \
 by some other instances. You should set this properly so that those instances can \
 detect updates / deletions.',
-          'group': 'main', 'inputlevel': 3,
+          'group': 'main', 'level': 3,
           }),
 
         ('delay-full-text-indexation',
@@ -168,7 +168,7 @@
           ' to be done when entities are added/modified by users, activate this '
           'option and setup a job using cubicweb-ctl db-rebuild-fti on your '
           'system (using cron for instance).',
-          'group': 'main', 'inputlevel': 3,
+          'group': 'main', 'level': 3,
           }),
 
         # email configuration
@@ -181,7 +181,7 @@
 modes are "default-dest-addrs" (emails specified in the configuration \
 variable with the same name), "users" (every user who has an activated \
 account with an email set), "none" (no notification).',
-          'group': 'email', 'inputlevel': 2,
+          'group': 'email', 'level': 2,
           }),
         ('default-dest-addrs',
          {'type' : 'csv',
@@ -189,14 +189,14 @@
           'help': 'comma separated list of email addresses that will be used \
 as default recipient when an email is sent and the notification has no \
 specific recipient rules.',
-          'group': 'email', 'inputlevel': 2,
+          'group': 'email', 'level': 2,
           }),
         ('supervising-addrs',
          {'type' : 'csv',
           'default': (),
           'help': 'comma separated list of email addresses that will be \
 notified of every change.',
-          'group': 'email', 'inputlevel': 2,
+          'group': 'email', 'level': 2,
           }),
         # pyro server.serverconfig
         ('pyro-host',
@@ -205,7 +205,7 @@
           'help': 'Pyro server host, if not detectable correctly through \
 gethostname(). It may contain port information using <host>:<port> notation, \
 and if not set, it will be chosen randomly',
-          'group': 'pyro', 'inputlevel': 3,
+          'group': 'pyro', 'level': 3,
           }),
         ) + CubicWebConfiguration.options)
 
--- a/server/serverctl.py	Thu May 06 08:24:46 2010 +0200
+++ b/server/serverctl.py	Mon Jul 19 15:36:16 2010 +0200
@@ -43,7 +43,7 @@
     given server.serverconfig
     """
     from getpass import getpass
-    from logilab.common.db import get_connection
+    from logilab.database import get_connection
     dbhost = source.get('db-host')
     if dbname is None:
         dbname = source['db-name']
@@ -317,8 +317,9 @@
         create_db = self.config.create_db
         helper = get_db_helper(driver)
         if driver == 'sqlite':
-            if os.path.exists(dbname) and automatic or \
-                   ASK.confirm('Database %s already exists -- do you want to drop it ?' % dbname):
+            if os.path.exists(dbname) and (
+                automatic or
+                ASK.confirm('Database %s already exists. Drop it?' % dbname)):
                 os.unlink(dbname)
         elif create_db:
             print '\n'+underline_title('Creating the system database')
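
The sqlite branch above is also an operator precedence fix: `and` binds tighter than `or`, so the old test prompted for confirmation (and then tried to unlink the file) even when the database file did not exist. With stand-in booleans, `confirmed` playing the role of ASK.confirm(...):

exists, automatic, confirmed = False, False, True
old_form = exists and automatic or confirmed    # == (exists and automatic) or confirmed
new_form = exists and (automatic or confirmed)
assert old_form is True    # old: would go on to unlink a missing file
assert new_form is False   # new: nothing to drop, no prompt needed
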
@@ -392,7 +393,7 @@
     def run(self, args):
         print '\n'+underline_title('Initializing the system database')
         from cubicweb.server import init_repository
-        from logilab.common.db import get_connection
+        from logilab.database import get_connection
         appid = pop_arg(args, msg='No instance specified !')
         config = ServerConfiguration.config_for(appid)
         try:
@@ -525,6 +526,7 @@
         )
 
     def run(self, args):
+        from logilab.common.daemon import daemonize
         from cubicweb.server.server import RepositoryServer
         appid = pop_arg(args, msg='No instance specified !')
         config = ServerConfiguration.config_for(appid)
@@ -537,14 +539,14 @@
         debug = self.config.debug
         # create the server
         server = RepositoryServer(config, debug)
-        # go ! (don't daemonize in debug mode)
-        pidfile = config['pid-file']
         # ensure the directory where the pid-file should be set exists (for
         # instance /var/run/cubicweb may be deleted on computer restart)
+        pidfile = config['pid-file']
         piddir = os.path.dirname(pidfile)
+        # go ! (don't daemonize in debug mode)
         if not os.path.exists(piddir):
             os.makedirs(piddir)
-        if not debug and server.daemonize(pidfile) == -1:
+        if not debug and daemonize(pidfile):
             return
         uid = config['uid']
         if uid is not None:
--- a/server/session.py	Thu May 06 08:24:46 2010 +0200
+++ b/server/session.py	Mon Jul 19 15:36:16 2010 +0200
@@ -15,9 +15,8 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""Repository users' and internal' sessions.
+"""Repository users' and internal' sessions."""
 
-"""
 from __future__ import with_statement
 
 __docformat__ = "restructuredtext en"
@@ -26,9 +25,10 @@
 import threading
 from time import time
 from uuid import uuid4
+from warnings import warn
 
 from logilab.common.deprecation import deprecated
-from rql.nodes import VariableRef, Function, ETYPE_PYOBJ_MAP, etype_from_pyobj
+from rql.nodes import ETYPE_PYOBJ_MAP, etype_from_pyobj
 from yams import BASE_TYPES
 
 from cubicweb import Binary, UnknownEid, schema
@@ -48,17 +48,6 @@
 NO_UNDO_TYPES.add('is_instance_of')
 # XXX rememberme,forgotpwd,apycot,vcsfile
 
-def is_final(rqlst, variable, args):
-    # try to find if this is a final var or not
-    for select in rqlst.children:
-        for sol in select.solutions:
-            etype = variable.get_type(sol, args)
-            if etype is None:
-                continue
-            if etype in BASE_TYPES:
-                return True
-            return False
-
 def _make_description(selected, args, solution):
     """return a description for a result set"""
     description = []
@@ -128,6 +117,9 @@
 #            print INDENT + 'reset write to', self.oldwrite
 
 
+class TransactionData(object):
+    def __init__(self, txid):
+        self.transactionid = txid
 
 class Session(RequestSessionBase):
     """tie session id, user, connections pool and other session data all
@@ -142,8 +134,7 @@
         self.user = user
         self.repo = repo
         self.cnxtype = cnxprops.cnxtype
-        self.creation = time()
-        self.timestamp = self.creation
+        self.timestamp = time()
         self.default_mode = 'read'
         # support undo for Create Update Delete entity / Add Remove relation
         if repo.config.creating or repo.config.repairing or self.is_internal_session:
@@ -160,7 +151,8 @@
         # i18n initialization
         self.set_language(cnxprops.lang)
         # internals
-        self._threaddata = threading.local()
+        self._tx_data = {}
+        self.__threaddata = threading.local()
         self._threads_in_transaction = set()
         self._closed = False
 
@@ -168,6 +160,23 @@
         return '<%ssession %s (%s 0x%x)>' % (
             self.cnxtype, unicode(self.user.login), self.id, id(self))
 
+    def set_tx_data(self, txid=None):
+        if txid is None:
+            txid = threading.currentThread().getName()
+        try:
+            self.__threaddata.txdata = self._tx_data[txid]
+        except KeyError:
+            self.__threaddata.txdata = self._tx_data[txid] = TransactionData(txid)
+
+    @property
+    def _threaddata(self):
+        try:
+            return self.__threaddata.txdata
+        except AttributeError:
+            self.set_tx_data()
+            return self.__threaddata.txdata
+
+
     def hijack_user(self, user):
         """return a fake request/session using specified user"""
         session = Session(user, self.repo)
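
The session now keeps one TransactionData instance per transaction identifier in `_tx_data` and binds the one for the current call to a thread local, so that `self._threaddata` always resolves to the data of the transaction served by the current thread (repository methods pass the dbapi `txid` down through `_get_session`, as seen above in server/repository.py). A minimal stand-alone sketch of that indirection, with simplified names:

import threading

class TxData(object):
    def __init__(self, txid):
        self.transactionid = txid

class MiniSession(object):
    def __init__(self):
        self._tx_data = {}              # txid -> TxData
        self._local = threading.local()

    def set_tx_data(self, txid=None):
        if txid is None:                # default: one transaction per thread
            txid = threading.currentThread().getName()
        self._local.txdata = self._tx_data.setdefault(txid, TxData(txid))

    @property
    def _threaddata(self):
        try:
            return self._local.txdata
        except AttributeError:          # first access in this thread
            self.set_tx_data()
            return self._local.txdata

session = MiniSession()
session.set_tx_data('tx-1')
assert session._threaddata.transactionid == 'tx-1'
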
@@ -295,8 +304,10 @@
         try:
             return source.doexec(self, sql, args, rollback=rollback_on_failure)
         except (source.OperationalError, source.InterfaceError):
+            if not rollback_on_failure:
+                raise
             source.warning("trying to reconnect")
-            self.pool.reconnect(self)
+            self.pool.reconnect(source)
             return source.doexec(self, sql, args, rollback=rollback_on_failure)
 
     def set_language(self, language):
@@ -348,11 +359,14 @@
     @property
     def read_security(self):
         """return a boolean telling if read security is activated or not"""
+        txstore = self._threaddata
+        if txstore is None:
+            return self.DEFAULT_SECURITY
         try:
-            return self._threaddata.read_security
+            return txstore.read_security
         except AttributeError:
-            self._threaddata.read_security = self.DEFAULT_SECURITY
-            return self._threaddata.read_security
+            txstore.read_security = self.DEFAULT_SECURITY
+            return txstore.read_security
 
     def set_read_security(self, activated):
         """[de]activate read security, returning the previous value set for
@@ -361,8 +375,11 @@
         you should usually use the `security_enabled` context manager instead
         of this to change security settings.
         """
-        oldmode = self.read_security
-        self._threaddata.read_security = activated
+        txstore = self._threaddata
+        if txstore is None:
+            return self.DEFAULT_SECURITY
+        oldmode = getattr(txstore, 'read_security', self.DEFAULT_SECURITY)
+        txstore.read_security = activated
         # dbapi_query used to detect hooks triggered by a 'dbapi' query (eg not
         # issued on the session). This is tricky since the execution model of
         # a (write) user query is:
@@ -379,18 +396,21 @@
         # else (False actually) is not perfect but should be enough
         #
         # also reset dbapi_query to true when we go back to DEFAULT_SECURITY
-        self._threaddata.dbapi_query = (oldmode is self.DEFAULT_SECURITY
-                                        or activated is self.DEFAULT_SECURITY)
+        txstore.dbapi_query = (oldmode is self.DEFAULT_SECURITY
+                               or activated is self.DEFAULT_SECURITY)
         return oldmode
 
     @property
     def write_security(self):
         """return a boolean telling if write security is activated or not"""
+        txstore = self._threaddata
+        if txstore is None:
+            return self.DEFAULT_SECURITY
         try:
-            return self._threaddata.write_security
+            return txstore.write_security
         except:
-            self._threaddata.write_security = self.DEFAULT_SECURITY
-            return self._threaddata.write_security
+            txstore.write_security = self.DEFAULT_SECURITY
+            return txstore.write_security
 
     def set_write_security(self, activated):
         """[de]activate write security, returning the previous value set for
@@ -399,8 +419,11 @@
         you should usually use the `security_enabled` context manager instead
         of this to change security settings.
         """
-        oldmode = self.write_security
-        self._threaddata.write_security = activated
+        txstore = self._threaddata
+        if txstore is None:
+            return self.DEFAULT_SECURITY
+        oldmode = getattr(txstore, 'write_security', self.DEFAULT_SECURITY)
+        txstore.write_security = activated
         return oldmode
 
     @property
@@ -539,11 +562,15 @@
     @property
     def pool(self):
         """connections pool, set according to transaction mode for each query"""
+        if self._closed:
+            self.reset_pool(True)
+            raise Exception('try to access pool on a closed session')
         return getattr(self._threaddata, 'pool', None)
 
-    def set_pool(self, checkclosed=True):
+    def set_pool(self):
         """the session need a pool to execute some queries"""
-        if checkclosed and self._closed:
+        if self._closed:
+            self.reset_pool(True)
             raise Exception('try to set pool on a closed session')
         if self.pool is None:
             # get pool first to avoid race-condition
@@ -554,30 +581,39 @@
                 self._threaddata.pool = None
                 self.repo._free_pool(pool)
                 raise
-            self._threads_in_transaction.add(threading.currentThread())
+            self._threads_in_transaction.add(
+                (threading.currentThread(), pool) )
         return self._threaddata.pool
 
+    def _free_thread_pool(self, thread, pool, force_close=False):
+        try:
+            self._threads_in_transaction.remove( (thread, pool) )
+        except KeyError:
+            # race condition on pool freeing (freed by commit or rollback vs
+            # close)
+            pass
+        else:
+            if force_close:
+                pool.reconnect()
+            else:
+                pool.pool_reset()
+            # free pool once everything is done to avoid race-condition
+            self.repo._free_pool(pool)
+
     def reset_pool(self, ignoremode=False):
         """the session is no longer using its pool, at least for some time"""
         # pool may be none if no operation has been done since last commit
         # or rollback
-        if self.pool is not None and (ignoremode or self.mode == 'read'):
+        pool = getattr(self._threaddata, 'pool', None)
+        if pool is not None and (ignoremode or self.mode == 'read'):
             # even in read mode, we must release the current transaction
-            pool = self.pool
-            try:
-                self._threads_in_transaction.remove(threading.currentThread())
-            except KeyError:
-                pass
-            pool.pool_reset()
+            self._free_thread_pool(threading.currentThread(), pool)
             del self._threaddata.pool
-            # free pool once everything is done to avoid race-condition
-            self.repo._free_pool(pool)
 
     def _touch(self):
         """update latest session usage timestamp and reset mode to read"""
         self.timestamp = time()
         self.local_perm_cache.clear() # XXX simply move in transaction_data, no?
-        self._threaddata.mode = self.default_mode
 
     # shared data handling ###################################################
 
@@ -655,23 +691,41 @@
         return self.repo.source_from_eid(eid, self)
 
     def execute(self, rql, kwargs=None, eid_key=None, build_descr=True):
-        """db-api like method directly linked to the querier execute method"""
-        rset = self._execute(self, rql, kwargs, eid_key, build_descr)
+        """db-api like method directly linked to the querier execute method.
+
+        See :meth:`cubicweb.dbapi.Cursor.execute` documentation.
+        """
+        if eid_key is not None:
+            warn('[3.8] eid_key is deprecated, you can safely remove this argument',
+                 DeprecationWarning, stacklevel=2)
+        self.timestamp = time() # update timestamp
+        rset = self._execute(self, rql, kwargs, build_descr)
         rset.req = self
         return rset
 
-    def _clear_thread_data(self):
+    def _clear_thread_data(self, reset_pool=True):
         """remove everything from the thread local storage, except pool
         which is explicitly removed by reset_pool, and mode which is set anyway
         by _touch
         """
-        store = self._threaddata
-        for name in ('commit_state', 'transaction_data', 'pending_operations',
-                     '_rewriter'):
-            try:
-                delattr(store, name)
-            except AttributeError:
-                pass
+        try:
+            txstore = self.__threaddata.txdata
+        except AttributeError:
+            pass
+        else:
+            if reset_pool:
+                self._tx_data.pop(txstore.transactionid, None)
+                try:
+                    del self.__threaddata.txdata
+                except AttributeError:
+                    pass
+            else:
+                for name in ('commit_state', 'transaction_data',
+                             'pending_operations', '_rewriter'):
+                    try:
+                        delattr(txstore, name)
+                    except AttributeError:
+                        continue
 
     def commit(self, reset_pool=True):
         """commit the current session's transaction"""
@@ -683,13 +737,13 @@
             return
         if self.commit_state:
             return
-        # by default, operations are executed with security turned off
-        with security_enabled(self, False, False):
-            # on rollback, an operation should have the following state
-            # information:
-            # - processed by the precommit/commit event or not
-            # - if processed, is it the failed operation
-            try:
+        # on rollback, an operation should have the following state
+        # information:
+        # - processed by the precommit/commit event or not
+        # - if processed, is it the failed operation
+        try:
+            # by default, operations are executed with security turned off
+            with security_enabled(self, False, False):
                 for trstate in ('precommit', 'commit'):
                     processed = []
                     self.commit_state = trstate
@@ -733,23 +787,24 @@
                                       exc_info=sys.exc_info())
                 self.info('%s session %s done', trstate, self.id)
                 return self.transaction_uuid(set=False)
-            finally:
-                self._clear_thread_data()
-                self._touch()
-                if reset_pool:
-                    self.reset_pool(ignoremode=True)
+        finally:
+            self._touch()
+            if reset_pool:
+                self.reset_pool(ignoremode=True)
+            self._clear_thread_data(reset_pool)
 
     def rollback(self, reset_pool=True):
         """rollback the current session's transaction"""
-        if self.pool is None:
-            assert not self.pending_operations
+        # don't use self.pool, rollback may be called with _closed == True
+        pool = getattr(self._threaddata, 'pool', None)
+        if pool is None:
             self._clear_thread_data()
             self._touch()
             self.debug('rollback session %s done (no db activity)', self.id)
             return
-        # by default, operations are executed with security turned off
-        with security_enabled(self, False, False):
-            try:
+        try:
+            # by default, operations are executed with security turned off
+            with security_enabled(self, False, False):
                 while self.pending_operations:
                     try:
                         operation = self.pending_operations.pop(0)
@@ -757,19 +812,19 @@
                     except:
                         self.critical('rollback error', exc_info=sys.exc_info())
                         continue
-                self.pool.rollback()
+                pool.rollback()
                 self.debug('rollback for session %s done', self.id)
-            finally:
-                self._clear_thread_data()
-                self._touch()
-                if reset_pool:
-                    self.reset_pool(ignoremode=True)
+        finally:
+            self._touch()
+            if reset_pool:
+                self.reset_pool(ignoremode=True)
+            self._clear_thread_data(reset_pool)
 
     def close(self):
         """do not close pool on session close, since they are shared now"""
         self._closed = True
         # copy since _threads_in_transaction maybe modified while waiting
-        for thread in self._threads_in_transaction.copy():
+        for thread, pool in self._threads_in_transaction.copy():
             if thread is threading.currentThread():
                 continue
             self.info('waiting for thread %s', thread)
@@ -779,13 +834,15 @@
             for i in xrange(10):
                 thread.join(1)
                 if not (thread.isAlive() and
-                        thread in self._threads_in_transaction):
+                        (thread, pool) in self._threads_in_transaction):
                     break
             else:
                 self.error('thread %s still alive after 10 seconds, will close '
                            'session anyway', thread)
+                self._free_thread_pool(thread, pool, force_close=True)
         self.rollback()
-        del self._threaddata
+        del self.__threaddata
+        del self._tx_data
 
     # transaction data/operations management ##################################
 
@@ -863,32 +920,37 @@
         """
         # not so easy, looks for variable which changes from one solution
         # to another
-        unstables = rqlst.get_variable_variables()
-        basedescription = []
+        unstables = rqlst.get_variable_indices()
+        basedescr = []
         todetermine = []
-        selected = rqlst.children[0].selection # sample selection
-        for i, term in enumerate(selected):
-            if isinstance(term, Function) and term.descr().rtype is not None:
-                basedescription.append(term.get_type(term.descr().rtype, args))
-                continue
-            for vref in term.get_nodes(VariableRef):
-                if vref.name in unstables:
-                    basedescription.append(None)
-                    todetermine.append( (i, is_final(rqlst, vref.variable, args)) )
-                    break
+        sampleselect = rqlst.children[0]
+        samplesols = sampleselect.solutions[0]
+        for i, term in enumerate(sampleselect.selection):
+            try:
+                ttype = term.get_type(samplesols, args)
+            except CoercionError:
+                ttype = None
+                isfinal = True
             else:
-                # sample etype
-                etype = rqlst.children[0].solutions[0]
-                basedescription.append(term.get_type(etype, args))
+                if ttype is None or ttype == 'Any':
+                    ttype = None
+                    isfinal = True
+                else:
+                    isfinal = ttype in BASE_TYPES
+            if ttype is None or i in unstables:
+                basedescr.append(None)
+                todetermine.append( (i, isfinal) )
+            else:
+                basedescr.append(ttype)
         if not todetermine:
-            return RepeatList(len(result), tuple(basedescription))
-        return self._build_descr(result, basedescription, todetermine)
+            return RepeatList(len(result), tuple(basedescr))
+        return self._build_descr(result, basedescr, todetermine)
 
     def _build_descr(self, result, basedescription, todetermine):
         description = []
         etype_from_eid = self.describe
         for row in result:
-            row_descr = basedescription
+            row_descr = basedescription[:]
             for index, isfinal in todetermine:
                 value = row[index]
                 if value is None:
@@ -901,7 +963,8 @@
                     try:
                         row_descr[index] = etype_from_eid(value)[0]
                     except UnknownEid:
-                        self.critical('wrong eid %s in repository, should check database' % value)
+                        self.critical('wrong eid %s in repository, you should '
+                                      'db-check the database' % value)
                         row_descr[index] = row[index] = None
             description.append(tuple(row_descr))
         return description
@@ -968,6 +1031,14 @@
         self.cnxtype = 'inmemory'
         self.disable_hook_categories('integrity')
 
+    @property
+    def pool(self):
+        """connections pool, set according to transaction mode for each query"""
+        if self.repo.shutting_down:
+            self.reset_pool(True)
+            raise Exception('repository is shutting down')
+        return getattr(self._threaddata, 'pool', None)
+
 
 class InternalManager(object):
     """a manager user with all access rights used internally for task such as
--- a/server/sources/__init__.py	Thu May 06 08:24:46 2010 +0200
+++ b/server/sources/__init__.py	Mon Jul 19 15:36:16 2010 +0200
@@ -15,9 +15,8 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""cubicweb server sources support
+"""cubicweb server sources support"""
 
-"""
 __docformat__ = "restructuredtext en"
 
 from os.path import join, splitext
@@ -52,9 +51,11 @@
     return True
 
 class TimedCache(dict):
-    def __init__(self, ttlm, ttls=0):
-        # time to live in minutes
-        self.ttl = timedelta(0, ttlm*60 + ttls, 0)
+    def __init__(self, ttl):
+        # time to live in seconds
+        if ttl <= 0:
+            raise ValueError('TimedCache initialized with a ttl of %ss' % ttl)
+        self.ttl = timedelta(seconds=ttl)
 
     def __setitem__(self, key, value):
         dict.__setitem__(self, key, (datetime.now(), value))
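
A minimal, self-contained sketch (not the actual CubicWeb class) of the seconds-based TimedCache contract introduced above: the constructor now takes a ttl expressed in seconds, rejects non-positive values, and timestamps entries so that expired ones can be purged. The clear_expired body below is only a sketch of the behaviour relied on by the looping task registered in ldapuser.py further down.

from datetime import datetime, timedelta

class TimedCacheSketch(dict):
    def __init__(self, ttl):
        # ttl is expressed in seconds
        if ttl <= 0:
            raise ValueError('TimedCache initialized with a ttl of %ss' % ttl)
        self.ttl = timedelta(seconds=ttl)

    def __setitem__(self, key, value):
        # store the insertion time along with the value
        dict.__setitem__(self, key, (datetime.now(), value))

    def __getitem__(self, key):
        return dict.__getitem__(self, key)[1]

    def clear_expired(self):
        now = datetime.now()
        for key, (timestamp, _) in list(self.items()):
            if now - timestamp > self.ttl:
                del self[key]

cache = TimedCacheSketch(2 * 60 * 60)  # two hours, expressed in seconds
cache['uid=jdoe'] = {'login': 'jdoe'}
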
--- a/server/sources/extlite.py	Thu May 06 08:24:46 2010 +0200
+++ b/server/sources/extlite.py	Mon Jul 19 15:36:16 2010 +0200
@@ -16,8 +16,8 @@
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """provide an abstract class for external sources using a sqlite database helper
+"""
 
-"""
 __docformat__ = "restructuredtext en"
 
 
@@ -79,7 +79,7 @@
           'default': None,
           'help': 'path to the sqlite database file used to do queries on the \
 repository.',
-          'inputlevel': 2,
+          'level': 2,
           }),
     )
 
--- a/server/sources/ldapuser.py	Thu May 06 08:24:46 2010 +0200
+++ b/server/sources/ldapuser.py	Mon Jul 19 15:36:16 2010 +0200
@@ -19,8 +19,6 @@
 
 this source is for now limited to a read-only CWUser source
 
-
-
 Part of the code is coming form Zope's LDAPUserFolder
 
 Copyright (c) 2004 Jens Vagelpohl.
@@ -33,7 +31,7 @@
 WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
 FOR A PARTICULAR PURPOSE.
 """
-
+from __future__ import division
 from base64 import b64decode
 
 from logilab.common.textutils import splitstrip
@@ -72,27 +70,27 @@
           'default': 'ldap',
           'help': 'ldap host. It may contains port information using \
 <host>:<port> notation.',
-          'group': 'ldap-source', 'inputlevel': 1,
+          'group': 'ldap-source', 'level': 1,
           }),
         ('protocol',
          {'type' : 'choice',
           'default': 'ldap',
           'choices': ('ldap', 'ldaps', 'ldapi'),
           'help': 'ldap protocol (allowed values: ldap, ldaps, ldapi)',
-          'group': 'ldap-source', 'inputlevel': 1,
+          'group': 'ldap-source', 'level': 1,
           }),
         ('auth-mode',
          {'type' : 'choice',
           'default': 'simple',
           'choices': ('simple', 'cram_md5', 'digest_md5', 'gssapi'),
           'help': 'authentication mode used to authenticate user to the ldap.',
-          'group': 'ldap-source', 'inputlevel': 3,
+          'group': 'ldap-source', 'level': 3,
           }),
         ('auth-realm',
          {'type' : 'string',
           'default': None,
           'help': 'realm to use when using gssapi/kerberos authentication.',
-          'group': 'ldap-source', 'inputlevel': 3,
+          'group': 'ldap-source', 'level': 3,
           }),
 
         ('data-cnx-dn',
@@ -100,52 +98,52 @@
           'default': '',
           'help': 'user dn to use to open data connection to the ldap (eg used \
 to respond to rql queries).',
-          'group': 'ldap-source', 'inputlevel': 1,
+          'group': 'ldap-source', 'level': 1,
           }),
         ('data-cnx-password',
          {'type' : 'string',
           'default': '',
           'help': 'password to use to open data connection to the ldap (eg used to respond to rql queries).',
-          'group': 'ldap-source', 'inputlevel': 1,
+          'group': 'ldap-source', 'level': 1,
           }),
 
         ('user-base-dn',
          {'type' : 'string',
           'default': 'ou=People,dc=logilab,dc=fr',
           'help': 'base DN to lookup for users',
-          'group': 'ldap-source', 'inputlevel': 0,
+          'group': 'ldap-source', 'level': 0,
           }),
         ('user-scope',
          {'type' : 'choice',
           'default': 'ONELEVEL',
           'choices': ('BASE', 'ONELEVEL', 'SUBTREE'),
           'help': 'user search scope',
-          'group': 'ldap-source', 'inputlevel': 1,
+          'group': 'ldap-source', 'level': 1,
           }),
         ('user-classes',
          {'type' : 'csv',
           'default': ('top', 'posixAccount'),
           'help': 'classes of user',
-          'group': 'ldap-source', 'inputlevel': 1,
+          'group': 'ldap-source', 'level': 1,
           }),
         ('user-login-attr',
          {'type' : 'string',
           'default': 'uid',
           'help': 'attribute used as login on authentication',
-          'group': 'ldap-source', 'inputlevel': 1,
+          'group': 'ldap-source', 'level': 1,
           }),
         ('user-default-group',
          {'type' : 'csv',
           'default': ('users',),
           'help': 'name of a group in which ldap users will be by default. \
 You can set multiple groups by separating them by a comma.',
-          'group': 'ldap-source', 'inputlevel': 1,
+          'group': 'ldap-source', 'level': 1,
           }),
         ('user-attrs-map',
          {'type' : 'named',
           'default': {'uid': 'login', 'gecos': 'email'},
           'help': 'map from ldap user attributes to cubicweb attributes',
-          'group': 'ldap-source', 'inputlevel': 1,
+          'group': 'ldap-source', 'level': 1,
           }),
 
         ('synchronization-interval',
@@ -153,13 +151,13 @@
           'default': '1d',
           'help': 'interval between synchronization with the ldap \
 directory (default to once a day).',
-          'group': 'ldap-source', 'inputlevel': 3,
+          'group': 'ldap-source', 'level': 3,
           }),
         ('cache-life-time',
          {'type' : 'time',
           'default': '2h',
-          'help': 'life time of query cache in minutes (default to two hours).',
-          'group': 'ldap-source', 'inputlevel': 3,
+          'help': 'life time of query cache (default to two hours).',
+          'group': 'ldap-source', 'level': 3,
           }),
 
     )
@@ -187,22 +185,28 @@
                               for o in self.user_classes]
         self._conn = None
         self._cache = {}
-        ttlm = time_validator(None, None,
-                              source_config.get('cache-life-time', 2*60))
-        self._query_cache = TimedCache(ttlm)
+        # cache-life-time is expressed in seconds (default: two hours)
+        self._cache_ttl = time_validator(None, None,
+                              source_config.get('cache-life-time', 2*60*60))
+        self._cache_ttl = max(71, self._cache_ttl)
+        self._query_cache = TimedCache(self._cache_ttl)
+        # synchronization-interval is expressed in seconds (default: once a day)
         self._interval = time_validator(None, None,
-                                        source_config.get('synchronization-interval',
-                                               24*60*60))
+                                    source_config.get('synchronization-interval',
+                                                      24*60*60))
 
     def reset_caches(self):
         """method called during test to reset potential source caches"""
         self._cache = {}
-        self._query_cache = TimedCache(2*60)
+        self._query_cache = TimedCache(self._cache_ttl)
 
     def init(self):
         """method called by the repository once ready to handle request"""
-        self.repo.looping_task(self._interval, self.synchronize)
-        self.repo.looping_task(self._query_cache.ttl.seconds/10,
+        self.info('ldap init')
+        # set a minimum period of 5min 1s (the additional second is to minimize
+        # resonance effects)
+        self.repo.looping_task(max(301, self._interval), self.synchronize)
+        self.repo.looping_task(self._cache_ttl // 10,
                                self._query_cache.clear_expired)
 
     def synchronize(self):
@@ -221,8 +225,10 @@
                                         "source='%s'" % self.uri)
             for eid, b64extid in cursor.fetchall():
                 extid = b64decode(b64extid)
+                self.debug('ldap eid %s', eid)
                 # if no result found, _search automatically delete entity information
                 res = self._search(session, extid, BASE)
+                self.debug('ldap search %s', res)
                 if res:
                     ldapemailaddr = res[0].get(ldap_emailattr)
                     if ldapemailaddr:
@@ -234,7 +240,7 @@
                             if emailaddr == ldapemailaddr:
                                 break
                         else:
-                            self.info('updating email address of user %s to %s',
+                            self.debug('updating email address of user %s to %s',
                                       extid, ldapemailaddr)
                             emailrset = execute('EmailAddress A WHERE A address %(addr)s',
                                                 {'addr': ldapemailaddr})
@@ -245,10 +251,10 @@
                             elif rset:
                                 if not execute('SET X address %(addr)s WHERE '
                                                'U primary_email X, U eid %(u)s',
-                                               {'addr': ldapemailaddr, 'u': eid}, 'u'):
+                                               {'addr': ldapemailaddr, 'u': eid}):
                                     execute('SET X address %(addr)s WHERE '
                                             'X eid %(x)s',
-                                            {'addr': ldapemailaddr, 'x': rset[0][0]}, 'x')
+                                            {'addr': ldapemailaddr, 'x': rset[0][0]})
                             else:
                                 # no email found, create it
                                 _insert_email(session, ldapemailaddr, eid)
@@ -269,7 +275,11 @@
         two queries are needed since passwords are stored crypted, so we have
         to fetch the salt first
         """
-        if password is None:
+        self.info('ldap authenticate %s', login)
+        if not password:
+            # On Windows + ADAM this would have succeeded (!!!)
+            # You get Authenticated as: 'NT AUTHORITY\ANONYMOUS LOGON'.
+            # we really really don't want that
             raise AuthenticationError()
         searchfilter = [filter_format('(%s=%s)', (self.user_login_attr, login))]
         searchfilter.extend([filter_format('(%s=%s)', ('objectClass', o))
@@ -285,9 +295,12 @@
         # check password by establishing a (unused) connection
         try:
             self._connect(user, password)
-        except Exception, ex:
+        except ldap.LDAPError, ex:
+            # Something went wrong, most likely bad credentials
             self.info('while trying to authenticate %s: %s', user, ex)
-            # Something went wrong, most likely bad credentials
+            raise AuthenticationError()
+        except Exception:
+            self.error('while trying to authenticate %s', user, exc_info=True)
             raise AuthenticationError()
         return self.extid2eid(user['dn'], 'CWUser', session)
 
@@ -343,6 +356,7 @@
         possible type). If cachekey is given, the query necessary to fetch the
         results (but not the results themselves) may be cached using this key.
         """
+        self.debug('ldap syntax tree search')
         # XXX not handled : transform/aggregat function, join on multiple users...
         assert len(union.children) == 1, 'union not supported'
         rqlst = union.children[0]
@@ -494,26 +508,28 @@
     def _search(self, session, base, scope,
                 searchstr='(objectClass=*)', attrs=()):
         """make an ldap query"""
+        self.debug('ldap search %s %s %s %s %s', self.uri, base, scope, searchstr, list(attrs))
         cnx = session.pool.connection(self.uri).cnx
         try:
             res = cnx.search_s(base, scope, searchstr, attrs)
         except ldap.PARTIAL_RESULTS:
             res = cnx.result(all=0)[1]
         except ldap.NO_SUCH_OBJECT:
+            self.info('ldap NO SUCH OBJECT')
             eid = self.extid2eid(base, 'CWUser', session, insert=False)
             if eid:
                 self.warning('deleting ldap user with eid %s and dn %s',
                              eid, base)
                 entity = session.entity_from_eid(eid, 'CWUser')
                 self.repo.delete_info(session, entity, self.uri, base)
-                self._cache.pop(base, None)
+                self.reset_caches()
             return []
-##         except ldap.REFERRAL, e:
-##             cnx = self.handle_referral(e)
-##             try:
-##                 res = cnx.search_s(base, scope, searchstr, attrs)
-##             except ldap.PARTIAL_RESULTS:
-##                 res_type, res = cnx.result(all=0)
+        # except ldap.REFERRAL, e:
+        #     cnx = self.handle_referral(e)
+        #     try:
+        #         res = cnx.search_s(base, scope, searchstr, attrs)
+        #     except ldap.PARTIAL_RESULTS:
+        #         res_type, res = cnx.result(all=0)
         result = []
         for rec_dn, rec_dict in res:
             # When used against Active Directory, "rec_dict" may not be
@@ -541,6 +557,7 @@
             self._cache[rec_dn] = rec_dict
             result.append(rec_dict)
         #print '--->', result
+        self.debug('ldap built results %s', len(result))
         return result
 
     def before_entity_insertion(self, session, lid, etype, eid):
@@ -551,6 +568,7 @@
         This method must return the an Entity instance representation of this
         entity.
         """
+        self.debug('ldap before entity insertion')
         entity = super(LDAPUserSource, self).before_entity_insertion(session, lid, etype, eid)
         res = self._search(session, lid, BASE)[0]
         for attr in entity.e_schema.indexable_attributes():
@@ -561,10 +579,11 @@
         """called by the repository after an entity stored here has been
         inserted in the system table.
         """
+        self.debug('ldap after entity insertion')
         super(LDAPUserSource, self).after_entity_insertion(session, dn, entity)
         for group in self.user_default_groups:
             session.execute('SET X in_group G WHERE X eid %(x)s, G name %(group)s',
-                            {'x': entity.eid, 'group': group}, 'x')
+                            {'x': entity.eid, 'group': group})
         # search for existant email first
         try:
             emailaddr = self._cache[dn][self.user_rev_attrs['email']]
@@ -574,7 +593,7 @@
                                {'addr': emailaddr})
         if rset:
             session.execute('SET U primary_email X WHERE U eid %(u)s, X eid %(x)s',
-                            {'x': rset[0][0], 'u': entity.eid}, 'u')
+                            {'x': rset[0][0], 'u': entity.eid})
         else:
             # not found, create it
             _insert_email(session, emailaddr, entity.eid)
@@ -589,7 +608,7 @@
 
 def _insert_email(session, emailaddr, ueid):
     session.execute('INSERT EmailAddress X: X address %(addr)s, U primary_email X '
-                    'WHERE U eid %(x)s', {'addr': emailaddr, 'x': ueid}, 'x')
+                    'WHERE U eid %(x)s', {'addr': emailaddr, 'x': ueid})
 
 class GotDN(Exception):
     """exception used when a dn localizing the searched user has been found"""
--- a/server/sources/native.py	Thu May 06 08:24:46 2010 +0200
+++ b/server/sources/native.py	Mon Jul 19 15:36:16 2010 +0200
@@ -22,8 +22,8 @@
   from which it comes from) are stored in a varchar column encoded as a base64
   string. This is because it should actually be Bytes but we want an index on
   it for fast querying.
+"""
 
-"""
 from __future__ import with_statement
 
 __docformat__ = "restructuredtext en"
@@ -33,6 +33,7 @@
 from datetime import datetime
 from base64 import b64decode, b64encode
 from contextlib import contextmanager
+from os.path import abspath
 
 from logilab.common.compat import any
 from logilab.common.cache import Cache
@@ -190,51 +191,52 @@
         ('db-driver',
          {'type' : 'string',
           'default': 'postgres',
-          'help': 'database driver (postgres, sqlite, sqlserver2005)',
-          'group': 'native-source', 'inputlevel': 1,
+          # XXX use choice type
+          'help': 'database driver (postgres, mysql, sqlite, sqlserver2005)',
+          'group': 'native-source', 'level': 1,
           }),
         ('db-host',
          {'type' : 'string',
           'default': '',
           'help': 'database host',
-          'group': 'native-source', 'inputlevel': 1,
+          'group': 'native-source', 'level': 1,
           }),
         ('db-port',
          {'type' : 'string',
           'default': '',
           'help': 'database port',
-          'group': 'native-source', 'inputlevel': 1,
+          'group': 'native-source', 'level': 1,
           }),
         ('db-name',
          {'type' : 'string',
           'default': Method('default_instance_id'),
           'help': 'database name',
-          'group': 'native-source', 'inputlevel': 0,
+          'group': 'native-source', 'level': 0,
           }),
         ('db-user',
          {'type' : 'string',
           'default': CubicWebNoAppConfiguration.mode == 'user' and getlogin() or 'cubicweb',
           'help': 'database user',
-          'group': 'native-source', 'inputlevel': 0,
+          'group': 'native-source', 'level': 0,
           }),
         ('db-password',
          {'type' : 'password',
           'default': '',
           'help': 'database password',
-          'group': 'native-source', 'inputlevel': 0,
+          'group': 'native-source', 'level': 0,
           }),
         ('db-encoding',
          {'type' : 'string',
           'default': 'utf8',
           'help': 'database encoding',
-          'group': 'native-source', 'inputlevel': 1,
+          'group': 'native-source', 'level': 1,
           }),
         ('db-extra-arguments',
          {'type' : 'string',
           'default': '',
           'help': 'set to "Trusted_Connection" if you are using SQLServer and '
                   'want trusted authentication for the database connection',
-          'group': 'native-source', 'inputlevel': 2,
+          'group': 'native-source', 'level': 2,
           }),
     )
 
@@ -254,6 +256,7 @@
         # we need a lock to protect eid attribution function (XXX, really?
         # explain)
         self._eid_creation_lock = Lock()
+        self._eid_creation_cnx = None
         # (etype, attr) / storage mapping
         self._storages = {}
         # entity types that may be used by other multi-sources instances
@@ -263,11 +266,17 @@
         if self.dbdriver == 'sqlite' and \
                not getattr(repo.config, 'no_sqlite_wrap', False):
             from cubicweb.server.sources.extlite import ConnectionWrapper
+            self.dbhelper.dbname = abspath(self.dbhelper.dbname)
             self.get_connection = lambda: ConnectionWrapper(self)
             self.check_connection = lambda cnx: cnx
             def pool_reset(cnx):
                 cnx.close()
             self.pool_reset = pool_reset
+        if self.dbdriver == 'sqlite':
+            self._create_eid = None
+            self.create_eid = self._create_eid_sqlite
+        self.binary_to_str = self.dbhelper.dbapi_module.binary_to_str
+
 
     @property
     def _sqlcnx(self):
@@ -328,6 +337,11 @@
     def init(self):
         self.init_creating()
 
+    def shutdown(self):
+        if self._eid_creation_cnx:
+            self._eid_creation_cnx.close()
+            self._eid_creation_cnx = None
+
     # XXX deprecates [un]map_attribute ?
     def map_attribute(self, etype, attr, cb, sourcedb=True):
         self._rql_sqlgen.attr_map['%s.%s' % (etype, attr)] = (cb, sourcedb)
@@ -348,6 +362,14 @@
             del self._storages[etype]
         self.unmap_attribute(etype, attr)
 
+    def storage(self, etype, attr):
+        """return the storage for the given entity type / attribute
+        """
+        try:
+            return self._storages[etype][attr]
+        except KeyError:
+            raise Exception('no custom storage set for %s.%s' % (etype, attr))
+
     # ISource interface #######################################################
 
     def compile_rql(self, rql, sols):
@@ -429,11 +451,23 @@
         try:
             cursor = self.doexec(session, sql, args)
         except (self.OperationalError, self.InterfaceError):
+            if session.mode == 'write':
+                # do not attempt to reconnect if there has been some write
+                # during the transaction
+                raise
             # FIXME: better detection of deconnection pb
             self.warning("trying to reconnect")
             session.pool.reconnect(self)
             cursor = self.doexec(session, sql, args)
-        results = self.process_result(cursor, cbs)
+        except (self.DbapiError,), exc:
+            # We get this one with pyodbc and SQL Server when connection was reset
+            if exc.args[0] == '08S01' and session.mode != 'write':
+                self.warning("trying to reconnect")
+                session.pool.reconnect(self)
+                cursor = self.doexec(session, sql, args)
+            else:
+                raise
+        results = self.process_result(cursor, cbs, session=session)
         assert dbg_results(results)
         return results
 
@@ -644,9 +678,6 @@
 
     # short cut to method requiring advanced db helper usage ##################
 
-    def binary_to_str(self, value):
-        return self.dbhelper.dbapi_module.binary_to_str(value)
-
     def create_index(self, session, table, column, unique=False):
         cursor = LogCursor(session.pool[self.uri])
         self.dbhelper.create_index(cursor, table, column, unique)
@@ -655,6 +686,14 @@
         cursor = LogCursor(session.pool[self.uri])
         self.dbhelper.drop_index(cursor, table, column, unique)
 
+    def change_col_type(self, session, table, column, coltype, null_allowed):
+        cursor = LogCursor(session.pool[self.uri])
+        self.dbhelper.change_col_type(cursor, table, column, coltype, null_allowed)
+
+    def set_null_allowed(self, session, table, column, coltype, null_allowed):
+        cursor = LogCursor(session.pool[self.uri])
+        self.dbhelper.set_null_allowed(cursor, table, column, coltype, null_allowed)
+
     # system source interface #################################################
 
     def eid_type_source(self, session, eid):
@@ -692,7 +731,7 @@
         return None
 
     def make_temp_table_name(self, table):
-        try: # XXX remove this once 
+        try: # XXX remove this once
             return self.dbhelper.temporary_table_name(table)
         except AttributeError:
             import warnings
@@ -710,7 +749,7 @@
         sql = self.dbhelper.sql_temporary_table(table, schema, False)
         self.doexec(session, sql)
 
-    def create_eid(self, session):
+    def _create_eid_sqlite(self, session):
         self._eid_creation_lock.acquire()
         try:
             for sql in self.dbhelper.sqls_increment_sequence('entities_id_seq'):
@@ -719,6 +758,50 @@
         finally:
             self._eid_creation_lock.release()
 
+
+    def create_eid(self, session):
+        # lock needed to prevent 'Connection is busy with results for another command (0)' errors with SQLServer
+        self._eid_creation_lock.acquire()
+        try:
+            return self._create_eid()
+        finally:
+            self._eid_creation_lock.release()
+
+    def _create_eid(self):
+        # internal function doing the eid creation without locking.
+        # needed for the recursive handling of disconnections (otherwise we
+        # would deadlock on self._eid_creation_lock)
+        if self._eid_creation_cnx is None:
+            self._eid_creation_cnx = self.get_connection()
+        cnx = self._eid_creation_cnx
+        cursor = cnx.cursor()
+        try:
+            for sql in self.dbhelper.sqls_increment_sequence('entities_id_seq'):
+                cursor.execute(sql)
+            eid = cursor.fetchone()[0]
+        except (self.OperationalError, self.InterfaceError):
+            # FIXME: better detection of deconnection pb
+            self.warning("trying to reconnect create eid connection")
+            self._eid_creation_cnx = None
+            return self._create_eid()
+        except (self.DbapiError,), exc:
+            # We get this one with pyodbc and SQL Server when connection was reset
+            if exc.args[0] == '08S01':
+                self.warning("trying to reconnect create eid connection")
+                self._eid_creation_cnx = None
+                return self._create_eid()
+            else:
+                raise
+        except: # WTF?
+            cnx.rollback()
+            self._eid_creation_cnx = None
+            self.exception('create eid failed in an unforeseen way on SQL statement %s', sql)
+            raise
+        else:
+            cnx.commit()
+            return eid
+
+
     def add_info(self, session, entity, source, extid, complete):
         """add type and source info for an eid into the system table"""
         # begin by inserting eid/type/source/extid into the entities table
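
A minimal, runnable sketch of the eid allocation pattern introduced above: allocation is serialized behind a lock (to avoid the SQL Server "Connection is busy" errors mentioned in the patch) and performed on a dedicated connection that is dropped and lazily reopened when it looks broken. sqlite3 and the in-memory entities_id_seq table are stand-ins for the CubicWeb db helpers, used only to keep the sketch self-contained.

import sqlite3
import threading

class EidAllocatorSketch(object):
    def __init__(self):
        self._lock = threading.Lock()
        self._cnx = None

    def _get_connection(self):
        # a real source would reconnect to the instance database; an
        # in-memory sqlite db is used here only to keep the sketch runnable
        cnx = sqlite3.connect(':memory:')
        cnx.execute('CREATE TABLE entities_id_seq (last INTEGER)')
        cnx.execute('INSERT INTO entities_id_seq VALUES (0)')
        return cnx

    def create_eid(self):
        # public entry point: serialize allocation behind the lock
        self._lock.acquire()
        try:
            return self._create_eid()
        finally:
            self._lock.release()

    def _create_eid(self):
        # called with the lock held; drops the connection and recurses
        # when it looks broken, mirroring the reconnection handling above
        if self._cnx is None:
            self._cnx = self._get_connection()
        try:
            cursor = self._cnx.cursor()
            cursor.execute('UPDATE entities_id_seq SET last = last + 1')
            eid = cursor.execute('SELECT last FROM entities_id_seq').fetchone()[0]
        except sqlite3.OperationalError:
            self._cnx = None
            return self._create_eid()
        self._cnx.commit()
        return eid

allocator = EidAllocatorSketch()
assert allocator.create_eid() == 1
assert allocator.create_eid() == 2
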
--- a/server/sources/pyrorql.py	Thu May 06 08:24:46 2010 +0200
+++ b/server/sources/pyrorql.py	Mon Jul 19 15:36:16 2010 +0200
@@ -38,7 +38,7 @@
 from cubicweb.cwconfig import register_persistent_options
 from cubicweb.server.sources import (AbstractSource, ConnectionWrapper,
                                      TimedCache, dbg_st_search, dbg_results)
-
+from cubicweb.server.msplanner import neged_relation
 
 def uidtype(union, col, etype, args):
     select, col = union.locate_subquery(col, etype, args)
@@ -67,53 +67,53 @@
          {'type' : 'string',
           'default': REQUIRED,
           'help': 'identifier of the repository in the pyro name server',
-          'group': 'pyro-source', 'inputlevel': 0,
+          'group': 'pyro-source', 'level': 0,
           }),
         ('mapping-file',
          {'type' : 'string',
           'default': REQUIRED,
           'help': 'path to a python file with the schema mapping definition',
-          'group': 'pyro-source', 'inputlevel': 1,
+          'group': 'pyro-source', 'level': 1,
           }),
         ('cubicweb-user',
          {'type' : 'string',
           'default': REQUIRED,
           'help': 'user to use for connection on the distant repository',
-          'group': 'pyro-source', 'inputlevel': 0,
+          'group': 'pyro-source', 'level': 0,
           }),
         ('cubicweb-password',
          {'type' : 'password',
           'default': '',
           'help': 'user to use for connection on the distant repository',
-          'group': 'pyro-source', 'inputlevel': 0,
+          'group': 'pyro-source', 'level': 0,
           }),
         ('base-url',
          {'type' : 'string',
           'default': '',
           'help': 'url of the web site for the distant repository, if you want '
           'to generate external link to entities from this repository',
-          'group': 'pyro-source', 'inputlevel': 1,
+          'group': 'pyro-source', 'level': 1,
           }),
         ('pyro-ns-host',
          {'type' : 'string',
           'default': None,
           'help': 'Pyro name server\'s host. If not set, default to the value \
 from all_in_one.conf. It may contains port information using <host>:<port> notation.',
-          'group': 'pyro-source', 'inputlevel': 1,
+          'group': 'pyro-source', 'level': 1,
           }),
         ('pyro-ns-group',
          {'type' : 'string',
           'default': None,
           'help': 'Pyro name server\'s group where the repository will be \
 registered. If not set, default to the value from all_in_one.conf.',
-          'group': 'pyro-source', 'inputlevel': 1,
+          'group': 'pyro-source', 'level': 1,
           }),
         ('synchronization-interval',
          {'type' : 'int',
           'default': 5*60,
           'help': 'interval between synchronization with the external \
 repository (default to 5 minutes).',
-          'group': 'pyro-source', 'inputlevel': 2,
+          'group': 'pyro-source', 'level': 2,
           }),
 
     )
@@ -144,11 +144,11 @@
                        'group': 'sources',
                        }),)
         register_persistent_options(myoptions)
-        self._query_cache = TimedCache(30)
+        self._query_cache = TimedCache(1800)
 
     def reset_caches(self):
         """method called during test to reset potential source caches"""
-        self._query_cache = TimedCache(30)
+        self._query_cache = TimedCache(1800)
 
     def last_update_time(self):
         pkey = u'sources.%s.latest-update-time' % self.uri
@@ -299,7 +299,7 @@
             session.set_shared_data('sources_error', msg % self.uri)
             return []
         try:
-            rql, cachekey = RQL2RQL(self).generate(session, union, args)
+            rql = RQL2RQL(self).generate(session, union, args)
         except UnknownEid, ex:
             if server.DEBUG:
                 print '  unknown eid', ex, 'no results'
@@ -307,7 +307,7 @@
         if server.DEBUG & server.DBG_RQL:
             print '  translated rql', rql
         try:
-            rset = cu.execute(rql, args, cachekey)
+            rset = cu.execute(rql, args)
         except Exception, ex:
             self.exception(str(ex))
             msg = session._("error while querying source %s, some data may be missing")
@@ -359,8 +359,7 @@
         """update an entity in the source"""
         relations, kwargs = self._entity_relations_and_kwargs(session, entity)
         cu = session.pool[self.uri]
-        cu.execute('SET %s WHERE X eid %%(x)s' % ','.join(relations),
-                   kwargs, 'x')
+        cu.execute('SET %s WHERE X eid %%(x)s' % ','.join(relations), kwargs)
         self._query_cache.clear()
         entity.clear_all_caches()
 
@@ -368,7 +367,7 @@
         """delete an entity from the source"""
         cu = session.pool[self.uri]
         cu.execute('DELETE %s X WHERE X eid %%(x)s' % entity.__regid__,
-                   {'x': self.eid2extid(entity.eid, session)}, 'x')
+                   {'x': self.eid2extid(entity.eid, session)})
         self._query_cache.clear()
 
     def add_relation(self, session, subject, rtype, object):
@@ -376,7 +375,7 @@
         cu = session.pool[self.uri]
         cu.execute('SET X %s Y WHERE X eid %%(x)s, Y eid %%(y)s' % rtype,
                    {'x': self.eid2extid(subject, session),
-                    'y': self.eid2extid(object, session)}, ('x', 'y'))
+                    'y': self.eid2extid(object, session)})
         self._query_cache.clear()
         session.entity_from_eid(subject).clear_all_caches()
         session.entity_from_eid(object).clear_all_caches()
@@ -386,7 +385,7 @@
         cu = session.pool[self.uri]
         cu.execute('DELETE X %s Y WHERE X eid %%(x)s, Y eid %%(y)s' % rtype,
                    {'x': self.eid2extid(subject, session),
-                    'y': self.eid2extid(object, session)}, ('x', 'y'))
+                    'y': self.eid2extid(object, session)})
         self._query_cache.clear()
         session.entity_from_eid(subject).clear_all_caches()
         session.entity_from_eid(object).clear_all_caches()
@@ -409,9 +408,8 @@
     def generate(self, session, rqlst, args):
         self._session = session
         self.kwargs = args
-        self.cachekey = []
         self.need_translation = False
-        return self.visit_union(rqlst), self.cachekey
+        return self.visit_union(rqlst)
 
     def visit_union(self, node):
         s = self._accept_children(node)
@@ -478,7 +476,10 @@
         return
 
     def visit_exists(self, node):
-        return 'EXISTS(%s)' % node.children[0].accept(self)
+        rql = node.children[0].accept(self)
+        if rql:
+            return 'EXISTS(%s)' % rql
+        return
 
     def visit_relation(self, node):
         try:
@@ -488,7 +489,7 @@
                     restr, lhs = self.process_eid_const(node.children[0])
                 except UnknownEid:
                     # can safely skip not relation with an unsupported eid
-                    if node.neged(strict=True):
+                    if neged_relation(node):
                         return
                     raise
             else:
@@ -496,7 +497,7 @@
                 restr = None
         except UnknownEid:
             # can safely skip not relation with an unsupported eid
-            if node.neged(strict=True):
+            if neged_relation(node):
                 return
             # XXX what about optional relation or outer NOT EXISTS()
             raise
@@ -513,7 +514,7 @@
             rhs = node.children[1].accept(self)
         except UnknownEid:
             # can safely skip not relation with an unsupported eid
-            if node.neged(strict=True):
+            if neged_relation(node):
                 return
             # XXX what about optional relation or outer NOT EXISTS()
             raise
@@ -560,7 +561,6 @@
                 # ensure we have not yet translated the value...
                 if not key in self._const_var:
                     self.kwargs[key] = self.eid2extid(self.kwargs[key])
-                    self.cachekey.append(key)
                     self._const_var[key] = None
         return node.as_string()
 
--- a/server/sources/rql2sql.py	Thu May 06 08:24:46 2010 +0200
+++ b/server/sources/rql2sql.py	Mon Jul 19 15:36:16 2010 +0200
@@ -44,10 +44,9 @@
 by Troels Arvin. Features SQL ISO Standard, PG, mysql, Oracle, MS SQL, DB2
 and Informix.
 
-.. _Comparison of different SQL implementations: http://www.troels.arvin.dk/db/rdbms 
+.. _Comparison of different SQL implementations: http://www.troels.arvin.dk/db/rdbms
+"""
 
-
-"""
 __docformat__ = "restructuredtext en"
 
 import threading
@@ -56,8 +55,8 @@
 
 from rql import BadRQLQuery, CoercionError
 from rql.stmts import Union, Select
-from rql.nodes import (SortTerm, VariableRef, Constant, Function, Not,
-                       Variable, ColumnAlias, Relation, SubQuery, Exists)
+from rql.nodes import (SortTerm, VariableRef, Constant, Function, Variable, Or,
+                       Not, Comparison, ColumnAlias, Relation, SubQuery, Exists)
 
 from cubicweb import QueryError
 from cubicweb.server.sqlutils import SQL_PREFIX
@@ -72,7 +71,7 @@
 FunctionDescr.update_cb_stack = default_update_cb_stack
 
 LENGTH = SQL_FUNCTIONS_REGISTRY.get_function('LENGTH')
-def length_source_execute(source, value):
+def length_source_execute(source, session, value):
     return len(value.getvalue())
 LENGTH.source_execute = length_source_execute
 
@@ -106,13 +105,13 @@
     modified = False
     for varname in tuple(unstable):
         var = select.defined_vars[varname]
-        if not var.stinfo['optrelations']:
+        if not var.stinfo.get('optrelations'):
             continue
         modified = True
         unstable.remove(varname)
         torewrite.add(var)
         newselect = Select()
-        newselect.need_distinct = newselect.need_intersect = False
+        newselect.need_distinct = False
         myunion = Union()
         myunion.append(newselect)
         # extract aliases / selection
@@ -133,13 +132,13 @@
             var.stinfo['relations'].remove(rel)
             newvar.stinfo['relations'].add(newrel)
             if rel.optional in ('left', 'both'):
-                newvar.stinfo['optrelations'].add(newrel)
+                newvar.add_optional_relation(newrel)
             for vref in newrel.children[1].iget_nodes(VariableRef):
                 var = vref.variable
                 var.stinfo['relations'].add(newrel)
                 var.stinfo['rhsrelations'].add(newrel)
                 if rel.optional in ('right', 'both'):
-                    var.stinfo['optrelations'].add(newrel)
+                    var.add_optional_relation(newrel)
         # extract subquery solutions
         mysolutions = [sol.copy() for sol in solutions]
         cleanup_solutions(newselect, mysolutions)
@@ -316,13 +315,15 @@
 # IGenerator implementation for RQL->SQL #######################################
 
 class StateInfo(object):
-    def __init__(self, existssols, unstablevars):
+    def __init__(self, select, existssols, unstablevars):
         self.existssols = existssols
         self.unstablevars = unstablevars
         self.subtables = {}
         self.needs_source_cb = None
         self.subquery_source_cb = None
         self.source_cb_funcs = set()
+        self.scopes = {select: 0}
+        self.scope_nodes = []
 
     def reset(self, solution):
         """reset some visit variables"""
@@ -381,16 +382,62 @@
         self.solution = origsol
         self.tables = origtables
 
-    def push_scope(self):
+    def push_scope(self, scope_node):
+        self.scope_nodes.append(scope_node)
+        self.scopes[scope_node] = len(self.actual_tables)
         self.actual_tables.append([])
         self._restr_stack.append(self.restrictions)
         self.restrictions = []
 
     def pop_scope(self):
+        del self.scopes[self.scope_nodes[-1]]
+        self.scope_nodes.pop()
         restrictions = self.restrictions
         self.restrictions = self._restr_stack.pop()
         return restrictions, self.actual_tables.pop()
 
+def extract_fake_having_terms(having):
+    """RQL's HAVING may be used to contains stuff that should go in the WHERE
+    clause of the SQL query, due to RQL grammar limitation. Split them...
+
+    Return a list nodes that can be ANDed with query's WHERE clause. Having
+    subtrees updated in place.
+    """
+    fakehaving = []
+    for subtree in having:
+        ors, tocheck = set(), []
+        for compnode in subtree.get_nodes(Comparison):
+            for fnode in compnode.get_nodes(Function):
+                if fnode.descr().aggregat:
+                    p = compnode.parent
+                    oor = None
+                    while not isinstance(p, Select):
+                        if isinstance(p, Or):
+                            oor = p
+                        p = p.parent
+                    if oor is not None:
+                        ors.add(oor)
+                    break
+            else:
+                tocheck.append(compnode)
+        # tocheck holds a set of comparisons not involving an aggregate
+        # function; put them in fakehaving if they don't share an Or node as
+        # ancestor with another comparison containing an aggregate function
+        for compnode in tocheck:
+            parents = set()
+            p = compnode.parent
+            oor = None
+            while not isinstance(p, Select):
+                if p in ors or p is None: # p is None for nodes already in fakehaving
+                    break
+                if isinstance(p, Or):
+                    oor = p
+                p = p.parent
+            else:
+                node = oor or compnode
+                fakehaving.append(node)
+                node.parent.remove(node)
+    return fakehaving
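
A minimal sketch, without the rql library, of the idea behind extract_fake_having_terms(): comparisons that involve no aggregate function are pulled out of HAVING so they can be ANDed into the SQL WHERE clause, while genuine aggregate comparisons stay in HAVING. Terms are plain strings here instead of rql Comparison nodes, and the Or-ancestor handling of the real code is deliberately left out.

AGGREGATES = frozenset(('COUNT', 'SUM', 'MIN', 'MAX', 'AVG'))

def split_having(having_terms):
    fake, real = [], []
    for term in having_terms:
        # a term containing an aggregate call must remain in HAVING
        if any(agg + '(' in term.upper() for agg in AGGREGATES):
            real.append(term)
        else:
            fake.append(term)
    return fake, real

fake, real = split_having(['COUNT(X) > 10', 'UPPER(N) = "JOE"'])
assert fake == ['UPPER(N) = "JOE"']
assert real == ['COUNT(X) > 10']
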
 
 class SQLGenerator(object):
     """
@@ -442,7 +489,7 @@
         self._varmap = varmap
         self._query_attrs = {}
         self._state = None
-        self._not_scope_offset = 0
+        # self._not_scope_offset = 0
         try:
             # union query for each rqlst / solution
             sql = self.union_sql(union)
@@ -488,6 +535,7 @@
         sorts = select.orderby
         groups = select.groupby
         having = select.having
+        morerestr = extract_fake_having_terms(having)
         # remember selection, it may be changed and have to be restored
         origselection = select.selection[:]
         # check if the query will have union subquery, if it need sort term
@@ -509,7 +557,7 @@
                     needwrap = True
         else:
             existssols, unstable = {}, ()
-        state = StateInfo(existssols, unstable)
+        state = StateInfo(select, existssols, unstable)
         if self._state is not None:
             # state from a previous unioned select
             state.merge_source_cbs(self._state.needs_source_cb)
@@ -539,7 +587,8 @@
         self._in_wrapping_query = False
         self._state = state
         try:
-            sql = self._solutions_sql(select, sols, distinct, needalias or needwrap)
+            sql = self._solutions_sql(select, morerestr, sols, distinct,
+                                      needalias or needwrap)
             # generate groups / having before wrapping query selection to
             # get correct column aliases
             self._in_wrapping_query = needwrap
@@ -550,8 +599,8 @@
                                   if not isinstance(vref, Constant))
             if having:
                 # filter out constants as for GROUP BY
-                having = ','.join(vref.accept(self) for vref in having
-                                  if not isinstance(vref, Constant))
+                having = ' AND '.join(term.accept(self) for term in having
+                                      if not isinstance(term, Constant))
             if needwrap:
                 sql = '%s FROM (%s) AS T1' % (self._selection_sql(outerselection, distinct,
                                                                   needalias),
@@ -602,13 +651,15 @@
                 except KeyError:
                     continue
 
-    def _solutions_sql(self, select, solutions, distinct, needalias):
+    def _solutions_sql(self, select, morerestr, solutions, distinct, needalias):
         sqls = []
         for solution in solutions:
             self._state.reset(solution)
             # visit restriction subtree
             if select.where is not None:
                 self._state.add_restriction(select.where.accept(self))
+            for restriction in morerestr:
+                self._state.add_restriction(restriction.accept(self))
             sql = [self._selection_sql(select.selection, distinct, needalias)]
             if self._state.restrictions:
                 sql.append('WHERE %s' % ' AND '.join(self._state.restrictions))
@@ -622,12 +673,7 @@
             elif self._state.restrictions and self.dbhelper.needs_from_clause:
                 sql.insert(1, 'FROM (SELECT 1) AS _T')
             sqls.append('\n'.join(sql))
-        if select.need_intersect:
-            #if distinct or not self.dbhelper.intersect_all_support:
-            return '\nINTERSECT\n'.join(sqls)
-            #else:
-            #    return '\nINTERSECT ALL\n'.join(sqls)
-        elif distinct:
+        if distinct:
             return '\nUNION\n'.join(sqls)
         else:
             return '\nUNION ALL\n'.join(sqls)
@@ -682,32 +728,11 @@
         return ''
 
     def visit_not(self, node):
-        self._state.push_scope()
-        if isinstance(node.children[0], Relation):
-            self._not_scope_offset += 1
         csql = node.children[0].accept(self)
-        if isinstance(node.children[0], Relation):
-            self._not_scope_offset -= 1
-        sqls, tables = self._state.pop_scope()
         if node in self._state.done or not csql:
             # already processed or no sql generated by children
-            self._state.actual_tables[-1] += tables
-            self._state.restrictions += sqls
             return csql
-        if isinstance(node.children[0], Exists):
-            assert not sqls, (sqls, str(node.stmt))
-            assert not tables, (tables, str(node.stmt))
-            return 'NOT %s' % csql
-        sqls.append(csql)
-        if tables:
-            select = 'SELECT 1 FROM %s' % ','.join(tables)
-        else:
-            select = 'SELECT 1'
-        if sqls:
-            sql = 'NOT EXISTS(%s WHERE %s)' % (select, ' AND '.join(sqls))
-        else:
-            sql = 'NOT EXISTS(%s)' % select
-        return sql
+        return 'NOT (%s)' % csql
 
     def visit_exists(self, exists):
         """generate SQL name for a exists subquery"""
@@ -721,7 +746,7 @@
         return 'EXISTS(%s)' % ' UNION '.join(sqls)
 
     def _visit_exists(self, exists):
-        self._state.push_scope()
+        self._state.push_scope(exists)
         restriction = exists.children[0].accept(self)
         restrictions, tables = self._state.pop_scope()
         if restriction:
@@ -762,32 +787,29 @@
                 else:
                     # no variables in the RHS
                     sql = self._visit_attribute_relation(relation)
-                if relation.neged(strict=True):
-                    self._state.done.add(relation.parent)
-                    sql = 'NOT (%s)' % sql
+        elif (rtype == 'is' and isinstance(rhs.children[0], Constant)
+              and rhs.children[0].eval(self._args) is None):
+            # special case "C is NULL"
+            if lhs.name in self._varmap:
+                lhssql = self._varmap[lhs.name]
+            else:
+                lhssql = lhs.accept(self)
+            return '%s%s' % (lhssql, rhs.accept(self))
+        elif '%s.%s' % (lhs, relation.r_type) in self._varmap:
+            # relation has already been processed by a previous step
+            return ''
+        elif relation.optional:
+            # check it has not already been treaten (to get necessary
+            # information to add an outer join condition)
+            if relation in self._state.done:
+                return ''
+            # OPTIONAL relation, generate a left|right outer join
+            sql = self._visit_outer_join_relation(relation, rschema)
+        elif rschema.inlined:
+            sql = self._visit_inlined_relation(relation)
         else:
-            if rtype == 'is' and rhs.operator == 'IS':
-                # special case "C is NULL"
-                if lhs.name in self._varmap:
-                    lhssql = self._varmap[lhs.name]
-                else:
-                    lhssql = lhs.accept(self)
-                return '%s%s' % (lhssql, rhs.accept(self))
-            if '%s.%s' % (lhs, relation.r_type) in self._varmap:
-                # relation has already been processed by a previous step
-                return
-            if relation.optional:
-                # check it has not already been treaten (to get necessary
-                # information to add an outer join condition)
-                if relation in self._state.done:
-                    return
-                # OPTIONAL relation, generate a left|right outer join
-                sql = self._visit_outer_join_relation(relation, rschema)
-            elif rschema.inlined:
-                sql = self._visit_inlined_relation(relation)
-            else:
-                # regular (non final) relation
-                sql = self._visit_relation(relation, rschema)
+            # regular (non final) relation
+            sql = self._visit_relation(relation, rschema)
         return sql
 
     def _visit_inlined_relation(self, relation):
@@ -796,7 +818,7 @@
         assert lhsvar is not None
         if isinstance(relation.parent, Not) \
                and len(lhsvar.stinfo['relations']) > 1 \
-               and (rhsvar is None or rhsvar._q_invariant):
+               and (rhsvar is not None and rhsvar._q_invariant):
             self._state.done.add(relation.parent)
             return '%s IS NULL' % self._inlined_var_sql(lhsvar, relation.r_type)
         lhssql = self._inlined_var_sql(lhsvar, relation.r_type)
@@ -806,10 +828,11 @@
             # if the rhs variable is only linked to this relation, this mean we
             # only want the relation to exists, eg NOT NULL in case of inlined
             # relation
-            if len(rhsvar.stinfo['relations']) == 1 and rhsvar._q_invariant:
+            if rhsvar._q_invariant:
+                sql = self._extra_join_sql(relation, lhssql, rhsvar)
+                if sql:
+                    return sql
                 return '%s IS NOT NULL' % lhssql
-            if rhsvar._q_invariant:
-                return self._extra_join_sql(relation, lhssql, rhsvar)
         return '%s=%s' % (lhssql, rhsvar.accept(self))
 
     def _process_relation_term(self, relation, rid, termvar, termconst, relfield):
@@ -833,9 +856,6 @@
         if relation.r_type == 'identity':
             # special case "X identity Y"
             lhs, rhs = relation.get_parts()
-            if isinstance(relation.parent, Not):
-                self._state.done.add(relation.parent)
-                return 'NOT %s%s' % (lhs.accept(self), rhs.accept(self))
             return '%s%s' % (lhs.accept(self), rhs.accept(self))
         lhsvar, lhsconst, rhsvar, rhsconst = relation_info(relation)
         rid = self._relation_table(relation)
@@ -907,7 +927,7 @@
                         condition = '%s=%s' % (lhssql, rhsconst.accept(self))
                         if relation.r_type != 'identity':
                             condition = '(%s OR %s IS NULL)' % (condition, lhssql)
-                        if not lhsvar.stinfo['optrelations']:
+                        if not lhsvar.stinfo.get('optrelations'):
                             return condition
                         self.add_outer_join_condition(lhsvar, t1, condition)
                     return
@@ -1006,7 +1026,7 @@
                 sql = '%s%s' % (lhssql, rhssql)
         except AttributeError:
             sql = '%s%s' % (lhssql, rhssql)
-        if lhs.variable.stinfo['optrelations']:
+        if lhs.variable.stinfo.get('optrelations'):
             self.add_outer_join_condition(lhs.variable, table, sql)
         else:
             return sql
@@ -1021,7 +1041,7 @@
         lhsvar = lhs.variable
         me_is_principal = lhsvar.stinfo.get('principal') is rel
         if me_is_principal:
-            if not lhsvar.stinfo['typerels']:
+            if lhsvar.stinfo['typerel'] is None:
                 # the variable is using the fti table, no join needed
                 jointo = None
             elif not lhsvar.name in self._varmap:
@@ -1041,7 +1061,7 @@
         else:
             not_ = False
         return self.dbhelper.fti_restriction_sql(alias, const.eval(self._args),
-                                                    jointo, not_) + restriction
+                                                 jointo, not_) + restriction
 
     def visit_comparison(self, cmp):
         """generate SQL for a comparison"""
@@ -1052,7 +1072,7 @@
             lhs = None
             rhs = cmp.children[0]
         operator = cmp.operator
-        if operator in ('IS', 'LIKE', 'ILIKE'):
+        if operator in ('LIKE', 'ILIKE'):
             if operator == 'ILIKE' and not self.dbhelper.ilike_support:
                 operator = ' LIKE '
             else:
@@ -1076,7 +1096,8 @@
         operator = mexpr.operator
         try:
             if mexpr.operator == '+' and mexpr.get_type(self._state.solution, self._args) == 'String':
-                operator = '||'
+                return '(%s)' % self.dbhelper.sql_concat_string(lhs.accept(self),
+                                                                rhs.accept(self))
         except CoercionError:
             pass
         return '(%s %s %s)'% (lhs.accept(self), operator, rhs.accept(self))
@@ -1154,7 +1175,7 @@
                 vtablename = '_' + variable.name
                 self.add_table('entities AS %s' % vtablename, vtablename)
                 sql = '%s.eid' % vtablename
-                if variable.stinfo['typerels']:
+                if variable.stinfo['typerel'] is not None:
                     # add additional restriction on entities.type column
                     pts = variable.stinfo['possibletypes']
                     if len(pts) == 1:
@@ -1203,25 +1224,27 @@
             pass
         return ''
 
+    def _temp_table_scope(self, select, table):
+        scope = 9999
+        for var, sql in self._varmap.iteritems():
+            # skip "attribute variables" in varmap (such as 'T.login')
+            if not '.' in var and table == sql.split('.', 1)[0]:
+                try:
+                    scope = min(scope, self._state.scopes[select.defined_vars[var].scope])
+                except KeyError:
+                    scope = 0 # XXX
+                if scope == 0:
+                    break
+        return scope
+
     def _var_info(self, var):
-        # if current var or one of its attribute is selected , it *must*
-        # appear in the toplevel's FROM even if we're currently visiting
-        # a EXISTS node
-        if var.sqlscope is var.stmt:
-            scope = 0
-        # don't consider not_scope_offset if the variable is only used in one
-        # relation
-        elif len(var.stinfo['relations']) > 1:
-            scope = -1 - self._not_scope_offset
-        else:
-            scope = -1
         try:
             sql = self._varmap[var.name]
             tablealias = sql.split('.', 1)[0]
-            if scope < 0:
-                scope = self._varmap_table_scope(var.stmt, tablealias)
+            scope = self._temp_table_scope(var.stmt, tablealias)
             self.add_table(tablealias, scope=scope)
         except KeyError:
+            scope = self._state.scopes[var.scope]
             etype = self._state.solution[var.name]
             # XXX this check should be moved in rql.stcheck
             if self.schema.eschema(etype).final:
@@ -1235,7 +1258,7 @@
     def _inlined_var_sql(self, var, rtype):
         try:
             sql = self._varmap['%s.%s' % (var.name, rtype)]
-            scope = var.sqlscope is var.stmt and 0 or -1
+            scope = self._state.scopes[var.scope]
             self.add_table(sql.split('.', 1)[0], scope=scope)
         except KeyError:
             sql = '%s.%s%s' % (self._var_table(var), SQL_PREFIX, rtype)
@@ -1316,7 +1339,7 @@
             tablealias = self._state.outer_tables[table]
             actualtables = self._state.actual_tables[-1]
         except KeyError:
-            for rel in var.stinfo['optrelations']:
+            for rel in var.stinfo.get('optrelations'):
                 self.visit_relation(rel)
             assert self._state.outer_tables
             self.add_outer_join_condition(var, table, condition)
@@ -1358,7 +1381,7 @@
                 break
             # XXX may have a principal without being invariant for this generation,
             #     not sure this is a pb or not
-            if var.stinfo.get('principal') is relation and var.sqlscope is var.stmt:
+            if var.stinfo.get('principal') is relation and var.scope is var.stmt:
                 scope = 0
                 break
         else:
@@ -1379,15 +1402,3 @@
         alias = self.alias_and_add_table(self.dbhelper.fti_table)
         relation._q_sqltable = alias
         return alias
-
-    def _varmap_table_scope(self, select, table):
-        """since a varmap table may be used for multiple variable, its scope is
-        the most outer scope of each variables
-        """
-        scope = -1
-        for varname, alias in self._varmap.iteritems():
-            # check '.' in varname since there are 'X.attribute' keys in varmap
-            if not '.' in varname and alias.split('.', 1)[0] == table:
-                if select.defined_vars[varname].sqlscope is select:
-                    return 0
-        return scope
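
Note: the `_temp_table_scope` helper added above replaces `_varmap_table_scope`; it keeps the most outer scope of every plain variable mapped to the temporary table. A minimal, self-contained sketch of the varmap filtering it relies on (the helper name and table aliases below are made up for illustration):

def plain_variables_for_table(varmap, table):
    """return variable names mapped to `table`, skipping 'X.attr' keys
    such as 'T.login' (hypothetical helper, not part of rql2sql)"""
    return [var for var, sql in varmap.items()
            if '.' not in var and sql.split('.', 1)[0] == table]

# only 'T' is considered when computing the scope of table0
assert plain_variables_for_table(
    {'T': 'table0.C0', 'T.login': 'table0.C1'}, 'table0') == ['T']
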
--- a/server/sources/storages.py	Thu May 06 08:24:46 2010 +0200
+++ b/server/sources/storages.py	Mon Jul 19 15:36:16 2010 +0200
@@ -16,12 +16,13 @@
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """custom storages for the system source"""
+
 from os import unlink, path as osp
 
 from yams.schema import role_name
 
-from cubicweb import Binary
-from cubicweb.server.hook import Operation
+from cubicweb import Binary, ValidationError
+from cubicweb.server import hook
 
 def set_attribute_storage(repo, etype, attr, storage):
     repo.system_source.set_storage(etype, attr, storage)
@@ -33,10 +34,10 @@
     """abstract storage
 
     * If `source_callback` is true (by default), the callback will be run during
-      query result process of fetched attribute's valu and should have the
+      query result process of fetched attribute's value and should have the
       following prototype::
 
-        callback(self, source, value)
+        callback(self, source, session, value)
 
       where `value` is the value actually stored in the backend. None values
       will be skipped (eg callback won't be called).
@@ -67,6 +68,9 @@
     def entity_deleted(self, entity, attr):
         """an entity using this storage for attr has been deleted"""
         raise NotImplementedError()
+    def migrate_entity(self, entity, attribute):
+        """migrate an entity attribute to the storage"""
+        raise NotImplementedError()
 
 # TODO
 # * make it configurable without code
@@ -79,7 +83,7 @@
 
     XXX subject to race condition.
     """
-    path = osp.join(dirpath, basename)
+    path = osp.join(dirpath, basename.replace(osp.sep, '-'))
     if not osp.isfile(path):
         return path
     base, ext = osp.splitext(path)
@@ -89,19 +93,20 @@
             return path
     return None
 
+
 class BytesFileSystemStorage(Storage):
     """store Bytes attribute value on the file system"""
     def __init__(self, defaultdir, fsencoding='utf-8'):
         self.default_directory = defaultdir
         self.fsencoding = fsencoding
 
-    def callback(self, source, value):
+    def callback(self, source, session, value):
         """sql generator callback when some attribute with a custom storage is
         accessed
         """
         fpath = source.binary_to_str(value)
         try:
-            return Binary(file(fpath).read())
+            return Binary(file(fpath, 'rb').read())
         except OSError, ex:
             source.critical("can't open %s: %s", value, ex)
             return None
@@ -109,33 +114,58 @@
     def entity_added(self, entity, attr):
         """an entity using this storage for attr has been added"""
         if entity._cw.transaction_data.get('fs_importing'):
-            binary = Binary(file(entity[attr].getvalue()).read())
+            binary = Binary(file(entity[attr].getvalue(), 'rb').read())
         else:
             binary = entity.pop(attr)
             fpath = self.new_fs_path(entity, attr)
             # bytes storage used to store file's path
             entity[attr] = Binary(fpath)
-            file(fpath, 'w').write(binary.getvalue())
-            AddFileOp(entity._cw, filepath=fpath)
+            file(fpath, 'wb').write(binary.getvalue())
+            hook.set_operation(entity._cw, 'bfss_added', fpath, AddFileOp)
         return binary
 
     def entity_updated(self, entity, attr):
         """an entity using this storage for attr has been updatded"""
+        # get the name of the previous file containing the value
+        oldpath = self.current_fs_path(entity, attr)
         if entity._cw.transaction_data.get('fs_importing'):
-            oldpath = self.current_fs_path(entity, attr)
+            # If we are importing from the filesystem, the file already exists.
+            # We do not need to create it but we need to fetch the content of
+            # the file as the actual content of the attribute
             fpath = entity[attr].getvalue()
-            if oldpath != fpath:
-                DeleteFileOp(entity._cw, filepath=oldpath)
-            binary = Binary(file(fpath).read())
+            binary = Binary(file(fpath, 'rb').read())
         else:
+            # We must store the content of the attribute in a file to stay
+            # consistent with the behaviour of entity_added. Moreover, the
+            # BytesFileSystemStorage expects to be able to retrieve the
+            # current value of the attribute at any time by reading the file
+            # on disk. To be able to roll back, use a new file and keep the
+            # old one; it will be removed on commit if everything went fine.
+            #
+            # fetch the current attribute value in memory
             binary = entity.pop(attr)
-            fpath = self.current_fs_path(entity, attr)
-            UpdateFileOp(entity._cw, filepath=fpath, filedata=binary.getvalue())
+            # Get filename for it
+            fpath = self.new_fs_path(entity, attr)
+            assert not osp.exists(fpath)
+            # write attribute value on disk
+            file(fpath, 'wb').write(binary.getvalue())
+            # Mark the new file as added during the transaction.
+            # The file will be removed on rollback
+            hook.set_operation(entity._cw, 'bfss_added', fpath, AddFileOp)
+        if oldpath != fpath:
+            # register the new location for the file.
+            entity[attr] = Binary(fpath)
+            # Mark the old file as useless so the file will be removed at
+            # commit.
+            hook.set_operation(entity._cw, 'bfss_deleted', oldpath,
+                               DeleteFileOp)
         return binary
 
     def entity_deleted(self, entity, attr):
         """an entity using this storage for attr has been deleted"""
-        DeleteFileOp(entity._cw, filepath=self.current_fs_path(entity, attr))
+        fpath = self.current_fs_path(entity, attr)
+        hook.set_operation(entity._cw, 'bfss_deleted', fpath, DeleteFileOp)
 
     def new_fs_path(self, entity, attr):
         # We try to get some hint about how to name the file using attribute's
@@ -150,7 +180,7 @@
         fspath = uniquify_path(self.default_directory, '_'.join(basename))
         if fspath is None:
             msg = entity._cw._('failed to uniquify path (%s, %s)') % (
-                dirpath, '_'.join(basename))
+                self.default_directory, '_'.join(basename))
             raise ValidationError(entity.eid, {role_name(attr, 'subject'): msg})
         return fspath
 
@@ -165,24 +195,30 @@
         return sysource._process_value(rawvalue, cu.description[0],
                                        binarywrap=str)
 
+    def migrate_entity(self, entity, attribute):
+        """migrate an entity attribute to the storage"""
+        entity.edited_attributes = set()
+        self.entity_added(entity, attribute)
+        session = entity._cw
+        source = session.repo.system_source
+        attrs = source.preprocess_entity(entity)
+        sql = source.sqlgen.update('cw_' + entity.__regid__, attrs,
+                                   ['cw_eid'])
+        source.doexec(session, sql, attrs)
 
-class AddFileOp(Operation):
+
+class AddFileOp(hook.Operation):
     def rollback_event(self):
-        try:
-            unlink(self.filepath)
-        except:
-            pass
+        for filepath in self.session.transaction_data.pop('bfss_added'):
+            try:
+                unlink(filepath)
+            except Exception, ex:
+                self.error("can't remove %s: %s" % (filepath, ex))
 
-class DeleteFileOp(Operation):
+class DeleteFileOp(hook.Operation):
     def commit_event(self):
-        try:
-            unlink(self.filepath)
-        except:
-            pass
-
-class UpdateFileOp(Operation):
-    def precommit_event(self):
-        try:
-            file(self.filepath, 'w').write(self.filedata)
-        except Exception, ex:
-            self.exception(str(ex))
+        for filepath in self.session.transaction_data.pop('bfss_deleted'):
+            try:
+                unlink(filepath)
+            except Exception, ex:
+                self.error("can't remove %s: %s" % (filepath, ex))
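
Note: the public entry points touched above are `set_attribute_storage` and `BytesFileSystemStorage`. A hedged sketch of how a cube would wire them up at repository startup (the entity type, attribute and directory are illustrative, not taken from this changeset):

from cubicweb.server.sources.storages import (
    BytesFileSystemStorage, set_attribute_storage)

def setup_bfss(repo):
    # store File.data contents on the file system; the Bytes column then
    # holds the file path, and the storage callback (now called with the
    # session as well) reads the file back when the attribute is fetched
    storage = BytesFileSystemStorage('/var/lib/myapp/bfss')
    set_attribute_storage(repo, 'File', 'data', storage)
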
--- a/server/sqlutils.py	Thu May 06 08:24:46 2010 +0200
+++ b/server/sqlutils.py	Mon Jul 19 15:36:16 2010 +0200
@@ -165,6 +165,7 @@
         dbapi_module = self.dbhelper.dbapi_module
         self.OperationalError = dbapi_module.OperationalError
         self.InterfaceError = dbapi_module.InterfaceError
+        self.DbapiError = dbapi_module.Error
         self._binary = dbapi_module.Binary
         self._process_value = dbapi_module.process_value
         self._dbencoding = dbencoding
@@ -201,7 +202,7 @@
             return newargs
         return query_args
 
-    def process_result(self, cursor, column_callbacks=None):
+    def process_result(self, cursor, column_callbacks=None, session=None):
         """return a list of CubicWeb compliant values from data in the given cursor
         """
         # use two different implementations to avoid paying the price of
@@ -209,9 +210,10 @@
         # lookup
         if not column_callbacks:
             return self._process_result(cursor)
-        return self._cb_process_result(cursor, column_callbacks)
+        assert session
+        return self._cb_process_result(cursor, column_callbacks, session)
 
-    def _process_result(self, cursor, column_callbacks=None):
+    def _process_result(self, cursor):
         # begin bind to locals for optimization
         descr = cursor.description
         encoding = self._dbencoding
@@ -229,7 +231,7 @@
             results[i] = result
         return results
 
-    def _cb_process_result(self, cursor, column_callbacks):
+    def _cb_process_result(self, cursor, column_callbacks, session):
         # begin bind to locals for optimization
         descr = cursor.description
         encoding = self._dbencoding
@@ -248,7 +250,7 @@
                     value = process_value(value, descr[col], encoding, binary)
                 else:
                     for cb in cbstack:
-                        value = cb(self, value)
+                        value = cb(self, session, value)
                 result.append(value)
             results[i] = result
         return results
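
Note: column callbacks handed to `_cb_process_result` are now invoked as `cb(source, session, value)`; for a `Storage`, the bound `callback` method matches the `callback(self, source, session, value)` prototype documented above. A minimal sketch of a standalone callback following that convention (purely illustrative, not shipped with CubicWeb):

def uppercase_cb(source, session, value):
    """column callback: each callback in the stack receives the value
    returned by the previous one and returns the value to put in the
    result set"""
    if value is None:
        return None
    return value.upper()
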
--- a/server/test/data/migratedapp/schema.py	Thu May 06 08:24:46 2010 +0200
+++ b/server/test/data/migratedapp/schema.py	Mon Jul 19 15:36:16 2010 +0200
@@ -19,7 +19,7 @@
 
 """
 from yams.buildobjs import (EntityType, RelationType, RelationDefinition,
-                            SubjectRelation, ObjectRelation,
+                            SubjectRelation,
                             RichString, String, Int, Boolean, Datetime, Date)
 from yams.constraints import SizeConstraint, UniqueConstraint
 from cubicweb.schema import (WorkflowableEntityType, RQLConstraint,
@@ -95,7 +95,9 @@
                   constraints=[UniqueConstraint(), SizeConstraint(64)])
     description = RichString(fulltextindexed=True)
 
-    filed_under2 = ObjectRelation('*')
+class filed_under2(RelationDefinition):
+    subject = '*'
+    object = 'Folder2'
 
 
 class Personne(EntityType):
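
Note: the same rewrite recurs in the test schemas below: relations previously declared inline with `ObjectRelation` become standalone `RelationDefinition` classes whose `object` is the entity type that used to hold the declaration. Using `filed_under2` from this file as the example (sketch only):

from yams.buildobjs import RelationDefinition

# before: declared on the object side, inside Folder2
#     filed_under2 = ObjectRelation('*')
# after: a standalone definition with subject and object stated explicitly
class filed_under2(RelationDefinition):
    subject = '*'
    object = 'Folder2'
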
--- a/server/test/data/schema.py	Thu May 06 08:24:46 2010 +0200
+++ b/server/test/data/schema.py	Mon Jul 19 15:36:16 2010 +0200
@@ -15,12 +15,9 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""
 
-"""
 from yams.buildobjs import (EntityType, RelationType, RelationDefinition,
-                            SubjectRelation, ObjectRelation,
-                            RichString, String, Int, Boolean, Datetime)
+                            SubjectRelation, RichString, String, Int, Boolean, Datetime)
 from yams.constraints import SizeConstraint
 from cubicweb.schema import (WorkflowableEntityType, RQLConstraint,
                              ERQLExpression, RRQLExpression)
@@ -77,7 +74,10 @@
 
 class SubDivision(Division):
     __specializes_schema__ = True
-    travaille_subdivision = ObjectRelation('Personne')
+
+class travaille_subdivision(RelationDefinition):
+    subject = 'Personne'
+    object = 'SubDivision'
 
 from cubicweb.schemas.base import CWUser
 CWUser.get_relations('login').next().fulltextindexed = True
@@ -85,7 +85,11 @@
 class Note(WorkflowableEntityType):
     date = String(maxsize=10)
     type = String(maxsize=6)
-    para = String(maxsize=512)
+    para = String(maxsize=512,
+                  __permissions__ = {
+                      'read':   ('managers', 'users', 'guests'),
+                      'update': ('managers', ERQLExpression('X in_state S, S name "todo"')),
+                      })
 
     migrated_from = SubjectRelation('Note')
     attachment = SubjectRelation(('File', 'Image'))
@@ -104,31 +108,18 @@
     tel    = Int()
     fax    = Int()
     datenaiss = Datetime()
-    test   = Boolean()
+    test   = Boolean(__permissions__={
+        'read': ('managers', 'users', 'guests'),
+        'update': ('managers',),
+        })
     description = String()
     firstname = String(fulltextindexed=True, maxsize=64)
 
-    travaille = SubjectRelation('Societe')
     concerne = SubjectRelation('Affaire')
     connait = SubjectRelation('Personne')
     inline2 = SubjectRelation('Affaire', inlined=True, cardinality='?*')
-    comments = ObjectRelation('Comment')
 
 
-class fiche(RelationType):
-    inlined = True
-    subject = 'Personne'
-    object = 'Card'
-    cardinality = '??'
-
-class multisource_inlined_rel(RelationType):
-    inlined = True
-    cardinality = '?*'
-    subject = ('Card', 'Note')
-    object = ('Affaire', 'Note')
-
-class ecrit_par(RelationType):
-    inlined = True
 
 class connait(RelationType):
     symmetric = True
@@ -140,23 +131,30 @@
         'delete': ('managers', RRQLExpression('O owned_by U')),
         }
 
-class travaille(RelationType):
+class travaille(RelationDefinition):
     __permissions__ = {
         'read':   ('managers', 'users', 'guests'),
         'add':    ('managers', RRQLExpression('U has_update_permission S')),
         'delete': ('managers', RRQLExpression('O owned_by U')),
         }
+    subject = 'Personne'
+    object = 'Societe'
 
-class para(RelationType):
-    __permissions__ = {
-        'read':   ('managers', 'users', 'guests'),
-        'update': ('managers', ERQLExpression('X in_state S, S name "todo"')),
-        }
+class comments(RelationDefinition):
+    subject = 'Comment'
+    object = 'Personne'
 
-class test(RelationType):
-    __permissions__ = {'read': ('managers', 'users', 'guests'),
-                       'update': ('managers',),
-                       }
+class fiche(RelationDefinition):
+    inlined = True
+    subject = 'Personne'
+    object = 'Card'
+    cardinality = '??'
+
+class multisource_inlined_rel(RelationDefinition):
+    inlined = True
+    cardinality = '?*'
+    subject = ('Card', 'Note')
+    object = ('Affaire', 'Note')
 
 class multisource_rel(RelationDefinition):
     subject = ('Card', 'Note')
@@ -180,6 +178,9 @@
     subject = ('Personne', 'CWUser', 'Societe')
     object = ('Note')
 
+class ecrit_par(RelationType):
+    inlined = True
+
 class ecrit_par_1(RelationDefinition):
     name = 'ecrit_par'
     subject = 'Note'
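
Note: the per-relation-type permission classes (`para`, `test`) are likewise folded into attribute-level `__permissions__` dictionaries, as the Note and Personne hunks above show. A hedged sketch of the pattern on a hypothetical entity type:

from yams.buildobjs import EntityType, String
from cubicweb.schema import ERQLExpression

class Memo(EntityType):
    # hypothetical attribute, only to illustrate attribute-level permissions
    body = String(maxsize=512,
                  __permissions__={
                      'read':   ('managers', 'users', 'guests'),
                      'update': ('managers',
                                 ERQLExpression('X owned_by U')),
                  })
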
--- a/server/test/data/sources_multi	Thu May 06 08:24:46 2010 +0200
+++ b/server/test/data/sources_multi	Mon Jul 19 15:36:16 2010 +0200
@@ -3,7 +3,7 @@
 db-driver   = sqlite
 db-host     = 
 adapter     = native
-db-name     = tmpdb
+db-name     = tmpdb-multi
 db-encoding = UTF-8
 db-user     = admin
 db-password = gingkow
--- a/server/test/unittest_checkintegrity.py	Thu May 06 08:24:46 2010 +0200
+++ b/server/test/unittest_checkintegrity.py	Mon Jul 19 15:36:16 2010 +0200
@@ -15,28 +15,47 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""
 
-"""
 import sys
 from StringIO import StringIO
 from logilab.common.testlib import TestCase, unittest_main
 from cubicweb.devtools import init_test_database
 
 
-from cubicweb.server.checkintegrity import check
+from cubicweb.server.checkintegrity import check, reindex_entities
 
 class CheckIntegrityTC(TestCase):
-    def test(self):
-        repo, cnx = init_test_database()
+    def setUp(self):
+        self.repo, self.cnx = init_test_database()
+        self.execute = self.cnx.cursor().execute
+        self.session = self.repo._sessions[self.cnx.sessionid]
         sys.stderr = sys.stdout = StringIO()
-        try:
-            check(repo, cnx, ('entities', 'relations', 'text_index', 'metadata'),
-                  reindex=True, fix=True, withpb=False)
-        finally:
-            sys.stderr = sys.__stderr__
-            sys.stdout = sys.__stdout__
-        repo.shutdown()
+
+    def tearDown(self):
+        sys.stderr = sys.__stderr__
+        sys.stdout = sys.__stdout__
+        self.cnx.close()
+        self.repo.shutdown()
+
+    def test_checks(self):
+        check(self.repo, self.cnx, ('entities', 'relations', 'text_index', 'metadata'),
+              reindex=False, fix=True, withpb=False)
+
+    def test_reindex_all(self):
+        self.execute('INSERT Personne X: X nom "toto", X prenom "tutu"')
+        self.session.commit(False)
+        self.failUnless(self.execute('Any X WHERE X has_text "tutu"'))
+        reindex_entities(self.repo.schema, self.session, withpb=False)
+        self.failUnless(self.execute('Any X WHERE X has_text "tutu"'))
+
+    def test_reindex_etype(self):
+        self.execute('INSERT Personne X: X nom "toto", X prenom "tutu"')
+        self.execute('INSERT Affaire X: X ref "toto"')
+        self.session.commit(False)
+        reindex_entities(self.repo.schema, self.session, withpb=False,
+                         etypes=('Personne',))
+        self.failUnless(self.execute('Any X WHERE X has_text "tutu"'))
+        self.failUnless(self.execute('Any X WHERE X has_text "toto"'))
 
 if __name__ == '__main__':
     unittest_main()
--- a/server/test/unittest_hook.py	Thu May 06 08:24:46 2010 +0200
+++ b/server/test/unittest_hook.py	Mon Jul 19 15:36:16 2010 +0200
@@ -16,9 +16,7 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""unit/functional tests for cubicweb.server.hook
-
-"""
+"""unit/functional tests for cubicweb.server.hook"""
 
 from logilab.common.testlib import TestCase, unittest_main, mock_object
 
--- a/server/test/unittest_ldapuser.py	Thu May 06 08:24:46 2010 +0200
+++ b/server/test/unittest_ldapuser.py	Mon Jul 19 15:36:16 2010 +0200
@@ -189,7 +189,7 @@
             rset = self.sexecute('Any U ORDERBY D DESC WHERE WF wf_info_for X,'
                                 'WF creation_date D, WF from_state FS,'
                                 'WF owned_by U?, X eid %(x)s',
-                                {'x': adim.eid}, 'x')
+                                {'x': adim.eid})
             self.assertEquals(rset.rows, [[syt.eid]])
         finally:
             # restore db state
--- a/server/test/unittest_migractions.py	Thu May 06 08:24:46 2010 +0200
+++ b/server/test/unittest_migractions.py	Mon Jul 19 15:36:16 2010 +0200
@@ -124,8 +124,8 @@
         testdate = date(2005, 12, 13)
         eid1 = self.mh.rqlexec('INSERT Note N')[0][0]
         eid2 = self.mh.rqlexec('INSERT Note N: N mydate %(mydate)s', {'mydate' : testdate})[0][0]
-        d1 = self.mh.rqlexec('Any D WHERE X eid %(x)s, X mydate D', {'x': eid1}, 'x')[0][0]
-        d2 = self.mh.rqlexec('Any D WHERE X eid %(x)s, X mydate D', {'x': eid2}, 'x')[0][0]
+        d1 = self.mh.rqlexec('Any D WHERE X eid %(x)s, X mydate D', {'x': eid1})[0][0]
+        d2 = self.mh.rqlexec('Any D WHERE X eid %(x)s, X mydate D', {'x': eid2})[0][0]
         self.assertEquals(d1, date.today())
         self.assertEquals(d2, testdate)
         self.mh.rollback()
@@ -519,13 +519,13 @@
         note = self.execute('INSERT Note X: X para "hip", X shortpara "hop", X newattr "momo"').get_entity(0, 0)
         aff = self.execute('INSERT Affaire X').get_entity(0, 0)
         self.failUnless(self.execute('SET X newnotinlined Y WHERE X eid %(x)s, Y eid %(y)s',
-                                     {'x': text.eid, 'y': aff.eid}, 'x'))
+                                     {'x': text.eid, 'y': aff.eid}))
         self.failUnless(self.execute('SET X newnotinlined Y WHERE X eid %(x)s, Y eid %(y)s',
-                                     {'x': note.eid, 'y': aff.eid}, 'x'))
+                                     {'x': note.eid, 'y': aff.eid}))
         self.failUnless(self.execute('SET X newinlined Y WHERE X eid %(x)s, Y eid %(y)s',
-                                     {'x': text.eid, 'y': aff.eid}, 'x'))
+                                     {'x': text.eid, 'y': aff.eid}))
         self.failUnless(self.execute('SET X newinlined Y WHERE X eid %(x)s, Y eid %(y)s',
-                                     {'x': note.eid, 'y': aff.eid}, 'x'))
+                                     {'x': note.eid, 'y': aff.eid}))
         # XXX remove specializes by ourselves, else tearDown fails when removing
         # Para because of Note inheritance. This could be fixed by putting the
         # MemSchemaCWETypeDel(session, name) operation in the
--- a/server/test/unittest_msplanner.py	Thu May 06 08:24:46 2010 +0200
+++ b/server/test/unittest_msplanner.py	Mon Jul 19 15:36:16 2010 +0200
@@ -15,9 +15,6 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""
-
-"""
 from cubicweb.devtools import init_test_database
 from cubicweb.devtools.repotest import BasePlannerTC, test_plan
 
@@ -748,7 +745,6 @@
                     ])
 
     def test_not_identity(self):
-        # both system and rql support all variables, can be
         self._test('Any X WHERE NOT X identity U, U eid %s' % self.session.user.eid,
                    [('OneFetchStep',
                      [('Any X WHERE NOT X identity 5, X is CWUser', [{'X': 'CWUser'}])],
@@ -1105,7 +1101,7 @@
                      [('Any L,X WHERE X login L, X is CWUser', [{'X': 'CWUser', 'L': 'String'}])],
                      [self.ldap, self.system], None, {'X': 'table2.C1', 'X.login': 'table2.C0', 'L': 'table2.C0'}, []),
                     ('OneFetchStep',
-                     [('Any G,L WHERE X in_group G, X login L, G name "managers", (EXISTS(X copain T, T login L, T is CWUser)) OR (EXISTS(X in_state S, S name "pascontent", NOT X copain T2, S is State, T2 is CWUser)), G is CWGroup, X is CWUser',
+                     [('Any G,L WHERE X in_group G, X login L, G name "managers", (EXISTS(X copain T, T login L, T is CWUser)) OR (EXISTS(X in_state S, S name "pascontent", NOT EXISTS(X copain T2), S is State)), G is CWGroup, T2 is CWUser, X is CWUser',
                        [{'G': 'CWGroup', 'L': 'String', 'S': 'State', 'T': 'CWUser', 'T2': 'CWUser', 'X': 'CWUser'}])],
                      None, None, [self.system],
                      {'T2': 'table1.C0', 'L': 'table2.C0',
@@ -1222,7 +1218,7 @@
         # in the source where %(x)s is not coming from and will be removed during rql
         # generation for the external source
         self._test('Any SN WHERE NOT X in_state S, X eid %(x)s, S name SN',
-                   [('OneFetchStep', [('Any SN WHERE NOT 5 in_state S, S name SN, S is State',
+                   [('OneFetchStep', [('Any SN WHERE NOT EXISTS(5 in_state S), S name SN, S is State',
                                        [{'S': 'State', 'SN': 'String'}])],
                      None, None, [self.cards, self.system], {}, [])],
                    {'x': ueid})
@@ -1233,7 +1229,7 @@
         # the same plan may be used, since we won't find any record in the system source
         # linking 9999999 to a state
         self._test('Any SN WHERE NOT X in_state S, X eid %(x)s, S name SN',
-                   [('OneFetchStep', [('Any SN WHERE NOT 999999 in_state S, S name SN, S is State',
+                   [('OneFetchStep', [('Any SN WHERE NOT EXISTS(999999 in_state S), S name SN, S is State',
                                        [{'S': 'State', 'SN': 'String'}])],
                      None, None, [self.cards, self.system], {}, [])],
                    {'x': 999999})
@@ -1246,12 +1242,12 @@
                      []),
                     ('IntersectStep', None, None,
                      [('OneFetchStep',
-                       [('Any SN WHERE NOT X in_state S, S name SN, S is State, X is Note',
+                       [('Any SN WHERE NOT EXISTS(X in_state S, X is Note), S name SN, S is State',
                          [{'S': 'State', 'SN': 'String', 'X': 'Note'}])],
                        None, None, [self.cards, self.system], {},
                        []),
                       ('OneFetchStep',
-                       [('Any SN WHERE NOT X in_state S, S name SN, S is State, X is IN(Affaire, CWUser)',
+                       [('Any SN WHERE NOT EXISTS(X in_state S, X is IN(Affaire, CWUser)), S name SN, S is State',
                          [{'S': 'State', 'SN': 'String', 'X': 'Affaire'},
                           {'S': 'State', 'SN': 'String', 'X': 'CWUser'}])],
                        None, None, [self.system], {'S': 'table0.C1', 'S.name': 'table0.C0', 'SN': 'table0.C0'},
@@ -1505,7 +1501,7 @@
         self._test('Any Y WHERE X eid %(x)s, NOT X multisource_crossed_rel Y',
                    [('FetchStep', [('Any Y WHERE Y is Note', [{'Y': 'Note'}])],
                      [self.cards, self.system], None, {'Y': 'table0.C0'}, []),
-                    ('OneFetchStep', [('Any Y WHERE NOT 999999 multisource_crossed_rel Y, Y is Note',
+                    ('OneFetchStep', [('Any Y WHERE NOT EXISTS(999999 multisource_crossed_rel Y), Y is Note',
                                        [{'Y': 'Note'}])],
                      None, None, [self.system],
                      {'Y': 'table0.C0'},  [])],
@@ -1633,7 +1629,7 @@
         repo._type_source_cache[999999] = ('Note', 'system', 999999)
         self._test('DELETE Note X WHERE X eid %(x)s, NOT Y multisource_rel X',
                    [('DeleteEntitiesStep',
-                     [('OneFetchStep', [('Any 999999 WHERE NOT Y multisource_rel 999999, Y is IN(Card, Note)',
+                     [('OneFetchStep', [('Any 999999 WHERE NOT EXISTS(Y multisource_rel 999999), Y is IN(Card, Note)',
                                          [{'Y': 'Card'}, {'Y': 'Note'}])],
                        None, None, [self.system], {}, [])
                       ])
@@ -2185,7 +2181,7 @@
         self.repo._type_source_cache[999998] = ('Note', 'vcs', 999998)
         self.repo._type_source_cache[999999] = ('Note', 'vcs', 999999)
         self._test('Any X, Y WHERE NOT X multisource_rel Y, X eid 999998, Y eid 999999',
-                   [('OneFetchStep', [('Any 999998,999999 WHERE NOT 999998 multisource_rel 999999', [{}])],
+                   [('OneFetchStep', [('Any 999998,999999 WHERE NOT EXISTS(999998 multisource_rel 999999)', [{}])],
                      None, None, [self.vcs], {}, [])
                     ])
 
--- a/server/test/unittest_multisources.py	Thu May 06 08:24:46 2010 +0200
+++ b/server/test/unittest_multisources.py	Mon Jul 19 15:36:16 2010 +0200
@@ -15,9 +15,6 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""
-
-"""
 from os.path import dirname, join, abspath
 from datetime import datetime, timedelta
 
@@ -27,7 +24,6 @@
 from cubicweb.devtools.testlib import CubicWebTC, refresh_repo
 from cubicweb.devtools.repotest import do_monkey_patch, undo_monkey_patch
 
-TestServerConfiguration.no_sqlite_wrap = True
 
 class TwoSourcesConfiguration(TestServerConfiguration):
     sourcefile = 'sources_multi'
@@ -51,6 +47,7 @@
 Connection_close = Connection.close
 
 def setup_module(*args):
+    TestServerConfiguration.no_sqlite_wrap = True
     # hi-jack PyroRQLSource.get_connection to access existing connection (no
     # pyro connection)
     PyroRQLSource.get_connection = lambda x: x.uri == 'extern-multi' and cnx3 or cnx2
@@ -67,6 +64,7 @@
     del repo2, cnx2, repo3, cnx3
     #del TwoSourcesTC.config.vreg
     #del TwoSourcesTC.config
+    TestServerConfiguration.no_sqlite_wrap = False
 
 class TwoSourcesTC(CubicWebTC):
     config = TwoSourcesConfiguration('data')
@@ -122,7 +120,7 @@
         self.assertEquals(metainf['type'], 'Card')
         self.assert_(metainf['extid'])
         etype = self.sexecute('Any ETN WHERE X is ET, ET name ETN, X eid %(x)s',
-                             {'x': externent.eid}, 'x')[0][0]
+                             {'x': externent.eid})[0][0]
         self.assertEquals(etype, 'Card')
 
     def test_order_limit_offset(self):
@@ -142,7 +140,7 @@
         self.sexecute('INSERT Affaire X: X ref "no readable card"')[0][0]
         aff1 = self.sexecute('INSERT Affaire X: X ref "card"')[0][0]
         # grant read access
-        self.sexecute('SET X owned_by U WHERE X eid %(x)s, U login "anon"', {'x': aff1}, 'x')
+        self.sexecute('SET X owned_by U WHERE X eid %(x)s, U login "anon"', {'x': aff1})
         self.commit()
         cnx = self.login('anon')
         cu = cnx.cursor()
@@ -152,8 +150,8 @@
 
     def test_synchronization(self):
         cu = cnx2.cursor()
-        assert cu.execute('Any X WHERE X eid %(x)s', {'x': self.aff1}, 'x')
-        cu.execute('SET X ref "BLAH" WHERE X eid %(x)s', {'x': self.aff1}, 'x')
+        assert cu.execute('Any X WHERE X eid %(x)s', {'x': self.aff1})
+        cu.execute('SET X ref "BLAH" WHERE X eid %(x)s', {'x': self.aff1})
         aff2 = cu.execute('INSERT Affaire X: X ref "AFFREUX"')[0][0]
         cnx2.commit()
         try:
@@ -168,20 +166,20 @@
             self.failIf(rset)
         finally:
             # restore state
-            cu.execute('SET X ref "AFFREF" WHERE X eid %(x)s', {'x': self.aff1}, 'x')
+            cu.execute('SET X ref "AFFREF" WHERE X eid %(x)s', {'x': self.aff1})
             cnx2.commit()
 
     def test_simplifiable_var(self):
         affeid = self.sexecute('Affaire X WHERE X ref "AFFREF"')[0][0]
         rset = self.sexecute('Any X,AA,AB WHERE E eid %(x)s, E in_state X, X name AA, X modification_date AB',
-                            {'x': affeid}, 'x')
+                            {'x': affeid})
         self.assertEquals(len(rset), 1)
         self.assertEquals(rset[0][1], "pitetre")
 
     def test_simplifiable_var_2(self):
         affeid = self.sexecute('Affaire X WHERE X ref "AFFREF"')[0][0]
         rset = self.sexecute('Any E WHERE E eid %(x)s, E in_state S, NOT S name "moved"',
-                            {'x': affeid, 'u': self.session.user.eid}, 'x')
+                            {'x': affeid, 'u': self.session.user.eid})
         self.assertEquals(len(rset), 1)
 
     def test_sort_func(self):
@@ -229,7 +227,7 @@
             rset = self.sexecute('Any X,Y WHERE X is Card, Y is Affaire, X title T, Y ref T')
             self.assertEquals(len(rset), 2, rset.rows)
         finally:
-            cu.execute('DELETE Card X WHERE X eid %(x)s', {'x': ec2}, 'x')
+            cu.execute('DELETE Card X WHERE X eid %(x)s', {'x': ec2})
             cnx2.commit()
 
     def test_attr_unification_neq_1(self):
@@ -271,15 +269,15 @@
         userstate = self.session.user.in_state[0]
         states.remove((userstate.eid, userstate.name))
         notstates = set(tuple(x) for x in self.sexecute('Any S,SN WHERE S is State, S name SN, NOT X in_state S, X eid %(x)s',
-                                                       {'x': self.session.user.eid}, 'x'))
+                                                       {'x': self.session.user.eid}))
         self.assertSetEquals(notstates, states)
         aff1 = self.sexecute('Any X WHERE X is Affaire, X ref "AFFREF"')[0][0]
-        aff1stateeid, aff1statename = self.sexecute('Any S,SN WHERE X eid %(x)s, X in_state S, S name SN', {'x': aff1}, 'x')[0]
+        aff1stateeid, aff1statename = self.sexecute('Any S,SN WHERE X eid %(x)s, X in_state S, S name SN', {'x': aff1})[0]
         self.assertEquals(aff1statename, 'pitetre')
         states.add((userstate.eid, userstate.name))
         states.remove((aff1stateeid, aff1statename))
         notstates = set(tuple(x) for x in self.sexecute('Any S,SN WHERE S is State, S name SN, NOT X in_state S, X eid %(x)s',
-                                                       {'x': aff1}, 'x'))
+                                                       {'x': aff1}))
         self.assertSetEquals(notstates, states)
 
     def test_absolute_url_base_url(self):
--- a/server/test/unittest_querier.py	Thu May 06 08:24:46 2010 +0200
+++ b/server/test/unittest_querier.py	Mon Jul 19 15:36:16 2010 +0200
@@ -236,13 +236,13 @@
 
     def test_typed_eid(self):
         # should return an empty result set
-        rset = self.execute('Any X WHERE X eid %(x)s', {'x': '1'}, 'x')
+        rset = self.execute('Any X WHERE X eid %(x)s', {'x': '1'})
         self.assertIsInstance(rset[0][0], (int, long))
 
     def test_bytes_storage(self):
         feid = self.execute('INSERT File X: X data_name "foo.pdf", X data_format "text/plain", X data %(data)s',
                             {'data': Binary("xxx")})[0][0]
-        fdata = self.execute('Any D WHERE X data D, X eid %(x)s', {'x': feid}, 'x')[0][0]
+        fdata = self.execute('Any D WHERE X data D, X eid %(x)s', {'x': feid})[0][0]
         self.assertIsInstance(fdata, Binary)
         self.assertEquals(fdata.getvalue(), 'xxx')
 
@@ -372,17 +372,17 @@
 
     def test_select_outer_join_optimized(self):
         peid1 = self.execute("INSERT Personne X: X nom 'bidule'")[0][0]
-        rset = self.execute('Any X WHERE X eid %(x)s, P? connait X', {'x':peid1}, 'x')
+        rset = self.execute('Any X WHERE X eid %(x)s, P? connait X', {'x':peid1})
         self.assertEquals(rset.rows, [[peid1]])
         rset = self.execute('Any X WHERE X eid %(x)s, X require_permission P?',
-                            {'x':peid1}, 'x')
+                            {'x':peid1})
         self.assertEquals(rset.rows, [[peid1]])
 
     def test_select_left_outer_join(self):
         rset = self.execute('DISTINCT Any G WHERE U? in_group G')
         self.assertEquals(len(rset), 4)
         rset = self.execute('DISTINCT Any G WHERE U? in_group G, U eid %(x)s',
-                            {'x': self.session.user.eid}, 'x')
+                            {'x': self.session.user.eid})
         self.assertEquals(len(rset), 4)
 
     def test_select_ambigous_outer_join(self):
@@ -390,7 +390,7 @@
         self.execute("INSERT Tag X: X name 'tagbis'")[0][0]
         geid = self.execute("CWGroup G WHERE G name 'users'")[0][0]
         self.execute("SET X tags Y WHERE X eid %(t)s, Y eid %(g)s",
-                     {'g': geid, 't': teid}, 'g')
+                     {'g': geid, 't': teid})
         rset = self.execute("Any GN,TN ORDERBY GN WHERE T? tags G, T name TN, G name GN")
         self.failUnless(['users', 'tag'] in rset.rows)
         self.failUnless(['activated', None] in rset.rows)
@@ -511,6 +511,21 @@
         self.assertEquals(len(rset.rows), 1)
         self.assertEquals(rset.rows[0][0], self.ueid)
 
+    def test_select_having_non_aggregat_1(self):
+        rset = self.execute('Any L WHERE X login L, X creation_date CD '
+                            'HAVING YEAR(CD) = %s' % date.today().year)
+        self.assertListEquals(rset.rows,
+                              [[u'admin'],
+                               [u'anon']])
+
+    def test_select_having_non_aggregat_2(self):
+        rset = self.execute('Any L GROUPBY L WHERE X login L, X in_group G, '
+                            'X creation_date CD HAVING YEAR(CD) = %s OR COUNT(G) > 1'
+                            % date.today().year)
+        self.assertListEquals(rset.rows,
+                              [[u'admin'],
+                               [u'anon']])
+
     def test_select_complex_sort(self):
         """need sqlite including http://www.sqlite.org/cvstrac/tktview?tn=3773 fix"""
         rset = self.execute('Any X ORDERBY X,D LIMIT 5 WHERE X creation_date D')
@@ -789,6 +804,15 @@
                            'end', 'finie', 'markasdone', 'pitetre', 'redoit',
                            'start', 'todo'])
 
+    def test_select_union_description_diff_var(self):
+        eid1 = self.execute('CWGroup X WHERE X name "managers"')[0][0]
+        eid2 = self.execute('CWUser X WHERE X login "admin"')[0][0]
+        rset = self.execute('(Any X WHERE X eid %(x)s)'
+                            ' UNION '
+                            '(Any Y WHERE Y eid %(y)s)',
+                            {'x': eid1, 'y': eid2})
+        self.assertEquals(rset.description[:], [('CWGroup',), ('CWUser',)])
+
     def test_exists(self):
         geid = self.execute("INSERT CWGroup X: X name 'lulufanclub'")[0][0]
         self.execute("SET U in_group G WHERE G name 'lulufanclub'")
@@ -898,7 +922,7 @@
     def test_insert_5bis(self):
         peid = self.execute("INSERT Personne X: X nom 'bidule'")[0][0]
         self.execute("INSERT Societe Y: Y nom 'toto', X travaille Y WHERE X eid %(x)s",
-                     {'x': peid}, 'x')
+                     {'x': peid})
         rset = self.execute('Any X, Y WHERE X nom "bidule", Y nom "toto", X travaille Y')
         self.assert_(rset.rows)
         self.assertEquals(rset.description, [('Personne', 'Societe',)])
@@ -915,12 +939,33 @@
         self.assert_(rset.rows)
         self.assertEquals(rset.description, [('Personne', 'Societe',)])
 
+    def test_insert_7_2(self):
+        self.execute("INSERT Personne X, Societe Y: X nom N, Y nom 'toto', X travaille Y WHERE U login N")
+        rset = self.execute('Any X, Y WHERE Y nom "toto", X travaille Y')
+        self.assertEquals(len(rset), 2)
+        self.assertEquals(rset.description, [('Personne', 'Societe',),
+                                             ('Personne', 'Societe',)])
+
     def test_insert_8(self):
         self.execute("INSERT Societe Y, Personne X: Y nom N, X nom 'toto', X travaille Y WHERE U login 'admin', U login N")
         rset = self.execute('Any X, Y WHERE X nom "toto", Y nom "admin", X travaille Y')
         self.assert_(rset.rows)
         self.assertEquals(rset.description, [('Personne', 'Societe',)])
 
+    def test_insert_9(self):
+        self.execute("INSERT Societe X: X nom  'Lo'")
+        self.execute("INSERT Societe X: X nom  'Gi'")
+        self.execute("INSERT SubDivision X: X nom  'Lab'")
+        rset = self.execute("INSERT Personne X: X nom N, X travaille Y, X travaille_subdivision Z WHERE Y is Societe, Z is SubDivision, Y nom N")
+        self.assertEquals(len(rset), 2)
+        self.assertEquals(rset.description, [('Personne',), ('Personne',)])
+        # self.assertSetEquals(set(x.nom for x in rset.entities()),
+        #                      ['Lo', 'Gi'])
+        # self.assertSetEquals(set(y.nom for x in rset.entities() for y in x.travaille),
+        #                      ['Lo', 'Gi'])
+        # self.assertEquals([y.nom for x in rset.entities() for y in x.travaille_subdivision],
+        #                      ['Lab', 'Lab'])
+
     def test_insert_query_error(self):
         self.assertRaises(Exception,
                           self.execute,
@@ -1016,17 +1061,17 @@
         eid = self.execute("INSERT Folder T: T name 'toto'")[0][0]
         self.commit()
         # fill the cache
-        self.execute("Any X WHERE X eid %(x)s", {'x': eid}, 'x')
+        self.execute("Any X WHERE X eid %(x)s", {'x': eid})
         self.execute("Any X WHERE X eid %s" %eid)
-        self.execute("Folder X WHERE X eid %(x)s", {'x': eid}, 'x')
+        self.execute("Folder X WHERE X eid %(x)s", {'x': eid})
         self.execute("Folder X WHERE X eid %s" %eid)
         self.execute("DELETE Folder T WHERE T eid %s"%eid)
         self.commit()
-        rset = self.execute("Any X WHERE X eid %(x)s", {'x': eid}, 'x')
+        rset = self.execute("Any X WHERE X eid %(x)s", {'x': eid})
         self.assertEquals(rset.rows, [])
         rset = self.execute("Any X WHERE X eid %s" %eid)
         self.assertEquals(rset.rows, [])
-        rset = self.execute("Folder X WHERE X eid %(x)s", {'x': eid}, 'x')
+        rset = self.execute("Folder X WHERE X eid %(x)s", {'x': eid})
         self.assertEquals(rset.rows, [])
         rset = self.execute("Folder X WHERE X eid %s" %eid)
         self.assertEquals(rset.rows, [])
@@ -1102,7 +1147,7 @@
     def test_update_string_concat(self):
         beid = self.execute("INSERT Bookmark Y: Y title 'toto', Y path '/view'")[0][0]
         self.execute('SET X title XN + %(suffix)s WHERE X is Bookmark, X title XN', {'suffix': u'-moved'})
-        newname = self.execute('Any XN WHERE X eid %(x)s, X title XN', {'x': beid}, 'x')[0][0]
+        newname = self.execute('Any XN WHERE X eid %(x)s, X title XN', {'x': beid})[0][0]
         self.assertEquals(newname, 'toto-moved')
 
     def test_update_query_error(self):
@@ -1219,7 +1264,7 @@
                         'creation_date': '2000/07/03 11:00'})
         rset = self.execute('Any lower(N) ORDERBY LOWER(N) WHERE X is Tag, X name N,'
                             'X owned_by U, U eid %(x)s',
-                            {'x':self.session.user.eid}, 'x')
+                            {'x':self.session.user.eid})
         self.assertEquals(rset.rows, [[u'\xe9name0']])
 
 
@@ -1302,7 +1347,7 @@
         ueid = self.execute("INSERT CWUser X: X login 'bob', X upassword 'toto'")[0][0]
         self.execute("SET E in_group G, E firstname %(firstname)s, E surname %(surname)s "
                       "WHERE E eid %(x)s, G name 'users'",
-                      {'x':ueid, 'firstname': u'jean', 'surname': u'paul'}, 'x')
+                      {'x':ueid, 'firstname': u'jean', 'surname': u'paul'})
 
     def test_nonregr_u_owned_by_u(self):
         ueid = self.execute("INSERT CWUser X: X login 'bob', X upassword 'toto', X in_group G "
--- a/server/test/unittest_repository.py	Thu May 06 08:24:46 2010 +0200
+++ b/server/test/unittest_repository.py	Mon Jul 19 15:36:16 2010 +0200
@@ -16,10 +16,7 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""unit tests for module cubicweb.server.repository
-
-"""
-from __future__ import with_statement
+"""unit tests for module cubicweb.server.repository"""
 
 from __future__ import with_statement
 
@@ -215,7 +212,7 @@
     def test_transaction_interleaved(self):
         self.skip('implement me')
 
-    def test_close_wait_processing_request(self):
+    def test_close_kill_processing_request(self):
         repo = self.repo
         cnxid = repo.connect(self.admlogin, password=self.admpassword)
         repo.execute(cnxid, 'INSERT CWUser X: X login "toto", X upassword "tutu", X in_group G WHERE G name "users"')
@@ -226,9 +223,12 @@
             repo.close(cnxid)
         t = threading.Thread(target=close_in_a_few_moment)
         t.start()
-        try:
+        def run_transaction():
             repo.execute(cnxid, 'DELETE CWUser X WHERE X login "toto"')
             repo.commit(cnxid)
+        try:
+            ex = self.assertRaises(Exception, run_transaction)
+            self.assertEquals(str(ex), 'try to access pool on a closed session')
         finally:
             t.join()
 
@@ -239,10 +239,11 @@
                                if not r.type in ('eid', 'is', 'is_instance_of', 'identity',
                                                  'creation_date', 'modification_date', 'cwuri',
                                                  'owned_by', 'created_by',
-                                                 'update_permission', 'read_permission')],
+                                                 'update_permission', 'read_permission',
+                                                 'in_basket')],
                               ['relation_type',
                                'from_entity', 'to_entity',
-                               'in_basket', 'constrained_by', 
+                               'constrained_by',
                                'cardinality', 'ordernum',
                                'indexed', 'fulltextindexed', 'internationalizable',
                                'defaultval', 'description', 'description_format'])
@@ -295,11 +296,19 @@
         cnx = connect(self.repo.config.appid, u'admin', password='gingkow',
                       initlog=False) # don't reset logging configuration
         try:
+            cnx.load_appobjects(subpath=('entities',))
             # check we can get the schema
             schema = cnx.get_schema()
+            self.failUnless(cnx.vreg)
+            self.failUnless('etypes'in cnx.vreg)
             self.assertEquals(schema.__hashmode__, None)
             cu = cnx.cursor()
             rset = cu.execute('Any U,G WHERE U in_group G')
+            user = iter(rset.entities()).next()
+            self.failUnless(user._cw)
+            self.failUnless(user._cw.vreg)
+            from cubicweb.entities import authobjs
+            self.assertIsInstance(user._cw.user, authobjs.CWUser)
             cnx.close()
             done.append(True)
         finally:
@@ -491,7 +500,7 @@
         # our sqlite datetime adapter ignores the seconds fraction, so we have
         # to ensure the update is done in the next second
         time.sleep(1 - (ts.second - int(ts.second)))
-        self.execute('SET X nom "tata" WHERE X eid %(x)s', {'x': eidp}, 'x')
+        self.execute('SET X nom "tata" WHERE X eid %(x)s', {'x': eidp})
         self.commit()
         self.assertEquals(len(self.execute('Personne X WHERE X has_text "tutu"')), 1)
         self.session.set_pool()
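
Note: the connection-level assertions added above exercise the client-side dbapi: after `load_appobjects`, entities fetched through the connection carry a working `_cw` / `vreg`. A hedged sketch of the same calls from application code, assuming the usual `cubicweb.dbapi.connect` entry point used by this test (instance id and credentials are illustrative):

from cubicweb.dbapi import connect

cnx = connect('myinstance', u'admin', password='secret')
cnx.load_appobjects(subpath=('entities',))  # client-side registry for entities
cu = cnx.cursor()
rset = cu.execute('Any U,G WHERE U in_group G')
user = iter(rset.entities()).next()  # entities now have _cw and _cw.vreg
cnx.close()
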
--- a/server/test/unittest_rql2sql.py	Thu May 06 08:24:46 2010 +0200
+++ b/server/test/unittest_rql2sql.py	Mon Jul 19 15:36:16 2010 +0200
@@ -15,10 +15,6 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""
-
-"""
-
 """unit tests for module cubicweb.server.sources.rql2sql"""
 
 import sys
@@ -180,12 +176,11 @@
      "NOT EXISTS(X owned_by U, U in_group G, G name 'lulufanclub' OR G name 'managers');",
      '''SELECT _X.cw_eid
 FROM cw_Personne AS _X
-WHERE _X.cw_prenom=lulu AND NOT EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by0, in_group_relation AS rel_in_group1, cw_CWGroup AS _G WHERE rel_owned_by0.eid_from=_X.cw_eid AND rel_in_group1.eid_from=rel_owned_by0.eid_to AND rel_in_group1.eid_to=_G.cw_eid AND ((_G.cw_name=lulufanclub) OR (_G.cw_name=managers)))'''),
-
-
+WHERE _X.cw_prenom=lulu AND NOT (EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by0, in_group_relation AS rel_in_group1, cw_CWGroup AS _G WHERE rel_owned_by0.eid_from=_X.cw_eid AND rel_in_group1.eid_from=rel_owned_by0.eid_to AND rel_in_group1.eid_to=_G.cw_eid AND ((_G.cw_name=lulufanclub) OR (_G.cw_name=managers))))'''),
 
 ]
 
+
 ADVANCED= [
     ("Societe S WHERE S nom 'Logilab' OR S nom 'Caesium'",
      '''SELECT _S.cw_eid
@@ -276,7 +271,7 @@
     ('Any O WHERE NOT S ecrit_par O, S eid 1, S inline1 P, O inline2 P',
      '''SELECT _O.cw_eid
 FROM cw_Note AS _S, cw_Personne AS _O
-WHERE NOT EXISTS(SELECT 1 WHERE _S.cw_ecrit_par=_O.cw_eid) AND _S.cw_eid=1 AND _O.cw_inline2=_S.cw_inline1'''),
+WHERE NOT (_S.cw_ecrit_par=_O.cw_eid) AND _S.cw_eid=1 AND _S.cw_inline1 IS NOT NULL AND _O.cw_inline2=_S.cw_inline1'''),
 
     ('DISTINCT Any S ORDERBY stockproc(SI) WHERE NOT S ecrit_par O, S para SI',
      '''SELECT T1.C0 FROM (SELECT DISTINCT _S.cw_eid AS C0, STOCKPROC(_S.cw_para) AS C1
@@ -299,7 +294,7 @@
     (' Any X,U WHERE C owned_by U, NOT X owned_by U, C eid 1, X eid 2',
      '''SELECT 2, rel_owned_by0.eid_to
 FROM owned_by_relation AS rel_owned_by0
-WHERE rel_owned_by0.eid_from=1 AND NOT EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by1 WHERE rel_owned_by1.eid_from=2 AND rel_owned_by0.eid_to=rel_owned_by1.eid_to)'''),
+WHERE rel_owned_by0.eid_from=1 AND NOT (EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by1 WHERE rel_owned_by1.eid_from=2 AND rel_owned_by0.eid_to=rel_owned_by1.eid_to))'''),
 
     ('Any GN WHERE X in_group G, G name GN, (G name "managers" OR EXISTS(X copain T, T login in ("comme", "cochon")))',
      '''SELECT _G.cw_name
@@ -353,7 +348,7 @@
     ('Any L WHERE X login "admin", NOT X identity Y, Y login L',
      '''SELECT _Y.cw_login
 FROM cw_CWUser AS _X, cw_CWUser AS _Y
-WHERE _X.cw_login=admin AND NOT _X.cw_eid=_Y.cw_eid'''),
+WHERE _X.cw_login=admin AND NOT (_X.cw_eid=_Y.cw_eid)'''),
 
     ('Any L WHERE X login "admin", X identity Y?, Y login L',
      '''SELECT _Y.cw_login
@@ -391,31 +386,31 @@
     ('DISTINCT Any X,Y WHERE X name "CWGroup", Y eid IN(1, 2, 3), NOT EXISTS(X read_permission Y)',
      '''SELECT DISTINCT _X.cw_eid, _Y.cw_eid
 FROM cw_CWEType AS _X, cw_CWGroup AS _Y
-WHERE _X.cw_name=CWGroup AND _Y.cw_eid IN(1, 2, 3) AND NOT EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=_X.cw_eid AND rel_read_permission0.eid_to=_Y.cw_eid)
+WHERE _X.cw_name=CWGroup AND _Y.cw_eid IN(1, 2, 3) AND NOT (EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=_X.cw_eid AND rel_read_permission0.eid_to=_Y.cw_eid))
 UNION
 SELECT DISTINCT _X.cw_eid, _Y.cw_eid
 FROM cw_CWEType AS _X, cw_RQLExpression AS _Y
-WHERE _X.cw_name=CWGroup AND _Y.cw_eid IN(1, 2, 3) AND NOT EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=_X.cw_eid AND rel_read_permission0.eid_to=_Y.cw_eid)'''),
+WHERE _X.cw_name=CWGroup AND _Y.cw_eid IN(1, 2, 3) AND NOT (EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=_X.cw_eid AND rel_read_permission0.eid_to=_Y.cw_eid))'''),
 
     # should generate the same query as above
     ('DISTINCT Any X,Y WHERE X name "CWGroup", Y eid IN(1, 2, 3), NOT X read_permission Y',
      '''SELECT DISTINCT _X.cw_eid, _Y.cw_eid
 FROM cw_CWEType AS _X, cw_CWGroup AS _Y
-WHERE _X.cw_name=CWGroup AND _Y.cw_eid IN(1, 2, 3) AND NOT EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=_X.cw_eid AND rel_read_permission0.eid_to=_Y.cw_eid)
+WHERE _X.cw_name=CWGroup AND _Y.cw_eid IN(1, 2, 3) AND NOT (EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=_X.cw_eid AND rel_read_permission0.eid_to=_Y.cw_eid))
 UNION
 SELECT DISTINCT _X.cw_eid, _Y.cw_eid
 FROM cw_CWEType AS _X, cw_RQLExpression AS _Y
-WHERE _X.cw_name=CWGroup AND _Y.cw_eid IN(1, 2, 3) AND NOT EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=_X.cw_eid AND rel_read_permission0.eid_to=_Y.cw_eid)'''),
+WHERE _X.cw_name=CWGroup AND _Y.cw_eid IN(1, 2, 3) AND NOT (EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=_X.cw_eid AND rel_read_permission0.eid_to=_Y.cw_eid))'''),
 
     # negated relation, can't be invariant
     ('Any X,Y WHERE X name "CWGroup", Y eid IN(1, 2, 3), NOT X read_permission Y',
      '''SELECT _X.cw_eid, _Y.cw_eid
 FROM cw_CWEType AS _X, cw_CWGroup AS _Y
-WHERE _X.cw_name=CWGroup AND _Y.cw_eid IN(1, 2, 3) AND NOT EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=_X.cw_eid AND rel_read_permission0.eid_to=_Y.cw_eid)
+WHERE _X.cw_name=CWGroup AND _Y.cw_eid IN(1, 2, 3) AND NOT (EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=_X.cw_eid AND rel_read_permission0.eid_to=_Y.cw_eid))
 UNION ALL
 SELECT _X.cw_eid, _Y.cw_eid
 FROM cw_CWEType AS _X, cw_RQLExpression AS _Y
-WHERE _X.cw_name=CWGroup AND _Y.cw_eid IN(1, 2, 3) AND NOT EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=_X.cw_eid AND rel_read_permission0.eid_to=_Y.cw_eid)'''),
+WHERE _X.cw_name=CWGroup AND _Y.cw_eid IN(1, 2, 3) AND NOT (EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=_X.cw_eid AND rel_read_permission0.eid_to=_Y.cw_eid))'''),
 
     ('Any MAX(X)+MIN(X), N GROUPBY N WHERE X name N, X is IN (Basket, Folder, Tag);',
      '''SELECT (MAX(T1.C0) + MIN(T1.C0)), T1.C1 FROM (SELECT _X.cw_eid AS C0, _X.cw_name AS C1
@@ -552,7 +547,7 @@
      'EXISTS(A use_email O, EXISTS(A identity B, NOT B in_group D, D name "guests", D is CWGroup), A is CWUser), B eid 2',
      '''SELECT _O.cw_eid, _O.cw_address, _O.cw_alias, _O.cw_modification_date
 FROM cw_EmailAddress AS _O
-WHERE NOT EXISTS(SELECT 1 FROM use_email_relation AS rel_use_email0 WHERE rel_use_email0.eid_from=1 AND rel_use_email0.eid_to=_O.cw_eid) AND EXISTS(SELECT 1 FROM use_email_relation AS rel_use_email1 WHERE rel_use_email1.eid_to=_O.cw_eid AND EXISTS(SELECT 1 FROM cw_CWGroup AS _D WHERE rel_use_email1.eid_from=2 AND NOT EXISTS(SELECT 1 FROM in_group_relation AS rel_in_group2 WHERE rel_in_group2.eid_from=2 AND rel_in_group2.eid_to=_D.cw_eid) AND _D.cw_name=guests))
+WHERE NOT (EXISTS(SELECT 1 FROM use_email_relation AS rel_use_email0 WHERE rel_use_email0.eid_from=1 AND rel_use_email0.eid_to=_O.cw_eid)) AND EXISTS(SELECT 1 FROM use_email_relation AS rel_use_email1 WHERE rel_use_email1.eid_to=_O.cw_eid AND EXISTS(SELECT 1 FROM cw_CWGroup AS _D WHERE rel_use_email1.eid_from=2 AND NOT (EXISTS(SELECT 1 FROM in_group_relation AS rel_in_group2 WHERE rel_in_group2.eid_from=2 AND rel_in_group2.eid_to=_D.cw_eid)) AND _D.cw_name=guests))
 ORDER BY 4 DESC'''),
 
 
@@ -585,6 +580,7 @@
 
     ]
 
+
 MULTIPLE_SEL = [
     ("DISTINCT Any X,Y where P is Personne, P nom X , P prenom Y;",
      '''SELECT DISTINCT _P.cw_nom, _P.cw_prenom
@@ -599,21 +595,23 @@
 WHERE _Y.cw_nom=_X.cw_nom AND NOT (_Y.cw_eid=_X.cw_eid)''')
     ]
 
+
 NEGATIONS = [
+
     ("Personne X WHERE NOT X evaluee Y;",
      '''SELECT _X.cw_eid
 FROM cw_Personne AS _X
-WHERE NOT EXISTS(SELECT 1 FROM evaluee_relation AS rel_evaluee0 WHERE rel_evaluee0.eid_from=_X.cw_eid)'''),
+WHERE NOT (EXISTS(SELECT 1 FROM evaluee_relation AS rel_evaluee0 WHERE rel_evaluee0.eid_from=_X.cw_eid))'''),
 
     ("Note N WHERE NOT X evaluee N, X eid 0",
      '''SELECT _N.cw_eid
 FROM cw_Note AS _N
-WHERE NOT EXISTS(SELECT 1 FROM evaluee_relation AS rel_evaluee0 WHERE rel_evaluee0.eid_from=0 AND rel_evaluee0.eid_to=_N.cw_eid)'''),
+WHERE NOT (EXISTS(SELECT 1 FROM evaluee_relation AS rel_evaluee0 WHERE rel_evaluee0.eid_from=0 AND rel_evaluee0.eid_to=_N.cw_eid))'''),
 
     ('Any X WHERE NOT X travaille S, X is Personne',
      '''SELECT _X.cw_eid
 FROM cw_Personne AS _X
-WHERE NOT EXISTS(SELECT 1 FROM travaille_relation AS rel_travaille0 WHERE rel_travaille0.eid_from=_X.cw_eid)'''),
+WHERE NOT (EXISTS(SELECT 1 FROM travaille_relation AS rel_travaille0 WHERE rel_travaille0.eid_from=_X.cw_eid))'''),
 
     ("Personne P where not P datenaiss TODAY",
      '''SELECT _P.cw_eid
@@ -623,16 +621,16 @@
     ("Personne P where NOT P concerne A",
      '''SELECT _P.cw_eid
 FROM cw_Personne AS _P
-WHERE NOT EXISTS(SELECT 1 FROM concerne_relation AS rel_concerne0 WHERE rel_concerne0.eid_from=_P.cw_eid)'''),
+WHERE NOT (EXISTS(SELECT 1 FROM concerne_relation AS rel_concerne0 WHERE rel_concerne0.eid_from=_P.cw_eid))'''),
 
     ("Affaire A where not P concerne A",
      '''SELECT _A.cw_eid
 FROM cw_Affaire AS _A
-WHERE NOT EXISTS(SELECT 1 FROM concerne_relation AS rel_concerne0 WHERE rel_concerne0.eid_to=_A.cw_eid)'''),
+WHERE NOT (EXISTS(SELECT 1 FROM concerne_relation AS rel_concerne0 WHERE rel_concerne0.eid_to=_A.cw_eid))'''),
     ("Personne P where not P concerne A, A sujet ~= 'TEST%'",
      '''SELECT _P.cw_eid
 FROM cw_Affaire AS _A, cw_Personne AS _P
-WHERE NOT EXISTS(SELECT 1 FROM concerne_relation AS rel_concerne0 WHERE rel_concerne0.eid_from=_P.cw_eid AND rel_concerne0.eid_to=_A.cw_eid) AND _A.cw_sujet ILIKE TEST%'''),
+WHERE NOT (EXISTS(SELECT 1 FROM concerne_relation AS rel_concerne0 WHERE rel_concerne0.eid_from=_P.cw_eid AND rel_concerne0.eid_to=_A.cw_eid)) AND _A.cw_sujet ILIKE TEST%'''),
 
     ('Any S WHERE NOT T eid 28258, T tags S',
      '''SELECT rel_tags0.eid_to
@@ -660,33 +658,33 @@
     ('Note X WHERE NOT Y evaluee X',
      '''SELECT _X.cw_eid
 FROM cw_Note AS _X
-WHERE NOT EXISTS(SELECT 1 FROM evaluee_relation AS rel_evaluee0 WHERE rel_evaluee0.eid_to=_X.cw_eid)'''),
+WHERE NOT (EXISTS(SELECT 1 FROM evaluee_relation AS rel_evaluee0 WHERE rel_evaluee0.eid_to=_X.cw_eid))'''),
 
     ('Any Y WHERE NOT Y evaluee X',
      '''SELECT _Y.cw_eid
 FROM cw_CWUser AS _Y
-WHERE NOT EXISTS(SELECT 1 FROM evaluee_relation AS rel_evaluee0 WHERE rel_evaluee0.eid_from=_Y.cw_eid)
+WHERE NOT (EXISTS(SELECT 1 FROM evaluee_relation AS rel_evaluee0 WHERE rel_evaluee0.eid_from=_Y.cw_eid))
 UNION ALL
 SELECT _Y.cw_eid
 FROM cw_Division AS _Y
-WHERE NOT EXISTS(SELECT 1 FROM evaluee_relation AS rel_evaluee0 WHERE rel_evaluee0.eid_from=_Y.cw_eid)
+WHERE NOT (EXISTS(SELECT 1 FROM evaluee_relation AS rel_evaluee0 WHERE rel_evaluee0.eid_from=_Y.cw_eid))
 UNION ALL
 SELECT _Y.cw_eid
 FROM cw_Personne AS _Y
-WHERE NOT EXISTS(SELECT 1 FROM evaluee_relation AS rel_evaluee0 WHERE rel_evaluee0.eid_from=_Y.cw_eid)
+WHERE NOT (EXISTS(SELECT 1 FROM evaluee_relation AS rel_evaluee0 WHERE rel_evaluee0.eid_from=_Y.cw_eid))
 UNION ALL
 SELECT _Y.cw_eid
 FROM cw_Societe AS _Y
-WHERE NOT EXISTS(SELECT 1 FROM evaluee_relation AS rel_evaluee0 WHERE rel_evaluee0.eid_from=_Y.cw_eid)
+WHERE NOT (EXISTS(SELECT 1 FROM evaluee_relation AS rel_evaluee0 WHERE rel_evaluee0.eid_from=_Y.cw_eid))
 UNION ALL
 SELECT _Y.cw_eid
 FROM cw_SubDivision AS _Y
-WHERE NOT EXISTS(SELECT 1 FROM evaluee_relation AS rel_evaluee0 WHERE rel_evaluee0.eid_from=_Y.cw_eid)'''),
+WHERE NOT (EXISTS(SELECT 1 FROM evaluee_relation AS rel_evaluee0 WHERE rel_evaluee0.eid_from=_Y.cw_eid))'''),
 
     ('Any X WHERE NOT Y evaluee X, Y is CWUser',
      '''SELECT _X.cw_eid
 FROM cw_Note AS _X
-WHERE NOT EXISTS(SELECT 1 FROM evaluee_relation AS rel_evaluee0,cw_CWUser AS _Y WHERE rel_evaluee0.eid_from=_Y.cw_eid AND rel_evaluee0.eid_to=_X.cw_eid)'''),
+WHERE NOT (EXISTS(SELECT 1 FROM evaluee_relation AS rel_evaluee0, cw_CWUser AS _Y WHERE rel_evaluee0.eid_from=_Y.cw_eid AND rel_evaluee0.eid_to=_X.cw_eid))'''),
 
     ('Any X,RT WHERE X relation_type RT, NOT X is CWAttribute',
      '''SELECT _X.cw_eid, _X.cw_relation_type
@@ -701,17 +699,13 @@
     ('Any S WHERE NOT X in_state S, X is IN(Affaire, CWUser)',
      '''SELECT _S.cw_eid
 FROM cw_State AS _S
-WHERE NOT EXISTS(SELECT 1 FROM cw_Affaire AS _X WHERE _X.cw_in_state=_S.cw_eid)
-INTERSECT
-SELECT _S.cw_eid
-FROM cw_State AS _S
-WHERE NOT EXISTS(SELECT 1 FROM cw_CWUser AS _X WHERE _X.cw_in_state=_S.cw_eid)'''),
+WHERE NOT (EXISTS(SELECT 1 FROM cw_Affaire AS _X WHERE _X.cw_in_state=_S.cw_eid UNION SELECT 1 FROM cw_CWUser AS _X WHERE _X.cw_in_state=_S.cw_eid))'''),
 
     ('Any S WHERE NOT(X in_state S, S name "somename"), X is CWUser',
      '''SELECT _S.cw_eid
 FROM cw_State AS _S
-WHERE NOT EXISTS(SELECT 1 FROM cw_CWUser AS _X WHERE _X.cw_in_state=_S.cw_eid AND _S.cw_name=somename)'''),
-   
+WHERE NOT (EXISTS(SELECT 1 FROM cw_CWUser AS _X WHERE _X.cw_in_state=_S.cw_eid AND _S.cw_name=somename))'''),
+
 # XXXFIXME fail
 #         ('Any X,RT WHERE X relation_type RT?, NOT X is CWAttribute',
 #      '''SELECT _X.cw_eid, _X.cw_relation_type
@@ -844,7 +838,7 @@
     ('Any O,AD  WHERE NOT S inline1 O, S eid 123, O todo_by AD?',
      '''SELECT _O.cw_eid, rel_todo_by0.eid_to
 FROM cw_Affaire AS _O LEFT OUTER JOIN todo_by_relation AS rel_todo_by0 ON (rel_todo_by0.eid_from=_O.cw_eid), cw_Note AS _S
-WHERE NOT EXISTS(SELECT 1 WHERE _S.cw_inline1=_O.cw_eid) AND _S.cw_eid=123''')
+WHERE NOT (_S.cw_inline1=_O.cw_eid) AND _S.cw_eid=123''')
     ]
 
 VIRTUAL_VARS = [
@@ -919,7 +913,7 @@
 FROM cw_Personne AS _P'''),
     ]
 
-SYMETRIC = [
+SYMMETRIC = [
     ('Any P WHERE X eid 0, X connait P',
      '''SELECT DISTINCT _P.cw_eid
 FROM connait_relation AS rel_connait0, cw_Personne AS _P
@@ -941,17 +935,17 @@
     ('Any P WHERE X eid 0, NOT X connait P',
      '''SELECT _P.cw_eid
 FROM cw_Personne AS _P
-WHERE NOT EXISTS(SELECT 1 FROM connait_relation AS rel_connait0 WHERE (rel_connait0.eid_from=0 AND rel_connait0.eid_to=_P.cw_eid OR rel_connait0.eid_to=0 AND rel_connait0.eid_from=_P.cw_eid))'''),
+WHERE NOT (EXISTS(SELECT 1 FROM connait_relation AS rel_connait0 WHERE (rel_connait0.eid_from=0 AND rel_connait0.eid_to=_P.cw_eid OR rel_connait0.eid_to=0 AND rel_connait0.eid_from=_P.cw_eid)))'''),
 
     ('Any P WHERE NOT X connait P',
     '''SELECT _P.cw_eid
 FROM cw_Personne AS _P
-WHERE NOT EXISTS(SELECT 1 FROM connait_relation AS rel_connait0 WHERE (rel_connait0.eid_to=_P.cw_eid OR rel_connait0.eid_from=_P.cw_eid))'''),
+WHERE NOT (EXISTS(SELECT 1 FROM connait_relation AS rel_connait0 WHERE (rel_connait0.eid_to=_P.cw_eid OR rel_connait0.eid_from=_P.cw_eid)))'''),
 
     ('Any X WHERE NOT X connait P',
     '''SELECT _X.cw_eid
 FROM cw_Personne AS _X
-WHERE NOT EXISTS(SELECT 1 FROM connait_relation AS rel_connait0 WHERE (rel_connait0.eid_from=_X.cw_eid OR rel_connait0.eid_to=_X.cw_eid))'''),
+WHERE NOT (EXISTS(SELECT 1 FROM connait_relation AS rel_connait0 WHERE (rel_connait0.eid_from=_X.cw_eid OR rel_connait0.eid_to=_X.cw_eid)))'''),
 
     ('Any P WHERE X connait P, P nom "nom"',
      '''SELECT DISTINCT _P.cw_eid
@@ -980,6 +974,12 @@
     ]
 
 INLINE = [
+
+    ('Any P WHERE N eid 1, N ecrit_par P, NOT P owned_by P2',
+     '''SELECT _N.cw_ecrit_par
+FROM cw_Note AS _N
+WHERE _N.cw_eid=1 AND _N.cw_ecrit_par IS NOT NULL AND NOT (EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by0 WHERE _N.cw_ecrit_par=rel_owned_by0.eid_from))'''),
+
     ('Any P, L WHERE N ecrit_par P, P nom L, N eid 0',
      '''SELECT _P.cw_eid, _P.cw_nom
 FROM cw_Note AS _N, cw_Personne AS _P
@@ -988,7 +988,12 @@
     ('Any N WHERE NOT N ecrit_par P, P nom "toto"',
      '''SELECT _N.cw_eid
 FROM cw_Note AS _N, cw_Personne AS _P
-WHERE NOT EXISTS(SELECT 1 WHERE _N.cw_ecrit_par=_P.cw_eid) AND _P.cw_nom=toto'''),
+WHERE NOT (_N.cw_ecrit_par=_P.cw_eid) AND _P.cw_nom=toto'''),
+
+    ('Any P WHERE NOT N ecrit_par P, P nom "toto"',
+     '''SELECT _P.cw_eid
+FROM cw_Personne AS _P
+WHERE NOT (EXISTS(SELECT 1 FROM cw_Note AS _N WHERE _N.cw_ecrit_par=_P.cw_eid)) AND _P.cw_nom=toto'''),
 
     ('Any P WHERE N ecrit_par P, N eid 0',
     '''SELECT _N.cw_ecrit_par
@@ -1003,12 +1008,13 @@
     ('Any P WHERE NOT N ecrit_par P, P is Personne, N eid 512',
      '''SELECT _P.cw_eid
 FROM cw_Note AS _N, cw_Personne AS _P
-WHERE NOT EXISTS(SELECT 1 WHERE _N.cw_ecrit_par=_P.cw_eid) AND _N.cw_eid=512'''),
+WHERE NOT (_N.cw_ecrit_par=_P.cw_eid) AND _N.cw_eid=512'''),
 
     ('Any S,ES,T WHERE S state_of ET, ET name "CWUser", ES allowed_transition T, T destination_state S',
+     # XXX "_T.cw_destination_state IS NOT NULL" could be avoided here but it's not worth it
      '''SELECT _T.cw_destination_state, rel_allowed_transition1.eid_from, _T.cw_eid
 FROM allowed_transition_relation AS rel_allowed_transition1, cw_Transition AS _T, cw_Workflow AS _ET, state_of_relation AS rel_state_of0
-WHERE _T.cw_destination_state=rel_state_of0.eid_from AND rel_state_of0.eid_to=_ET.cw_eid AND _ET.cw_name=CWUser AND rel_allowed_transition1.eid_to=_T.cw_eid'''),
+WHERE _T.cw_destination_state=rel_state_of0.eid_from AND rel_state_of0.eid_to=_ET.cw_eid AND _ET.cw_name=CWUser AND rel_allowed_transition1.eid_to=_T.cw_eid AND _T.cw_destination_state IS NOT NULL'''),
 
     ('Any O WHERE S eid 0, S in_state O',
      '''SELECT _S.cw_in_state
@@ -1025,55 +1031,50 @@
 
     ('Any X WHERE NOT Y for_user X, X eid 123',
      '''SELECT 123
-WHERE NOT EXISTS(SELECT 1 FROM cw_CWProperty AS _Y WHERE _Y.cw_for_user=123)
-'''),
+WHERE NOT (EXISTS(SELECT 1 FROM cw_CWProperty AS _Y WHERE _Y.cw_for_user=123))'''),
 
+    ('DISTINCT Any X WHERE X from_entity OET, NOT X from_entity NET, OET name "Image", NET eid 1',
+     '''SELECT DISTINCT _X.cw_eid
+FROM cw_CWAttribute AS _X, cw_CWEType AS _OET
+WHERE _X.cw_from_entity=_OET.cw_eid AND NOT (_X.cw_from_entity=1) AND _OET.cw_name=Image
+UNION
+SELECT DISTINCT _X.cw_eid
+FROM cw_CWEType AS _OET, cw_CWRelation AS _X
+WHERE _X.cw_from_entity=_OET.cw_eid AND NOT (_X.cw_from_entity=1) AND _OET.cw_name=Image'''),
     ]
 
 INTERSECT = [
     ('Any SN WHERE NOT X in_state S, S name SN',
      '''SELECT _S.cw_name
 FROM cw_State AS _S
-WHERE NOT EXISTS(SELECT 1 FROM cw_Affaire AS _X WHERE _X.cw_in_state=_S.cw_eid)
-INTERSECT
-SELECT _S.cw_name
-FROM cw_State AS _S
-WHERE NOT EXISTS(SELECT 1 FROM cw_CWUser AS _X WHERE _X.cw_in_state=_S.cw_eid)
-INTERSECT
-SELECT _S.cw_name
-FROM cw_State AS _S
-WHERE NOT EXISTS(SELECT 1 FROM cw_Note AS _X WHERE _X.cw_in_state=_S.cw_eid)'''),
+WHERE NOT (EXISTS(SELECT 1 FROM cw_Affaire AS _X WHERE _X.cw_in_state=_S.cw_eid UNION SELECT 1 FROM cw_Note AS _X WHERE _X.cw_in_state=_S.cw_eid UNION SELECT 1 FROM cw_CWUser AS _X WHERE _X.cw_in_state=_S.cw_eid))'''),
 
     ('Any PN WHERE NOT X travaille S, X nom PN, S is IN(Division, Societe)',
      '''SELECT _X.cw_nom
 FROM cw_Personne AS _X
-WHERE NOT EXISTS(SELECT 1 FROM travaille_relation AS rel_travaille0,cw_Division AS _S WHERE rel_travaille0.eid_from=_X.cw_eid AND rel_travaille0.eid_to=_S.cw_eid)
-INTERSECT
-SELECT _X.cw_nom
-FROM cw_Personne AS _X
-WHERE NOT EXISTS(SELECT 1 FROM travaille_relation AS rel_travaille0,cw_Societe AS _S WHERE rel_travaille0.eid_from=_X.cw_eid AND rel_travaille0.eid_to=_S.cw_eid)'''),
+WHERE NOT (EXISTS(SELECT 1 FROM travaille_relation AS rel_travaille0, cw_Division AS _S WHERE rel_travaille0.eid_from=_X.cw_eid AND rel_travaille0.eid_to=_S.cw_eid UNION SELECT 1 FROM travaille_relation AS rel_travaille1, cw_Societe AS _S WHERE rel_travaille1.eid_from=_X.cw_eid AND rel_travaille1.eid_to=_S.cw_eid))'''),
 
     ('Any PN WHERE NOT X travaille S, S nom PN, S is IN(Division, Societe)',
      '''SELECT _S.cw_nom
 FROM cw_Division AS _S
-WHERE NOT EXISTS(SELECT 1 FROM travaille_relation AS rel_travaille0 WHERE rel_travaille0.eid_to=_S.cw_eid)
+WHERE NOT (EXISTS(SELECT 1 FROM travaille_relation AS rel_travaille0 WHERE rel_travaille0.eid_to=_S.cw_eid))
 UNION ALL
 SELECT _S.cw_nom
 FROM cw_Societe AS _S
-WHERE NOT EXISTS(SELECT 1 FROM travaille_relation AS rel_travaille0 WHERE rel_travaille0.eid_to=_S.cw_eid)'''),
+WHERE NOT (EXISTS(SELECT 1 FROM travaille_relation AS rel_travaille0 WHERE rel_travaille0.eid_to=_S.cw_eid))'''),
 
     ('Personne X WHERE NOT X travaille S, S nom "chouette"',
      '''SELECT _X.cw_eid
 FROM cw_Division AS _S, cw_Personne AS _X
-WHERE NOT EXISTS(SELECT 1 FROM travaille_relation AS rel_travaille0 WHERE rel_travaille0.eid_from=_X.cw_eid AND rel_travaille0.eid_to=_S.cw_eid) AND _S.cw_nom=chouette
+WHERE NOT (EXISTS(SELECT 1 FROM travaille_relation AS rel_travaille0 WHERE rel_travaille0.eid_from=_X.cw_eid AND rel_travaille0.eid_to=_S.cw_eid)) AND _S.cw_nom=chouette
 UNION ALL
 SELECT _X.cw_eid
 FROM cw_Personne AS _X, cw_Societe AS _S
-WHERE NOT EXISTS(SELECT 1 FROM travaille_relation AS rel_travaille0 WHERE rel_travaille0.eid_from=_X.cw_eid AND rel_travaille0.eid_to=_S.cw_eid) AND _S.cw_nom=chouette
+WHERE NOT (EXISTS(SELECT 1 FROM travaille_relation AS rel_travaille0 WHERE rel_travaille0.eid_from=_X.cw_eid AND rel_travaille0.eid_to=_S.cw_eid)) AND _S.cw_nom=chouette
 UNION ALL
 SELECT _X.cw_eid
 FROM cw_Personne AS _X, cw_SubDivision AS _S
-WHERE NOT EXISTS(SELECT 1 FROM travaille_relation AS rel_travaille0 WHERE rel_travaille0.eid_from=_X.cw_eid AND rel_travaille0.eid_to=_S.cw_eid) AND _S.cw_nom=chouette'''),
+WHERE NOT (EXISTS(SELECT 1 FROM travaille_relation AS rel_travaille0 WHERE rel_travaille0.eid_from=_X.cw_eid AND rel_travaille0.eid_to=_S.cw_eid)) AND _S.cw_nom=chouette'''),
 
     ('Any X WHERE X is ET, ET eid 2',
      '''SELECT rel_is0.eid_from
@@ -1222,6 +1223,10 @@
                     '''SELECT CAST(EXTRACT(MONTH from _P.cw_creation_date) AS INTEGER)
 FROM cw_Personne AS _P''')
 
+    def test_substring(self):
+        self._check("Any SUBSTRING(N, 1, 1) WHERE P nom N, P is Personne",
+                    '''SELECT SUBSTR(_P.cw_nom, 1, 1)
+FROM cw_Personne AS _P''')
 
     def test_parser_parse(self):
         for t in self._parse(PARSER):
@@ -1341,7 +1346,7 @@
         self.assertRaises(BadRQLQuery, self.o.generate, rqlst)
 
     def test_symmetric(self):
-        for t in self._parse(SYMETRIC):
+        for t in self._parse(SYMMETRIC):
             yield t
 
     def test_inline(self):
@@ -1389,7 +1394,7 @@
 WHERE EXISTS(SELECT 1 FROM cw_CWGroup AS _T WHERE _T.cw_name=managers)'''),
                    ('Any X,Y WHERE NOT X created_by Y, X eid 5, Y eid 6',
                     '''SELECT 5, 6
-WHERE NOT EXISTS(SELECT 1 FROM created_by_relation AS rel_created_by0 WHERE rel_created_by0.eid_from=5 AND rel_created_by0.eid_to=6)'''),
+WHERE NOT (EXISTS(SELECT 1 FROM created_by_relation AS rel_created_by0 WHERE rel_created_by0.eid_from=5 AND rel_created_by0.eid_to=6))'''),
                    ]
         for t in self._parse(queries):
             yield t
@@ -1427,6 +1432,18 @@
             self.o.attr_map.clear()
 
 
+    def test_concat_string(self):
+        self._check('Any "A"+R WHERE X ref R',
+                    '''SELECT (A || _X.cw_ref)
+FROM cw_Affaire AS _X''')
+
+    def test_or_having_fake_terms(self):
+        self._check('Any X WHERE X is CWUser, X creation_date D HAVING YEAR(D) = "2010" OR D = NULL',
+                    '''SELECT _X.cw_eid
+FROM cw_CWUser AS _X
+WHERE ((CAST(EXTRACT(YEAR from _X.cw_creation_date) AS INTEGER)=2010) OR (_X.cw_creation_date IS NULL))''')
+
+
 class SqliteSQLGeneratorTC(PostgresSQLGeneratorTC):
 
     def setUp(self):
@@ -1435,7 +1452,7 @@
         self.o = SQLGenerator(schema, dbhelper)
 
     def _norm_sql(self, sql):
-        return sql.strip().replace(' ILIKE ', ' LIKE ').replace('\nINTERSECT ALL\n', '\nINTERSECT\n')
+        return sql.strip().replace(' ILIKE ', ' LIKE ')
 
     def test_date_extraction(self):
         self._check("Any MONTH(D) WHERE P is Personne, P creation_date D",
@@ -1534,6 +1551,13 @@
             yield t
 
 
+    def test_or_having_fake_terms(self):
+        self._check('Any X WHERE X is CWUser, X creation_date D HAVING YEAR(D) = "2010" OR D = NULL',
+                    '''SELECT _X.cw_eid
+FROM cw_CWUser AS _X
+WHERE ((YEAR(_X.cw_creation_date)=2010) OR (_X.cw_creation_date IS NULL))''')
+
+
 
 class MySQLGenerator(PostgresSQLGeneratorTC):
 
@@ -1567,7 +1591,7 @@
                    ('Any X,Y WHERE NOT X created_by Y, X eid 5, Y eid 6',
                     '''SELECT 5, 6
 FROM (SELECT 1) AS _T
-WHERE NOT EXISTS(SELECT 1 FROM created_by_relation AS rel_created_by0 WHERE rel_created_by0.eid_from=5 AND rel_created_by0.eid_to=6)'''),
+WHERE NOT (EXISTS(SELECT 1 FROM created_by_relation AS rel_created_by0 WHERE rel_created_by0.eid_from=5 AND rel_created_by0.eid_to=6))'''),
                    ]
         for t in self._parse(queries):
             yield t
@@ -1614,12 +1638,24 @@
 WHERE rel_concerne0.eid_from=_A.cw_eid AND rel_concerne0.eid_to=_N.cw_eid
 GROUP BY _A.cw_eid,rel_todo_by1.eid_to,rel_todo_by3.eid_to''')
 
+    def test_substring(self):
+        self._check("Any SUBSTRING(N, 1, 1) WHERE P nom N, P is Personne",
+                    '''SELECT SUBSTRING(_P.cw_nom, 1, 1)
+FROM cw_Personne AS _P''')
+
+
+    def test_or_having_fake_terms(self):
+        self._check('Any X WHERE X is CWUser, X creation_date D HAVING YEAR(D) = "2010" OR D = NULL',
+                    '''SELECT _X.cw_eid
+FROM cw_CWUser AS _X
+WHERE ((EXTRACT(YEAR from _X.cw_creation_date)=2010) OR (_X.cw_creation_date IS NULL))''')
+
 
 class removeUnsusedSolutionsTC(TestCase):
     def test_invariant_not_varying(self):
         rqlst = mock_object(defined_vars={})
-        rqlst.defined_vars['A'] = mock_object(scope=rqlst, stinfo={'optrelations':False}, _q_invariant=True)
-        rqlst.defined_vars['B'] = mock_object(scope=rqlst, stinfo={'optrelations':False}, _q_invariant=False)
+        rqlst.defined_vars['A'] = mock_object(scope=rqlst, stinfo={}, _q_invariant=True)
+        rqlst.defined_vars['B'] = mock_object(scope=rqlst, stinfo={}, _q_invariant=False)
         self.assertEquals(remove_unused_solutions(rqlst, [{'A': 'RugbyGroup', 'B': 'RugbyTeam'},
                                                           {'A': 'FootGroup', 'B': 'FootTeam'}], {}, None),
                           ([{'A': 'RugbyGroup', 'B': 'RugbyTeam'},
@@ -1629,8 +1665,8 @@
 
     def test_invariant_varying(self):
         rqlst = mock_object(defined_vars={})
-        rqlst.defined_vars['A'] = mock_object(scope=rqlst, stinfo={'optrelations':False}, _q_invariant=True)
-        rqlst.defined_vars['B'] = mock_object(scope=rqlst, stinfo={'optrelations':False}, _q_invariant=False)
+        rqlst.defined_vars['A'] = mock_object(scope=rqlst, stinfo={}, _q_invariant=True)
+        rqlst.defined_vars['B'] = mock_object(scope=rqlst, stinfo={}, _q_invariant=False)
         self.assertEquals(remove_unused_solutions(rqlst, [{'A': 'RugbyGroup', 'B': 'RugbyTeam'},
                                                           {'A': 'FootGroup', 'B': 'RugbyTeam'}], {}, None),
                           ([{'A': 'RugbyGroup', 'B': 'RugbyTeam'}], {}, set())
--- a/server/test/unittest_rqlannotation.py	Thu May 06 08:24:46 2010 +0200
+++ b/server/test/unittest_rqlannotation.py	Mon Jul 19 15:36:16 2010 +0200
@@ -116,6 +116,12 @@
         self.assertEquals(rqlst.defined_vars['X']._q_invariant, False)
         self.assertEquals(rqlst.defined_vars['Y']._q_invariant, False)
 
+    def test_8(self):
+        # DISTINCT Any P WHERE P require_group %(g)s, NOT %(u)s has_group_permission P, P is CWPermission
+        rqlst = self._prepare('DISTINCT Any X WHERE A concerne X, NOT N migrated_from X, '
+                              'X is Note, N eid 1')
+        self.assertEquals(rqlst.defined_vars['X']._q_invariant, False)
+
     def test_diff_scope_identity_deamb(self):
         rqlst = self._prepare('Any X WHERE X concerne Y, Y is Note, EXISTS(Y identity Z, Z migrated_from N)')
         self.assertEquals(rqlst.defined_vars['Z']._q_invariant, True)
--- a/server/test/unittest_security.py	Thu May 06 08:24:46 2010 +0200
+++ b/server/test/unittest_security.py	Mon Jul 19 15:36:16 2010 +0200
@@ -15,8 +15,8 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""functional tests for server'security
-"""
+"""functional tests for server'security"""
+
 import sys
 
 from logilab.common.testlib import unittest_main, TestCase
@@ -66,6 +66,27 @@
                           cu.execute, 'Any X,P WHERE X is CWUser, X upassword P')
 
 
+class SecurityRewritingTC(BaseSecurityTC):
+    def hijack_source_execute(self):
+        def syntax_tree_search(*args, **kwargs):
+            self.query = (args, kwargs)
+            return []
+        self.repo.system_source.syntax_tree_search = syntax_tree_search
+
+    def tearDown(self):
+        self.repo.system_source.__dict__.pop('syntax_tree_search', None)
+        BaseSecurityTC.tearDown(self)
+
+    def test_not_relation_read_security(self):
+        cnx = self.login('iaminusersgrouponly')
+        self.hijack_source_execute()
+        self.execute('Any U WHERE NOT A todo_by U, A is Affaire')
+        self.assertEquals(self.query[0][1].as_string(),
+                          'Any U WHERE NOT EXISTS(A todo_by U), A is Affaire')
+        self.execute('Any U WHERE NOT EXISTS(A todo_by U), A is Affaire')
+        self.assertEquals(self.query[0][1].as_string(),
+                          'Any U WHERE NOT EXISTS(A todo_by U), A is Affaire')
+
 class SecurityTC(BaseSecurityTC):
 
     def setUp(self):
@@ -213,7 +234,7 @@
         # to actually get Unauthorized exception, try to delete a relation we can read
         self.restore_connection()
         eid = self.execute("INSERT Affaire X: X sujet 'pascool'")[0][0]
-        self.execute('SET X owned_by U WHERE X eid %(x)s, U login "iaminusersgrouponly"', {'x': eid}, 'x')
+        self.execute('SET X owned_by U WHERE X eid %(x)s, U login "iaminusersgrouponly"', {'x': eid})
         self.execute("SET A concerne S WHERE A sujet 'pascool', S is Societe")
         self.commit()
         cnx = self.login('iaminusersgrouponly')
@@ -230,7 +251,7 @@
         cnx = self.login('user')
         cu = cnx.cursor()
         cu.execute('SET X upassword %(passwd)s WHERE X eid %(x)s',
-                   {'x': ueid, 'passwd': 'newpwd'}, 'x')
+                   {'x': ueid, 'passwd': 'newpwd'})
         cnx.commit()
         cnx.close()
         cnx = self.login('user', password='newpwd')
@@ -240,7 +261,7 @@
         cnx = self.login('iaminusersgrouponly')
         cu = cnx.cursor()
         cu.execute('SET X upassword %(passwd)s WHERE X eid %(x)s',
-                   {'x': ueid, 'passwd': 'newpwd'}, 'x')
+                   {'x': ueid, 'passwd': 'newpwd'})
         self.assertRaises(Unauthorized, cnx.commit)
 
     # read security test
@@ -259,22 +280,22 @@
         cu = cnx.cursor()
         rset = cu.execute('Affaire X')
         self.assertEquals(rset.rows, [])
-        self.assertRaises(Unauthorized, cu.execute, 'Any X WHERE X eid %(x)s', {'x': eid}, 'x')
+        self.assertRaises(Unauthorized, cu.execute, 'Any X WHERE X eid %(x)s', {'x': eid})
         # cache test
-        self.assertRaises(Unauthorized, cu.execute, 'Any X WHERE X eid %(x)s', {'x': eid}, 'x')
+        self.assertRaises(Unauthorized, cu.execute, 'Any X WHERE X eid %(x)s', {'x': eid})
         aff2 = cu.execute("INSERT Affaire X: X sujet 'cool'")[0][0]
         soc1 = cu.execute("INSERT Societe X: X nom 'chouette'")[0][0]
         cu.execute("SET A concerne S WHERE A is Affaire, S is Societe")
         cnx.commit()
-        rset = cu.execute('Any X WHERE X eid %(x)s', {'x': aff2}, 'x')
+        rset = cu.execute('Any X WHERE X eid %(x)s', {'x': aff2})
         self.assertEquals(rset.rows, [[aff2]])
         # more cache test w/ NOT eid
-        rset = cu.execute('Affaire X WHERE NOT X eid %(x)s', {'x': eid}, 'x')
+        rset = cu.execute('Affaire X WHERE NOT X eid %(x)s', {'x': eid})
         self.assertEquals(rset.rows, [[aff2]])
-        rset = cu.execute('Affaire X WHERE NOT X eid %(x)s', {'x': aff2}, 'x')
+        rset = cu.execute('Affaire X WHERE NOT X eid %(x)s', {'x': aff2})
         self.assertEquals(rset.rows, [])
         # test can't update an attribute of an entity that can't be read
-        self.assertRaises(Unauthorized, cu.execute, 'SET X sujet "hacked" WHERE X eid %(x)s', {'x': eid}, 'x')
+        self.assertRaises(Unauthorized, cu.execute, 'SET X sujet "hacked" WHERE X eid %(x)s', {'x': eid})
 
 
     def test_entity_created_in_transaction(self):
@@ -286,7 +307,7 @@
             cu = cnx.cursor()
             aff2 = cu.execute("INSERT Affaire X: X sujet 'cool'")[0][0]
             # entity created in transaction are readable *by eid*
-            self.failUnless(cu.execute('Any X WHERE X eid %(x)s', {'x':aff2}, 'x'))
+            self.failUnless(cu.execute('Any X WHERE X eid %(x)s', {'x':aff2}))
             # XXX would be nice if it worked
             rset = cu.execute("Affaire X WHERE X sujet 'cool'")
             self.assertEquals(len(rset), 0)
@@ -297,18 +318,17 @@
     def test_read_erqlexpr_has_text1(self):
         aff1 = self.execute("INSERT Affaire X: X sujet 'cool'")[0][0]
         card1 = self.execute("INSERT Card X: X title 'cool'")[0][0]
-        self.execute('SET X owned_by U WHERE X eid %(x)s, U login "iaminusersgrouponly"', {'x': card1}, 'x')
+        self.execute('SET X owned_by U WHERE X eid %(x)s, U login "iaminusersgrouponly"', {'x': card1})
         self.commit()
         cnx = self.login('iaminusersgrouponly')
         cu = cnx.cursor()
         aff2 = cu.execute("INSERT Affaire X: X sujet 'cool'")[0][0]
         soc1 = cu.execute("INSERT Societe X: X nom 'chouette'")[0][0]
-        cu.execute("SET A concerne S WHERE A eid %(a)s, S eid %(s)s", {'a': aff2, 's': soc1},
-                   ('a', 's'))
+        cu.execute("SET A concerne S WHERE A eid %(a)s, S eid %(s)s", {'a': aff2, 's': soc1})
         cnx.commit()
-        self.assertRaises(Unauthorized, cu.execute, 'Any X WHERE X eid %(x)s', {'x':aff1}, 'x')
-        self.failUnless(cu.execute('Any X WHERE X eid %(x)s', {'x':aff2}, 'x'))
-        self.failUnless(cu.execute('Any X WHERE X eid %(x)s', {'x':card1}, 'x'))
+        self.assertRaises(Unauthorized, cu.execute, 'Any X WHERE X eid %(x)s', {'x':aff1})
+        self.failUnless(cu.execute('Any X WHERE X eid %(x)s', {'x':aff2}))
+        self.failUnless(cu.execute('Any X WHERE X eid %(x)s', {'x':card1}))
         rset = cu.execute("Any X WHERE X has_text 'cool'")
         self.assertEquals(sorted(eid for eid, in rset.rows),
                           [card1, aff2])
@@ -363,7 +383,7 @@
         # only managers should be able to edit the 'test' attribute of Personne entities
         eid = self.execute("INSERT Personne X: X nom 'bidule', X web 'http://www.debian.org', X test TRUE")[0][0]
         self.commit()
-        self.execute('SET X test FALSE WHERE X eid %(x)s', {'x': eid}, 'x')
+        self.execute('SET X test FALSE WHERE X eid %(x)s', {'x': eid})
         self.commit()
         cnx = self.login('iaminusersgrouponly')
         cu = cnx.cursor()
@@ -373,11 +393,11 @@
         self.assertRaises(Unauthorized, cnx.commit)
         eid = cu.execute("INSERT Personne X: X nom 'bidule', X web 'http://www.debian.org'")[0][0]
         cnx.commit()
-        cu.execute('SET X test FALSE WHERE X eid %(x)s', {'x': eid}, 'x')
+        cu.execute('SET X test FALSE WHERE X eid %(x)s', {'x': eid})
         self.assertRaises(Unauthorized, cnx.commit)
-        cu.execute('SET X test TRUE WHERE X eid %(x)s', {'x': eid}, 'x')
+        cu.execute('SET X test TRUE WHERE X eid %(x)s', {'x': eid})
         self.assertRaises(Unauthorized, cnx.commit)
-        cu.execute('SET X web "http://www.logilab.org" WHERE X eid %(x)s', {'x': eid}, 'x')
+        cu.execute('SET X web "http://www.logilab.org" WHERE X eid %(x)s', {'x': eid})
         cnx.commit()
         cnx.close()
 
@@ -386,23 +406,23 @@
         note = self.execute("INSERT Note X: X para 'bidule'").get_entity(0, 0)
         self.commit()
         note.fire_transition('markasdone')
-        self.execute('SET X para "truc" WHERE X eid %(x)s', {'x': note.eid}, 'x')
+        self.execute('SET X para "truc" WHERE X eid %(x)s', {'x': note.eid})
         self.commit()
         cnx = self.login('iaminusersgrouponly')
         cu = cnx.cursor()
-        cu.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note.eid}, 'x')
+        cu.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note.eid})
         self.assertRaises(Unauthorized, cnx.commit)
         note2 = cu.execute("INSERT Note X: X para 'bidule'").get_entity(0, 0)
         cnx.commit()
         note2.fire_transition('markasdone')
         cnx.commit()
-        self.assertEquals(len(cu.execute('Any X WHERE X in_state S, S name "todo", X eid %(x)s', {'x': note2.eid}, 'x')),
+        self.assertEquals(len(cu.execute('Any X WHERE X in_state S, S name "todo", X eid %(x)s', {'x': note2.eid})),
                           0)
-        cu.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note2.eid}, 'x')
+        cu.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note2.eid})
         self.assertRaises(Unauthorized, cnx.commit)
         note2.fire_transition('redoit')
         cnx.commit()
-        cu.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note2.eid}, 'x')
+        cu.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note2.eid})
         cnx.commit()
 
     def test_attribute_read_security(self):
@@ -422,7 +442,6 @@
         self.failUnless(x.creation_date)
         cnx.rollback()
 
-
 class BaseSchemaSecurityTC(BaseSecurityTC):
     """tests related to the base schema permission configuration"""
 
@@ -463,13 +482,13 @@
         # should only be able to read the anonymous user, not another one
         origuser = self.adminsession.user
         self.assertRaises(Unauthorized,
-                          cu.execute, 'CWUser X WHERE X eid %(x)s', {'x': origuser.eid}, 'x')
+                          cu.execute, 'CWUser X WHERE X eid %(x)s', {'x': origuser.eid})
         # nothing selected, nothing updated, no exception raised
         #self.assertRaises(Unauthorized,
         #                  cu.execute, 'SET X login "toto" WHERE X eid %(x)s',
         #                  {'x': self.user.eid})
 
-        rset = cu.execute('CWUser X WHERE X eid %(x)s', {'x': anon.eid}, 'x')
+        rset = cu.execute('CWUser X WHERE X eid %(x)s', {'x': anon.eid})
         self.assertEquals(rset.rows, [[anon.eid]])
         # but can't modify it
         cu.execute('SET X login "toto" WHERE X eid %(x)s', {'x': anon.eid})
@@ -510,7 +529,7 @@
         self.assertRaises(Unauthorized, cu.execute,'DELETE B bookmarked_by U')
         self.assertRaises(Unauthorized,
                           cu.execute, 'SET B bookmarked_by U WHERE U eid %(x)s, B eid %(b)s',
-                          {'x': anoneid, 'b': beid1}, 'x')
+                          {'x': anoneid, 'b': beid1})
 
 
     def test_ambigous_ordered(self):
@@ -567,10 +586,10 @@
         aff.clear_related_cache('wf_info_for', role='object')
         self.assertRaises(Unauthorized,
                           self.execute, 'SET TI from_state S WHERE TI eid %(ti)s, S name "ben non"',
-                          {'ti': trinfo.eid}, 'ti')
+                          {'ti': trinfo.eid})
         self.assertRaises(Unauthorized,
                           self.execute, 'SET TI to_state S WHERE TI eid %(ti)s, S name "pitetre"',
-                          {'ti': trinfo.eid}, 'ti')
+                          {'ti': trinfo.eid})
 
 if __name__ == '__main__':
     unittest_main()
--- a/server/test/unittest_storage.py	Thu May 06 08:24:46 2010 +0200
+++ b/server/test/unittest_storage.py	Mon Jul 19 15:36:16 2010 +0200
@@ -21,7 +21,7 @@
 
 from __future__ import with_statement
 
-from logilab.common.testlib import unittest_main
+from logilab.common.testlib import unittest_main, tag
 from cubicweb.devtools.testlib import CubicWebTC
 
 import os.path as osp
@@ -145,14 +145,14 @@
                             ' (Any D, X WHERE X eid %(x)s, X data D)'
                             '  UNION '
                             ' (Any D, X WHERE X eid %(x)s, X data D)'
-                            ')', {'x': f1.eid}, 'x')
+                            ')', {'x': f1.eid})
         self.assertEquals(len(rset), 2)
         self.assertEquals(rset[0][0], f1.eid)
         self.assertEquals(rset[1][0], f1.eid)
         self.assertEquals(rset[0][1].getvalue(), 'the-data')
         self.assertEquals(rset[1][1].getvalue(), 'the-data')
         rset = self.execute('Any X,LENGTH(D) WHERE X eid %(x)s, X data D',
-                            {'x': f1.eid}, 'x')
+                            {'x': f1.eid})
         self.assertEquals(len(rset), 1)
         self.assertEquals(rset[0][0], f1.eid)
         self.assertEquals(rset[0][1], len('the-data'))
@@ -160,7 +160,7 @@
                             ' (Any D, X WHERE X eid %(x)s, X data D)'
                             '  UNION '
                             ' (Any D, X WHERE X eid %(x)s, X data D)'
-                            ')', {'x': f1.eid}, 'x')
+                            ')', {'x': f1.eid})
         self.assertEquals(len(rset), 2)
         self.assertEquals(rset[0][0], f1.eid)
         self.assertEquals(rset[1][0], f1.eid)
@@ -168,7 +168,7 @@
         self.assertEquals(rset[1][1], len('the-data'))
         ex = self.assertRaises(QueryError, self.execute,
                                'Any X,UPPER(D) WHERE X eid %(x)s, X data D',
-                               {'x': f1.eid}, 'x')
+                               {'x': f1.eid})
         self.assertEquals(str(ex), 'UPPER can not be called on mapped attribute')
 
 
@@ -180,7 +180,7 @@
         self.assertEquals(f1.data.getvalue(), file(filepath).read(),
                           'files content differ')
 
-
+    @tag('Storage', 'BFSS', 'update')
     def test_bfss_update_with_existing_data(self):
         # use self.session to use server-side cache
         f1 = self.session.create_entity('File', data=Binary('some data'),
@@ -191,9 +191,55 @@
                      {'d': Binary('some other data'), 'f': f1.eid})
         self.assertEquals(f1.data.getvalue(), 'some other data')
         self.commit()
-        f2 = self.entity('Any F WHERE F eid %(f)s, F is File', {'f': f1.eid})
+        f2 = self.execute('Any F WHERE F eid %(f)s, F is File', {'f': f1.eid}).get_entity(0, 0)
         self.assertEquals(f2.data.getvalue(), 'some other data')
 
+    @tag('Storage', 'BFSS', 'update', 'extension', 'commit')
+    def test_bfss_update_with_different_extension_commited(self):
+        # use self.session to use server-side cache
+        f1 = self.session.create_entity('File', data=Binary('some data'),
+                                        data_format=u'text/plain', data_name=u'foo.txt')
+        # NOTE: do not use set_attributes() which would automatically
+        #       update f1's local dict. We want the pure rql version to work
+        self.commit()
+        old_path = self.fspath(f1)
+        self.failUnless(osp.isfile(old_path))
+        self.assertEquals(osp.splitext(old_path)[1], '.txt')
+        self.execute('SET F data %(d)s, F data_name %(dn)s, F data_format %(df)s WHERE F eid %(f)s',
+                     {'d': Binary('some other data'), 'f': f1.eid, 'dn': u'bar.jpg', 'df': u'image/jpeg'})
+        self.commit()
+        # the new file exists with correct extension
+        # the old file is dead
+        f2 = self.execute('Any F WHERE F eid %(f)s, F is File', {'f': f1.eid}).get_entity(0, 0)
+        new_path = self.fspath(f2)
+        self.failIf(osp.isfile(old_path))
+        self.failUnless(osp.isfile(new_path))
+        self.assertEquals(osp.splitext(new_path)[1], '.jpg')
+
+    @tag('Storage', 'BFSS', 'update', 'extension', 'rollback')
+    def test_bfss_update_with_different_extension_rollbacked(self):
+        # use self.session to use server-side cache
+        f1 = self.session.create_entity('File', data=Binary('some data'),
+                                        data_format=u'text/plain', data_name=u'foo.txt')
+        # NOTE: do not use set_attributes() which would automatically
+        #       update f1's local dict. We want the pure rql version to work
+        self.commit()
+        old_path = self.fspath(f1)
+        old_data = f1.data.getvalue()
+        self.failUnless(osp.isfile(old_path))
+        self.assertEquals(osp.splitext(old_path)[1], '.txt')
+        self.execute('SET F data %(d)s, F data_name %(dn)s, F data_format %(df)s WHERE F eid %(f)s',
+                     {'d': Binary('some other data'), 'f': f1.eid, 'dn': u'bar.jpg', 'df': u'image/jpeg'})
+        self.rollback()
+        # after rollback, the new data must not have been written:
+        # the original file is still there, unchanged
+        f2 = self.execute('Any F WHERE F eid %(f)s, F is File', {'f': f1.eid}).get_entity(0, 0)
+        new_path = self.fspath(f2)
+        new_data = f2.data.getvalue()
+        self.failUnless(osp.isfile(new_path))
+        self.assertEquals(osp.splitext(new_path)[1], '.txt')
+        self.assertEquals(old_path, new_path)
+        self.assertEquals(old_data, new_data)
 
     def test_bfss_update_with_fs_importing(self):
         # use self.session to use server-side cache
--- a/server/test/unittest_undo.py	Thu May 06 08:24:46 2010 +0200
+++ b/server/test/unittest_undo.py	Mon Jul 19 15:36:16 2010 +0200
@@ -157,8 +157,8 @@
         undotxuuid = self.commit()
         self.assertEquals(undotxuuid, None) # undo not undoable
         self.assertEquals(errors, [])
-        self.failUnless(self.execute('Any X WHERE X eid %(x)s', {'x': toto.eid}, 'x'))
-        self.failUnless(self.execute('Any X WHERE X eid %(x)s', {'x': e.eid}, 'x'))
+        self.failUnless(self.execute('Any X WHERE X eid %(x)s', {'x': toto.eid}))
+        self.failUnless(self.execute('Any X WHERE X eid %(x)s', {'x': e.eid}))
         self.failUnless(self.execute('Any X WHERE X has_text "toto@logilab"'))
         self.assertEquals(toto.state, 'activated')
         self.assertEquals(toto.get_email(), 'toto@logilab.org')
@@ -229,8 +229,8 @@
         errors = self.cnx.undo_transaction(txuuid)
         self.commit()
         self.failIf(errors)
-        self.failIf(self.execute('Any X WHERE X eid %(x)s', {'x': c.eid}, 'x'))
-        self.failIf(self.execute('Any X WHERE X eid %(x)s', {'x': p.eid}, 'x'))
+        self.failIf(self.execute('Any X WHERE X eid %(x)s', {'x': c.eid}))
+        self.failIf(self.execute('Any X WHERE X eid %(x)s', {'x': p.eid}))
         self.failIf(self.execute('Any X,Y WHERE X fiche Y'))
         self.session.set_pool()
         for eid in (p.eid, c.eid):
@@ -283,3 +283,7 @@
         #                    'required on CWUser (%s)' % self.toto.eid})
 
     # test implicit 'replacement' of an inlined relation
+
+if __name__ == '__main__':
+    from logilab.common.testlib import unittest_main
+    unittest_main()
--- a/server/utils.py	Thu May 06 08:24:46 2010 +0200
+++ b/server/utils.py	Mon Jul 19 15:36:16 2010 +0200
@@ -123,6 +123,10 @@
 class LoopTask(object):
     """threaded task restarting itself once executed"""
     def __init__(self, interval, func, args):
+        if interval <= 0:
+            raise ValueError('Loop task interval must be > 0 '
+                             '(current value: %f for %s)' % \
+                             (interval, func.__name__))
         self.interval = interval
         def auto_restart_func(self=self, func=func, args=args):
             try:
@@ -137,13 +141,15 @@
 
     def start(self):
         self._t = Timer(self.interval, self.func)
+        self._t.setName('%s-%s[%d]' % (self._t.getName(), self.name, self.interval))
         self._t.start()
 
     def cancel(self):
         self._t.cancel()
 
     def join(self):
-        self._t.join()
+        if self._t.isAlive():
+            self._t.join()
 
 
 class RepoThread(Thread):
--- a/setup.py	Thu May 06 08:24:46 2010 +0200
+++ b/setup.py	Mon Jul 19 15:36:16 2010 +0200
@@ -24,38 +24,43 @@
 import os
 import sys
 import shutil
-from distutils.core import setup
-from distutils.command import install_lib
 from os.path import isdir, exists, join, walk
 
+try:
+    if os.environ.get('NO_SETUPTOOLS'):
+        raise ImportError() # do as if setuptools were not available
+    from setuptools import setup
+    from setuptools.command import install_lib
+    USE_SETUPTOOLS = True
+except ImportError:
+    from distutils.core import setup
+    from distutils.command import install_lib
+    USE_SETUPTOOLS = False
+
 # import required features
-from __pkginfo__ import modname, version, license, short_desc, long_desc, \
-     web, author, author_email
+from __pkginfo__ import modname, version, license, description, web, \
+     author, author_email
+
+long_description = file('README').read()
+
 # import optional features
-try:
-    from __pkginfo__ import distname
-except ImportError:
-    distname = modname
-try:
-    from __pkginfo__ import scripts
-except ImportError:
-    scripts = []
-try:
-    from __pkginfo__ import data_files
-except ImportError:
-    data_files = None
-try:
-    from __pkginfo__ import subpackage_of
-except ImportError:
-    subpackage_of = None
-try:
-    from __pkginfo__ import include_dirs
-except ImportError:
-    include_dirs = []
-try:
-    from __pkginfo__ import ext_modules
-except ImportError:
-    ext_modules = None
+import __pkginfo__
+if USE_SETUPTOOLS:
+    requires = {}
+    for entry in ("__depends__", "__recommends__"):
+        requires.update(getattr(__pkginfo__, entry, {}))
+    install_requires = [("%s %s" % (d, v and v or "")).strip()
+                       for d, v in requires.iteritems()]
+else:
+    install_requires = []
+
+distname = getattr(__pkginfo__, 'distname', modname)
+scripts = getattr(__pkginfo__, 'scripts', ())
+include_dirs = getattr(__pkginfo__, 'include_dirs', ())
+data_files = getattr(__pkginfo__, 'data_files', None)
+subpackage_of = getattr(__pkginfo__, 'subpackage_of', None)
+ext_modules = getattr(__pkginfo__, 'ext_modules', None)
+
 
 BASE_BLACKLIST = ('CVS', 'debian', 'dist', 'build', '__buildlog')
 IGNORED_EXTENSIONS = ('.pyc', '.pyo', '.elc')
@@ -92,7 +97,8 @@
 
 def export(from_dir, to_dir,
            blacklist=BASE_BLACKLIST,
-           ignore_ext=IGNORED_EXTENSIONS):
+           ignore_ext=IGNORED_EXTENSIONS,
+           verbose=True):
     """make a mirror of from_dir in to_dir, omitting directories and files
     listed in the black list
     """
@@ -111,7 +117,8 @@
                 continue
             src = '%s/%s' % (directory, filename)
             dest = to_dir + src[len(from_dir):]
-            print >> sys.stderr, src, '->', dest
+            if verbose:
+               print >> sys.stderr, src, '->', dest
             if os.path.isdir(src):
                 if not exists(dest):
                     os.mkdir(dest)
@@ -154,28 +161,32 @@
                 base = modname
             for directory in include_dirs:
                 dest = join(self.install_dir, base, directory)
-                export(directory, dest)
+                export(directory, dest, verbose=False)
 
 def install(**kwargs):
     """setup entry point"""
+    if USE_SETUPTOOLS:
+        if '--force-manifest' in sys.argv:
+            sys.argv.remove('--force-manifest')
+    # install-layout option was introduced in 2.5.3-1~exp1
+    elif sys.version_info < (2, 5, 4) and '--install-layout=deb' in sys.argv:
+        sys.argv.remove('--install-layout=deb')
     if subpackage_of:
         package = subpackage_of + '.' + modname
         kwargs['package_dir'] = {package : '.'}
         packages = [package] + get_packages(os.getcwd(), package)
+        if USE_SETUPTOOLS:
+            kwargs['namespace_packages'] = [subpackage_of]
     else:
         kwargs['package_dir'] = {modname : '.'}
         packages = [modname] + get_packages(os.getcwd(), modname)
+    if USE_SETUPTOOLS:
+        kwargs['install_requires'] = install_requires
     kwargs['packages'] = packages
-    return setup(name = distname,
-                 version = version,
-                 license =license,
-                 description = short_desc,
-                 long_description = long_desc,
-                 author = author,
-                 author_email = author_email,
-                 url = web,
-                 scripts = ensure_scripts(scripts),
-                 data_files=data_files,
+    return setup(name=distname, version=version, license=license, url=web,
+                 description=description, long_description=long_description,
+                 author=author, author_email=author_email,
+                 scripts=ensure_scripts(scripts), data_files=data_files,
                  ext_modules=ext_modules,
                  cmdclass={'install_lib': MyInstallLib},
                  **kwargs
--- a/skeleton/MANIFEST.in	Thu May 06 08:24:46 2010 +0200
+++ b/skeleton/MANIFEST.in	Mon Jul 19 15:36:16 2010 +0200
@@ -2,3 +2,4 @@
 include */*.py
 recursive-include data external_resources *.gif *.png *.css *.ico *.js
 recursive-include i18n *.pot *.po
+recursive-include wdoc *
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/skeleton/README.tmpl	Mon Jul 19 15:36:16 2010 +0200
@@ -0,0 +1,3 @@
+Summary
+-------
+%(longdesc)s
--- a/skeleton/__pkginfo__.py.tmpl	Thu May 06 08:24:46 2010 +0200
+++ b/skeleton/__pkginfo__.py.tmpl	Mon Jul 19 15:36:16 2010 +0200
@@ -7,15 +7,12 @@
 numversion = (0, 1, 0)
 version = '.'.join(str(num) for num in numversion)
 
-license = 'LCL'
-copyright = '''Copyright (c) %(year)s %(author)s.
-%(author-web-site)s -- mailto:%(author-email)s'''
+license = '%(license)s'
 
 author = '%(author)s'
 author_email = '%(author-email)s'
 
-short_desc = '%(shortdesc)s'
-long_desc = '''%(longdesc)s'''
+description = '%(shortdesc)s'
 
 web = 'http://www.cubicweb.org/project/%%s' %% distname
 
@@ -37,18 +34,12 @@
     [THIS_CUBE_DIR, [fname for fname in glob('*.py') if fname != 'setup.py']],
     ]
 # check for possible extended cube layout
-for dname in ('entities', 'views', 'sobjects', 'hooks', 'schema', 'data', 'i18n', 'migration'):
+for dname in ('entities', 'views', 'sobjects', 'hooks', 'schema', 'data', 'wdoc', 'i18n', 'migration'):
     if isdir(dname):
         data_files.append([join(THIS_CUBE_DIR, dname), listdir(dname)])
 # Note: here, you'll need to add subdirectories if you want
 # them to be included in the debian package
 
-# a dict; you might want to provide a version specification
-# of the form '>= x.y.z'
-__depends__ = {'cubicweb': '>= 3.7.0'}
-__depends_cubes__ = %(dependencies)s
-__recommends_cubes__ = {}
-# obsolete (will be gone in cw 3.8.0)
-__use__ = tuple(__depends_cubes__)
-__recommend__ = tuple(__recommends_cubes__)
+__depends__ =  %(dependencies)s
+__recommends__ = {}
 
--- a/skeleton/data/cubes.CUBENAME.css	Thu May 06 08:24:46 2010 +0200
+++ b/skeleton/data/cubes.CUBENAME.css	Mon Jul 19 15:36:16 2010 +0200
@@ -1,1 +1,1 @@
-/* template specific CSS */
+/* cube-specific CSS */
--- a/skeleton/data/cubes.CUBENAME.js	Thu May 06 08:24:46 2010 +0200
+++ b/skeleton/data/cubes.CUBENAME.js	Mon Jul 19 15:36:16 2010 +0200
@@ -1,1 +1,1 @@
-// This contains template-specific javascript
\ No newline at end of file
+// This contains cube-specific javascript
\ No newline at end of file
--- a/skeleton/debian/rules.tmpl	Thu May 06 08:24:46 2010 +0200
+++ b/skeleton/debian/rules.tmpl	Mon Jul 19 15:36:16 2010 +0200
@@ -7,7 +7,7 @@
 build: build-stamp
 build-stamp:
 	dh_testdir
-	python setup.py -q build
+	NO_SETUPTOOLS=1 python setup.py -q build
 	touch build-stamp
 
 clean:
@@ -23,7 +23,7 @@
 	dh_testroot
 	dh_clean -k
 	dh_installdirs -i
-	python setup.py -q install --no-compile --prefix=debian/%(distname)s/usr/
+	NO_SETUPTOOLS=1 python setup.py -q install --no-compile --prefix=debian/%(distname)s/usr/
 	# remove generated .egg-info file
 	rm -rf debian/%(distname)s/usr/lib/python*
 
--- a/skeleton/entities.py	Thu May 06 08:24:46 2010 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,20 +0,0 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-#
-# This file is part of CubicWeb.
-#
-# CubicWeb is free software: you can redistribute it and/or modify it under the
-# terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option)
-# any later version.
-#
-# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""this contains the cube-specific entities' classes
-
-"""
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/skeleton/entities.py.tmpl	Mon Jul 19 15:36:16 2010 +0200
@@ -0,0 +1,5 @@
+# copyright %(year)s %(author)s, all rights reserved.
+# contact %(author-web-site)s -- mailto:%(author-email)s
+#
+%(long-license)s
+"""%(distname)s entity's classes"""
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/skeleton/hooks.py.tmpl	Mon Jul 19 15:36:16 2010 +0200
@@ -0,0 +1,5 @@
+# copyright %(year)s %(author)s, all rights reserved.
+# contact %(author-web-site)s -- mailto:%(author-email)s
+#
+%(long-license)s
+"""%(distname)s specific hooks and operations"""
--- a/skeleton/migration/postcreate.py	Thu May 06 08:24:46 2010 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,25 +0,0 @@
-# postcreate script. You could setup site properties or a workflow here for example
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-#
-# This file is part of CubicWeb.
-#
-# CubicWeb is free software: you can redistribute it and/or modify it under the
-# terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option)
-# any later version.
-#
-# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""
-
-"""
-
-# Example of site property change
-#set_property('ui.site-title', "<sitename>")
-
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/skeleton/migration/postcreate.py.tmpl	Mon Jul 19 15:36:16 2010 +0200
@@ -0,0 +1,13 @@
+# copyright %(year)s %(author)s, all rights reserved.
+# contact %(author-web-site)s -- mailto:%(author-email)s
+#
+%(long-license)s
+"""%(distname)s postcreate script, executed at instance creation time or when
+the cube is added to an existing instance.
+
+You could setup site properties or a workflow here for example.
+"""
+
+# Example of site property change
+#set_property('ui.site-title', "<sitename>")
+
--- a/skeleton/migration/precreate.py	Thu May 06 08:24:46 2010 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,23 +0,0 @@
-# Instructions here will be read before reading the schema
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-#
-# This file is part of CubicWeb.
-#
-# CubicWeb is free software: you can redistribute it and/or modify it under the
-# terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option)
-# any later version.
-#
-# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""
-
-"""
-# You could create your own groups here, like in :
-#   create_entity('CWGroup', name=u'mygroup')
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/skeleton/migration/precreate.py.tmpl	Mon Jul 19 15:36:16 2010 +0200
@@ -0,0 +1,9 @@
+# copyright %(year)s %(author)s, all rights reserved.
+# contact %(author-web-site)s -- mailto:%(author-email)s
+#
+%(long-license)s
+"""%(distname)s precreate script, executed at instance creation time or when
+the cube is added to an existing instance, before the schema is serialized.
+
+This is typically used to create groups referenced by the cube's schema.
+"""
--- a/skeleton/schema.py	Thu May 06 08:24:46 2010 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,21 +0,0 @@
-# cube's specific schema
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-#
-# This file is part of CubicWeb.
-#
-# CubicWeb is free software: you can redistribute it and/or modify it under the
-# terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option)
-# any later version.
-#
-# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""
-
-"""
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/skeleton/schema.py.tmpl	Mon Jul 19 15:36:16 2010 +0200
@@ -0,0 +1,5 @@
+# copyright %(year)s %(author)s, all rights reserved.
+# contact %(author-web-site)s -- mailto:%(author-email)s
+#
+%(long-license)s
+"""%(distname)s schema"""
--- a/skeleton/setup.py	Thu May 06 08:24:46 2010 +0200
+++ b/skeleton/setup.py	Mon Jul 19 15:36:16 2010 +0200
@@ -1,70 +1,164 @@
 #!/usr/bin/env python
+# pylint: disable-msg=W0404,W0622,W0704,W0613,W0152
 # copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-#
-# This file is part of CubicWeb.
-#
-# CubicWeb is free software: you can redistribute it and/or modify it under the
-# terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option)
-# any later version.
-#
-# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""
+
+__docformat__ = "restructuredtext en"
+
+import os
+import sys
+import shutil
+from os.path import isdir, exists, join, walk
 
-"""
-# pylint: disable-msg=W0404,W0622,W0704,W0613,W0152
-# Copyright (c) 2003-2010 LOGILAB S.A. (Paris, FRANCE).
-# http://www.logilab.fr/ -- mailto:contact@logilab.fr
-#
-# This program is free software; you can redistribute it and/or modify it under
-# the terms of the GNU General Public License as published by the Free Software
-# Foundation; either version 2 of the License, or (at your option) any later
-# version.
-#
-# This program is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along with
-# this program; if not, write to the Free Software Foundation, Inc.,
-# 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
-""" Generic Setup script, takes package info from __pkginfo__.py file """
+try:
+    if os.environ.get('NO_SETUPTOOLS'):
+        raise ImportError()
+    from setuptools import setup
+    from setuptools.command import install_lib
+    USE_SETUPTOOLS = 1
+except ImportError:
+    from distutils.core import setup
+    from distutils.command import install_lib
+    USE_SETUPTOOLS = 0
 
-from distutils.core import setup
 
+sys.modules.pop('__pkginfo__', None)
 # import required features
-from __pkginfo__ import distname, version, license, short_desc, long_desc, \
+from __pkginfo__ import modname, version, license, description, \
      web, author, author_email
 # import optional features
-try:
-    from __pkginfo__ import data_files
-except ImportError:
-    data_files = None
-try:
-    from __pkginfo__ import include_dirs
-except ImportError:
-    include_dirs = []
+import __pkginfo__
+distname = getattr(__pkginfo__, 'distname', modname)
+scripts = getattr(__pkginfo__, 'scripts', [])
+data_files = getattr(__pkginfo__, 'data_files', None)
+include_dirs = getattr(__pkginfo__, 'include_dirs', [])
+ext_modules = getattr(__pkginfo__, 'ext_modules', None)
+dependency_links = getattr(__pkginfo__, 'dependency_links', [])
+
+STD_BLACKLIST = ('CVS', '.svn', '.hg', 'debian', 'dist', 'build')
+
+IGNORED_EXTENSIONS = ('.pyc', '.pyo', '.elc', '~')
+
+if exists('README'):
+    long_description = file('README').read()
+else:
+    long_description = ''
+if USE_SETUPTOOLS:
+   requires = {}
+   for entry in ("__depends__", "__recommends__"):
+      requires.update(getattr(__pkginfo__, entry, {}))
+   install_requires = [("%s %s" % (d, v and v or "")).strip()
+                       for d, v in requires.iteritems()]
+else:
+   install_requires = []
+
+
+def ensure_scripts(linux_scripts):
+    """Creates the proper script names required for each platform
+    (taken from 4Suite)
+    """
+    from distutils import util
+    if util.get_platform()[:3] == 'win':
+        scripts_ = [script + '.bat' for script in linux_scripts]
+    else:
+        scripts_ = linux_scripts
+    return scripts_
+
+def get_packages(directory, prefix):
+    """return a list of subpackages for the given directory"""
+    result = []
+    for package in os.listdir(directory):
+        absfile = join(directory, package)
+        if isdir(absfile):
+            if exists(join(absfile, '__init__.py')) or \
+                   package in ('test', 'tests'):
+                if prefix:
+                    result.append('%s.%s' % (prefix, package))
+                else:
+                    result.append(package)
+                result += get_packages(absfile, result[-1])
+    return result
+
+def export(from_dir, to_dir,
+           blacklist=STD_BLACKLIST,
+           ignore_ext=IGNORED_EXTENSIONS,
+           verbose=True):
+    """make a mirror of from_dir in to_dir, omitting directories and files
+    listed in the black list
+    """
+    def make_mirror(arg, directory, fnames):
+        """walk handler"""
+        for norecurs in blacklist:
+            try:
+                fnames.remove(norecurs)
+            except ValueError:
+                pass
+        for filename in fnames:
+            # don't include binary files
+            if filename[-4:] in ignore_ext:
+                continue
+            if filename[-1] == '~':
+                continue
+            src = join(directory, filename)
+            dest = to_dir + src[len(from_dir):]
+            if verbose:
+                print >> sys.stderr, src, '->', dest
+            if os.path.isdir(src):
+                if not exists(dest):
+                    os.mkdir(dest)
+            else:
+                if exists(dest):
+                    os.remove(dest)
+                shutil.copy2(src, dest)
+    try:
+        os.mkdir(to_dir)
+    except OSError, ex:
+        # file exists ?
+        import errno
+        if ex.errno != errno.EEXIST:
+            raise
+    walk(from_dir, make_mirror, None)
+
+
+class MyInstallLib(install_lib.install_lib):
+    """extend install_lib command to handle  package __init__.py and
+    include_dirs variable if necessary
+    """
+    def run(self):
+        """overridden from install_lib class"""
+        install_lib.install_lib.run(self)
+        # manually install included directories if any
+        if include_dirs:
+            base = modname
+            for directory in include_dirs:
+                dest = join(self.install_dir, base, directory)
+                export(directory, dest, verbose=False)
 
 def install(**kwargs):
     """setup entry point"""
-    #kwargs['distname'] = modname
-    return setup(name=distname,
-                 version=version,
-                 license=license,
-                 description=short_desc,
-                 long_description=long_desc,
-                 author=author,
-                 author_email=author_email,
-                 url=web,
-                 data_files=data_files,
-                 **kwargs)
+    if USE_SETUPTOOLS:
+        if '--force-manifest' in sys.argv:
+            sys.argv.remove('--force-manifest')
+    # install-layout option was introduced in 2.5.3-1~exp1
+    elif sys.version_info < (2, 5, 4) and '--install-layout=deb' in sys.argv:
+        sys.argv.remove('--install-layout=deb')
+    if USE_SETUPTOOLS and install_requires:
+        kwargs['install_requires'] = install_requires
+        kwargs['dependency_links'] = dependency_links
+    return setup(name = distname,
+                 version = version,
+                 license = license,
+                 description = description,
+                 long_description = long_description,
+                 author = author,
+                 author_email = author_email,
+                 url = web,
+                 scripts = ensure_scripts(scripts),
+                 data_files = data_files,
+                 ext_modules = ext_modules,
+                 cmdclass = {'install_lib': MyInstallLib},
+                 **kwargs
+                 )
 
 if __name__ == '__main__' :
     install()
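A minimal sketch, not part of the patch, of how the rewritten skeleton setup.py derives install_requires from the optional __depends__ / __recommends__ dicts when setuptools is available (the cube metadata below is made up)::

    # hypothetical values, normally read from the cube's __pkginfo__.py
    __depends__ = {'cubicweb': '>= 3.8.0', 'lxml': None}
    __recommends__ = {'docutils': '>= 0.6'}

    requires = {}
    for entry in (__depends__, __recommends__):
        requires.update(entry)
    install_requires = [('%s %s' % (d, v or '')).strip()
                        for d, v in requires.items()]
    # e.g. ['cubicweb >= 3.8.0', 'lxml', 'docutils >= 0.6'] (dict ordering may vary)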
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/skeleton/sobjects.py.tmpl	Mon Jul 19 15:36:16 2010 +0200
@@ -0,0 +1,5 @@
+# copyright %(year)s %(author)s, all rights reserved.
+# contact %(author-web-site)s -- mailto:%(author-email)s
+#
+%(long-license)s
+"""%(distname)s repository side views, usually for notification"""
--- a/skeleton/views.py	Thu May 06 08:24:46 2010 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,20 +0,0 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-#
-# This file is part of CubicWeb.
-#
-# CubicWeb is free software: you can redistribute it and/or modify it under the
-# terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option)
-# any later version.
-#
-# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""cube-specific forms/views/actions/components
-
-"""
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/skeleton/views.py.tmpl	Mon Jul 19 15:36:16 2010 +0200
@@ -0,0 +1,5 @@
+# copyright %(year)s %(author)s, all rights reserved.
+# contact %(author-web-site)s -- mailto:%(author-email)s
+#
+%(long-license)s
+"""%(distname)s views/forms/actions/components for web ui"""
--- a/sobjects/notification.py	Thu May 06 08:24:46 2010 +0200
+++ b/sobjects/notification.py	Mon Jul 19 15:36:16 2010 +0200
@@ -15,9 +15,8 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""some views to handle notification on data changes
+"""some views to handle notification on data changes"""
 
-"""
 __docformat__ = "restructuredtext en"
 _ = unicode
 
--- a/sobjects/test/unittest_supervising.py	Thu May 06 08:24:46 2010 +0200
+++ b/sobjects/test/unittest_supervising.py	Mon Jul 19 15:36:16 2010 +0200
@@ -43,9 +43,9 @@
         # do some modification
         user = self.execute('INSERT CWUser X: X login "toto", X upassword "sosafe", X in_group G '
                             'WHERE G name "users"').get_entity(0, 0)
-        self.execute('SET X last_login_time NOW WHERE X eid %(x)s', {'x': user.eid}, 'x')
+        self.execute('SET X last_login_time NOW WHERE X eid %(x)s', {'x': user.eid})
         self.execute('DELETE Card B WHERE B title "une news !"')
-        self.execute('SET X bookmarked_by U WHERE X is Bookmark, U eid %(x)s', {'x': user.eid}, 'x')
+        self.execute('SET X bookmarked_by U WHERE X is Bookmark, U eid %(x)s', {'x': user.eid})
         self.execute('SET X content "duh?" WHERE X is Comment')
         self.execute('DELETE X comments Y WHERE Y is Card, Y title "une autre news !"')
         # check only one supervision email operation
@@ -104,7 +104,7 @@
     def test_nonregr1(self):
         session = self.session
         # do some unlogged modification
-        self.execute('SET X last_login_time NOW WHERE X eid %(x)s', {'x': session.user.eid}, 'x')
+        self.execute('SET X last_login_time NOW WHERE X eid %(x)s', {'x': session.user.eid})
         self.commit() # no crash
 
 
--- a/spa2rql.py	Thu May 06 08:24:46 2010 +0200
+++ b/spa2rql.py	Mon Jul 19 15:36:16 2010 +0200
@@ -15,9 +15,8 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""SPARQL -> RQL translator
+"""SPARQL -> RQL translator"""
 
-"""
 from logilab.common import make_domains
 from rql import TypeResolverException
 from fyzz.yappsparser import parse
@@ -76,7 +75,7 @@
             nbctypes = len(ctypes)
             ctypes &= varpossibletypes
             if not ctypes:
-                raise TypeResolverException()
+                raise TypeResolverException('No possible type')
             return len(ctypes) != nbctypes
         except KeyError:
             self.possible_types[var] = varpossibletypes
@@ -98,8 +97,8 @@
                         modified = True
                 # restrict predicates according to allowed subject var types
                 if subjvar in self.possible_types:
-                    yams_predicates = [(s, r, o) for s, r, o in yams_predicates
-                                       if s == '*' or s in self.possible_types[subjvar]]
+                    yams_predicates[:] = [(s, r, o) for s, r, o in yams_predicates
+                                          if s == '*' or s in self.possible_types[subjvar]]
                 if isinstance(obj, ast.SparqlVar):
                     # make a valid rql var name
                     objvar = obj.name.upper()
@@ -111,11 +110,11 @@
                             modified = True
                     # restrict predicates according to allowed object var types
                     if objvar in self.possible_types:
-                        yams_predicates = [(s, r, o) for s, r, o in yams_predicates
-                                           if o == '*' or o in self.possible_types[objvar]]
+                        yams_predicates[:] = [(s, r, o) for s, r, o in yams_predicates
+                                              if o == '*' or o in self.possible_types[objvar]]
                 # ensure this still make sense
                 if not yams_predicates:
-                    raise TypeResolverException()
+                    raise TypeResolverException('No yams predicate')
                 if len(yams_predicates) != nbchoices:
                     modified = True
 
@@ -197,7 +196,8 @@
                 raise UnsupportedQuery()
             # make a valid rql var name
             subjvar = subj.name.upper()
-            if predicate == ('', 'a'):
+            if predicate in [('', 'a'),
+                             ('http://www.w3.org/1999/02/22-rdf-syntax-ns#', 'type')]:
                 # special 'is' relation
                 if not isinstance(obj, tuple):
                     raise UnsupportedQuery()
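For clarity (not part of the patch): the predicate compared here is the (namespace, local name) tuple produced by the fyzz parser, so the special 'is' handling now covers both spellings of a type assertion, as exercised by test_base_rdftype in unittest_spa2rql.py further down::

    RDF_NS = 'http://www.w3.org/1999/02/22-rdf-syntax-ns#'
    # "?x a doap:Project"        -> predicate ('', 'a')
    # "?x rdf:type doap:Project" -> predicate (RDF_NS, 'type')
    # both now end up as the RQL restriction "X is Project"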
--- a/stdlib.txt	Thu May 06 08:24:46 2010 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,18 +0,0 @@
-addressbook
-basket
-blog
-book
-calendar
-comment
-company
-email
-file
-folder
-i18ncontent
-keyword
-link
-mailinglist
-person
-tag
-timeseries
-vcsfile
--- a/test/data/cubes/file/__pkginfo__.py	Thu May 06 08:24:46 2010 +0200
+++ b/test/data/cubes/file/__pkginfo__.py	Mon Jul 19 15:36:16 2010 +0200
@@ -26,48 +26,3 @@
 numversion = (1, 4, 3)
 version = '.'.join(str(num) for num in numversion)
 
-license = 'LGPL'
-copyright = '''Copyright (c) 2003-2010 LOGILAB S.A. (Paris, FRANCE).
-http://www.logilab.fr/ -- mailto:contact@logilab.fr'''
-
-author = "Logilab"
-author_email = "contact@logilab.fr"
-web = ''
-
-short_desc = "Raw file support for the CubicWeb framework"
-long_desc = """CubicWeb is a entities / relations bases knowledge management system
-developped at Logilab.
-.
-This package provides schema and views to store files and images in cubicweb
-applications.
-.
-"""
-
-from os import listdir
-from os.path import join
-
-CUBES_DIR = join('share', 'cubicweb', 'cubes')
-try:
-    data_files = [
-        [join(CUBES_DIR, 'file'),
-         [fname for fname in listdir('.')
-          if fname.endswith('.py') and fname != 'setup.py']],
-        [join(CUBES_DIR, 'file', 'data'),
-         [join('data', fname) for fname in listdir('data')]],
-        [join(CUBES_DIR, 'file', 'wdoc'),
-         [join('wdoc', fname) for fname in listdir('wdoc')]],
-        [join(CUBES_DIR, 'file', 'views'),
-         [join('views', fname) for fname in listdir('views') if fname.endswith('.py')]],
-        [join(CUBES_DIR, 'file', 'i18n'),
-         [join('i18n', fname) for fname in listdir('i18n')]],
-        [join(CUBES_DIR, 'file', 'migration'),
-         [join('migration', fname) for fname in listdir('migration')]],
-        ]
-except OSError:
-    # we are in an installed directory
-    pass
-
-
-cube_eid = 20320
-# used packages
-__use__ = ()
--- a/test/data/migration/depends.map	Thu May 06 08:24:46 2010 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,5 +0,0 @@
-0.0.2: 2.3.0
-0.0.3: 2.4.0
-# missing 0.0.4 entry, that's alright
-0.1.0: 2.6.0
-0.1.2: 2.10.0
--- a/test/unittest_cwconfig.py	Thu May 06 08:24:46 2010 +0200
+++ b/test/unittest_cwconfig.py	Mon Jul 19 15:36:16 2010 +0200
@@ -20,13 +20,16 @@
 """
 import sys
 import os
+import tempfile
 from os.path import dirname, join, abspath
 
 from logilab.common.modutils import cleanup_sys_modules
-from logilab.common.testlib import TestCase, unittest_main
+from logilab.common.testlib import (TestCase, unittest_main,
+                                    with_tempdir)
 from logilab.common.changelog import Version
 
 from cubicweb.devtools import ApptestConfiguration
+from cubicweb.cwconfig import _find_prefix
 
 def unabsolutize(path):
     parts = path.split(os.sep)
@@ -45,7 +48,7 @@
         self.config._cubes = ('email', 'file')
 
     def tearDown(self):
-        os.environ.pop('CW_CUBES_PATH', None)
+        ApptestConfiguration.CUBES_PATH = []
 
     def test_reorder_cubes(self):
         # jpl depends on email and file and comment
@@ -65,7 +68,7 @@
 
     def test_reorder_cubes_recommends(self):
         from cubes.comment import __pkginfo__ as comment_pkginfo
-        comment_pkginfo.__recommend__ = ('file',)
+        comment_pkginfo.__recommends_cubes__ = {'file': None}
         try:
             # email recommends comment
             # comment recommends file
@@ -78,7 +81,7 @@
             self.assertEquals(self.config.reorder_cubes(('comment', 'forge', 'email', 'file')),
                               ('forge', 'email', 'comment', 'file'))
         finally:
-            comment_pkginfo.__use__ = ()
+            comment_pkginfo.__recommends_cubes__ = {}
 
 
 #     def test_vc_config(self):
@@ -104,11 +107,11 @@
         # make sure we don't import the email cube, but the stdlib email package
         import email
         self.assertNotEquals(dirname(email.__file__), self.config.CUBES_DIR)
-        os.environ['CW_CUBES_PATH'] = CUSTOM_CUBES_DIR
+        self.config.__class__.CUBES_PATH = [CUSTOM_CUBES_DIR]
         self.assertEquals(self.config.cubes_search_path(),
                           [CUSTOM_CUBES_DIR, self.config.CUBES_DIR])
-        os.environ['CW_CUBES_PATH'] = os.pathsep.join([
-            CUSTOM_CUBES_DIR, self.config.CUBES_DIR, 'unexistant'])
+        self.config.__class__.CUBES_PATH = [CUSTOM_CUBES_DIR,
+                                            self.config.CUBES_DIR, 'unexistant']
         # filter out unexistant and duplicates
         self.assertEquals(self.config.cubes_search_path(),
                           [CUSTOM_CUBES_DIR,
@@ -127,6 +130,91 @@
         from cubes import file
         self.assertEquals(file.__path__, [join(CUSTOM_CUBES_DIR, 'file')])
 
+class FindPrefixTC(TestCase):
+    def make_dirs(self, *args):
+        path = join(tempfile.tempdir, *args)
+        if not os.path.exists(path):
+            os.makedirs(path)
+        return path
+
+    def make_file(self, *args):
+        self.make_dirs(*args[: -1])
+        file_path = join(tempfile.tempdir, *args)
+        file_obj = open(file_path, 'w')
+        file_obj.write('""" None """')
+        file_obj.close()
+        return file_path
+
+    @with_tempdir
+    def test_samedir(self):
+        prefix = tempfile.tempdir
+        self.make_dirs('share', 'cubicweb')
+        self.assertEquals(_find_prefix(prefix), prefix)
+
+    @with_tempdir
+    def test_samedir_filepath(self):
+        prefix = tempfile.tempdir
+        self.make_dirs('share', 'cubicweb')
+        file_path = self.make_file('bob.py')
+        self.assertEquals(_find_prefix(file_path), prefix)
+
+    @with_tempdir
+    def test_dir_inside_prefix(self):
+        prefix = tempfile.tempdir
+        self.make_dirs('share', 'cubicweb')
+        dir_path = self.make_dirs('bob')
+        self.assertEquals(_find_prefix(dir_path), prefix)
+
+    @with_tempdir
+    def test_file_in_dir_inside_prefix(self):
+        prefix = tempfile.tempdir
+        self.make_dirs('share', 'cubicweb')
+        file_path = self.make_file('bob', 'toto.py')
+        self.assertEquals(_find_prefix(file_path), prefix)
+
+    @with_tempdir
+    def test_file_in_deeper_dir_inside_prefix(self):
+        prefix = tempfile.tempdir
+        self.make_dirs('share', 'cubicweb')
+        file_path = self.make_file('bob', 'pyves', 'alain', 'adim', 'syt', 'toto.py')
+        self.assertEquals(_find_prefix(file_path), prefix)
+
+    @with_tempdir
+    def test_multiple_candidate_prefix(self):
+        self.make_dirs('share', 'cubicweb')
+        prefix = self.make_dirs('bob')
+        self.make_dirs('bob', 'share', 'cubicweb')
+        file_path = self.make_file('bob', 'pyves', 'alain', 'adim', 'syt', 'toto.py')
+        self.assertEquals(_find_prefix(file_path), prefix)
+
+    @with_tempdir
+    def test_sister_candidate_prefix(self):
+        prefix = tempfile.tempdir
+        self.make_dirs('share', 'cubicweb')
+        self.make_dirs('bob', 'share', 'cubicweb')
+        file_path = self.make_file('bell', 'toto.py')
+        self.assertEquals(_find_prefix(file_path), prefix)
+
+    @with_tempdir
+    def test_multiple_parent_candidate_prefix(self):
+        self.make_dirs('share', 'cubicweb')
+        prefix = self.make_dirs('share', 'cubicweb', 'bob')
+        self.make_dirs('share', 'cubicweb', 'bob', 'share', 'cubicweb')
+        file_path = self.make_file('share', 'cubicweb', 'bob', 'pyves', 'alain', 'adim', 'syt', 'toto.py')
+        self.assertEquals(_find_prefix(file_path), prefix)
+
+    @with_tempdir
+    def test_upper_candidate_prefix(self):
+        prefix = tempfile.tempdir
+        self.make_dirs('share', 'cubicweb')
+        self.make_dirs('bell','bob',  'share', 'cubicweb')
+        file_path = self.make_file('bell', 'toto.py')
+        self.assertEquals(_find_prefix(file_path), prefix)
+
+    @with_tempdir
+    def test_no_prefix(self):
+        prefix = tempfile.tempdir
+        self.assertEquals(_find_prefix(prefix), sys.prefix)
 
 if __name__ == '__main__':
     unittest_main()
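A hedged sketch of the lookup behaviour these FindPrefixTC tests pin down; this is not the actual cubicweb.cwconfig._find_prefix implementation, only the rule the assertions above describe (walk up from the given path until a directory containing share/cubicweb is found, else fall back to sys.prefix)::

    import sys
    from os.path import dirname, isdir, join

    def find_prefix_sketch(start):
        # illustrative only: the closest ancestor holding share/cubicweb wins
        prefix = start if isdir(start) else dirname(start)
        old = None
        while prefix != old:
            if isdir(join(prefix, 'share', 'cubicweb')):
                return prefix
            old, prefix = prefix, dirname(prefix)
        return sys.prefix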
--- a/test/unittest_cwctl.py	Thu May 06 08:24:46 2010 +0200
+++ b/test/unittest_cwctl.py	Mon Jul 19 15:36:16 2010 +0200
@@ -23,15 +23,8 @@
 from cStringIO import StringIO
 from logilab.common.testlib import TestCase, unittest_main
 
-if os.environ.get('APYCOT_ROOT'):
-    root = os.environ['APYCOT_ROOT']
-    CUBES_DIR = '%s/local/share/cubicweb/cubes/' % root
-    os.environ['CW_CUBES_PATH'] = CUBES_DIR
-    REGISTRY_DIR = '%s/etc/cubicweb.d/' % root
-    os.environ['CW_INSTANCES_DIR'] = REGISTRY_DIR
-
 from cubicweb.cwconfig import CubicWebConfiguration
-CubicWebConfiguration.load_cwctl_plugins()
+CubicWebConfiguration.load_cwctl_plugins() # XXX necessary?
 
 class CubicWebCtlTC(TestCase):
     def setUp(self):
--- a/test/unittest_dbapi.py	Thu May 06 08:24:46 2010 +0200
+++ b/test/unittest_dbapi.py	Mon Jul 19 15:36:16 2010 +0200
@@ -53,21 +53,6 @@
         self.assertRaises(ProgrammingError, cnx.user, None)
         self.assertRaises(ProgrammingError, cnx.describe, 1)
 
-    def test_session_data_api(self):
-        cnx = self.login('anon')
-        self.assertEquals(cnx.get_session_data('data'), None)
-        self.assertEquals(cnx.session_data(), {})
-        cnx.set_session_data('data', 4)
-        self.assertEquals(cnx.get_session_data('data'), 4)
-        self.assertEquals(cnx.session_data(), {'data': 4})
-        cnx.del_session_data('data')
-        cnx.del_session_data('whatever')
-        self.assertEquals(cnx.get_session_data('data'), None)
-        self.assertEquals(cnx.session_data(), {})
-        cnx.session_data()['data'] = 4
-        self.assertEquals(cnx.get_session_data('data'), 4)
-        self.assertEquals(cnx.session_data(), {'data': 4})
-
     def test_shared_data_api(self):
         cnx = self.login('anon')
         self.assertEquals(cnx.get_shared_data('data'), None)
--- a/test/unittest_entity.py	Thu May 06 08:24:46 2010 +0200
+++ b/test/unittest_entity.py	Mon Jul 19 15:36:16 2010 +0200
@@ -57,8 +57,8 @@
         p = req.create_entity('Personne', nom=u'toto')
         oe = req.create_entity('Note', type=u'x')
         self.execute('SET T ecrit_par U WHERE T eid %(t)s, U eid %(u)s',
-                     {'t': oe.eid, 'u': p.eid}, ('t','u'))
-        self.execute('SET TAG tags X WHERE X eid %(x)s', {'x': oe.eid}, 'x')
+                     {'t': oe.eid, 'u': p.eid})
+        self.execute('SET TAG tags X WHERE X eid %(x)s', {'x': oe.eid})
         e = req.create_entity('Note', type=u'z')
         e.copy_relations(oe.eid)
         self.assertEquals(len(e.ecrit_par), 1)
@@ -73,7 +73,7 @@
         oe = req.create_entity('Note', type=u'x')
         self.schema['ecrit_par'].rdef('Note', 'Personne').composite = 'subject'
         self.execute('SET T ecrit_par U WHERE T eid %(t)s, U eid %(u)s',
-                     {'t': oe.eid, 'u': p.eid}, ('t','u'))
+                     {'t': oe.eid, 'u': p.eid})
         e = req.create_entity('Note', type=u'z')
         e.copy_relations(oe.eid)
         self.failIf(e.ecrit_par)
@@ -82,12 +82,12 @@
     def test_copy_with_composite(self):
         user = self.user()
         adeleid = self.execute('INSERT EmailAddress X: X address "toto@logilab.org", U use_email X WHERE U login "admin"')[0][0]
-        e = self.entity('Any X WHERE X eid %(x)s', {'x':user.eid}, 'x')
+        e = self.execute('Any X WHERE X eid %(x)s', {'x': user.eid}).get_entity(0, 0)
         self.assertEquals(e.use_email[0].address, "toto@logilab.org")
         self.assertEquals(e.use_email[0].eid, adeleid)
         usereid = self.execute('INSERT CWUser X: X login "toto", X upassword "toto", X in_group G '
                                'WHERE G name "users"')[0][0]
-        e = self.entity('Any X WHERE X eid %(x)s', {'x':usereid}, 'x')
+        e = self.execute('Any X WHERE X eid %(x)s', {'x': usereid}).get_entity(0, 0)
         e.copy_relations(user.eid)
         self.failIf(e.use_email)
         self.failIf(e.primary_email)
@@ -100,14 +100,14 @@
         user.fire_transition('deactivate')
         self.commit()
         eid2 = self.execute('INSERT CWUser X: X login "tutu", X upassword %(pwd)s', {'pwd': 'toto'})[0][0]
-        e = self.entity('Any X WHERE X eid %(x)s', {'x': eid2}, 'x')
+        e = self.execute('Any X WHERE X eid %(x)s', {'x': eid2}).get_entity(0, 0)
         e.copy_relations(user.eid)
         self.commit()
         e.clear_related_cache('in_state', 'subject')
         self.assertEquals(e.state, 'activated')
 
     def test_related_cache_both(self):
-        user = self.entity('Any X WHERE X eid %(x)s', {'x':self.user().eid}, 'x')
+        user = self.execute('Any X WHERE X eid %(x)s', {'x':self.user().eid}).get_entity(0, 0)
         adeleid = self.execute('INSERT EmailAddress X: X address "toto@logilab.org", U use_email X WHERE U login "admin"')[0][0]
         self.commit()
         self.assertEquals(user._related_cache, {})
@@ -248,10 +248,10 @@
         #rql = email.unrelated_rql('use_email', 'Person', 'object')[0]
         #self.assertEquals(rql, '')
         self.login('anon')
-        email = self.execute('Any X WHERE X eid %(x)s', {'x': email.eid}, 'x').get_entity(0, 0)
+        email = self.execute('Any X WHERE X eid %(x)s', {'x': email.eid}).get_entity(0, 0)
         rql = email.unrelated_rql('use_email', 'CWUser', 'object')[0]
         self.assertEquals(rql, 'Any S,AA,AB,AC,AD ORDERBY AA '
-                          'WHERE NOT S use_email O, O eid %(x)s, S is CWUser, S login AA, S firstname AB, S surname AC, S modification_date AD, '
+                          'WHERE NOT EXISTS(S use_email O), O eid %(x)s, S is CWUser, S login AA, S firstname AB, S surname AC, S modification_date AD, '
                           'A eid %(B)s, EXISTS(S identity A, NOT A in_group C, C name "guests", C is CWGroup)')
         #rql = email.unrelated_rql('use_email', 'Person', 'object')[0]
         #self.assertEquals(rql, '')
@@ -273,7 +273,7 @@
         unrelated = [r[0] for r in e.unrelated('tags', 'Personne', 'subject')]
         self.failUnless(p.eid in unrelated)
         self.execute('SET X tags Y WHERE X is Tag, Y is Personne')
-        e = self.entity('Any X WHERE X is Tag')
+        e = self.execute('Any X WHERE X is Tag').get_entity(0, 0)
         unrelated = [r[0] for r in e.unrelated('tags', 'Personne', 'subject')]
         self.failIf(p.eid in unrelated)
 
@@ -294,7 +294,7 @@
         self.assertEquals([x.address for x in rset.entities()], [u'hop'])
         self.create_user('toto')
         self.login('toto')
-        email = self.execute('Any X WHERE X eid %(x)s', {'x': email.eid}, 'x').get_entity(0, 0)
+        email = self.execute('Any X WHERE X eid %(x)s', {'x': email.eid}).get_entity(0, 0)
         rset = email.unrelated('use_email', 'CWUser', 'object')
         self.assertEquals([x.login for x in rset.entities()], ['toto'])
         user = self.request().user
@@ -304,7 +304,7 @@
         rset = user.unrelated('use_email', 'EmailAddress', 'subject')
         self.assertEquals([x.address for x in rset.entities()], [])
         self.login('anon')
-        email = self.execute('Any X WHERE X eid %(x)s', {'x': email.eid}, 'x').get_entity(0, 0)
+        email = self.execute('Any X WHERE X eid %(x)s', {'x': email.eid}).get_entity(0, 0)
         rset = email.unrelated('use_email', 'CWUser', 'object')
         self.assertEquals([x.login for x in rset.entities()], [])
         user = self.request().user
@@ -356,8 +356,15 @@
                             data_encoding=u'ascii', data_name=u'toto.py')
         from cubicweb import mttransforms
         if mttransforms.HAS_PYGMENTS_TRANSFORMS:
-            self.assertEquals(e.printable_value('data'),
-                              '''<div class="highlight"><pre><span class="k">lambda</span> <span class="n">x</span><span class="p">:</span> <span class="mi">1</span>
+            import pygments
+            if tuple(int(i) for i in pygments.__version__.split('.')[:2]) >= (1, 3):
+                self.assertEquals(e.printable_value('data'),
+                                  '''<div class="highlight"><pre><span class="k">lambda</span> <span class="n">x</span><span class="p">:</span> <span class="mi">1</span>
+</pre></div>
+''')
+            else:
+                self.assertEquals(e.printable_value('data'),
+                                  '''<div class="highlight"><pre><span class="k">lambda</span> <span class="n">x</span><span class="p">:</span> <span class="mf">1</span>
 </pre></div>
 ''')
         else:
@@ -452,7 +459,7 @@
         eid = session.execute(
             'INSERT TrInfo X: X comment "zou", X wf_info_for U, X from_state S1, X to_state S2 '
             'WHERE U login "admin", S1 name "activated", S2 name "deactivated"')[0][0]
-        trinfo = self.entity('Any X WHERE X eid %(x)s', {'x': eid}, 'x')
+        trinfo = self.execute('Any X WHERE X eid %(x)s', {'x': eid}).get_entity(0, 0)
         trinfo.complete()
         self.failUnless(isinstance(trinfo['creation_date'], datetime))
         self.failUnless(trinfo.relation_cached('from_state', 'subject'))
@@ -462,9 +469,9 @@
 
     def test_request_cache(self):
         req = self.request()
-        user = self.entity('CWUser X WHERE X login "admin"', req=req)
+        user = self.execute('CWUser X WHERE X login "admin"', req=req).get_entity(0, 0)
         state = user.in_state[0]
-        samestate = self.entity('State X WHERE X name "activated"', req=req)
+        samestate = self.execute('State X WHERE X name "activated"', req=req).get_entity(0, 0)
         self.failUnless(state is samestate)
 
     def test_rest_path(self):
@@ -494,7 +501,7 @@
         self.assertEquals(person.prenom, u'adrien')
         self.assertEquals(person.nom, u'di mascio')
         person.set_attributes(prenom=u'sylvain', nom=u'thénault')
-        person = self.entity('Personne P') # XXX retreival needed ?
+        person = self.execute('Personne P').get_entity(0, 0) # XXX retrieval needed ?
         self.assertEquals(person.prenom, u'sylvain')
         self.assertEquals(person.nom, u'thénault')
 
--- a/test/unittest_rqlrewrite.py	Thu May 06 08:24:46 2010 +0200
+++ b/test/unittest_rqlrewrite.py	Mon Jul 19 15:36:16 2010 +0200
@@ -24,7 +24,7 @@
 from rql import parse, nodes, RQLHelper
 
 from cubicweb import Unauthorized
-from cubicweb.schema import RRQLExpression
+from cubicweb.schema import RRQLExpression, ERQLExpression
 from cubicweb.rqlrewrite import RQLRewriter
 from cubicweb.devtools import repotest, TestServerConfiguration
 
@@ -350,6 +350,20 @@
         self.failUnlessEqual(rqlst.as_string(),
                              u"Any C WHERE C is Card, EXISTS(C owned_by A, A is CWUser)")
 
+    def test_rqlexpr_not_relation1(self):
+        constraint = RRQLExpression('X owned_by Z, Z login "hop"', 'X')
+        rqlst = parse('Affaire A WHERE NOT EXISTS(A documented_by C)')
+        rewrite(rqlst, {('C', 'X'): (constraint,)}, {}, 'X')
+        self.failUnlessEqual(rqlst.as_string(),
+                             u'Any A WHERE NOT EXISTS(A documented_by C, EXISTS(C owned_by B, B login "hop", B is CWUser), C is Card), A is Affaire')
+
+    def test_rqlexpr_not_relation2(self):
+        constraint = RRQLExpression('X owned_by Z, Z login "hop"', 'X')
+        rqlst = rqlhelper.parse('Affaire A WHERE NOT A documented_by C', annotate=False)
+        rewrite(rqlst, {('C', 'X'): (constraint,)}, {}, 'X')
+        self.failUnlessEqual(rqlst.as_string(),
+                             u'Any A WHERE NOT EXISTS(A documented_by C, EXISTS(C owned_by B, B login "hop", B is CWUser), C is Card), A is Affaire')
+
 
 if __name__ == '__main__':
     unittest_main()
--- a/test/unittest_rset.py	Thu May 06 08:24:46 2010 +0200
+++ b/test/unittest_rset.py	Mon Jul 19 15:36:16 2010 +0200
@@ -21,6 +21,7 @@
 """
 
 from urlparse import urlsplit
+import pickle
 
 from rql import parse
 
@@ -84,6 +85,9 @@
             params2 = dict(pair.split('=') for pair in info1[3].split('&'))
             self.assertDictEquals(params1, params2)
 
+    def test_pickle(self):
+        del self.rset.req
+        self.assertEquals(len(pickle.dumps(self.rset)), 392)
 
     def test_build_url(self):
         req = self.request()
@@ -225,7 +229,7 @@
     def test_get_entity_simple(self):
         self.request().create_entity('CWUser', login=u'adim', upassword='adim',
                         surname=u'di mascio', firstname=u'adrien')
-        e = self.entity('Any X,T WHERE X login "adim", X surname T')
+        e = self.execute('Any X,T WHERE X login "adim", X surname T').get_entity(0, 0)
         self.assertEquals(e['surname'], 'di mascio')
         self.assertRaises(KeyError, e.__getitem__, 'firstname')
         self.assertRaises(KeyError, e.__getitem__, 'creation_date')
@@ -397,5 +401,9 @@
                           '(Any X,N WHERE X is CWGroup, X name N)'
                           ')')
 
+    def test_count_users_by_date(self):
+        rset = self.execute('Any D, COUNT(U) GROUPBY D WHERE U is CWUser, U creation_date D')
+        self.assertEquals(rset.related_entity(0,0), (None, None))
+
 if __name__ == '__main__':
     unittest_main()
--- a/test/unittest_selectors.py	Thu May 06 08:24:46 2010 +0200
+++ b/test/unittest_selectors.py	Mon Jul 19 15:36:16 2010 +0200
@@ -100,6 +100,42 @@
         csel = AndSelector(Selector(), sel)
         self.assertIs(csel.search_selector(implements), sel)
 
+    def test_inplace_and(self):
+        selector = _1_()
+        selector &= _1_()
+        selector &= _1_()
+        self.assertEquals(selector(None), 3)
+        selector = _1_()
+        selector &= _0_()
+        selector &= _1_()
+        self.assertEquals(selector(None), 0)
+        selector = _0_()
+        selector &= _1_()
+        selector &= _1_()
+        self.assertEquals(selector(None), 0)
+        selector = _0_()
+        selector &= _0_()
+        selector &= _0_()
+        self.assertEquals(selector(None), 0)
+
+    def test_inplace_or(self):
+        selector = _1_()
+        selector |= _1_()
+        selector |= _1_()
+        self.assertEquals(selector(None), 1)
+        selector = _1_()
+        selector |= _0_()
+        selector |= _1_()
+        self.assertEquals(selector(None), 1)
+        selector = _0_()
+        selector |= _1_()
+        selector |= _1_()
+        self.assertEquals(selector(None), 1)
+        selector = _0_()
+        selector |= _0_()
+        selector |= _0_()
+        self.assertEquals(selector(None), 0)
+
 
 class ImplementsSelectorTC(CubicWebTC):
     def test_etype_priority(self):
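An illustrative stand-in, not the real cubicweb selector classes, for the composition rule the new in-place tests assert: an & chain sums the individual scores and short-circuits to 0, an | chain returns the first non-zero score::

    def and_chain(*selectors):
        # sum of scores, 0 as soon as one selector scores 0
        def selector(*args, **kwargs):
            total = 0
            for sel in selectors:
                score = sel(*args, **kwargs)
                if not score:
                    return 0
                total += score
            return total
        return selector

    def or_chain(*selectors):
        # first non-zero score wins
        def selector(*args, **kwargs):
            for sel in selectors:
                score = sel(*args, **kwargs)
                if score:
                    return score
            return 0
        return selector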
--- a/test/unittest_spa2rql.py	Thu May 06 08:24:46 2010 +0200
+++ b/test/unittest_spa2rql.py	Mon Jul 19 15:36:16 2010 +0200
@@ -23,6 +23,7 @@
 xy.add_equivalence('Project', 'doap:Project')
 xy.add_equivalence('Project creation_date', 'doap:Project doap:created')
 xy.add_equivalence('Project name', 'doap:Project doap:name')
+xy.add_equivalence('Project name', 'doap:Project dc:title')
 
 
 config = TestServerConfiguration('data')
@@ -50,6 +51,14 @@
       ?project a doap:Project;
     }''', 'Any PROJECT WHERE PROJECT is Project')
 
+    def test_base_rdftype(self):
+        self._test('''
+    PREFIX doap: <http://usefulinc.com/ns/doap#>
+    PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
+    SELECT ?project
+    WHERE  {
+      ?project rdf:type doap:Project.
+    }''', 'Any PROJECT WHERE PROJECT is Project')
 
     def test_base_attr_sel(self):
         self._test('''
@@ -171,6 +180,16 @@
               doap:name "cubicweb".
     }''', 'Any PROJECT WHERE PROJECT name %(a)s, PROJECT is Project', {'a': 'cubicweb'})
 
+    def test_dctitle_both_project_cwuser(self):
+        self._test('''
+    PREFIX doap: <http://usefulinc.com/ns/doap#>
+    PREFIX dc: <http://purl.org/dc/elements/1.1/>
+    SELECT ?project ?title
+    WHERE  {
+      ?project a doap:Project;
+              dc:title ?title.
+    }''', 'Any PROJECT, TITLE WHERE PROJECT name TITLE, PROJECT is Project')
+
 # # Two elements in the group
 # PREFIX :  <http://example.org/ns#>
 # SELECT *
--- a/test/unittest_uilib.py	Thu May 06 08:24:46 2010 +0200
+++ b/test/unittest_uilib.py	Mon Jul 19 15:36:16 2010 +0200
@@ -94,6 +94,54 @@
             got = uilib.text_cut(text, 30)
             self.assertEquals(got, expected)
 
+    def test_soup2xhtml_1_1(self):
+        self.assertEquals(uilib.soup2xhtml('hop <div>', 'ascii'),
+                          'hop <div/>')
+        self.assertEquals(uilib.soup2xhtml('<div> hop', 'ascii'),
+                          '<div> hop</div>')
+        self.assertEquals(uilib.soup2xhtml('hop <div> hop', 'ascii'),
+                          'hop <div> hop</div>')
+
+    def test_soup2xhtml_1_2(self):
+        self.assertEquals(uilib.soup2xhtml('hop </div>', 'ascii'),
+                          'hop ')
+        self.assertEquals(uilib.soup2xhtml('</div> hop', 'ascii'),
+                          '<div/> hop')
+        self.assertEquals(uilib.soup2xhtml('hop </div> hop', 'ascii'),
+                          '<div>hop </div> hop')
+
+    def test_soup2xhtml_2_1(self):
+        self.assertEquals(uilib.soup2xhtml('hop <body>', 'ascii'),
+                          'hop ')
+        self.assertEquals(uilib.soup2xhtml('<body> hop', 'ascii'),
+                          ' hop')
+        self.assertEquals(uilib.soup2xhtml('hop <body> hop', 'ascii'),
+                          'hop  hop')
+
+    def test_soup2xhtml_2_2(self):
+        self.assertEquals(uilib.soup2xhtml('hop </body>', 'ascii'),
+                          'hop ')
+        self.assertEquals(uilib.soup2xhtml('</body> hop', 'ascii'),
+                          ' hop')
+        self.assertEquals(uilib.soup2xhtml('hop </body> hop', 'ascii'),
+                          'hop  hop')
+
+    def test_soup2xhtml_3_1(self):
+        self.assertEquals(uilib.soup2xhtml('hop <html>', 'ascii'),
+                          'hop ')
+        self.assertEquals(uilib.soup2xhtml('<html> hop', 'ascii'),
+                          ' hop')
+        self.assertEquals(uilib.soup2xhtml('hop <html> hop', 'ascii'),
+                          'hop  hop')
+
+    def test_soup2xhtml_3_2(self):
+        self.assertEquals(uilib.soup2xhtml('hop </html>', 'ascii'),
+                          'hop ')
+        self.assertEquals(uilib.soup2xhtml('</html> hop', 'ascii'),
+                          ' hop')
+        self.assertEquals(uilib.soup2xhtml('hop </html> hop', 'ascii'),
+                          'hop  hop')
+
 if __name__ == '__main__':
     unittest_main()
 
--- a/toolsutils.py	Thu May 06 08:24:46 2010 +0200
+++ b/toolsutils.py	Mon Jul 19 15:36:16 2010 +0200
@@ -116,10 +116,9 @@
     else:
         print 'no diff between %s and %s' % (appl_file, ref_file)
 
-
+SKEL_EXCLUDE = ('*.py[co]', '*.orig', '*~', '*_flymake.py')
 def copy_skeleton(skeldir, targetdir, context,
-                  exclude=('*.py[co]', '*.orig', '*~', '*_flymake.py'),
-                  askconfirm=False):
+                  exclude=SKEL_EXCLUDE, askconfirm=False):
     import shutil
     from fnmatch import fnmatch
     skeldir = normpath(skeldir)
@@ -197,7 +196,7 @@
                 config_file, ex)
     return config
 
-def env_path(env_var, default, name):
+def env_path(env_var, default, name, checkexists=True):
     """get a path specified in a variable or using the default value and return
     it.
 
@@ -216,8 +215,8 @@
     :raise `ConfigurationError`: if the returned path does not exist
     """
     path = environ.get(env_var, default)
-    if not exists(path):
-        raise ConfigurationError('%s path %s doesn\'t exist' % (name, path))
+    if checkexists and not exists(path):
+        raise ConfigurationError('%s directory %s doesn\'t exist' % (name, path))
     return abspath(path)
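A hedged usage sketch of the new checkexists flag (not part of the patch; the default path and label below are made up), useful when the resolved directory may not exist yet::

    from cubicweb.toolsutils import env_path

    instances_dir = env_path('CW_INSTANCES_DIR', '/etc/cubicweb.d/',
                             'instances', checkexists=False)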
 
 
--- a/transaction.py	Thu May 06 08:24:46 2010 +0200
+++ b/transaction.py	Mon Jul 19 15:36:16 2010 +0200
@@ -61,7 +61,7 @@
         none if not found.
         """
         return self.req.execute('Any X WHERE X eid %(x)s',
-                                {'x': self.user_eid}, 'x').get_entity(0, 0)
+                                {'x': self.user_eid}).get_entity(0, 0)
 
     def actions_list(self, public=True):
         """return an ordered list of action effectued during that transaction
--- a/uilib.py	Thu May 06 08:24:46 2010 +0200
+++ b/uilib.py	Mon Jul 19 15:36:16 2010 +0200
@@ -18,9 +18,10 @@
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """user interface libraries
 
-contains some functions designed to help implementation of cubicweb user interface
+contains some functions designed to help implementation of cubicweb user
+interface.
+"""
 
-"""
 __docformat__ = "restructuredtext en"
 
 import csv
@@ -123,7 +124,7 @@
 
 fallback_safe_cut = safe_cut
 
-
+REM_ROOT_HTML_TAGS = re.compile('</(body|html)>', re.U)
 try:
     from lxml import etree
 except (ImportError, AttributeError):
@@ -133,12 +134,13 @@
 
     def soup2xhtml(data, encoding):
         """tidy (at least try) html soup and return the result
+
         Note: the function considers a string with no surrounding tag as valid
               if <div>`data`</div> can be parsed by an XML parser
         """
-        # normalize line break
-        # see http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.7.1
-        data = u'\n'.join(data.splitlines())
+        # remove spurious </body> and </html> tags, then normalize line break
+        # (see http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.7.1)
+        data = REM_ROOT_HTML_TAGS.sub('', u'\n'.join(data.splitlines()))
         # XXX lxml 1.1 support still needed ?
         xmltree = etree.HTML('<div>%s</div>' % data)
         # NOTE: lxml 1.1 (etch platforms) doesn't recognize
@@ -146,7 +148,13 @@
         #       why we specify an encoding and re-decode to unicode later
         body = etree.tostring(xmltree[0], encoding=encoding)
         # remove <body> and </body> and decode to unicode
-        return body[11:-13].decode(encoding)
+        snippet = body[6:-7].decode(encoding)
+        # take care to bad xhtml (for instance starting with </div>) which
+        # may mess with the <div> we added below. Only remove it if it's
+        # still there...
+        if snippet.startswith('<div>') and snippet.endswith('</div>'):
+            snippet = snippet[5:-6]
+        return snippet
 
     if hasattr(etree.HTML('<div>test</div>'), 'iter'):
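A small usage sketch of the </body>/</html> pre-processing added above (illustration only; soup2xhtml itself then re-parses the snippet with lxml)::

    import re
    REM_ROOT_HTML_TAGS = re.compile('</(body|html)>', re.U)
    print REM_ROOT_HTML_TAGS.sub('', u'hop </body> hop')   # -> u'hop  hop'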
 
--- a/utils.py	Thu May 06 08:24:46 2010 +0200
+++ b/utils.py	Mon Jul 19 15:36:16 2010 +0200
@@ -322,35 +322,6 @@
                                                  self.body.getvalue())
 
 
-def _pdf_conversion_availability():
-    try:
-        import pysixt
-    except ImportError:
-        return False
-    from subprocess import Popen, STDOUT
-    if not os.path.isfile('/usr/bin/fop'):
-        return False
-    try:
-        Popen(['/usr/bin/fop', '-q'],
-              stdout=open(os.devnull, 'w'),
-              stderr=STDOUT)
-    except OSError, e:
-        getLogger('cubicweb').info('fop not usable (%s)', e)
-        return False
-    return True
-
-def can_do_pdf_conversion(__answer_cache=[]):
-    """pdf conversion depends on
-    * pysixt (python package)
-    * fop 0.9x
-
-    NOTE: actual check is done by _pdf_conversion_availability and
-    result is cached
-    """
-    if not __answer_cache: # first time, not in cache
-        __answer_cache.append(_pdf_conversion_availability())
-    return __answer_cache[0]
-
 try:
     # may not be there if cubicweb-web not installed
     if sys.version_info < (2,6):
--- a/view.py	Thu May 06 08:24:46 2010 +0200
+++ b/view.py	Mon Jul 19 15:36:16 2010 +0200
@@ -15,10 +15,8 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""abstract views and templates classes for CubicWeb web client
+"""abstract views and templates classes for CubicWeb web client"""
 
-
-"""
 __docformat__ = "restructuredtext en"
 _ = unicode
 
@@ -53,7 +51,6 @@
  cubicweb:accesskey         CDATA   #IMPLIED
  cubicweb:actualrql         CDATA   #IMPLIED
  cubicweb:dataurl           CDATA   #IMPLIED
- cubicweb:displayactions    CDATA   #IMPLIED
  cubicweb:facetName         CDATA   #IMPLIED
  cubicweb:facetargs         CDATA   #IMPLIED
  cubicweb:fallbackvid       CDATA   #IMPLIED
@@ -74,6 +71,7 @@
  cubicweb:tindex            CDATA   #IMPLIED
  cubicweb:tlunit            CDATA   #IMPLIED
  cubicweb:type              CDATA   #IMPLIED
+ cubicweb:unselimg          CDATA   #IMPLIED
  cubicweb:uselabel          CDATA   #IMPLIED
  cubicweb:value             CDATA   #IMPLIED
  cubicweb:variables         CDATA   #IMPLIED
--- a/vregistry.py	Thu May 06 08:24:46 2010 +0200
+++ b/vregistry.py	Mon Jul 19 15:36:16 2010 +0200
@@ -29,9 +29,8 @@
   current state (req, rset, row, col). At the end of the selection, if
   a appobject class has been found, an instance of this class is
   returned. The selector is instantiated at appobject registration
-
+"""
 
-"""
 __docformat__ = "restructuredtext en"
 
 import sys
@@ -385,7 +384,7 @@
             registry.register(obj, oid=oid, clear=clear)
             self.debug('registered appobject %s in registry %s with id %s',
                        vname, registryname, oid or class_regid(obj))
-        self._loadedmods[obj.__module__][classid(obj)] = obj
+        self._loadedmods.setdefault(obj.__module__, {})[classid(obj)] = obj
 
     def unregister(self, obj, registryname=None):
         """unregister `obj` application object from the registry `registryname` or
@@ -414,11 +413,11 @@
         self._loadedmods = {}
         return filemods
 
-    def register_objects(self, path, force_reload=False, extrapath=None):
+    def register_objects(self, path, extrapath=None):
         # load views from each directory in the instance's path
         filemods = self.init_registration(path, extrapath)
         for filepath, modname in filemods:
-            self.load_file(filepath, modname, force_reload)
+            self.load_file(filepath, modname)
         self.initialization_completed()
 
     def initialization_completed(self):
@@ -448,12 +447,15 @@
                 mdate = self._mdate(fileordir)
                 if mdate is None:
                     continue # backup file, see _mdate implementation
+                elif "flymake" in fileordir:
+                    # flymake + pylint in use, don't consider these: they will corrupt the registry
+                    continue
                 if fileordir not in lastmodifs or lastmodifs[fileordir] < mdate:
                     self.info('File %s changed since last visit', fileordir)
                     return True
         return False
 
-    def load_file(self, filepath, modname, force_reload=False):
+    def load_file(self, filepath, modname):
         """load app objects from a python file"""
         from logilab.common.modutils import load_module_from_name
         if modname in self._loadedmods:
@@ -462,12 +464,15 @@
         mdate = self._mdate(filepath)
         if mdate is None:
             return # backup file, see _mdate implementation
+        elif "flymake" in filepath:
+            # flymake + pylint in use, don't consider these: they will corrupt the registry
+            return
         # set update time before module loading, else we get some reloading
         # weirdness in case of syntax error or other error while importing the
         # module
         self._lastmodifs[filepath] = mdate
         # load the module
-        module = load_module_from_name(modname, use_sys=not force_reload)
+        module = load_module_from_name(modname)
         self.load_module(module)
 
     def load_module(self, module):
--- a/web/_exceptions.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/_exceptions.py	Mon Jul 19 15:36:16 2010 +0200
@@ -53,10 +53,6 @@
         self.status = int(status)
         self.content = content
 
-class ExplicitLogin(AuthenticationError):
-    """raised when a bad connection id is given or when an attempt to establish
-    a connection failed"""
-
 class InvalidSession(CubicWebException):
     """raised when a session id is found but associated session is not found or
     invalid
@@ -72,3 +68,9 @@
     def dumps(self):
         from cubicweb.web import json
         return json.dumps({'reason': self.reason})
+
+class LogOut(PublishException):
+    """raised to ask for deauthentication of a logged in user"""
+    def __init__(self, url):
+        super(LogOut, self).__init__()
+        self.url = url
--- a/web/action.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/action.py	Mon Jul 19 15:36:16 2010 +0200
@@ -33,18 +33,7 @@
     """
     __registry__ = 'actions'
     __select__ = match_search_state('normal')
-
-    cw_property_defs = {
-        'visible':  dict(type='Boolean', default=True,
-                         help=_('display the action or not')),
-        'order':    dict(type='Int', default=99,
-                         help=_('display order of the action')),
-        'category': dict(type='String', default='moreactions',
-                         vocabulary=('mainactions', 'moreactions', 'addrelated',
-                                     'useractions', 'siteactions', 'hidden'),
-                         help=_('context where this component should be displayed')),
-    }
-    site_wide = True # don't want user to configurate actions
+    order = 99
     category = 'moreactions'
     # actions in category 'moreactions' can specify a sub-menu in which they should be filed
     submenu = None
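A hedged sketch of what this simplification means for cube code (the action class and id below are hypothetical): placement is now tuned through plain class attributes rather than cw_property_defs entries backed by CWProperty::

    from cubicweb.web.action import Action

    class MyAction(Action):            # hypothetical cube action
        __regid__ = 'myaction'
        order = 10                     # formerly the 'order' property
        category = 'mainactions'       # formerly the 'category' property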
--- a/web/application.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/application.py	Mon Jul 19 15:36:16 2010 +0200
@@ -18,6 +18,8 @@
 """CubicWeb web client application object
 
 """
+from __future__ import with_statement
+
 __docformat__ = "restructuredtext en"
 
 import sys
@@ -31,10 +33,11 @@
 from cubicweb import (
     ValidationError, Unauthorized, AuthenticationError, NoSelectableObject,
     RepositoryError, CW_EVENT_MANAGER)
+from cubicweb.dbapi import DBAPISession
 from cubicweb.web import LOGGER, component
 from cubicweb.web import (
-    StatusResponse, DirectResponse, Redirect, NotFound,
-    RemoteCallFailed, ExplicitLogin, InvalidSession, RequestError)
+    StatusResponse, DirectResponse, Redirect, NotFound, LogOut,
+    RemoteCallFailed, InvalidSession, RequestError)
 
 # make session manager available through a global variable so the debug view can
 # print information about web session
@@ -56,12 +59,11 @@
         assert self.cleanup_anon_session_time > 0
         self.authmanager = vreg['components'].select('authmanager', vreg=vreg)
         if vreg.config.anonymous_user() is not None:
-            self.clean_sessions_interval = min(
-                5 * 60,
-                self.cleanup_session_time / 2.,
-                self.cleanup_anon_session_time / 2.)
+            self.clean_sessions_interval = max(
+                5 * 60, min(self.cleanup_session_time / 2.,
+                            self.cleanup_anon_session_time / 2.))
         else:
-            self.clean_sessions_interval = min(
+            self.clean_sessions_interval = max(
                 5 * 60,
                 self.cleanup_session_time / 2.)
 
@@ -74,7 +76,7 @@
         for session in self.current_sessions():
             no_use_time = (time() - session.last_usage_time)
             total += 1
-            if session.anonymous_connection:
+            if session.anonymous_session:
                 if no_use_time >= self.cleanup_anon_session_time:
                     self.close_session(session)
                     closed += 1
@@ -98,9 +100,11 @@
         raise NotImplementedError()
 
     def open_session(self, req):
-        """open and return a new session for the given request
+        """open and return a new session for the given request. The session is
+        also bound to the request.
 
-        :raise ExplicitLogin: if authentication is required
+        raise :exc:`cubicweb.AuthenticationError` if authentication failed
+        (no authentication info found or wrong user/password)
         """
         raise NotImplementedError()
 
@@ -119,11 +123,24 @@
     def __init__(self, vreg):
         self.vreg = vreg
 
-    def authenticate(self, req):
-        """authenticate user and return corresponding user object
+    def validate_session(self, req, session):
+        """check session validity, reconnecting it to the repository if the
+        associated connection expired in the repository side (hence the
+        necessity for this method).
 
-        :raise ExplicitLogin: if authentication is required (no authentication
-        info found or wrong user/password)
+        raise :exc:`InvalidSession` if the session is corrupted for one
+        reason or another and should be closed
+        """
+        raise NotImplementedError()
+
+    def authenticate(self, req):
+        """authenticate user using connection information found in the request,
+        and return a corresponding :class:`~cubicweb.dbapi.Connection` instance,
+        as well as the login and authentication information dictionary used to
+        open the connection.
+
+        raise :exc:`cubicweb.AuthenticationError` if authentication failed
+        (no authentication info found or wrong user/password)
         """
         raise NotImplementedError()
 
@@ -181,7 +198,6 @@
 
         :raise Redirect: if authentication has occured and succeed
         """
-        assert req.cnx is None # at this point no cnx should be set on the request
         cookie = req.get_cookie()
         try:
             sessionid = str(cookie[self.SESSION_VAR].value)
@@ -191,9 +207,11 @@
             try:
                 session = self.get_session(req, sessionid)
             except InvalidSession:
+                # try to open a new session, so we get an anonymous session if
+                # allowed
                 try:
                     session = self.open_session(req)
-                except ExplicitLogin:
+                except AuthenticationError:
                     req.remove_cookie(cookie, self.SESSION_VAR)
                     raise
         # remember last usage time for web session tracking
@@ -209,17 +227,20 @@
         req.set_cookie(cookie, self.SESSION_VAR, maxage=None)
         # remember last usage time for web session tracking
         session.last_usage_time = time()
-        if not session.anonymous_connection:
+        if not session.anonymous_session:
             self._postlogin(req)
         return session
 
     def _update_last_login_time(self, req):
+        # XXX should properly detect missing permission / non writeable source
+        # and avoid "except (RepositoryError, Unauthorized)" below
+        if req.user.metainformation()['source']['adapter'] == 'ldapuser':
+            return
         try:
             req.execute('SET X last_login_time NOW WHERE X eid %(x)s',
-                        {'x' : req.user.eid}, 'x')
+                        {'x' : req.user.eid})
             req.cnx.commit()
         except (RepositoryError, Unauthorized):
-            # ldap user are not writeable for instance
             req.cnx.rollback()
         except:
             req.cnx.rollback()
@@ -251,9 +272,9 @@
         """logout from the instance by cleaning the session and raising
         `AuthenticationError`
         """
-        self.session_manager.close_session(req.cnx)
+        self.session_manager.close_session(req.session)
         req.remove_cookie(req.get_cookie(), self.SESSION_VAR)
-        raise AuthenticationError(url=goto_url)
+        raise LogOut(url=goto_url)
 
 
 class CubicWebPublisher(object):
@@ -271,7 +292,7 @@
         # connect to the repository and get instance's schema
         self.repo = config.repository(vreg)
         if not vreg.initialized:
-            self.config.init_cubes(self.repo.get_cubes())
+            config.init_cubes(self.repo.get_cubes())
             vreg.init_properties(self.repo.properties())
             vreg.set_schema(self.repo.get_schema())
         # set the correct publish method
@@ -297,7 +318,10 @@
         sessions (i.e. a new connection may be created or an already existing
         one may be reused
         """
-        self.session_handler.set_session(req)
+        try:
+            self.session_handler.set_session(req)
+        except AuthenticationError:
+            req.set_session(DBAPISession(None))
 
     # publish methods #########################################################
 
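
The fallback above means a failed authentication no longer aborts the
request: it simply binds a connection-less session, which is why later hunks
guard query logging and commits with ``if req.cnx:``. The pattern in
isolation, with the ``DBAPISession`` import location assumed::

    from cubicweb import AuthenticationError
    from cubicweb.dbapi import DBAPISession

    def bind_session(session_handler, req):
        """attach a session to the request, degrading to a connection-less
        session when authentication fails"""
        try:
            session_handler.set_session(req)
        except AuthenticationError:
            req.set_session(DBAPISession(None))
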
@@ -309,19 +333,18 @@
             return self.main_publish(path, req)
         finally:
             cnx = req.cnx
-            self._logfile_lock.acquire()
-            try:
-                try:
-                    result = ['\n'+'*'*80]
-                    result.append(req.url())
-                    result += ['%s %s -- (%.3f sec, %.3f CPU sec)' % q for q in cnx.executed_queries]
-                    cnx.executed_queries = []
-                    self._query_log.write('\n'.join(result).encode(req.encoding))
-                    self._query_log.flush()
-                except Exception:
-                    self.exception('error while logging queries')
-            finally:
-                self._logfile_lock.release()
+            if cnx:
+                with self._logfile_lock:
+                    try:
+                        result = ['\n'+'*'*80]
+                        result.append(req.url())
+                        result += ['%s %s -- (%.3f sec, %.3f CPU sec)' % q
+                                   for q in cnx.executed_queries]
+                        cnx.executed_queries = []
+                        self._query_log.write('\n'.join(result).encode(req.encoding))
+                        self._query_log.flush()
+                    except Exception:
+                        self.exception('error while logging queries')
 
     @deprecated("[3.4] use vreg['controllers'].select(...)")
     def select_controller(self, oid, req):
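
The query-logging rewrite above swaps the manual ``acquire()``/``release()``
pair for a ``with`` block and only logs when the request actually carries a
connection. The locking idiom in isolation (generic sketch, names
illustrative)::

    import threading

    _logfile_lock = threading.Lock()

    def log_queries(logfile, lines, encoding='utf-8'):
        """append one request's executed queries to the log file; the lock
        is released even if encoding or writing fails"""
        with _logfile_lock:
            logfile.write(u'\n'.join(lines).encode(encoding))
            logfile.flush()
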
@@ -351,6 +374,7 @@
         # remove user callbacks on a new request (except for json controllers
         # to avoid callbacks being unregistered before they could be called)
         tstart = clock()
+        commited = False
         try:
             try:
                 ctrlid, rset = self.url_resolver.process(req, path)
@@ -358,15 +382,22 @@
                     controller = self.vreg['controllers'].select(ctrlid, req,
                                                                  appli=self)
                 except NoSelectableObject:
+                    if ctrlid == 'login':
+                        raise Unauthorized(req._('log out first'))
                     raise Unauthorized(req._('not authorized'))
                 req.update_search_state()
                 result = controller.publish(rset=rset)
-                if req.cnx is not None:
-                    # req.cnx is None if anonymous aren't allowed and we are
-                    # displaying the cookie authentication form
+                if req.cnx:
+                    # no req.cnx if anonymous users aren't allowed and we are
+                    # displaying some anonymous-enabled view such as the cookie
+                    # authentication form
                     req.cnx.commit()
+                    commited = True
             except (StatusResponse, DirectResponse):
-                req.cnx.commit()
+                if req.cnx:
+                    req.cnx.commit()
+                raise
+            except (AuthenticationError, LogOut):
                 raise
             except Redirect:
                 # redirect is raised by edit controller when everything went fine,
@@ -388,10 +419,13 @@
                 else:
                     # delete validation errors which may have been previously set
                     if '__errorurl' in req.form:
-                        req.del_session_data(req.form['__errorurl'])
+                        req.session.data.pop(req.form['__errorurl'], None)
                     raise
-            except (AuthenticationError, NotFound, RemoteCallFailed):
-                raise
+            except RemoteCallFailed, ex:
+                req.set_header('content-type', 'application/json')
+                raise StatusResponse(500, ex.dumps())
+            except NotFound:
+                raise StatusResponse(404, self.notfound_content(req))
             except ValidationError, ex:
                 self.validation_error_handler(req, ex)
             except (Unauthorized, BadRQLQuery, RequestError), ex:
@@ -402,7 +436,7 @@
                 self.critical('Catch all triggered!!!')
                 self.exception('this is what happened')
         finally:
-            if req.cnx is not None:
+            if req.cnx and not commited:
                 try:
                     req.cnx.rollback()
                 except:
@@ -417,7 +451,7 @@
                         'values': req.form,
                         'eidmap': req.data.get('eidmap', {})
                         }
-            req.set_session_data(req.form['__errorurl'], forminfo)
+            req.session.data[req.form['__errorurl']] = forminfo
             # XXX form session key / __error_url should be differentiated:
             # session key is 'url + #<form dom id', though we usually don't want
             # the browser to move to the form since it hides the global
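
Two patterns from the hunks above are worth isolating: failed remote (JSON)
calls now surface as an HTTP 500 response carrying the serialized error, and
unknown resources as a 404 with the regular not-found page, instead of
letting those exceptions bubble up unchanged. A condensed sketch, assuming
the exception classes are importable from ``cubicweb.web`` as in this
module::

    from cubicweb.web import NotFound, RemoteCallFailed, StatusResponse

    def reraise_as_status(publisher, req, exc):
        """map a publishing error onto a StatusResponse"""
        if isinstance(exc, RemoteCallFailed):
            req.set_header('content-type', 'application/json')
            raise StatusResponse(500, exc.dumps())
        if isinstance(exc, NotFound):
            raise StatusResponse(404, publisher.notfound_content(req))
        raise exc
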
--- a/web/box.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/box.py	Mon Jul 19 15:36:16 2010 +0200
@@ -25,7 +25,7 @@
 
 from cubicweb import Unauthorized, role as get_role, target as get_target
 from cubicweb.schema import display_name
-from cubicweb.selectors import (one_line_rset,  primary_view,
+from cubicweb.selectors import (no_cnx, one_line_rset,  primary_view,
                                 match_context_prop, partial_has_related_entities)
 from cubicweb.view import View, ReloadableMixIn
 
@@ -50,7 +50,7 @@
         box.render(self.w)
     """
     __registry__ = 'boxes'
-    __select__ = match_context_prop()
+    __select__ = ~no_cnx() & match_context_prop()
 
     categories_in_order = ()
     cw_property_defs = {
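
The ``~no_cnx()`` term added above keeps every box from being selected on
requests that carry no connection (for instance when anonymous access is
disabled and only the login form is rendered). Derived boxes simply compose
further selectors on top of the inherited ones; an illustrative sketch (the
subclass and its identifier are hypothetical)::

    from cubicweb.selectors import one_line_rset
    from cubicweb.web.box import BoxTemplate

    class RelatedStuffBox(BoxTemplate):
        """only selectable with a connection, a single-row result set and a
        matching context property (the first and last come from BoxTemplate)"""
        __regid__ = 'related.stuff.box'
        __select__ = BoxTemplate.__select__ & one_line_rset()

        def call(self, **kwargs):
            self.w(u'<div class="sideBox">...</div>')
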
--- a/web/captcha.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/captcha.py	Mon Jul 19 15:36:16 2010 +0200
@@ -83,8 +83,7 @@
         return img + super(CaptchaWidget, self).render(form, field, renderer)
 
     def process_field_data(self, form, field):
-        captcha = form._cw.get_session_data(field.input_name(form), None,
-                                            pop=True)
+        captcha = form._cw.session.data.pop(field.input_name(form), None)
         val = super(CaptchaWidget, self).process_field_data(form, field)
         if val is None:
             return val # required will be checked by field
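
The same substitution recurs across the changeset: the
``get_session_data`` / ``set_session_data`` request helpers give way to
direct access to the ``req.session.data`` dictionary. The equivalent
one-liners (key names illustrative)::

    def stash(req, key, value):
        # was: req.set_session_data(key, value)
        req.session.data[key] = value

    def unstash(req, key):
        # was: req.get_session_data(key, None, pop=True)
        return req.session.data.pop(key, None)
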
--- a/web/component.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/component.py	Mon Jul 19 15:36:16 2010 +0200
@@ -130,11 +130,14 @@
         params = dict(params)
         params.update({self.start_param : start,
                        self.stop_param : stop,})
-        if path == 'json':
+        view = self.cw_extra_kwargs.get('view')
+        if view is not None and hasattr(view, 'page_navigation_url'):
+            url = view.page_navigation_url(self, path, params)
+        elif path == 'json':
             rql = params.pop('rql', self.cw_rset.printable_rql())
             # latest 'true' used for 'swap' mode
             url = 'javascript: replacePageChunk(%s, %s, %s, %s, true)' % (
-                json.dumps(params.get('divid', 'paginated-content')),
+                json.dumps(params.get('divid', 'pageContent')),
                 json.dumps(rql), json.dumps(params.pop('vid', None)), json.dumps(params))
         else:
             url = self._cw.build_url(path, **params)
@@ -181,7 +184,7 @@
             rset = entity.related(self.rtype, role(self))
         else:
             eid = self.cw_rset[row][col]
-            rset = self._cw.execute(self.rql(), {'x': eid}, 'x')
+            rset = self._cw.execute(self.rql(), {'x': eid})
         if not rset.rowcount:
             return
         self.w(u'<div class="%s">' % self.div_class())
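
The navigation component now asks the paginated view itself how to build
page links whenever it defines ``page_navigation_url``, and only falls back
to the json/``replacePageChunk`` URL otherwise. A sketch of a view opting
into that hook (the class and the URL scheme are purely illustrative)::

    class MyPaginatedView(object):      # stands in for an actual cubicweb view
        def page_navigation_url(self, navcomp, path, params):
            """return the URL the navigation component should emit for one
            page; here a plain URL carrying the start/stop parameters"""
            return navcomp._cw.build_url(path, **params)
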
--- a/web/controller.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/controller.py	Mon Jul 19 15:36:16 2010 +0200
@@ -15,10 +15,8 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""abstract controller classe for CubicWeb web client
+"""abstract controller class for CubicWeb web client"""
 
-
-"""
 __docformat__ = "restructuredtext en"
 
 from logilab.mtconverter import xml_escape
--- a/web/data/cubicweb.acl.css	Thu May 06 08:24:46 2010 +0200
+++ b/web/data/cubicweb.acl.css	Mon Jul 19 15:36:16 2010 +0200
@@ -1,7 +1,7 @@
 /* styles for access control forms)
  *
  *  :organization: Logilab
- *  :copyright: 2003-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+ *  :copyright: 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
  *  :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
  */
 
@@ -17,13 +17,13 @@
 }
 
 
-h3.schema{ 
+h3.schema{
  font-weight: bold;
 }
 
 h4 a,
 h4 a:link,
-h4 a:visited{ 
+h4 a:visited{
  color:#000;
  }
 
@@ -39,11 +39,11 @@
 table.schemaInfo td {
   padding: .3em .5em;
   border: 1px solid grey;
-  width:33%; 
+  width:33%;
 }
 
 
-table.schemaInfo tr th {   
+table.schemaInfo tr th {
  padding: 0.2em 0px 0.2em 5px;
  background-image:none;
  background-color:#dfdfdf;
@@ -51,47 +51,46 @@
 
 table.schemaInfo thead tr {
   border: 1px solid #dfdfdf;
-} 
+}
 
 table.schemaInfo td {
-  padding: 3px 10px 3px 5px; 
+  padding: 3px 10px 3px 5px;
 
 }
 
-.users{ 
+a.users{
  color : #00CC33;
  font-weight: bold }
 
-.guests{ 
+a.guests{
  color :  #ff7700;
  font-weight: bold;
 }
 
-.staff{  
- color : #0083ab;
- font-weight: bold;
-}
-
-.owners{ 
+a.owners{
  color : #8b0000;
  font-weight: bold;
 }
 
+a.managers{
+ color: #000000;
+}
+
 .discret,
-a.grey{ 
+a.grey{
  color:#666;
 }
 
-a.grey:hover{ 
+a.grey:hover{
  color:#000;
 }
 
-.red{ 
+.red{
  color :  #ff7700;
  }
 
-div#schema_security{ 
- width:780px;
+div#schema_security{
+ width:100%;
  }
 /******************************************************************************/
 /* user groups edition form (views/euser.py)                                  */
--- a/web/data/cubicweb.ajax.js	Thu May 06 08:24:46 2010 +0200
+++ b/web/data/cubicweb.ajax.js	Mon Jul 19 15:36:16 2010 +0200
@@ -1,6 +1,6 @@
 /*
  *  :organization: Logilab
- *  :copyright: 2003-2009 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+ *  :copyright: 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
  *  :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
  */
 
@@ -60,7 +60,7 @@
     //    returned several 'root' nodes and we need to keep the wrapper
     //    created by getDomFromResponse()
     if (response.childNodes.length == 1 &&
-	response.getAttribute('cubicweb:type') == 'cwResponseWrapper') {
+        response.getAttribute('cubicweb:type') == 'cwResponseWrapper') {
         return response.firstChild;
     }
     return response;
@@ -200,6 +200,7 @@
     if (req.status == 500) {
         updateMessage(err);
     } else {
+        log(err);
         updateMessage(_("an error occured while processing your request"));
     }
 }
@@ -286,6 +287,7 @@
         if (node) {
             // make sure the component is visible
             removeElementClass(node, "hidden");
+            domnode = preprocessAjaxLoad(node, domnode);
             swapDOM(node, domnode);
             postAjaxLoad(domnode);
         }
--- a/web/data/cubicweb.calendar.css	Thu May 06 08:24:46 2010 +0200
+++ b/web/data/cubicweb.calendar.css	Mon Jul 19 15:36:16 2010 +0200
@@ -1,7 +1,7 @@
 /* styles for the calendar views
  *
  *  :organization: Logilab
- *  :copyright: 2003-2009 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+ *  :copyright: 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
  *  :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
  */
 
--- a/web/data/cubicweb.calendar.js	Thu May 06 08:24:46 2010 +0200
+++ b/web/data/cubicweb.calendar.js	Mon Jul 19 15:36:16 2010 +0200
@@ -1,7 +1,7 @@
 /*
  *  This file contains Calendar utilities
  *  :organization: Logilab
- *  :copyright: 2003-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+ *  :copyright: 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
  *  :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
  */
 
--- a/web/data/cubicweb.calendar_popup.css	Thu May 06 08:24:46 2010 +0200
+++ b/web/data/cubicweb.calendar_popup.css	Mon Jul 19 15:36:16 2010 +0200
@@ -1,7 +1,7 @@
 /* styles for the calendar popup widget used to edit date fields
  *
  *  :organization: Logilab
- *  :copyright: 2003-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+ *  :copyright: 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
  *  :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
  */
 
--- a/web/data/cubicweb.css	Thu May 06 08:24:46 2010 +0200
+++ b/web/data/cubicweb.css	Mon Jul 19 15:36:16 2010 +0200
@@ -1,6 +1,6 @@
 /*
  *  :organization: Logilab
- *  :copyright: 2003-2009 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+ *  :copyright: 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
  *  :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
  */
 /***************************************/
@@ -63,7 +63,7 @@
   text-decoration: underline;
 }
 
-a img {
+a img, img {
   border: none;
   text-align: center;
 }
--- a/web/data/cubicweb.edition.js	Thu May 06 08:24:46 2010 +0200
+++ b/web/data/cubicweb.edition.js	Mon Jul 19 15:36:16 2010 +0200
@@ -321,9 +321,15 @@
 }
 
 function _clearPreviousErrors(formid) {
-    jQuery('#' + formid + 'ErrorMessage').remove();
-    jQuery('#' + formid + ' span.errorMsg').remove();
-    jQuery('#' + formid + ' .error').removeClass('error');
+    // in some cases (e.g. max request size exceeded), we don't know the formid
+    if (formid) {
+	jQuery('#' + formid + 'ErrorMessage').remove();
+	jQuery('#' + formid + ' span.errorMsg').remove();
+	jQuery('#' + formid + ' .error').removeClass('error');
+    } else {
+	jQuery('span.errorMsg').remove();
+	jQuery('.error').removeClass('error');
+    }
 }
 
 function _displayValidationerrors(formid, eid, errors) {
@@ -389,14 +395,16 @@
     // Failures
     _clearPreviousErrors(formid);
     var descr = result[1];
+    var errmsg;
     // Unknown structure
     if ( !isArrayLike(descr) || descr.length != 2 ) {
-	log('got strange error :', descr);
-	updateMessage(descr);
-	return false;
+	errmsg = descr;
+    } else {
+	_displayValidationerrors(formid, descr[0], descr[1]);
+	errmsg = _('please correct errors below');
     }
-    _displayValidationerrors(formid, descr[0], descr[1]);
-    updateMessage(_('please correct errors below'));
+    updateMessage(errmsg);
+    // ensure the browser does not scroll down
     document.location.hash = '#header';
     return false;
 }
@@ -405,7 +413,12 @@
 /* unfreeze form buttons when the validation process is over*/
 function unfreezeFormButtons(formid) {
     jQuery('#progress').hide();
-    jQuery('#' + formid + ' .validateButton').removeAttr('disabled');
+    // in some cases (e.g. max request size exceeded), we don't know the formid
+    if (formid) {
+	jQuery('#' + formid + ' .validateButton').removeAttr('disabled');
+    } else {
+	jQuery('.validateButton').removeAttr('disabled');
+    }
     return true;
 }
 
--- a/web/data/cubicweb.facets.js	Thu May 06 08:24:46 2010 +0200
+++ b/web/data/cubicweb.facets.js	Mon Jul 19 15:36:16 2010 +0200
@@ -1,6 +1,6 @@
 /*
  *  :organization: Logilab
- *  :copyright: 2003-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+ *  :copyright: 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
  *  :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
  */
 
@@ -56,9 +56,7 @@
 	}
 	var toupdate = result[1];
 	var extraparams = vidargs;
-	var displayactions = jQuery('#' + divid).attr('cubicweb:displayactions');
-	if (displayactions) { extraparams['displayactions'] = displayactions; }
-	if (paginate) { extraparams['paginate'] = '1'; }
+	if (paginate) { extraparams['paginate'] = '1'; } // XXX in vidargs
 	// copy some parameters
 	// XXX cleanup vid/divid mess
 	// if vid argument is specified , the one specified in form params will
--- a/web/data/cubicweb.form.css	Thu May 06 08:24:46 2010 +0200
+++ b/web/data/cubicweb.form.css	Mon Jul 19 15:36:16 2010 +0200
@@ -1,7 +1,7 @@
 /* styles for generated forms
  *
  *  :organization: Logilab
- *  :copyright: 2003-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+ *  :copyright: 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
  *  :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
  */
 
--- a/web/data/cubicweb.gmap.js	Thu May 06 08:24:46 2010 +0200
+++ b/web/data/cubicweb.gmap.js	Mon Jul 19 15:36:16 2010 +0200
@@ -1,6 +1,6 @@
 /*
  *  :organization: Logilab
- *  :copyright: 2003-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+ *  :copyright: 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
  *  :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
  *
  *
--- a/web/data/cubicweb.goa.js	Thu May 06 08:24:46 2010 +0200
+++ b/web/data/cubicweb.goa.js	Mon Jul 19 15:36:16 2010 +0200
@@ -2,7 +2,7 @@
  *  functions specific to cubicweb on google appengine
  *
  *  :organization: Logilab
- *  :copyright: 2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+ *  :copyright: 2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
  *  :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
  */
 
--- a/web/data/cubicweb.mailform.css	Thu May 06 08:24:46 2010 +0200
+++ b/web/data/cubicweb.mailform.css	Mon Jul 19 15:36:16 2010 +0200
@@ -1,7 +1,7 @@
 /* styles for the email form (views/massmailing.py)
  *
  *  :organization: Logilab
- *  :copyright: 2003-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+ *  :copyright: 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
  *  :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
  */
 
--- a/web/data/cubicweb.preferences.css	Thu May 06 08:24:46 2010 +0200
+++ b/web/data/cubicweb.preferences.css	Mon Jul 19 15:36:16 2010 +0200
@@ -1,7 +1,7 @@
 /* styles for preferences form (views/management.py)
  *
  *  :organization: Logilab
- *  :copyright: 2003-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+ *  :copyright: 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
  *  :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
  */
 
--- a/web/data/cubicweb.schema.css	Thu May 06 08:24:46 2010 +0200
+++ b/web/data/cubicweb.schema.css	Mon Jul 19 15:36:16 2010 +0200
@@ -1,7 +1,7 @@
 /* styles for schema views
  *
  *  :organization: Logilab
- *  :copyright: 2003-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+ *  :copyright: 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
  *  :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
  */
 
@@ -15,32 +15,35 @@
 }
 
 
-div.relationDefinition { 
-  float: left;   
+div.relationDefinition {
+  float: left;
   position: relative;
   width: 60%;
   padding: 0;
 }
 
-div.acl{ 
+div.acl{
   position: relative;
 /*  right: 20%;*/
-  float: right; 
-  width: 10%;
+  width: 25%;
   padding:0px 0px 0px 2em;
 }
 
+div.acl table td,
+div.acl table tr {
+    padding: 2px 2px 2px 2px;
+}
+
 div.schema table {
   width : 100%;
 }
 
-div.entityAttributes{ 
- margin: 3em 0 5em; 
+div.entityAttributes{
+ margin: 3em 0 5em;
  font: normal 9pt Arial;
 }
 
 div.box div.title{
- border-bottom:1px solid black;
  padding:0.2em 0.2em;
  margin: 0 auto;
 }
@@ -56,43 +59,42 @@
 
 div.body{
  padding : 0.2em;
- padding-bottom : 0.4em; 
+ padding-bottom : 0.4em;
  overflow: auto;
 }
 
 div.body table td{
- padding:0.4em; 
+ padding:0.4em;
  }
 
 div.box{
  float:left;
  border:1px solid black;
- width:50%; 
 }
 
 div.vl{
  float:left;
- position:relative; 
- margin-top:1em; 
- border-top:1px solid black; 
- line-height : 1px; 
- width: 1em; 
+ position:relative;
+ margin-top:1em;
+ border-top:1px solid black;
+ line-height : 1px;
+ width: 1em;
  height : 0px}
 
 div.hl{
  float:left;
- position:relative; 
- margin-top:1em;  
- border-left:1px solid black; 
- width: 1px; 
+ position:relative;
+ margin-top:1em;
+ border-left:1px solid black;
+ width: 1px;
  height : 10px
 }
 
 div.rels{
- float:left; 
- position:relative; 
- margin-top:1em; 
- border-left:1px solid black;  
+ float:left;
+ position:relative;
+ margin-top:1em;
+ border-left:1px solid black;
  margin-left:-2px;}
 
 div.firstrel, div.rel, div.lastrel{
@@ -103,7 +105,7 @@
 }
 
 /* FIXME set to 9em or an image*/
-div.rel, div.lastrel{ 
+div.rel, div.lastrel{
  margin-top:0.7em}
 
 div.vars{
@@ -112,24 +114,24 @@
 
 div.firstvar, div.var, div.lastvar{
  line-height:1em;
- border:1px solid black; 
+ border:1px solid black;
  padding:0.2em}
 
 div.firstvar{
  margin-top:1em;}
 
 div.var{
- margin-top:0.5em; 
+ margin-top:0.5em;
 }
 
 div.lastvar{
  border:none;
 }
 
-div.firstvar a, 
+div.firstvar a,
 div.var a,
 div.rel a,
-div.firstrel a{ 
+div.firstrel a{
  padding:0px ! important;
- margin : 0px ! important; 
+ margin : 0px ! important;
 }
--- a/web/data/cubicweb.suggest.css	Thu May 06 08:24:46 2010 +0200
+++ b/web/data/cubicweb.suggest.css	Mon Jul 19 15:36:16 2010 +0200
@@ -1,7 +1,7 @@
 /* styles for input with suggestions (as for tags and keywords)
  *
  *  :organization: Logilab
- *  :copyright: 2003-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+ *  :copyright: 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
  *  :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
  */
 
--- a/web/data/cubicweb.timeline-bundle.js	Thu May 06 08:24:46 2010 +0200
+++ b/web/data/cubicweb.timeline-bundle.js	Mon Jul 19 15:36:16 2010 +0200
@@ -2289,7 +2289,7 @@
     e2uHash['tilde']= '\u02DC';
     e2uHash['ensp']= '\u2002';
     e2uHash['emsp']= '\u2003';
     e2uHash['thinsp']= '\u2009';
     e2uHash['zwnj']= '\u200C';
     e2uHash['zwj']= '\u200D';
     e2uHash['lrm']= '\u200E';
@@ -5383,7 +5383,7 @@
                             // needed when too many events for the available width
                             // are painted on the visible part of the Timeline?
     this.autoWidthAnimationTime = 500; // mSec
     this.timeline_start = null; // Setting a date, eg new Date(Date.UTC(2008,0,17,20,00,00,0)) will prevent the
                                 // Timeline from being moved to anytime before the date.
     this.timeline_stop = null;  // Use for setting a maximum date. The Timeline will not be able
                                 // to be moved to anytime after this date.
--- a/web/data/cubicweb.timeline-ext.js	Thu May 06 08:24:46 2010 +0200
+++ b/web/data/cubicweb.timeline-ext.js	Mon Jul 19 15:36:16 2010 +0200
@@ -1,6 +1,6 @@
 /*
  *  :organization: Logilab
- *  :copyright: 2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+ *  :copyright: 2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
  *  :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
  *
  */
--- a/web/data/cubicweb.timetable.css	Thu May 06 08:24:46 2010 +0200
+++ b/web/data/cubicweb.timetable.css	Mon Jul 19 15:36:16 2010 +0200
@@ -1,7 +1,7 @@
 /* styles for the timetable view
  *
  *  :organization: Logilab
- *  :copyright: 2003-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+ *  :copyright: 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
  *  :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
  */
 
--- a/web/data/cubicweb.widgets.js	Thu May 06 08:24:46 2010 +0200
+++ b/web/data/cubicweb.widgets.js	Mon Jul 19 15:36:16 2010 +0200
@@ -1,6 +1,6 @@
 /*
  *  :organization: Logilab
- *  :copyright: 2003-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+ *  :copyright: 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
  *  :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
  *
  *
--- a/web/data/jquery.autocomplete.js	Thu May 06 08:24:46 2010 +0200
+++ b/web/data/jquery.autocomplete.js	Mon Jul 19 15:36:16 2010 +0200
@@ -1,13 +1,13 @@
 /*
  * jQuery Autocomplete plugin 1.1
  *
  * Copyright (c) 2009 Jörn Zaefferer
  *
  * Dual licensed under the MIT and GPL licenses:
  *   http://www.opensource.org/licenses/mit-license.php
  *   http://www.gnu.org/licenses/gpl.html
  *
  * Revision: $Id: jquery.autocomplete.js 15 2009-08-22 10:30:27Z joern.zaefferer $
  */;(function($){$.fn.extend({autocomplete:function(urlOrData,options){var isUrl=typeof urlOrData=="string";options=$.extend({},$.Autocompleter.defaults,{url:isUrl?urlOrData:null,data:isUrl?null:urlOrData,delay:isUrl?$.Autocompleter.defaults.delay:10,max:options&&!options.scroll?10:150},options);options.highlight=options.highlight||function(value){return value;};options.formatMatch=options.formatMatch||options.formatItem;return this.each(function(){new $.Autocompleter(this,options);});},result:function(handler){return this.bind("result",handler);},search:function(handler){return this.trigger("search",[handler]);},flushCache:function(){return this.trigger("flushCache");},setOptions:function(options){return this.trigger("setOptions",[options]);},unautocomplete:function(){return this.trigger("unautocomplete");}});$.Autocompleter=function(input,options){var KEY={UP:38,DOWN:40,DEL:46,TAB:9,RETURN:13,ESC:27,COMMA:188,PAGEUP:33,PAGEDOWN:34,BACKSPACE:8};var $input=$(input).attr("autocomplete","off").addClass(options.inputClass);var timeout;var previousValue="";var cache=$.Autocompleter.Cache(options);var hasFocus=0;var lastKeyPressCode;var config={mouseDownOnSelect:false};var select=$.Autocompleter.Select(options,input,selectCurrent,config);var blockSubmit;$.browser.opera&&$(input.form).bind("submit.autocomplete",function(){if(blockSubmit){blockSubmit=false;return false;}});$input.bind(($.browser.opera?"keypress":"keydown")+".autocomplete",function(event){hasFocus=1;lastKeyPressCode=event.keyCode;switch(event.keyCode){case KEY.UP:event.preventDefault();if(select.visible()){select.prev();}else{onChange(0,true);}break;case KEY.DOWN:event.preventDefault();if(select.visible()){select.next();}else{onChange(0,true);}break;case KEY.PAGEUP:event.preventDefault();if(select.visible()){select.pageUp();}else{onChange(0,true);}break;case KEY.PAGEDOWN:event.preventDefault();if(select.visible()){select.pageDown();}else{onChange(0,true);}break;case options.multiple&&$.trim(options.multipleSeparator)==","&&KEY.COMMA:case KEY.TAB:case KEY.RETURN:if(selectCurrent()){event.preventDefault();blockSubmit=true;return false;}break;case KEY.ESC:select.hide();break;default:clearTimeout(timeout);timeout=setTimeout(onChange,options.delay);break;}}).focus(function(){hasFocus++;}).blur(function(){hasFocus=0;if(!config.mouseDownOnSelect){hideResults();}}).click(function(){if(hasFocus++>1&&!select.visible()){onChange(0,true);}}).bind("search",function(){var fn=(arguments.length>1)?arguments[1]:null;function findValueCallback(q,data){var result;if(data&&data.length){for(var i=0;i<data.length;i++){if(data[i].result.toLowerCase()==q.toLowerCase()){result=data[i];break;}}}if(typeof fn=="function")fn(result);else $input.trigger("result",result&&[result.data,result.value]);}$.each(trimWords($input.val()),function(i,value){request(value,findValueCallback,findValueCallback);});}).bind("flushCache",function(){cache.flush();}).bind("setOptions",function(){$.extend(options,arguments[1]);if("data"in arguments[1])cache.populate();}).bind("unautocomplete",function(){select.unbind();$input.unbind();$(input.form).unbind(".autocomplete");});function selectCurrent(){var selected=select.selected();if(!selected)return false;var v=selected.result;previousValue=v;if(options.multiple){var words=trimWords($input.val());if(words.length>1){var seperator=options.multipleSeparator.length;var cursorAt=$(input).selection().start;var wordAt,progress=0;$.each(words,function(i,word){progress+=word.length;if(cursorAt<=progress){wordAt=i;return 
false;}progress+=seperator;});words[wordAt]=v;v=words.join(options.multipleSeparator);}v+=options.multipleSeparator;}$input.val(v);hideResultsNow();$input.trigger("result",[selected.data,selected.value]);return true;}function onChange(crap,skipPrevCheck){if(lastKeyPressCode==KEY.DEL){select.hide();return;}var currentValue=$input.val();if(!skipPrevCheck&&currentValue==previousValue)return;previousValue=currentValue;currentValue=lastWord(currentValue);if(currentValue.length>=options.minChars){$input.addClass(options.loadingClass);if(!options.matchCase)currentValue=currentValue.toLowerCase();request(currentValue,receiveData,hideResultsNow);}else{stopLoading();select.hide();}};function trimWords(value){if(!value)return[""];if(!options.multiple)return[$.trim(value)];return $.map(value.split(options.multipleSeparator),function(word){return $.trim(value).length?$.trim(word):null;});}function lastWord(value){if(!options.multiple)return value;var words=trimWords(value);if(words.length==1)return words[0];var cursorAt=$(input).selection().start;if(cursorAt==value.length){words=trimWords(value)}else{words=trimWords(value.replace(value.substring(cursorAt),""));}return words[words.length-1];}function autoFill(q,sValue){if(options.autoFill&&(lastWord($input.val()).toLowerCase()==q.toLowerCase())&&lastKeyPressCode!=KEY.BACKSPACE){$input.val($input.val()+sValue.substring(lastWord(previousValue).length));$(input).selection(previousValue.length,previousValue.length+sValue.length);}};function hideResults(){clearTimeout(timeout);timeout=setTimeout(hideResultsNow,200);};function hideResultsNow(){var wasVisible=select.visible();select.hide();clearTimeout(timeout);stopLoading();if(options.mustMatch){$input.search(function(result){if(!result){if(options.multiple){var words=trimWords($input.val()).slice(0,-1);$input.val(words.join(options.multipleSeparator)+(words.length?options.multipleSeparator:""));}else{$input.val("");$input.trigger("result",null);}}});}};function receiveData(q,data){if(data&&data.length&&hasFocus){stopLoading();select.display(data,q);autoFill(q,data[0].value);select.show();}else{hideResultsNow();}};function request(term,success,failure){if(!options.matchCase)term=term.toLowerCase();var data=cache.load(term);if(data&&data.length){success(term,data);}else if((typeof options.url=="string")&&(options.url.length>0)){var extraParams={timestamp:+new Date()};$.each(options.extraParams,function(key,param){extraParams[key]=typeof param=="function"?param():param;});$.ajax({mode:"abort",port:"autocomplete"+input.name,dataType:options.dataType,url:options.url,data:$.extend({q:lastWord(term),limit:options.max},extraParams),success:function(data){var parsed=options.parse&&options.parse(data)||parse(data);cache.add(term,parsed);success(term,parsed);}});}else{select.emptyList();failure(term);}};function parse(data){var parsed=[];var rows=data.split("\n");for(var i=0;i<rows.length;i++){var row=$.trim(rows[i]);if(row){row=row.split("|");parsed[parsed.length]={data:row,value:row[0],result:options.formatResult&&options.formatResult(row,row[0])||row[0]};}}return parsed;};function stopLoading(){$input.removeClass(options.loadingClass);};};$.Autocompleter.defaults={inputClass:"ac_input",resultsClass:"ac_results",loadingClass:"ac_loading",minChars:1,delay:400,matchCase:false,matchSubset:true,matchContains:false,cacheLength:10,max:100,mustMatch:false,extraParams:{},selectFirst:true,formatItem:function(row){return row[0];},formatMatch:null,autoFill:false,width:0,multiple:false,multipleSeparator:", 
",highlight:function(value,term){return value.replace(new RegExp("(?![^&;]+;)(?!<[^<>]*)("+term.replace(/([\^\$\(\)\[\]\{\}\*\.\+\?\|\\])/gi,"\\$1")+")(?![^<>]*>)(?![^&;]+;)","gi"),"<strong>$1</strong>");},scroll:true,scrollHeight:180};$.Autocompleter.Cache=function(options){var data={};var length=0;function matchSubset(s,sub){if(!options.matchCase)s=s.toLowerCase();var i=s.indexOf(sub);if(options.matchContains=="word"){i=s.toLowerCase().search("\\b"+sub.toLowerCase());}if(i==-1)return false;return i==0||options.matchContains;};function add(q,value){if(length>options.cacheLength){flush();}if(!data[q]){length++;}data[q]=value;}function populate(){if(!options.data)return false;var stMatchSets={},nullData=0;if(!options.url)options.cacheLength=1;stMatchSets[""]=[];for(var i=0,ol=options.data.length;i<ol;i++){var rawValue=options.data[i];rawValue=(typeof rawValue=="string")?[rawValue]:rawValue;var value=options.formatMatch(rawValue,i+1,options.data.length);if(value===false)continue;var firstChar=value.charAt(0).toLowerCase();if(!stMatchSets[firstChar])stMatchSets[firstChar]=[];var row={value:value,data:rawValue,result:options.formatResult&&options.formatResult(rawValue)||value};stMatchSets[firstChar].push(row);if(nullData++<options.max){stMatchSets[""].push(row);}};$.each(stMatchSets,function(i,value){options.cacheLength++;add(i,value);});}setTimeout(populate,25);function flush(){data={};length=0;}return{flush:flush,add:add,populate:populate,load:function(q){if(!options.cacheLength||!length)return null;if(!options.url&&options.matchContains){var csub=[];for(var k in data){if(k.length>0){var c=data[k];$.each(c,function(i,x){if(matchSubset(x.value,q)){csub.push(x);}});}}return csub;}else
 if(data[q]){return data[q];}else
 if(options.matchSubset){for(var i=q.length-1;i>=options.minChars;i--){var c=data[q.substr(0,i)];if(c){var csub=[];$.each(c,function(i,x){if(matchSubset(x.value,q)){csub[csub.length]=x;}});return csub;}}}return null;}};};$.Autocompleter.Select=function(options,input,select,config){var CLASSES={ACTIVE:"ac_over"};var listItems,active=-1,data,term="",needsInit=true,element,list;function init(){if(!needsInit)return;element=$("<div/>").hide().addClass(options.resultsClass).css("position","absolute").appendTo(document.body);list=$("<ul/>").appendTo(element).mouseover(function(event){if(target(event).nodeName&&target(event).nodeName.toUpperCase()=='LI'){active=$("li",list).removeClass(CLASSES.ACTIVE).index(target(event));$(target(event)).addClass(CLASSES.ACTIVE);}}).click(function(event){$(target(event)).addClass(CLASSES.ACTIVE);select();input.focus();return false;}).mousedown(function(){config.mouseDownOnSelect=true;}).mouseup(function(){config.mouseDownOnSelect=false;});if(options.width>0)element.css("width",options.width);needsInit=false;}function target(event){var element=event.target;while(element&&element.tagName.toUpperCase()!="LI")element=element.parentNode;if(!element)return[];return element;}function moveSelect(step){listItems.slice(active,active+1).removeClass(CLASSES.ACTIVE);movePosition(step);var activeItem=listItems.slice(active,active+1).addClass(CLASSES.ACTIVE);if(options.scroll){var offset=0;listItems.slice(0,active).each(function(){offset+=this.offsetHeight;});if((offset+activeItem[0].offsetHeight-list.scrollTop())>list[0].clientHeight){list.scrollTop(offset+activeItem[0].offsetHeight-list.innerHeight());}else if(offset<list.scrollTop()){list.scrollTop(offset);}}};function movePosition(step){active+=step;if(active<0){active=listItems.size()-1;}else if(active>=listItems.size()){active=0;}}function limitNumberOfItems(available){return options.max&&options.max<available?options.max:available;}function fillList(){list.empty();var max=limitNumberOfItems(data.length);for(var i=0;i<max;i++){if(!data[i])continue;var formatted=options.formatItem(data[i].data,i+1,max,data[i].value,term);if(formatted===false)continue;var li=$("<li/>").html(options.highlight(formatted,term)).addClass(i%2==0?"ac_even":"ac_odd").appendTo(list)[0];$.data(li,"ac_data",data[i]);}listItems=list.find("li");if(options.selectFirst){listItems.slice(0,1).addClass(CLASSES.ACTIVE);active=0;}if($.fn.bgiframe)list.bgiframe();}return{display:function(d,q){init();data=d;term=q;fillList();},next:function(){moveSelect(1);},prev:function(){moveSelect(-1);},pageUp:function(){if(active!=0&&active-8<0){moveSelect(-active);}else{moveSelect(-8);}},pageDown:function(){if(active!=listItems.size()-1&&active+8>listItems.size()){moveSelect(listItems.size()-1-active);}else{moveSelect(8);}},hide:function(){element&&element.hide();listItems&&listItems.removeClass(CLASSES.ACTIVE);active=-1;},visible:function(){return element&&element.is(":visible");},current:function(){return this.visible()&&(listItems.filter("."+CLASSES.ACTIVE)[0]||options.selectFirst&&listItems[0]);},show:function(){var offset=$(input).offset();element.css({width:typeof options.width=="string"||options.width>0?options.width:$(input).width(),top:offset.top+input.offsetHeight,left:offset.left}).show();if(options.scroll){list.scrollTop(0);list.css({maxHeight:options.scrollHeight,overflow:'auto'});if($.browser.msie&&typeof document.body.style.maxHeight==="undefined"){var listHeight=0;listItems.each(function(){listHeight+=this.offsetHeight;});var 
scrollbarsVisible=listHeight>options.scrollHeight;list.css('height',scrollbarsVisible?options.scrollHeight:listHeight);if(!scrollbarsVisible){listItems.width(list.width()-parseInt(listItems.css("padding-left"))-parseInt(listItems.css("padding-right")));}}}},selected:function(){var selected=listItems&&listItems.filter("."+CLASSES.ACTIVE).removeClass(CLASSES.ACTIVE);return selected&&selected.length&&$.data(selected[0],"ac_data");},emptyList:function(){list&&list.empty();},unbind:function(){element&&element.remove();}};};$.fn.selection=function(start,end){if(start!==undefined){return this.each(function(){if(this.createTextRange){var selRange=this.createTextRange();if(end===undefined||start==end){selRange.move("character",start);selRange.select();}else{selRange.collapse(true);selRange.moveStart("character",start);selRange.moveEnd("character",end);selRange.select();}}else if(this.setSelectionRange){this.setSelectionRange(start,end);}else if(this.selectionStart){this.selectionStart=start;this.selectionEnd=end;}});}var field=this[0];if(field.createTextRange){var range=document.selection.createRange(),orig=field.value,teststring="<->",textLength=range.text.length;range.text=teststring;var caretAt=field.value.indexOf(teststring);field.value=orig;this.selection(caretAt,caretAt+textLength);return{start:caretAt,end:caretAt+textLength}}else if(field.selectionStart!==undefined){return{start:field.selectionStart,end:field.selectionEnd}}};})(jQuery);
\ No newline at end of file
--- a/web/data/jquery.js	Thu May 06 08:24:46 2010 +0200
+++ b/web/data/jquery.js	Mon Jul 19 15:36:16 2010 +0200
@@ -2,17 +2,17 @@
  * jQuery JavaScript Library v1.3.2
  * http://jquery.com/
  *
  * Copyright (c) 2009 John Resig
  * Dual licensed under the MIT and GPL licenses.
  * http://docs.jquery.com/License
  *
  * Date: 2009-02-19 17:34:21 -0500 (Thu, 19 Feb 2009)
  * Revision: 6246
  */
 (function(){var l=this,g,y=l.jQuery,p=l.$,o=l.jQuery=l.$=function(E,F){return new o.fn.init(E,F)},D=/^[^<]*(<(.|\s)+>)[^>]*$|^#([\w-]+)$/,f=/^.[^:#\[\.,]*$/;o.fn=o.prototype={init:function(E,H){E=E||document;if(E.nodeType){this[0]=E;this.length=1;this.context=E;return this}if(typeof E==="string"){var G=D.exec(E);if(G&&(G[1]||!H)){if(G[1]){E=o.clean([G[1]],H)}else{var I=document.getElementById(G[3]);if(I&&I.id!=G[3]){return o().find(E)}var F=o(I||[]);F.context=document;F.selector=E;return F}}else{return o(H).find(E)}}else{if(o.isFunction(E)){return o(document).ready(E)}}if(E.selector&&E.context){this.selector=E.selector;this.context=E.context}return this.setArray(o.isArray(E)?E:o.makeArray(E))},selector:"",jquery:"1.3.2",size:function(){return this.length},get:function(E){return E===g?Array.prototype.slice.call(this):this[E]},pushStack:function(F,H,E){var G=o(F);G.prevObject=this;G.context=this.context;if(H==="find"){G.selector=this.selector+(this.selector?" ":"")+E}else{if(H){G.selector=this.selector+"."+H+"("+E+")"}}return G},setArray:function(E){this.length=0;Array.prototype.push.apply(this,E);return this},each:function(F,E){return o.each(this,F,E)},index:function(E){return o.inArray(E&&E.jquery?E[0]:E,this)},attr:function(F,H,G){var E=F;if(typeof F==="string"){if(H===g){return this[0]&&o[G||"attr"](this[0],F)}else{E={};E[F]=H}}return this.each(function(I){for(F in E){o.attr(G?this.style:this,F,o.prop(this,E[F],G,I,F))}})},css:function(E,F){if((E=="width"||E=="height")&&parseFloat(F)<0){F=g}return this.attr(E,F,"curCSS")},text:function(F){if(typeof F!=="object"&&F!=null){return this.empty().append((this[0]&&this[0].ownerDocument||document).createTextNode(F))}var E="";o.each(F||this,function(){o.each(this.childNodes,function(){if(this.nodeType!=8){E+=this.nodeType!=1?this.nodeValue:o.fn.text([this])}})});return E},wrapAll:function(E){if(this[0]){var F=o(E,this[0].ownerDocument).clone();if(this[0].parentNode){F.insertBefore(this[0])}F.map(function(){var G=this;while(G.firstChild){G=G.firstChild}return G}).append(this)}return this},wrapInner:function(E){return this.each(function(){o(this).contents().wrapAll(E)})},wrap:function(E){return this.each(function(){o(this).wrapAll(E)})},append:function(){return this.domManip(arguments,true,function(E){if(this.nodeType==1){this.appendChild(E)}})},prepend:function(){return this.domManip(arguments,true,function(E){if(this.nodeType==1){this.insertBefore(E,this.firstChild)}})},before:function(){return this.domManip(arguments,false,function(E){this.parentNode.insertBefore(E,this)})},after:function(){return this.domManip(arguments,false,function(E){this.parentNode.insertBefore(E,this.nextSibling)})},end:function(){return this.prevObject||o([])},push:[].push,sort:[].sort,splice:[].splice,find:function(E){if(this.length===1){var F=this.pushStack([],"find",E);F.length=0;o.find(E,this[0],F);return F}else{return this.pushStack(o.unique(o.map(this,function(G){return o.find(E,G)})),"find",E)}},clone:function(G){var E=this.map(function(){if(!o.support.noCloneEvent&&!o.isXMLDoc(this)){var I=this.outerHTML;if(!I){var J=this.ownerDocument.createElement("div");J.appendChild(this.cloneNode(true));I=J.innerHTML}return o.clean([I.replace(/ jQuery\d+="(?:\d+|null)"/g,"").replace(/^\s*/,"")])[0]}else{return this.cloneNode(true)}});if(G===true){var H=this.find("*").andSelf(),F=0;E.find("*").andSelf().each(function(){if(this.nodeName!==H[F].nodeName){return}var I=o.data(H[F],"events");for(var K in I){for(var J in I[K]){o.event.add(this,K,I[K][J],I[K][J].data)}}F++})}return 
E},filter:function(E){return this.pushStack(o.isFunction(E)&&o.grep(this,function(G,F){return E.call(G,F)})||o.multiFilter(E,o.grep(this,function(F){return F.nodeType===1})),"filter",E)},closest:function(E){var G=o.expr.match.POS.test(E)?o(E):null,F=0;return this.map(function(){var H=this;while(H&&H.ownerDocument){if(G?G.index(H)>-1:o(H).is(E)){o.data(H,"closest",F);return H}H=H.parentNode;F++}})},not:function(E){if(typeof E==="string"){if(f.test(E)){return this.pushStack(o.multiFilter(E,this,true),"not",E)}else{E=o.multiFilter(E,this)}}var F=E.length&&E[E.length-1]!==g&&!E.nodeType;return this.filter(function(){return F?o.inArray(this,E)<0:this!=E})},add:function(E){return this.pushStack(o.unique(o.merge(this.get(),typeof E==="string"?o(E):o.makeArray(E))))},is:function(E){return !!E&&o.multiFilter(E,this).length>0},hasClass:function(E){return !!E&&this.is("."+E)},val:function(K){if(K===g){var E=this[0];if(E){if(o.nodeName(E,"option")){return(E.attributes.value||{}).specified?E.value:E.text}if(o.nodeName(E,"select")){var I=E.selectedIndex,L=[],M=E.options,H=E.type=="select-one";if(I<0){return null}for(var F=H?I:0,J=H?I+1:M.length;F<J;F++){var G=M[F];if(G.selected){K=o(G).val();if(H){return K}L.push(K)}}return L}return(E.value||"").replace(/\r/g,"")}return g}if(typeof K==="number"){K+=""}return this.each(function(){if(this.nodeType!=1){return}if(o.isArray(K)&&/radio|checkbox/.test(this.type)){this.checked=(o.inArray(this.value,K)>=0||o.inArray(this.name,K)>=0)}else{if(o.nodeName(this,"select")){var N=o.makeArray(K);o("option",this).each(function(){this.selected=(o.inArray(this.value,N)>=0||o.inArray(this.text,N)>=0)});if(!N.length){this.selectedIndex=-1}}else{this.value=K}}})},html:function(E){return E===g?(this[0]?this[0].innerHTML.replace(/ jQuery\d+="(?:\d+|null)"/g,""):null):this.empty().append(E)},replaceWith:function(E){return this.after(E).remove()},eq:function(E){return this.slice(E,+E+1)},slice:function(){return this.pushStack(Array.prototype.slice.apply(this,arguments),"slice",Array.prototype.slice.call(arguments).join(","))},map:function(E){return this.pushStack(o.map(this,function(G,F){return E.call(G,F,G)}))},andSelf:function(){return this.add(this.prevObject)},domManip:function(J,M,L){if(this[0]){var I=(this[0].ownerDocument||this[0]).createDocumentFragment(),F=o.clean(J,(this[0].ownerDocument||this[0]),I),H=I.firstChild;if(H){for(var G=0,E=this.length;G<E;G++){L.call(K(this[G],H),this.length>1||G>0?I.cloneNode(true):I)}}if(F){o.each(F,z)}}return this;function K(N,O){return M&&o.nodeName(N,"table")&&o.nodeName(O,"tr")?(N.getElementsByTagName("tbody")[0]||N.appendChild(N.ownerDocument.createElement("tbody"))):N}}};o.fn.init.prototype=o.fn;function z(E,F){if(F.src){o.ajax({url:F.src,async:false,dataType:"script"})}else{o.globalEval(F.text||F.textContent||F.innerHTML||"")}if(F.parentNode){F.parentNode.removeChild(F)}}function e(){return +new Date}o.extend=o.fn.extend=function(){var J=arguments[0]||{},H=1,I=arguments.length,E=false,G;if(typeof J==="boolean"){E=J;J=arguments[1]||{};H=2}if(typeof J!=="object"&&!o.isFunction(J)){J={}}if(I==H){J=this;--H}for(;H<I;H++){if((G=arguments[H])!=null){for(var F in G){var K=J[F],L=G[F];if(J===L){continue}if(E&&L&&typeof L==="object"&&!L.nodeType){J[F]=o.extend(E,K||(L.length!=null?[]:{}),L)}else{if(L!==g){J[F]=L}}}}}return J};var b=/z-?index|font-?weight|opacity|zoom|line-?height/i,q=document.defaultView||{},s=Object.prototype.toString;o.extend({noConflict:function(E){l.$=p;if(E){l.jQuery=y}return o},isFunction:function(E){return 
s.call(E)==="[object Function]"},isArray:function(E){return s.call(E)==="[object Array]"},isXMLDoc:function(E){return E.nodeType===9&&E.documentElement.nodeName!=="HTML"||!!E.ownerDocument&&o.isXMLDoc(E.ownerDocument)},globalEval:function(G){if(G&&/\S/.test(G)){var F=document.getElementsByTagName("head")[0]||document.documentElement,E=document.createElement("script");E.type="text/javascript";if(o.support.scriptEval){E.appendChild(document.createTextNode(G))}else{E.text=G}F.insertBefore(E,F.firstChild);F.removeChild(E)}},nodeName:function(F,E){return F.nodeName&&F.nodeName.toUpperCase()==E.toUpperCase()},each:function(G,K,F){var E,H=0,I=G.length;if(F){if(I===g){for(E in G){if(K.apply(G[E],F)===false){break}}}else{for(;H<I;){if(K.apply(G[H++],F)===false){break}}}}else{if(I===g){for(E in G){if(K.call(G[E],E,G[E])===false){break}}}else{for(var J=G[0];H<I&&K.call(J,H,J)!==false;J=G[++H]){}}}return G},prop:function(H,I,G,F,E){if(o.isFunction(I)){I=I.call(H,F)}return typeof I==="number"&&G=="curCSS"&&!b.test(E)?I+"px":I},className:{add:function(E,F){o.each((F||"").split(/\s+/),function(G,H){if(E.nodeType==1&&!o.className.has(E.className,H)){E.className+=(E.className?" ":"")+H}})},remove:function(E,F){if(E.nodeType==1){E.className=F!==g?o.grep(E.className.split(/\s+/),function(G){return !o.className.has(F,G)}).join(" "):""}},has:function(F,E){return F&&o.inArray(E,(F.className||F).toString().split(/\s+/))>-1}},swap:function(H,G,I){var E={};for(var F in G){E[F]=H.style[F];H.style[F]=G[F]}I.call(H);for(var F in G){H.style[F]=E[F]}},css:function(H,F,J,E){if(F=="width"||F=="height"){var L,G={position:"absolute",visibility:"hidden",display:"block"},K=F=="width"?["Left","Right"]:["Top","Bottom"];function I(){L=F=="width"?H.offsetWidth:H.offsetHeight;if(E==="border"){return}o.each(K,function(){if(!E){L-=parseFloat(o.curCSS(H,"padding"+this,true))||0}if(E==="margin"){L+=parseFloat(o.curCSS(H,"margin"+this,true))||0}else{L-=parseFloat(o.curCSS(H,"border"+this+"Width",true))||0}})}if(H.offsetWidth!==0){I()}else{o.swap(H,G,I)}return Math.max(0,Math.round(L))}return o.curCSS(H,F,J)},curCSS:function(I,F,G){var L,E=I.style;if(F=="opacity"&&!o.support.opacity){L=o.attr(E,"opacity");return L==""?"1":L}if(F.match(/float/i)){F=w}if(!G&&E&&E[F]){L=E[F]}else{if(q.getComputedStyle){if(F.match(/float/i)){F="float"}F=F.replace(/([A-Z])/g,"-$1").toLowerCase();var M=q.getComputedStyle(I,null);if(M){L=M.getPropertyValue(F)}if(F=="opacity"&&L==""){L="1"}}else{if(I.currentStyle){var J=F.replace(/\-(\w)/g,function(N,O){return O.toUpperCase()});L=I.currentStyle[F]||I.currentStyle[J];if(!/^\d+(px)?$/i.test(L)&&/^\d/.test(L)){var H=E.left,K=I.runtimeStyle.left;I.runtimeStyle.left=I.currentStyle.left;E.left=L||0;L=E.pixelLeft+"px";E.left=H;I.runtimeStyle.left=K}}}}return L},clean:function(F,K,I){K=K||document;if(typeof K.createElement==="undefined"){K=K.ownerDocument||K[0]&&K[0].ownerDocument||document}if(!I&&F.length===1&&typeof F[0]==="string"){var H=/^<(\w+)\s*\/?>$/.exec(F[0]);if(H){return[K.createElement(H[1])]}}var G=[],E=[],L=K.createElement("div");o.each(F,function(P,S){if(typeof S==="number"){S+=""}if(!S){return}if(typeof S==="string"){S=S.replace(/(<(\w+)[^>]*?)\/>/g,function(U,V,T){return T.match(/^(abbr|br|col|img|input|link|meta|param|hr|area|embed)$/i)?U:V+"></"+T+">"});var O=S.replace(/^\s+/,"").substring(0,10).toLowerCase();var Q=!O.indexOf("<opt")&&[1,"<select 
multiple='multiple'>","</select>"]||!O.indexOf("<leg")&&[1,"<fieldset>","</fieldset>"]||O.match(/^<(thead|tbody|tfoot|colg|cap)/)&&[1,"<table>","</table>"]||!O.indexOf("<tr")&&[2,"<table><tbody>","</tbody></table>"]||(!O.indexOf("<td")||!O.indexOf("<th"))&&[3,"<table><tbody><tr>","</tr></tbody></table>"]||!O.indexOf("<col")&&[2,"<table><tbody></tbody><colgroup>","</colgroup></table>"]||!o.support.htmlSerialize&&[1,"div<div>","</div>"]||[0,"",""];L.innerHTML=Q[1]+S+Q[2];while(Q[0]--){L=L.lastChild}if(!o.support.tbody){var R=/<tbody/i.test(S),N=!O.indexOf("<table")&&!R?L.firstChild&&L.firstChild.childNodes:Q[1]=="<table>"&&!R?L.childNodes:[];for(var M=N.length-1;M>=0;--M){if(o.nodeName(N[M],"tbody")&&!N[M].childNodes.length){N[M].parentNode.removeChild(N[M])}}}if(!o.support.leadingWhitespace&&/^\s/.test(S)){L.insertBefore(K.createTextNode(S.match(/^\s*/)[0]),L.firstChild)}S=o.makeArray(L.childNodes)}if(S.nodeType){G.push(S)}else{G=o.merge(G,S)}});if(I){for(var J=0;G[J];J++){if(o.nodeName(G[J],"script")&&(!G[J].type||G[J].type.toLowerCase()==="text/javascript")){E.push(G[J].parentNode?G[J].parentNode.removeChild(G[J]):G[J])}else{if(G[J].nodeType===1){G.splice.apply(G,[J+1,0].concat(o.makeArray(G[J].getElementsByTagName("script"))))}I.appendChild(G[J])}}return E}return G},attr:function(J,G,K){if(!J||J.nodeType==3||J.nodeType==8){return g}var H=!o.isXMLDoc(J),L=K!==g;G=H&&o.props[G]||G;if(J.tagName){var F=/href|src|style/.test(G);if(G=="selected"&&J.parentNode){J.parentNode.selectedIndex}if(G in J&&H&&!F){if(L){if(G=="type"&&o.nodeName(J,"input")&&J.parentNode){throw"type property can't be changed"}J[G]=K}if(o.nodeName(J,"form")&&J.getAttributeNode(G)){return J.getAttributeNode(G).nodeValue}if(G=="tabIndex"){var I=J.getAttributeNode("tabIndex");return I&&I.specified?I.value:J.nodeName.match(/(button|input|object|select|textarea)/i)?0:J.nodeName.match(/^(a|area)$/i)&&J.href?0:g}return J[G]}if(!o.support.style&&H&&G=="style"){return o.attr(J.style,"cssText",K)}if(L){J.setAttribute(G,""+K)}var E=!o.support.hrefNormalized&&H&&F?J.getAttribute(G,2):J.getAttribute(G);return E===null?g:E}if(!o.support.opacity&&G=="opacity"){if(L){J.zoom=1;J.filter=(J.filter||"").replace(/alpha\([^)]*\)/,"")+(parseInt(K)+""=="NaN"?"":"alpha(opacity="+K*100+")")}return J.filter&&J.filter.indexOf("opacity=")>=0?(parseFloat(J.filter.match(/opacity=([^)]*)/)[1])/100)+"":""}G=G.replace(/-([a-z])/ig,function(M,N){return N.toUpperCase()});if(L){J[G]=K}return J[G]},trim:function(E){return(E||"").replace(/^\s+|\s+$/g,"")},makeArray:function(G){var E=[];if(G!=null){var F=G.length;if(F==null||typeof G==="string"||o.isFunction(G)||G.setInterval){E[0]=G}else{while(F){E[--F]=G[F]}}}return E},inArray:function(G,H){for(var E=0,F=H.length;E<F;E++){if(H[E]===G){return E}}return -1},merge:function(H,E){var F=0,G,I=H.length;if(!o.support.getAll){while((G=E[F++])!=null){if(G.nodeType!=8){H[I++]=G}}}else{while((G=E[F++])!=null){H[I++]=G}}return H},unique:function(K){var F=[],E={};try{for(var G=0,H=K.length;G<H;G++){var J=o.data(K[G]);if(!E[J]){E[J]=true;F.push(K[G])}}}catch(I){F=K}return F},grep:function(F,J,E){var G=[];for(var H=0,I=F.length;H<I;H++){if(!E!=!J(F[H],H)){G.push(F[H])}}return G},map:function(E,J){var F=[];for(var G=0,H=E.length;G<H;G++){var I=J(E[G],G);if(I!=null){F[F.length]=I}}return F.concat.apply([],F)}});var C=navigator.userAgent.toLowerCase();o.browser={version:(C.match(/.+(?:rv|it|ra|ie)[\/: 
]([\d.]+)/)||[0,"0"])[1],safari:/webkit/.test(C),opera:/opera/.test(C),msie:/msie/.test(C)&&!/opera/.test(C),mozilla:/mozilla/.test(C)&&!/(compatible|webkit)/.test(C)};o.each({parent:function(E){return E.parentNode},parents:function(E){return o.dir(E,"parentNode")},next:function(E){return o.nth(E,2,"nextSibling")},prev:function(E){return o.nth(E,2,"previousSibling")},nextAll:function(E){return o.dir(E,"nextSibling")},prevAll:function(E){return o.dir(E,"previousSibling")},siblings:function(E){return o.sibling(E.parentNode.firstChild,E)},children:function(E){return o.sibling(E.firstChild)},contents:function(E){return o.nodeName(E,"iframe")?E.contentDocument||E.contentWindow.document:o.makeArray(E.childNodes)}},function(E,F){o.fn[E]=function(G){var H=o.map(this,F);if(G&&typeof G=="string"){H=o.multiFilter(G,H)}return this.pushStack(o.unique(H),E,G)}});o.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(E,F){o.fn[E]=function(G){var J=[],L=o(G);for(var K=0,H=L.length;K<H;K++){var I=(K>0?this.clone(true):this).get();o.fn[F].apply(o(L[K]),I);J=J.concat(I)}return this.pushStack(J,E,G)}});o.each({removeAttr:function(E){o.attr(this,E,"");if(this.nodeType==1){this.removeAttribute(E)}},addClass:function(E){o.className.add(this,E)},removeClass:function(E){o.className.remove(this,E)},toggleClass:function(F,E){if(typeof E!=="boolean"){E=!o.className.has(this,F)}o.className[E?"add":"remove"](this,F)},remove:function(E){if(!E||o.filter(E,[this]).length){o("*",this).add([this]).each(function(){o.event.remove(this);o.removeData(this)});if(this.parentNode){this.parentNode.removeChild(this)}}},empty:function(){o(this).children().remove();while(this.firstChild){this.removeChild(this.firstChild)}}},function(E,F){o.fn[E]=function(){return this.each(F,arguments)}});function j(E,F){return E[0]&&parseInt(o.curCSS(E[0],F,true),10)||0}var h="jQuery"+e(),v=0,A={};o.extend({cache:{},data:function(F,E,G){F=F==l?A:F;var H=F[h];if(!H){H=F[h]=++v}if(E&&!o.cache[H]){o.cache[H]={}}if(G!==g){o.cache[H][E]=G}return E?o.cache[H][E]:H},removeData:function(F,E){F=F==l?A:F;var H=F[h];if(E){if(o.cache[H]){delete o.cache[H][E];E="";for(E in o.cache[H]){break}if(!E){o.removeData(F)}}}else{try{delete F[h]}catch(G){if(F.removeAttribute){F.removeAttribute(h)}}delete o.cache[H]}},queue:function(F,E,H){if(F){E=(E||"fx")+"queue";var G=o.data(F,E);if(!G||o.isArray(H)){G=o.data(F,E,o.makeArray(H))}else{if(H){G.push(H)}}}return G},dequeue:function(H,G){var E=o.queue(H,G),F=E.shift();if(!G||G==="fx"){F=E[0]}if(F!==g){F.call(H)}}});o.fn.extend({data:function(E,G){var H=E.split(".");H[1]=H[1]?"."+H[1]:"";if(G===g){var F=this.triggerHandler("getData"+H[1]+"!",[H[0]]);if(F===g&&this.length){F=o.data(this[0],E)}return F===g&&H[1]?this.data(H[0]):F}else{return this.trigger("setData"+H[1]+"!",[H[0],G]).each(function(){o.data(this,E,G)})}},removeData:function(E){return this.each(function(){o.removeData(this,E)})},queue:function(E,F){if(typeof E!=="string"){F=E;E="fx"}if(F===g){return o.queue(this[0],E)}return this.each(function(){var G=o.queue(this,E,F);if(E=="fx"&&G.length==1){G[0].call(this)}})},dequeue:function(E){return this.each(function(){o.dequeue(this,E)})}});
 /*
  * Sizzle CSS Selector Engine - v0.9.3
  *  Copyright 2009, The Dojo Foundation
  *  Released under the MIT, BSD, and GPL Licenses.
  *  More information: http://sizzlejs.com/
  */
--- a/web/data/jquery.timePicker.js	Thu May 06 08:24:46 2010 +0200
+++ b/web/data/jquery.timePicker.js	Mon Jul 19 15:36:16 2010 +0200
@@ -4,7 +4,7 @@
  * copyright (c) 2006 Sam Collett (http://www.texotela.co.uk)
  *
  * Dual licensed under the MIT and GPL licenses.
  * Copyright (c) 2009 Anders Fajerson
  * @name     timePicker
  * @version  0.2
  * @author   Anders Fajerson (http://perifer.se)
--- a/web/data/jquery.treeview.js	Thu May 06 08:24:46 2010 +0200
+++ b/web/data/jquery.treeview.js	Mon Jul 19 15:36:16 2010 +0200
@@ -10,7 +10,7 @@
  *   http://www.opensource.org/licenses/mit-license.php
  *   http://www.gnu.org/licenses/gpl.html
  *
- * Revision: $Id: jquery.treeview.js 4684 2008-02-07 19:08:06Z joern.zaefferer $
- * updated by Aurelien Campeas, 2009-09-01, to handle top-level ajax loads
+ * Revision: $Id: jquery.treeview.js 4684 2010-02-07 19:08:06Z joern.zaefferer $
+ * updated by Aurelien Campeas, 2010-09-01, to handle top-level ajax loads
  *
  */;(function($){$.extend($.fn,{swapClass:function(c1,c2){var c1Elements=this.filter('.'+c1);this.filter('.'+c2).removeClass(c2).addClass(c1);c1Elements.removeClass(c1).addClass(c2);return this;},replaceClass:function(c1,c2){return this.filter('.'+c1).removeClass(c1).addClass(c2).end();},hoverClass:function(className){className=className||"hover";return this.hover(function(){$(this).addClass(className);},function(){$(this).removeClass(className);});},heightToggle:function(animated,callback){animated?this.animate({height:"toggle"},animated,callback):this.each(function(){jQuery(this)[jQuery(this).is(":hidden")?"show":"hide"]();if(callback)callback.apply(this,arguments);});},heightHide:function(animated,callback){if(animated){this.animate({height:"hide"},animated,callback);}else{this.hide();if(callback)this.each(callback);}},prepareBranches:function(settings){if(!settings.prerendered){this.filter(":last-child:not(ul)").addClass(CLASSES.last);this.filter((settings.collapsed?"":"."+CLASSES.closed)+":not(."+CLASSES.open+")").find(">ul").hide();}return this.filter(":has(>ul)");},applyClasses:function(settings,toggler){this.filter(":has(>ul):not(:has(>a))").find(">span").click(function(event){toggler.apply($(this).next());}).add($("a",this)).hoverClass();if(!settings.prerendered){this.filter(":has(>ul:hidden)").addClass(CLASSES.expandable).replaceClass(CLASSES.last,CLASSES.lastExpandable);this.not(":has(>ul:hidden)").addClass(CLASSES.collapsable).replaceClass(CLASSES.last,CLASSES.lastCollapsable);this.prepend("<div class=\""+CLASSES.hitarea+"\"/>").find("div."+CLASSES.hitarea).each(function(){var classes="";$.each($(this).parent().attr("class").split(" "),function(){classes+=this+"-hitarea ";});$(this).addClass(classes);});}this.find("div."+CLASSES.hitarea).click(toggler);},treeview:function(settings){if(this.attr('cubicweb:type')=='prepared-treeview'){return this;}this.attr('cubicweb:type','prepared-treeview');settings=$.extend({cookieId:"treeview"},settings);if(settings.add){return this.trigger("add",[settings.add]);}if(settings.toggle){var callback=settings.toggle;settings.toggle=function(){return callback.apply($(this).parent()[0],arguments);};}function treeController(tree,control){function handler(filter){return function(){toggler.apply($("div."+CLASSES.hitarea,tree).filter(function(){return filter?$(this).parent("."+filter).length:true;}));return false;};}$("a:eq(0)",control).click(handler(CLASSES.collapsable));$("a:eq(1)",control).click(handler(CLASSES.expandable));$("a:eq(2)",control).click(handler());}function toggler(){$(this).parent().find(">.hitarea").swapClass(CLASSES.collapsableHitarea,CLASSES.expandableHitarea).swapClass(CLASSES.lastCollapsableHitarea,CLASSES.lastExpandableHitarea).end().swapClass(CLASSES.collapsable,CLASSES.expandable).swapClass(CLASSES.lastCollapsable,CLASSES.lastExpandable).find(">ul").heightToggle(settings.animated,settings.toggle);if(settings.unique){$(this).parent().siblings().find(">.hitarea").replaceClass(CLASSES.collapsableHitarea,CLASSES.expandableHitarea).replaceClass(CLASSES.lastCollapsableHitarea,CLASSES.lastExpandableHitarea).end().replaceClass(CLASSES.collapsable,CLASSES.expandable).replaceClass(CLASSES.lastCollapsable,CLASSES.lastExpandable).find(">ul").heightHide(settings.animated,settings.toggle);}}function serialize(){function binary(arg){return arg?1:0;}var data=[];branches.each(function(i,e){data[i]=$(e).is(":has(>ul:visible)")?1:0;});$.cookie(settings.cookieId,data.join(""));}function deserialize(){var 
stored=$.cookie(settings.cookieId);if(stored){var data=stored.split("");branches.each(function(i,e){$(e).find(">ul")[parseInt(data[i])?"show":"hide"]();});}}this.addClass("treeview");var branches=this.find("li").prepareBranches(settings);switch(settings.persist){case"cookie":var toggleCallback=settings.toggle;settings.toggle=function(){serialize();if(toggleCallback){toggleCallback.apply(this,arguments);}};deserialize();break;case"location":var current=this.find("a").filter(function(){return this.href.toLowerCase()==location.href.toLowerCase();});if(current.length){current.addClass("selected").parents("ul, li").add(current.next()).show();}break;}branches.applyClasses(settings,toggler);if(settings.control){treeController(this,settings.control);$(settings.control).show();}return this.bind("add",function(event,branches){$(branches).prev().removeClass(CLASSES.last).removeClass(CLASSES.lastCollapsable).removeClass(CLASSES.lastExpandable).find(">.hitarea").removeClass(CLASSES.lastCollapsableHitarea).removeClass(CLASSES.lastExpandableHitarea);$(branches).find("li").andSelf().prepareBranches(settings).applyClasses(settings,toggler);});}});var CLASSES=$.fn.treeview.classes={open:"open",closed:"closed",expandable:"expandable",expandableHitarea:"expandable-hitarea",lastExpandableHitarea:"lastExpandable-hitarea",collapsable:"collapsable",collapsableHitarea:"collapsable-hitarea",lastCollapsableHitarea:"lastCollapsable-hitarea",lastCollapsable:"lastCollapsable",lastExpandable:"lastExpandable",last:"last",hitarea:"hitarea"};$.fn.Treeview=$.fn.treeview;})(jQuery);
\ No newline at end of file
--- a/web/data/jquery.ui.css	Thu May 06 08:24:46 2010 +0200
+++ b/web/data/jquery.ui.css	Mon Jul 19 15:36:16 2010 +0200
@@ -1,6 +1,6 @@
 /*
 * jQuery UI CSS Framework
-* Copyright (c) 2009 AUTHORS.txt (http://jqueryui.com/about)
+* Copyright (c) 2010 AUTHORS.txt (http://jqueryui.com/about)
 * Dual licensed under the MIT (MIT-LICENSE.txt) and GPL (GPL-LICENSE.txt) licenses.
 */
 
@@ -40,7 +40,7 @@
 
 /*
 * jQuery UI CSS Framework
-* Copyright (c) 2009 AUTHORS.txt (http://jqueryui.com/about)
+* Copyright (c) 2010 AUTHORS.txt (http://jqueryui.com/about)
 * Dual licensed under the MIT (MIT-LICENSE.txt) and GPL (GPL-LICENSE.txt) licenses.
 * To view and modify this theme, visit http://jqueryui.com/themeroller/?ffDefault=Trebuchet%20MS,%20Tahoma,%20Verdana,%20Arial,%20sans-serif&fwDefault=bold&fsDefault=1.1em&cornerRadius=4px&bgColorHeader=f6a828&bgTextureHeader=12_gloss_wave.png&bgImgOpacityHeader=35&borderColorHeader=e78f08&fcHeader=ffffff&iconColorHeader=ffffff&bgColorContent=eeeeee&bgTextureContent=03_highlight_soft.png&bgImgOpacityContent=100&borderColorContent=dddddd&fcContent=333333&iconColorContent=222222&bgColorDefault=f6f6f6&bgTextureDefault=02_glass.png&bgImgOpacityDefault=100&borderColorDefault=cccccc&fcDefault=1c94c4&iconColorDefault=ef8c08&bgColorHover=fdf5ce&bgTextureHover=02_glass.png&bgImgOpacityHover=100&borderColorHover=fbcb09&fcHover=c77405&iconColorHover=ef8c08&bgColorActive=ffffff&bgTextureActive=02_glass.png&bgImgOpacityActive=65&borderColorActive=fbd850&fcActive=eb8f00&iconColorActive=ef8c08&bgColorHighlight=ffe45c&bgTextureHighlight=03_highlight_soft.png&bgImgOpacityHighlight=75&borderColorHighlight=fed22f&fcHighlight=363636&iconColorHighlight=228ef1&bgColorError=b81900&bgTextureError=08_diagonals_thick.png&bgImgOpacityError=18&borderColorError=cd0a0a&fcError=ffffff&iconColorError=ffd27a&bgColorOverlay=666666&bgTextureOverlay=08_diagonals_thick.png&bgImgOpacityOverlay=20&opacityOverlay=50&bgColorShadow=000000&bgTextureShadow=01_flat.png&bgImgOpacityShadow=10&opacityShadow=20&thicknessShadow=5px&offsetTopShadow=-5px&offsetLeftShadow=-5px&cornerRadiusShadow=5px
 */
--- a/web/data/jquery.ui.js	Thu May 06 08:24:46 2010 +0200
+++ b/web/data/jquery.ui.js	Mon Jul 19 15:36:16 2010 +0200
@@ -1,7 +1,7 @@
 /*
  * jQuery UI 1.7.2
  *
- * Copyright (c) 2009 AUTHORS.txt (http://jqueryui.com/about)
+ * Copyright (c) 2010 AUTHORS.txt (http://jqueryui.com/about)
  * Dual licensed under the MIT (MIT-LICENSE.txt)
  * and GPL (GPL-LICENSE.txt) licenses.
  *
@@ -10,7 +10,7 @@
 jQuery.ui||(function(c){var i=c.fn.remove,d=c.browser.mozilla&&(parseFloat(c.browser.version)<1.9);c.ui={version:"1.7.2",plugin:{add:function(k,l,n){var m=c.ui[k].prototype;for(var j in n){m.plugins[j]=m.plugins[j]||[];m.plugins[j].push([l,n[j]])}},call:function(j,l,k){var n=j.plugins[l];if(!n||!j.element[0].parentNode){return}for(var m=0;m<n.length;m++){if(j.options[n[m][0]]){n[m][1].apply(j.element,k)}}}},contains:function(k,j){return document.compareDocumentPosition?k.compareDocumentPosition(j)&16:k!==j&&k.contains(j)},hasScroll:function(m,k){if(c(m).css("overflow")=="hidden"){return false}var j=(k&&k=="left")?"scrollLeft":"scrollTop",l=false;if(m[j]>0){return true}m[j]=1;l=(m[j]>0);m[j]=0;return l},isOverAxis:function(k,j,l){return(k>j)&&(k<(j+l))},isOver:function(o,k,n,m,j,l){return c.ui.isOverAxis(o,n,j)&&c.ui.isOverAxis(k,m,l)},keyCode:{BACKSPACE:8,CAPS_LOCK:20,COMMA:188,CONTROL:17,DELETE:46,DOWN:40,END:35,ENTER:13,ESCAPE:27,HOME:36,INSERT:45,LEFT:37,NUMPAD_ADD:107,NUMPAD_DECIMAL:110,NUMPAD_DIVIDE:111,NUMPAD_ENTER:108,NUMPAD_MULTIPLY:106,NUMPAD_SUBTRACT:109,PAGE_DOWN:34,PAGE_UP:33,PERIOD:190,RIGHT:39,SHIFT:16,SPACE:32,TAB:9,UP:38}};if(d){var f=c.attr,e=c.fn.removeAttr,h="http://www.w3.org/2005/07/aaa",a=/^aria-/,b=/^wairole:/;c.attr=function(k,j,l){var m=l!==undefined;return(j=="role"?(m?f.call(this,k,j,"wairole:"+l):(f.apply(this,arguments)||"").replace(b,"")):(a.test(j)?(m?k.setAttributeNS(h,j.replace(a,"aaa:"),l):f.call(this,k,j.replace(a,"aaa:"))):f.apply(this,arguments)))};c.fn.removeAttr=function(j){return(a.test(j)?this.each(function(){this.removeAttributeNS(h,j.replace(a,""))}):e.call(this,j))}}c.fn.extend({remove:function(){c("*",this).add(this).each(function(){c(this).triggerHandler("remove")});return i.apply(this,arguments)},enableSelection:function(){return this.attr("unselectable","off").css("MozUserSelect","").unbind("selectstart.ui")},disableSelection:function(){return this.attr("unselectable","on").css("MozUserSelect","none").bind("selectstart.ui",function(){return false})},scrollParent:function(){var j;if((c.browser.msie&&(/(static|relative)/).test(this.css("position")))||(/absolute/).test(this.css("position"))){j=this.parents().filter(function(){return(/(relative|absolute|fixed)/).test(c.curCSS(this,"position",1))&&(/(auto|scroll)/).test(c.curCSS(this,"overflow",1)+c.curCSS(this,"overflow-y",1)+c.curCSS(this,"overflow-x",1))}).eq(0)}else{j=this.parents().filter(function(){return(/(auto|scroll)/).test(c.curCSS(this,"overflow",1)+c.curCSS(this,"overflow-y",1)+c.curCSS(this,"overflow-x",1))}).eq(0)}return(/fixed/).test(this.css("position"))||!j.length?c(document):j}});c.extend(c.expr[":"],{data:function(l,k,j){return !!c.data(l,j[3])},focusable:function(k){var l=k.nodeName.toLowerCase(),j=c.attr(k,"tabindex");return(/input|select|textarea|button|object/.test(l)?!k.disabled:"a"==l||"area"==l?k.href||!isNaN(j):!isNaN(j))&&!c(k)["area"==l?"parents":"closest"](":hidden").length},tabbable:function(k){var j=c.attr(k,"tabindex");return(isNaN(j)||j>=0)&&c(k).is(":focusable")}});function g(m,n,o,l){function k(q){var p=c[m][n][q]||[];return(typeof p=="string"?p.split(/,?\s+/):p)}var j=k("getter");if(l.length==1&&typeof l[0]=="string"){j=j.concat(k("getterSetter"))}return(c.inArray(o,j)!=-1)}c.widget=function(k,j){var l=k.split(".")[0];k=k.split(".")[1];c.fn[k]=function(p){var n=(typeof p=="string"),o=Array.prototype.slice.call(arguments,1);if(n&&p.substring(0,1)=="_"){return this}if(n&&g(l,k,p,o)){var m=c.data(this[0],k);return(m?m[p].apply(m,o):undefined)}return 
this.each(function(){var q=c.data(this,k);(!q&&!n&&c.data(this,k,new c[l][k](this,p))._init());(q&&n&&c.isFunction(q[p])&&q[p].apply(q,o))})};c[l]=c[l]||{};c[l][k]=function(o,n){var m=this;this.namespace=l;this.widgetName=k;this.widgetEventPrefix=c[l][k].eventPrefix||k;this.widgetBaseClass=l+"-"+k;this.options=c.extend({},c.widget.defaults,c[l][k].defaults,c.metadata&&c.metadata.get(o)[k],n);this.element=c(o).bind("setData."+k,function(q,p,r){if(q.target==o){return m._setData(p,r)}}).bind("getData."+k,function(q,p){if(q.target==o){return m._getData(p)}}).bind("remove",function(){return m.destroy()})};c[l][k].prototype=c.extend({},c.widget.prototype,j);c[l][k].getterSetter="option"};c.widget.prototype={_init:function(){},destroy:function(){this.element.removeData(this.widgetName).removeClass(this.widgetBaseClass+"-disabled "+this.namespace+"-state-disabled").removeAttr("aria-disabled")},option:function(l,m){var k=l,j=this;if(typeof l=="string"){if(m===undefined){return this._getData(l)}k={};k[l]=m}c.each(k,function(n,o){j._setData(n,o)})},_getData:function(j){return this.options[j]},_setData:function(j,k){this.options[j]=k;if(j=="disabled"){this.element[k?"addClass":"removeClass"](this.widgetBaseClass+"-disabled "+this.namespace+"-state-disabled").attr("aria-disabled",k)}},enable:function(){this._setData("disabled",false)},disable:function(){this._setData("disabled",true)},_trigger:function(l,m,n){var p=this.options[l],j=(l==this.widgetEventPrefix?l:this.widgetEventPrefix+l);m=c.Event(m);m.type=j;if(m.originalEvent){for(var k=c.event.props.length,o;k;){o=c.event.props[--k];m[o]=m.originalEvent[o]}}this.element.trigger(m,n);return !(c.isFunction(p)&&p.call(this.element[0],m,n)===false||m.isDefaultPrevented())}};c.widget.defaults={disabled:false};c.ui.mouse={_mouseInit:function(){var j=this;this.element.bind("mousedown."+this.widgetName,function(k){return j._mouseDown(k)}).bind("click."+this.widgetName,function(k){if(j._preventClickEvent){j._preventClickEvent=false;k.stopImmediatePropagation();return false}});if(c.browser.msie){this._mouseUnselectable=this.element.attr("unselectable");this.element.attr("unselectable","on")}this.started=false},_mouseDestroy:function(){this.element.unbind("."+this.widgetName);(c.browser.msie&&this.element.attr("unselectable",this._mouseUnselectable))},_mouseDown:function(l){l.originalEvent=l.originalEvent||{};if(l.originalEvent.mouseHandled){return}(this._mouseStarted&&this._mouseUp(l));this._mouseDownEvent=l;var k=this,m=(l.which==1),j=(typeof this.options.cancel=="string"?c(l.target).parents().add(l.target).filter(this.options.cancel).length:false);if(!m||j||!this._mouseCapture(l)){return true}this.mouseDelayMet=!this.options.delay;if(!this.mouseDelayMet){this._mouseDelayTimer=setTimeout(function(){k.mouseDelayMet=true},this.options.delay)}if(this._mouseDistanceMet(l)&&this._mouseDelayMet(l)){this._mouseStarted=(this._mouseStart(l)!==false);if(!this._mouseStarted){l.preventDefault();return true}}this._mouseMoveDelegate=function(n){return k._mouseMove(n)};this._mouseUpDelegate=function(n){return k._mouseUp(n)};c(document).bind("mousemove."+this.widgetName,this._mouseMoveDelegate).bind("mouseup."+this.widgetName,this._mouseUpDelegate);(c.browser.safari||l.preventDefault());l.originalEvent.mouseHandled=true;return true},_mouseMove:function(j){if(c.browser.msie&&!j.button){return this._mouseUp(j)}if(this._mouseStarted){this._mouseDrag(j);return 
j.preventDefault()}if(this._mouseDistanceMet(j)&&this._mouseDelayMet(j)){this._mouseStarted=(this._mouseStart(this._mouseDownEvent,j)!==false);(this._mouseStarted?this._mouseDrag(j):this._mouseUp(j))}return !this._mouseStarted},_mouseUp:function(j){c(document).unbind("mousemove."+this.widgetName,this._mouseMoveDelegate).unbind("mouseup."+this.widgetName,this._mouseUpDelegate);if(this._mouseStarted){this._mouseStarted=false;this._preventClickEvent=(j.target==this._mouseDownEvent.target);this._mouseStop(j)}return false},_mouseDistanceMet:function(j){return(Math.max(Math.abs(this._mouseDownEvent.pageX-j.pageX),Math.abs(this._mouseDownEvent.pageY-j.pageY))>=this.options.distance)},_mouseDelayMet:function(j){return this.mouseDelayMet},_mouseStart:function(j){},_mouseDrag:function(j){},_mouseStop:function(j){},_mouseCapture:function(j){return true}};c.ui.mouse.defaults={cancel:null,distance:1,delay:0}})(jQuery);;/*
  * jQuery UI Draggable 1.7.2
  *
- * Copyright (c) 2009 AUTHORS.txt (http://jqueryui.com/about)
+ * Copyright (c) 2010 AUTHORS.txt (http://jqueryui.com/about)
  * Dual licensed under the MIT (MIT-LICENSE.txt)
  * and GPL (GPL-LICENSE.txt) licenses.
  *
@@ -22,7 +22,7 @@
 (function(a){a.widget("ui.draggable",a.extend({},a.ui.mouse,{_init:function(){if(this.options.helper=="original"&&!(/^(?:r|a|f)/).test(this.element.css("position"))){this.element[0].style.position="relative"}(this.options.addClasses&&this.element.addClass("ui-draggable"));(this.options.disabled&&this.element.addClass("ui-draggable-disabled"));this._mouseInit()},destroy:function(){if(!this.element.data("draggable")){return}this.element.removeData("draggable").unbind(".draggable").removeClass("ui-draggable ui-draggable-dragging ui-draggable-disabled");this._mouseDestroy()},_mouseCapture:function(b){var c=this.options;if(this.helper||c.disabled||a(b.target).is(".ui-resizable-handle")){return false}this.handle=this._getHandle(b);if(!this.handle){return false}return true},_mouseStart:function(b){var c=this.options;this.helper=this._createHelper(b);this._cacheHelperProportions();if(a.ui.ddmanager){a.ui.ddmanager.current=this}this._cacheMargins();this.cssPosition=this.helper.css("position");this.scrollParent=this.helper.scrollParent();this.offset=this.element.offset();this.offset={top:this.offset.top-this.margins.top,left:this.offset.left-this.margins.left};a.extend(this.offset,{click:{left:b.pageX-this.offset.left,top:b.pageY-this.offset.top},parent:this._getParentOffset(),relative:this._getRelativeOffset()});this.originalPosition=this._generatePosition(b);this.originalPageX=b.pageX;this.originalPageY=b.pageY;if(c.cursorAt){this._adjustOffsetFromHelper(c.cursorAt)}if(c.containment){this._setContainment()}this._trigger("start",b);this._cacheHelperProportions();if(a.ui.ddmanager&&!c.dropBehaviour){a.ui.ddmanager.prepareOffsets(this,b)}this.helper.addClass("ui-draggable-dragging");this._mouseDrag(b,true);return true},_mouseDrag:function(b,d){this.position=this._generatePosition(b);this.positionAbs=this._convertPositionTo("absolute");if(!d){var c=this._uiHash();this._trigger("drag",b,c);this.position=c.position}if(!this.options.axis||this.options.axis!="y"){this.helper[0].style.left=this.position.left+"px"}if(!this.options.axis||this.options.axis!="x"){this.helper[0].style.top=this.position.top+"px"}if(a.ui.ddmanager){a.ui.ddmanager.drag(this,b)}return false},_mouseStop:function(c){var d=false;if(a.ui.ddmanager&&!this.options.dropBehaviour){d=a.ui.ddmanager.drop(this,c)}if(this.dropped){d=this.dropped;this.dropped=false}if((this.options.revert=="invalid"&&!d)||(this.options.revert=="valid"&&d)||this.options.revert===true||(a.isFunction(this.options.revert)&&this.options.revert.call(this.element,d))){var b=this;a(this.helper).animate(this.originalPosition,parseInt(this.options.revertDuration,10),function(){b._trigger("stop",c);b._clear()})}else{this._trigger("stop",c);this._clear()}return false},_getHandle:function(b){var c=!this.options.handle||!a(this.options.handle,this.element).length?true:false;a(this.options.handle,this.element).find("*").andSelf().each(function(){if(this==b.target){c=true}});return c},_createHelper:function(c){var d=this.options;var b=a.isFunction(d.helper)?a(d.helper.apply(this.element[0],[c])):(d.helper=="clone"?this.element.clone():this.element);if(!b.parents("body").length){b.appendTo((d.appendTo=="parent"?this.element[0].parentNode:d.appendTo))}if(b[0]!=this.element[0]&&!(/(fixed|absolute)/).test(b.css("position"))){b.css("position","absolute")}return 
b},_adjustOffsetFromHelper:function(b){if(b.left!=undefined){this.offset.click.left=b.left+this.margins.left}if(b.right!=undefined){this.offset.click.left=this.helperProportions.width-b.right+this.margins.left}if(b.top!=undefined){this.offset.click.top=b.top+this.margins.top}if(b.bottom!=undefined){this.offset.click.top=this.helperProportions.height-b.bottom+this.margins.top}},_getParentOffset:function(){this.offsetParent=this.helper.offsetParent();var b=this.offsetParent.offset();if(this.cssPosition=="absolute"&&this.scrollParent[0]!=document&&a.ui.contains(this.scrollParent[0],this.offsetParent[0])){b.left+=this.scrollParent.scrollLeft();b.top+=this.scrollParent.scrollTop()}if((this.offsetParent[0]==document.body)||(this.offsetParent[0].tagName&&this.offsetParent[0].tagName.toLowerCase()=="html"&&a.browser.msie)){b={top:0,left:0}}return{top:b.top+(parseInt(this.offsetParent.css("borderTopWidth"),10)||0),left:b.left+(parseInt(this.offsetParent.css("borderLeftWidth"),10)||0)}},_getRelativeOffset:function(){if(this.cssPosition=="relative"){var b=this.element.position();return{top:b.top-(parseInt(this.helper.css("top"),10)||0)+this.scrollParent.scrollTop(),left:b.left-(parseInt(this.helper.css("left"),10)||0)+this.scrollParent.scrollLeft()}}else{return{top:0,left:0}}},_cacheMargins:function(){this.margins={left:(parseInt(this.element.css("marginLeft"),10)||0),top:(parseInt(this.element.css("marginTop"),10)||0)}},_cacheHelperProportions:function(){this.helperProportions={width:this.helper.outerWidth(),height:this.helper.outerHeight()}},_setContainment:function(){var e=this.options;if(e.containment=="parent"){e.containment=this.helper[0].parentNode}if(e.containment=="document"||e.containment=="window"){this.containment=[0-this.offset.relative.left-this.offset.parent.left,0-this.offset.relative.top-this.offset.parent.top,a(e.containment=="document"?document:window).width()-this.helperProportions.width-this.margins.left,(a(e.containment=="document"?document:window).height()||document.body.parentNode.scrollHeight)-this.helperProportions.height-this.margins.top]}if(!(/^(document|window|parent)$/).test(e.containment)&&e.containment.constructor!=Array){var c=a(e.containment)[0];if(!c){return}var d=a(e.containment).offset();var b=(a(c).css("overflow")!="hidden");this.containment=[d.left+(parseInt(a(c).css("borderLeftWidth"),10)||0)+(parseInt(a(c).css("paddingLeft"),10)||0)-this.margins.left,d.top+(parseInt(a(c).css("borderTopWidth"),10)||0)+(parseInt(a(c).css("paddingTop"),10)||0)-this.margins.top,d.left+(b?Math.max(c.scrollWidth,c.offsetWidth):c.offsetWidth)-(parseInt(a(c).css("borderLeftWidth"),10)||0)-(parseInt(a(c).css("paddingRight"),10)||0)-this.helperProportions.width-this.margins.left,d.top+(b?Math.max(c.scrollHeight,c.offsetHeight):c.offsetHeight)-(parseInt(a(c).css("borderTopWidth"),10)||0)-(parseInt(a(c).css("paddingBottom"),10)||0)-this.helperProportions.height-this.margins.top]}else{if(e.containment.constructor==Array){this.containment=e.containment}}},_convertPositionTo:function(f,h){if(!h){h=this.position}var c=f=="absolute"?1:-1;var 
e=this.options,b=this.cssPosition=="absolute"&&!(this.scrollParent[0]!=document&&a.ui.contains(this.scrollParent[0],this.offsetParent[0]))?this.offsetParent:this.scrollParent,g=(/(html|body)/i).test(b[0].tagName);return{top:(h.top+this.offset.relative.top*c+this.offset.parent.top*c-(a.browser.safari&&this.cssPosition=="fixed"?0:(this.cssPosition=="fixed"?-this.scrollParent.scrollTop():(g?0:b.scrollTop()))*c)),left:(h.left+this.offset.relative.left*c+this.offset.parent.left*c-(a.browser.safari&&this.cssPosition=="fixed"?0:(this.cssPosition=="fixed"?-this.scrollParent.scrollLeft():g?0:b.scrollLeft())*c))}},_generatePosition:function(e){var h=this.options,b=this.cssPosition=="absolute"&&!(this.scrollParent[0]!=document&&a.ui.contains(this.scrollParent[0],this.offsetParent[0]))?this.offsetParent:this.scrollParent,i=(/(html|body)/i).test(b[0].tagName);if(this.cssPosition=="relative"&&!(this.scrollParent[0]!=document&&this.scrollParent[0]!=this.offsetParent[0])){this.offset.relative=this._getRelativeOffset()}var d=e.pageX;var c=e.pageY;if(this.originalPosition){if(this.containment){if(e.pageX-this.offset.click.left<this.containment[0]){d=this.containment[0]+this.offset.click.left}if(e.pageY-this.offset.click.top<this.containment[1]){c=this.containment[1]+this.offset.click.top}if(e.pageX-this.offset.click.left>this.containment[2]){d=this.containment[2]+this.offset.click.left}if(e.pageY-this.offset.click.top>this.containment[3]){c=this.containment[3]+this.offset.click.top}}if(h.grid){var g=this.originalPageY+Math.round((c-this.originalPageY)/h.grid[1])*h.grid[1];c=this.containment?(!(g-this.offset.click.top<this.containment[1]||g-this.offset.click.top>this.containment[3])?g:(!(g-this.offset.click.top<this.containment[1])?g-h.grid[1]:g+h.grid[1])):g;var f=this.originalPageX+Math.round((d-this.originalPageX)/h.grid[0])*h.grid[0];d=this.containment?(!(f-this.offset.click.left<this.containment[0]||f-this.offset.click.left>this.containment[2])?f:(!(f-this.offset.click.left<this.containment[0])?f-h.grid[0]:f+h.grid[0])):f}}return{top:(c-this.offset.click.top-this.offset.relative.top-this.offset.parent.top+(a.browser.safari&&this.cssPosition=="fixed"?0:(this.cssPosition=="fixed"?-this.scrollParent.scrollTop():(i?0:b.scrollTop())))),left:(d-this.offset.click.left-this.offset.relative.left-this.offset.parent.left+(a.browser.safari&&this.cssPosition=="fixed"?0:(this.cssPosition=="fixed"?-this.scrollParent.scrollLeft():i?0:b.scrollLeft())))}},_clear:function(){this.helper.removeClass("ui-draggable-dragging");if(this.helper[0]!=this.element[0]&&!this.cancelHelperRemoval){this.helper.remove()}this.helper=null;this.cancelHelperRemoval=false},_trigger:function(b,c,d){d=d||this._uiHash();a.ui.plugin.call(this,b,[c,d]);if(b=="drag"){this.positionAbs=this._convertPositionTo("absolute")}return 
a.widget.prototype._trigger.call(this,b,c,d)},plugins:{},_uiHash:function(b){return{helper:this.helper,position:this.position,absolutePosition:this.positionAbs,offset:this.positionAbs}}}));a.extend(a.ui.draggable,{version:"1.7.2",eventPrefix:"drag",defaults:{addClasses:true,appendTo:"parent",axis:false,cancel:":input,option",connectToSortable:false,containment:false,cursor:"auto",cursorAt:false,delay:0,distance:1,grid:false,handle:false,helper:"original",iframeFix:false,opacity:false,refreshPositions:false,revert:false,revertDuration:500,scope:"default",scroll:true,scrollSensitivity:20,scrollSpeed:20,snap:false,snapMode:"both",snapTolerance:20,stack:false,zIndex:false}});a.ui.plugin.add("draggable","connectToSortable",{start:function(c,e){var d=a(this).data("draggable"),f=d.options,b=a.extend({},e,{item:d.element});d.sortables=[];a(f.connectToSortable).each(function(){var g=a.data(this,"sortable");if(g&&!g.options.disabled){d.sortables.push({instance:g,shouldRevert:g.options.revert});g._refreshItems();g._trigger("activate",c,b)}})},stop:function(c,e){var d=a(this).data("draggable"),b=a.extend({},e,{item:d.element});a.each(d.sortables,function(){if(this.instance.isOver){this.instance.isOver=0;d.cancelHelperRemoval=true;this.instance.cancelHelperRemoval=false;if(this.shouldRevert){this.instance.options.revert=true}this.instance._mouseStop(c);this.instance.options.helper=this.instance.options._helper;if(d.options.helper=="original"){this.instance.currentItem.css({top:"auto",left:"auto"})}}else{this.instance.cancelHelperRemoval=false;this.instance._trigger("deactivate",c,b)}})},drag:function(c,f){var e=a(this).data("draggable"),b=this;var d=function(i){var n=this.offset.click.top,m=this.offset.click.left;var g=this.positionAbs.top,k=this.positionAbs.left;var j=i.height,l=i.width;var p=i.top,h=i.left;return a.ui.isOver(g+n,k+m,p,h,j,l)};a.each(e.sortables,function(g){this.instance.positionAbs=e.positionAbs;this.instance.helperProportions=e.helperProportions;this.instance.offset.click=e.offset.click;if(this.instance._intersectsWith(this.instance.containerCache)){if(!this.instance.isOver){this.instance.isOver=1;this.instance.currentItem=a(b).clone().appendTo(this.instance.element).data("sortable-item",true);this.instance.options._helper=this.instance.options.helper;this.instance.options.helper=function(){return f.helper[0]};c.target=this.instance.currentItem[0];this.instance._mouseCapture(c,true);this.instance._mouseStart(c,true,true);this.instance.offset.click.top=e.offset.click.top;this.instance.offset.click.left=e.offset.click.left;this.instance.offset.parent.left-=e.offset.parent.left-this.instance.offset.parent.left;this.instance.offset.parent.top-=e.offset.parent.top-this.instance.offset.parent.top;e._trigger("toSortable",c);e.dropped=this.instance.element;e.currentItem=e.element;this.instance.fromOutside=e}if(this.instance.currentItem){this.instance._mouseDrag(c)}}else{if(this.instance.isOver){this.instance.isOver=0;this.instance.cancelHelperRemoval=true;this.instance.options.revert=false;this.instance._trigger("out",c,this.instance._uiHash(this.instance));this.instance._mouseStop(c,true);this.instance.options.helper=this.instance.options._helper;this.instance.currentItem.remove();if(this.instance.placeholder){this.instance.placeholder.remove()}e._trigger("fromSortable",c);e.dropped=false}}})}});a.ui.plugin.add("draggable","cursor",{start:function(c,d){var 
b=a("body"),e=a(this).data("draggable").options;if(b.css("cursor")){e._cursor=b.css("cursor")}b.css("cursor",e.cursor)},stop:function(b,c){var d=a(this).data("draggable").options;if(d._cursor){a("body").css("cursor",d._cursor)}}});a.ui.plugin.add("draggable","iframeFix",{start:function(b,c){var d=a(this).data("draggable").options;a(d.iframeFix===true?"iframe":d.iframeFix).each(function(){a('<div class="ui-draggable-iframeFix" style="background: #fff;"></div>').css({width:this.offsetWidth+"px",height:this.offsetHeight+"px",position:"absolute",opacity:"0.001",zIndex:1000}).css(a(this).offset()).appendTo("body")})},stop:function(b,c){a("div.ui-draggable-iframeFix").each(function(){this.parentNode.removeChild(this)})}});a.ui.plugin.add("draggable","opacity",{start:function(c,d){var b=a(d.helper),e=a(this).data("draggable").options;if(b.css("opacity")){e._opacity=b.css("opacity")}b.css("opacity",e.opacity)},stop:function(b,c){var d=a(this).data("draggable").options;if(d._opacity){a(c.helper).css("opacity",d._opacity)}}});a.ui.plugin.add("draggable","scroll",{start:function(c,d){var b=a(this).data("draggable");if(b.scrollParent[0]!=document&&b.scrollParent[0].tagName!="HTML"){b.overflowOffset=b.scrollParent.offset()}},drag:function(d,e){var c=a(this).data("draggable"),f=c.options,b=false;if(c.scrollParent[0]!=document&&c.scrollParent[0].tagName!="HTML"){if(!f.axis||f.axis!="x"){if((c.overflowOffset.top+c.scrollParent[0].offsetHeight)-d.pageY<f.scrollSensitivity){c.scrollParent[0].scrollTop=b=c.scrollParent[0].scrollTop+f.scrollSpeed}else{if(d.pageY-c.overflowOffset.top<f.scrollSensitivity){c.scrollParent[0].scrollTop=b=c.scrollParent[0].scrollTop-f.scrollSpeed}}}if(!f.axis||f.axis!="y"){if((c.overflowOffset.left+c.scrollParent[0].offsetWidth)-d.pageX<f.scrollSensitivity){c.scrollParent[0].scrollLeft=b=c.scrollParent[0].scrollLeft+f.scrollSpeed}else{if(d.pageX-c.overflowOffset.left<f.scrollSensitivity){c.scrollParent[0].scrollLeft=b=c.scrollParent[0].scrollLeft-f.scrollSpeed}}}}else{if(!f.axis||f.axis!="x"){if(d.pageY-a(document).scrollTop()<f.scrollSensitivity){b=a(document).scrollTop(a(document).scrollTop()-f.scrollSpeed)}else{if(a(window).height()-(d.pageY-a(document).scrollTop())<f.scrollSensitivity){b=a(document).scrollTop(a(document).scrollTop()+f.scrollSpeed)}}}if(!f.axis||f.axis!="y"){if(d.pageX-a(document).scrollLeft()<f.scrollSensitivity){b=a(document).scrollLeft(a(document).scrollLeft()-f.scrollSpeed)}else{if(a(window).width()-(d.pageX-a(document).scrollLeft())<f.scrollSensitivity){b=a(document).scrollLeft(a(document).scrollLeft()+f.scrollSpeed)}}}}if(b!==false&&a.ui.ddmanager&&!f.dropBehaviour){a.ui.ddmanager.prepareOffsets(c,d)}}});a.ui.plugin.add("draggable","snap",{start:function(c,d){var b=a(this).data("draggable"),e=b.options;b.snapElements=[];a(e.snap.constructor!=String?(e.snap.items||":data(draggable)"):e.snap).each(function(){var g=a(this);var f=g.offset();if(this!=b.element[0]){b.snapElements.push({item:this,width:g.outerWidth(),height:g.outerHeight(),top:f.top,left:f.left})}})},drag:function(u,p){var g=a(this).data("draggable"),q=g.options;var y=q.snapTolerance;var x=p.offset.left,w=x+g.helperProportions.width,f=p.offset.top,e=f+g.helperProportions.height;for(var v=g.snapElements.length-1;v>=0;v--){var 
s=g.snapElements[v].left,n=s+g.snapElements[v].width,m=g.snapElements[v].top,A=m+g.snapElements[v].height;if(!((s-y<x&&x<n+y&&m-y<f&&f<A+y)||(s-y<x&&x<n+y&&m-y<e&&e<A+y)||(s-y<w&&w<n+y&&m-y<f&&f<A+y)||(s-y<w&&w<n+y&&m-y<e&&e<A+y))){if(g.snapElements[v].snapping){(g.options.snap.release&&g.options.snap.release.call(g.element,u,a.extend(g._uiHash(),{snapItem:g.snapElements[v].item})))}g.snapElements[v].snapping=false;continue}if(q.snapMode!="inner"){var c=Math.abs(m-e)<=y;var z=Math.abs(A-f)<=y;var j=Math.abs(s-w)<=y;var k=Math.abs(n-x)<=y;if(c){p.position.top=g._convertPositionTo("relative",{top:m-g.helperProportions.height,left:0}).top-g.margins.top}if(z){p.position.top=g._convertPositionTo("relative",{top:A,left:0}).top-g.margins.top}if(j){p.position.left=g._convertPositionTo("relative",{top:0,left:s-g.helperProportions.width}).left-g.margins.left}if(k){p.position.left=g._convertPositionTo("relative",{top:0,left:n}).left-g.margins.left}}var h=(c||z||j||k);if(q.snapMode!="outer"){var c=Math.abs(m-f)<=y;var z=Math.abs(A-e)<=y;var j=Math.abs(s-x)<=y;var k=Math.abs(n-w)<=y;if(c){p.position.top=g._convertPositionTo("relative",{top:m,left:0}).top-g.margins.top}if(z){p.position.top=g._convertPositionTo("relative",{top:A-g.helperProportions.height,left:0}).top-g.margins.top}if(j){p.position.left=g._convertPositionTo("relative",{top:0,left:s}).left-g.margins.left}if(k){p.position.left=g._convertPositionTo("relative",{top:0,left:n-g.helperProportions.width}).left-g.margins.left}}if(!g.snapElements[v].snapping&&(c||z||j||k||h)){(g.options.snap.snap&&g.options.snap.snap.call(g.element,u,a.extend(g._uiHash(),{snapItem:g.snapElements[v].item})))}g.snapElements[v].snapping=(c||z||j||k||h)}}});a.ui.plugin.add("draggable","stack",{start:function(b,c){var e=a(this).data("draggable").options;var d=a.makeArray(a(e.stack.group)).sort(function(g,f){return(parseInt(a(g).css("zIndex"),10)||e.stack.min)-(parseInt(a(f).css("zIndex"),10)||e.stack.min)});a(d).each(function(f){this.style.zIndex=e.stack.min+f});this[0].style.zIndex=e.stack.min+d.length}});a.ui.plugin.add("draggable","zIndex",{start:function(c,d){var b=a(d.helper),e=a(this).data("draggable").options;if(b.css("zIndex")){e._zIndex=b.css("zIndex")}b.css("zIndex",e.zIndex)},stop:function(b,c){var d=a(this).data("draggable").options;if(d._zIndex){a(c.helper).css("zIndex",d._zIndex)}}})})(jQuery);;/*
  * jQuery UI Droppable 1.7.2
  *
- * Copyright (c) 2009 AUTHORS.txt (http://jqueryui.com/about)
+ * Copyright (c) 2010 AUTHORS.txt (http://jqueryui.com/about)
  * Dual licensed under the MIT (MIT-LICENSE.txt)
  * and GPL (GPL-LICENSE.txt) licenses.
  *
@@ -35,7 +35,7 @@
 (function(a){a.widget("ui.droppable",{_init:function(){var c=this.options,b=c.accept;this.isover=0;this.isout=1;this.options.accept=this.options.accept&&a.isFunction(this.options.accept)?this.options.accept:function(e){return e.is(b)};this.proportions={width:this.element[0].offsetWidth,height:this.element[0].offsetHeight};a.ui.ddmanager.droppables[this.options.scope]=a.ui.ddmanager.droppables[this.options.scope]||[];a.ui.ddmanager.droppables[this.options.scope].push(this);(this.options.addClasses&&this.element.addClass("ui-droppable"))},destroy:function(){var b=a.ui.ddmanager.droppables[this.options.scope];for(var c=0;c<b.length;c++){if(b[c]==this){b.splice(c,1)}}this.element.removeClass("ui-droppable ui-droppable-disabled").removeData("droppable").unbind(".droppable")},_setData:function(b,c){if(b=="accept"){this.options.accept=c&&a.isFunction(c)?c:function(e){return e.is(c)}}else{a.widget.prototype._setData.apply(this,arguments)}},_activate:function(c){var b=a.ui.ddmanager.current;if(this.options.activeClass){this.element.addClass(this.options.activeClass)}(b&&this._trigger("activate",c,this.ui(b)))},_deactivate:function(c){var b=a.ui.ddmanager.current;if(this.options.activeClass){this.element.removeClass(this.options.activeClass)}(b&&this._trigger("deactivate",c,this.ui(b)))},_over:function(c){var b=a.ui.ddmanager.current;if(!b||(b.currentItem||b.element)[0]==this.element[0]){return}if(this.options.accept.call(this.element[0],(b.currentItem||b.element))){if(this.options.hoverClass){this.element.addClass(this.options.hoverClass)}this._trigger("over",c,this.ui(b))}},_out:function(c){var b=a.ui.ddmanager.current;if(!b||(b.currentItem||b.element)[0]==this.element[0]){return}if(this.options.accept.call(this.element[0],(b.currentItem||b.element))){if(this.options.hoverClass){this.element.removeClass(this.options.hoverClass)}this._trigger("out",c,this.ui(b))}},_drop:function(c,d){var b=d||a.ui.ddmanager.current;if(!b||(b.currentItem||b.element)[0]==this.element[0]){return false}var e=false;this.element.find(":data(droppable)").not(".ui-draggable-dragging").each(function(){var f=a.data(this,"droppable");if(f.options.greedy&&a.ui.intersect(b,a.extend(f,{offset:f.element.offset()}),f.options.tolerance)){e=true;return false}});if(e){return false}if(this.options.accept.call(this.element[0],(b.currentItem||b.element))){if(this.options.activeClass){this.element.removeClass(this.options.activeClass)}if(this.options.hoverClass){this.element.removeClass(this.options.hoverClass)}this._trigger("drop",c,this.ui(b));return this.element}return false},ui:function(b){return{draggable:(b.currentItem||b.element),helper:b.helper,position:b.position,absolutePosition:b.positionAbs,offset:b.positionAbs}}});a.extend(a.ui.droppable,{version:"1.7.2",eventPrefix:"drop",defaults:{accept:"*",activeClass:false,addClasses:true,greedy:false,hoverClass:false,scope:"default",tolerance:"intersect"}});a.ui.intersect=function(q,j,o){if(!j.offset){return false}var e=(q.positionAbs||q.position.absolute).left,d=e+q.helperProportions.width,n=(q.positionAbs||q.position.absolute).top,m=n+q.helperProportions.height;var g=j.offset.left,c=g+j.proportions.width,p=j.offset.top,k=p+j.proportions.height;switch(o){case"fit":return(g<e&&d<c&&p<n&&m<k);break;case"intersect":return(g<e+(q.helperProportions.width/2)&&d-(q.helperProportions.width/2)<c&&p<n+(q.helperProportions.height/2)&&m-(q.helperProportions.height/2)<k);break;case"pointer":var 
h=((q.positionAbs||q.position.absolute).left+(q.clickOffset||q.offset.click).left),i=((q.positionAbs||q.position.absolute).top+(q.clickOffset||q.offset.click).top),f=a.ui.isOver(i,h,p,g,j.proportions.height,j.proportions.width);return f;break;case"touch":return((n>=p&&n<=k)||(m>=p&&m<=k)||(n<p&&m>k))&&((e>=g&&e<=c)||(d>=g&&d<=c)||(e<g&&d>c));break;default:return false;break}};a.ui.ddmanager={current:null,droppables:{"default":[]},prepareOffsets:function(e,g){var b=a.ui.ddmanager.droppables[e.options.scope];var f=g?g.type:null;var h=(e.currentItem||e.element).find(":data(droppable)").andSelf();droppablesLoop:for(var d=0;d<b.length;d++){if(b[d].options.disabled||(e&&!b[d].options.accept.call(b[d].element[0],(e.currentItem||e.element)))){continue}for(var c=0;c<h.length;c++){if(h[c]==b[d].element[0]){b[d].proportions.height=0;continue droppablesLoop}}b[d].visible=b[d].element.css("display")!="none";if(!b[d].visible){continue}b[d].offset=b[d].element.offset();b[d].proportions={width:b[d].element[0].offsetWidth,height:b[d].element[0].offsetHeight};if(f=="mousedown"){b[d]._activate.call(b[d],g)}}},drop:function(b,c){var d=false;a.each(a.ui.ddmanager.droppables[b.options.scope],function(){if(!this.options){return}if(!this.options.disabled&&this.visible&&a.ui.intersect(b,this,this.options.tolerance)){d=this._drop.call(this,c)}if(!this.options.disabled&&this.visible&&this.options.accept.call(this.element[0],(b.currentItem||b.element))){this.isout=1;this.isover=0;this._deactivate.call(this,c)}});return d},drag:function(b,c){if(b.options.refreshPositions){a.ui.ddmanager.prepareOffsets(b,c)}a.each(a.ui.ddmanager.droppables[b.options.scope],function(){if(this.options.disabled||this.greedyChild||!this.visible){return}var e=a.ui.intersect(b,this,this.options.tolerance);var g=!e&&this.isover==1?"isout":(e&&this.isover==0?"isover":null);if(!g){return}var f;if(this.options.greedy){var d=this.element.parents(":data(droppable):eq(0)");if(d.length){f=a.data(d[0],"droppable");f.greedyChild=(g=="isover"?1:0)}}if(f&&g=="isover"){f.isover=0;f.isout=1;f._out.call(f,c)}this[g]=1;this[g=="isout"?"isover":"isout"]=0;this[g=="isover"?"_over":"_out"].call(this,c);if(f&&g=="isout"){f.isout=0;f.isover=1;f._over.call(f,c)}})}}})(jQuery);;/*
  * jQuery UI Resizable 1.7.2
  *
- * Copyright (c) 2009 AUTHORS.txt (http://jqueryui.com/about)
+ * Copyright (c) 2010 AUTHORS.txt (http://jqueryui.com/about)
  * Dual licensed under the MIT (MIT-LICENSE.txt)
  * and GPL (GPL-LICENSE.txt) licenses.
  *
@@ -47,7 +47,7 @@
 (function(c){c.widget("ui.resizable",c.extend({},c.ui.mouse,{_init:function(){var e=this,j=this.options;this.element.addClass("ui-resizable");c.extend(this,{_aspectRatio:!!(j.aspectRatio),aspectRatio:j.aspectRatio,originalElement:this.element,_proportionallyResizeElements:[],_helper:j.helper||j.ghost||j.animate?j.helper||"ui-resizable-helper":null});if(this.element[0].nodeName.match(/canvas|textarea|input|select|button|img/i)){if(/relative/.test(this.element.css("position"))&&c.browser.opera){this.element.css({position:"relative",top:"auto",left:"auto"})}this.element.wrap(c('<div class="ui-wrapper" style="overflow: hidden;"></div>').css({position:this.element.css("position"),width:this.element.outerWidth(),height:this.element.outerHeight(),top:this.element.css("top"),left:this.element.css("left")}));this.element=this.element.parent().data("resizable",this.element.data("resizable"));this.elementIsWrapper=true;this.element.css({marginLeft:this.originalElement.css("marginLeft"),marginTop:this.originalElement.css("marginTop"),marginRight:this.originalElement.css("marginRight"),marginBottom:this.originalElement.css("marginBottom")});this.originalElement.css({marginLeft:0,marginTop:0,marginRight:0,marginBottom:0});this.originalResizeStyle=this.originalElement.css("resize");this.originalElement.css("resize","none");this._proportionallyResizeElements.push(this.originalElement.css({position:"static",zoom:1,display:"block"}));this.originalElement.css({margin:this.originalElement.css("margin")});this._proportionallyResize()}this.handles=j.handles||(!c(".ui-resizable-handle",this.element).length?"e,s,se":{n:".ui-resizable-n",e:".ui-resizable-e",s:".ui-resizable-s",w:".ui-resizable-w",se:".ui-resizable-se",sw:".ui-resizable-sw",ne:".ui-resizable-ne",nw:".ui-resizable-nw"});if(this.handles.constructor==String){if(this.handles=="all"){this.handles="n,e,s,w,se,sw,ne,nw"}var k=this.handles.split(",");this.handles={};for(var f=0;f<k.length;f++){var h=c.trim(k[f]),d="ui-resizable-"+h;var g=c('<div class="ui-resizable-handle '+d+'"></div>');if(/sw|se|ne|nw/.test(h)){g.css({zIndex:++j.zIndex})}if("se"==h){g.addClass("ui-icon ui-icon-gripsmall-diagonal-se")}this.handles[h]=".ui-resizable-"+h;this.element.append(g)}}this._renderAxis=function(p){p=p||this.element;for(var m in this.handles){if(this.handles[m].constructor==String){this.handles[m]=c(this.handles[m],this.element).show()}if(this.elementIsWrapper&&this.originalElement[0].nodeName.match(/textarea|input|select|button/i)){var n=c(this.handles[m],this.element),o=0;o=/sw|ne|nw|se|n|s/.test(m)?n.outerHeight():n.outerWidth();var l=["padding",/ne|nw|n/.test(m)?"Top":/se|sw|s/.test(m)?"Bottom":/^e$/.test(m)?"Right":"Left"].join("");p.css(l,o);this._proportionallyResize()}if(!c(this.handles[m]).length){continue}}};this._renderAxis(this.element);this._handles=c(".ui-resizable-handle",this.element).disableSelection();this._handles.mouseover(function(){if(!e.resizing){if(this.className){var i=this.className.match(/ui-resizable-(se|sw|ne|nw|n|e|s|w)/i)}e.axis=i&&i[1]?i[1]:"se"}});if(j.autoHide){this._handles.hide();c(this.element).addClass("ui-resizable-autohide").hover(function(){c(this).removeClass("ui-resizable-autohide");e._handles.show()},function(){if(!e.resizing){c(this).addClass("ui-resizable-autohide");e._handles.hide()}})}this._mouseInit()},destroy:function(){this._mouseDestroy();var d=function(f){c(f).removeClass("ui-resizable ui-resizable-disabled 
ui-resizable-resizing").removeData("resizable").unbind(".resizable").find(".ui-resizable-handle").remove()};if(this.elementIsWrapper){d(this.element);var e=this.element;e.parent().append(this.originalElement.css({position:e.css("position"),width:e.outerWidth(),height:e.outerHeight(),top:e.css("top"),left:e.css("left")})).end().remove()}this.originalElement.css("resize",this.originalResizeStyle);d(this.originalElement)},_mouseCapture:function(e){var f=false;for(var d in this.handles){if(c(this.handles[d])[0]==e.target){f=true}}return this.options.disabled||!!f},_mouseStart:function(f){var i=this.options,e=this.element.position(),d=this.element;this.resizing=true;this.documentScroll={top:c(document).scrollTop(),left:c(document).scrollLeft()};if(d.is(".ui-draggable")||(/absolute/).test(d.css("position"))){d.css({position:"absolute",top:e.top,left:e.left})}if(c.browser.opera&&(/relative/).test(d.css("position"))){d.css({position:"relative",top:"auto",left:"auto"})}this._renderProxy();var j=b(this.helper.css("left")),g=b(this.helper.css("top"));if(i.containment){j+=c(i.containment).scrollLeft()||0;g+=c(i.containment).scrollTop()||0}this.offset=this.helper.offset();this.position={left:j,top:g};this.size=this._helper?{width:d.outerWidth(),height:d.outerHeight()}:{width:d.width(),height:d.height()};this.originalSize=this._helper?{width:d.outerWidth(),height:d.outerHeight()}:{width:d.width(),height:d.height()};this.originalPosition={left:j,top:g};this.sizeDiff={width:d.outerWidth()-d.width(),height:d.outerHeight()-d.height()};this.originalMousePosition={left:f.pageX,top:f.pageY};this.aspectRatio=(typeof i.aspectRatio=="number")?i.aspectRatio:((this.originalSize.width/this.originalSize.height)||1);var h=c(".ui-resizable-"+this.axis).css("cursor");c("body").css("cursor",h=="auto"?this.axis+"-resize":h);d.addClass("ui-resizable-resizing");this._propagate("start",f);return true},_mouseDrag:function(d){var g=this.helper,f=this.options,l={},p=this,i=this.originalMousePosition,m=this.axis;var q=(d.pageX-i.left)||0,n=(d.pageY-i.top)||0;var h=this._change[m];if(!h){return false}var k=h.apply(this,[d,q,n]),j=c.browser.msie&&c.browser.version<7,e=this.sizeDiff;if(this._aspectRatio||d.shiftKey){k=this._updateRatio(k,d)}k=this._respectSize(k,d);this._propagate("resize",d);g.css({top:this.position.top+"px",left:this.position.left+"px",width:this.size.width+"px",height:this.size.height+"px"});if(!this._helper&&this._proportionallyResizeElements.length){this._proportionallyResize()}this._updateCache(k);this._trigger("resize",d,this.ui());return false},_mouseStop:function(g){this.resizing=false;var h=this.options,l=this;if(this._helper){var f=this._proportionallyResizeElements,d=f.length&&(/textarea/i).test(f[0].nodeName),e=d&&c.ui.hasScroll(f[0],"left")?0:l.sizeDiff.height,j=d?0:l.sizeDiff.width;var m={width:(l.size.width-j),height:(l.size.height-e)},i=(parseInt(l.element.css("left"),10)+(l.position.left-l.originalPosition.left))||null,k=(parseInt(l.element.css("top"),10)+(l.position.top-l.originalPosition.top))||null;if(!h.animate){this.element.css(c.extend(m,{top:k,left:i}))}l.helper.height(l.size.height);l.helper.width(l.size.width);if(this._helper&&!h.animate){this._proportionallyResize()}}c("body").css("cursor","auto");this.element.removeClass("ui-resizable-resizing");this._propagate("stop",g);if(this._helper){this.helper.remove()}return false},_updateCache:function(d){var 
e=this.options;this.offset=this.helper.offset();if(a(d.left)){this.position.left=d.left}if(a(d.top)){this.position.top=d.top}if(a(d.height)){this.size.height=d.height}if(a(d.width)){this.size.width=d.width}},_updateRatio:function(g,f){var h=this.options,i=this.position,e=this.size,d=this.axis;if(g.height){g.width=(e.height*this.aspectRatio)}else{if(g.width){g.height=(e.width/this.aspectRatio)}}if(d=="sw"){g.left=i.left+(e.width-g.width);g.top=null}if(d=="nw"){g.top=i.top+(e.height-g.height);g.left=i.left+(e.width-g.width)}return g},_respectSize:function(k,f){var i=this.helper,h=this.options,q=this._aspectRatio||f.shiftKey,p=this.axis,s=a(k.width)&&h.maxWidth&&(h.maxWidth<k.width),l=a(k.height)&&h.maxHeight&&(h.maxHeight<k.height),g=a(k.width)&&h.minWidth&&(h.minWidth>k.width),r=a(k.height)&&h.minHeight&&(h.minHeight>k.height);if(g){k.width=h.minWidth}if(r){k.height=h.minHeight}if(s){k.width=h.maxWidth}if(l){k.height=h.maxHeight}var e=this.originalPosition.left+this.originalSize.width,n=this.position.top+this.size.height;var j=/sw|nw|w/.test(p),d=/nw|ne|n/.test(p);if(g&&j){k.left=e-h.minWidth}if(s&&j){k.left=e-h.maxWidth}if(r&&d){k.top=n-h.minHeight}if(l&&d){k.top=n-h.maxHeight}var m=!k.width&&!k.height;if(m&&!k.left&&k.top){k.top=null}else{if(m&&!k.top&&k.left){k.left=null}}return k},_proportionallyResize:function(){var j=this.options;if(!this._proportionallyResizeElements.length){return}var f=this.helper||this.element;for(var e=0;e<this._proportionallyResizeElements.length;e++){var g=this._proportionallyResizeElements[e];if(!this.borderDif){var d=[g.css("borderTopWidth"),g.css("borderRightWidth"),g.css("borderBottomWidth"),g.css("borderLeftWidth")],h=[g.css("paddingTop"),g.css("paddingRight"),g.css("paddingBottom"),g.css("paddingLeft")];this.borderDif=c.map(d,function(k,m){var l=parseInt(k,10)||0,n=parseInt(h[m],10)||0;return l+n})}if(c.browser.msie&&!(!(c(f).is(":hidden")||c(f).parents(":hidden").length))){continue}g.css({height:(f.height()-this.borderDif[0]-this.borderDif[2])||0,width:(f.width()-this.borderDif[1]-this.borderDif[3])||0})}},_renderProxy:function(){var e=this.element,h=this.options;this.elementOffset=e.offset();if(this._helper){this.helper=this.helper||c('<div style="overflow:hidden;"></div>');var d=c.browser.msie&&c.browser.version<7,f=(d?1:0),g=(d?2:-1);this.helper.addClass(this._helper).css({width:this.element.outerWidth()+g,height:this.element.outerHeight()+g,position:"absolute",left:this.elementOffset.left-f+"px",top:this.elementOffset.top-f+"px",zIndex:++h.zIndex});this.helper.appendTo("body").disableSelection()}else{this.helper=this.element}},_change:{e:function(f,e,d){return{width:this.originalSize.width+e}},w:function(g,e,d){var i=this.options,f=this.originalSize,h=this.originalPosition;return{left:h.left+e,width:f.width-e}},n:function(g,e,d){var i=this.options,f=this.originalSize,h=this.originalPosition;return{top:h.top+d,height:f.height-d}},s:function(f,e,d){return{height:this.originalSize.height+d}},se:function(f,e,d){return c.extend(this._change.s.apply(this,arguments),this._change.e.apply(this,[f,e,d]))},sw:function(f,e,d){return c.extend(this._change.s.apply(this,arguments),this._change.w.apply(this,[f,e,d]))},ne:function(f,e,d){return c.extend(this._change.n.apply(this,arguments),this._change.e.apply(this,[f,e,d]))},nw:function(f,e,d){return 
c.extend(this._change.n.apply(this,arguments),this._change.w.apply(this,[f,e,d]))}},_propagate:function(e,d){c.ui.plugin.call(this,e,[d,this.ui()]);(e!="resize"&&this._trigger(e,d,this.ui()))},plugins:{},ui:function(){return{originalElement:this.originalElement,element:this.element,helper:this.helper,position:this.position,size:this.size,originalSize:this.originalSize,originalPosition:this.originalPosition}}}));c.extend(c.ui.resizable,{version:"1.7.2",eventPrefix:"resize",defaults:{alsoResize:false,animate:false,animateDuration:"slow",animateEasing:"swing",aspectRatio:false,autoHide:false,cancel:":input,option",containment:false,delay:0,distance:1,ghost:false,grid:false,handles:"e,s,se",helper:false,maxHeight:null,maxWidth:null,minHeight:10,minWidth:10,zIndex:1000}});c.ui.plugin.add("resizable","alsoResize",{start:function(e,f){var d=c(this).data("resizable"),g=d.options;_store=function(h){c(h).each(function(){c(this).data("resizable-alsoresize",{width:parseInt(c(this).width(),10),height:parseInt(c(this).height(),10),left:parseInt(c(this).css("left"),10),top:parseInt(c(this).css("top"),10)})})};if(typeof(g.alsoResize)=="object"&&!g.alsoResize.parentNode){if(g.alsoResize.length){g.alsoResize=g.alsoResize[0];_store(g.alsoResize)}else{c.each(g.alsoResize,function(h,i){_store(h)})}}else{_store(g.alsoResize)}},resize:function(f,h){var e=c(this).data("resizable"),i=e.options,g=e.originalSize,k=e.originalPosition;var j={height:(e.size.height-g.height)||0,width:(e.size.width-g.width)||0,top:(e.position.top-k.top)||0,left:(e.position.left-k.left)||0},d=function(l,m){c(l).each(function(){var p=c(this),q=c(this).data("resizable-alsoresize"),o={},n=m&&m.length?m:["width","height","top","left"];c.each(n||["width","height","top","left"],function(r,t){var s=(q[t]||0)+(j[t]||0);if(s&&s>=0){o[t]=s||null}});if(/relative/.test(p.css("position"))&&c.browser.opera){e._revertToRelativePosition=true;p.css({position:"absolute",top:"auto",left:"auto"})}p.css(o)})};if(typeof(i.alsoResize)=="object"&&!i.alsoResize.nodeType){c.each(i.alsoResize,function(l,m){d(l,m)})}else{d(i.alsoResize)}},stop:function(e,f){var d=c(this).data("resizable");if(d._revertToRelativePosition&&c.browser.opera){d._revertToRelativePosition=false;el.css({position:"relative"})}c(this).removeData("resizable-alsoresize-start")}});c.ui.plugin.add("resizable","animate",{stop:function(h,m){var n=c(this).data("resizable"),i=n.options;var g=n._proportionallyResizeElements,d=g.length&&(/textarea/i).test(g[0].nodeName),e=d&&c.ui.hasScroll(g[0],"left")?0:n.sizeDiff.height,k=d?0:n.sizeDiff.width;var f={width:(n.size.width-k),height:(n.size.height-e)},j=(parseInt(n.element.css("left"),10)+(n.position.left-n.originalPosition.left))||null,l=(parseInt(n.element.css("top"),10)+(n.position.top-n.originalPosition.top))||null;n.element.animate(c.extend(f,l&&j?{top:l,left:j}:{}),{duration:i.animateDuration,easing:i.animateEasing,step:function(){var o={width:parseInt(n.element.css("width"),10),height:parseInt(n.element.css("height"),10),top:parseInt(n.element.css("top"),10),left:parseInt(n.element.css("left"),10)};if(g&&g.length){c(g[0]).css({width:o.width,height:o.height})}n._updateCache(o);n._propagate("resize",h)}})}});c.ui.plugin.add("resizable","containment",{start:function(e,q){var s=c(this).data("resizable"),i=s.options,k=s.element;var f=i.containment,j=(f instanceof 
c)?f.get(0):(/parent/.test(f))?k.parent().get(0):f;if(!j){return}s.containerElement=c(j);if(/document/.test(f)||f==document){s.containerOffset={left:0,top:0};s.containerPosition={left:0,top:0};s.parentData={element:c(document),left:0,top:0,width:c(document).width(),height:c(document).height()||document.body.parentNode.scrollHeight}}else{var m=c(j),h=[];c(["Top","Right","Left","Bottom"]).each(function(p,o){h[p]=b(m.css("padding"+o))});s.containerOffset=m.offset();s.containerPosition=m.position();s.containerSize={height:(m.innerHeight()-h[3]),width:(m.innerWidth()-h[1])};var n=s.containerOffset,d=s.containerSize.height,l=s.containerSize.width,g=(c.ui.hasScroll(j,"left")?j.scrollWidth:l),r=(c.ui.hasScroll(j)?j.scrollHeight:d);s.parentData={element:j,left:n.left,top:n.top,width:g,height:r}}},resize:function(f,p){var s=c(this).data("resizable"),h=s.options,e=s.containerSize,n=s.containerOffset,l=s.size,m=s.position,q=s._aspectRatio||f.shiftKey,d={top:0,left:0},g=s.containerElement;if(g[0]!=document&&(/static/).test(g.css("position"))){d=n}if(m.left<(s._helper?n.left:0)){s.size.width=s.size.width+(s._helper?(s.position.left-n.left):(s.position.left-d.left));if(q){s.size.height=s.size.width/h.aspectRatio}s.position.left=h.helper?n.left:0}if(m.top<(s._helper?n.top:0)){s.size.height=s.size.height+(s._helper?(s.position.top-n.top):s.position.top);if(q){s.size.width=s.size.height*h.aspectRatio}s.position.top=s._helper?n.top:0}s.offset.left=s.parentData.left+s.position.left;s.offset.top=s.parentData.top+s.position.top;var k=Math.abs((s._helper?s.offset.left-d.left:(s.offset.left-d.left))+s.sizeDiff.width),r=Math.abs((s._helper?s.offset.top-d.top:(s.offset.top-n.top))+s.sizeDiff.height);var j=s.containerElement.get(0)==s.element.parent().get(0),i=/relative|absolute/.test(s.containerElement.css("position"));if(j&&i){k-=s.parentData.left}if(k+s.size.width>=s.parentData.width){s.size.width=s.parentData.width-k;if(q){s.size.height=s.size.width/s.aspectRatio}}if(r+s.size.height>=s.parentData.height){s.size.height=s.parentData.height-r;if(q){s.size.width=s.size.height*s.aspectRatio}}},stop:function(e,m){var p=c(this).data("resizable"),f=p.options,k=p.position,l=p.containerOffset,d=p.containerPosition,g=p.containerElement;var i=c(p.helper),q=i.offset(),n=i.outerWidth()-p.sizeDiff.width,j=i.outerHeight()-p.sizeDiff.height;if(p._helper&&!f.animate&&(/relative/).test(g.css("position"))){c(this).css({left:q.left-d.left-l.left,width:n,height:j})}if(p._helper&&!f.animate&&(/static/).test(g.css("position"))){c(this).css({left:q.left-d.left-l.left,width:n,height:j})}}});c.ui.plugin.add("resizable","ghost",{start:function(f,g){var d=c(this).data("resizable"),h=d.options,e=d.size;d.ghost=d.originalElement.clone();d.ghost.css({opacity:0.25,display:"block",position:"relative",height:e.height,width:e.width,margin:0,left:0,top:0}).addClass("ui-resizable-ghost").addClass(typeof h.ghost=="string"?h.ghost:"");d.ghost.appendTo(d.helper)},resize:function(e,f){var d=c(this).data("resizable"),g=d.options;if(d.ghost){d.ghost.css({position:"relative",height:d.size.height,width:d.size.width})}},stop:function(e,f){var d=c(this).data("resizable"),g=d.options;if(d.ghost&&d.helper){d.helper.get(0).removeChild(d.ghost.get(0))}}});c.ui.plugin.add("resizable","grid",{resize:function(d,l){var n=c(this).data("resizable"),g=n.options,j=n.size,h=n.originalSize,i=n.originalPosition,m=n.axis,k=g._aspectRatio||d.shiftKey;g.grid=typeof g.grid=="number"?[g.grid,g.grid]:g.grid;var 
f=Math.round((j.width-h.width)/(g.grid[0]||1))*(g.grid[0]||1),e=Math.round((j.height-h.height)/(g.grid[1]||1))*(g.grid[1]||1);if(/^(se|s|e)$/.test(m)){n.size.width=h.width+f;n.size.height=h.height+e}else{if(/^(ne)$/.test(m)){n.size.width=h.width+f;n.size.height=h.height+e;n.position.top=i.top-e}else{if(/^(sw)$/.test(m)){n.size.width=h.width+f;n.size.height=h.height+e;n.position.left=i.left-f}else{n.size.width=h.width+f;n.size.height=h.height+e;n.position.top=i.top-e;n.position.left=i.left-f}}}}});var b=function(d){return parseInt(d,10)||0};var a=function(d){return !isNaN(parseInt(d,10))}})(jQuery);;/*
  * jQuery UI Selectable 1.7.2
  *
- * Copyright (c) 2009 AUTHORS.txt (http://jqueryui.com/about)
+ * Copyright (c) 2010 AUTHORS.txt (http://jqueryui.com/about)
  * Dual licensed under the MIT (MIT-LICENSE.txt)
  * and GPL (GPL-LICENSE.txt) licenses.
  *
@@ -59,7 +59,7 @@
 (function(a){a.widget("ui.selectable",a.extend({},a.ui.mouse,{_init:function(){var b=this;this.element.addClass("ui-selectable");this.dragged=false;var c;this.refresh=function(){c=a(b.options.filter,b.element[0]);c.each(function(){var d=a(this);var e=d.offset();a.data(this,"selectable-item",{element:this,$element:d,left:e.left,top:e.top,right:e.left+d.outerWidth(),bottom:e.top+d.outerHeight(),startselected:false,selected:d.hasClass("ui-selected"),selecting:d.hasClass("ui-selecting"),unselecting:d.hasClass("ui-unselecting")})})};this.refresh();this.selectees=c.addClass("ui-selectee");this._mouseInit();this.helper=a(document.createElement("div")).css({border:"1px dotted black"}).addClass("ui-selectable-helper")},destroy:function(){this.element.removeClass("ui-selectable ui-selectable-disabled").removeData("selectable").unbind(".selectable");this._mouseDestroy()},_mouseStart:function(d){var b=this;this.opos=[d.pageX,d.pageY];if(this.options.disabled){return}var c=this.options;this.selectees=a(c.filter,this.element[0]);this._trigger("start",d);a(c.appendTo).append(this.helper);this.helper.css({"z-index":100,position:"absolute",left:d.clientX,top:d.clientY,width:0,height:0});if(c.autoRefresh){this.refresh()}this.selectees.filter(".ui-selected").each(function(){var e=a.data(this,"selectable-item");e.startselected=true;if(!d.metaKey){e.$element.removeClass("ui-selected");e.selected=false;e.$element.addClass("ui-unselecting");e.unselecting=true;b._trigger("unselecting",d,{unselecting:e.element})}});a(d.target).parents().andSelf().each(function(){var e=a.data(this,"selectable-item");if(e){e.$element.removeClass("ui-unselecting").addClass("ui-selecting");e.unselecting=false;e.selecting=true;e.selected=true;b._trigger("selecting",d,{selecting:e.element});return false}})},_mouseDrag:function(i){var c=this;this.dragged=true;if(this.options.disabled){return}var e=this.options;var d=this.opos[0],h=this.opos[1],b=i.pageX,g=i.pageY;if(d>b){var f=b;b=d;d=f}if(h>g){var f=g;g=h;h=f}this.helper.css({left:d,top:h,width:b-d,height:g-h});this.selectees.each(function(){var j=a.data(this,"selectable-item");if(!j||j.element==c.element[0]){return}var k=false;if(e.tolerance=="touch"){k=(!(j.left>b||j.right<d||j.top>g||j.bottom<h))}else{if(e.tolerance=="fit"){k=(j.left>d&&j.right<b&&j.top>h&&j.bottom<g)}}if(k){if(j.selected){j.$element.removeClass("ui-selected");j.selected=false}if(j.unselecting){j.$element.removeClass("ui-unselecting");j.unselecting=false}if(!j.selecting){j.$element.addClass("ui-selecting");j.selecting=true;c._trigger("selecting",i,{selecting:j.element})}}else{if(j.selecting){if(i.metaKey&&j.startselected){j.$element.removeClass("ui-selecting");j.selecting=false;j.$element.addClass("ui-selected");j.selected=true}else{j.$element.removeClass("ui-selecting");j.selecting=false;if(j.startselected){j.$element.addClass("ui-unselecting");j.unselecting=true}c._trigger("unselecting",i,{unselecting:j.element})}}if(j.selected){if(!i.metaKey&&!j.startselected){j.$element.removeClass("ui-selected");j.selected=false;j.$element.addClass("ui-unselecting");j.unselecting=true;c._trigger("unselecting",i,{unselecting:j.element})}}}});return false},_mouseStop:function(d){var b=this;this.dragged=false;var c=this.options;a(".ui-unselecting",this.element[0]).each(function(){var e=a.data(this,"selectable-item");e.$element.removeClass("ui-unselecting");e.unselecting=false;e.startselected=false;b._trigger("unselected",d,{unselected:e.element})});a(".ui-selecting",this.element[0]).each(function(){var 
e=a.data(this,"selectable-item");e.$element.removeClass("ui-selecting").addClass("ui-selected");e.selecting=false;e.selected=true;e.startselected=true;b._trigger("selected",d,{selected:e.element})});this._trigger("stop",d);this.helper.remove();return false}}));a.extend(a.ui.selectable,{version:"1.7.2",defaults:{appendTo:"body",autoRefresh:true,cancel:":input,option",delay:0,distance:0,filter:"*",tolerance:"touch"}})})(jQuery);;/*
  * jQuery UI Sortable 1.7.2
  *
- * Copyright (c) 2009 AUTHORS.txt (http://jqueryui.com/about)
+ * Copyright (c) 2010 AUTHORS.txt (http://jqueryui.com/about)
  * Dual licensed under the MIT (MIT-LICENSE.txt)
  * and GPL (GPL-LICENSE.txt) licenses.
  *
@@ -71,7 +71,7 @@
 (function(a){a.widget("ui.sortable",a.extend({},a.ui.mouse,{_init:function(){var b=this.options;this.containerCache={};this.element.addClass("ui-sortable");this.refresh();this.floating=this.items.length?(/left|right/).test(this.items[0].item.css("float")):false;this.offset=this.element.offset();this._mouseInit()},destroy:function(){this.element.removeClass("ui-sortable ui-sortable-disabled").removeData("sortable").unbind(".sortable");this._mouseDestroy();for(var b=this.items.length-1;b>=0;b--){this.items[b].item.removeData("sortable-item")}},_mouseCapture:function(e,f){if(this.reverting){return false}if(this.options.disabled||this.options.type=="static"){return false}this._refreshItems(e);var d=null,c=this,b=a(e.target).parents().each(function(){if(a.data(this,"sortable-item")==c){d=a(this);return false}});if(a.data(e.target,"sortable-item")==c){d=a(e.target)}if(!d){return false}if(this.options.handle&&!f){var g=false;a(this.options.handle,d).find("*").andSelf().each(function(){if(this==e.target){g=true}});if(!g){return false}}this.currentItem=d;this._removeCurrentsFromItems();return true},_mouseStart:function(e,f,b){var g=this.options,c=this;this.currentContainer=this;this.refreshPositions();this.helper=this._createHelper(e);this._cacheHelperProportions();this._cacheMargins();this.scrollParent=this.helper.scrollParent();this.offset=this.currentItem.offset();this.offset={top:this.offset.top-this.margins.top,left:this.offset.left-this.margins.left};this.helper.css("position","absolute");this.cssPosition=this.helper.css("position");a.extend(this.offset,{click:{left:e.pageX-this.offset.left,top:e.pageY-this.offset.top},parent:this._getParentOffset(),relative:this._getRelativeOffset()});this.originalPosition=this._generatePosition(e);this.originalPageX=e.pageX;this.originalPageY=e.pageY;if(g.cursorAt){this._adjustOffsetFromHelper(g.cursorAt)}this.domPosition={prev:this.currentItem.prev()[0],parent:this.currentItem.parent()[0]};if(this.helper[0]!=this.currentItem[0]){this.currentItem.hide()}this._createPlaceholder();if(g.containment){this._setContainment()}if(g.cursor){if(a("body").css("cursor")){this._storedCursor=a("body").css("cursor")}a("body").css("cursor",g.cursor)}if(g.opacity){if(this.helper.css("opacity")){this._storedOpacity=this.helper.css("opacity")}this.helper.css("opacity",g.opacity)}if(g.zIndex){if(this.helper.css("zIndex")){this._storedZIndex=this.helper.css("zIndex")}this.helper.css("zIndex",g.zIndex)}if(this.scrollParent[0]!=document&&this.scrollParent[0].tagName!="HTML"){this.overflowOffset=this.scrollParent.offset()}this._trigger("start",e,this._uiHash());if(!this._preserveHelperProportions){this._cacheHelperProportions()}if(!b){for(var d=this.containers.length-1;d>=0;d--){this.containers[d]._trigger("activate",e,c._uiHash(this))}}if(a.ui.ddmanager){a.ui.ddmanager.current=this}if(a.ui.ddmanager&&!g.dropBehaviour){a.ui.ddmanager.prepareOffsets(this,e)}this.dragging=true;this.helper.addClass("ui-sortable-helper");this._mouseDrag(e);return true},_mouseDrag:function(f){this.position=this._generatePosition(f);this.positionAbs=this._convertPositionTo("absolute");if(!this.lastPositionAbs){this.lastPositionAbs=this.positionAbs}if(this.options.scroll){var 
g=this.options,b=false;if(this.scrollParent[0]!=document&&this.scrollParent[0].tagName!="HTML"){if((this.overflowOffset.top+this.scrollParent[0].offsetHeight)-f.pageY<g.scrollSensitivity){this.scrollParent[0].scrollTop=b=this.scrollParent[0].scrollTop+g.scrollSpeed}else{if(f.pageY-this.overflowOffset.top<g.scrollSensitivity){this.scrollParent[0].scrollTop=b=this.scrollParent[0].scrollTop-g.scrollSpeed}}if((this.overflowOffset.left+this.scrollParent[0].offsetWidth)-f.pageX<g.scrollSensitivity){this.scrollParent[0].scrollLeft=b=this.scrollParent[0].scrollLeft+g.scrollSpeed}else{if(f.pageX-this.overflowOffset.left<g.scrollSensitivity){this.scrollParent[0].scrollLeft=b=this.scrollParent[0].scrollLeft-g.scrollSpeed}}}else{if(f.pageY-a(document).scrollTop()<g.scrollSensitivity){b=a(document).scrollTop(a(document).scrollTop()-g.scrollSpeed)}else{if(a(window).height()-(f.pageY-a(document).scrollTop())<g.scrollSensitivity){b=a(document).scrollTop(a(document).scrollTop()+g.scrollSpeed)}}if(f.pageX-a(document).scrollLeft()<g.scrollSensitivity){b=a(document).scrollLeft(a(document).scrollLeft()-g.scrollSpeed)}else{if(a(window).width()-(f.pageX-a(document).scrollLeft())<g.scrollSensitivity){b=a(document).scrollLeft(a(document).scrollLeft()+g.scrollSpeed)}}}if(b!==false&&a.ui.ddmanager&&!g.dropBehaviour){a.ui.ddmanager.prepareOffsets(this,f)}}this.positionAbs=this._convertPositionTo("absolute");if(!this.options.axis||this.options.axis!="y"){this.helper[0].style.left=this.position.left+"px"}if(!this.options.axis||this.options.axis!="x"){this.helper[0].style.top=this.position.top+"px"}for(var d=this.items.length-1;d>=0;d--){var e=this.items[d],c=e.item[0],h=this._intersectsWithPointer(e);if(!h){continue}if(c!=this.currentItem[0]&&this.placeholder[h==1?"next":"prev"]()[0]!=c&&!a.ui.contains(this.placeholder[0],c)&&(this.options.type=="semi-dynamic"?!a.ui.contains(this.element[0],c):true)){this.direction=h==1?"down":"up";if(this.options.tolerance=="pointer"||this._intersectsWithSides(e)){this._rearrange(f,e)}else{break}this._trigger("change",f,this._uiHash());break}}this._contactContainers(f);if(a.ui.ddmanager){a.ui.ddmanager.drag(this,f)}this._trigger("sort",f,this._uiHash());this.lastPositionAbs=this.positionAbs;return false},_mouseStop:function(c,d){if(!c){return}if(a.ui.ddmanager&&!this.options.dropBehaviour){a.ui.ddmanager.drop(this,c)}if(this.options.revert){var b=this;var e=b.placeholder.offset();b.reverting=true;a(this.helper).animate({left:e.left-this.offset.parent.left-b.margins.left+(this.offsetParent[0]==document.body?0:this.offsetParent[0].scrollLeft),top:e.top-this.offset.parent.top-b.margins.top+(this.offsetParent[0]==document.body?0:this.offsetParent[0].scrollTop)},parseInt(this.options.revert,10)||500,function(){b._clear(c)})}else{this._clear(c,d)}return false},cancel:function(){var b=this;if(this.dragging){this._mouseUp();if(this.options.helper=="original"){this.currentItem.css(this._storedCSS).removeClass("ui-sortable-helper")}else{this.currentItem.show()}for(var 
c=this.containers.length-1;c>=0;c--){this.containers[c]._trigger("deactivate",null,b._uiHash(this));if(this.containers[c].containerCache.over){this.containers[c]._trigger("out",null,b._uiHash(this));this.containers[c].containerCache.over=0}}}if(this.placeholder[0].parentNode){this.placeholder[0].parentNode.removeChild(this.placeholder[0])}if(this.options.helper!="original"&&this.helper&&this.helper[0].parentNode){this.helper.remove()}a.extend(this,{helper:null,dragging:false,reverting:false,_noFinalSort:null});if(this.domPosition.prev){a(this.domPosition.prev).after(this.currentItem)}else{a(this.domPosition.parent).prepend(this.currentItem)}return true},serialize:function(d){var b=this._getItemsAsjQuery(d&&d.connected);var c=[];d=d||{};a(b).each(function(){var e=(a(d.item||this).attr(d.attribute||"id")||"").match(d.expression||(/(.+)[-=_](.+)/));if(e){c.push((d.key||e[1]+"[]")+"="+(d.key&&d.expression?e[1]:e[2]))}});return c.join("&")},toArray:function(d){var b=this._getItemsAsjQuery(d&&d.connected);var c=[];d=d||{};b.each(function(){c.push(a(d.item||this).attr(d.attribute||"id")||"")});return c},_intersectsWith:function(m){var e=this.positionAbs.left,d=e+this.helperProportions.width,k=this.positionAbs.top,j=k+this.helperProportions.height;var f=m.left,c=f+m.width,n=m.top,i=n+m.height;var o=this.offset.click.top,h=this.offset.click.left;var g=(k+o)>n&&(k+o)<i&&(e+h)>f&&(e+h)<c;if(this.options.tolerance=="pointer"||this.options.forcePointerForContainers||(this.options.tolerance!="pointer"&&this.helperProportions[this.floating?"width":"height"]>m[this.floating?"width":"height"])){return g}else{return(f<e+(this.helperProportions.width/2)&&d-(this.helperProportions.width/2)<c&&n<k+(this.helperProportions.height/2)&&j-(this.helperProportions.height/2)<i)}},_intersectsWithPointer:function(d){var e=a.ui.isOverAxis(this.positionAbs.top+this.offset.click.top,d.top,d.height),c=a.ui.isOverAxis(this.positionAbs.left+this.offset.click.left,d.left,d.width),g=e&&c,b=this._getDragVerticalDirection(),f=this._getDragHorizontalDirection();if(!g){return false}return this.floating?(((f&&f=="right")||b=="down")?2:1):(b&&(b=="down"?2:1))},_intersectsWithSides:function(e){var c=a.ui.isOverAxis(this.positionAbs.top+this.offset.click.top,e.top+(e.height/2),e.height),d=a.ui.isOverAxis(this.positionAbs.left+this.offset.click.left,e.left+(e.width/2),e.width),b=this._getDragVerticalDirection(),f=this._getDragHorizontalDirection();if(this.floating&&f){return((f=="right"&&d)||(f=="left"&&!d))}else{return b&&((b=="down"&&c)||(b=="up"&&!c))}},_getDragVerticalDirection:function(){var b=this.positionAbs.top-this.lastPositionAbs.top;return b!=0&&(b>0?"down":"up")},_getDragHorizontalDirection:function(){var b=this.positionAbs.left-this.lastPositionAbs.left;return b!=0&&(b>0?"right":"left")},refresh:function(b){this._refreshItems(b);this.refreshPositions()},_connectWith:function(){var b=this.options;return b.connectWith.constructor==String?[b.connectWith]:b.connectWith},_getItemsAsjQuery:function(b){var l=this;var g=[];var e=[];var h=this._connectWith();if(h&&b){for(var d=h.length-1;d>=0;d--){var k=a(h[d]);for(var c=k.length-1;c>=0;c--){var 
f=a.data(k[c],"sortable");if(f&&f!=this&&!f.options.disabled){e.push([a.isFunction(f.options.items)?f.options.items.call(f.element):a(f.options.items,f.element).not(".ui-sortable-helper"),f])}}}}e.push([a.isFunction(this.options.items)?this.options.items.call(this.element,null,{options:this.options,item:this.currentItem}):a(this.options.items,this.element).not(".ui-sortable-helper"),this]);for(var d=e.length-1;d>=0;d--){e[d][0].each(function(){g.push(this)})}return a(g)},_removeCurrentsFromItems:function(){var d=this.currentItem.find(":data(sortable-item)");for(var c=0;c<this.items.length;c++){for(var b=0;b<d.length;b++){if(d[b]==this.items[c].item[0]){this.items.splice(c,1)}}}},_refreshItems:function(b){this.items=[];this.containers=[this];var h=this.items;var p=this;var f=[[a.isFunction(this.options.items)?this.options.items.call(this.element[0],b,{item:this.currentItem}):a(this.options.items,this.element),this]];var l=this._connectWith();if(l){for(var e=l.length-1;e>=0;e--){var m=a(l[e]);for(var d=m.length-1;d>=0;d--){var g=a.data(m[d],"sortable");if(g&&g!=this&&!g.options.disabled){f.push([a.isFunction(g.options.items)?g.options.items.call(g.element[0],b,{item:this.currentItem}):a(g.options.items,g.element),g]);this.containers.push(g)}}}}for(var e=f.length-1;e>=0;e--){var k=f[e][1];var c=f[e][0];for(var d=0,n=c.length;d<n;d++){var o=a(c[d]);o.data("sortable-item",k);h.push({item:o,instance:k,width:0,height:0,left:0,top:0})}}},refreshPositions:function(b){if(this.offsetParent&&this.helper){this.offset.parent=this._getParentOffset()}for(var d=this.items.length-1;d>=0;d--){var e=this.items[d];if(e.instance!=this.currentContainer&&this.currentContainer&&e.item[0]!=this.currentItem[0]){continue}var c=this.options.toleranceElement?a(this.options.toleranceElement,e.item):e.item;if(!b){e.width=c.outerWidth();e.height=c.outerHeight()}var f=c.offset();e.left=f.left;e.top=f.top}if(this.options.custom&&this.options.custom.refreshContainers){this.options.custom.refreshContainers.call(this)}else{for(var d=this.containers.length-1;d>=0;d--){var f=this.containers[d].element.offset();this.containers[d].containerCache.left=f.left;this.containers[d].containerCache.top=f.top;this.containers[d].containerCache.width=this.containers[d].element.outerWidth();this.containers[d].containerCache.height=this.containers[d].element.outerHeight()}}},_createPlaceholder:function(d){var b=d||this,e=b.options;if(!e.placeholder||e.placeholder.constructor==String){var c=e.placeholder;e.placeholder={element:function(){var f=a(document.createElement(b.currentItem[0].nodeName)).addClass(c||b.currentItem[0].className+" ui-sortable-placeholder").removeClass("ui-sortable-helper")[0];if(!c){f.style.visibility="hidden"}return f},update:function(f,g){if(c&&!e.forcePlaceholderSize){return}if(!g.height()){g.height(b.currentItem.innerHeight()-parseInt(b.currentItem.css("paddingTop")||0,10)-parseInt(b.currentItem.css("paddingBottom")||0,10))}if(!g.width()){g.width(b.currentItem.innerWidth()-parseInt(b.currentItem.css("paddingLeft")||0,10)-parseInt(b.currentItem.css("paddingRight")||0,10))}}}}b.placeholder=a(e.placeholder.element.call(b.element,b.currentItem));b.currentItem.after(b.placeholder);e.placeholder.update(b,b.placeholder)},_contactContainers:function(d){for(var c=this.containers.length-1;c>=0;c--){if(this._intersectsWith(this.containers[c].containerCache)){if(!this.containers[c].containerCache.over){if(this.currentContainer!=this.containers[c]){var h=10000;var g=null;var 
e=this.positionAbs[this.containers[c].floating?"left":"top"];for(var b=this.items.length-1;b>=0;b--){if(!a.ui.contains(this.containers[c].element[0],this.items[b].item[0])){continue}var f=this.items[b][this.containers[c].floating?"left":"top"];if(Math.abs(f-e)<h){h=Math.abs(f-e);g=this.items[b]}}if(!g&&!this.options.dropOnEmpty){continue}this.currentContainer=this.containers[c];g?this._rearrange(d,g,null,true):this._rearrange(d,null,this.containers[c].element,true);this._trigger("change",d,this._uiHash());this.containers[c]._trigger("change",d,this._uiHash(this));this.options.placeholder.update(this.currentContainer,this.placeholder)}this.containers[c]._trigger("over",d,this._uiHash(this));this.containers[c].containerCache.over=1}}else{if(this.containers[c].containerCache.over){this.containers[c]._trigger("out",d,this._uiHash(this));this.containers[c].containerCache.over=0}}}},_createHelper:function(c){var d=this.options;var b=a.isFunction(d.helper)?a(d.helper.apply(this.element[0],[c,this.currentItem])):(d.helper=="clone"?this.currentItem.clone():this.currentItem);if(!b.parents("body").length){a(d.appendTo!="parent"?d.appendTo:this.currentItem[0].parentNode)[0].appendChild(b[0])}if(b[0]==this.currentItem[0]){this._storedCSS={width:this.currentItem[0].style.width,height:this.currentItem[0].style.height,position:this.currentItem.css("position"),top:this.currentItem.css("top"),left:this.currentItem.css("left")}}if(b[0].style.width==""||d.forceHelperSize){b.width(this.currentItem.width())}if(b[0].style.height==""||d.forceHelperSize){b.height(this.currentItem.height())}return b},_adjustOffsetFromHelper:function(b){if(b.left!=undefined){this.offset.click.left=b.left+this.margins.left}if(b.right!=undefined){this.offset.click.left=this.helperProportions.width-b.right+this.margins.left}if(b.top!=undefined){this.offset.click.top=b.top+this.margins.top}if(b.bottom!=undefined){this.offset.click.top=this.helperProportions.height-b.bottom+this.margins.top}},_getParentOffset:function(){this.offsetParent=this.helper.offsetParent();var b=this.offsetParent.offset();if(this.cssPosition=="absolute"&&this.scrollParent[0]!=document&&a.ui.contains(this.scrollParent[0],this.offsetParent[0])){b.left+=this.scrollParent.scrollLeft();b.top+=this.scrollParent.scrollTop()}if((this.offsetParent[0]==document.body)||(this.offsetParent[0].tagName&&this.offsetParent[0].tagName.toLowerCase()=="html"&&a.browser.msie)){b={top:0,left:0}}return{top:b.top+(parseInt(this.offsetParent.css("borderTopWidth"),10)||0),left:b.left+(parseInt(this.offsetParent.css("borderLeftWidth"),10)||0)}},_getRelativeOffset:function(){if(this.cssPosition=="relative"){var b=this.currentItem.position();return{top:b.top-(parseInt(this.helper.css("top"),10)||0)+this.scrollParent.scrollTop(),left:b.left-(parseInt(this.helper.css("left"),10)||0)+this.scrollParent.scrollLeft()}}else{return{top:0,left:0}}},_cacheMargins:function(){this.margins={left:(parseInt(this.currentItem.css("marginLeft"),10)||0),top:(parseInt(this.currentItem.css("marginTop"),10)||0)}},_cacheHelperProportions:function(){this.helperProportions={width:this.helper.outerWidth(),height:this.helper.outerHeight()}},_setContainment:function(){var 
e=this.options;if(e.containment=="parent"){e.containment=this.helper[0].parentNode}if(e.containment=="document"||e.containment=="window"){this.containment=[0-this.offset.relative.left-this.offset.parent.left,0-this.offset.relative.top-this.offset.parent.top,a(e.containment=="document"?document:window).width()-this.helperProportions.width-this.margins.left,(a(e.containment=="document"?document:window).height()||document.body.parentNode.scrollHeight)-this.helperProportions.height-this.margins.top]}if(!(/^(document|window|parent)$/).test(e.containment)){var c=a(e.containment)[0];var d=a(e.containment).offset();var b=(a(c).css("overflow")!="hidden");this.containment=[d.left+(parseInt(a(c).css("borderLeftWidth"),10)||0)+(parseInt(a(c).css("paddingLeft"),10)||0)-this.margins.left,d.top+(parseInt(a(c).css("borderTopWidth"),10)||0)+(parseInt(a(c).css("paddingTop"),10)||0)-this.margins.top,d.left+(b?Math.max(c.scrollWidth,c.offsetWidth):c.offsetWidth)-(parseInt(a(c).css("borderLeftWidth"),10)||0)-(parseInt(a(c).css("paddingRight"),10)||0)-this.helperProportions.width-this.margins.left,d.top+(b?Math.max(c.scrollHeight,c.offsetHeight):c.offsetHeight)-(parseInt(a(c).css("borderTopWidth"),10)||0)-(parseInt(a(c).css("paddingBottom"),10)||0)-this.helperProportions.height-this.margins.top]}},_convertPositionTo:function(f,h){if(!h){h=this.position}var c=f=="absolute"?1:-1;var e=this.options,b=this.cssPosition=="absolute"&&!(this.scrollParent[0]!=document&&a.ui.contains(this.scrollParent[0],this.offsetParent[0]))?this.offsetParent:this.scrollParent,g=(/(html|body)/i).test(b[0].tagName);return{top:(h.top+this.offset.relative.top*c+this.offset.parent.top*c-(a.browser.safari&&this.cssPosition=="fixed"?0:(this.cssPosition=="fixed"?-this.scrollParent.scrollTop():(g?0:b.scrollTop()))*c)),left:(h.left+this.offset.relative.left*c+this.offset.parent.left*c-(a.browser.safari&&this.cssPosition=="fixed"?0:(this.cssPosition=="fixed"?-this.scrollParent.scrollLeft():g?0:b.scrollLeft())*c))}},_generatePosition:function(e){var h=this.options,b=this.cssPosition=="absolute"&&!(this.scrollParent[0]!=document&&a.ui.contains(this.scrollParent[0],this.offsetParent[0]))?this.offsetParent:this.scrollParent,i=(/(html|body)/i).test(b[0].tagName);if(this.cssPosition=="relative"&&!(this.scrollParent[0]!=document&&this.scrollParent[0]!=this.offsetParent[0])){this.offset.relative=this._getRelativeOffset()}var d=e.pageX;var c=e.pageY;if(this.originalPosition){if(this.containment){if(e.pageX-this.offset.click.left<this.containment[0]){d=this.containment[0]+this.offset.click.left}if(e.pageY-this.offset.click.top<this.containment[1]){c=this.containment[1]+this.offset.click.top}if(e.pageX-this.offset.click.left>this.containment[2]){d=this.containment[2]+this.offset.click.left}if(e.pageY-this.offset.click.top>this.containment[3]){c=this.containment[3]+this.offset.click.top}}if(h.grid){var g=this.originalPageY+Math.round((c-this.originalPageY)/h.grid[1])*h.grid[1];c=this.containment?(!(g-this.offset.click.top<this.containment[1]||g-this.offset.click.top>this.containment[3])?g:(!(g-this.offset.click.top<this.containment[1])?g-h.grid[1]:g+h.grid[1])):g;var 
f=this.originalPageX+Math.round((d-this.originalPageX)/h.grid[0])*h.grid[0];d=this.containment?(!(f-this.offset.click.left<this.containment[0]||f-this.offset.click.left>this.containment[2])?f:(!(f-this.offset.click.left<this.containment[0])?f-h.grid[0]:f+h.grid[0])):f}}return{top:(c-this.offset.click.top-this.offset.relative.top-this.offset.parent.top+(a.browser.safari&&this.cssPosition=="fixed"?0:(this.cssPosition=="fixed"?-this.scrollParent.scrollTop():(i?0:b.scrollTop())))),left:(d-this.offset.click.left-this.offset.relative.left-this.offset.parent.left+(a.browser.safari&&this.cssPosition=="fixed"?0:(this.cssPosition=="fixed"?-this.scrollParent.scrollLeft():i?0:b.scrollLeft())))}},_rearrange:function(g,f,c,e){c?c[0].appendChild(this.placeholder[0]):f.item[0].parentNode.insertBefore(this.placeholder[0],(this.direction=="down"?f.item[0]:f.item[0].nextSibling));this.counter=this.counter?++this.counter:1;var d=this,b=this.counter;window.setTimeout(function(){if(b==d.counter){d.refreshPositions(!e)}},0)},_clear:function(d,e){this.reverting=false;var f=[],b=this;if(!this._noFinalSort&&this.currentItem[0].parentNode){this.placeholder.before(this.currentItem)}this._noFinalSort=null;if(this.helper[0]==this.currentItem[0]){for(var c in this._storedCSS){if(this._storedCSS[c]=="auto"||this._storedCSS[c]=="static"){this._storedCSS[c]=""}}this.currentItem.css(this._storedCSS).removeClass("ui-sortable-helper")}else{this.currentItem.show()}if(this.fromOutside&&!e){f.push(function(g){this._trigger("receive",g,this._uiHash(this.fromOutside))})}if((this.fromOutside||this.domPosition.prev!=this.currentItem.prev().not(".ui-sortable-helper")[0]||this.domPosition.parent!=this.currentItem.parent()[0])&&!e){f.push(function(g){this._trigger("update",g,this._uiHash())})}if(!a.ui.contains(this.element[0],this.currentItem[0])){if(!e){f.push(function(g){this._trigger("remove",g,this._uiHash())})}for(var c=this.containers.length-1;c>=0;c--){if(a.ui.contains(this.containers[c].element[0],this.currentItem[0])&&!e){f.push((function(g){return function(h){g._trigger("receive",h,this._uiHash(this))}}).call(this,this.containers[c]));f.push((function(g){return function(h){g._trigger("update",h,this._uiHash(this))}}).call(this,this.containers[c]))}}}for(var c=this.containers.length-1;c>=0;c--){if(!e){f.push((function(g){return function(h){g._trigger("deactivate",h,this._uiHash(this))}}).call(this,this.containers[c]))}if(this.containers[c].containerCache.over){f.push((function(g){return function(h){g._trigger("out",h,this._uiHash(this))}}).call(this,this.containers[c]));this.containers[c].containerCache.over=0}}if(this._storedCursor){a("body").css("cursor",this._storedCursor)}if(this._storedOpacity){this.helper.css("opacity",this._storedOpacity)}if(this._storedZIndex){this.helper.css("zIndex",this._storedZIndex=="auto"?"":this._storedZIndex)}this.dragging=false;if(this.cancelHelperRemoval){if(!e){this._trigger("beforeStop",d,this._uiHash());for(var c=0;c<f.length;c++){f[c].call(this,d)}this._trigger("stop",d,this._uiHash())}return false}if(!e){this._trigger("beforeStop",d,this._uiHash())}this.placeholder[0].parentNode.removeChild(this.placeholder[0]);if(this.helper[0]!=this.currentItem[0]){this.helper.remove()}this.helper=null;if(!e){for(var c=0;c<f.length;c++){f[c].call(this,d)}this._trigger("stop",d,this._uiHash())}this.fromOutside=false;return true},_trigger:function(){if(a.widget.prototype._trigger.apply(this,arguments)===false){this.cancel()}},_uiHash:function(c){var 
b=c||this;return{helper:b.helper,placeholder:b.placeholder||a([]),position:b.position,absolutePosition:b.positionAbs,offset:b.positionAbs,item:b.currentItem,sender:c?c.element:null}}}));a.extend(a.ui.sortable,{getter:"serialize toArray",version:"1.7.2",eventPrefix:"sort",defaults:{appendTo:"parent",axis:false,cancel:":input,option",connectWith:false,containment:false,cursor:"auto",cursorAt:false,delay:0,distance:1,dropOnEmpty:true,forcePlaceholderSize:false,forceHelperSize:false,grid:false,handle:false,helper:"original",items:"> *",opacity:false,placeholder:false,revert:false,scroll:true,scrollSensitivity:20,scrollSpeed:20,scope:"default",tolerance:"intersect",zIndex:1000}})})(jQuery);;/*
  * jQuery UI Accordion 1.7.2
  *
- * Copyright (c) 2009 AUTHORS.txt (http://jqueryui.com/about)
+ * Copyright (c) 2010 AUTHORS.txt (http://jqueryui.com/about)
  * Dual licensed under the MIT (MIT-LICENSE.txt)
  * and GPL (GPL-LICENSE.txt) licenses.
  *
@@ -83,7 +83,7 @@
 (function(a){a.widget("ui.accordion",{_init:function(){var d=this.options,b=this;this.running=0;if(d.collapsible==a.ui.accordion.defaults.collapsible&&d.alwaysOpen!=a.ui.accordion.defaults.alwaysOpen){d.collapsible=!d.alwaysOpen}if(d.navigation){var c=this.element.find("a").filter(d.navigationFilter);if(c.length){if(c.filter(d.header).length){this.active=c}else{this.active=c.parent().parent().prev();c.addClass("ui-accordion-content-active")}}}this.element.addClass("ui-accordion ui-widget ui-helper-reset");if(this.element[0].nodeName=="UL"){this.element.children("li").addClass("ui-accordion-li-fix")}this.headers=this.element.find(d.header).addClass("ui-accordion-header ui-helper-reset ui-state-default ui-corner-all").bind("mouseenter.accordion",function(){a(this).addClass("ui-state-hover")}).bind("mouseleave.accordion",function(){a(this).removeClass("ui-state-hover")}).bind("focus.accordion",function(){a(this).addClass("ui-state-focus")}).bind("blur.accordion",function(){a(this).removeClass("ui-state-focus")});this.headers.next().addClass("ui-accordion-content ui-helper-reset ui-widget-content ui-corner-bottom");this.active=this._findActive(this.active||d.active).toggleClass("ui-state-default").toggleClass("ui-state-active").toggleClass("ui-corner-all").toggleClass("ui-corner-top");this.active.next().addClass("ui-accordion-content-active");a("<span/>").addClass("ui-icon "+d.icons.header).prependTo(this.headers);this.active.find(".ui-icon").toggleClass(d.icons.header).toggleClass(d.icons.headerSelected);if(a.browser.msie){this.element.find("a").css("zoom","1")}this.resize();this.element.attr("role","tablist");this.headers.attr("role","tab").bind("keydown",function(e){return b._keydown(e)}).next().attr("role","tabpanel");this.headers.not(this.active||"").attr("aria-expanded","false").attr("tabIndex","-1").next().hide();if(!this.active.length){this.headers.eq(0).attr("tabIndex","0")}else{this.active.attr("aria-expanded","true").attr("tabIndex","0")}if(!a.browser.safari){this.headers.find("a").attr("tabIndex","-1")}if(d.event){this.headers.bind((d.event)+".accordion",function(e){return b._clickHandler.call(b,e,this)})}},destroy:function(){var c=this.options;this.element.removeClass("ui-accordion ui-widget ui-helper-reset").removeAttr("role").unbind(".accordion").removeData("accordion");this.headers.unbind(".accordion").removeClass("ui-accordion-header ui-helper-reset ui-state-default ui-corner-all ui-state-active ui-corner-top").removeAttr("role").removeAttr("aria-expanded").removeAttr("tabindex");this.headers.find("a").removeAttr("tabindex");this.headers.children(".ui-icon").remove();var b=this.headers.next().css("display","").removeAttr("role").removeClass("ui-helper-reset ui-widget-content ui-corner-bottom ui-accordion-content ui-accordion-content-active");if(c.autoHeight||c.fillHeight){b.css("height","")}},_setData:function(b,c){if(b=="alwaysOpen"){b="collapsible";c=!c}a.widget.prototype._setData.apply(this,arguments)},_keydown:function(e){var g=this.options,f=a.ui.keyCode;if(g.disabled||e.altKey||e.ctrlKey){return}var d=this.headers.length;var b=this.headers.index(e.target);var c=false;switch(e.keyCode){case f.RIGHT:case f.DOWN:c=this.headers[(b+1)%d];break;case f.LEFT:case f.UP:c=this.headers[(b-1+d)%d];break;case f.SPACE:case f.ENTER:return this._clickHandler({target:e.target},e.target)}if(c){a(e.target).attr("tabIndex","-1");a(c).attr("tabIndex","0");c.focus();return false}return true},resize:function(){var e=this.options,d;if(e.fillSpace){if(a.browser.msie){var 
b=this.element.parent().css("overflow");this.element.parent().css("overflow","hidden")}d=this.element.parent().height();if(a.browser.msie){this.element.parent().css("overflow",b)}this.headers.each(function(){d-=a(this).outerHeight()});var c=0;this.headers.next().each(function(){c=Math.max(c,a(this).innerHeight()-a(this).height())}).height(Math.max(0,d-c)).css("overflow","auto")}else{if(e.autoHeight){d=0;this.headers.next().each(function(){d=Math.max(d,a(this).outerHeight())}).height(d)}}},activate:function(b){var c=this._findActive(b)[0];this._clickHandler({target:c},c)},_findActive:function(b){return b?typeof b=="number"?this.headers.filter(":eq("+b+")"):this.headers.not(this.headers.not(b)):b===false?a([]):this.headers.filter(":eq(0)")},_clickHandler:function(b,f){var d=this.options;if(d.disabled){return false}if(!b.target&&d.collapsible){this.active.removeClass("ui-state-active ui-corner-top").addClass("ui-state-default ui-corner-all").find(".ui-icon").removeClass(d.icons.headerSelected).addClass(d.icons.header);this.active.next().addClass("ui-accordion-content-active");var h=this.active.next(),e={options:d,newHeader:a([]),oldHeader:d.active,newContent:a([]),oldContent:h},c=(this.active=a([]));this._toggle(c,h,e);return false}var g=a(b.currentTarget||f);var i=g[0]==this.active[0];if(this.running||(!d.collapsible&&i)){return false}this.active.removeClass("ui-state-active ui-corner-top").addClass("ui-state-default ui-corner-all").find(".ui-icon").removeClass(d.icons.headerSelected).addClass(d.icons.header);this.active.next().addClass("ui-accordion-content-active");if(!i){g.removeClass("ui-state-default ui-corner-all").addClass("ui-state-active ui-corner-top").find(".ui-icon").removeClass(d.icons.header).addClass(d.icons.headerSelected);g.next().addClass("ui-accordion-content-active")}var c=g.next(),h=this.active.next(),e={options:d,newHeader:i&&d.collapsible?a([]):g,oldHeader:this.active,newContent:i&&d.collapsible?a([]):c.find("> *"),oldContent:h.find("> *")},j=this.headers.index(this.active[0])>this.headers.index(g[0]);this.active=i?a([]):g;this._toggle(c,h,e,i,j);return false},_toggle:function(b,i,g,j,k){var d=this.options,m=this;this.toShow=b;this.toHide=i;this.data=g;var c=function(){if(!m){return}return m._completed.apply(m,arguments)};this._trigger("changestart",null,this.data);this.running=i.size()===0?b.size():i.size();if(d.animated){var f={};if(d.collapsible&&j){f={toShow:a([]),toHide:i,complete:c,down:k,autoHeight:d.autoHeight||d.fillSpace}}else{f={toShow:b,toHide:i,complete:c,down:k,autoHeight:d.autoHeight||d.fillSpace}}if(!d.proxied){d.proxied=d.animated}if(!d.proxiedDuration){d.proxiedDuration=d.duration}d.animated=a.isFunction(d.proxied)?d.proxied(f):d.proxied;d.duration=a.isFunction(d.proxiedDuration)?d.proxiedDuration(f):d.proxiedDuration;var l=a.ui.accordion.animations,e=d.duration,h=d.animated;if(!l[h]){l[h]=function(n){this.slide(n,{easing:h,duration:e||700})}}l[h](f)}else{if(d.collapsible&&j){b.toggle()}else{i.hide();b.show()}c(true)}i.prev().attr("aria-expanded","false").attr("tabIndex","-1").blur();b.prev().attr("aria-expanded","true").attr("tabIndex","0").focus()},_completed:function(b){var c=this.options;this.running=b?0:--this.running;if(this.running){return}if(c.clearStyle){this.toShow.add(this.toHide).css({height:"",overflow:""})}this._trigger("change",null,this.data)}});a.extend(a.ui.accordion,{version:"1.7.2",defaults:{active:null,alwaysOpen:true,animated:"slide",autoHeight:true,clearStyle:false,collapsible:false,event:"click",fillSpace:false,header:"> li > 
:first-child,> :not(li):even",icons:{header:"ui-icon-triangle-1-e",headerSelected:"ui-icon-triangle-1-s"},navigation:false,navigationFilter:function(){return this.href.toLowerCase()==location.href.toLowerCase()}},animations:{slide:function(j,h){j=a.extend({easing:"swing",duration:300},j,h);if(!j.toHide.size()){j.toShow.animate({height:"show"},j);return}if(!j.toShow.size()){j.toHide.animate({height:"hide"},j);return}var c=j.toShow.css("overflow"),g,d={},f={},e=["height","paddingTop","paddingBottom"],b;var i=j.toShow;b=i[0].style.width;i.width(parseInt(i.parent().width(),10)-parseInt(i.css("paddingLeft"),10)-parseInt(i.css("paddingRight"),10)-(parseInt(i.css("borderLeftWidth"),10)||0)-(parseInt(i.css("borderRightWidth"),10)||0));a.each(e,function(k,m){f[m]="hide";var l=(""+a.css(j.toShow[0],m)).match(/^([\d+-.]+)(.*)$/);d[m]={value:l[1],unit:l[2]||"px"}});j.toShow.css({height:0,overflow:"hidden"}).show();j.toHide.filter(":hidden").each(j.complete).end().filter(":visible").animate(f,{step:function(k,l){if(l.prop=="height"){g=(l.now-l.start)/(l.end-l.start)}j.toShow[0].style[l.prop]=(g*d[l.prop].value)+d[l.prop].unit},duration:j.duration,easing:j.easing,complete:function(){if(!j.autoHeight){j.toShow.css("height","")}j.toShow.css("width",b);j.toShow.css({overflow:c});j.complete()}})},bounceslide:function(b){this.slide(b,{easing:b.down?"easeOutBounce":"swing",duration:b.down?1000:200})},easeslide:function(b){this.slide(b,{easing:"easeinout",duration:700})}}})})(jQuery);;/*
  * jQuery UI Dialog 1.7.2
  *
- * Copyright (c) 2009 AUTHORS.txt (http://jqueryui.com/about)
+ * Copyright (c) 2010 AUTHORS.txt (http://jqueryui.com/about)
  * Dual licensed under the MIT (MIT-LICENSE.txt)
  * and GPL (GPL-LICENSE.txt) licenses.
  *
@@ -97,7 +97,7 @@
 (function(c){var b={dragStart:"start.draggable",drag:"drag.draggable",dragStop:"stop.draggable",maxHeight:"maxHeight.resizable",minHeight:"minHeight.resizable",maxWidth:"maxWidth.resizable",minWidth:"minWidth.resizable",resizeStart:"start.resizable",resize:"drag.resizable",resizeStop:"stop.resizable"},a="ui-dialog ui-widget ui-widget-content ui-corner-all ";c.widget("ui.dialog",{_init:function(){this.originalTitle=this.element.attr("title");var l=this,m=this.options,j=m.title||this.originalTitle||"&nbsp;",e=c.ui.dialog.getTitleId(this.element),k=(this.uiDialog=c("<div/>")).appendTo(document.body).hide().addClass(a+m.dialogClass).css({position:"absolute",overflow:"hidden",zIndex:m.zIndex}).attr("tabIndex",-1).css("outline",0).keydown(function(n){(m.closeOnEscape&&n.keyCode&&n.keyCode==c.ui.keyCode.ESCAPE&&l.close(n))}).attr({role:"dialog","aria-labelledby":e}).mousedown(function(n){l.moveToTop(false,n)}),g=this.element.show().removeAttr("title").addClass("ui-dialog-content ui-widget-content").appendTo(k),f=(this.uiDialogTitlebar=c("<div></div>")).addClass("ui-dialog-titlebar ui-widget-header ui-corner-all ui-helper-clearfix").prependTo(k),i=c('<a href="#"/>').addClass("ui-dialog-titlebar-close ui-corner-all").attr("role","button").hover(function(){i.addClass("ui-state-hover")},function(){i.removeClass("ui-state-hover")}).focus(function(){i.addClass("ui-state-focus")}).blur(function(){i.removeClass("ui-state-focus")}).mousedown(function(n){n.stopPropagation()}).click(function(n){l.close(n);return false}).appendTo(f),h=(this.uiDialogTitlebarCloseText=c("<span/>")).addClass("ui-icon ui-icon-closethick").text(m.closeText).appendTo(i),d=c("<span/>").addClass("ui-dialog-title").attr("id",e).html(j).prependTo(f);f.find("*").add(f).disableSelection();(m.draggable&&c.fn.draggable&&this._makeDraggable());(m.resizable&&c.fn.resizable&&this._makeResizable());this._createButtons(m.buttons);this._isOpen=false;(m.bgiframe&&c.fn.bgiframe&&k.bgiframe());(m.autoOpen&&this.open())},destroy:function(){(this.overlay&&this.overlay.destroy());this.uiDialog.hide();this.element.unbind(".dialog").removeData("dialog").removeClass("ui-dialog-content ui-widget-content").hide().appendTo("body");this.uiDialog.remove();(this.originalTitle&&this.element.attr("title",this.originalTitle))},close:function(f){var d=this;if(false===d._trigger("beforeclose",f)){return}(d.overlay&&d.overlay.destroy());d.uiDialog.unbind("keypress.ui-dialog");(d.options.hide?d.uiDialog.hide(d.options.hide,function(){d._trigger("close",f)}):d.uiDialog.hide()&&d._trigger("close",f));c.ui.dialog.overlay.resize();d._isOpen=false;if(d.options.modal){var e=0;c(".ui-dialog").each(function(){if(this!=d.uiDialog[0]){e=Math.max(e,c(this).css("z-index"))}});c.ui.dialog.maxZ=e}},isOpen:function(){return this._isOpen},moveToTop:function(f,e){if((this.options.modal&&!f)||(!this.options.stack&&!this.options.modal)){return this._trigger("focus",e)}if(this.options.zIndex>c.ui.dialog.maxZ){c.ui.dialog.maxZ=this.options.zIndex}(this.overlay&&this.overlay.$el.css("z-index",c.ui.dialog.overlay.maxZ=++c.ui.dialog.maxZ));var d={scrollTop:this.element.attr("scrollTop"),scrollLeft:this.element.attr("scrollLeft")};this.uiDialog.css("z-index",++c.ui.dialog.maxZ);this.element.attr(d);this._trigger("focus",e)},open:function(){if(this._isOpen){return}var e=this.options,d=this.uiDialog;this.overlay=e.modal?new 
c.ui.dialog.overlay(this):null;(d.next().length&&d.appendTo("body"));this._size();this._position(e.position);d.show(e.show);this.moveToTop(true);(e.modal&&d.bind("keypress.ui-dialog",function(h){if(h.keyCode!=c.ui.keyCode.TAB){return}var g=c(":tabbable",this),i=g.filter(":first")[0],f=g.filter(":last")[0];if(h.target==f&&!h.shiftKey){setTimeout(function(){i.focus()},1)}else{if(h.target==i&&h.shiftKey){setTimeout(function(){f.focus()},1)}}}));c([]).add(d.find(".ui-dialog-content :tabbable:first")).add(d.find(".ui-dialog-buttonpane :tabbable:first")).add(d).filter(":first").focus();this._trigger("open");this._isOpen=true},_createButtons:function(g){var f=this,d=false,e=c("<div></div>").addClass("ui-dialog-buttonpane ui-widget-content ui-helper-clearfix");this.uiDialog.find(".ui-dialog-buttonpane").remove();(typeof g=="object"&&g!==null&&c.each(g,function(){return !(d=true)}));if(d){c.each(g,function(h,i){c('<button type="button"></button>').addClass("ui-state-default ui-corner-all").text(h).click(function(){i.apply(f.element[0],arguments)}).hover(function(){c(this).addClass("ui-state-hover")},function(){c(this).removeClass("ui-state-hover")}).focus(function(){c(this).addClass("ui-state-focus")}).blur(function(){c(this).removeClass("ui-state-focus")}).appendTo(e)});e.appendTo(this.uiDialog)}},_makeDraggable:function(){var d=this,f=this.options,e;this.uiDialog.draggable({cancel:".ui-dialog-content",handle:".ui-dialog-titlebar",containment:"document",start:function(){e=f.height;c(this).height(c(this).height()).addClass("ui-dialog-dragging");(f.dragStart&&f.dragStart.apply(d.element[0],arguments))},drag:function(){(f.drag&&f.drag.apply(d.element[0],arguments))},stop:function(){c(this).removeClass("ui-dialog-dragging").height(e);(f.dragStop&&f.dragStop.apply(d.element[0],arguments));c.ui.dialog.overlay.resize()}})},_makeResizable:function(g){g=(g===undefined?this.options.resizable:g);var d=this,f=this.options,e=typeof g=="string"?g:"n,e,s,w,se,sw,ne,nw";this.uiDialog.resizable({cancel:".ui-dialog-content",alsoResize:this.element,maxWidth:f.maxWidth,maxHeight:f.maxHeight,minWidth:f.minWidth,minHeight:f.minHeight,start:function(){c(this).addClass("ui-dialog-resizing");(f.resizeStart&&f.resizeStart.apply(d.element[0],arguments))},resize:function(){(f.resize&&f.resize.apply(d.element[0],arguments))},handles:e,stop:function(){c(this).removeClass("ui-dialog-resizing");f.height=c(this).height();f.width=c(this).width();(f.resizeStop&&f.resizeStop.apply(d.element[0],arguments));c.ui.dialog.overlay.resize()}}).find(".ui-resizable-se").addClass("ui-icon ui-icon-grip-diagonal-se")},_position:function(i){var 
e=c(window),f=c(document),g=f.scrollTop(),d=f.scrollLeft(),h=g;if(c.inArray(i,["center","top","right","bottom","left"])>=0){i=[i=="right"||i=="left"?i:"center",i=="top"||i=="bottom"?i:"middle"]}if(i.constructor!=Array){i=["center","middle"]}if(i[0].constructor==Number){d+=i[0]}else{switch(i[0]){case"left":d+=0;break;case"right":d+=e.width()-this.uiDialog.outerWidth();break;default:case"center":d+=(e.width()-this.uiDialog.outerWidth())/2}}if(i[1].constructor==Number){g+=i[1]}else{switch(i[1]){case"top":g+=0;break;case"bottom":g+=e.height()-this.uiDialog.outerHeight();break;default:case"middle":g+=(e.height()-this.uiDialog.outerHeight())/2}}g=Math.max(g,h);this.uiDialog.css({top:g,left:d})},_setData:function(e,f){(b[e]&&this.uiDialog.data(b[e],f));switch(e){case"buttons":this._createButtons(f);break;case"closeText":this.uiDialogTitlebarCloseText.text(f);break;case"dialogClass":this.uiDialog.removeClass(this.options.dialogClass).addClass(a+f);break;case"draggable":(f?this._makeDraggable():this.uiDialog.draggable("destroy"));break;case"height":this.uiDialog.height(f);break;case"position":this._position(f);break;case"resizable":var d=this.uiDialog,g=this.uiDialog.is(":data(resizable)");(g&&!f&&d.resizable("destroy"));(g&&typeof f=="string"&&d.resizable("option","handles",f));(g||this._makeResizable(f));break;case"title":c(".ui-dialog-title",this.uiDialogTitlebar).html(f||"&nbsp;");break;case"width":this.uiDialog.width(f);break}c.widget.prototype._setData.apply(this,arguments)},_size:function(){var e=this.options;this.element.css({height:0,minHeight:0,width:"auto"});var d=this.uiDialog.css({height:"auto",width:e.width}).height();this.element.css({minHeight:Math.max(e.minHeight-d,0),height:e.height=="auto"?"auto":Math.max(e.height-d,0)})}});c.extend(c.ui.dialog,{version:"1.7.2",defaults:{autoOpen:true,bgiframe:false,buttons:{},closeOnEscape:true,closeText:"close",dialogClass:"",draggable:true,hide:null,height:"auto",maxHeight:false,maxWidth:false,minHeight:150,minWidth:150,modal:false,position:"center",resizable:true,show:null,stack:true,title:"",width:300,zIndex:1000},getter:"isOpen",uuid:0,maxZ:0,getTitleId:function(d){return"ui-dialog-title-"+(d.attr("id")||++this.uuid)},overlay:function(d){this.$el=c.ui.dialog.overlay.create(d)}});c.extend(c.ui.dialog.overlay,{instances:[],maxZ:0,events:c.map("focus,mousedown,mouseup,keydown,keypress,click".split(","),function(d){return d+".dialog-overlay"}).join(" "),create:function(e){if(this.instances.length===0){setTimeout(function(){if(c.ui.dialog.overlay.instances.length){c(document).bind(c.ui.dialog.overlay.events,function(f){var g=c(f.target).parents(".ui-dialog").css("zIndex")||0;return(g>c.ui.dialog.overlay.maxZ)})}},1);c(document).bind("keydown.dialog-overlay",function(f){(e.options.closeOnEscape&&f.keyCode&&f.keyCode==c.ui.keyCode.ESCAPE&&e.close(f))});c(window).bind("resize.dialog-overlay",c.ui.dialog.overlay.resize)}var d=c("<div></div>").appendTo(document.body).addClass("ui-widget-overlay").css({width:this.width(),height:this.height()});(e.options.bgiframe&&c.fn.bgiframe&&d.bgiframe());this.instances.push(d);return d},destroy:function(d){this.instances.splice(c.inArray(this.instances,d),1);if(this.instances.length===0){c([document,window]).unbind(".dialog-overlay")}d.remove();var e=0;c.each(this.instances,function(){e=Math.max(e,this.css("z-index"))});this.maxZ=e},height:function(){if(c.browser.msie&&c.browser.version<7){var e=Math.max(document.documentElement.scrollHeight,document.body.scrollHeight);var 
d=Math.max(document.documentElement.offsetHeight,document.body.offsetHeight);if(e<d){return c(window).height()+"px"}else{return e+"px"}}else{return c(document).height()+"px"}},width:function(){if(c.browser.msie&&c.browser.version<7){var d=Math.max(document.documentElement.scrollWidth,document.body.scrollWidth);var e=Math.max(document.documentElement.offsetWidth,document.body.offsetWidth);if(d<e){return c(window).width()+"px"}else{return d+"px"}}else{return c(document).width()+"px"}},resize:function(){var d=c([]);c.each(c.ui.dialog.overlay.instances,function(){d=d.add(this)});d.css({width:0,height:0}).css({width:c.ui.dialog.overlay.width(),height:c.ui.dialog.overlay.height()})}});c.extend(c.ui.dialog.overlay.prototype,{destroy:function(){c.ui.dialog.overlay.destroy(this.$el)}})})(jQuery);;/*
  * jQuery UI Slider 1.7.2
  *
- * Copyright (c) 2009 AUTHORS.txt (http://jqueryui.com/about)
+ * Copyright (c) 2010 AUTHORS.txt (http://jqueryui.com/about)
  * Dual licensed under the MIT (MIT-LICENSE.txt)
  * and GPL (GPL-LICENSE.txt) licenses.
  *
@@ -109,7 +109,7 @@
 (function(a){a.widget("ui.slider",a.extend({},a.ui.mouse,{_init:function(){var b=this,c=this.options;this._keySliding=false;this._handleIndex=null;this._detectOrientation();this._mouseInit();this.element.addClass("ui-slider ui-slider-"+this.orientation+" ui-widget ui-widget-content ui-corner-all");this.range=a([]);if(c.range){if(c.range===true){this.range=a("<div></div>");if(!c.values){c.values=[this._valueMin(),this._valueMin()]}if(c.values.length&&c.values.length!=2){c.values=[c.values[0],c.values[0]]}}else{this.range=a("<div></div>")}this.range.appendTo(this.element).addClass("ui-slider-range");if(c.range=="min"||c.range=="max"){this.range.addClass("ui-slider-range-"+c.range)}this.range.addClass("ui-widget-header")}if(a(".ui-slider-handle",this.element).length==0){a('<a href="#"></a>').appendTo(this.element).addClass("ui-slider-handle")}if(c.values&&c.values.length){while(a(".ui-slider-handle",this.element).length<c.values.length){a('<a href="#"></a>').appendTo(this.element).addClass("ui-slider-handle")}}this.handles=a(".ui-slider-handle",this.element).addClass("ui-state-default ui-corner-all");this.handle=this.handles.eq(0);this.handles.add(this.range).filter("a").click(function(d){d.preventDefault()}).hover(function(){if(!c.disabled){a(this).addClass("ui-state-hover")}},function(){a(this).removeClass("ui-state-hover")}).focus(function(){if(!c.disabled){a(".ui-slider .ui-state-focus").removeClass("ui-state-focus");a(this).addClass("ui-state-focus")}else{a(this).blur()}}).blur(function(){a(this).removeClass("ui-state-focus")});this.handles.each(function(d){a(this).data("index.ui-slider-handle",d)});this.handles.keydown(function(i){var f=true;var e=a(this).data("index.ui-slider-handle");if(b.options.disabled){return}switch(i.keyCode){case a.ui.keyCode.HOME:case a.ui.keyCode.END:case a.ui.keyCode.UP:case a.ui.keyCode.RIGHT:case a.ui.keyCode.DOWN:case a.ui.keyCode.LEFT:f=false;if(!b._keySliding){b._keySliding=true;a(this).addClass("ui-state-active");b._start(i,e)}break}var g,d,h=b._step();if(b.options.values&&b.options.values.length){g=d=b.values(e)}else{g=d=b.value()}switch(i.keyCode){case a.ui.keyCode.HOME:d=b._valueMin();break;case a.ui.keyCode.END:d=b._valueMax();break;case a.ui.keyCode.UP:case a.ui.keyCode.RIGHT:if(g==b._valueMax()){return}d=g+h;break;case a.ui.keyCode.DOWN:case a.ui.keyCode.LEFT:if(g==b._valueMin()){return}d=g-h;break}b._slide(i,e,d);return f}).keyup(function(e){var d=a(this).data("index.ui-slider-handle");if(b._keySliding){b._stop(e,d);b._change(e,d);b._keySliding=false;a(this).removeClass("ui-state-active")}});this._refreshValue()},destroy:function(){this.handles.remove();this.range.remove();this.element.removeClass("ui-slider ui-slider-horizontal ui-slider-vertical ui-slider-disabled ui-widget ui-widget-content ui-corner-all").removeData("slider").unbind(".slider");this._mouseDestroy()},_mouseCapture:function(d){var e=this.options;if(e.disabled){return false}this.elementSize={width:this.element.outerWidth(),height:this.element.outerHeight()};this.elementOffset=this.element.offset();var h={x:d.pageX,y:d.pageY};var j=this._normValueFromMouse(h);var c=this._valueMax()-this._valueMin()+1,f;var k=this,i;this.handles.each(function(l){var m=Math.abs(j-k.values(l));if(c>m){c=m;f=a(this);i=l}});if(e.range==true&&this.values(1)==e.min){f=a(this.handles[++i])}this._start(d,i);k._handleIndex=i;f.addClass("ui-state-active").focus();var g=f.offset();var 
b=!a(d.target).parents().andSelf().is(".ui-slider-handle");this._clickOffset=b?{left:0,top:0}:{left:d.pageX-g.left-(f.width()/2),top:d.pageY-g.top-(f.height()/2)-(parseInt(f.css("borderTopWidth"),10)||0)-(parseInt(f.css("borderBottomWidth"),10)||0)+(parseInt(f.css("marginTop"),10)||0)};j=this._normValueFromMouse(h);this._slide(d,i,j);return true},_mouseStart:function(b){return true},_mouseDrag:function(d){var b={x:d.pageX,y:d.pageY};var c=this._normValueFromMouse(b);this._slide(d,this._handleIndex,c);return false},_mouseStop:function(b){this.handles.removeClass("ui-state-active");this._stop(b,this._handleIndex);this._change(b,this._handleIndex);this._handleIndex=null;this._clickOffset=null;return false},_detectOrientation:function(){this.orientation=this.options.orientation=="vertical"?"vertical":"horizontal"},_normValueFromMouse:function(d){var c,h;if("horizontal"==this.orientation){c=this.elementSize.width;h=d.x-this.elementOffset.left-(this._clickOffset?this._clickOffset.left:0)}else{c=this.elementSize.height;h=d.y-this.elementOffset.top-(this._clickOffset?this._clickOffset.top:0)}var f=(h/c);if(f>1){f=1}if(f<0){f=0}if("vertical"==this.orientation){f=1-f}var e=this._valueMax()-this._valueMin(),i=f*e,b=i%this.options.step,g=this._valueMin()+i-b;if(b>(this.options.step/2)){g+=this.options.step}return parseFloat(g.toFixed(5))},_start:function(d,c){var b={handle:this.handles[c],value:this.value()};if(this.options.values&&this.options.values.length){b.value=this.values(c);b.values=this.values()}this._trigger("start",d,b)},_slide:function(f,e,d){var g=this.handles[e];if(this.options.values&&this.options.values.length){var b=this.values(e?0:1);if((this.options.values.length==2&&this.options.range===true)&&((e==0&&d>b)||(e==1&&d<b))){d=b}if(d!=this.values(e)){var c=this.values();c[e]=d;var h=this._trigger("slide",f,{handle:this.handles[e],value:d,values:c});var b=this.values(e?0:1);if(h!==false){this.values(e,d,(f.type=="mousedown"&&this.options.animate),true)}}}else{if(d!=this.value()){var h=this._trigger("slide",f,{handle:this.handles[e],value:d});if(h!==false){this._setData("value",d,(f.type=="mousedown"&&this.options.animate))}}}},_stop:function(d,c){var b={handle:this.handles[c],value:this.value()};if(this.options.values&&this.options.values.length){b.value=this.values(c);b.values=this.values()}this._trigger("stop",d,b)},_change:function(d,c){var b={handle:this.handles[c],value:this.value()};if(this.options.values&&this.options.values.length){b.value=this.values(c);b.values=this.values()}this._trigger("change",d,b)},value:function(b){if(arguments.length){this._setData("value",b);this._change(null,0)}return this._value()},values:function(b,e,c,d){if(arguments.length>1){this.options.values[b]=e;this._refreshValue(c);if(!d){this._change(null,b)}}if(arguments.length){if(this.options.values&&this.options.values.length){return this._values(b)}else{return this.value()}}else{return this._values()}},_setData:function(b,d,c){a.widget.prototype._setData.apply(this,arguments);switch(b){case"disabled":if(d){this.handles.filter(".ui-state-focus").blur();this.handles.removeClass("ui-state-hover");this.handles.attr("disabled","disabled")}else{this.handles.removeAttr("disabled")}case"orientation":this._detectOrientation();this.element.removeClass("ui-slider-horizontal ui-slider-vertical").addClass("ui-slider-"+this.orientation);this._refreshValue(c);break;case"value":this._refreshValue(c);break}},_step:function(){var b=this.options.step;return b},_value:function(){var 
b=this.options.value;if(b<this._valueMin()){b=this._valueMin()}if(b>this._valueMax()){b=this._valueMax()}return b},_values:function(b){if(arguments.length){var c=this.options.values[b];if(c<this._valueMin()){c=this._valueMin()}if(c>this._valueMax()){c=this._valueMax()}return c}else{return this.options.values}},_valueMin:function(){var b=this.options.min;return b},_valueMax:function(){var b=this.options.max;return b},_refreshValue:function(c){var f=this.options.range,d=this.options,l=this;if(this.options.values&&this.options.values.length){var i,h;this.handles.each(function(p,n){var o=(l.values(p)-l._valueMin())/(l._valueMax()-l._valueMin())*100;var m={};m[l.orientation=="horizontal"?"left":"bottom"]=o+"%";a(this).stop(1,1)[c?"animate":"css"](m,d.animate);if(l.options.range===true){if(l.orientation=="horizontal"){(p==0)&&l.range.stop(1,1)[c?"animate":"css"]({left:o+"%"},d.animate);(p==1)&&l.range[c?"animate":"css"]({width:(o-lastValPercent)+"%"},{queue:false,duration:d.animate})}else{(p==0)&&l.range.stop(1,1)[c?"animate":"css"]({bottom:(o)+"%"},d.animate);(p==1)&&l.range[c?"animate":"css"]({height:(o-lastValPercent)+"%"},{queue:false,duration:d.animate})}}lastValPercent=o})}else{var j=this.value(),g=this._valueMin(),k=this._valueMax(),e=k!=g?(j-g)/(k-g)*100:0;var b={};b[l.orientation=="horizontal"?"left":"bottom"]=e+"%";this.handle.stop(1,1)[c?"animate":"css"](b,d.animate);(f=="min")&&(this.orientation=="horizontal")&&this.range.stop(1,1)[c?"animate":"css"]({width:e+"%"},d.animate);(f=="max")&&(this.orientation=="horizontal")&&this.range[c?"animate":"css"]({width:(100-e)+"%"},{queue:false,duration:d.animate});(f=="min")&&(this.orientation=="vertical")&&this.range.stop(1,1)[c?"animate":"css"]({height:e+"%"},d.animate);(f=="max")&&(this.orientation=="vertical")&&this.range[c?"animate":"css"]({height:(100-e)+"%"},{queue:false,duration:d.animate})}}}));a.extend(a.ui.slider,{getter:"value values",version:"1.7.2",eventPrefix:"slide",defaults:{animate:false,delay:0,distance:0,max:100,min:0,orientation:"horizontal",range:false,step:1,value:0,values:null}})})(jQuery);;/*
  * jQuery UI Tabs 1.7.2
  *
- * Copyright (c) 2009 AUTHORS.txt (http://jqueryui.com/about)
+ * Copyright (c) 2010 AUTHORS.txt (http://jqueryui.com/about)
  * Dual licensed under the MIT (MIT-LICENSE.txt)
  * and GPL (GPL-LICENSE.txt) licenses.
  *
@@ -121,7 +121,7 @@
 (function(a){a.widget("ui.tabs",{_init:function(){if(this.options.deselectable!==undefined){this.options.collapsible=this.options.deselectable}this._tabify(true)},_setData:function(b,c){if(b=="selected"){if(this.options.collapsible&&c==this.options.selected){return}this.select(c)}else{this.options[b]=c;if(b=="deselectable"){this.options.collapsible=c}this._tabify()}},_tabId:function(b){return b.title&&b.title.replace(/\s/g,"_").replace(/[^A-Za-z0-9\-_:\.]/g,"")||this.options.idPrefix+a.data(b)},_sanitizeSelector:function(b){return b.replace(/:/g,"\\:")},_cookie:function(){var b=this.cookie||(this.cookie=this.options.cookie.name||"ui-tabs-"+a.data(this.list[0]));return a.cookie.apply(null,[b].concat(a.makeArray(arguments)))},_ui:function(c,b){return{tab:c,panel:b,index:this.anchors.index(c)}},_cleanup:function(){this.lis.filter(".ui-state-processing").removeClass("ui-state-processing").find("span:data(label.tabs)").each(function(){var b=a(this);b.html(b.data("label.tabs")).removeData("label.tabs")})},_tabify:function(n){this.list=this.element.children("ul:first");this.lis=a("li:has(a[href])",this.list);this.anchors=this.lis.map(function(){return a("a",this)[0]});this.panels=a([]);var p=this,d=this.options;var c=/^#.+/;this.anchors.each(function(r,o){var q=a(o).attr("href");var s=q.split("#")[0],u;if(s&&(s===location.toString().split("#")[0]||(u=a("base")[0])&&s===u.href)){q=o.hash;o.href=q}if(c.test(q)){p.panels=p.panels.add(p._sanitizeSelector(q))}else{if(q!="#"){a.data(o,"href.tabs",q);a.data(o,"load.tabs",q.replace(/#.*$/,""));var w=p._tabId(o);o.href="#"+w;var v=a("#"+w);if(!v.length){v=a(d.panelTemplate).attr("id",w).addClass("ui-tabs-panel ui-widget-content ui-corner-bottom").insertAfter(p.panels[r-1]||p.list);v.data("destroy.tabs",true)}p.panels=p.panels.add(v)}else{d.disabled.push(r)}}});if(n){this.element.addClass("ui-tabs ui-widget ui-widget-content ui-corner-all");this.list.addClass("ui-tabs-nav ui-helper-reset ui-helper-clearfix ui-widget-header ui-corner-all");this.lis.addClass("ui-state-default ui-corner-top");this.panels.addClass("ui-tabs-panel ui-widget-content ui-corner-bottom");if(d.selected===undefined){if(location.hash){this.anchors.each(function(q,o){if(o.hash==location.hash){d.selected=q;return false}})}if(typeof d.selected!="number"&&d.cookie){d.selected=parseInt(p._cookie(),10)}if(typeof d.selected!="number"&&this.lis.filter(".ui-tabs-selected").length){d.selected=this.lis.index(this.lis.filter(".ui-tabs-selected"))}d.selected=d.selected||0}else{if(d.selected===null){d.selected=-1}}d.selected=((d.selected>=0&&this.anchors[d.selected])||d.selected<0)?d.selected:0;d.disabled=a.unique(d.disabled.concat(a.map(this.lis.filter(".ui-state-disabled"),function(q,o){return p.lis.index(q)}))).sort();if(a.inArray(d.selected,d.disabled)!=-1){d.disabled.splice(a.inArray(d.selected,d.disabled),1)}this.panels.addClass("ui-tabs-hide");this.lis.removeClass("ui-tabs-selected ui-state-active");if(d.selected>=0&&this.anchors.length){this.panels.eq(d.selected).removeClass("ui-tabs-hide");this.lis.eq(d.selected).addClass("ui-tabs-selected ui-state-active");p.element.queue("tabs",function(){p._trigger("show",null,p._ui(p.anchors[d.selected],p.panels[d.selected]))});this.load(d.selected)}a(window).bind("unload",function(){p.lis.add(p.anchors).unbind(".tabs");p.lis=p.anchors=p.panels=null})}else{d.selected=this.lis.index(this.lis.filter(".ui-tabs-selected"))}this.element[d.collapsible?"addClass":"removeClass"]("ui-tabs-collapsible");if(d.cookie){this._cookie(d.selected,d.cookie)}for(var 
g=0,m;(m=this.lis[g]);g++){a(m)[a.inArray(g,d.disabled)!=-1&&!a(m).hasClass("ui-tabs-selected")?"addClass":"removeClass"]("ui-state-disabled")}if(d.cache===false){this.anchors.removeData("cache.tabs")}this.lis.add(this.anchors).unbind(".tabs");if(d.event!="mouseover"){var f=function(o,i){if(i.is(":not(.ui-state-disabled)")){i.addClass("ui-state-"+o)}};var j=function(o,i){i.removeClass("ui-state-"+o)};this.lis.bind("mouseover.tabs",function(){f("hover",a(this))});this.lis.bind("mouseout.tabs",function(){j("hover",a(this))});this.anchors.bind("focus.tabs",function(){f("focus",a(this).closest("li"))});this.anchors.bind("blur.tabs",function(){j("focus",a(this).closest("li"))})}var b,h;if(d.fx){if(a.isArray(d.fx)){b=d.fx[0];h=d.fx[1]}else{b=h=d.fx}}function e(i,o){i.css({display:""});if(a.browser.msie&&o.opacity){i[0].style.removeAttribute("filter")}}var k=h?function(i,o){a(i).closest("li").removeClass("ui-state-default").addClass("ui-tabs-selected ui-state-active");o.hide().removeClass("ui-tabs-hide").animate(h,h.duration||"normal",function(){e(o,h);p._trigger("show",null,p._ui(i,o[0]))})}:function(i,o){a(i).closest("li").removeClass("ui-state-default").addClass("ui-tabs-selected ui-state-active");o.removeClass("ui-tabs-hide");p._trigger("show",null,p._ui(i,o[0]))};var l=b?function(o,i){i.animate(b,b.duration||"normal",function(){p.lis.removeClass("ui-tabs-selected ui-state-active").addClass("ui-state-default");i.addClass("ui-tabs-hide");e(i,b);p.element.dequeue("tabs")})}:function(o,i,q){p.lis.removeClass("ui-tabs-selected ui-state-active").addClass("ui-state-default");i.addClass("ui-tabs-hide");p.element.dequeue("tabs")};this.anchors.bind(d.event+".tabs",function(){var o=this,r=a(this).closest("li"),i=p.panels.filter(":not(.ui-tabs-hide)"),q=a(p._sanitizeSelector(this.hash));if((r.hasClass("ui-tabs-selected")&&!d.collapsible)||r.hasClass("ui-state-disabled")||r.hasClass("ui-state-processing")||p._trigger("select",null,p._ui(this,q[0]))===false){this.blur();return false}d.selected=p.anchors.index(this);p.abort();if(d.collapsible){if(r.hasClass("ui-tabs-selected")){d.selected=-1;if(d.cookie){p._cookie(d.selected,d.cookie)}p.element.queue("tabs",function(){l(o,i)}).dequeue("tabs");this.blur();return false}else{if(!i.length){if(d.cookie){p._cookie(d.selected,d.cookie)}p.element.queue("tabs",function(){k(o,q)});p.load(p.anchors.index(this));this.blur();return false}}}if(d.cookie){p._cookie(d.selected,d.cookie)}if(q.length){if(i.length){p.element.queue("tabs",function(){l(o,i)})}p.element.queue("tabs",function(){k(o,q)});p.load(p.anchors.index(this))}else{throw"jQuery UI Tabs: Mismatching fragment identifier."}if(a.browser.msie){this.blur()}});this.anchors.bind("click.tabs",function(){return false})},destroy:function(){var b=this.options;this.abort();this.element.unbind(".tabs").removeClass("ui-tabs ui-widget ui-widget-content ui-corner-all ui-tabs-collapsible").removeData("tabs");this.list.removeClass("ui-tabs-nav ui-helper-reset ui-helper-clearfix ui-widget-header ui-corner-all");this.anchors.each(function(){var c=a.data(this,"href.tabs");if(c){this.href=c}var d=a(this).unbind(".tabs");a.each(["href","load","cache"],function(e,f){d.removeData(f+".tabs")})});this.lis.unbind(".tabs").add(this.panels).each(function(){if(a.data(this,"destroy.tabs")){a(this).remove()}else{a(this).removeClass(["ui-state-default","ui-corner-top","ui-tabs-selected","ui-state-active","ui-state-hover","ui-state-focus","ui-state-disabled","ui-tabs-panel","ui-widget-content","ui-corner-bottom","ui-tabs-hide"].join(" 
"))}});if(b.cookie){this._cookie(null,b.cookie)}},add:function(e,d,c){if(c===undefined){c=this.anchors.length}var b=this,g=this.options,i=a(g.tabTemplate.replace(/#\{href\}/g,e).replace(/#\{label\}/g,d)),h=!e.indexOf("#")?e.replace("#",""):this._tabId(a("a",i)[0]);i.addClass("ui-state-default ui-corner-top").data("destroy.tabs",true);var f=a("#"+h);if(!f.length){f=a(g.panelTemplate).attr("id",h).data("destroy.tabs",true)}f.addClass("ui-tabs-panel ui-widget-content ui-corner-bottom ui-tabs-hide");if(c>=this.lis.length){i.appendTo(this.list);f.appendTo(this.list[0].parentNode)}else{i.insertBefore(this.lis[c]);f.insertBefore(this.panels[c])}g.disabled=a.map(g.disabled,function(k,j){return k>=c?++k:k});this._tabify();if(this.anchors.length==1){i.addClass("ui-tabs-selected ui-state-active");f.removeClass("ui-tabs-hide");this.element.queue("tabs",function(){b._trigger("show",null,b._ui(b.anchors[0],b.panels[0]))});this.load(0)}this._trigger("add",null,this._ui(this.anchors[c],this.panels[c]))},remove:function(b){var d=this.options,e=this.lis.eq(b).remove(),c=this.panels.eq(b).remove();if(e.hasClass("ui-tabs-selected")&&this.anchors.length>1){this.select(b+(b+1<this.anchors.length?1:-1))}d.disabled=a.map(a.grep(d.disabled,function(g,f){return g!=b}),function(g,f){return g>=b?--g:g});this._tabify();this._trigger("remove",null,this._ui(e.find("a")[0],c[0]))},enable:function(b){var c=this.options;if(a.inArray(b,c.disabled)==-1){return}this.lis.eq(b).removeClass("ui-state-disabled");c.disabled=a.grep(c.disabled,function(e,d){return e!=b});this._trigger("enable",null,this._ui(this.anchors[b],this.panels[b]))},disable:function(c){var b=this,d=this.options;if(c!=d.selected){this.lis.eq(c).addClass("ui-state-disabled");d.disabled.push(c);d.disabled.sort();this._trigger("disable",null,this._ui(this.anchors[c],this.panels[c]))}},select:function(b){if(typeof b=="string"){b=this.anchors.index(this.anchors.filter("[href$="+b+"]"))}else{if(b===null){b=-1}}if(b==-1&&this.options.collapsible){b=this.options.selected}this.anchors.eq(b).trigger(this.options.event+".tabs")},load:function(e){var c=this,g=this.options,b=this.anchors.eq(e)[0],d=a.data(b,"load.tabs");this.abort();if(!d||this.element.queue("tabs").length!==0&&a.data(b,"cache.tabs")){this.element.dequeue("tabs");return}this.lis.eq(e).addClass("ui-state-processing");if(g.spinner){var f=a("span",b);f.data("label.tabs",f.html()).html(g.spinner)}this.xhr=a.ajax(a.extend({},g.ajaxOptions,{url:d,success:function(i,h){a(c._sanitizeSelector(b.hash)).html(i);c._cleanup();if(g.cache){a.data(b,"cache.tabs",true)}c._trigger("load",null,c._ui(c.anchors[e],c.panels[e]));try{g.ajaxOptions.success(i,h)}catch(j){}c.element.dequeue("tabs")}}))},abort:function(){this.element.queue([]);this.panels.stop(false,true);if(this.xhr){this.xhr.abort();delete this.xhr}this._cleanup()},url:function(c,b){this.anchors.eq(c).removeData("cache.tabs").data("load.tabs",b)},length:function(){return this.anchors.length}});a.extend(a.ui.tabs,{version:"1.7.2",getter:"length",defaults:{ajaxOptions:null,cache:false,cookie:null,collapsible:false,disabled:[],event:"click",fx:null,idPrefix:"ui-tabs-",panelTemplate:"<div></div>",spinner:"<em>Loading&#8230;</em>",tabTemplate:'<li><a href="#{href}"><span>#{label}</span></a></li>'}});a.extend(a.ui.tabs.prototype,{rotation:null,rotate:function(d,f){var b=this,g=this.options;var c=b._rotate||(b._rotate=function(h){clearTimeout(b.rotation);b.rotation=setTimeout(function(){var 
i=g.selected;b.select(++i<b.anchors.length?i:0)},d);if(h){h.stopPropagation()}});var e=b._unrotate||(b._unrotate=!f?function(h){if(h.clientX){b.rotate(null)}}:function(h){t=g.selected;c()});if(d){this.element.bind("tabsshow",c);this.anchors.bind(g.event+".tabs",e);c()}else{clearTimeout(b.rotation);this.element.unbind("tabsshow",c);this.anchors.unbind(g.event+".tabs",e);delete this._rotate;delete this._unrotate}}})})(jQuery);;/*
  * jQuery UI Datepicker 1.7.2
  *
- * Copyright (c) 2009 AUTHORS.txt (http://jqueryui.com/about)
+ * Copyright (c) 2010 AUTHORS.txt (http://jqueryui.com/about)
  * Dual licensed under the MIT (MIT-LICENSE.txt)
  * and GPL (GPL-LICENSE.txt) licenses.
  *
@@ -133,7 +133,7 @@
 (function($){$.extend($.ui,{datepicker:{version:"1.7.2"}});var PROP_NAME="datepicker";function Datepicker(){this.debug=false;this._curInst=null;this._keyEvent=false;this._disabledInputs=[];this._datepickerShowing=false;this._inDialog=false;this._mainDivId="ui-datepicker-div";this._inlineClass="ui-datepicker-inline";this._appendClass="ui-datepicker-append";this._triggerClass="ui-datepicker-trigger";this._dialogClass="ui-datepicker-dialog";this._disableClass="ui-datepicker-disabled";this._unselectableClass="ui-datepicker-unselectable";this._currentClass="ui-datepicker-current-day";this._dayOverClass="ui-datepicker-days-cell-over";this.regional=[];this.regional[""]={closeText:"Done",prevText:"Prev",nextText:"Next",currentText:"Today",monthNames:["January","February","March","April","May","June","July","August","September","October","November","December"],monthNamesShort:["Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec"],dayNames:["Sunday","Monday","Tuesday","Wednesday","Thursday","Friday","Saturday"],dayNamesShort:["Sun","Mon","Tue","Wed","Thu","Fri","Sat"],dayNamesMin:["Su","Mo","Tu","We","Th","Fr","Sa"],dateFormat:"mm/dd/yy",firstDay:0,isRTL:false};this._defaults={showOn:"focus",showAnim:"show",showOptions:{},defaultDate:null,appendText:"",buttonText:"...",buttonImage:"",buttonImageOnly:false,hideIfNoPrevNext:false,navigationAsDateFormat:false,gotoCurrent:false,changeMonth:false,changeYear:false,showMonthAfterYear:false,yearRange:"-10:+10",showOtherMonths:false,calculateWeek:this.iso8601Week,shortYearCutoff:"+10",minDate:null,maxDate:null,duration:"normal",beforeShowDay:null,beforeShow:null,onSelect:null,onChangeMonthYear:null,onClose:null,numberOfMonths:1,showCurrentAtPos:0,stepMonths:1,stepBigMonths:12,altField:"",altFormat:"",constrainInput:true,showButtonPanel:false};$.extend(this._defaults,this.regional[""]);this.dpDiv=$('<div id="'+this._mainDivId+'" class="ui-datepicker ui-widget ui-widget-content ui-helper-clearfix ui-corner-all ui-helper-hidden-accessible"></div>')}$.extend(Datepicker.prototype,{markerClassName:"hasDatepicker",log:function(){if(this.debug){console.log.apply("",arguments)}},setDefaults:function(settings){extendRemove(this._defaults,settings||{});return this},_attachDatepicker:function(target,settings){var inlineSettings=null;for(var attrName in this._defaults){var attrValue=target.getAttribute("date:"+attrName);if(attrValue){inlineSettings=inlineSettings||{};try{inlineSettings[attrName]=eval(attrValue)}catch(err){inlineSettings[attrName]=attrValue}}}var nodeName=target.nodeName.toLowerCase();var inline=(nodeName=="div"||nodeName=="span");if(!target.id){target.id="dp"+(++this.uuid)}var inst=this._newInst($(target),inline);inst.settings=$.extend({},settings||{},inlineSettings||{});if(nodeName=="input"){this._connectDatepicker(target,inst)}else{if(inline){this._inlineDatepicker(target,inst)}}},_newInst:function(target,inline){var id=target[0].id.replace(/([:\[\]\.])/g,"\\\\$1");return{id:id,input:target,selectedDay:0,selectedMonth:0,selectedYear:0,drawMonth:0,drawYear:0,inline:inline,dpDiv:(!inline?this.dpDiv:$('<div class="'+this._inlineClass+' ui-datepicker ui-widget ui-widget-content ui-helper-clearfix ui-corner-all"></div>'))}},_connectDatepicker:function(target,inst){var input=$(target);inst.append=$([]);inst.trigger=$([]);if(input.hasClass(this.markerClassName)){return}var appendText=this._get(inst,"appendText");var isRTL=this._get(inst,"isRTL");if(appendText){inst.append=$('<span 
class="'+this._appendClass+'">'+appendText+"</span>");input[isRTL?"before":"after"](inst.append)}var showOn=this._get(inst,"showOn");if(showOn=="focus"||showOn=="both"){input.focus(this._showDatepicker)}if(showOn=="button"||showOn=="both"){var buttonText=this._get(inst,"buttonText");var buttonImage=this._get(inst,"buttonImage");inst.trigger=$(this._get(inst,"buttonImageOnly")?$("<img/>").addClass(this._triggerClass).attr({src:buttonImage,alt:buttonText,title:buttonText}):$('<button type="button"></button>').addClass(this._triggerClass).html(buttonImage==""?buttonText:$("<img/>").attr({src:buttonImage,alt:buttonText,title:buttonText})));input[isRTL?"before":"after"](inst.trigger);inst.trigger.click(function(){if($.datepicker._datepickerShowing&&$.datepicker._lastInput==target){$.datepicker._hideDatepicker()}else{$.datepicker._showDatepicker(target)}return false})}input.addClass(this.markerClassName).keydown(this._doKeyDown).keypress(this._doKeyPress).bind("setData.datepicker",function(event,key,value){inst.settings[key]=value}).bind("getData.datepicker",function(event,key){return this._get(inst,key)});$.data(target,PROP_NAME,inst)},_inlineDatepicker:function(target,inst){var divSpan=$(target);if(divSpan.hasClass(this.markerClassName)){return}divSpan.addClass(this.markerClassName).append(inst.dpDiv).bind("setData.datepicker",function(event,key,value){inst.settings[key]=value}).bind("getData.datepicker",function(event,key){return this._get(inst,key)});$.data(target,PROP_NAME,inst);this._setDate(inst,this._getDefaultDate(inst));this._updateDatepicker(inst);this._updateAlternate(inst)},_dialogDatepicker:function(input,dateText,onSelect,settings,pos){var inst=this._dialogInst;if(!inst){var id="dp"+(++this.uuid);this._dialogInput=$('<input type="text" id="'+id+'" size="1" style="position: absolute; top: -100px;"/>');this._dialogInput.keydown(this._doKeyDown);$("body").append(this._dialogInput);inst=this._dialogInst=this._newInst(this._dialogInput,false);inst.settings={};$.data(this._dialogInput[0],PROP_NAME,inst)}extendRemove(inst.settings,settings||{});this._dialogInput.val(dateText);this._pos=(pos?(pos.length?pos:[pos.pageX,pos.pageY]):null);if(!this._pos){var browserWidth=window.innerWidth||document.documentElement.clientWidth||document.body.clientWidth;var browserHeight=window.innerHeight||document.documentElement.clientHeight||document.body.clientHeight;var scrollX=document.documentElement.scrollLeft||document.body.scrollLeft;var scrollY=document.documentElement.scrollTop||document.body.scrollTop;this._pos=[(browserWidth/2)-100+scrollX,(browserHeight/2)-150+scrollY]}this._dialogInput.css("left",this._pos[0]+"px").css("top",this._pos[1]+"px");inst.settings.onSelect=onSelect;this._inDialog=true;this.dpDiv.addClass(this._dialogClass);this._showDatepicker(this._dialogInput[0]);if($.blockUI){$.blockUI(this.dpDiv)}$.data(this._dialogInput[0],PROP_NAME,inst);return this},_destroyDatepicker:function(target){var $target=$(target);var inst=$.data(target,PROP_NAME);if(!$target.hasClass(this.markerClassName)){return}var nodeName=target.nodeName.toLowerCase();$.removeData(target,PROP_NAME);if(nodeName=="input"){inst.append.remove();inst.trigger.remove();$target.removeClass(this.markerClassName).unbind("focus",this._showDatepicker).unbind("keydown",this._doKeyDown).unbind("keypress",this._doKeyPress)}else{if(nodeName=="div"||nodeName=="span"){$target.removeClass(this.markerClassName).empty()}}},_enableDatepicker:function(target){var $target=$(target);var 
inst=$.data(target,PROP_NAME);if(!$target.hasClass(this.markerClassName)){return}var nodeName=target.nodeName.toLowerCase();if(nodeName=="input"){target.disabled=false;inst.trigger.filter("button").each(function(){this.disabled=false}).end().filter("img").css({opacity:"1.0",cursor:""})}else{if(nodeName=="div"||nodeName=="span"){var inline=$target.children("."+this._inlineClass);inline.children().removeClass("ui-state-disabled")}}this._disabledInputs=$.map(this._disabledInputs,function(value){return(value==target?null:value)})},_disableDatepicker:function(target){var $target=$(target);var inst=$.data(target,PROP_NAME);if(!$target.hasClass(this.markerClassName)){return}var nodeName=target.nodeName.toLowerCase();if(nodeName=="input"){target.disabled=true;inst.trigger.filter("button").each(function(){this.disabled=true}).end().filter("img").css({opacity:"0.5",cursor:"default"})}else{if(nodeName=="div"||nodeName=="span"){var inline=$target.children("."+this._inlineClass);inline.children().addClass("ui-state-disabled")}}this._disabledInputs=$.map(this._disabledInputs,function(value){return(value==target?null:value)});this._disabledInputs[this._disabledInputs.length]=target},_isDisabledDatepicker:function(target){if(!target){return false}for(var i=0;i<this._disabledInputs.length;i++){if(this._disabledInputs[i]==target){return true}}return false},_getInst:function(target){try{return $.data(target,PROP_NAME)}catch(err){throw"Missing instance data for this datepicker"}},_optionDatepicker:function(target,name,value){var inst=this._getInst(target);if(arguments.length==2&&typeof name=="string"){return(name=="defaults"?$.extend({},$.datepicker._defaults):(inst?(name=="all"?$.extend({},inst.settings):this._get(inst,name)):null))}var settings=name||{};if(typeof name=="string"){settings={};settings[name]=value}if(inst){if(this._curInst==inst){this._hideDatepicker(null)}var date=this._getDateDatepicker(target);extendRemove(inst.settings,settings);this._setDateDatepicker(target,date);this._updateDatepicker(inst)}},_changeDatepicker:function(target,name,value){this._optionDatepicker(target,name,value)},_refreshDatepicker:function(target){var inst=this._getInst(target);if(inst){this._updateDatepicker(inst)}},_setDateDatepicker:function(target,date,endDate){var inst=this._getInst(target);if(inst){this._setDate(inst,date,endDate);this._updateDatepicker(inst);this._updateAlternate(inst)}},_getDateDatepicker:function(target){var inst=this._getInst(target);if(inst&&!inst.inline){this._setDateFromField(inst)}return(inst?this._getDate(inst):null)},_doKeyDown:function(event){var inst=$.datepicker._getInst(event.target);var handled=true;var isRTL=inst.dpDiv.is(".ui-datepicker-rtl");inst._keyEvent=true;if($.datepicker._datepickerShowing){switch(event.keyCode){case 9:$.datepicker._hideDatepicker(null,"");break;case 13:var sel=$("td."+$.datepicker._dayOverClass+", td."+$.datepicker._currentClass,inst.dpDiv);if(sel[0]){$.datepicker._selectDay(event.target,inst.selectedMonth,inst.selectedYear,sel[0])}else{$.datepicker._hideDatepicker(null,$.datepicker._get(inst,"duration"))}return false;break;case 27:$.datepicker._hideDatepicker(null,$.datepicker._get(inst,"duration"));break;case 33:$.datepicker._adjustDate(event.target,(event.ctrlKey?-$.datepicker._get(inst,"stepBigMonths"):-$.datepicker._get(inst,"stepMonths")),"M");break;case 34:$.datepicker._adjustDate(event.target,(event.ctrlKey?+$.datepicker._get(inst,"stepBigMonths"):+$.datepicker._get(inst,"stepMonths")),"M");break;case 
35:if(event.ctrlKey||event.metaKey){$.datepicker._clearDate(event.target)}handled=event.ctrlKey||event.metaKey;break;case 36:if(event.ctrlKey||event.metaKey){$.datepicker._gotoToday(event.target)}handled=event.ctrlKey||event.metaKey;break;case 37:if(event.ctrlKey||event.metaKey){$.datepicker._adjustDate(event.target,(isRTL?+1:-1),"D")}handled=event.ctrlKey||event.metaKey;if(event.originalEvent.altKey){$.datepicker._adjustDate(event.target,(event.ctrlKey?-$.datepicker._get(inst,"stepBigMonths"):-$.datepicker._get(inst,"stepMonths")),"M")}break;case 38:if(event.ctrlKey||event.metaKey){$.datepicker._adjustDate(event.target,-7,"D")}handled=event.ctrlKey||event.metaKey;break;case 39:if(event.ctrlKey||event.metaKey){$.datepicker._adjustDate(event.target,(isRTL?-1:+1),"D")}handled=event.ctrlKey||event.metaKey;if(event.originalEvent.altKey){$.datepicker._adjustDate(event.target,(event.ctrlKey?+$.datepicker._get(inst,"stepBigMonths"):+$.datepicker._get(inst,"stepMonths")),"M")}break;case 40:if(event.ctrlKey||event.metaKey){$.datepicker._adjustDate(event.target,+7,"D")}handled=event.ctrlKey||event.metaKey;break;default:handled=false}}else{if(event.keyCode==36&&event.ctrlKey){$.datepicker._showDatepicker(this)}else{handled=false}}if(handled){event.preventDefault();event.stopPropagation()}},_doKeyPress:function(event){var inst=$.datepicker._getInst(event.target);if($.datepicker._get(inst,"constrainInput")){var chars=$.datepicker._possibleChars($.datepicker._get(inst,"dateFormat"));var chr=String.fromCharCode(event.charCode==undefined?event.keyCode:event.charCode);return event.ctrlKey||(chr<" "||!chars||chars.indexOf(chr)>-1)}},_showDatepicker:function(input){input=input.target||input;if(input.nodeName.toLowerCase()!="input"){input=$("input",input.parentNode)[0]}if($.datepicker._isDisabledDatepicker(input)||$.datepicker._lastInput==input){return}var inst=$.datepicker._getInst(input);var beforeShow=$.datepicker._get(inst,"beforeShow");extendRemove(inst.settings,(beforeShow?beforeShow.apply(input,[input,inst]):{}));$.datepicker._hideDatepicker(null,"");$.datepicker._lastInput=input;$.datepicker._setDateFromField(inst);if($.datepicker._inDialog){input.value=""}if(!$.datepicker._pos){$.datepicker._pos=$.datepicker._findPos(input);$.datepicker._pos[1]+=input.offsetHeight}var isFixed=false;$(input).parents().each(function(){isFixed|=$(this).css("position")=="fixed";return !isFixed});if(isFixed&&$.browser.opera){$.datepicker._pos[0]-=document.documentElement.scrollLeft;$.datepicker._pos[1]-=document.documentElement.scrollTop}var offset={left:$.datepicker._pos[0],top:$.datepicker._pos[1]};$.datepicker._pos=null;inst.rangeStart=null;inst.dpDiv.css({position:"absolute",display:"block",top:"-1000px"});$.datepicker._updateDatepicker(inst);offset=$.datepicker._checkOffset(inst,offset,isFixed);inst.dpDiv.css({position:($.datepicker._inDialog&&$.blockUI?"static":(isFixed?"fixed":"absolute")),display:"none",left:offset.left+"px",top:offset.top+"px"});if(!inst.inline){var showAnim=$.datepicker._get(inst,"showAnim")||"show";var duration=$.datepicker._get(inst,"duration");var 
postProcess=function(){$.datepicker._datepickerShowing=true;if($.browser.msie&&parseInt($.browser.version,10)<7){$("iframe.ui-datepicker-cover").css({width:inst.dpDiv.width()+4,height:inst.dpDiv.height()+4})}};if($.effects&&$.effects[showAnim]){inst.dpDiv.show(showAnim,$.datepicker._get(inst,"showOptions"),duration,postProcess)}else{inst.dpDiv[showAnim](duration,postProcess)}if(duration==""){postProcess()}if(inst.input[0].type!="hidden"){inst.input[0].focus()}$.datepicker._curInst=inst}},_updateDatepicker:function(inst){var dims={width:inst.dpDiv.width()+4,height:inst.dpDiv.height()+4};var self=this;inst.dpDiv.empty().append(this._generateHTML(inst)).find("iframe.ui-datepicker-cover").css({width:dims.width,height:dims.height}).end().find("button, .ui-datepicker-prev, .ui-datepicker-next, .ui-datepicker-calendar td a").bind("mouseout",function(){$(this).removeClass("ui-state-hover");if(this.className.indexOf("ui-datepicker-prev")!=-1){$(this).removeClass("ui-datepicker-prev-hover")}if(this.className.indexOf("ui-datepicker-next")!=-1){$(this).removeClass("ui-datepicker-next-hover")}}).bind("mouseover",function(){if(!self._isDisabledDatepicker(inst.inline?inst.dpDiv.parent()[0]:inst.input[0])){$(this).parents(".ui-datepicker-calendar").find("a").removeClass("ui-state-hover");$(this).addClass("ui-state-hover");if(this.className.indexOf("ui-datepicker-prev")!=-1){$(this).addClass("ui-datepicker-prev-hover")}if(this.className.indexOf("ui-datepicker-next")!=-1){$(this).addClass("ui-datepicker-next-hover")}}}).end().find("."+this._dayOverClass+" a").trigger("mouseover").end();var numMonths=this._getNumberOfMonths(inst);var cols=numMonths[1];var width=17;if(cols>1){inst.dpDiv.addClass("ui-datepicker-multi-"+cols).css("width",(width*cols)+"em")}else{inst.dpDiv.removeClass("ui-datepicker-multi-2 ui-datepicker-multi-3 ui-datepicker-multi-4").width("")}inst.dpDiv[(numMonths[0]!=1||numMonths[1]!=1?"add":"remove")+"Class"]("ui-datepicker-multi");inst.dpDiv[(this._get(inst,"isRTL")?"add":"remove")+"Class"]("ui-datepicker-rtl");if(inst.input&&inst.input[0].type!="hidden"&&inst==$.datepicker._curInst){$(inst.input[0]).focus()}},_checkOffset:function(inst,offset,isFixed){var dpWidth=inst.dpDiv.outerWidth();var dpHeight=inst.dpDiv.outerHeight();var inputWidth=inst.input?inst.input.outerWidth():0;var inputHeight=inst.input?inst.input.outerHeight():0;var viewWidth=(window.innerWidth||document.documentElement.clientWidth||document.body.clientWidth)+$(document).scrollLeft();var viewHeight=(window.innerHeight||document.documentElement.clientHeight||document.body.clientHeight)+$(document).scrollTop();offset.left-=(this._get(inst,"isRTL")?(dpWidth-inputWidth):0);offset.left-=(isFixed&&offset.left==inst.input.offset().left)?$(document).scrollLeft():0;offset.top-=(isFixed&&offset.top==(inst.input.offset().top+inputHeight))?$(document).scrollTop():0;offset.left-=(offset.left+dpWidth>viewWidth&&viewWidth>dpWidth)?Math.abs(offset.left+dpWidth-viewWidth):0;offset.top-=(offset.top+dpHeight>viewHeight&&viewHeight>dpHeight)?Math.abs(offset.top+dpHeight+inputHeight*2-viewHeight):0;return offset},_findPos:function(obj){while(obj&&(obj.type=="hidden"||obj.nodeType!=1)){obj=obj.nextSibling}var position=$(obj).offset();return[position.left,position.top]},_hideDatepicker:function(input,duration){var 
inst=this._curInst;if(!inst||(input&&inst!=$.data(input,PROP_NAME))){return}if(inst.stayOpen){this._selectDate("#"+inst.id,this._formatDate(inst,inst.currentDay,inst.currentMonth,inst.currentYear))}inst.stayOpen=false;if(this._datepickerShowing){duration=(duration!=null?duration:this._get(inst,"duration"));var showAnim=this._get(inst,"showAnim");var postProcess=function(){$.datepicker._tidyDialog(inst)};if(duration!=""&&$.effects&&$.effects[showAnim]){inst.dpDiv.hide(showAnim,$.datepicker._get(inst,"showOptions"),duration,postProcess)}else{inst.dpDiv[(duration==""?"hide":(showAnim=="slideDown"?"slideUp":(showAnim=="fadeIn"?"fadeOut":"hide")))](duration,postProcess)}if(duration==""){this._tidyDialog(inst)}var onClose=this._get(inst,"onClose");if(onClose){onClose.apply((inst.input?inst.input[0]:null),[(inst.input?inst.input.val():""),inst])}this._datepickerShowing=false;this._lastInput=null;if(this._inDialog){this._dialogInput.css({position:"absolute",left:"0",top:"-100px"});if($.blockUI){$.unblockUI();$("body").append(this.dpDiv)}}this._inDialog=false}this._curInst=null},_tidyDialog:function(inst){inst.dpDiv.removeClass(this._dialogClass).unbind(".ui-datepicker-calendar")},_checkExternalClick:function(event){if(!$.datepicker._curInst){return}var $target=$(event.target);if(($target.parents("#"+$.datepicker._mainDivId).length==0)&&!$target.hasClass($.datepicker.markerClassName)&&!$target.hasClass($.datepicker._triggerClass)&&$.datepicker._datepickerShowing&&!($.datepicker._inDialog&&$.blockUI)){$.datepicker._hideDatepicker(null,"")}},_adjustDate:function(id,offset,period){var target=$(id);var inst=this._getInst(target[0]);if(this._isDisabledDatepicker(target[0])){return}this._adjustInstDate(inst,offset+(period=="M"?this._get(inst,"showCurrentAtPos"):0),period);this._updateDatepicker(inst)},_gotoToday:function(id){var target=$(id);var inst=this._getInst(target[0]);if(this._get(inst,"gotoCurrent")&&inst.currentDay){inst.selectedDay=inst.currentDay;inst.drawMonth=inst.selectedMonth=inst.currentMonth;inst.drawYear=inst.selectedYear=inst.currentYear}else{var date=new Date();inst.selectedDay=date.getDate();inst.drawMonth=inst.selectedMonth=date.getMonth();inst.drawYear=inst.selectedYear=date.getFullYear()}this._notifyChange(inst);this._adjustDate(target)},_selectMonthYear:function(id,select,period){var target=$(id);var inst=this._getInst(target[0]);inst._selectingMonthYear=false;inst["selected"+(period=="M"?"Month":"Year")]=inst["draw"+(period=="M"?"Month":"Year")]=parseInt(select.options[select.selectedIndex].value,10);this._notifyChange(inst);this._adjustDate(target)},_clickMonthYear:function(id){var target=$(id);var inst=this._getInst(target[0]);if(inst.input&&inst._selectingMonthYear&&!$.browser.msie){inst.input[0].focus()}inst._selectingMonthYear=!inst._selectingMonthYear},_selectDay:function(id,month,year,td){var target=$(id);if($(td).hasClass(this._unselectableClass)||this._isDisabledDatepicker(target[0])){return}var inst=this._getInst(target[0]);inst.selectedDay=inst.currentDay=$("a",td).html();inst.selectedMonth=inst.currentMonth=month;inst.selectedYear=inst.currentYear=year;if(inst.stayOpen){inst.endDay=inst.endMonth=inst.endYear=null}this._selectDate(id,this._formatDate(inst,inst.currentDay,inst.currentMonth,inst.currentYear));if(inst.stayOpen){inst.rangeStart=this._daylightSavingAdjust(new Date(inst.currentYear,inst.currentMonth,inst.currentDay));this._updateDatepicker(inst)}},_clearDate:function(id){var target=$(id);var 
inst=this._getInst(target[0]);inst.stayOpen=false;inst.endDay=inst.endMonth=inst.endYear=inst.rangeStart=null;this._selectDate(target,"")},_selectDate:function(id,dateStr){var target=$(id);var inst=this._getInst(target[0]);dateStr=(dateStr!=null?dateStr:this._formatDate(inst));if(inst.input){inst.input.val(dateStr)}this._updateAlternate(inst);var onSelect=this._get(inst,"onSelect");if(onSelect){onSelect.apply((inst.input?inst.input[0]:null),[dateStr,inst])}else{if(inst.input){inst.input.trigger("change")}}if(inst.inline){this._updateDatepicker(inst)}else{if(!inst.stayOpen){this._hideDatepicker(null,this._get(inst,"duration"));this._lastInput=inst.input[0];if(typeof(inst.input[0])!="object"){inst.input[0].focus()}this._lastInput=null}}},_updateAlternate:function(inst){var altField=this._get(inst,"altField");if(altField){var altFormat=this._get(inst,"altFormat")||this._get(inst,"dateFormat");var date=this._getDate(inst);dateStr=this.formatDate(altFormat,date,this._getFormatConfig(inst));$(altField).each(function(){$(this).val(dateStr)})}},noWeekends:function(date){var day=date.getDay();return[(day>0&&day<6),""]},iso8601Week:function(date){var checkDate=new Date(date.getFullYear(),date.getMonth(),date.getDate());var firstMon=new Date(checkDate.getFullYear(),1-1,4);var firstDay=firstMon.getDay()||7;firstMon.setDate(firstMon.getDate()+1-firstDay);if(firstDay<4&&checkDate<firstMon){checkDate.setDate(checkDate.getDate()-3);return $.datepicker.iso8601Week(checkDate)}else{if(checkDate>new Date(checkDate.getFullYear(),12-1,28)){firstDay=new Date(checkDate.getFullYear()+1,1-1,4).getDay()||7;if(firstDay>4&&(checkDate.getDay()||7)<firstDay-3){return 1}}}return Math.floor(((checkDate-firstMon)/86400000)/7)+1},parseDate:function(format,value,settings){if(format==null||value==null){throw"Invalid arguments"}value=(typeof value=="object"?value.toString():value+"");if(value==""){return null}var shortYearCutoff=(settings?settings.shortYearCutoff:null)||this._defaults.shortYearCutoff;var dayNamesShort=(settings?settings.dayNamesShort:null)||this._defaults.dayNamesShort;var dayNames=(settings?settings.dayNames:null)||this._defaults.dayNames;var monthNamesShort=(settings?settings.monthNamesShort:null)||this._defaults.monthNamesShort;var monthNames=(settings?settings.monthNames:null)||this._defaults.monthNames;var year=-1;var month=-1;var day=-1;var doy=-1;var literal=false;var lookAhead=function(match){var matches=(iFormat+1<format.length&&format.charAt(iFormat+1)==match);if(matches){iFormat++}return matches};var getNumber=function(match){lookAhead(match);var origSize=(match=="@"?14:(match=="y"?4:(match=="o"?3:2)));var size=origSize;var num=0;while(size>0&&iValue<value.length&&value.charAt(iValue)>="0"&&value.charAt(iValue)<="9"){num=num*10+parseInt(value.charAt(iValue++),10);size--}if(size==origSize){throw"Missing number at position "+iValue}return num};var getName=function(match,shortNames,longNames){var names=(lookAhead(match)?longNames:shortNames);var size=0;for(var j=0;j<names.length;j++){size=Math.max(size,names[j].length)}var name="";var iInit=iValue;while(size>0&&iValue<value.length){name+=value.charAt(iValue++);for(var i=0;i<names.length;i++){if(name==names[i]){return i+1}}size--}throw"Unknown name at position "+iInit};var checkLiteral=function(){if(value.charAt(iValue)!=format.charAt(iFormat)){throw"Unexpected literal at position "+iValue}iValue++};var iValue=0;for(var 
iFormat=0;iFormat<format.length;iFormat++){if(literal){if(format.charAt(iFormat)=="'"&&!lookAhead("'")){literal=false}else{checkLiteral()}}else{switch(format.charAt(iFormat)){case"d":day=getNumber("d");break;case"D":getName("D",dayNamesShort,dayNames);break;case"o":doy=getNumber("o");break;case"m":month=getNumber("m");break;case"M":month=getName("M",monthNamesShort,monthNames);break;case"y":year=getNumber("y");break;case"@":var date=new Date(getNumber("@"));year=date.getFullYear();month=date.getMonth()+1;day=date.getDate();break;case"'":if(lookAhead("'")){checkLiteral()}else{literal=true}break;default:checkLiteral()}}}if(year==-1){year=new Date().getFullYear()}else{if(year<100){year+=new Date().getFullYear()-new Date().getFullYear()%100+(year<=shortYearCutoff?0:-100)}}if(doy>-1){month=1;day=doy;do{var dim=this._getDaysInMonth(year,month-1);if(day<=dim){break}month++;day-=dim}while(true)}var date=this._daylightSavingAdjust(new Date(year,month-1,day));if(date.getFullYear()!=year||date.getMonth()+1!=month||date.getDate()!=day){throw"Invalid date"}return date},ATOM:"yy-mm-dd",COOKIE:"D, dd M yy",ISO_8601:"yy-mm-dd",RFC_822:"D, d M y",RFC_850:"DD, dd-M-y",RFC_1036:"D, d M y",RFC_1123:"D, d M yy",RFC_2822:"D, d M yy",RSS:"D, d M y",TIMESTAMP:"@",W3C:"yy-mm-dd",formatDate:function(format,date,settings){if(!date){return""}var dayNamesShort=(settings?settings.dayNamesShort:null)||this._defaults.dayNamesShort;var dayNames=(settings?settings.dayNames:null)||this._defaults.dayNames;var monthNamesShort=(settings?settings.monthNamesShort:null)||this._defaults.monthNamesShort;var monthNames=(settings?settings.monthNames:null)||this._defaults.monthNames;var lookAhead=function(match){var matches=(iFormat+1<format.length&&format.charAt(iFormat+1)==match);if(matches){iFormat++}return matches};var formatNumber=function(match,value,len){var num=""+value;if(lookAhead(match)){while(num.length<len){num="0"+num}}return num};var formatName=function(match,value,shortNames,longNames){return(lookAhead(match)?longNames[value]:shortNames[value])};var output="";var literal=false;if(date){for(var iFormat=0;iFormat<format.length;iFormat++){if(literal){if(format.charAt(iFormat)=="'"&&!lookAhead("'")){literal=false}else{output+=format.charAt(iFormat)}}else{switch(format.charAt(iFormat)){case"d":output+=formatNumber("d",date.getDate(),2);break;case"D":output+=formatName("D",date.getDay(),dayNamesShort,dayNames);break;case"o":var doy=date.getDate();for(var m=date.getMonth()-1;m>=0;m--){doy+=this._getDaysInMonth(date.getFullYear(),m)}output+=formatNumber("o",doy,3);break;case"m":output+=formatNumber("m",date.getMonth()+1,2);break;case"M":output+=formatName("M",date.getMonth(),monthNamesShort,monthNames);break;case"y":output+=(lookAhead("y")?date.getFullYear():(date.getYear()%100<10?"0":"")+date.getYear()%100);break;case"@":output+=date.getTime();break;case"'":if(lookAhead("'")){output+="'"}else{literal=true}break;default:output+=format.charAt(iFormat)}}}}return output},_possibleChars:function(format){var chars="";var literal=false;for(var iFormat=0;iFormat<format.length;iFormat++){if(literal){if(format.charAt(iFormat)=="'"&&!lookAhead("'")){literal=false}else{chars+=format.charAt(iFormat)}}else{switch(format.charAt(iFormat)){case"d":case"m":case"y":case"@":chars+="0123456789";break;case"D":case"M":return null;case"'":if(lookAhead("'")){chars+="'"}else{literal=true}break;default:chars+=format.charAt(iFormat)}}}return chars},_get:function(inst,name){return 
inst.settings[name]!==undefined?inst.settings[name]:this._defaults[name]},_setDateFromField:function(inst){var dateFormat=this._get(inst,"dateFormat");var dates=inst.input?inst.input.val():null;inst.endDay=inst.endMonth=inst.endYear=null;var date=defaultDate=this._getDefaultDate(inst);var settings=this._getFormatConfig(inst);try{date=this.parseDate(dateFormat,dates,settings)||defaultDate}catch(event){this.log(event);date=defaultDate}inst.selectedDay=date.getDate();inst.drawMonth=inst.selectedMonth=date.getMonth();inst.drawYear=inst.selectedYear=date.getFullYear();inst.currentDay=(dates?date.getDate():0);inst.currentMonth=(dates?date.getMonth():0);inst.currentYear=(dates?date.getFullYear():0);this._adjustInstDate(inst)},_getDefaultDate:function(inst){var date=this._determineDate(this._get(inst,"defaultDate"),new Date());var minDate=this._getMinMaxDate(inst,"min",true);var maxDate=this._getMinMaxDate(inst,"max");date=(minDate&&date<minDate?minDate:date);date=(maxDate&&date>maxDate?maxDate:date);return date},_determineDate:function(date,defaultDate){var offsetNumeric=function(offset){var date=new Date();date.setDate(date.getDate()+offset);return date};var offsetString=function(offset,getDaysInMonth){var date=new Date();var year=date.getFullYear();var month=date.getMonth();var day=date.getDate();var pattern=/([+-]?[0-9]+)\s*(d|D|w|W|m|M|y|Y)?/g;var matches=pattern.exec(offset);while(matches){switch(matches[2]||"d"){case"d":case"D":day+=parseInt(matches[1],10);break;case"w":case"W":day+=parseInt(matches[1],10)*7;break;case"m":case"M":month+=parseInt(matches[1],10);day=Math.min(day,getDaysInMonth(year,month));break;case"y":case"Y":year+=parseInt(matches[1],10);day=Math.min(day,getDaysInMonth(year,month));break}matches=pattern.exec(offset)}return new Date(year,month,day)};date=(date==null?defaultDate:(typeof date=="string"?offsetString(date,this._getDaysInMonth):(typeof date=="number"?(isNaN(date)?defaultDate:offsetNumeric(date)):date)));date=(date&&date.toString()=="Invalid Date"?defaultDate:date);if(date){date.setHours(0);date.setMinutes(0);date.setSeconds(0);date.setMilliseconds(0)}return this._daylightSavingAdjust(date)},_daylightSavingAdjust:function(date){if(!date){return null}date.setHours(date.getHours()>12?date.getHours()+2:0);return date},_setDate:function(inst,date,endDate){var clear=!(date);var origMonth=inst.selectedMonth;var origYear=inst.selectedYear;date=this._determineDate(date,new Date());inst.selectedDay=inst.currentDay=date.getDate();inst.drawMonth=inst.selectedMonth=inst.currentMonth=date.getMonth();inst.drawYear=inst.selectedYear=inst.currentYear=date.getFullYear();if(origMonth!=inst.selectedMonth||origYear!=inst.selectedYear){this._notifyChange(inst)}this._adjustInstDate(inst);if(inst.input){inst.input.val(clear?"":this._formatDate(inst))}},_getDate:function(inst){var startDate=(!inst.currentYear||(inst.input&&inst.input.val()=="")?null:this._daylightSavingAdjust(new Date(inst.currentYear,inst.currentMonth,inst.currentDay)));return startDate},_generateHTML:function(inst){var today=new Date();today=this._daylightSavingAdjust(new Date(today.getFullYear(),today.getMonth(),today.getDate()));var isRTL=this._get(inst,"isRTL");var showButtonPanel=this._get(inst,"showButtonPanel");var hideIfNoPrevNext=this._get(inst,"hideIfNoPrevNext");var navigationAsDateFormat=this._get(inst,"navigationAsDateFormat");var numMonths=this._getNumberOfMonths(inst);var showCurrentAtPos=this._get(inst,"showCurrentAtPos");var stepMonths=this._get(inst,"stepMonths");var 
stepBigMonths=this._get(inst,"stepBigMonths");var isMultiMonth=(numMonths[0]!=1||numMonths[1]!=1);var currentDate=this._daylightSavingAdjust((!inst.currentDay?new Date(9999,9,9):new Date(inst.currentYear,inst.currentMonth,inst.currentDay)));var minDate=this._getMinMaxDate(inst,"min",true);var maxDate=this._getMinMaxDate(inst,"max");var drawMonth=inst.drawMonth-showCurrentAtPos;var drawYear=inst.drawYear;if(drawMonth<0){drawMonth+=12;drawYear--}if(maxDate){var maxDraw=this._daylightSavingAdjust(new Date(maxDate.getFullYear(),maxDate.getMonth()-numMonths[1]+1,maxDate.getDate()));maxDraw=(minDate&&maxDraw<minDate?minDate:maxDraw);while(this._daylightSavingAdjust(new Date(drawYear,drawMonth,1))>maxDraw){drawMonth--;if(drawMonth<0){drawMonth=11;drawYear--}}}inst.drawMonth=drawMonth;inst.drawYear=drawYear;var prevText=this._get(inst,"prevText");prevText=(!navigationAsDateFormat?prevText:this.formatDate(prevText,this._daylightSavingAdjust(new Date(drawYear,drawMonth-stepMonths,1)),this._getFormatConfig(inst)));var prev=(this._canAdjustMonth(inst,-1,drawYear,drawMonth)?'<a class="ui-datepicker-prev ui-corner-all" onclick="DP_jQuery.datepicker._adjustDate(\'#'+inst.id+"', -"+stepMonths+", 'M');\" title=\""+prevText+'"><span class="ui-icon ui-icon-circle-triangle-'+(isRTL?"e":"w")+'">'+prevText+"</span></a>":(hideIfNoPrevNext?"":'<a class="ui-datepicker-prev ui-corner-all ui-state-disabled" title="'+prevText+'"><span class="ui-icon ui-icon-circle-triangle-'+(isRTL?"e":"w")+'">'+prevText+"</span></a>"));var nextText=this._get(inst,"nextText");nextText=(!navigationAsDateFormat?nextText:this.formatDate(nextText,this._daylightSavingAdjust(new Date(drawYear,drawMonth+stepMonths,1)),this._getFormatConfig(inst)));var next=(this._canAdjustMonth(inst,+1,drawYear,drawMonth)?'<a class="ui-datepicker-next ui-corner-all" onclick="DP_jQuery.datepicker._adjustDate(\'#'+inst.id+"', +"+stepMonths+", 'M');\" title=\""+nextText+'"><span class="ui-icon ui-icon-circle-triangle-'+(isRTL?"w":"e")+'">'+nextText+"</span></a>":(hideIfNoPrevNext?"":'<a class="ui-datepicker-next ui-corner-all ui-state-disabled" title="'+nextText+'"><span class="ui-icon ui-icon-circle-triangle-'+(isRTL?"w":"e")+'">'+nextText+"</span></a>"));var currentText=this._get(inst,"currentText");var gotoDate=(this._get(inst,"gotoCurrent")&&inst.currentDay?currentDate:today);currentText=(!navigationAsDateFormat?currentText:this.formatDate(currentText,gotoDate,this._getFormatConfig(inst)));var controls=(!inst.inline?'<button type="button" class="ui-datepicker-close ui-state-default ui-priority-primary ui-corner-all" onclick="DP_jQuery.datepicker._hideDatepicker();">'+this._get(inst,"closeText")+"</button>":"");var buttonPanel=(showButtonPanel)?'<div class="ui-datepicker-buttonpane ui-widget-content">'+(isRTL?controls:"")+(this._isInRange(inst,gotoDate)?'<button type="button" class="ui-datepicker-current ui-state-default ui-priority-secondary ui-corner-all" onclick="DP_jQuery.datepicker._gotoToday(\'#'+inst.id+"');\">"+currentText+"</button>":"")+(isRTL?"":controls)+"</div>":"";var firstDay=parseInt(this._get(inst,"firstDay"),10);firstDay=(isNaN(firstDay)?0:firstDay);var dayNames=this._get(inst,"dayNames");var dayNamesShort=this._get(inst,"dayNamesShort");var dayNamesMin=this._get(inst,"dayNamesMin");var monthNames=this._get(inst,"monthNames");var monthNamesShort=this._get(inst,"monthNamesShort");var beforeShowDay=this._get(inst,"beforeShowDay");var showOtherMonths=this._get(inst,"showOtherMonths");var 
calculateWeek=this._get(inst,"calculateWeek")||this.iso8601Week;var endDate=inst.endDay?this._daylightSavingAdjust(new Date(inst.endYear,inst.endMonth,inst.endDay)):currentDate;var defaultDate=this._getDefaultDate(inst);var html="";for(var row=0;row<numMonths[0];row++){var group="";for(var col=0;col<numMonths[1];col++){var selectedDate=this._daylightSavingAdjust(new Date(drawYear,drawMonth,inst.selectedDay));var cornerClass=" ui-corner-all";var calender="";if(isMultiMonth){calender+='<div class="ui-datepicker-group ui-datepicker-group-';switch(col){case 0:calender+="first";cornerClass=" ui-corner-"+(isRTL?"right":"left");break;case numMonths[1]-1:calender+="last";cornerClass=" ui-corner-"+(isRTL?"left":"right");break;default:calender+="middle";cornerClass="";break}calender+='">'}calender+='<div class="ui-datepicker-header ui-widget-header ui-helper-clearfix'+cornerClass+'">'+(/all|left/.test(cornerClass)&&row==0?(isRTL?next:prev):"")+(/all|right/.test(cornerClass)&&row==0?(isRTL?prev:next):"")+this._generateMonthYearHeader(inst,drawMonth,drawYear,minDate,maxDate,selectedDate,row>0||col>0,monthNames,monthNamesShort)+'</div><table class="ui-datepicker-calendar"><thead><tr>';var thead="";for(var dow=0;dow<7;dow++){var day=(dow+firstDay)%7;thead+="<th"+((dow+firstDay+6)%7>=5?' class="ui-datepicker-week-end"':"")+'><span title="'+dayNames[day]+'">'+dayNamesMin[day]+"</span></th>"}calender+=thead+"</tr></thead><tbody>";var daysInMonth=this._getDaysInMonth(drawYear,drawMonth);if(drawYear==inst.selectedYear&&drawMonth==inst.selectedMonth){inst.selectedDay=Math.min(inst.selectedDay,daysInMonth)}var leadDays=(this._getFirstDayOfMonth(drawYear,drawMonth)-firstDay+7)%7;var numRows=(isMultiMonth?6:Math.ceil((leadDays+daysInMonth)/7));var printDate=this._daylightSavingAdjust(new Date(drawYear,drawMonth,1-leadDays));for(var dRow=0;dRow<numRows;dRow++){calender+="<tr>";var tbody="";for(var dow=0;dow<7;dow++){var daySettings=(beforeShowDay?beforeShowDay.apply((inst.input?inst.input[0]:null),[printDate]):[true,""]);var otherMonth=(printDate.getMonth()!=drawMonth);var unselectable=otherMonth||!daySettings[0]||(minDate&&printDate<minDate)||(maxDate&&printDate>maxDate);tbody+='<td class="'+((dow+firstDay+6)%7>=5?" ui-datepicker-week-end":"")+(otherMonth?" ui-datepicker-other-month":"")+((printDate.getTime()==selectedDate.getTime()&&drawMonth==inst.selectedMonth&&inst._keyEvent)||(defaultDate.getTime()==printDate.getTime()&&defaultDate.getTime()==selectedDate.getTime())?" "+this._dayOverClass:"")+(unselectable?" "+this._unselectableClass+" ui-state-disabled":"")+(otherMonth&&!showOtherMonths?"":" "+daySettings[1]+(printDate.getTime()>=currentDate.getTime()&&printDate.getTime()<=endDate.getTime()?" "+this._currentClass:"")+(printDate.getTime()==today.getTime()?" ui-datepicker-today":""))+'"'+((!otherMonth||showOtherMonths)&&daySettings[2]?' title="'+daySettings[2]+'"':"")+(unselectable?"":" onclick=\"DP_jQuery.datepicker._selectDay('#"+inst.id+"',"+drawMonth+","+drawYear+', this);return false;"')+">"+(otherMonth?(showOtherMonths?printDate.getDate():"&#xa0;"):(unselectable?'<span class="ui-state-default">'+printDate.getDate()+"</span>":'<a class="ui-state-default'+(printDate.getTime()==today.getTime()?" ui-state-highlight":"")+(printDate.getTime()>=currentDate.getTime()&&printDate.getTime()<=endDate.getTime()?" 
ui-state-active":"")+'" href="#">'+printDate.getDate()+"</a>"))+"</td>";printDate.setDate(printDate.getDate()+1);printDate=this._daylightSavingAdjust(printDate)}calender+=tbody+"</tr>"}drawMonth++;if(drawMonth>11){drawMonth=0;drawYear++}calender+="</tbody></table>"+(isMultiMonth?"</div>"+((numMonths[0]>0&&col==numMonths[1]-1)?'<div class="ui-datepicker-row-break"></div>':""):"");group+=calender}html+=group}html+=buttonPanel+($.browser.msie&&parseInt($.browser.version,10)<7&&!inst.inline?'<iframe src="javascript:false;" class="ui-datepicker-cover" frameborder="0"></iframe>':"");inst._keyEvent=false;return html},_generateMonthYearHeader:function(inst,drawMonth,drawYear,minDate,maxDate,selectedDate,secondary,monthNames,monthNamesShort){minDate=(inst.rangeStart&&minDate&&selectedDate<minDate?selectedDate:minDate);var changeMonth=this._get(inst,"changeMonth");var changeYear=this._get(inst,"changeYear");var showMonthAfterYear=this._get(inst,"showMonthAfterYear");var html='<div class="ui-datepicker-title">';var monthHtml="";if(secondary||!changeMonth){monthHtml+='<span class="ui-datepicker-month">'+monthNames[drawMonth]+"</span> "}else{var inMinYear=(minDate&&minDate.getFullYear()==drawYear);var inMaxYear=(maxDate&&maxDate.getFullYear()==drawYear);monthHtml+='<select class="ui-datepicker-month" onchange="DP_jQuery.datepicker._selectMonthYear(\'#'+inst.id+"', this, 'M');\" onclick=\"DP_jQuery.datepicker._clickMonthYear('#"+inst.id+"');\">";for(var month=0;month<12;month++){if((!inMinYear||month>=minDate.getMonth())&&(!inMaxYear||month<=maxDate.getMonth())){monthHtml+='<option value="'+month+'"'+(month==drawMonth?' selected="selected"':"")+">"+monthNamesShort[month]+"</option>"}}monthHtml+="</select>"}if(!showMonthAfterYear){html+=monthHtml+((secondary||changeMonth||changeYear)&&(!(changeMonth&&changeYear))?"&#xa0;":"")}if(secondary||!changeYear){html+='<span class="ui-datepicker-year">'+drawYear+"</span>"}else{var years=this._get(inst,"yearRange").split(":");var year=0;var endYear=0;if(years.length!=2){year=drawYear-10;endYear=drawYear+10}else{if(years[0].charAt(0)=="+"||years[0].charAt(0)=="-"){year=drawYear+parseInt(years[0],10);endYear=drawYear+parseInt(years[1],10)}else{year=parseInt(years[0],10);endYear=parseInt(years[1],10)}}year=(minDate?Math.max(year,minDate.getFullYear()):year);endYear=(maxDate?Math.min(endYear,maxDate.getFullYear()):endYear);html+='<select class="ui-datepicker-year" onchange="DP_jQuery.datepicker._selectMonthYear(\'#'+inst.id+"', this, 'Y');\" onclick=\"DP_jQuery.datepicker._clickMonthYear('#"+inst.id+"');\">";for(;year<=endYear;year++){html+='<option value="'+year+'"'+(year==drawYear?' 
selected="selected"':"")+">"+year+"</option>"}html+="</select>"}if(showMonthAfterYear){html+=(secondary||changeMonth||changeYear?"&#xa0;":"")+monthHtml}html+="</div>";return html},_adjustInstDate:function(inst,offset,period){var year=inst.drawYear+(period=="Y"?offset:0);var month=inst.drawMonth+(period=="M"?offset:0);var day=Math.min(inst.selectedDay,this._getDaysInMonth(year,month))+(period=="D"?offset:0);var date=this._daylightSavingAdjust(new Date(year,month,day));var minDate=this._getMinMaxDate(inst,"min",true);var maxDate=this._getMinMaxDate(inst,"max");date=(minDate&&date<minDate?minDate:date);date=(maxDate&&date>maxDate?maxDate:date);inst.selectedDay=date.getDate();inst.drawMonth=inst.selectedMonth=date.getMonth();inst.drawYear=inst.selectedYear=date.getFullYear();if(period=="M"||period=="Y"){this._notifyChange(inst)}},_notifyChange:function(inst){var onChange=this._get(inst,"onChangeMonthYear");if(onChange){onChange.apply((inst.input?inst.input[0]:null),[inst.selectedYear,inst.selectedMonth+1,inst])}},_getNumberOfMonths:function(inst){var numMonths=this._get(inst,"numberOfMonths");return(numMonths==null?[1,1]:(typeof numMonths=="number"?[1,numMonths]:numMonths))},_getMinMaxDate:function(inst,minMax,checkRange){var date=this._determineDate(this._get(inst,minMax+"Date"),null);return(!checkRange||!inst.rangeStart?date:(!date||inst.rangeStart>date?inst.rangeStart:date))},_getDaysInMonth:function(year,month){return 32-new Date(year,month,32).getDate()},_getFirstDayOfMonth:function(year,month){return new Date(year,month,1).getDay()},_canAdjustMonth:function(inst,offset,curYear,curMonth){var numMonths=this._getNumberOfMonths(inst);var date=this._daylightSavingAdjust(new Date(curYear,curMonth+(offset<0?offset:numMonths[1]),1));if(offset<0){date.setDate(this._getDaysInMonth(date.getFullYear(),date.getMonth()))}return this._isInRange(inst,date)},_isInRange:function(inst,date){var newMinDate=(!inst.rangeStart?null:this._daylightSavingAdjust(new Date(inst.selectedYear,inst.selectedMonth,inst.selectedDay)));newMinDate=(newMinDate&&inst.rangeStart<newMinDate?inst.rangeStart:newMinDate);var minDate=newMinDate||this._getMinMaxDate(inst,"min");var maxDate=this._getMinMaxDate(inst,"max");return((!minDate||date>=minDate)&&(!maxDate||date<=maxDate))},_getFormatConfig:function(inst){var shortYearCutoff=this._get(inst,"shortYearCutoff");shortYearCutoff=(typeof shortYearCutoff!="string"?shortYearCutoff:new Date().getFullYear()%100+parseInt(shortYearCutoff,10));return{shortYearCutoff:shortYearCutoff,dayNamesShort:this._get(inst,"dayNamesShort"),dayNames:this._get(inst,"dayNames"),monthNamesShort:this._get(inst,"monthNamesShort"),monthNames:this._get(inst,"monthNames")}},_formatDate:function(inst,day,month,year){if(!day){inst.currentDay=inst.selectedDay;inst.currentMonth=inst.selectedMonth;inst.currentYear=inst.selectedYear}var date=(day?(typeof day=="object"?day:this._daylightSavingAdjust(new Date(year,month,day))):this._daylightSavingAdjust(new Date(inst.currentYear,inst.currentMonth,inst.currentDay)));return this.formatDate(this._get(inst,"dateFormat"),date,this._getFormatConfig(inst))}});function extendRemove(target,props){$.extend(target,props);for(var name in props){if(props[name]==null||props[name]==undefined){target[name]=props[name]}}return target}function isArray(a){return(a&&(($.browser.safari&&typeof 
a=="object"&&a.length)||(a.constructor&&a.constructor.toString().match(/\Array\(\)/))))}$.fn.datepicker=function(options){if(!$.datepicker.initialized){$(document).mousedown($.datepicker._checkExternalClick).find("body").append($.datepicker.dpDiv);$.datepicker.initialized=true}var otherArgs=Array.prototype.slice.call(arguments,1);if(typeof options=="string"&&(options=="isDisabled"||options=="getDate")){return $.datepicker["_"+options+"Datepicker"].apply($.datepicker,[this[0]].concat(otherArgs))}if(options=="option"&&arguments.length==2&&typeof arguments[1]=="string"){return $.datepicker["_"+options+"Datepicker"].apply($.datepicker,[this[0]].concat(otherArgs))}return this.each(function(){typeof options=="string"?$.datepicker["_"+options+"Datepicker"].apply($.datepicker,[this].concat(otherArgs)):$.datepicker._attachDatepicker(this,options)})};$.datepicker=new Datepicker();$.datepicker.initialized=false;$.datepicker.uuid=new Date().getTime();$.datepicker.version="1.7.2";window.DP_jQuery=$})(jQuery);;/*
  * jQuery UI Progressbar 1.7.2
  *
- * Copyright (c) 2009 AUTHORS.txt (http://jqueryui.com/about)
+ * Copyright (c) 2010 AUTHORS.txt (http://jqueryui.com/about)
  * Dual licensed under the MIT (MIT-LICENSE.txt)
  * and GPL (GPL-LICENSE.txt) licenses.
  *
@@ -145,7 +145,7 @@
 (function(a){a.widget("ui.progressbar",{_init:function(){this.element.addClass("ui-progressbar ui-widget ui-widget-content ui-corner-all").attr({role:"progressbar","aria-valuemin":this._valueMin(),"aria-valuemax":this._valueMax(),"aria-valuenow":this._value()});this.valueDiv=a('<div class="ui-progressbar-value ui-widget-header ui-corner-left"></div>').appendTo(this.element);this._refreshValue()},destroy:function(){this.element.removeClass("ui-progressbar ui-widget ui-widget-content ui-corner-all").removeAttr("role").removeAttr("aria-valuemin").removeAttr("aria-valuemax").removeAttr("aria-valuenow").removeData("progressbar").unbind(".progressbar");this.valueDiv.remove();a.widget.prototype.destroy.apply(this,arguments)},value:function(b){if(b===undefined){return this._value()}this._setData("value",b);return this},_setData:function(b,c){switch(b){case"value":this.options.value=c;this._refreshValue();this._trigger("change",null,{});break}a.widget.prototype._setData.apply(this,arguments)},_value:function(){var b=this.options.value;if(b<this._valueMin()){b=this._valueMin()}if(b>this._valueMax()){b=this._valueMax()}return b},_valueMin:function(){var b=0;return b},_valueMax:function(){var b=100;return b},_refreshValue:function(){var b=this.value();this.valueDiv[b==this._valueMax()?"addClass":"removeClass"]("ui-corner-right");this.valueDiv.width(b+"%");this.element.attr("aria-valuenow",b)}});a.extend(a.ui.progressbar,{version:"1.7.2",defaults:{value:0}})})(jQuery);;/*
  * jQuery UI Effects 1.7.2
  *
- * Copyright (c) 2009 AUTHORS.txt (http://jqueryui.com/about)
+ * Copyright (c) 2010 AUTHORS.txt (http://jqueryui.com/about)
  * Dual licensed under the MIT (MIT-LICENSE.txt)
  * and GPL (GPL-LICENSE.txt) licenses.
  *
@@ -154,7 +154,7 @@
 jQuery.effects||(function(d){d.effects={version:"1.7.2",save:function(g,h){for(var f=0;f<h.length;f++){if(h[f]!==null){g.data("ec.storage."+h[f],g[0].style[h[f]])}}},restore:function(g,h){for(var f=0;f<h.length;f++){if(h[f]!==null){g.css(h[f],g.data("ec.storage."+h[f]))}}},setMode:function(f,g){if(g=="toggle"){g=f.is(":hidden")?"show":"hide"}return g},getBaseline:function(g,h){var i,f;switch(g[0]){case"top":i=0;break;case"middle":i=0.5;break;case"bottom":i=1;break;default:i=g[0]/h.height}switch(g[1]){case"left":f=0;break;case"center":f=0.5;break;case"right":f=1;break;default:f=g[1]/h.width}return{x:f,y:i}},createWrapper:function(f){if(f.parent().is(".ui-effects-wrapper")){return f.parent()}var g={width:f.outerWidth(true),height:f.outerHeight(true),"float":f.css("float")};f.wrap('<div class="ui-effects-wrapper" style="font-size:100%;background:transparent;border:none;margin:0;padding:0"></div>');var j=f.parent();if(f.css("position")=="static"){j.css({position:"relative"});f.css({position:"relative"})}else{var i=f.css("top");if(isNaN(parseInt(i,10))){i="auto"}var h=f.css("left");if(isNaN(parseInt(h,10))){h="auto"}j.css({position:f.css("position"),top:i,left:h,zIndex:f.css("z-index")}).show();f.css({position:"relative",top:0,left:0})}j.css(g);return j},removeWrapper:function(f){if(f.parent().is(".ui-effects-wrapper")){return f.parent().replaceWith(f)}return f},setTransition:function(g,i,f,h){h=h||{};d.each(i,function(k,j){unit=g.cssUnit(j);if(unit[0]>0){h[j]=unit[0]*f+unit[1]}});return h},animateClass:function(h,i,k,j){var f=(typeof k=="function"?k:(j?j:null));var g=(typeof k=="string"?k:null);return this.each(function(){var q={};var o=d(this);var p=o.attr("style")||"";if(typeof p=="object"){p=p.cssText}if(h.toggle){o.hasClass(h.toggle)?h.remove=h.toggle:h.add=h.toggle}var l=d.extend({},(document.defaultView?document.defaultView.getComputedStyle(this,null):this.currentStyle));if(h.add){o.addClass(h.add)}if(h.remove){o.removeClass(h.remove)}var m=d.extend({},(document.defaultView?document.defaultView.getComputedStyle(this,null):this.currentStyle));if(h.add){o.removeClass(h.add)}if(h.remove){o.addClass(h.remove)}for(var r in m){if(typeof m[r]!="function"&&m[r]&&r.indexOf("Moz")==-1&&r.indexOf("length")==-1&&m[r]!=l[r]&&(r.match(/color/i)||(!r.match(/color/i)&&!isNaN(parseInt(m[r],10))))&&(l.position!="static"||(l.position=="static"&&!r.match(/left|top|bottom|right/)))){q[r]=m[r]}}o.animate(q,i,g,function(){if(typeof d(this).attr("style")=="object"){d(this).attr("style")["cssText"]="";d(this).attr("style")["cssText"]=p}else{d(this).attr("style",p)}if(h.add){d(this).addClass(h.add)}if(h.remove){d(this).removeClass(h.remove)}if(f){f.apply(this,arguments)}})})}};function c(g,f){var i=g[1]&&g[1].constructor==Object?g[1]:{};if(f){i.mode=f}var h=g[1]&&g[1].constructor!=Object?g[1]:(i.duration?i.duration:g[2]);h=d.fx.off?0:typeof h==="number"?h:d.fx.speeds[h]||d.fx.speeds._default;var j=i.callback||(d.isFunction(g[1])&&g[1])||(d.isFunction(g[2])&&g[2])||(d.isFunction(g[3])&&g[3]);return[g[0],i,h,j]}d.fn.extend({_show:d.fn.show,_hide:d.fn.hide,__toggle:d.fn.toggle,_addClass:d.fn.addClass,_removeClass:d.fn.removeClass,_toggleClass:d.fn.toggleClass,effect:function(g,f,h,i){return d.effects[g]?d.effects[g].call(this,{method:g,options:f||{},duration:h,callback:i}):null},show:function(){if(!arguments[0]||(arguments[0].constructor==Number||(/(slow|normal|fast)/).test(arguments[0]))){return this._show.apply(this,arguments)}else{return 
this.effect.apply(this,c(arguments,"show"))}},hide:function(){if(!arguments[0]||(arguments[0].constructor==Number||(/(slow|normal|fast)/).test(arguments[0]))){return this._hide.apply(this,arguments)}else{return this.effect.apply(this,c(arguments,"hide"))}},toggle:function(){if(!arguments[0]||(arguments[0].constructor==Number||(/(slow|normal|fast)/).test(arguments[0]))||(d.isFunction(arguments[0])||typeof arguments[0]=="boolean")){return this.__toggle.apply(this,arguments)}else{return this.effect.apply(this,c(arguments,"toggle"))}},addClass:function(g,f,i,h){return f?d.effects.animateClass.apply(this,[{add:g},f,i,h]):this._addClass(g)},removeClass:function(g,f,i,h){return f?d.effects.animateClass.apply(this,[{remove:g},f,i,h]):this._removeClass(g)},toggleClass:function(g,f,i,h){return((typeof f!=="boolean")&&f)?d.effects.animateClass.apply(this,[{toggle:g},f,i,h]):this._toggleClass(g,f)},morph:function(f,h,g,j,i){return d.effects.animateClass.apply(this,[{add:h,remove:f},g,j,i])},switchClass:function(){return this.morph.apply(this,arguments)},cssUnit:function(f){var g=this.css(f),h=[];d.each(["em","px","%","pt"],function(j,k){if(g.indexOf(k)>0){h=[parseFloat(g),k]}});return h}});d.each(["backgroundColor","borderBottomColor","borderLeftColor","borderRightColor","borderTopColor","color","outlineColor"],function(g,f){d.fx.step[f]=function(h){if(h.state==0){h.start=e(h.elem,f);h.end=b(h.end)}h.elem.style[f]="rgb("+[Math.max(Math.min(parseInt((h.pos*(h.end[0]-h.start[0]))+h.start[0],10),255),0),Math.max(Math.min(parseInt((h.pos*(h.end[1]-h.start[1]))+h.start[1],10),255),0),Math.max(Math.min(parseInt((h.pos*(h.end[2]-h.start[2]))+h.start[2],10),255),0)].join(",")+")"}});function b(g){var f;if(g&&g.constructor==Array&&g.length==3){return g}if(f=/rgb\(\s*([0-9]{1,3})\s*,\s*([0-9]{1,3})\s*,\s*([0-9]{1,3})\s*\)/.exec(g)){return[parseInt(f[1],10),parseInt(f[2],10),parseInt(f[3],10)]}if(f=/rgb\(\s*([0-9]+(?:\.[0-9]+)?)\%\s*,\s*([0-9]+(?:\.[0-9]+)?)\%\s*,\s*([0-9]+(?:\.[0-9]+)?)\%\s*\)/.exec(g)){return[parseFloat(f[1])*2.55,parseFloat(f[2])*2.55,parseFloat(f[3])*2.55]}if(f=/#([a-fA-F0-9]{2})([a-fA-F0-9]{2})([a-fA-F0-9]{2})/.exec(g)){return[parseInt(f[1],16),parseInt(f[2],16),parseInt(f[3],16)]}if(f=/#([a-fA-F0-9])([a-fA-F0-9])([a-fA-F0-9])/.exec(g)){return[parseInt(f[1]+f[1],16),parseInt(f[2]+f[2],16),parseInt(f[3]+f[3],16)]}if(f=/rgba\(0, 0, 0, 0\)/.exec(g)){return a.transparent}return a[d.trim(g).toLowerCase()]}function e(h,f){var g;do{g=d.curCSS(h,f);if(g!=""&&g!="transparent"||d.nodeName(h,"body")){break}f="backgroundColor"}while(h=h.parentNode);return b(g)}var 
a={aqua:[0,255,255],azure:[240,255,255],beige:[245,245,220],black:[0,0,0],blue:[0,0,255],brown:[165,42,42],cyan:[0,255,255],darkblue:[0,0,139],darkcyan:[0,139,139],darkgrey:[169,169,169],darkgreen:[0,100,0],darkkhaki:[189,183,107],darkmagenta:[139,0,139],darkolivegreen:[85,107,47],darkorange:[255,140,0],darkorchid:[153,50,204],darkred:[139,0,0],darksalmon:[233,150,122],darkviolet:[148,0,211],fuchsia:[255,0,255],gold:[255,215,0],green:[0,128,0],indigo:[75,0,130],khaki:[240,230,140],lightblue:[173,216,230],lightcyan:[224,255,255],lightgreen:[144,238,144],lightgrey:[211,211,211],lightpink:[255,182,193],lightyellow:[255,255,224],lime:[0,255,0],magenta:[255,0,255],maroon:[128,0,0],navy:[0,0,128],olive:[128,128,0],orange:[255,165,0],pink:[255,192,203],purple:[128,0,128],violet:[128,0,128],red:[255,0,0],silver:[192,192,192],white:[255,255,255],yellow:[255,255,0],transparent:[255,255,255]};d.easing.jswing=d.easing.swing;d.extend(d.easing,{def:"easeOutQuad",swing:function(g,h,f,j,i){return d.easing[d.easing.def](g,h,f,j,i)},easeInQuad:function(g,h,f,j,i){return j*(h/=i)*h+f},easeOutQuad:function(g,h,f,j,i){return -j*(h/=i)*(h-2)+f},easeInOutQuad:function(g,h,f,j,i){if((h/=i/2)<1){return j/2*h*h+f}return -j/2*((--h)*(h-2)-1)+f},easeInCubic:function(g,h,f,j,i){return j*(h/=i)*h*h+f},easeOutCubic:function(g,h,f,j,i){return j*((h=h/i-1)*h*h+1)+f},easeInOutCubic:function(g,h,f,j,i){if((h/=i/2)<1){return j/2*h*h*h+f}return j/2*((h-=2)*h*h+2)+f},easeInQuart:function(g,h,f,j,i){return j*(h/=i)*h*h*h+f},easeOutQuart:function(g,h,f,j,i){return -j*((h=h/i-1)*h*h*h-1)+f},easeInOutQuart:function(g,h,f,j,i){if((h/=i/2)<1){return j/2*h*h*h*h+f}return -j/2*((h-=2)*h*h*h-2)+f},easeInQuint:function(g,h,f,j,i){return j*(h/=i)*h*h*h*h+f},easeOutQuint:function(g,h,f,j,i){return j*((h=h/i-1)*h*h*h*h+1)+f},easeInOutQuint:function(g,h,f,j,i){if((h/=i/2)<1){return j/2*h*h*h*h*h+f}return j/2*((h-=2)*h*h*h*h+2)+f},easeInSine:function(g,h,f,j,i){return -j*Math.cos(h/i*(Math.PI/2))+j+f},easeOutSine:function(g,h,f,j,i){return j*Math.sin(h/i*(Math.PI/2))+f},easeInOutSine:function(g,h,f,j,i){return -j/2*(Math.cos(Math.PI*h/i)-1)+f},easeInExpo:function(g,h,f,j,i){return(h==0)?f:j*Math.pow(2,10*(h/i-1))+f},easeOutExpo:function(g,h,f,j,i){return(h==i)?f+j:j*(-Math.pow(2,-10*h/i)+1)+f},easeInOutExpo:function(g,h,f,j,i){if(h==0){return f}if(h==i){return f+j}if((h/=i/2)<1){return j/2*Math.pow(2,10*(h-1))+f}return j/2*(-Math.pow(2,-10*--h)+2)+f},easeInCirc:function(g,h,f,j,i){return -j*(Math.sqrt(1-(h/=i)*h)-1)+f},easeOutCirc:function(g,h,f,j,i){return j*Math.sqrt(1-(h=h/i-1)*h)+f},easeInOutCirc:function(g,h,f,j,i){if((h/=i/2)<1){return -j/2*(Math.sqrt(1-h*h)-1)+f}return j/2*(Math.sqrt(1-(h-=2)*h)+1)+f},easeInElastic:function(g,i,f,m,l){var j=1.70158;var k=0;var h=m;if(i==0){return f}if((i/=l)==1){return f+m}if(!k){k=l*0.3}if(h<Math.abs(m)){h=m;var j=k/4}else{var j=k/(2*Math.PI)*Math.asin(m/h)}return -(h*Math.pow(2,10*(i-=1))*Math.sin((i*l-j)*(2*Math.PI)/k))+f},easeOutElastic:function(g,i,f,m,l){var j=1.70158;var k=0;var h=m;if(i==0){return f}if((i/=l)==1){return f+m}if(!k){k=l*0.3}if(h<Math.abs(m)){h=m;var j=k/4}else{var j=k/(2*Math.PI)*Math.asin(m/h)}return h*Math.pow(2,-10*i)*Math.sin((i*l-j)*(2*Math.PI)/k)+m+f},easeInOutElastic:function(g,i,f,m,l){var j=1.70158;var k=0;var h=m;if(i==0){return f}if((i/=l/2)==2){return f+m}if(!k){k=l*(0.3*1.5)}if(h<Math.abs(m)){h=m;var j=k/4}else{var j=k/(2*Math.PI)*Math.asin(m/h)}if(i<1){return -0.5*(h*Math.pow(2,10*(i-=1))*Math.sin((i*l-j)*(2*Math.PI)/k))+f}return 
h*Math.pow(2,-10*(i-=1))*Math.sin((i*l-j)*(2*Math.PI)/k)*0.5+m+f},easeInBack:function(g,h,f,k,j,i){if(i==undefined){i=1.70158}return k*(h/=j)*h*((i+1)*h-i)+f},easeOutBack:function(g,h,f,k,j,i){if(i==undefined){i=1.70158}return k*((h=h/j-1)*h*((i+1)*h+i)+1)+f},easeInOutBack:function(g,h,f,k,j,i){if(i==undefined){i=1.70158}if((h/=j/2)<1){return k/2*(h*h*(((i*=(1.525))+1)*h-i))+f}return k/2*((h-=2)*h*(((i*=(1.525))+1)*h+i)+2)+f},easeInBounce:function(g,h,f,j,i){return j-d.easing.easeOutBounce(g,i-h,0,j,i)+f},easeOutBounce:function(g,h,f,j,i){if((h/=i)<(1/2.75)){return j*(7.5625*h*h)+f}else{if(h<(2/2.75)){return j*(7.5625*(h-=(1.5/2.75))*h+0.75)+f}else{if(h<(2.5/2.75)){return j*(7.5625*(h-=(2.25/2.75))*h+0.9375)+f}else{return j*(7.5625*(h-=(2.625/2.75))*h+0.984375)+f}}}},easeInOutBounce:function(g,h,f,j,i){if(h<i/2){return d.easing.easeInBounce(g,h*2,0,j,i)*0.5+f}return d.easing.easeOutBounce(g,h*2-i,0,j,i)*0.5+j*0.5+f}})})(jQuery);;/*
  * jQuery UI Effects Blind 1.7.2
  *
- * Copyright (c) 2009 AUTHORS.txt (http://jqueryui.com/about)
+ * Copyright (c) 2010 AUTHORS.txt (http://jqueryui.com/about)
  * Dual licensed under the MIT (MIT-LICENSE.txt)
  * and GPL (GPL-LICENSE.txt) licenses.
  *
@@ -166,7 +166,7 @@
 (function(a){a.effects.blind=function(b){return this.queue(function(){var d=a(this),c=["position","top","left"];var h=a.effects.setMode(d,b.options.mode||"hide");var g=b.options.direction||"vertical";a.effects.save(d,c);d.show();var j=a.effects.createWrapper(d).css({overflow:"hidden"});var e=(g=="vertical")?"height":"width";var i=(g=="vertical")?j.height():j.width();if(h=="show"){j.css(e,0)}var f={};f[e]=h=="show"?i:0;j.animate(f,b.duration,b.options.easing,function(){if(h=="hide"){d.hide()}a.effects.restore(d,c);a.effects.removeWrapper(d);if(b.callback){b.callback.apply(d[0],arguments)}d.dequeue()})})}})(jQuery);;/*
  * jQuery UI Effects Bounce 1.7.2
  *
- * Copyright (c) 2009 AUTHORS.txt (http://jqueryui.com/about)
+ * Copyright (c) 2010 AUTHORS.txt (http://jqueryui.com/about)
  * Dual licensed under the MIT (MIT-LICENSE.txt)
  * and GPL (GPL-LICENSE.txt) licenses.
  *
@@ -178,7 +178,7 @@
 (function(a){a.effects.bounce=function(b){return this.queue(function(){var e=a(this),l=["position","top","left"];var k=a.effects.setMode(e,b.options.mode||"effect");var n=b.options.direction||"up";var c=b.options.distance||20;var d=b.options.times||5;var g=b.duration||250;if(/show|hide/.test(k)){l.push("opacity")}a.effects.save(e,l);e.show();a.effects.createWrapper(e);var f=(n=="up"||n=="down")?"top":"left";var p=(n=="up"||n=="left")?"pos":"neg";var c=b.options.distance||(f=="top"?e.outerHeight({margin:true})/3:e.outerWidth({margin:true})/3);if(k=="show"){e.css("opacity",0).css(f,p=="pos"?-c:c)}if(k=="hide"){c=c/(d*2)}if(k!="hide"){d--}if(k=="show"){var h={opacity:1};h[f]=(p=="pos"?"+=":"-=")+c;e.animate(h,g/2,b.options.easing);c=c/2;d--}for(var j=0;j<d;j++){var o={},m={};o[f]=(p=="pos"?"-=":"+=")+c;m[f]=(p=="pos"?"+=":"-=")+c;e.animate(o,g/2,b.options.easing).animate(m,g/2,b.options.easing);c=(k=="hide")?c*2:c/2}if(k=="hide"){var h={opacity:0};h[f]=(p=="pos"?"-=":"+=")+c;e.animate(h,g/2,b.options.easing,function(){e.hide();a.effects.restore(e,l);a.effects.removeWrapper(e);if(b.callback){b.callback.apply(this,arguments)}})}else{var o={},m={};o[f]=(p=="pos"?"-=":"+=")+c;m[f]=(p=="pos"?"+=":"-=")+c;e.animate(o,g/2,b.options.easing).animate(m,g/2,b.options.easing,function(){a.effects.restore(e,l);a.effects.removeWrapper(e);if(b.callback){b.callback.apply(this,arguments)}})}e.queue("fx",function(){e.dequeue()});e.dequeue()})}})(jQuery);;/*
  * jQuery UI Effects Clip 1.7.2
  *
- * Copyright (c) 2009 AUTHORS.txt (http://jqueryui.com/about)
+ * Copyright (c) 2010 AUTHORS.txt (http://jqueryui.com/about)
  * Dual licensed under the MIT (MIT-LICENSE.txt)
  * and GPL (GPL-LICENSE.txt) licenses.
  *
@@ -190,7 +190,7 @@
 (function(a){a.effects.clip=function(b){return this.queue(function(){var f=a(this),j=["position","top","left","height","width"];var i=a.effects.setMode(f,b.options.mode||"hide");var k=b.options.direction||"vertical";a.effects.save(f,j);f.show();var c=a.effects.createWrapper(f).css({overflow:"hidden"});var e=f[0].tagName=="IMG"?c:f;var g={size:(k=="vertical")?"height":"width",position:(k=="vertical")?"top":"left"};var d=(k=="vertical")?e.height():e.width();if(i=="show"){e.css(g.size,0);e.css(g.position,d/2)}var h={};h[g.size]=i=="show"?d:0;h[g.position]=i=="show"?0:d/2;e.animate(h,{queue:false,duration:b.duration,easing:b.options.easing,complete:function(){if(i=="hide"){f.hide()}a.effects.restore(f,j);a.effects.removeWrapper(f);if(b.callback){b.callback.apply(f[0],arguments)}f.dequeue()}})})}})(jQuery);;/*
  * jQuery UI Effects Drop 1.7.2
  *
- * Copyright (c) 2009 AUTHORS.txt (http://jqueryui.com/about)
+ * Copyright (c) 2010 AUTHORS.txt (http://jqueryui.com/about)
  * Dual licensed under the MIT (MIT-LICENSE.txt)
  * and GPL (GPL-LICENSE.txt) licenses.
  *
@@ -202,7 +202,7 @@
 (function(a){a.effects.drop=function(b){return this.queue(function(){var e=a(this),d=["position","top","left","opacity"];var i=a.effects.setMode(e,b.options.mode||"hide");var h=b.options.direction||"left";a.effects.save(e,d);e.show();a.effects.createWrapper(e);var f=(h=="up"||h=="down")?"top":"left";var c=(h=="up"||h=="left")?"pos":"neg";var j=b.options.distance||(f=="top"?e.outerHeight({margin:true})/2:e.outerWidth({margin:true})/2);if(i=="show"){e.css("opacity",0).css(f,c=="pos"?-j:j)}var g={opacity:i=="show"?1:0};g[f]=(i=="show"?(c=="pos"?"+=":"-="):(c=="pos"?"-=":"+="))+j;e.animate(g,{queue:false,duration:b.duration,easing:b.options.easing,complete:function(){if(i=="hide"){e.hide()}a.effects.restore(e,d);a.effects.removeWrapper(e);if(b.callback){b.callback.apply(this,arguments)}e.dequeue()}})})}})(jQuery);;/*
  * jQuery UI Effects Explode 1.7.2
  *
- * Copyright (c) 2009 AUTHORS.txt (http://jqueryui.com/about)
+ * Copyright (c) 2010 AUTHORS.txt (http://jqueryui.com/about)
  * Dual licensed under the MIT (MIT-LICENSE.txt)
  * and GPL (GPL-LICENSE.txt) licenses.
  *
@@ -214,7 +214,7 @@
 (function(a){a.effects.explode=function(b){return this.queue(function(){var k=b.options.pieces?Math.round(Math.sqrt(b.options.pieces)):3;var e=b.options.pieces?Math.round(Math.sqrt(b.options.pieces)):3;b.options.mode=b.options.mode=="toggle"?(a(this).is(":visible")?"hide":"show"):b.options.mode;var h=a(this).show().css("visibility","hidden");var l=h.offset();l.top-=parseInt(h.css("marginTop"),10)||0;l.left-=parseInt(h.css("marginLeft"),10)||0;var g=h.outerWidth(true);var c=h.outerHeight(true);for(var f=0;f<k;f++){for(var d=0;d<e;d++){h.clone().appendTo("body").wrap("<div></div>").css({position:"absolute",visibility:"visible",left:-d*(g/e),top:-f*(c/k)}).parent().addClass("ui-effects-explode").css({position:"absolute",overflow:"hidden",width:g/e,height:c/k,left:l.left+d*(g/e)+(b.options.mode=="show"?(d-Math.floor(e/2))*(g/e):0),top:l.top+f*(c/k)+(b.options.mode=="show"?(f-Math.floor(k/2))*(c/k):0),opacity:b.options.mode=="show"?0:1}).animate({left:l.left+d*(g/e)+(b.options.mode=="show"?0:(d-Math.floor(e/2))*(g/e)),top:l.top+f*(c/k)+(b.options.mode=="show"?0:(f-Math.floor(k/2))*(c/k)),opacity:b.options.mode=="show"?1:0},b.duration||500)}}setTimeout(function(){b.options.mode=="show"?h.css({visibility:"visible"}):h.css({visibility:"visible"}).hide();if(b.callback){b.callback.apply(h[0])}h.dequeue();a("div.ui-effects-explode").remove()},b.duration||500)})}})(jQuery);;/*
  * jQuery UI Effects Fold 1.7.2
  *
- * Copyright (c) 2009 AUTHORS.txt (http://jqueryui.com/about)
+ * Copyright (c) 2010 AUTHORS.txt (http://jqueryui.com/about)
  * Dual licensed under the MIT (MIT-LICENSE.txt)
  * and GPL (GPL-LICENSE.txt) licenses.
  *
@@ -226,7 +226,7 @@
 (function(a){a.effects.fold=function(b){return this.queue(function(){var e=a(this),k=["position","top","left"];var h=a.effects.setMode(e,b.options.mode||"hide");var o=b.options.size||15;var n=!(!b.options.horizFirst);var g=b.duration?b.duration/2:a.fx.speeds._default/2;a.effects.save(e,k);e.show();var d=a.effects.createWrapper(e).css({overflow:"hidden"});var i=((h=="show")!=n);var f=i?["width","height"]:["height","width"];var c=i?[d.width(),d.height()]:[d.height(),d.width()];var j=/([0-9]+)%/.exec(o);if(j){o=parseInt(j[1],10)/100*c[h=="hide"?0:1]}if(h=="show"){d.css(n?{height:0,width:o}:{height:o,width:0})}var m={},l={};m[f[0]]=h=="show"?c[0]:o;l[f[1]]=h=="show"?c[1]:0;d.animate(m,g,b.options.easing).animate(l,g,b.options.easing,function(){if(h=="hide"){e.hide()}a.effects.restore(e,k);a.effects.removeWrapper(e);if(b.callback){b.callback.apply(e[0],arguments)}e.dequeue()})})}})(jQuery);;/*
  * jQuery UI Effects Highlight 1.7.2
  *
- * Copyright (c) 2009 AUTHORS.txt (http://jqueryui.com/about)
+ * Copyright (c) 2010 AUTHORS.txt (http://jqueryui.com/about)
  * Dual licensed under the MIT (MIT-LICENSE.txt)
  * and GPL (GPL-LICENSE.txt) licenses.
  *
@@ -238,7 +238,7 @@
 (function(a){a.effects.highlight=function(b){return this.queue(function(){var e=a(this),d=["backgroundImage","backgroundColor","opacity"];var h=a.effects.setMode(e,b.options.mode||"show");var c=b.options.color||"#ffff99";var g=e.css("backgroundColor");a.effects.save(e,d);e.show();e.css({backgroundImage:"none",backgroundColor:c});var f={backgroundColor:g};if(h=="hide"){f.opacity=0}e.animate(f,{queue:false,duration:b.duration,easing:b.options.easing,complete:function(){if(h=="hide"){e.hide()}a.effects.restore(e,d);if(h=="show"&&a.browser.msie){this.style.removeAttribute("filter")}if(b.callback){b.callback.apply(this,arguments)}e.dequeue()}})})}})(jQuery);;/*
  * jQuery UI Effects Pulsate 1.7.2
  *
- * Copyright (c) 2009 AUTHORS.txt (http://jqueryui.com/about)
+ * Copyright (c) 2010 AUTHORS.txt (http://jqueryui.com/about)
  * Dual licensed under the MIT (MIT-LICENSE.txt)
  * and GPL (GPL-LICENSE.txt) licenses.
  *
@@ -250,7 +250,7 @@
 (function(a){a.effects.pulsate=function(b){return this.queue(function(){var d=a(this);var g=a.effects.setMode(d,b.options.mode||"show");var f=b.options.times||5;var e=b.duration?b.duration/2:a.fx.speeds._default/2;if(g=="hide"){f--}if(d.is(":hidden")){d.css("opacity",0);d.show();d.animate({opacity:1},e,b.options.easing);f=f-2}for(var c=0;c<f;c++){d.animate({opacity:0},e,b.options.easing).animate({opacity:1},e,b.options.easing)}if(g=="hide"){d.animate({opacity:0},e,b.options.easing,function(){d.hide();if(b.callback){b.callback.apply(this,arguments)}})}else{d.animate({opacity:0},e,b.options.easing).animate({opacity:1},e,b.options.easing,function(){if(b.callback){b.callback.apply(this,arguments)}})}d.queue("fx",function(){d.dequeue()});d.dequeue()})}})(jQuery);;/*
  * jQuery UI Effects Scale 1.7.2
  *
- * Copyright (c) 2009 AUTHORS.txt (http://jqueryui.com/about)
+ * Copyright (c) 2010 AUTHORS.txt (http://jqueryui.com/about)
  * Dual licensed under the MIT (MIT-LICENSE.txt)
  * and GPL (GPL-LICENSE.txt) licenses.
  *
@@ -262,7 +262,7 @@
 (function(a){a.effects.puff=function(b){return this.queue(function(){var f=a(this);var c=a.extend(true,{},b.options);var h=a.effects.setMode(f,b.options.mode||"hide");var g=parseInt(b.options.percent,10)||150;c.fade=true;var e={height:f.height(),width:f.width()};var d=g/100;f.from=(h=="hide")?e:{height:e.height*d,width:e.width*d};c.from=f.from;c.percent=(h=="hide")?g:100;c.mode=h;f.effect("scale",c,b.duration,b.callback);f.dequeue()})};a.effects.scale=function(b){return this.queue(function(){var g=a(this);var d=a.extend(true,{},b.options);var j=a.effects.setMode(g,b.options.mode||"effect");var h=parseInt(b.options.percent,10)||(parseInt(b.options.percent,10)==0?0:(j=="hide"?0:100));var i=b.options.direction||"both";var c=b.options.origin;if(j!="effect"){d.origin=c||["middle","center"];d.restore=true}var f={height:g.height(),width:g.width()};g.from=b.options.from||(j=="show"?{height:0,width:0}:f);var e={y:i!="horizontal"?(h/100):1,x:i!="vertical"?(h/100):1};g.to={height:f.height*e.y,width:f.width*e.x};if(b.options.fade){if(j=="show"){g.from.opacity=0;g.to.opacity=1}if(j=="hide"){g.from.opacity=1;g.to.opacity=0}}d.from=g.from;d.to=g.to;d.mode=j;g.effect("size",d,b.duration,b.callback);g.dequeue()})};a.effects.size=function(b){return this.queue(function(){var c=a(this),n=["position","top","left","width","height","overflow","opacity"];var m=["position","top","left","overflow","opacity"];var j=["width","height","overflow"];var p=["fontSize"];var k=["borderTopWidth","borderBottomWidth","paddingTop","paddingBottom"];var f=["borderLeftWidth","borderRightWidth","paddingLeft","paddingRight"];var g=a.effects.setMode(c,b.options.mode||"effect");var i=b.options.restore||false;var e=b.options.scale||"both";var o=b.options.origin;var d={height:c.height(),width:c.width()};c.from=b.options.from||d;c.to=b.options.to||d;if(o){var h=a.effects.getBaseline(o,d);c.from.top=(d.height-c.from.height)*h.y;c.from.left=(d.width-c.from.width)*h.x;c.to.top=(d.height-c.to.height)*h.y;c.to.left=(d.width-c.to.width)*h.x}var l={from:{y:c.from.height/d.height,x:c.from.width/d.width},to:{y:c.to.height/d.height,x:c.to.width/d.width}};if(e=="box"||e=="both"){if(l.from.y!=l.to.y){n=n.concat(k);c.from=a.effects.setTransition(c,k,l.from.y,c.from);c.to=a.effects.setTransition(c,k,l.to.y,c.to)}if(l.from.x!=l.to.x){n=n.concat(f);c.from=a.effects.setTransition(c,f,l.from.x,c.from);c.to=a.effects.setTransition(c,f,l.to.x,c.to)}}if(e=="content"||e=="both"){if(l.from.y!=l.to.y){n=n.concat(p);c.from=a.effects.setTransition(c,p,l.from.y,c.from);c.to=a.effects.setTransition(c,p,l.to.y,c.to)}}a.effects.save(c,i?n:m);c.show();a.effects.createWrapper(c);c.css("overflow","hidden").css(c.from);if(e=="content"||e=="both"){k=k.concat(["marginTop","marginBottom"]).concat(p);f=f.concat(["marginLeft","marginRight"]);j=n.concat(k).concat(f);c.find("*[width]").each(function(){child=a(this);if(i){a.effects.save(child,j)}var 
q={height:child.height(),width:child.width()};child.from={height:q.height*l.from.y,width:q.width*l.from.x};child.to={height:q.height*l.to.y,width:q.width*l.to.x};if(l.from.y!=l.to.y){child.from=a.effects.setTransition(child,k,l.from.y,child.from);child.to=a.effects.setTransition(child,k,l.to.y,child.to)}if(l.from.x!=l.to.x){child.from=a.effects.setTransition(child,f,l.from.x,child.from);child.to=a.effects.setTransition(child,f,l.to.x,child.to)}child.css(child.from);child.animate(child.to,b.duration,b.options.easing,function(){if(i){a.effects.restore(child,j)}})})}c.animate(c.to,{queue:false,duration:b.duration,easing:b.options.easing,complete:function(){if(g=="hide"){c.hide()}a.effects.restore(c,i?n:m);a.effects.removeWrapper(c);if(b.callback){b.callback.apply(this,arguments)}c.dequeue()}})})}})(jQuery);;/*
  * jQuery UI Effects Shake 1.7.2
  *
- * Copyright (c) 2009 AUTHORS.txt (http://jqueryui.com/about)
+ * Copyright (c) 2010 AUTHORS.txt (http://jqueryui.com/about)
  * Dual licensed under the MIT (MIT-LICENSE.txt)
  * and GPL (GPL-LICENSE.txt) licenses.
  *
@@ -274,7 +274,7 @@
 (function(a){a.effects.shake=function(b){return this.queue(function(){var e=a(this),l=["position","top","left"];var k=a.effects.setMode(e,b.options.mode||"effect");var n=b.options.direction||"left";var c=b.options.distance||20;var d=b.options.times||3;var g=b.duration||b.options.duration||140;a.effects.save(e,l);e.show();a.effects.createWrapper(e);var f=(n=="up"||n=="down")?"top":"left";var p=(n=="up"||n=="left")?"pos":"neg";var h={},o={},m={};h[f]=(p=="pos"?"-=":"+=")+c;o[f]=(p=="pos"?"+=":"-=")+c*2;m[f]=(p=="pos"?"-=":"+=")+c*2;e.animate(h,g,b.options.easing);for(var j=1;j<d;j++){e.animate(o,g,b.options.easing).animate(m,g,b.options.easing)}e.animate(o,g,b.options.easing).animate(h,g/2,b.options.easing,function(){a.effects.restore(e,l);a.effects.removeWrapper(e);if(b.callback){b.callback.apply(this,arguments)}});e.queue("fx",function(){e.dequeue()});e.dequeue()})}})(jQuery);;/*
  * jQuery UI Effects Slide 1.7.2
  *
- * Copyright (c) 2009 AUTHORS.txt (http://jqueryui.com/about)
+ * Copyright (c) 2010 AUTHORS.txt (http://jqueryui.com/about)
  * Dual licensed under the MIT (MIT-LICENSE.txt)
  * and GPL (GPL-LICENSE.txt) licenses.
  *
@@ -286,7 +286,7 @@
 (function(a){a.effects.slide=function(b){return this.queue(function(){var e=a(this),d=["position","top","left"];var i=a.effects.setMode(e,b.options.mode||"show");var h=b.options.direction||"left";a.effects.save(e,d);e.show();a.effects.createWrapper(e).css({overflow:"hidden"});var f=(h=="up"||h=="down")?"top":"left";var c=(h=="up"||h=="left")?"pos":"neg";var j=b.options.distance||(f=="top"?e.outerHeight({margin:true}):e.outerWidth({margin:true}));if(i=="show"){e.css(f,c=="pos"?-j:j)}var g={};g[f]=(i=="show"?(c=="pos"?"+=":"-="):(c=="pos"?"-=":"+="))+j;e.animate(g,{queue:false,duration:b.duration,easing:b.options.easing,complete:function(){if(i=="hide"){e.hide()}a.effects.restore(e,d);a.effects.removeWrapper(e);if(b.callback){b.callback.apply(this,arguments)}e.dequeue()}})})}})(jQuery);;/*
  * jQuery UI Effects Transfer 1.7.2
  *
- * Copyright (c) 2009 AUTHORS.txt (http://jqueryui.com/about)
+ * Copyright (c) 2010 AUTHORS.txt (http://jqueryui.com/about)
  * Dual licensed under the MIT (MIT-LICENSE.txt)
  * and GPL (GPL-LICENSE.txt) licenses.
  *
Binary file web/data/pdf_icon.gif has changed
--- a/web/data/ui.all.css	Thu May 06 08:24:46 2010 +0200
+++ b/web/data/ui.all.css	Mon Jul 19 15:36:16 2010 +0200
@@ -1,6 +1,6 @@
 /*
 * jQuery UI CSS Framework
-* Copyright (c) 2009 AUTHORS.txt (http://jqueryui.com/about)
+* Copyright (c) 2010 AUTHORS.txt (http://jqueryui.com/about)
 * Dual licensed under the MIT (MIT-LICENSE.txt) and GPL (GPL-LICENSE.txt) licenses.
 */
 
@@ -38,7 +38,7 @@
 
 /*
 * jQuery UI CSS Framework
-* Copyright (c) 2009 AUTHORS.txt (http://jqueryui.com/about)
+* Copyright (c) 2010 AUTHORS.txt (http://jqueryui.com/about)
 * Dual licensed under the MIT (MIT-LICENSE.txt) and GPL (GPL-LICENSE.txt) licenses.
 * To view and modify this theme, visit http://jqueryui.com/themeroller/?ffDefault=Verdana,Arial,sans-serif&fwDefault=normal&fsDefault=1.1em&cornerRadius=4px&bgColorHeader=cccccc&bgTextureHeader=03_highlight_soft.png&bgImgOpacityHeader=75&borderColorHeader=aaaaaa&fcHeader=222222&iconColorHeader=222222&bgColorContent=ffffff&bgTextureContent=01_flat.png&bgImgOpacityContent=75&borderColorContent=aaaaaa&fcContent=222222&iconColorContent=222222&bgColorDefault=e6e6e6&bgTextureDefault=02_glass.png&bgImgOpacityDefault=75&borderColorDefault=d3d3d3&fcDefault=555555&iconColorDefault=888888&bgColorHover=dadada&bgTextureHover=02_glass.png&bgImgOpacityHover=75&borderColorHover=999999&fcHover=212121&iconColorHover=454545&bgColorActive=ffffff&bgTextureActive=02_glass.png&bgImgOpacityActive=65&borderColorActive=aaaaaa&fcActive=212121&iconColorActive=454545&bgColorHighlight=fbf9ee&bgTextureHighlight=02_glass.png&bgImgOpacityHighlight=55&borderColorHighlight=fcefa1&fcHighlight=363636&iconColorHighlight=2e83ff&bgColorError=fef1ec&bgTextureError=02_glass.png&bgImgOpacityError=95&borderColorError=cd0a0a&fcError=cd0a0a&iconColorError=cd0a0a&bgColorOverlay=aaaaaa&bgTextureOverlay=01_flat.png&bgImgOpacityOverlay=0&opacityOverlay=30&bgColorShadow=aaaaaa&bgTextureShadow=01_flat.png&bgImgOpacityShadow=0&opacityShadow=30&thicknessShadow=8px&offsetTopShadow=-8px&offsetLeftShadow=-8px&cornerRadiusShadow=8px
 */
--- a/web/data/ui.core.js	Thu May 06 08:24:46 2010 +0200
+++ b/web/data/ui.core.js	Mon Jul 19 15:36:16 2010 +0200
@@ -1,7 +1,7 @@
 /*
  * jQuery UI @VERSION
  *
- * Copyright (c) 2008 Paul Bakaus (ui.jquery.com)
+ * Copyright (c) 2010 Paul Bakaus (ui.jquery.com)
  * Dual licensed under the MIT (MIT-LICENSE.txt)
  * and GPL (GPL-LICENSE.txt) licenses.
  *
--- a/web/data/ui.slider.js	Thu May 06 08:24:46 2010 +0200
+++ b/web/data/ui.slider.js	Mon Jul 19 15:36:16 2010 +0200
@@ -1,7 +1,7 @@
 /*
  * jQuery UI 1.7.1
  *
- * Copyright (c) 2009 AUTHORS.txt (http://jqueryui.com/about)
+ * Copyright (c) 2010 AUTHORS.txt (http://jqueryui.com/about)
  * Dual licensed under the MIT (MIT-LICENSE.txt)
  * and GPL (GPL-LICENSE.txt) licenses.
  *
@@ -9,7 +9,7 @@
  */
jQuery.ui||(function(c){var i=c.fn.remove,d=c.browser.mozilla&&(parseFloat(c.browser.version)<1.9);c.ui={version:"1.7.1",plugin:{add:function(k,l,n){var m=c.ui[k].prototype;for(var j in n){m.plugins[j]=m.plugins[j]||[];m.plugins[j].push([l,n[j]])}},call:function(j,l,k){var n=j.plugins[l];if(!n||!j.element[0].parentNode){return}for(var m=0;m<n.length;m++){if(j.options[n[m][0]]){n[m][1].apply(j.element,k)}}}},contains:function(k,j){return document.compareDocumentPosition?k.compareDocumentPosition(j)&16:k!==j&&k.contains(j)},hasScroll:function(m,k){if(c(m).css("overflow")=="hidden"){return false}var j=(k&&k=="left")?"scrollLeft":"scrollTop",l=false;if(m[j]>0){return true}m[j]=1;l=(m[j]>0);m[j]=0;return l},isOverAxis:function(k,j,l){return(k>j)&&(k<(j+l))},isOver:function(o,k,n,m,j,l){return c.ui.isOverAxis(o,n,j)&&c.ui.isOverAxis(k,m,l)},keyCode:{BACKSPACE:8,CAPS_LOCK:20,COMMA:188,CONTROL:17,DELETE:46,DOWN:40,END:35,ENTER:13,ESCAPE:27,HOME:36,INSERT:45,LEFT:37,NUMPAD_ADD:107,NUMPAD_DECIMAL:110,NUMPAD_DIVIDE:111,NUMPAD_ENTER:108,NUMPAD_MULTIPLY:106,NUMPAD_SUBTRACT:109,PAGE_DOWN:34,PAGE_UP:33,PERIOD:190,RIGHT:39,SHIFT:16,SPACE:32,TAB:9,UP:38}};if(d){var f=c.attr,e=c.fn.removeAttr,h="http://www.w3.org/2005/07/aaa",a=/^aria-/,b=/^wairole:/;c.attr=function(k,j,l){var m=l!==undefined;return(j=="role"?(m?f.call(this,k,j,"wairole:"+l):(f.apply(this,arguments)||"").replace(b,"")):(a.test(j)?(m?k.setAttributeNS(h,j.replace(a,"aaa:"),l):f.call(this,k,j.replace(a,"aaa:"))):f.apply(this,arguments)))};c.fn.removeAttr=function(j){return(a.test(j)?this.each(function(){this.removeAttributeNS(h,j.replace(a,""))}):e.call(this,j))}}c.fn.extend({remove:function(){c("*",this).add(this).each(function(){c(this).triggerHandler("remove")});return i.apply(this,arguments)},enableSelection:function(){return this.attr("unselectable","off").css("MozUserSelect","").unbind("selectstart.ui")},disableSelection:function(){return this.attr("unselectable","on").css("MozUserSelect","none").bind("selectstart.ui",function(){return false})},scrollParent:function(){var j;if((c.browser.msie&&(/(static|relative)/).test(this.css("position")))||(/absolute/).test(this.css("position"))){j=this.parents().filter(function(){return(/(relative|absolute|fixed)/).test(c.curCSS(this,"position",1))&&(/(auto|scroll)/).test(c.curCSS(this,"overflow",1)+c.curCSS(this,"overflow-y",1)+c.curCSS(this,"overflow-x",1))}).eq(0)}else{j=this.parents().filter(function(){return(/(auto|scroll)/).test(c.curCSS(this,"overflow",1)+c.curCSS(this,"overflow-y",1)+c.curCSS(this,"overflow-x",1))}).eq(0)}return(/fixed/).test(this.css("position"))||!j.length?c(document):j}});c.extend(c.expr[":"],{data:function(l,k,j){return !!c.data(l,j[3])},focusable:function(k){var l=k.nodeName.toLowerCase(),j=c.attr(k,"tabindex");return(/input|select|textarea|button|object/.test(l)?!k.disabled:"a"==l||"area"==l?k.href||!isNaN(j):!isNaN(j))&&!c(k)["area"==l?"parents":"closest"](":hidden").length},tabbable:function(k){var j=c.attr(k,"tabindex");return(isNaN(j)||j>=0)&&c(k).is(":focusable")}});function g(m,n,o,l){function k(q){var p=c[m][n][q]||[];return(typeof p=="string"?p.split(/,?\s+/):p)}var j=k("getter");if(l.length==1&&typeof l[0]=="string"){j=j.concat(k("getterSetter"))}return(c.inArray(o,j)!=-1)}c.widget=function(k,j){var l=k.split(".")[0];k=k.split(".")[1];c.fn[k]=function(p){var n=(typeof p=="string"),o=Array.prototype.slice.call(arguments,1);if(n&&p.substring(0,1)=="_"){return this}if(n&&g(l,k,p,o)){var m=c.data(this[0],k);return(m?m[p].apply(m,o):undefined)}return 
this.each(function(){var q=c.data(this,k);(!q&&!n&&c.data(this,k,new c[l][k](this,p))._init());(q&&n&&c.isFunction(q[p])&&q[p].apply(q,o))})};c[l]=c[l]||{};c[l][k]=function(o,n){var m=this;this.namespace=l;this.widgetName=k;this.widgetEventPrefix=c[l][k].eventPrefix||k;this.widgetBaseClass=l+"-"+k;this.options=c.extend({},c.widget.defaults,c[l][k].defaults,c.metadata&&c.metadata.get(o)[k],n);this.element=c(o).bind("setData."+k,function(q,p,r){if(q.target==o){return m._setData(p,r)}}).bind("getData."+k,function(q,p){if(q.target==o){return m._getData(p)}}).bind("remove",function(){return m.destroy()})};c[l][k].prototype=c.extend({},c.widget.prototype,j);c[l][k].getterSetter="option"};c.widget.prototype={_init:function(){},destroy:function(){this.element.removeData(this.widgetName).removeClass(this.widgetBaseClass+"-disabled "+this.namespace+"-state-disabled").removeAttr("aria-disabled")},option:function(l,m){var k=l,j=this;if(typeof l=="string"){if(m===undefined){return this._getData(l)}k={};k[l]=m}c.each(k,function(n,o){j._setData(n,o)})},_getData:function(j){return this.options[j]},_setData:function(j,k){this.options[j]=k;if(j=="disabled"){this.element[k?"addClass":"removeClass"](this.widgetBaseClass+"-disabled "+this.namespace+"-state-disabled").attr("aria-disabled",k)}},enable:function(){this._setData("disabled",false)},disable:function(){this._setData("disabled",true)},_trigger:function(l,m,n){var p=this.options[l],j=(l==this.widgetEventPrefix?l:this.widgetEventPrefix+l);m=c.Event(m);m.type=j;if(m.originalEvent){for(var k=c.event.props.length,o;k;){o=c.event.props[--k];m[o]=m.originalEvent[o]}}this.element.trigger(m,n);return !(c.isFunction(p)&&p.call(this.element[0],m,n)===false||m.isDefaultPrevented())}};c.widget.defaults={disabled:false};c.ui.mouse={_mouseInit:function(){var j=this;this.element.bind("mousedown."+this.widgetName,function(k){return j._mouseDown(k)}).bind("click."+this.widgetName,function(k){if(j._preventClickEvent){j._preventClickEvent=false;k.stopImmediatePropagation();return false}});if(c.browser.msie){this._mouseUnselectable=this.element.attr("unselectable");this.element.attr("unselectable","on")}this.started=false},_mouseDestroy:function(){this.element.unbind("."+this.widgetName);(c.browser.msie&&this.element.attr("unselectable",this._mouseUnselectable))},_mouseDown:function(l){l.originalEvent=l.originalEvent||{};if(l.originalEvent.mouseHandled){return}(this._mouseStarted&&this._mouseUp(l));this._mouseDownEvent=l;var k=this,m=(l.which==1),j=(typeof this.options.cancel=="string"?c(l.target).parents().add(l.target).filter(this.options.cancel).length:false);if(!m||j||!this._mouseCapture(l)){return true}this.mouseDelayMet=!this.options.delay;if(!this.mouseDelayMet){this._mouseDelayTimer=setTimeout(function(){k.mouseDelayMet=true},this.options.delay)}if(this._mouseDistanceMet(l)&&this._mouseDelayMet(l)){this._mouseStarted=(this._mouseStart(l)!==false);if(!this._mouseStarted){l.preventDefault();return true}}this._mouseMoveDelegate=function(n){return k._mouseMove(n)};this._mouseUpDelegate=function(n){return k._mouseUp(n)};c(document).bind("mousemove."+this.widgetName,this._mouseMoveDelegate).bind("mouseup."+this.widgetName,this._mouseUpDelegate);(c.browser.safari||l.preventDefault());l.originalEvent.mouseHandled=true;return true},_mouseMove:function(j){if(c.browser.msie&&!j.button){return this._mouseUp(j)}if(this._mouseStarted){this._mouseDrag(j);return 
j.preventDefault()}if(this._mouseDistanceMet(j)&&this._mouseDelayMet(j)){this._mouseStarted=(this._mouseStart(this._mouseDownEvent,j)!==false);(this._mouseStarted?this._mouseDrag(j):this._mouseUp(j))}return !this._mouseStarted},_mouseUp:function(j){c(document).unbind("mousemove."+this.widgetName,this._mouseMoveDelegate).unbind("mouseup."+this.widgetName,this._mouseUpDelegate);if(this._mouseStarted){this._mouseStarted=false;this._preventClickEvent=(j.target==this._mouseDownEvent.target);this._mouseStop(j)}return false},_mouseDistanceMet:function(j){return(Math.max(Math.abs(this._mouseDownEvent.pageX-j.pageX),Math.abs(this._mouseDownEvent.pageY-j.pageY))>=this.options.distance)},_mouseDelayMet:function(j){return this.mouseDelayMet},_mouseStart:function(j){},_mouseDrag:function(j){},_mouseStop:function(j){},_mouseCapture:function(j){return true}};c.ui.mouse.defaults={cancel:null,distance:1,delay:0}})(jQuery);;/*
  * jQuery UI Draggable 1.7.1
  *
- * Copyright (c) 2009 AUTHORS.txt (http://jqueryui.com/about)
+ * Copyright (c) 2010 AUTHORS.txt (http://jqueryui.com/about)
  * Dual licensed under the MIT (MIT-LICENSE.txt)
  * and GPL (GPL-LICENSE.txt) licenses.
  *
@@ -20,7 +20,7 @@
  */
(function(a){a.widget("ui.draggable",a.extend({},a.ui.mouse,{_init:function(){if(this.options.helper=="original"&&!(/^(?:r|a|f)/).test(this.element.css("position"))){this.element[0].style.position="relative"}(this.options.addClasses&&this.element.addClass("ui-draggable"));(this.options.disabled&&this.element.addClass("ui-draggable-disabled"));this._mouseInit()},destroy:function(){if(!this.element.data("draggable")){return}this.element.removeData("draggable").unbind(".draggable").removeClass("ui-draggable ui-draggable-dragging ui-draggable-disabled");this._mouseDestroy()},_mouseCapture:function(b){var c=this.options;if(this.helper||c.disabled||a(b.target).is(".ui-resizable-handle")){return false}this.handle=this._getHandle(b);if(!this.handle){return false}return true},_mouseStart:function(b){var c=this.options;this.helper=this._createHelper(b);this._cacheHelperProportions();if(a.ui.ddmanager){a.ui.ddmanager.current=this}this._cacheMargins();this.cssPosition=this.helper.css("position");this.scrollParent=this.helper.scrollParent();this.offset=this.element.offset();this.offset={top:this.offset.top-this.margins.top,left:this.offset.left-this.margins.left};a.extend(this.offset,{click:{left:b.pageX-this.offset.left,top:b.pageY-this.offset.top},parent:this._getParentOffset(),relative:this._getRelativeOffset()});this.originalPosition=this._generatePosition(b);this.originalPageX=b.pageX;this.originalPageY=b.pageY;if(c.cursorAt){this._adjustOffsetFromHelper(c.cursorAt)}if(c.containment){this._setContainment()}this._trigger("start",b);this._cacheHelperProportions();if(a.ui.ddmanager&&!c.dropBehaviour){a.ui.ddmanager.prepareOffsets(this,b)}this.helper.addClass("ui-draggable-dragging");this._mouseDrag(b,true);return true},_mouseDrag:function(b,d){this.position=this._generatePosition(b);this.positionAbs=this._convertPositionTo("absolute");if(!d){var c=this._uiHash();this._trigger("drag",b,c);this.position=c.position}if(!this.options.axis||this.options.axis!="y"){this.helper[0].style.left=this.position.left+"px"}if(!this.options.axis||this.options.axis!="x"){this.helper[0].style.top=this.position.top+"px"}if(a.ui.ddmanager){a.ui.ddmanager.drag(this,b)}return false},_mouseStop:function(c){var d=false;if(a.ui.ddmanager&&!this.options.dropBehaviour){d=a.ui.ddmanager.drop(this,c)}if(this.dropped){d=this.dropped;this.dropped=false}if((this.options.revert=="invalid"&&!d)||(this.options.revert=="valid"&&d)||this.options.revert===true||(a.isFunction(this.options.revert)&&this.options.revert.call(this.element,d))){var b=this;a(this.helper).animate(this.originalPosition,parseInt(this.options.revertDuration,10),function(){b._trigger("stop",c);b._clear()})}else{this._trigger("stop",c);this._clear()}return false},_getHandle:function(b){var c=!this.options.handle||!a(this.options.handle,this.element).length?true:false;a(this.options.handle,this.element).find("*").andSelf().each(function(){if(this==b.target){c=true}});return c},_createHelper:function(c){var d=this.options;var b=a.isFunction(d.helper)?a(d.helper.apply(this.element[0],[c])):(d.helper=="clone"?this.element.clone():this.element);if(!b.parents("body").length){b.appendTo((d.appendTo=="parent"?this.element[0].parentNode:d.appendTo))}if(b[0]!=this.element[0]&&!(/(fixed|absolute)/).test(b.css("position"))){b.css("position","absolute")}return 
b},_adjustOffsetFromHelper:function(b){if(b.left!=undefined){this.offset.click.left=b.left+this.margins.left}if(b.right!=undefined){this.offset.click.left=this.helperProportions.width-b.right+this.margins.left}if(b.top!=undefined){this.offset.click.top=b.top+this.margins.top}if(b.bottom!=undefined){this.offset.click.top=this.helperProportions.height-b.bottom+this.margins.top}},_getParentOffset:function(){this.offsetParent=this.helper.offsetParent();var b=this.offsetParent.offset();if(this.cssPosition=="absolute"&&this.scrollParent[0]!=document&&a.ui.contains(this.scrollParent[0],this.offsetParent[0])){b.left+=this.scrollParent.scrollLeft();b.top+=this.scrollParent.scrollTop()}if((this.offsetParent[0]==document.body)||(this.offsetParent[0].tagName&&this.offsetParent[0].tagName.toLowerCase()=="html"&&a.browser.msie)){b={top:0,left:0}}return{top:b.top+(parseInt(this.offsetParent.css("borderTopWidth"),10)||0),left:b.left+(parseInt(this.offsetParent.css("borderLeftWidth"),10)||0)}},_getRelativeOffset:function(){if(this.cssPosition=="relative"){var b=this.element.position();return{top:b.top-(parseInt(this.helper.css("top"),10)||0)+this.scrollParent.scrollTop(),left:b.left-(parseInt(this.helper.css("left"),10)||0)+this.scrollParent.scrollLeft()}}else{return{top:0,left:0}}},_cacheMargins:function(){this.margins={left:(parseInt(this.element.css("marginLeft"),10)||0),top:(parseInt(this.element.css("marginTop"),10)||0)}},_cacheHelperProportions:function(){this.helperProportions={width:this.helper.outerWidth(),height:this.helper.outerHeight()}},_setContainment:function(){var e=this.options;if(e.containment=="parent"){e.containment=this.helper[0].parentNode}if(e.containment=="document"||e.containment=="window"){this.containment=[0-this.offset.relative.left-this.offset.parent.left,0-this.offset.relative.top-this.offset.parent.top,a(e.containment=="document"?document:window).width()-this.helperProportions.width-this.margins.left,(a(e.containment=="document"?document:window).height()||document.body.parentNode.scrollHeight)-this.helperProportions.height-this.margins.top]}if(!(/^(document|window|parent)$/).test(e.containment)&&e.containment.constructor!=Array){var c=a(e.containment)[0];if(!c){return}var d=a(e.containment).offset();var b=(a(c).css("overflow")!="hidden");this.containment=[d.left+(parseInt(a(c).css("borderLeftWidth"),10)||0)+(parseInt(a(c).css("paddingLeft"),10)||0)-this.margins.left,d.top+(parseInt(a(c).css("borderTopWidth"),10)||0)+(parseInt(a(c).css("paddingTop"),10)||0)-this.margins.top,d.left+(b?Math.max(c.scrollWidth,c.offsetWidth):c.offsetWidth)-(parseInt(a(c).css("borderLeftWidth"),10)||0)-(parseInt(a(c).css("paddingRight"),10)||0)-this.helperProportions.width-this.margins.left,d.top+(b?Math.max(c.scrollHeight,c.offsetHeight):c.offsetHeight)-(parseInt(a(c).css("borderTopWidth"),10)||0)-(parseInt(a(c).css("paddingBottom"),10)||0)-this.helperProportions.height-this.margins.top]}else{if(e.containment.constructor==Array){this.containment=e.containment}}},_convertPositionTo:function(f,h){if(!h){h=this.position}var c=f=="absolute"?1:-1;var 
e=this.options,b=this.cssPosition=="absolute"&&!(this.scrollParent[0]!=document&&a.ui.contains(this.scrollParent[0],this.offsetParent[0]))?this.offsetParent:this.scrollParent,g=(/(html|body)/i).test(b[0].tagName);return{top:(h.top+this.offset.relative.top*c+this.offset.parent.top*c-(a.browser.safari&&this.cssPosition=="fixed"?0:(this.cssPosition=="fixed"?-this.scrollParent.scrollTop():(g?0:b.scrollTop()))*c)),left:(h.left+this.offset.relative.left*c+this.offset.parent.left*c-(a.browser.safari&&this.cssPosition=="fixed"?0:(this.cssPosition=="fixed"?-this.scrollParent.scrollLeft():g?0:b.scrollLeft())*c))}},_generatePosition:function(e){var h=this.options,b=this.cssPosition=="absolute"&&!(this.scrollParent[0]!=document&&a.ui.contains(this.scrollParent[0],this.offsetParent[0]))?this.offsetParent:this.scrollParent,i=(/(html|body)/i).test(b[0].tagName);if(this.cssPosition=="relative"&&!(this.scrollParent[0]!=document&&this.scrollParent[0]!=this.offsetParent[0])){this.offset.relative=this._getRelativeOffset()}var d=e.pageX;var c=e.pageY;if(this.originalPosition){if(this.containment){if(e.pageX-this.offset.click.left<this.containment[0]){d=this.containment[0]+this.offset.click.left}if(e.pageY-this.offset.click.top<this.containment[1]){c=this.containment[1]+this.offset.click.top}if(e.pageX-this.offset.click.left>this.containment[2]){d=this.containment[2]+this.offset.click.left}if(e.pageY-this.offset.click.top>this.containment[3]){c=this.containment[3]+this.offset.click.top}}if(h.grid){var g=this.originalPageY+Math.round((c-this.originalPageY)/h.grid[1])*h.grid[1];c=this.containment?(!(g-this.offset.click.top<this.containment[1]||g-this.offset.click.top>this.containment[3])?g:(!(g-this.offset.click.top<this.containment[1])?g-h.grid[1]:g+h.grid[1])):g;var f=this.originalPageX+Math.round((d-this.originalPageX)/h.grid[0])*h.grid[0];d=this.containment?(!(f-this.offset.click.left<this.containment[0]||f-this.offset.click.left>this.containment[2])?f:(!(f-this.offset.click.left<this.containment[0])?f-h.grid[0]:f+h.grid[0])):f}}return{top:(c-this.offset.click.top-this.offset.relative.top-this.offset.parent.top+(a.browser.safari&&this.cssPosition=="fixed"?0:(this.cssPosition=="fixed"?-this.scrollParent.scrollTop():(i?0:b.scrollTop())))),left:(d-this.offset.click.left-this.offset.relative.left-this.offset.parent.left+(a.browser.safari&&this.cssPosition=="fixed"?0:(this.cssPosition=="fixed"?-this.scrollParent.scrollLeft():i?0:b.scrollLeft())))}},_clear:function(){this.helper.removeClass("ui-draggable-dragging");if(this.helper[0]!=this.element[0]&&!this.cancelHelperRemoval){this.helper.remove()}this.helper=null;this.cancelHelperRemoval=false},_trigger:function(b,c,d){d=d||this._uiHash();a.ui.plugin.call(this,b,[c,d]);if(b=="drag"){this.positionAbs=this._convertPositionTo("absolute")}return 
a.widget.prototype._trigger.call(this,b,c,d)},plugins:{},_uiHash:function(b){return{helper:this.helper,position:this.position,absolutePosition:this.positionAbs,offset:this.positionAbs}}}));a.extend(a.ui.draggable,{version:"1.7.1",eventPrefix:"drag",defaults:{addClasses:true,appendTo:"parent",axis:false,cancel:":input,option",connectToSortable:false,containment:false,cursor:"auto",cursorAt:false,delay:0,distance:1,grid:false,handle:false,helper:"original",iframeFix:false,opacity:false,refreshPositions:false,revert:false,revertDuration:500,scope:"default",scroll:true,scrollSensitivity:20,scrollSpeed:20,snap:false,snapMode:"both",snapTolerance:20,stack:false,zIndex:false}});a.ui.plugin.add("draggable","connectToSortable",{start:function(c,e){var d=a(this).data("draggable"),f=d.options,b=a.extend({},e,{item:d.element});d.sortables=[];a(f.connectToSortable).each(function(){var g=a.data(this,"sortable");if(g&&!g.options.disabled){d.sortables.push({instance:g,shouldRevert:g.options.revert});g._refreshItems();g._trigger("activate",c,b)}})},stop:function(c,e){var d=a(this).data("draggable"),b=a.extend({},e,{item:d.element});a.each(d.sortables,function(){if(this.instance.isOver){this.instance.isOver=0;d.cancelHelperRemoval=true;this.instance.cancelHelperRemoval=false;if(this.shouldRevert){this.instance.options.revert=true}this.instance._mouseStop(c);this.instance.options.helper=this.instance.options._helper;if(d.options.helper=="original"){this.instance.currentItem.css({top:"auto",left:"auto"})}}else{this.instance.cancelHelperRemoval=false;this.instance._trigger("deactivate",c,b)}})},drag:function(c,f){var e=a(this).data("draggable"),b=this;var d=function(i){var n=this.offset.click.top,m=this.offset.click.left;var g=this.positionAbs.top,k=this.positionAbs.left;var j=i.height,l=i.width;var p=i.top,h=i.left;return a.ui.isOver(g+n,k+m,p,h,j,l)};a.each(e.sortables,function(g){this.instance.positionAbs=e.positionAbs;this.instance.helperProportions=e.helperProportions;this.instance.offset.click=e.offset.click;if(this.instance._intersectsWith(this.instance.containerCache)){if(!this.instance.isOver){this.instance.isOver=1;this.instance.currentItem=a(b).clone().appendTo(this.instance.element).data("sortable-item",true);this.instance.options._helper=this.instance.options.helper;this.instance.options.helper=function(){return f.helper[0]};c.target=this.instance.currentItem[0];this.instance._mouseCapture(c,true);this.instance._mouseStart(c,true,true);this.instance.offset.click.top=e.offset.click.top;this.instance.offset.click.left=e.offset.click.left;this.instance.offset.parent.left-=e.offset.parent.left-this.instance.offset.parent.left;this.instance.offset.parent.top-=e.offset.parent.top-this.instance.offset.parent.top;e._trigger("toSortable",c);e.dropped=this.instance.element;e.currentItem=e.element;this.instance.fromOutside=e}if(this.instance.currentItem){this.instance._mouseDrag(c)}}else{if(this.instance.isOver){this.instance.isOver=0;this.instance.cancelHelperRemoval=true;this.instance.options.revert=false;this.instance._trigger("out",c,this.instance._uiHash(this.instance));this.instance._mouseStop(c,true);this.instance.options.helper=this.instance.options._helper;this.instance.currentItem.remove();if(this.instance.placeholder){this.instance.placeholder.remove()}e._trigger("fromSortable",c);e.dropped=false}}})}});a.ui.plugin.add("draggable","cursor",{start:function(c,d){var 
b=a("body"),e=a(this).data("draggable").options;if(b.css("cursor")){e._cursor=b.css("cursor")}b.css("cursor",e.cursor)},stop:function(b,c){var d=a(this).data("draggable").options;if(d._cursor){a("body").css("cursor",d._cursor)}}});a.ui.plugin.add("draggable","iframeFix",{start:function(b,c){var d=a(this).data("draggable").options;a(d.iframeFix===true?"iframe":d.iframeFix).each(function(){a('<div class="ui-draggable-iframeFix" style="background: #fff;"></div>').css({width:this.offsetWidth+"px",height:this.offsetHeight+"px",position:"absolute",opacity:"0.001",zIndex:1000}).css(a(this).offset()).appendTo("body")})},stop:function(b,c){a("div.ui-draggable-iframeFix").each(function(){this.parentNode.removeChild(this)})}});a.ui.plugin.add("draggable","opacity",{start:function(c,d){var b=a(d.helper),e=a(this).data("draggable").options;if(b.css("opacity")){e._opacity=b.css("opacity")}b.css("opacity",e.opacity)},stop:function(b,c){var d=a(this).data("draggable").options;if(d._opacity){a(c.helper).css("opacity",d._opacity)}}});a.ui.plugin.add("draggable","scroll",{start:function(c,d){var b=a(this).data("draggable");if(b.scrollParent[0]!=document&&b.scrollParent[0].tagName!="HTML"){b.overflowOffset=b.scrollParent.offset()}},drag:function(d,e){var c=a(this).data("draggable"),f=c.options,b=false;if(c.scrollParent[0]!=document&&c.scrollParent[0].tagName!="HTML"){if(!f.axis||f.axis!="x"){if((c.overflowOffset.top+c.scrollParent[0].offsetHeight)-d.pageY<f.scrollSensitivity){c.scrollParent[0].scrollTop=b=c.scrollParent[0].scrollTop+f.scrollSpeed}else{if(d.pageY-c.overflowOffset.top<f.scrollSensitivity){c.scrollParent[0].scrollTop=b=c.scrollParent[0].scrollTop-f.scrollSpeed}}}if(!f.axis||f.axis!="y"){if((c.overflowOffset.left+c.scrollParent[0].offsetWidth)-d.pageX<f.scrollSensitivity){c.scrollParent[0].scrollLeft=b=c.scrollParent[0].scrollLeft+f.scrollSpeed}else{if(d.pageX-c.overflowOffset.left<f.scrollSensitivity){c.scrollParent[0].scrollLeft=b=c.scrollParent[0].scrollLeft-f.scrollSpeed}}}}else{if(!f.axis||f.axis!="x"){if(d.pageY-a(document).scrollTop()<f.scrollSensitivity){b=a(document).scrollTop(a(document).scrollTop()-f.scrollSpeed)}else{if(a(window).height()-(d.pageY-a(document).scrollTop())<f.scrollSensitivity){b=a(document).scrollTop(a(document).scrollTop()+f.scrollSpeed)}}}if(!f.axis||f.axis!="y"){if(d.pageX-a(document).scrollLeft()<f.scrollSensitivity){b=a(document).scrollLeft(a(document).scrollLeft()-f.scrollSpeed)}else{if(a(window).width()-(d.pageX-a(document).scrollLeft())<f.scrollSensitivity){b=a(document).scrollLeft(a(document).scrollLeft()+f.scrollSpeed)}}}}if(b!==false&&a.ui.ddmanager&&!f.dropBehaviour){a.ui.ddmanager.prepareOffsets(c,d)}}});a.ui.plugin.add("draggable","snap",{start:function(c,d){var b=a(this).data("draggable"),e=b.options;b.snapElements=[];a(e.snap.constructor!=String?(e.snap.items||":data(draggable)"):e.snap).each(function(){var g=a(this);var f=g.offset();if(this!=b.element[0]){b.snapElements.push({item:this,width:g.outerWidth(),height:g.outerHeight(),top:f.top,left:f.left})}})},drag:function(u,p){var g=a(this).data("draggable"),q=g.options;var y=q.snapTolerance;var x=p.offset.left,w=x+g.helperProportions.width,f=p.offset.top,e=f+g.helperProportions.height;for(var v=g.snapElements.length-1;v>=0;v--){var 
s=g.snapElements[v].left,n=s+g.snapElements[v].width,m=g.snapElements[v].top,A=m+g.snapElements[v].height;if(!((s-y<x&&x<n+y&&m-y<f&&f<A+y)||(s-y<x&&x<n+y&&m-y<e&&e<A+y)||(s-y<w&&w<n+y&&m-y<f&&f<A+y)||(s-y<w&&w<n+y&&m-y<e&&e<A+y))){if(g.snapElements[v].snapping){(g.options.snap.release&&g.options.snap.release.call(g.element,u,a.extend(g._uiHash(),{snapItem:g.snapElements[v].item})))}g.snapElements[v].snapping=false;continue}if(q.snapMode!="inner"){var c=Math.abs(m-e)<=y;var z=Math.abs(A-f)<=y;var j=Math.abs(s-w)<=y;var k=Math.abs(n-x)<=y;if(c){p.position.top=g._convertPositionTo("relative",{top:m-g.helperProportions.height,left:0}).top-g.margins.top}if(z){p.position.top=g._convertPositionTo("relative",{top:A,left:0}).top-g.margins.top}if(j){p.position.left=g._convertPositionTo("relative",{top:0,left:s-g.helperProportions.width}).left-g.margins.left}if(k){p.position.left=g._convertPositionTo("relative",{top:0,left:n}).left-g.margins.left}}var h=(c||z||j||k);if(q.snapMode!="outer"){var c=Math.abs(m-f)<=y;var z=Math.abs(A-e)<=y;var j=Math.abs(s-x)<=y;var k=Math.abs(n-w)<=y;if(c){p.position.top=g._convertPositionTo("relative",{top:m,left:0}).top-g.margins.top}if(z){p.position.top=g._convertPositionTo("relative",{top:A-g.helperProportions.height,left:0}).top-g.margins.top}if(j){p.position.left=g._convertPositionTo("relative",{top:0,left:s}).left-g.margins.left}if(k){p.position.left=g._convertPositionTo("relative",{top:0,left:n-g.helperProportions.width}).left-g.margins.left}}if(!g.snapElements[v].snapping&&(c||z||j||k||h)){(g.options.snap.snap&&g.options.snap.snap.call(g.element,u,a.extend(g._uiHash(),{snapItem:g.snapElements[v].item})))}g.snapElements[v].snapping=(c||z||j||k||h)}}});a.ui.plugin.add("draggable","stack",{start:function(b,c){var e=a(this).data("draggable").options;var d=a.makeArray(a(e.stack.group)).sort(function(g,f){return(parseInt(a(g).css("zIndex"),10)||e.stack.min)-(parseInt(a(f).css("zIndex"),10)||e.stack.min)});a(d).each(function(f){this.style.zIndex=e.stack.min+f});this[0].style.zIndex=e.stack.min+d.length}});a.ui.plugin.add("draggable","zIndex",{start:function(c,d){var b=a(d.helper),e=a(this).data("draggable").options;if(b.css("zIndex")){e._zIndex=b.css("zIndex")}b.css("zIndex",e.zIndex)},stop:function(b,c){var d=a(this).data("draggable").options;if(d._zIndex){a(c.helper).css("zIndex",d._zIndex)}}})})(jQuery);;/*
  * jQuery UI Droppable 1.7.1
  *
- * Copyright (c) 2009 AUTHORS.txt (http://jqueryui.com/about)
+ * Copyright (c) 2010 AUTHORS.txt (http://jqueryui.com/about)
  * Dual licensed under the MIT (MIT-LICENSE.txt)
  * and GPL (GPL-LICENSE.txt) licenses.
  *
@@ -32,7 +32,7 @@
  */
(function(a){a.widget("ui.droppable",{_init:function(){var c=this.options,b=c.accept;this.isover=0;this.isout=1;this.options.accept=this.options.accept&&a.isFunction(this.options.accept)?this.options.accept:function(e){return e.is(b)};this.proportions={width:this.element[0].offsetWidth,height:this.element[0].offsetHeight};a.ui.ddmanager.droppables[this.options.scope]=a.ui.ddmanager.droppables[this.options.scope]||[];a.ui.ddmanager.droppables[this.options.scope].push(this);(this.options.addClasses&&this.element.addClass("ui-droppable"))},destroy:function(){var b=a.ui.ddmanager.droppables[this.options.scope];for(var c=0;c<b.length;c++){if(b[c]==this){b.splice(c,1)}}this.element.removeClass("ui-droppable ui-droppable-disabled").removeData("droppable").unbind(".droppable")},_setData:function(b,c){if(b=="accept"){this.options.accept=c&&a.isFunction(c)?c:function(e){return e.is(c)}}else{a.widget.prototype._setData.apply(this,arguments)}},_activate:function(c){var b=a.ui.ddmanager.current;if(this.options.activeClass){this.element.addClass(this.options.activeClass)}(b&&this._trigger("activate",c,this.ui(b)))},_deactivate:function(c){var b=a.ui.ddmanager.current;if(this.options.activeClass){this.element.removeClass(this.options.activeClass)}(b&&this._trigger("deactivate",c,this.ui(b)))},_over:function(c){var b=a.ui.ddmanager.current;if(!b||(b.currentItem||b.element)[0]==this.element[0]){return}if(this.options.accept.call(this.element[0],(b.currentItem||b.element))){if(this.options.hoverClass){this.element.addClass(this.options.hoverClass)}this._trigger("over",c,this.ui(b))}},_out:function(c){var b=a.ui.ddmanager.current;if(!b||(b.currentItem||b.element)[0]==this.element[0]){return}if(this.options.accept.call(this.element[0],(b.currentItem||b.element))){if(this.options.hoverClass){this.element.removeClass(this.options.hoverClass)}this._trigger("out",c,this.ui(b))}},_drop:function(c,d){var b=d||a.ui.ddmanager.current;if(!b||(b.currentItem||b.element)[0]==this.element[0]){return false}var e=false;this.element.find(":data(droppable)").not(".ui-draggable-dragging").each(function(){var f=a.data(this,"droppable");if(f.options.greedy&&a.ui.intersect(b,a.extend(f,{offset:f.element.offset()}),f.options.tolerance)){e=true;return false}});if(e){return false}if(this.options.accept.call(this.element[0],(b.currentItem||b.element))){if(this.options.activeClass){this.element.removeClass(this.options.activeClass)}if(this.options.hoverClass){this.element.removeClass(this.options.hoverClass)}this._trigger("drop",c,this.ui(b));return this.element}return false},ui:function(b){return{draggable:(b.currentItem||b.element),helper:b.helper,position:b.position,absolutePosition:b.positionAbs,offset:b.positionAbs}}});a.extend(a.ui.droppable,{version:"1.7.1",eventPrefix:"drop",defaults:{accept:"*",activeClass:false,addClasses:true,greedy:false,hoverClass:false,scope:"default",tolerance:"intersect"}});a.ui.intersect=function(q,j,o){if(!j.offset){return false}var e=(q.positionAbs||q.position.absolute).left,d=e+q.helperProportions.width,n=(q.positionAbs||q.position.absolute).top,m=n+q.helperProportions.height;var g=j.offset.left,c=g+j.proportions.width,p=j.offset.top,k=p+j.proportions.height;switch(o){case"fit":return(g<e&&d<c&&p<n&&m<k);break;case"intersect":return(g<e+(q.helperProportions.width/2)&&d-(q.helperProportions.width/2)<c&&p<n+(q.helperProportions.height/2)&&m-(q.helperProportions.height/2)<k);break;case"pointer":var 
h=((q.positionAbs||q.position.absolute).left+(q.clickOffset||q.offset.click).left),i=((q.positionAbs||q.position.absolute).top+(q.clickOffset||q.offset.click).top),f=a.ui.isOver(i,h,p,g,j.proportions.height,j.proportions.width);return f;break;case"touch":return((n>=p&&n<=k)||(m>=p&&m<=k)||(n<p&&m>k))&&((e>=g&&e<=c)||(d>=g&&d<=c)||(e<g&&d>c));break;default:return false;break}};a.ui.ddmanager={current:null,droppables:{"default":[]},prepareOffsets:function(e,g){var b=a.ui.ddmanager.droppables[e.options.scope];var f=g?g.type:null;var h=(e.currentItem||e.element).find(":data(droppable)").andSelf();droppablesLoop:for(var d=0;d<b.length;d++){if(b[d].options.disabled||(e&&!b[d].options.accept.call(b[d].element[0],(e.currentItem||e.element)))){continue}for(var c=0;c<h.length;c++){if(h[c]==b[d].element[0]){b[d].proportions.height=0;continue droppablesLoop}}b[d].visible=b[d].element.css("display")!="none";if(!b[d].visible){continue}b[d].offset=b[d].element.offset();b[d].proportions={width:b[d].element[0].offsetWidth,height:b[d].element[0].offsetHeight};if(f=="mousedown"){b[d]._activate.call(b[d],g)}}},drop:function(b,c){var d=false;a.each(a.ui.ddmanager.droppables[b.options.scope],function(){if(!this.options){return}if(!this.options.disabled&&this.visible&&a.ui.intersect(b,this,this.options.tolerance)){d=this._drop.call(this,c)}if(!this.options.disabled&&this.visible&&this.options.accept.call(this.element[0],(b.currentItem||b.element))){this.isout=1;this.isover=0;this._deactivate.call(this,c)}});return d},drag:function(b,c){if(b.options.refreshPositions){a.ui.ddmanager.prepareOffsets(b,c)}a.each(a.ui.ddmanager.droppables[b.options.scope],function(){if(this.options.disabled||this.greedyChild||!this.visible){return}var e=a.ui.intersect(b,this,this.options.tolerance);var g=!e&&this.isover==1?"isout":(e&&this.isover==0?"isover":null);if(!g){return}var f;if(this.options.greedy){var d=this.element.parents(":data(droppable):eq(0)");if(d.length){f=a.data(d[0],"droppable");f.greedyChild=(g=="isover"?1:0)}}if(f&&g=="isover"){f.isover=0;f.isout=1;f._out.call(f,c)}this[g]=1;this[g=="isout"?"isover":"isout"]=0;this[g=="isover"?"_over":"_out"].call(this,c);if(f&&g=="isout"){f.isout=0;f.isover=1;f._over.call(f,c)}})}}})(jQuery);;/*
  * jQuery UI Slider 1.7.1
  *
- * Copyright (c) 2009 AUTHORS.txt (http://jqueryui.com/about)
+ * Copyright (c) 2010 AUTHORS.txt (http://jqueryui.com/about)
  * Dual licensed under the MIT (MIT-LICENSE.txt)
  * and GPL (GPL-LICENSE.txt) licenses.
  *
--- a/web/data/ui.tabs.js	Thu May 06 08:24:46 2010 +0200
+++ b/web/data/ui.tabs.js	Mon Jul 19 15:36:16 2010 +0200
@@ -1,7 +1,7 @@
 /*
  * jQuery UI Tabs @VERSION
  *
- * Copyright (c) 2007, 2008 Klaus Hartl (stilbuero.de)
+ * Copyright (c) 2007, 2010 Klaus Hartl (stilbuero.de)
  * Dual licensed under the MIT (MIT-LICENSE.txt)
  * and GPL (GPL-LICENSE.txt) licenses.
  *
--- a/web/facet.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/facet.py	Mon Jul 19 15:36:16 2010 +0200
@@ -21,7 +21,6 @@
 """
 __docformat__ = "restructuredtext en"
 
-from itertools import chain
 from copy import deepcopy
 from datetime import date, datetime, timedelta
 
@@ -158,6 +157,10 @@
     if rqlst.groupby:
         rqlst.add_group_var(newvar)
     rqlst.add_selected(newvar)
+    # add is restriction if necessary
+    if mainvar.stinfo['typerel'] is None:
+        etypes = frozenset(sol[mainvar.name] for sol in rqlst.solutions)
+        rqlst.add_type_restriction(mainvar, etypes)
     return newvar
 
 def _remove_relation(rqlst, rel, var):
@@ -211,10 +214,6 @@
         _set_orderby(rqlst, attrvar, sortasc, sortfuncname)
     # add attribute variable to selection
     rqlst.add_selected(attrvar)
-    # add is restriction if necessary
-    if not mainvar.stinfo['typerels']:
-        etypes = frozenset(sol[mainvar.name] for sol in rqlst.solutions)
-        rqlst.add_type_restriction(mainvar, etypes)
     return var
 
 def _cleanup_rqlst(rqlst, mainvar):
@@ -241,12 +240,16 @@
         for ovarname in linkedvars:
             vargraph[ovarname].remove(trvarname)
         # remove relation using this variable
-        for rel in chain(trvar.stinfo['relations'], trvar.stinfo['typerels']):
+        for rel in trvar.stinfo['relations']:
             if rel in removed:
                 # already removed
                 continue
             rqlst.remove_node(rel)
             removed.add(rel)
+        rel = trvar.stinfo['typerel']
+        if rel is not None and not rel in removed:
+            rqlst.remove_node(rel)
+            removed.add(rel)
         # cleanup groupby clause
         if rqlst.groupby:
             for vref in rqlst.groupby[:]:
@@ -342,9 +345,9 @@
     def support_and(self):
         return False
 
-    def rqlexec(self, rql, args=None, cachekey=None):
+    def rqlexec(self, rql, args=None):
         try:
-            return self._cw.execute(rql, args, cachekey)
+            return self._cw.execute(rql, args)
         except Unauthorized:
             return []
 
@@ -385,7 +388,7 @@
             if self.target_type is not None:
                 rqlst.add_type_restriction(var, self.target_type)
             try:
-                rset = self.rqlexec(rqlst.as_string(), self.cw_rset.args, self.cw_rset.cachekey)
+                rset = self.rqlexec(rqlst.as_string(), self.cw_rset.args)
             except:
                 self.exception('error while getting vocabulary for %s, rql: %s',
                                self, rqlst.as_string())
@@ -464,6 +467,7 @@
     attrtype = 'String'
     # type of comparison: default is an exact match on the attribute value
     comparator = '=' # could be '<', '<=', '>', '>='
+    i18nable = True
 
     def vocabulary(self):
         """return vocabulary for this facet, eg a list of 2-uple (label, value)
@@ -476,7 +480,7 @@
             newvar = _prepare_vocabulary_rqlst(rqlst, mainvar, self.rtype, self.role)
             _set_orderby(rqlst, newvar, self.sortasc, self.sortfunc)
             try:
-                rset = self.rqlexec(rqlst.as_string(), self.cw_rset.args, self.cw_rset.cachekey)
+                rset = self.rqlexec(rqlst.as_string(), self.cw_rset.args)
             except:
                 self.exception('error while getting vocabulary for %s, rql: %s',
                                self, rqlst.as_string())
@@ -488,7 +492,10 @@
         return rset and self.rset_vocabulary(rset)
 
     def rset_vocabulary(self, rset):
-        _ = self._cw._
+        if self.i18nable:
+            _ = self._cw._
+        else:
+            _ = unicode
         return [(_(value), value) for value, in rset]
 
     def support_and(self):
@@ -513,7 +520,7 @@
     def build_rql(self):#, tablefilter=False):
         form = self._cw.form
         facetids = form['facets'].split(',')
-        select = parse(form['baserql']).children[0] # XXX Union unsupported yet
+        select = self._cw.vreg.parse(self._cw, form['baserql']).children[0] # XXX Union unsupported yet
         mainvar = filtered_variable(select)
         toupdate = []
         for facetid in facetids:
--- a/web/form.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/form.py	Mon Jul 19 15:36:16 2010 +0200
@@ -15,9 +15,7 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""abstract form classes for CubicWeb web client
-
-"""
+"""abstract form classes for CubicWeb web client"""
 __docformat__ = "restructuredtext en"
 
 from warnings import warn
@@ -80,8 +78,6 @@
     __metaclass__ = metafieldsform
     __registry__ = 'forms'
 
-    internal_fields = ('__errorurl',) + controller.NAV_FORM_PARAMETERS
-
     parent_form = None
     force_session_key = None
     domid = 'form'
@@ -129,14 +125,16 @@
     def form_valerror(self):
         """the validation error exception if any"""
         if self.parent_form is None:
-            return self._form_valerror
+            # unset if restore_previous_post has not been called
+            return getattr(self, '_form_valerror', None)
         return self.parent_form.form_valerror
 
     @property
     def form_previous_values(self):
         """previously posted values (on validation error)"""
         if self.parent_form is None:
-            return self._form_previous_values
+            # unset if restore_previous_post has not been called
+            return getattr(self, '_form_previous_values', {})
         return self.parent_form.form_previous_values
 
     @iclassmethod
@@ -222,7 +220,7 @@
             warn('[3.6.1] restore_previous_post already called, remove this call',
                  DeprecationWarning, stacklevel=2)
             return
-        forminfo = self._cw.get_session_data(sessionkey, pop=True)
+        forminfo = self._cw.session.data.pop(sessionkey, None)
         if forminfo:
             self._form_previous_values = forminfo['values']
             self._form_valerror = forminfo['error']
--- a/web/formfields.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/formfields.py	Mon Jul 19 15:36:16 2010 +0200
@@ -82,6 +82,8 @@
     it
     """
 
+def normalize_filename(filename):
+    """return the filename part of a posted file path; some browsers (notably
+    IE) send the full client-side path using backslashes"""
+    return filename.split('\\')[-1]
 
 def vocab_sort(vocab):
     """sort vocabulary, considering option groups"""
@@ -433,9 +435,11 @@
             # attribute or relation
             return True
         # if it's a non final relation, we need the eids
-        if isinstance(previous_value, tuple):
+        # XXX underlying regression: getattr(ent, 'foo') used to return
+        #     a tuple, now we get a list
+        if isinstance(previous_value, (list, tuple)):
             # widget should return a set of untyped eids
-            previous_value = set(unicode(e.eid) for e in previous_value)
+            previous_value = set(e.eid for e in previous_value)
         try:
             new_value = self.process_form_value(form)
         except ProcessFormError:
@@ -723,15 +727,18 @@
             # raise UnmodifiedField instead of returning None, since the latter
             # will try to remove already attached file if any
             raise UnmodifiedField()
-        # skip browser submitted mime type
-        filename, _, stream = value
-        # value is a  3-uple (filename, mimetype, stream)
+        # value is a 2-uple (filename, stream)
+        try:
+            filename, stream = value
+        except ValueError:
+            raise UnmodifiedField()
+        # XXX avoid in memory loading of posted files. Requires Binary handling changes...
         value = Binary(stream.read())
         if not value.getvalue(): # usually a nonexistent file
             value = None
         else:
             # set filename on the Binary instance, may be used later in hooks
-            value.filename = filename
+            value.filename = normalize_filename(filename)
         return value
 
 
--- a/web/htmlwidgets.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/htmlwidgets.py	Mon Jul 19 15:36:16 2010 +0200
@@ -326,7 +326,7 @@
         self.w(u'<tr class="header">')
         for column in self.columns:
             attrs = ('%s="%s"' % (name, value) for name, value in column.cell_attrs.iteritems())
-            self.w(u'<th %s>%s</th>' % (' '.join(attrs), column.name))
+            self.w(u'<th %s>%s</th>' % (' '.join(attrs), column.name or u''))
         self.w(u'</tr>')
         self.w(u'</thead><tbody>')
         for rowindex in xrange(len(self.model.get_rows())):
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/http_headers.py	Mon Jul 19 15:36:16 2010 +0200
@@ -0,0 +1,1542 @@
+# This file has been extracted from the abandoned TwistedWeb2 project
+# http://twistedmatrix.com/trac/wiki/TwistedWeb2
+
+
+from __future__ import generators
+
+import types, time
+from calendar import timegm
+import base64
+import re
+
+def dashCapitalize(s):
+    ''' Capitalize a string, making sure to treat - as a word separator '''
+    return '-'.join([ x.capitalize() for x in s.split('-')])
+
+# datetime parsing and formatting
+weekdayname = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
+weekdayname_lower = [name.lower() for name in weekdayname]
+monthname = [None,
+             'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
+             'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
+monthname_lower = [name and name.lower() for name in monthname]
+
+# HTTP Header parsing API
+
+header_case_mapping = {}
+
+def casemappingify(d):
+    global header_case_mapping
+    newd = dict([(key.lower(),key) for key in d.keys()])
+    header_case_mapping.update(newd)
+
+def lowerify(d):
+    return dict([(key.lower(),value) for key,value in d.items()])
+
+
+class HeaderHandler(object):
+    """HeaderHandler manages header generating and parsing functions.
+    """
+    HTTPParsers = {}
+    HTTPGenerators = {}
+
+    def __init__(self, parsers=None, generators=None):
+        """
+        @param parsers: A map of header names to parsing functions.
+        @type parsers: L{dict}
+
+        @param generators: A map of header names to generating functions.
+        @type generators: L{dict}
+        """
+
+        if parsers:
+            self.HTTPParsers.update(parsers)
+        if generators:
+            self.HTTPGenerators.update(generators)
+
+    def parse(self, name, header):
+        """
+        Parse the given header based on its given name.
+
+        @param name: The header name to parse.
+        @type name: C{str}
+
+        @param header: A list of unparsed headers.
+        @type header: C{list} of C{str}
+
+        @return: The return value is the parsed header representation,
+            it is dependent on the header.  See the HTTP Headers document.
+        """
+        parser = self.HTTPParsers.get(name, None)
+        if parser is None:
+            raise ValueError("No header parser for header '%s', either add one or use getHeaderRaw." % (name,))
+
+        try:
+            for p in parser:
+                # print "Parsing %s: %s(%s)" % (name, repr(p), repr(h))
+                header = p(header)
+                # if isinstance(h, types.GeneratorType):
+                #     h=list(h)
+        except ValueError,v:
+            # print v
+            header=None
+
+        return header
+
+    def generate(self, name, header):
+        """
+        Generate the given header based on its given name.
+
+        @param name: The header name to generate.
+        @type name: C{str}
+
+        @param header: A parsed header, such as the output of
+            L{HeaderHandler}.parse.
+
+        @return: C{list} of C{str} each representing a generated HTTP header.
+        """
+        generator = self.HTTPGenerators.get(name, None)
+
+        if generator is None:
+            # print self.generators
+            raise ValueError("No header generator for header '%s', either add one or use setHeaderRaw." % (name,))
+
+        for g in generator:
+            header = g(header)
+
+        #self._raw_headers[name] = h
+        return header
+
+    def updateParsers(self, parsers):
+        """Update en masse the parser maps.
+
+        @param parsers: Map of header names to parser chains.
+        @type parsers: C{dict}
+        """
+        casemappingify(parsers)
+        self.HTTPParsers.update(lowerify(parsers))
+
+    def addParser(self, name, value):
+        """Add an individual parser chain for the given header.
+
+        @param name: Name of the header to add
+        @type name: C{str}
+
+        @param value: The parser chain
+        @type value: C{tuple} of parser callables
+        """
+        self.updateParsers({name: value})
+
+    def updateGenerators(self, generators):
+        """Update en masse the generator maps.
+
+        @param generators: Map of header names to generator chains.
+        @type generators: C{dict}
+        """
+        casemappingify(generators)
+        self.HTTPGenerators.update(lowerify(generators))
+
+    def addGenerators(self, name, value):
+        """Add an individual generator chain for the given header.
+
+        @param name: Name of the header to add
+        @type name: C{str}
+
+        @param value: The generator chain
+        @type value: C{tuple} of generator callables
+        """
+        self.updateGenerators({name: value})
+
+    def update(self, parsers, generators):
+        """Conveniently update parsers and generators all at once.
+        """
+        self.updateParsers(parsers)
+        self.updateGenerators(generators)
+
+
+DefaultHTTPHandler = HeaderHandler()
+
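+# Illustrative sketch of registering and using a parser chain on the default
+# handler (the header name below is invented for the example; the real header
+# mappings are the parser_*/generator_* dicts defined near the end of this
+# module):
+#   >>> DefaultHTTPHandler.addParser('x-example-count', (last, int))
+#   >>> DefaultHTTPHandler.parse('x-example-count', ['3', '7'])
+#   7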
+
+## HTTP DateTime parser
+def parseDateTime(dateString):
+    """Convert an HTTP date string (one of three formats) to seconds since epoch."""
+    parts = dateString.split()
+
+    if not parts[0][0:3].lower() in weekdayname_lower:
+        # Weekday is stupid. Might have been omitted.
+        try:
+            return parseDateTime("Sun, "+dateString)
+        except ValueError:
+            # Guess not.
+            pass
+
+    partlen = len(parts)
+    if (partlen == 5 or partlen == 6) and parts[1].isdigit():
+        # 1st date format: Sun, 06 Nov 1994 08:49:37 GMT
+        # (Note: "GMT" is literal, not a variable timezone)
+        # (also handles without "GMT")
+        # This is the normal format
+        day = parts[1]
+        month = parts[2]
+        year = parts[3]
+        time = parts[4]
+    elif (partlen == 3 or partlen == 4) and parts[1].find('-') != -1:
+        # 2nd date format: Sunday, 06-Nov-94 08:49:37 GMT
+        # (Note: "GMT" is literal, not a variable timezone)
+        # (also handles without "GMT")
+        # Two digit year, yucko.
+        day, month, year = parts[1].split('-')
+        time = parts[2]
+        year=int(year)
+        if year < 69:
+            year = year + 2000
+        elif year < 100:
+            year = year + 1900
+    elif len(parts) == 5:
+        # 3rd date format: Sun Nov  6 08:49:37 1994
+        # ANSI C asctime() format.
+        day = parts[2]
+        month = parts[1]
+        year = parts[4]
+        time = parts[3]
+    else:
+        raise ValueError("Unknown datetime format %r" % dateString)
+
+    day = int(day)
+    month = int(monthname_lower.index(month.lower()))
+    year = int(year)
+    hour, min, sec = map(int, time.split(':'))
+    return int(timegm((year, month, day, hour, min, sec)))
+
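+# Illustrative values, all denoting the same instant (RFC 1123, RFC 850 and
+# asctime() forms):
+#   >>> parseDateTime("Sun, 06 Nov 1994 08:49:37 GMT")
+#   784111777
+#   >>> parseDateTime("Sunday, 06-Nov-94 08:49:37 GMT")
+#   784111777
+#   >>> parseDateTime("Sun Nov  6 08:49:37 1994")
+#   784111777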
+
+##### HTTP tokenizer
+class Token(str):
+    __slots__=[]
+    tokens = {}
+    def __new__(self, char):
+        token = Token.tokens.get(char)
+        if token is None:
+            Token.tokens[char] = token = str.__new__(self, char)
+        return token
+
+    def __repr__(self):
+        return "Token(%s)" % str.__repr__(self)
+
+
+http_tokens = " \t\"()<>@,;:\\/[]?={}"
+http_ctls = "\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x7f"
+
+def tokenize(header, foldCase=True):
+    """Tokenize a string according to normal HTTP header parsing rules.
+
+    In particular:
+     - Whitespace is irrelevant and eaten next to special separator tokens.
+       Its existence (but not amount) is important between character strings.
+     - Quoted string support including embedded backslashes.
+     - Case is insignificant (and thus lowercased), except in quoted strings.
+        (unless foldCase=False)
+     - Multiple headers are concatenated with ','
+
+    NOTE: not all headers can be parsed with this function.
+
+    Takes a raw header value (list of strings), and
+    Returns a generator of strings and Token class instances.
+    """
+    tokens=http_tokens
+    ctls=http_ctls
+
+    string = ",".join(header)
+    list = []
+    start = 0
+    cur = 0
+    quoted = False
+    qpair = False
+    inSpaces = -1
+    qstring = None
+
+    for x in string:
+        if quoted:
+            if qpair:
+                qpair = False
+                qstring = qstring+string[start:cur-1]+x
+                start = cur+1
+            elif x == '\\':
+                qpair = True
+            elif x == '"':
+                quoted = False
+                yield qstring+string[start:cur]
+                qstring=None
+                start = cur+1
+        elif x in tokens:
+            if start != cur:
+                if foldCase:
+                    yield string[start:cur].lower()
+                else:
+                    yield string[start:cur]
+
+            start = cur+1
+            if x == '"':
+                quoted = True
+                qstring = ""
+                inSpaces = False
+            elif x in " \t":
+                if inSpaces is False:
+                    inSpaces = True
+            else:
+                inSpaces = -1
+                yield Token(x)
+        elif x in ctls:
+            raise ValueError("Invalid control character: %d in header" % ord(x))
+        else:
+            if inSpaces is True:
+                yield Token(' ')
+                inSpaces = False
+
+            inSpaces = False
+        cur = cur+1
+
+    if qpair:
+        raise ValueError, "Missing character after '\\'"
+    if quoted:
+        raise ValueError, "Missing end quote"
+
+    if start != cur:
+        if foldCase:
+            yield string[start:cur].lower()
+        else:
+            yield string[start:cur]
+
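+# Illustrative token stream: case is folded outside quoted strings and the
+# whitespace around separators is dropped:
+#   >>> list(tokenize(['Text/HTML; charset="UTF-8"']))
+#   ['text', Token('/'), 'html', Token(';'), 'charset', Token('='), 'UTF-8']
+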
+def split(seq, delim):
+    """The same as str.split but works on arbitrary sequences.
+    Too bad it's not builtin to python!"""
+
+    cur = []
+    for item in seq:
+        if item == delim:
+            yield cur
+            cur = []
+        else:
+            cur.append(item)
+    yield cur
+
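+# Illustrative: list(split(['a', Token(','), 'b', 'c'], Token(','))) yields
+# [['a'], ['b', 'c']]
+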
+# def find(seq, *args):
+#     """The same as seq.index but returns -1 if not found, instead
+#     Too bad it's not builtin to python!"""
+#     try:
+#         return seq.index(value, *args)
+#     except ValueError:
+#         return -1
+
+
+def filterTokens(seq):
+    """Filter out instances of Token, leaving only a list of strings.
+
+    Used instead of a more specific parsing method (e.g. splitting on commas)
+    when only strings are expected, so as to be a little lenient.
+
+    Apache does it this way and has some comments about broken clients which
+    forget commas (?), so I'm doing it the same way. It shouldn't
+    hurt anything, in any case.
+    """
+
+    l=[]
+    for x in seq:
+        if not isinstance(x, Token):
+            l.append(x)
+    return l
+
+##### parser utilities:
+def checkSingleToken(tokens):
+    if len(tokens) != 1:
+        raise ValueError, "Expected single token, not %s." % (tokens,)
+    return tokens[0]
+
+def parseKeyValue(val):
+    if len(val) == 1:
+        return val[0],None
+    elif len(val) == 3 and val[1] == Token('='):
+        return val[0],val[2]
+    raise ValueError, "Expected key or key=value, but got %s." % (val,)
+
+def parseArgs(field):
+    args=split(field, Token(';'))
+    val = args.next()
+    args = [parseKeyValue(arg) for arg in args]
+    return val,args
+
+def listParser(fun):
+    """Return a function which applies 'fun' to every element in the
+    comma-separated list"""
+    def listParserHelper(tokens):
+        fields = split(tokens, Token(','))
+        for field in fields:
+            if len(field) != 0:
+                yield fun(field)
+
+    return listParserHelper
+
+def last(seq):
+    """Return seq[-1]"""
+
+    return seq[-1]
+
+##### Generation utilities
+def quoteString(s):
+    return '"%s"' % s.replace('\\', '\\\\').replace('"', '\\"')
+
+def listGenerator(fun):
+    """Return a function which applies 'fun' to every element in
+    the given list, then joins the result with generateList"""
+    def listGeneratorHelper(l):
+        return generateList([fun(e) for e in l])
+
+    return listGeneratorHelper
+
+def generateList(seq):
+    return ", ".join(seq)
+
+def singleHeader(item):
+    return [item]
+
+def generateKeyValues(kvs):
+    l = []
+    # print kvs
+    for k,v in kvs:
+        if v is None:
+            l.append('%s' % k)
+        else:
+            l.append('%s=%s' % (k,v))
+    return ";".join(l)
+
+
+class MimeType(object):
+    def fromString(klass, mimeTypeString):
+        """Generate a MimeType object from the given string.
+
+        @param mimeTypeString: The mimetype to parse
+
+        @return: L{MimeType}
+        """
+        return DefaultHTTPHandler.parse('content-type', [mimeTypeString])
+
+    fromString = classmethod(fromString)
+
+    def __init__(self, mediaType, mediaSubtype, params={}, **kwargs):
+        """
+        @type mediaType: C{str}
+
+        @type mediaSubtype: C{str}
+
+        @type params: C{dict}
+        """
+        self.mediaType = mediaType
+        self.mediaSubtype = mediaSubtype
+        self.params = dict(params)
+
+        if kwargs:
+            self.params.update(kwargs)
+
+    def __eq__(self, other):
+        if not isinstance(other, MimeType): return NotImplemented
+        return (self.mediaType == other.mediaType and
+                self.mediaSubtype == other.mediaSubtype and
+                self.params == other.params)
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
+    def __repr__(self):
+        return "MimeType(%r, %r, %r)" % (self.mediaType, self.mediaSubtype, self.params)
+
+    def __hash__(self):
+        return hash(self.mediaType)^hash(self.mediaSubtype)^hash(tuple(self.params.iteritems()))
+
+##### Specific header parsers.
+def parseAccept(field):
+    type,args = parseArgs(field)
+
+    if len(type) != 3 or type[1] != Token('/'):
+        raise ValueError, "MIME Type "+str(type)+" invalid."
+
+    # okay, this spec is screwy. A 'q' parameter is used as the separator
+    # between MIME parameters and (as yet undefined) additional HTTP
+    # parameters.
+
+    num = 0
+    for arg in args:
+        if arg[0] == 'q':
+            mimeparams=tuple(args[0:num])
+            params=args[num:]
+            break
+        num = num + 1
+    else:
+        mimeparams=tuple(args)
+        params=[]
+
+    # Default values for parameters:
+    qval = 1.0
+
+    # Parse accept parameters:
+    for param in params:
+        if param[0] =='q':
+            qval = float(param[1])
+        else:
+            # Warn? ignored parameter.
+            pass
+
+    ret = MimeType(type[0],type[2],mimeparams),qval
+    return ret
+
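+# Illustrative: a field tokenized from 'text/html;level=1;q=0.5' parses to
+# (MimeType('text', 'html', {'level': '1'}), 0.5); without an explicit q
+# parameter the qvalue defaults to 1.0.
+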
+def parseAcceptQvalue(field):
+    type,args=parseArgs(field)
+
+    type = checkSingleToken(type)
+
+    qvalue = 1.0 # Default qvalue is 1
+    for arg in args:
+        if arg[0] == 'q':
+            qvalue = float(arg[1])
+    return type,qvalue
+
+def addDefaultCharset(charsets):
+    if charsets.get('*') is None and charsets.get('iso-8859-1') is None:
+        charsets['iso-8859-1'] = 1.0
+    return charsets
+
+def addDefaultEncoding(encodings):
+    if encodings.get('*') is None and encodings.get('identity') is None:
+        # RFC doesn't specify a default value for identity, only that it
+        # "is acceptable" if not mentioned. Thus, give it a very low qvalue.
+        encodings['identity'] = .0001
+    return encodings
+
+
+def parseContentType(header):
+    # Case folding is disabled for this header, because of use of
+    # Content-Type: multipart/form-data; boundary=CaSeFuLsTuFf
+    # So, we need to explicitly .lower() the type/subtype and arg keys.
+
+    type,args = parseArgs(header)
+
+    if len(type) != 3 or type[1] != Token('/'):
+        raise ValueError, "MIME Type "+str(type)+" invalid."
+
+    args = [(kv[0].lower(), kv[1]) for kv in args]
+
+    return MimeType(type[0].lower(), type[2].lower(), tuple(args))
+
+def parseContentMD5(header):
+    try:
+        return base64.decodestring(header)
+    except Exception,e:
+        raise ValueError(e)
+
+def parseContentRange(header):
+    """Parse a content-range header into (kind, start, end, realLength).
+
+    realLength might be None if real length is not known ('*').
+    start and end might be None if start,end unspecified (for response code 416)
+    """
+    kind, other = header.strip().split()
+    if kind.lower() != "bytes":
+        raise ValueError("a range of type %r is not supported")
+    startend, realLength = other.split("/")
+    if startend.strip() == '*':
+        start,end=None,None
+    else:
+        start, end = map(int, startend.split("-"))
+    if realLength == "*":
+        realLength = None
+    else:
+        realLength = int(realLength)
+    return (kind, start, end, realLength)
+
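+# Illustrative: parseContentRange("bytes 0-499/1234") == ('bytes', 0, 499, 1234)
+# and parseContentRange("bytes */1234") == ('bytes', None, None, 1234)
+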
+def parseExpect(field):
+    type,args=parseArgs(field)
+
+    type=parseKeyValue(type)
+    return (type[0], (lambda *args:args)(type[1], *args))
+
+def parseExpires(header):
+    # """HTTP/1.1 clients and caches MUST treat other invalid date formats,
+    #    especially including the value 0, as in the past (i.e., "already expired")."""
+
+    try:
+        return parseDateTime(header)
+    except ValueError:
+        return 0
+
+def parseIfModifiedSince(header):
+    # Ancient versions of netscape and *current* versions of MSIE send
+    #   If-Modified-Since: Thu, 05 Aug 2004 12:57:27 GMT; length=123
+    # which is blatantly RFC-violating and not documented anywhere
+    # except bug-trackers for web frameworks.
+
+    # So, we'll just strip off everything after a ';'.
+    return parseDateTime(header.split(';', 1)[0])
+
+def parseIfRange(headers):
+    try:
+        return ETag.parse(tokenize(headers))
+    except ValueError:
+        return parseDateTime(last(headers))
+
+def parseRange(range):
+    range = list(range)
+    if len(range) < 3 or range[1] != Token('='):
+        raise ValueError("Invalid range header format: %s" %(range,))
+
+    type=range[0]
+    if type != 'bytes':
+        raise ValueError("Unknown range unit: %s." % (type,))
+    rangeset=split(range[2:], Token(','))
+    ranges = []
+
+    for byterangespec in rangeset:
+        if len(byterangespec) != 1:
+            raise ValueError("Invalid range header format: %s" % (range,))
+        start,end=byterangespec[0].split('-')
+
+        if not start and not end:
+            raise ValueError("Invalid range header format: %s" % (range,))
+
+        if start:
+            start = int(start)
+        else:
+            start = None
+
+        if end:
+            end = int(end)
+        else:
+            end = None
+
+        if start and end and start > end:
+            raise ValueError("Invalid range header, start > end: %s" % (range,))
+        ranges.append((start,end))
+    return type,ranges
+
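+# Illustrative: the tokenized header 'bytes=0-499,500-' parses to
+# ('bytes', [(0, 499), (500, None)])
+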
+def parseRetryAfter(header):
+    try:
+        # delta seconds
+        return time.time() + int(header)
+    except ValueError:
+        # or datetime
+        return parseDateTime(header)
+
+# WWW-Authenticate and Authorization
+
+def parseWWWAuthenticate(tokenized):
+    headers = []
+
+    tokenList = list(tokenized)
+
+    while tokenList:
+        scheme = tokenList.pop(0)
+        challenge = {}
+        last = None
+        kvChallenge = False
+
+        while tokenList:
+            token = tokenList.pop(0)
+            if token == Token('='):
+                kvChallenge = True
+                challenge[last] = tokenList.pop(0)
+                last = None
+
+            elif token == Token(','):
+                if kvChallenge:
+                    if len(tokenList) > 1 and tokenList[1] != Token('='):
+                        break
+
+                else:
+                    break
+
+            else:
+                last = token
+
+        if last and scheme and not challenge and not kvChallenge:
+            challenge = last
+            last = None
+
+        headers.append((scheme, challenge))
+
+    if last and last not in (Token('='), Token(',')):
+        if headers[-1] == (scheme, challenge):
+            scheme = last
+            challenge = {}
+            headers.append((scheme, challenge))
+
+    return headers
+
+def parseAuthorization(header):
+    scheme, rest = header.split(' ', 1)
+    # this header isn't tokenized because it may eat characters
+    # in the unquoted base64 encoded credentials
+    return scheme.lower(), rest
+
+#### Header generators
+def generateAccept(accept):
+    mimeType,q = accept
+
+    out="%s/%s"%(mimeType.mediaType, mimeType.mediaSubtype)
+    if mimeType.params:
+        out+=';'+generateKeyValues(mimeType.params.iteritems())
+
+    if q != 1.0:
+        out+=(';q=%.3f' % (q,)).rstrip('0').rstrip('.')
+
+    return out
+
+def removeDefaultEncoding(seq):
+    for item in seq:
+        if item[0] != 'identity' or item[1] != .0001:
+            yield item
+
+def generateAcceptQvalue(keyvalue):
+    if keyvalue[1] == 1.0:
+        return "%s" % keyvalue[0:1]
+    else:
+        return ("%s;q=%.3f" % keyvalue).rstrip('0').rstrip('.')
+
+def parseCacheControl(kv):
+    k, v = parseKeyValue(kv)
+    if k == 'max-age' or k == 'min-fresh' or k == 's-maxage':
+        # Required integer argument
+        if v is None:
+            v = 0
+        else:
+            v = int(v)
+    elif k == 'max-stale':
+        # Optional integer argument
+        if v is not None:
+            v = int(v)
+    elif k == 'private' or k == 'no-cache':
+        # Optional list argument
+        if v is not None:
+            v = [field.strip().lower() for field in v.split(',')]
+    return k, v
+
+def generateCacheControl((k, v)):
+    if v is None:
+        return str(k)
+    else:
+        if k == 'no-cache' or k == 'private':
+            # quoted list of values
+            v = quoteString(generateList(
+                [header_case_mapping.get(name) or dashCapitalize(name) for name in v]))
+        return '%s=%s' % (k,v)
+
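+# Illustrative round trip: parseCacheControl(['max-age', Token('='), '3600'])
+# gives ('max-age', 3600), and generateCacheControl(('max-age', 3600)) gives
+# back 'max-age=3600'.
+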
+def generateContentRange(tup):
+    """tup is (type, start, end, len)
+    len can be None.
+    """
+    type, start, end, len = tup
+    if len == None:
+        len = '*'
+    else:
+        len = int(len)
+    if start == None and end == None:
+        startend = '*'
+    else:
+        startend = '%d-%d' % (start, end)
+
+    return '%s %s/%s' % (type, startend, len)
+
+def generateDateTime(secSinceEpoch):
+    """Convert seconds since epoch to HTTP datetime string."""
+    year, month, day, hh, mm, ss, wd, y, z = time.gmtime(secSinceEpoch)
+    s = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % (
+        weekdayname[wd],
+        day, monthname[month], year,
+        hh, mm, ss)
+    return s
+
+def generateExpect(item):
+    if item[1][0] is None:
+        out = '%s' % (item[0],)
+    else:
+        out = '%s=%s' % (item[0], item[1][0])
+    if len(item[1]) > 1:
+        out += ';'+generateKeyValues(item[1][1:])
+    return out
+
+def generateRange(range):
+    def noneOr(s):
+        if s is None:
+            return ''
+        return s
+
+    type,ranges=range
+
+    if type != 'bytes':
+        raise ValueError("Unknown range unit: "+type+".")
+
+    return (type+'='+
+            ','.join(['%s-%s' % (noneOr(startend[0]), noneOr(startend[1]))
+                      for startend in ranges]))
+
+def generateRetryAfter(when):
+    # always generate delta seconds format
+    return str(int(when - time.time()))
+
+def generateContentType(mimeType):
+    out="%s/%s"%(mimeType.mediaType, mimeType.mediaSubtype)
+    if mimeType.params:
+        out+=';'+generateKeyValues(mimeType.params.iteritems())
+    return out
+
+def generateIfRange(dateOrETag):
+    if isinstance(dateOrETag, ETag):
+        return dateOrETag.generate()
+    else:
+        return generateDateTime(dateOrETag)
+
+# WWW-Authenticate and Authorization
+
+def generateWWWAuthenticate(headers):
+    _generated = []
+    for seq in headers:
+        scheme, challenge = seq[0], seq[1]
+
+        # If we're going to parse out to something other than a dict
+        # we need to be able to generate from something other than a dict
+
+        try:
+            l = []
+            for k,v in dict(challenge).iteritems():
+                l.append("%s=%s" % (k, quoteString(v)))
+
+            _generated.append("%s %s" % (scheme, ", ".join(l)))
+        except ValueError:
+            _generated.append("%s %s" % (scheme, challenge))
+
+    return _generated
+
+def generateAuthorization(seq):
+    return [' '.join(seq)]
+
+
+####
+class ETag(object):
+    def __init__(self, tag, weak=False):
+        self.tag = str(tag)
+        self.weak = weak
+
+    def match(self, other, strongCompare):
+        # Sec 13.3.
+        # The strong comparison function: in order to be considered equal, both
+        #   validators MUST be identical in every way, and both MUST NOT be weak.
+        #
+        # The weak comparison function: in order to be considered equal, both
+        #   validators MUST be identical in every way, but either or both of
+        #   them MAY be tagged as "weak" without affecting the result.
+
+        if not isinstance(other, ETag) or other.tag != self.tag:
+            return False
+
+        if strongCompare and (other.weak or self.weak):
+            return False
+        return True
+
+    def __eq__(self, other):
+        return isinstance(other, ETag) and other.tag == self.tag and other.weak == self.weak
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
+    def __repr__(self):
+        return "Etag(%r, weak=%r)" % (self.tag, self.weak)
+
+    def parse(tokens):
+        tokens=tuple(tokens)
+        if len(tokens) == 1 and not isinstance(tokens[0], Token):
+            return ETag(tokens[0])
+
+        if(len(tokens) == 3 and tokens[0] == "w"
+           and tokens[1] == Token('/')):
+            return ETag(tokens[2], weak=True)
+
+        raise ValueError("Invalid ETag.")
+
+    parse=staticmethod(parse)
+
+    def generate(self):
+        if self.weak:
+            return 'W/'+quoteString(self.tag)
+        else:
+            return quoteString(self.tag)
+
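+# Illustrative: ETag('xyzzy').generate() == '"xyzzy"' while
+# ETag('xyzzy', weak=True).generate() == 'W/"xyzzy"'; the weak tag matches the
+# strong one only when strongCompare is False.
+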
+def parseStarOrETag(tokens):
+    tokens=tuple(tokens)
+    if tokens == ('*',):
+        return '*'
+    else:
+        return ETag.parse(tokens)
+
+def generateStarOrETag(etag):
+    if etag=='*':
+        return etag
+    else:
+        return etag.generate()
+
+#### Cookies. Blech!
+class Cookie(object):
+    # __slots__ = ['name', 'value', 'path', 'domain', 'ports', 'expires', 'discard', 'secure', 'comment', 'commenturl', 'version']
+
+    def __init__(self, name, value, path=None, domain=None, ports=None, expires=None, discard=False, secure=False, comment=None, commenturl=None, version=0):
+        self.name=name
+        self.value=value
+        self.path=path
+        self.domain=domain
+        self.ports=ports
+        self.expires=expires
+        self.discard=discard
+        self.secure=secure
+        self.comment=comment
+        self.commenturl=commenturl
+        self.version=version
+
+    def __repr__(self):
+        s="Cookie(%r=%r" % (self.name, self.value)
+        if self.path is not None: s+=", path=%r" % (self.path,)
+        if self.domain is not None: s+=", domain=%r" % (self.domain,)
+        if self.ports is not None: s+=", ports=%r" % (self.ports,)
+        if self.expires is not None: s+=", expires=%r" % (self.expires,)
+        if self.secure is not False: s+=", secure=%r" % (self.secure,)
+        if self.comment is not None: s+=", comment=%r" % (self.comment,)
+        if self.commenturl is not None: s+=", commenturl=%r" % (self.commenturl,)
+        if self.version != 0: s+=", version=%r" % (self.version,)
+        s+=")"
+        return s
+
+    def __eq__(self, other):
+        return (isinstance(other, Cookie) and
+                other.path == self.path and
+                other.domain == self.domain and
+                other.ports == self.ports and
+                other.expires == self.expires and
+                other.secure == self.secure and
+                other.comment == self.comment and
+                other.commenturl == self.commenturl and
+                other.version == self.version)
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
+
+def parseCookie(headers):
+    """Bleargh, the cookie spec sucks.
+    This surely needs interoperability testing.
+    There are two specs that are supported:
+    Version 0) http://wp.netscape.com/newsref/std/cookie_spec.html
+    Version 1) http://www.faqs.org/rfcs/rfc2965.html
+    """
+
+    cookies = []
+    # There can't really be multiple cookie headers according to RFC, because
+    # if multiple headers are allowed, they must be joinable with ",".
+    # Neither new RFC2965 cookies nor old netscape cookies are.
+
+    header = ';'.join(headers)
+    if header[0:8].lower() == "$version":
+        # RFC2965 cookie
+        h=tokenize([header], foldCase=False)
+        r_cookies = split(h, Token(','))
+        for r_cookie in r_cookies:
+            last_cookie = None
+            rr_cookies = split(r_cookie, Token(';'))
+            for cookie in rr_cookies:
+                nameval = tuple(split(cookie, Token('=')))
+                if len(nameval) == 2:
+                    (name,), (value,) = nameval
+                else:
+                    (name,), = nameval
+                    value = None
+
+                name=name.lower()
+                if name == '$version':
+                    continue
+                if name[0] == '$':
+                    if last_cookie is not None:
+                        if name == '$path':
+                            last_cookie.path=value
+                        elif name == '$domain':
+                            last_cookie.domain=value
+                        elif name == '$port':
+                            if value is None:
+                                last_cookie.ports = ()
+                            else:
+                                last_cookie.ports=tuple([int(s) for s in value.split(',')])
+                else:
+                    last_cookie = Cookie(name, value, version=1)
+                    cookies.append(last_cookie)
+    else:
+        # Oldstyle cookies don't do quoted strings or anything sensible.
+        # All characters are valid for names except ';' and '=', and all
+        # characters are valid for values except ';'. Spaces are stripped,
+        # however.
+        r_cookies = header.split(';')
+        for r_cookie in r_cookies:
+            name,value = r_cookie.split('=', 1)
+            name=name.strip(' \t')
+            value=value.strip(' \t')
+
+            cookies.append(Cookie(name, value))
+
+    return cookies
+
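+# Illustrative: parseCookie(['key1=value1; key2=value2']) yields two version-0
+# Cookie objects, while a header starting with '$Version="1"' goes through the
+# RFC2965 branch above.
+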
+cookie_validname = "[^"+re.escape(http_tokens+http_ctls)+"]*$"
+cookie_validname_re = re.compile(cookie_validname)
+cookie_validvalue = cookie_validname+'|"([^"]|\\\\")*"$'
+cookie_validvalue_re = re.compile(cookie_validvalue)
+
+def generateCookie(cookies):
+    # There's a fundamental problem with the two cookie specifications.
+    # They both use the "Cookie" header, and the RFC Cookie header only allows
+    # one version to be specified. Thus, when you have a collection of V0 and
+    # V1 cookies, you have to either send them all as V0 or send them all as
+    # V1.
+
+    # I choose to send them all as V1.
+
+    # You might think converting a V0 cookie to a V1 cookie would be lossless,
+    # but you'd be wrong. If you do the conversion, and a V0 parser tries to
+    # read the cookie, it will see a modified form of the cookie, in cases
+    # where quotes must be added to conform to proper V1 syntax.
+    # (as a real example: "Cookie: cartcontents=oid:94680,qty:1,auto:0,esp:y")
+
+    # However, that is what we will do, anyways. It has a high probability of
+    # breaking applications that only handle oldstyle cookies, where some other
+    # application set a newstyle cookie that is applicable over the site
+    # (or host), AND where the oldstyle cookie uses a value which is invalid
+    # syntax in a newstyle cookie.
+
+    # Also, the cookie name *cannot* be quoted in V1, so some cookies just
+    # cannot be converted at all. (e.g. "Cookie: phpAds_capAd[32]=2"). These
+    # are just discarded during conversion.
+
+    # As this is an unsolvable problem, I will pretend I can just say
+    # OH WELL, don't do that, or else upgrade your old applications to have
+    # newstyle cookie parsers.
+
+    # I will note offhandedly that there are *many* sites which send V0 cookies
+    # that are not valid V1 cookie syntax. About 20% for my cookies file.
+    # However, they do not generally mix them with V1 cookies, so this isn't
+    # an issue, at least right now. I have not tested to see how many of those
+    # webapps support RFC2965 V1 cookies. I suspect not many.
+
+    max_version = max([cookie.version for cookie in cookies])
+
+    if max_version == 0:
+        # no quoting or anything.
+        return ';'.join(["%s=%s" % (cookie.name, cookie.value) for cookie in cookies])
+    else:
+        str_cookies = ['$Version="1"']
+        for cookie in cookies:
+            if cookie.version == 0:
+                # Version 0 cookie: we make sure the name and value are valid
+                # V1 syntax.
+
+                # If they are, we use them as is. This means in *most* cases,
+                # the cookie will look literally the same on output as it did
+                # on input.
+                # If it isn't a valid name, ignore the cookie.
+                # If it isn't a valid value, quote it and hope for the best on
+                # the other side.
+
+                if cookie_validname_re.match(cookie.name) is None:
+                    continue
+
+                value=cookie.value
+                if cookie_validvalue_re.match(cookie.value) is None:
+                    value = quoteString(value)
+
+                str_cookies.append("%s=%s" % (cookie.name, value))
+            else:
+                # V1 cookie, nice and easy
+                str_cookies.append("%s=%s" % (cookie.name, quoteString(cookie.value)))
+
+            if cookie.path:
+                str_cookies.append("$Path=%s" % quoteString(cookie.path))
+            if cookie.domain:
+                str_cookies.append("$Domain=%s" % quoteString(cookie.domain))
+            if cookie.ports is not None:
+                if len(cookie.ports) == 0:
+                    str_cookies.append("$Port")
+                else:
+                    str_cookies.append("$Port=%s" % quoteString(",".join([str(x) for x in cookie.ports])))
+        return ';'.join(str_cookies)
+
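+# Illustrative: with only version-0 cookies the header stays unquoted,
+#   generateCookie([Cookie('a', '1'), Cookie('b', '2')]) == 'a=1;b=2'
+# whereas any version-1 cookie in the list switches the whole header to the
+# quoted '$Version="1"; ...' form described above.
+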
+def parseSetCookie(headers):
+    setCookies = []
+    for header in headers:
+        try:
+            parts = header.split(';')
+            l = []
+
+            for part in parts:
+                namevalue = part.split('=',1)
+                if len(namevalue) == 1:
+                    name=namevalue[0]
+                    value=None
+                else:
+                    name,value=namevalue
+                    value=value.strip(' \t')
+
+                name=name.strip(' \t')
+
+                l.append((name, value))
+
+            setCookies.append(makeCookieFromList(l, True))
+        except ValueError:
+            # If we can't parse one Set-Cookie, ignore it,
+            # but not the rest of Set-Cookies.
+            pass
+    return setCookies
+
+def parseSetCookie2(toks):
+    outCookies = []
+    for cookie in [[parseKeyValue(x) for x in split(y, Token(';'))]
+                   for y in split(toks, Token(','))]:
+        try:
+            outCookies.append(makeCookieFromList(cookie, False))
+        except ValueError:
+            # Again, if we can't handle one cookie -- ignore it.
+            pass
+    return outCookies
+
+def makeCookieFromList(tup, netscapeFormat):
+    name, value = tup[0]
+    if name is None or value is None:
+        raise ValueError("Cookie has missing name or value")
+    if name.startswith("$"):
+        raise ValueError("Invalid cookie name: %r, starts with '$'." % name)
+    cookie = Cookie(name, value)
+    hadMaxAge = False
+
+    for name,value in tup[1:]:
+        name = name.lower()
+
+        if value is None:
+            if name in ("discard", "secure"):
+                # Boolean attrs
+                value = True
+            elif name != "port":
+                # Can be either boolean or explicit
+                continue
+
+        if name in ("comment", "commenturl", "discard", "domain", "path", "secure"):
+            # simple cases
+            setattr(cookie, name, value)
+        elif name == "expires" and not hadMaxAge:
+            if netscapeFormat and value[0] == '"' and value[-1] == '"':
+                value = value[1:-1]
+            cookie.expires = parseDateTime(value)
+        elif name == "max-age":
+            hadMaxAge = True
+            cookie.expires = int(value) + time.time()
+        elif name == "port":
+            if value is None:
+                cookie.ports = ()
+            else:
+                if netscapeFormat and value[0] == '"' and value[-1] == '"':
+                    value = value[1:-1]
+                cookie.ports = tuple([int(s) for s in value.split(',')])
+        elif name == "version":
+            cookie.version = int(value)
+
+    return cookie
+
+
+def generateSetCookie(cookies):
+    setCookies = []
+    for cookie in cookies:
+        out = ["%s=%s" % (cookie.name, cookie.value)]
+        if cookie.expires:
+            out.append("expires=%s" % generateDateTime(cookie.expires))
+        if cookie.path:
+            out.append("path=%s" % cookie.path)
+        if cookie.domain:
+            out.append("domain=%s" % cookie.domain)
+        if cookie.secure:
+            out.append("secure")
+
+        setCookies.append('; '.join(out))
+    return setCookies
+
+def generateSetCookie2(cookies):
+    setCookies = []
+    for cookie in cookies:
+        out = ["%s=%s" % (cookie.name, quoteString(cookie.value))]
+        if cookie.comment:
+            out.append("Comment=%s" % quoteString(cookie.comment))
+        if cookie.commenturl:
+            out.append("CommentURL=%s" % quoteString(cookie.commenturl))
+        if cookie.discard:
+            out.append("Discard")
+        if cookie.domain:
+            out.append("Domain=%s" % quoteString(cookie.domain))
+        if cookie.expires:
+            out.append("Max-Age=%s" % (cookie.expires - time.time()))
+        if cookie.path:
+            out.append("Path=%s" % quoteString(cookie.path))
+        if cookie.ports is not None:
+            if len(cookie.ports) == 0:
+                out.append("Port")
+            else:
+                out.append("Port=%s" % quoteString(",".join([str(x) for x in cookie.ports])))
+        if cookie.secure:
+            out.append("Secure")
+        out.append('Version="1"')
+        setCookies.append('; '.join(out))
+    return setCookies
+
+def parseDepth(depth):
+    if depth not in ("0", "1", "infinity"):
+        raise ValueError("Invalid depth header value: %s" % (depth,))
+    return depth
+
+def parseOverWrite(overwrite):
+    if overwrite == "F":
+        return False
+    elif overwrite == "T":
+        return True
+    raise ValueError("Invalid overwrite header value: %s" % (overwrite,))
+
+def generateOverWrite(overwrite):
+    if overwrite:
+        return "T"
+    else:
+        return "F"
+
+##### Random stuff that looks useful.
+# def sortMimeQuality(s):
+#     def sorter(item1, item2):
+#         if item1[0] == '*':
+#             if item2[0] == '*':
+#                 return 0
+
+
+# def sortQuality(s):
+#     def sorter(item1, item2):
+#         if item1[1] < item2[1]:
+#             return -1
+#         if item1[1] < item2[1]:
+#             return 1
+#         if item1[0] == item2[0]:
+#             return 0
+
+
+# def getMimeQuality(mimeType, accepts):
+#     type,args = parseArgs(mimeType)
+#     type=type.split(Token('/'))
+#     if len(type) != 2:
+#         raise ValueError, "MIME Type "+s+" invalid."
+
+#     for accept in accepts:
+#         accept,acceptQual=accept
+#         acceptType=accept[0:1]
+#         acceptArgs=accept[2]
+
+#         if ((acceptType == type or acceptType == (type[0],'*') or acceptType==('*','*')) and
+#             (args == acceptArgs or len(acceptArgs) == 0)):
+#             return acceptQual
+
+# def getQuality(type, accepts):
+#     qual = accepts.get(type)
+#     if qual is not None:
+#         return qual
+
+#     return accepts.get('*')
+
+# Headers object
+class __RecalcNeeded(object):
+    def __repr__(self):
+        return "<RecalcNeeded>"
+
+_RecalcNeeded = __RecalcNeeded()
+
+class Headers(object):
+    """This class stores the HTTP headers as both a parsed representation and
+    the raw string representation. It converts between the two on demand."""
+
+    def __init__(self, headers=None, rawHeaders=None, handler=DefaultHTTPHandler):
+        self._raw_headers = {}
+        self._headers = {}
+        self.handler = handler
+        if headers is not None:
+            for key, value in headers.iteritems():
+                self.setHeader(key, value)
+        if rawHeaders is not None:
+            for key, value in rawHeaders.iteritems():
+                self.setRawHeaders(key, value)
+
+    def _setRawHeaders(self, headers):
+        self._raw_headers = headers
+        self._headers = {}
+
+    def _toParsed(self, name):
+        r = self._raw_headers.get(name, None)
+        h = self.handler.parse(name, r)
+        if h is not None:
+            self._headers[name] = h
+        return h
+
+    def _toRaw(self, name):
+        h = self._headers.get(name, None)
+        r = self.handler.generate(name, h)
+        if r is not None:
+            self._raw_headers[name] = r
+        return r
+
+    def hasHeader(self, name):
+        """Does a header with the given name exist?"""
+        name=name.lower()
+        return self._raw_headers.has_key(name)
+
+    def getRawHeaders(self, name, default=None):
+        """Returns a list of headers matching the given name as the raw string given."""
+
+        name=name.lower()
+        raw_header = self._raw_headers.get(name, default)
+        if raw_header is not _RecalcNeeded:
+            return raw_header
+
+        return self._toRaw(name)
+
+    def getHeader(self, name, default=None):
+        """Ret9urns the parsed representation of the given header.
+        The exact form of the return value depends on the header in question.
+
+        If no parser for the header exists, raise ValueError.
+
+        If the header doesn't exist, return default (or None if not specified)
+        """
+        name=name.lower()
+        parsed = self._headers.get(name, default)
+        if parsed is not _RecalcNeeded:
+            return parsed
+        return self._toParsed(name)
+
+    def setRawHeaders(self, name, value):
+        """Sets the raw representation of the given header.
+        Value should be a list of strings, each being one header of the
+        given name.
+        """
+        name=name.lower()
+        self._raw_headers[name] = value
+        self._headers[name] = _RecalcNeeded
+
+    def setHeader(self, name, value):
+        """Sets the parsed representation of the given header.
+        Value should be a list of objects whose exact form depends
+        on the header in question.
+        """
+        name=name.lower()
+        self._raw_headers[name] = _RecalcNeeded
+        self._headers[name] = value
+
+    def addRawHeader(self, name, value):
+        """
+        Add a raw value to a header that may or may not already exist.
+        If it exists, add it as a separate header to output; do not
+        replace anything.
+        """
+        name=name.lower()
+        raw_header = self._raw_headers.get(name)
+        if raw_header is None:
+            # No header yet
+            raw_header = []
+            self._raw_headers[name] = raw_header
+        elif raw_header is _RecalcNeeded:
+            raw_header = self._toRaw(name)
+
+        raw_header.append(value)
+        self._headers[name] = _RecalcNeeded
+
+    def removeHeader(self, name):
+        """Removes the header named."""
+
+        name=name.lower()
+        if self._raw_headers.has_key(name):
+            del self._raw_headers[name]
+            del self._headers[name]
+
+    def __repr__(self):
+        return '<Headers: Raw: %s Parsed: %s>'% (self._raw_headers, self._headers)
+
+    def canonicalNameCaps(self, name):
+        """Return the name with the canonical capitalization, if known,
+        otherwise, Caps-After-Dashes"""
+        return header_case_mapping.get(name) or dashCapitalize(name)
+
+    def getAllRawHeaders(self):
+        """Return an iterator of key,value pairs of all headers
+        contained in this object, as strings. The keys are capitalized
+        in canonical capitalization."""
+        for k,v in self._raw_headers.iteritems():
+            if v is _RecalcNeeded:
+                v = self._toRaw(k)
+            yield self.canonicalNameCaps(k), v
+
+    def makeImmutable(self):
+        """Make this header set immutable. All mutating operations will
+        raise an exception."""
+        self.setHeader = self.setRawHeaders = self.removeHeader = self._mutateRaise
+
+    def _mutateRaise(self, *args):
+        raise AttributeError("This header object is immutable as the headers have already been sent.")
+
+
+"""The following dicts are all mappings of header to list of operations
+   to perform. The first operation should generally be 'tokenize' if the
+   header can be parsed according to the normal tokenization rules. If
+   it cannot, generally the first thing you want to do is take only the
+   last instance of the header (in case it was sent multiple times, which
+   is strictly an error, but we're nice).
+   """
+
+iteritems = lambda x: x.iteritems()
+
+
+parser_general_headers = {
+    'Cache-Control':(tokenize, listParser(parseCacheControl), dict),
+    'Connection':(tokenize,filterTokens),
+    'Date':(last,parseDateTime),
+#    'Pragma':tokenize
+#    'Trailer':tokenize
+    'Transfer-Encoding':(tokenize,filterTokens),
+#    'Upgrade':tokenize
+#    'Via':tokenize,stripComment
+#    'Warning':tokenize
+}
+
+generator_general_headers = {
+    'Cache-Control':(iteritems, listGenerator(generateCacheControl), singleHeader),
+    'Connection':(generateList,singleHeader),
+    'Date':(generateDateTime,singleHeader),
+#    'Pragma':
+#    'Trailer':
+    'Transfer-Encoding':(generateList,singleHeader),
+#    'Upgrade':
+#    'Via':
+#    'Warning':
+}
+
+parser_request_headers = {
+    'Accept': (tokenize, listParser(parseAccept), dict),
+    'Accept-Charset': (tokenize, listParser(parseAcceptQvalue), dict, addDefaultCharset),
+    'Accept-Encoding':(tokenize, listParser(parseAcceptQvalue), dict, addDefaultEncoding),
+    'Accept-Language':(tokenize, listParser(parseAcceptQvalue), dict),
+    'Authorization': (last, parseAuthorization),
+    'Cookie':(parseCookie,),
+    'Expect':(tokenize, listParser(parseExpect), dict),
+    'From':(last,),
+    'Host':(last,),
+    'If-Match':(tokenize, listParser(parseStarOrETag), list),
+    'If-Modified-Since':(last, parseIfModifiedSince),
+    'If-None-Match':(tokenize, listParser(parseStarOrETag), list),
+    'If-Range':(parseIfRange,),
+    'If-Unmodified-Since':(last,parseDateTime),
+    'Max-Forwards':(last,int),
+#    'Proxy-Authorization':str, # what is "credentials"
+    'Range':(tokenize, parseRange),
+    'Referer':(last,str), # TODO: URI object?
+    'TE':(tokenize, listParser(parseAcceptQvalue), dict),
+    'User-Agent':(last,str),
+}
+
+generator_request_headers = {
+    'Accept': (iteritems,listGenerator(generateAccept),singleHeader),
+    'Accept-Charset': (iteritems, listGenerator(generateAcceptQvalue),singleHeader),
+    'Accept-Encoding': (iteritems, removeDefaultEncoding, listGenerator(generateAcceptQvalue),singleHeader),
+    'Accept-Language': (iteritems, listGenerator(generateAcceptQvalue),singleHeader),
+    'Authorization': (generateAuthorization,), # what is "credentials"
+    'Cookie':(generateCookie,singleHeader),
+    'Expect':(iteritems, listGenerator(generateExpect), singleHeader),
+    'From':(str,singleHeader),
+    'Host':(str,singleHeader),
+    'If-Match':(listGenerator(generateStarOrETag), singleHeader),
+    'If-Modified-Since':(generateDateTime,singleHeader),
+    'If-None-Match':(listGenerator(generateStarOrETag), singleHeader),
+    'If-Range':(generateIfRange, singleHeader),
+    'If-Unmodified-Since':(generateDateTime,singleHeader),
+    'Max-Forwards':(str, singleHeader),
+#    'Proxy-Authorization':str, # what is "credentials"
+    'Range':(generateRange,singleHeader),
+    'Referer':(str,singleHeader),
+    'TE': (iteritems, listGenerator(generateAcceptQvalue),singleHeader),
+    'User-Agent':(str,singleHeader),
+}
+
+parser_response_headers = {
+    'Accept-Ranges':(tokenize, filterTokens),
+    'Age':(last,int),
+    'ETag':(tokenize, ETag.parse),
+    'Location':(last,), # TODO: URI object?
+#    'Proxy-Authenticate'
+    'Retry-After':(last, parseRetryAfter),
+    'Server':(last,),
+    'Set-Cookie':(parseSetCookie,),
+    'Set-Cookie2':(tokenize, parseSetCookie2),
+    'Vary':(tokenize, filterTokens),
+    'WWW-Authenticate': (lambda h: tokenize(h, foldCase=False),
+                         parseWWWAuthenticate,)
+}
+
+generator_response_headers = {
+    'Accept-Ranges':(generateList, singleHeader),
+    'Age':(str, singleHeader),
+    'ETag':(ETag.generate, singleHeader),
+    'Location':(str, singleHeader),
+#    'Proxy-Authenticate'
+    'Retry-After':(generateRetryAfter, singleHeader),
+    'Server':(str, singleHeader),
+    'Set-Cookie':(generateSetCookie,),
+    'Set-Cookie2':(generateSetCookie2,),
+    'Vary':(generateList, singleHeader),
+    'WWW-Authenticate':(generateWWWAuthenticate,)
+}
+
+parser_entity_headers = {
+    'Allow':(lambda str:tokenize(str, foldCase=False), filterTokens),
+    'Content-Encoding':(tokenize, filterTokens),
+    'Content-Language':(tokenize, filterTokens),
+    'Content-Length':(last, int),
+    'Content-Location':(last,), # TODO: URI object?
+    'Content-MD5':(last, parseContentMD5),
+    'Content-Range':(last, parseContentRange),
+    'Content-Type':(lambda str:tokenize(str, foldCase=False), parseContentType),
+    'Expires':(last, parseExpires),
+    'Last-Modified':(last, parseDateTime),
+    }
+
+generator_entity_headers = {
+    'Allow':(generateList, singleHeader),
+    'Content-Encoding':(generateList, singleHeader),
+    'Content-Language':(generateList, singleHeader),
+    'Content-Length':(str, singleHeader),
+    'Content-Location':(str, singleHeader),
+    'Content-MD5':(base64.encodestring, lambda x: x.strip("\n"), singleHeader),
+    'Content-Range':(generateContentRange, singleHeader),
+    'Content-Type':(generateContentType, singleHeader),
+    'Expires':(generateDateTime, singleHeader),
+    'Last-Modified':(generateDateTime, singleHeader),
+    }
+
+DefaultHTTPHandler.updateParsers(parser_general_headers)
+DefaultHTTPHandler.updateParsers(parser_request_headers)
+DefaultHTTPHandler.updateParsers(parser_response_headers)
+DefaultHTTPHandler.updateParsers(parser_entity_headers)
+
+DefaultHTTPHandler.updateGenerators(generator_general_headers)
+DefaultHTTPHandler.updateGenerators(generator_request_headers)
+DefaultHTTPHandler.updateGenerators(generator_response_headers)
+DefaultHTTPHandler.updateGenerators(generator_entity_headers)
+
+
+# casemappingify(DefaultHTTPParsers)
+# casemappingify(DefaultHTTPGenerators)
+
+# lowerify(DefaultHTTPParsers)
+# lowerify(DefaultHTTPGenerators)
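
A minimal usage sketch of the Headers class added above, assuming it is imported from cubicweb.web.http_headers (as request.py below does) and that the default parser/generator tables registered on DefaultHTTPHandler are in effect; header names and values are illustrative:

    from cubicweb.web.http_headers import Headers

    h = Headers()
    # raw string form in; the parsed form is recomputed lazily via _toParsed()
    h.setRawHeaders('content-type', ['text/html; charset=UTF-8'])
    ctype = h.getHeader('content-type')    # parsed value produced by the registered parser
    # parsed form in; the raw form is regenerated lazily via _toRaw()
    h.setHeader('content-length', 42)
    h.getRawHeaders('content-length')      # ['42']
    # once the headers have been sent, the set can be frozen
    h.makeImmutable()
    h.setHeader('server', 'x')             # raises AttributeError
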
--- a/web/httpcache.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/httpcache.py	Mon Jul 19 15:36:16 2010 +0200
@@ -15,10 +15,8 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""HTTP cache managers
+"""HTTP cache managers"""
 
-
-"""
 __docformat__ = "restructuredtext en"
 
 from time import mktime
@@ -56,6 +54,8 @@
     """
 
     def etag(self):
+        if not self.req.cnx: # session without established connection to the repo
+            return self.view.__regid__
         return self.view.__regid__ + '/' + ','.join(sorted(self.req.user.groups))
 
     def max_age(self):
@@ -144,8 +144,5 @@
 # max-age=0 to actually force revalidation when needed
 viewmod.View.cache_max_age = 0
 
-
-viewmod.EntityView.http_cache_manager = EntityHTTPCacheManager
-
 viewmod.StartupView.http_cache_manager = MaxAgeHTTPCacheManager
 viewmod.StartupView.cache_max_age = 60*60*2 # stay in http cache for 2 hours by default
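
With the etag() guard added above, the cache validator degrades gracefully depending on whether the request has an established repository connection; illustrative values (view id and group names made up):

    # connected user in groups 'managers' and 'users', view id 'primary':
    #   etag() -> 'primary/managers,users'
    # session without an established repository connection (req.cnx is None):
    #   etag() -> 'primary'
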
--- a/web/request.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/request.py	Mon Jul 19 15:36:16 2010 +0200
@@ -15,13 +15,12 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""abstract class for http request
+"""abstract class for http request"""
 
-"""
 __docformat__ = "restructuredtext en"
 
 import Cookie
-import sha
+import hashlib
 import time
 import random
 import base64
@@ -42,6 +41,8 @@
 from cubicweb.view import STRICT_DOCTYPE, TRANSITIONAL_DOCTYPE_NOEXT
 from cubicweb.web import (INTERNAL_FIELD_VALUE, LOGGER, NothingToEdit,
                           RequestError, StatusResponse, json)
+from cubicweb.web.http_headers import Headers
+
 dumps = json.dumps
 
 _MARKER = object()
@@ -100,6 +101,8 @@
         self.pageid = None
         self.datadir_url = self._datadir_url()
         self._set_pageid()
+        # prepare output header
+        self.headers_out = Headers()
 
     def _set_pageid(self):
         """initialize self.pageid
@@ -131,11 +134,11 @@
             self.set_page_data('rql_varmaker', varmaker)
         return varmaker
 
-    def set_connection(self, cnx, user=None):
+    def set_session(self, session, user=None):
         """method called by the session handler when the user is authenticated
         or an anonymous connection is open
         """
-        super(CubicWebRequestBase, self).set_connection(cnx, user)
+        super(CubicWebRequestBase, self).set_session(session, user)
         # set request language
         vreg = self.vreg
         if self.user:
@@ -160,8 +163,9 @@
         gettext, self.pgettext = self.translations[lang]
         self._ = self.__ = gettext
         self.lang = lang
-        self.cnx.set_session_props(lang=lang)
         self.debug('request language: %s', lang)
+        if self.cnx:
+            self.cnx.set_session_props(lang=lang)
 
     # input form parameters management ########################################
 
@@ -245,7 +249,7 @@
     @property
     def message(self):
         try:
-            return self.get_session_data(self._msgid, default=u'', pop=True)
+            return self.session.data.pop(self._msgid, '')
         except AttributeError:
             try:
                 return self._msg
@@ -266,17 +270,17 @@
     def set_redirect_message(self, msg):
         assert isinstance(msg, unicode)
         msgid = self.redirect_message_id()
-        self.set_session_data(msgid, msg)
+        self.session.data[msgid] = msg
         return msgid
 
     def append_to_redirect_message(self, msg):
         msgid = self.redirect_message_id()
-        currentmsg = self.get_session_data(msgid)
+        currentmsg = self.session.data.get(msgid)
         if currentmsg is not None:
             currentmsg = '%s %s' % (currentmsg, msg)
         else:
             currentmsg = msg
-        self.set_session_data(msgid, currentmsg)
+        self.session.data[msgid] = currentmsg
         return msgid
 
     def reset_message(self):
@@ -288,8 +292,8 @@
     def update_search_state(self):
         """update the current search state"""
         searchstate = self.form.get('__mode')
-        if not searchstate and self.cnx is not None:
-            searchstate = self.get_session_data('search_state', 'normal')
+        if not searchstate and self.cnx:
+            searchstate = self.session.data.get('search_state', 'normal')
         self.set_search_state(searchstate)
 
     def set_search_state(self, searchstate):
@@ -299,8 +303,8 @@
         else:
             self.search_state = ('linksearch', searchstate.split(':'))
             assert len(self.search_state[-1]) == 4
-        if self.cnx is not None:
-            self.set_session_data('search_state', searchstate)
+        if self.cnx:
+            self.session.data['search_state'] = searchstate
 
     def match_search_state(self, rset):
         """when searching an entity to create a relation, return True if entities in
@@ -317,12 +321,12 @@
 
     def update_breadcrumbs(self):
         """stores the last visisted page in session data"""
-        searchstate = self.get_session_data('search_state')
+        searchstate = self.session.data.get('search_state')
         if searchstate == 'normal':
-            breadcrumbs = self.get_session_data('breadcrumbs', None)
+            breadcrumbs = self.session.data.get('breadcrumbs')
             if breadcrumbs is None:
                 breadcrumbs = SizeConstrainedList(10)
-                self.set_session_data('breadcrumbs', breadcrumbs)
+                self.session.data['breadcrumbs'] = breadcrumbs
                 breadcrumbs.append(self.url())
             else:
                 url = self.url()
@@ -330,7 +334,7 @@
                     breadcrumbs.append(url)
 
     def last_visited_page(self):
-        breadcrumbs = self.get_session_data('breadcrumbs', None)
+        breadcrumbs = self.session.data.get('breadcrumbs')
         if breadcrumbs:
             return breadcrumbs.pop()
         return self.base_url()
@@ -355,15 +359,15 @@
 
     def register_onetime_callback(self, func, *args):
         cbname = 'cb_%s' % (
-            sha.sha('%s%s%s%s' % (time.time(), func.__name__,
-                                  random.random(),
-                                  self.user.login)).hexdigest())
+            hashlib.sha1('%s%s%s%s' % (time.time(), func.__name__,
+                                       random.random(),
+                                       self.user.login)).hexdigest())
         def _cb(req):
             try:
                 ret = func(req, *args)
             except TypeError:
                 from warnings import warn
-                warn('user callback should now take request as argument')
+                warn('[3.2] user callback should now take request as argument')
                 ret = func(*args)
             self.unregister_callback(self.pageid, cbname)
             return ret
@@ -377,11 +381,10 @@
         self.del_page_data(cbname)
 
     def clear_user_callbacks(self):
-        if self.cnx is not None:
-            sessdata = self.session_data()
-            callbacks = [key for key in sessdata if key.startswith('cb_')]
-            for callback in callbacks:
-                self.del_session_data(callback)
+        if self.session is not None: # XXX
+            for key in self.session.data.keys():
+                if key.startswith('cb_'):
+                    del self.session.data[key]
 
     # web edition helpers #####################################################
 
@@ -447,13 +450,13 @@
         This is needed when the edition is completed (whether it's validated
         or cancelled)
         """
-        self.del_session_data('pending_insert')
-        self.del_session_data('pending_delete')
+        self.session.data.pop('pending_insert', None)
+        self.session.data.pop('pending_delete', None)
 
     def cancel_edition(self, errorurl):
         """remove pending operations and `errorurl`'s specific stored data
         """
-        self.del_session_data(errorurl)
+        self.session.data.pop(errorurl, None)
         self.remove_pending_operations()
 
     # high level methods for HTTP headers management ##########################
@@ -672,17 +675,26 @@
         """
         raise NotImplementedError()
 
-    def set_header(self, header, value):
+    def set_header(self, header, value, raw=True):
         """set an output HTTP header"""
-        raise NotImplementedError()
+        if raw:
+            # adding the header as an encoded string is important, else page
+            # content will be reconverted back to unicode which, besides being
+            # inefficient, may cause decoding problems (e.g. when downloading a file)
+            self.headers_out.setRawHeaders(header, [str(value)])
+        else:
+            self.headers_out.setHeader(header, value)
 
     def add_header(self, header, value):
         """add an output HTTP header"""
-        raise NotImplementedError()
+        # adding the header as an encoded string is important, else page
+        # content will be reconverted back to unicode which, besides being
+        # inefficient, may cause decoding problems (e.g. when downloading a file)
+        self.headers_out.addRawHeader(header, str(value))
 
     def remove_header(self, header):
         """remove an output HTTP header"""
-        raise NotImplementedError()
+        self.headers_out.removeHeader(header)
 
     def header_authorization(self):
         """returns a couple (auth-type, auth-value)"""
@@ -747,27 +759,30 @@
     # page data management ####################################################
 
     def get_page_data(self, key, default=None):
-        """return value associated to `key` in curernt page data"""
-        page_data = self.cnx.get_session_data(self.pageid, {})
+        """return value associated to `key` in current page data"""
+        page_data = self.session.data.get(self.pageid)
+        if page_data is None:
+            return default
         return page_data.get(key, default)
 
     def set_page_data(self, key, value):
         """set value associated to `key` in current page data"""
         self.html_headers.add_unload_pagedata()
-        page_data = self.cnx.get_session_data(self.pageid, {})
+        page_data = self.session.data.setdefault(self.pageid, {})
         page_data[key] = value
-        return self.cnx.set_session_data(self.pageid, page_data)
+        self.session.data[self.pageid] = page_data
 
     def del_page_data(self, key=None):
         """remove value associated to `key` in current page data
         if `key` is None, all page data will be cleared
         """
         if key is None:
-            self.cnx.del_session_data(self.pageid)
+            self.session.data.pop(self.pageid, None)
         else:
-            page_data = self.cnx.get_session_data(self.pageid, {})
-            page_data.pop(key, None)
-            self.cnx.set_session_data(self.pageid, page_data)
+            try:
+                del self.session.data[self.pageid][key]
+            except KeyError:
+                pass
 
     # user-agent detection ####################################################
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/schemaviewer.py	Mon Jul 19 15:36:16 2010 +0200
@@ -0,0 +1,244 @@
+# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+"""an helper class to display CubicWeb schema using ureports
+
+"""
+__docformat__ = "restructuredtext en"
+_ = unicode
+
+from logilab.common.ureports import Section, Title, Table, Link, Span, Text
+
+from yams.schema2dot import CARD_MAP
+from yams.schema import RelationDefinitionSchema
+from operator import attrgetter
+
+TYPE_GETTER = attrgetter('type')
+
+I18NSTRINGS = [_('read'), _('add'), _('delete'), _('update'), _('order')]
+
+
+class SchemaViewer(object):
+    """return an ureport layout for some part of a schema"""
+    def __init__(self, req=None, encoding=None):
+        self.req = req
+        if req is not None:
+            req.add_css('cubicweb.schema.css')
+            if encoding is None:
+                encoding = req.encoding
+            self._ = req._
+        else:
+            encoding = 'ascii'
+            self._ = unicode
+        self.encoding = encoding
+
+    # methods below must cope with self.req being None
+
+    def may_read(self, rdef, action='read'):
+        """Return true if request user may read the given schema.
+        Always return True when no request is provided.
+        """
+        if self.req is None:
+            return True
+        return rdef.may_have_permission(action, self.req)
+
+    def format_eschema(self, eschema):
+        text = eschema.type
+        if self.req is None:
+            return Text(text)
+        return Link(self.req.build_url('cwetype/%s' % eschema), text)
+
+    def format_rschema(self, rschema, label=None):
+        if label is None:
+            label = rschema.type
+        if self.req is None:
+            return Text(label)
+        return Link(self.req.build_url('cwrtype/%s' % rschema), label)
+
+    # end of methods that must cope with self.req being None
+
+    def visit_schema(self, schema, display_relations=0, skiptypes=()):
+        """get a layout for a whole schema"""
+        title = Title(self._('Schema %s') % schema.name,
+                      klass='titleUnderline')
+        layout = Section(children=(title,))
+        esection = Section(children=(Title(self._('Entities'),
+                                           klass='titleUnderline'),))
+        layout.append(esection)
+        eschemas = [eschema for eschema in schema.entities()
+                    if not (eschema.final or eschema in skiptypes)]
+        for eschema in sorted(eschemas, key=TYPE_GETTER):
+            esection.append(self.visit_entityschema(eschema, skiptypes))
+        if display_relations:
+            title = Title(self._('Relations'), klass='titleUnderline')
+            rsection = Section(children=(title,))
+            layout.append(rsection)
+            relations = [rschema for rschema in sorted(schema.relations(), key=TYPE_GETTER)
+                         if not (rschema.final or rschema.type in skiptypes)]
+            keys = [(rschema.type, rschema) for rschema in relations]
+            for key, rschema in sorted(keys, cmp=(lambda x, y: cmp(x[1], y[1]))):
+                relstr = self.visit_relationschema(rschema)
+                rsection.append(relstr)
+        return layout
+
+    def _entity_attributes_data(self, eschema):
+        _ = self._
+        data = [_('attribute'), _('type'), _('default'), _('constraints')]
+        attributes = sorted(eschema.attribute_definitions(), cmp=(lambda x, y: cmp(x[0].type, y[0].type)))
+        for rschema, aschema in attributes:
+            rdef = eschema.rdef(rschema)
+            if not self.may_read(rdef):
+                continue
+            aname = rschema.type
+            if aname == 'eid':
+                continue
+            data.append('%s (%s)' % (aname, _(aname)))
+            data.append(_(aschema.type))
+            defaultval = eschema.default(aname)
+            if defaultval is not None:
+                default = self.to_string(defaultval)
+            elif rdef.cardinality[0] == '1':
+                default = _('required field')
+            else:
+                default = ''
+            data.append(default)
+            constraints = rschema.rproperty(eschema.type, aschema.type,
+                                            'constraints')
+            data.append(', '.join(str(constr) for constr in constraints))
+        return data
+
+
+    def stereotype(self, name):
+        return Span((' <<%s>>' % name,), klass='stereotype')
+
+    def visit_entityschema(self, eschema, skiptypes=()):
+        """get a layout for an entity schema"""
+        etype = eschema.type
+        layout = Section(children=' ', klass='clear')
+        layout.append(Link(etype,'&#160;' , id=etype)) # anchor
+        title = self.format_eschema(eschema)
+        boxchild = [Section(children=(title,), klass='title')]
+        data = []
+        data.append(Section(children=boxchild, klass='box'))
+        data.append(Section(children='', klass='vl'))
+        data.append(Section(children='', klass='hl'))
+        t_vars = []
+        rels = []
+        first = True
+
+        rel_defs = sorted(eschema.relation_definitions(),
+                          cmp=(lambda x, y: cmp((x[0].type, x[0].cardinality),
+                          (y[0].type, y[0].cardinality))))
+        for rschema, targetschemas, role in rel_defs:
+            if rschema.type in skiptypes:
+                continue
+            for oeschema in sorted(targetschemas, key=TYPE_GETTER):
+                rdef = rschema.role_rdef(eschema, oeschema, role)
+                if not self.may_read(rdef):
+                    continue
+                label = rschema.type
+                if role == 'subject':
+                    cards = rschema.rproperty(eschema, oeschema, 'cardinality')
+                else:
+                    cards = rschema.rproperty(oeschema, eschema, 'cardinality')
+                    cards = cards[::-1]
+                label = '%s %s %s' % (CARD_MAP[cards[1]], label,
+                                      CARD_MAP[cards[0]])
+                rlink = self.format_rschema(rschema, label)
+                elink = self.format_eschema(oeschema)
+                if first:
+                    t_vars.append(Section(children=(elink,), klass='firstvar'))
+                    rels.append(Section(children=(rlink,), klass='firstrel'))
+                    first = False
+                else:
+                    t_vars.append(Section(children=(elink,), klass='var'))
+                    rels.append(Section(children=(rlink,), klass='rel'))
+        data.append(Section(children=rels, klass='rels'))
+        data.append(Section(children=t_vars, klass='vars'))
+        layout.append(Section(children=data, klass='entityAttributes'))
+        return layout
+
+    def visit_relationschema(self, rschema, title=True):
+        """get a layout for a relation schema"""
+        _ = self._
+        if title:
+            title = self.format_rschema(rschema)
+            stereotypes = []
+            if rschema.meta:
+                stereotypes.append('meta')
+            if rschema.symmetric:
+                stereotypes.append('symmetric')
+            if rschema.inlined:
+                stereotypes.append('inlined')
+            title = Section(children=(title,), klass='title')
+            if stereotypes:
+                title.append(self.stereotype(','.join(stereotypes)))
+            layout = Section(children=(title,), klass='schema')
+        else:
+            layout = Section(klass='schema')
+        data = [_('from'), _('to')]
+        schema = rschema.schema
+        rschema_objects = rschema.objects()
+        if rschema_objects:
+            # might be empty
+            properties = [p for p in RelationDefinitionSchema.rproperty_defs(rschema_objects[0])
+                          if not p in ('cardinality', 'composite', 'eid')]
+        else:
+            properties = []
+        data += [_(prop) for prop in properties]
+        cols = len(data)
+        done = set()
+        for subjtype, objtypes in sorted(rschema.associations()):
+            for objtype in objtypes:
+                if (subjtype, objtype) in done:
+                    continue
+                done.add((subjtype, objtype))
+                if rschema.symmetric:
+                    done.add((objtype, subjtype))
+                data.append(self.format_eschema(schema[subjtype]))
+                data.append(self.format_eschema(schema[objtype]))
+                rdef = rschema.rdef(subjtype, objtype)
+                for prop in properties:
+                    val = getattr(rdef, prop)
+                    if val is None:
+                        val = ''
+                    elif prop == 'constraints':
+                        val = ', '.join([c.restriction for c in val])
+                    elif isinstance(val, dict):
+                        for key, value in val.iteritems():
+                            if isinstance(value, (list, tuple)):
+                                val[key] = ', '.join(sorted( str(v) for v in value))
+                        val = str(val)
+
+                    elif isinstance(val, (list, tuple)):
+                        val = sorted(val)
+                        val = ', '.join(str(v) for v in val)
+                    elif val and isinstance(val, basestring):
+                        val = _(val)
+                    else:
+                        val = str(val)
+                    data.append(Text(val))
+        table = Table(cols=cols, rheaders=1, children=data, klass='listing')
+        layout.append(Section(children=(table,), klass='relationDefinition'))
+        layout.append(Section(children='', klass='clear'))
+        return layout
+
+    def to_string(self, value):
+        """used to converte arbitrary values to encoded string"""
+        if isinstance(value, unicode):
+            return value.encode(self.encoding, 'replace')
+        return str(value)
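
A minimal sketch of driving the SchemaViewer added above, assuming the module is importable as cubicweb.web.schemaviewer and that `schema` is a loaded yams schema; the writer used to render the resulting ureports layout is left out:

    from cubicweb.web.schemaviewer import SchemaViewer

    viewer = SchemaViewer(req=None)   # no request: plain Text labels, ascii encoding
    layout = viewer.visit_schema(schema, display_relations=True,
                                 skiptypes=('CWProperty',))
    # `layout` is a logilab.common.ureports Section tree, ready to be fed
    # to an HTML or text ureports writer
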
Binary file web/test/data/sample1.pdf has changed
--- a/web/test/data/sample1.xml	Thu May 06 08:24:46 2010 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,138 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd" [
-  <!ATTLIST html xmlns:cubicweb CDATA  #FIXED 'http://www.logilab.org/2008/cubicweb'  >
-
-<!ENTITY % coreattrs
- "id          ID            #IMPLIED
-  class       CDATA         #IMPLIED
-  style       CDATA         #IMPLIED
-  title       CDATA         #IMPLIED
-
- cubicweb:sortvalue         CDATA   #IMPLIED
- cubicweb:target            CDATA   #IMPLIED
- cubicweb:limit             CDATA   #IMPLIED
- cubicweb:type              CDATA   #IMPLIED
- cubicweb:loadtype          CDATA   #IMPLIED
- cubicweb:wdgtype           CDATA   #IMPLIED
- cubicweb:initfunc          CDATA   #IMPLIED
- cubicweb:inputid           CDATA   #IMPLIED
- cubicweb:tindex            CDATA   #IMPLIED
- cubicweb:inputname         CDATA   #IMPLIED
- cubicweb:value             CDATA   #IMPLIED
- cubicweb:required          CDATA   #IMPLIED
- cubicweb:accesskey         CDATA   #IMPLIED
- cubicweb:maxlength         CDATA   #IMPLIED
- cubicweb:variables         CDATA   #IMPLIED
- cubicweb:displayactions    CDATA   #IMPLIED
- cubicweb:fallbackvid       CDATA   #IMPLIED
- cubicweb:fname             CDATA   #IMPLIED
- cubicweb:vid               CDATA   #IMPLIED
- cubicweb:rql               CDATA   #IMPLIED
- cubicweb:actualrql         CDATA   #IMPLIED
- cubicweb:rooteid           CDATA   #IMPLIED
- cubicweb:dataurl           CDATA   #IMPLIED
- cubicweb:size              CDATA   #IMPLIED
- cubicweb:tlunit            CDATA   #IMPLIED
- cubicweb:loadurl           CDATA   #IMPLIED
- cubicweb:uselabel          CDATA   #IMPLIED
- cubicweb:facetargs         CDATA   #IMPLIED
- cubicweb:facetName         CDATA   #IMPLIED
-  "> ] >
-
-<html xmlns="http://www.w3.org/1999/xhtml" xmlns:cubicweb="http://www.logilab.org/2008/cubicweb" xml:lang="fr" lang="fr">
-<head>
-<base href="http://crater:8888/"></base><meta http-equiv="content-type" content="application/xhtml+xml; charset=UTF-8"/>
-<meta name="ROBOTS" content="NOINDEX" />
-<link rel="shortcut icon" href="http://crater:8888/data0ea37b3fea72bf4b2fde96c64e51f626/favicon.ico"/>
-<link rel="alternate" type="application/rss+xml" title="RSS feed" href="http://crater:8888/project/Comet/0.2.0?vid=rss"/>
-<title>Comet 0.2.0 (unset title)</title>
-<script type="text/javascript"><!--//--><![CDATA[//><!--
-pageid = "0499a5d7add13919a458db30006d9832";
-//--><!]]></script>
-<link rel="stylesheet" type="text/css" media="all" href="http://crater:8888/data0ea37b3fea72bf4b2fde96c64e51f626/cubes.tracker.css"/>
-<link rel="stylesheet" type="text/css" media="print" href="http://crater:8888/data0ea37b3fea72bf4b2fde96c64e51f626/cubicweb.print.css"/>
-<link rel="stylesheet" type="text/css" media="all" href="http://crater:8888/data0ea37b3fea72bf4b2fde96c64e51f626/cubicweb.login.css"/>
-<script type="text/javascript" src="http://crater:8888/data0ea37b3fea72bf4b2fde96c64e51f626/jquery.js"></script>
-<script type="text/javascript" src="http://crater:8888/data0ea37b3fea72bf4b2fde96c64e51f626/jquery.corner.js"></script>
-<script type="text/javascript" src="http://crater:8888/data0ea37b3fea72bf4b2fde96c64e51f626/jquery.json.js"></script>
-<script type="text/javascript" src="http://crater:8888/data0ea37b3fea72bf4b2fde96c64e51f626/cubicweb.compat.js"></script>
-<script type="text/javascript" src="http://crater:8888/data0ea37b3fea72bf4b2fde96c64e51f626/cubicweb.python.js"></script>
-<script type="text/javascript" src="http://crater:8888/data0ea37b3fea72bf4b2fde96c64e51f626/cubicweb.htmlhelpers.js"></script>
-<script type="text/javascript" src="http://crater:8888/data0ea37b3fea72bf4b2fde96c64e51f626/cubicweb.ajax.js"></script>
-<script type="text/javascript" src="http://crater:8888/data0ea37b3fea72bf4b2fde96c64e51f626/cubicweb.bookmarks.js"></script>
-<script type="text/javascript">
-jQuery(document).ready(function () {
- jQuery("#__login:visible").focus()
- });
-</script>
-</head>
-
-<body>
-<table id="header"><tr>
-<td id="firstcolumn"><a href="http://crater:8888/"><img class="logo" src="http://crater:8888/data0ea37b3fea72bf4b2fde96c64e51f626/logo.png" alt="logo"/></a></td>
-<td id="headtext"><span id="appliName"><a href="http://crater:8888/">unset title</a></span><span class="pathbar">&#160;&gt;&#160;<a href="http://crater:8888/Project">projets</a>&#160;&gt;&#160;<a href="http://crater:8888/project/Comet" title="">Comet</a>&#160;&gt;&#160;
-0.2.0</span></td><td>
-anonyme&#160;[<a class="logout" href="javascript: popupLoginBox();">s'authentifier</a>]</td><td><a href="http://crater:8888/doc/main" class="help" title="aide">&#160;</a></td><td id="lastcolumn"></td>
-</tr></table>
-<div id="popupLoginBox" class="hidden"><div id="loginContent">
-<form method="post" action="http://crater:8888/project/Comet/0.2.0?vid=statussheet" id="login_form">
-<table>
-<tr>
-<td><label for="__login">identifiant</label></td><td><input name="__login" id="__login" class="data" type="text" /></td></tr><tr>
-<td><label for="__password" >mot de passe</label></td><td><input name="__password" id="__password" class="data" type="password" /></td>
-</tr><tr>
-<td>&#160;</td><td><input type="submit" class="loginButton right" value="s'identifier" />
-</td></tr>
-</table>
-</form>
-</div></div>
-
-  <div id="stateheader">
-  </div>
-  <div id="page"><table width="100%" border="0" id="mainLayout"><tr>
-<td class="navcol"><div class="navboxes">
-<div class="searchBoxFrame" id="search_box"><div class="boxTitle"><span><span onclick="javascript: toggleVisibility('rqlinput')">rechercher</span></span></div><div class="boxContent">
-<form action="http://crater:8888/view">
-<table id="tsearch"><tr><td>
-<input id="norql" type="text" accesskey="q" tabindex="1" title="search text" value="" name="rql" />
-<input type="hidden" name="__fromsearchbox" value="1" />
-<input type="hidden" name="subvid" value="tsearch" />
-</td><td>
-<input tabindex="2" type="submit" id="rqlboxsubmit" class="rqlsubmit" value="" />
-</td></tr></table>
-</form></div>
-<div class="shadow">&#160;</div></div><div class="greyBoxFrame" id="edit_box"><div class="boxTitle"><span>actions - version</span></div><div class="boxContent">
-<ul class="boxListing"><li class="boxMainactions"><a href="http://crater:8888/project/Comet/0.2.0" title="keyword: view">voir</a></li>
-<li class="boxMainactions"><a href="http://crater:8888/project/Comet/0.2.0?vid=edition" title="keyword: edit">modifier</a></li>
-<li class="boxMainactions"><a title="aucune transition possible">état: <i>en cours</i></a></li><li><a href="javascript: toggleVisibility('boxmenu_ajouter')" class="boxMenu">ajouter</a><ul id="boxmenu_ajouter" class="hidden"><li class="boxItem"><a href="http://crater:8888/project/Comet/0.2.0?etype=Ticket&amp;__linkto=done_in%3A789%3Asubject&amp;__redirectvid=statussheet&amp;__redirectpath=project%2FComet%2F0.2.0&amp;vid=creation" title="">ticket</a></li>
-<li class="boxItem"><a href="http://crater:8888/project/Comet/0.2.0?etype=Ticket&amp;__linkto=appeared_in%3A789%3Asubject&amp;__redirectvid=statussheet&amp;__redirectpath=project%2FComet%2F0.2.0&amp;vid=creation" title="">signaler une anomalie</a></li>
-</ul></li><li><a href="javascript: toggleVisibility('boxmenu_plus_dactions')" class="boxMenu">plus d'actions</a><ul id="boxmenu_plus_dactions" class="hidden"><li class="boxMoreactions"><a href="http://crater:8888/project/Comet/0.2.0?vid=security" title="keyword: managepermission">gestion des permissions</a></li>
-<li class="boxMoreactions"><a href="http://crater:8888/project/Comet/0.2.0?vid=deleteconf" title="keyword: delete">supprimer</a></li>
-<li class="boxMoreactions"><a href="http://crater:8888/project/Comet/0.2.0?vid=copy" title="keyword: copy">copier</a></li>
-<li class="boxMoreactions"><a href="http://crater:8888/view?rql=Any%20X%20WHERE%20X%20version_of%20P%2C%20P%20name%20%22Comet%22%2C%20X%20num%20%220.2.0%22%2C%20X%20is%20Version&amp;template=pdf-main-template" title="keyword: pdfexport">export pdf</a></li>
-<li class="boxMoreactions"><a href="http://crater:8888/project/Comet/0.2.0?vid=document" title="keyword: pvrestexport">export ReST</a></li>
-</ul></li></ul>
-</div>
-<div class="shadow">&#160;</div></div><div class="boxFrame" id="bookmarks_box"><div class="boxTitle"><span>signets</span></div><div class="boxContent">
-<ul class="sideBox"><li><a href="javascript: toggleVisibility('boxmenu_gérer_les_signets')" class="boxMenu">gérer les signets</a><ul id="boxmenu_gérer_les_signets" class="hidden"><li class="boxManage"><a href="http://crater:8888/add/Bookmark?__linkto=bookmarked_by%3A5%3Asubject&amp;path=project%2FComet%2F0.2.0%3Fvid%3Dstatussheet" title="keyword: bookmark">poser un signet ici</a></li>
-<li class="boxManage"><a href="http://crater:8888/cwuser/admin?target=subject&amp;vid=xaddrelation&amp;rtype=bookmarked_by" title="">récupérer des signets existants</a></li>
-</ul></li></ul>
-</div>
-<div class="shadow">&#160;</div></div></div></td>
-<td id="contentcol">
-<div id="rqlinput" class="hidden">
-          <form action="http://crater:8888/view">
-<fieldset>
-<input type="text" id="rql" name="rql" value="Any X WHERE X version_of P, P name &quot;Comet&quot;, X num &quot;0.2.0&quot;, X is Version"  title="texte à rechercher ou requête RQL" tabindex="3" accesskey="q" class="searchField" />
-<input type="submit" value="" class="rqlsubmit" tabindex="4" />
-</fieldset>
-</form></div><div id="appMsg" onclick="javascript: toggleVisibility('appMsg')" class="hidden">
-</div><div id="pageContent">
-<div id="contentmain">
-<h2>Fiche de statut</h2><table class="listing"><tr><th rowspan="2">Projets</th><th colspan="2">Version</th><th rowspan="2">Parent</th><th rowspan="2">Tickets ouverts</th><th rowspan="2">Tickets implémentés</th><th rowspan="2">Statut</th></tr><tr><th>actuelle</th><th>ciblée</th></tr><tr><td title=""><a href="http://crater:8888/project/Developper%20manual" title="">Developper manual</a></td><td><a href="http://crater:8888/project/Developper%20manual/0.1.0">0.1.0</a></td><td><a href="http://crater:8888/project/Developper%20manual/0.2.0">0.2.0</a></td><td><a href="http://crater:8888/project/Comet%20documentation" title="">Comet documentation</a></td><td><div title="detail a bit configuration steps"><a href="http://crater:8888/ticket/803">T 803</a></div></td><td></td><td>en cours</td></tr><tr><td title=""><a href="http://crater:8888/project/User%20manual" title="">User manual</a></td><td><a href="http://crater:8888/project/User%20manual/0.2.0">0.2.0</a></td><td><a href="http://crater:8888/project/User%20manual/0.2.0">0.2.0</a></td><td><a href="http://crater:8888/project/Comet%20documentation" title="">Comet documentation</a></td><td></td><td><div title="write a tutorial"><a href="http://crater:8888/ticket/801">T 801</a></div></td><td>livrée</td></tr><tr><td title=""><a href="http://crater:8888/project/Comet%20documentation" title="">Comet documentation</a></td><td><a href="http://crater:8888/project/Comet%20documentation/0.1.0">0.1.0</a></td><td><a href="http://crater:8888/project/Comet%20documentation/0.2.0">0.2.0</a></td><td><a href="http://crater:8888/project/Comet" title="">Comet</a></td><td></td><td></td><td>en cours</td></tr><tr><td title=""><a href="http://crater:8888/project/Lgc" title="">Lgc</a></td><td><a href="http://crater:8888/project/Lgc/0.2.0">0.2.0</a></td><td><a href="http://crater:8888/project/Lgc/0.2.0">0.2.0</a></td><td><a href="http://crater:8888/project/Tracker" title="">Tracker</a></td><td></td><td><div title="add support for xhtml -&gt; pdf conversion"><a href="http://crater:8888/ticket/793">T 793</a></div></td><td>livrée</td></tr><tr><td title=""><a href="http://crater:8888/project/Tracker" title="">Tracker</a></td><td><a href="http://crater:8888/project/Tracker/0.1.0">0.1.0</a></td><td><a href="http://crater:8888/project/Tracker/0.2.0">0.2.0</a></td><td><a href="http://crater:8888/project/Confman" title="">Confman</a></td><td><div title="extract core from forge cube"><a href="http://crater:8888/ticket/795">T 795</a></div></td><td></td><td>en cours</td></tr><tr><td title=""><a href="http://crater:8888/project/Confman" title="">Confman</a></td><td><a href="http://crater:8888/project/Confman/0.1.0">0.1.0</a></td><td><a href="http://crater:8888/project/Confman/0.2.0">0.2.0</a></td><td><a href="http://crater:8888/project/Comet" title="">Comet</a></td><td><div title="have a version status sheet"><a href="http://crater:8888/ticket/797">T 797</a></div></td><td></td><td>en cours</td></tr></table></div>
-</div>
-</td>
-</tr></table></div>
-<div class="footer"><a href="http://crater:8888/changelog">nouveautés</a> | <a href="http://crater:8888/doc/about">à propos de ce site</a> | © 2001-2009 <a href="http://www.logilab.fr">Logilab S.A.</a></div></body>
-</html>
\ No newline at end of file
--- a/web/test/data/schema.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/test/data/schema.py	Mon Jul 19 15:36:16 2010 +0200
@@ -19,8 +19,7 @@
 
 """
 
-from yams.buildobjs import (EntityType, RelationType, RelationDefinition,
-                            SubjectRelation, ObjectRelation,
+from yams.buildobjs import (EntityType, RelationDefinition, SubjectRelation,
                             String, Int, Datetime, Boolean, Float)
 from yams.constraints import IntervalBoundConstraint
 
@@ -33,7 +32,7 @@
     subject = 'Tag'
     object = ('BlogEntry', 'CWUser')
 
-class checked_by(RelationType):
+class checked_by(RelationDefinition):
     subject = 'BlogEntry'
     object = 'CWUser'
     cardinality = '?*'
@@ -58,7 +57,10 @@
     description = String()
     salary = Float()
     travaille = SubjectRelation('Societe')
-    connait = ObjectRelation('CWUser')
+
+class connait(RelationDefinition):
+    subject = 'CWUser'
+    object = 'Personne'
 
 class Societe(EntityType):
     nom  = String(maxsize=64, fulltextindexed=True)
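
The connait change above follows the same pattern as checked_by: the implicit ObjectRelation declared on the entity type is replaced by a standalone RelationDefinition with an explicit subject and object. The two spellings of the same relation, side by side (a sketch, not part of the diff):

    # old style: declared from the object side, on the Personne entity type
    class Personne(EntityType):
        connait = ObjectRelation('CWUser')

    # new style: standalone definition, subject and object spelled out
    class connait(RelationDefinition):
        subject = 'CWUser'
        object = 'Personne'
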
--- a/web/test/data/views.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/test/data/views.py	Mon Jul 19 15:36:16 2010 +0200
@@ -27,7 +27,7 @@
 # user
 # NOTE: this require "cookie" authentication mode
 def auto_login_publish(self, path, req):
-    if (req.cnx is None or req.cnx.anonymous_connection) and req.form.get('__fblogin'):
+    if (not req.cnx or req.cnx.anonymous_connection) and req.form.get('__fblogin'):
         login = password = req.form.pop('__fblogin')
         self.repo.register_user(login, password)
         req.form['__login'] = login
--- a/web/test/unittest_application.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/test/unittest_application.py	Mon Jul 19 15:36:16 2010 +0200
@@ -1,4 +1,3 @@
-# -*- coding: iso-8859-1 -*-
 # copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
@@ -16,9 +15,7 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""unit tests for cubicweb.web.application
-
-"""
+"""unit tests for cubicweb.web.application"""
 
 import base64, Cookie
 import sys
@@ -27,9 +24,10 @@
 from logilab.common.testlib import TestCase, unittest_main
 from logilab.common.decorators import clear_cache
 
+from cubicweb import AuthenticationError, Unauthorized
 from cubicweb.devtools.testlib import CubicWebTC
 from cubicweb.devtools.fake import FakeRequest
-from cubicweb.web import Redirect, AuthenticationError, ExplicitLogin, INTERNAL_FIELD_VALUE
+from cubicweb.web import LogOut, Redirect, INTERNAL_FIELD_VALUE
 from cubicweb.web.views.basecontrollers import ViewController
 
 class FakeMapping:
@@ -42,7 +40,7 @@
 class MockCursor:
     def __init__(self):
         self.executed = []
-    def execute(self, rql, args=None, cachekey=None):
+    def execute(self, rql, args=None, build_descr=False):
         args = args or {}
         self.executed.append(rql % args)
 
@@ -52,10 +50,12 @@
     def __init__(self, form=None):
         self._cw = FakeRequest()
         self._cw.form = form or {}
-        self._cursor = self._cw.cursor = MockCursor()
+        self._cursor = MockCursor()
+        self._cw.execute = self._cursor.execute
 
     def new_cursor(self):
-        self._cursor = self._cw.cursor = MockCursor()
+        self._cursor = MockCursor()
+        self._cw.execute = self._cursor.execute
 
     def set_form(self, form):
         self._cw.form = form
@@ -191,7 +191,7 @@
             '__errorurl': 'view?vid=edition...'
             }
         path, params = self.expect_redirect(lambda x: self.app_publish(x, 'edit'), req)
-        forminfo = req.get_session_data('view?vid=edition...')
+        forminfo = req.session.data['view?vid=edition...']
         eidmap = forminfo['eidmap']
         self.assertEquals(eidmap, {})
         values = forminfo['values']
@@ -221,7 +221,7 @@
                     '__errorurl': 'view?vid=edition...',
                     }
         path, params = self.expect_redirect(lambda x: self.app_publish(x, 'edit'), req)
-        forminfo = req.get_session_data('view?vid=edition...')
+        forminfo = req.session.data['view?vid=edition...']
         self.assertEquals(set(forminfo['eidmap']), set('XY'))
         self.assertEquals(forminfo['eidmap']['X'], None)
         self.assertIsInstance(forminfo['eidmap']['Y'], int)
@@ -250,7 +250,7 @@
                     '__errorurl': 'view?vid=edition...',
                     }
         path, params = self.expect_redirect(lambda x: self.app_publish(x, 'edit'), req)
-        forminfo = req.get_session_data('view?vid=edition...')
+        forminfo = req.session.data['view?vid=edition...']
         self.assertEquals(set(forminfo['eidmap']), set('XY'))
         self.assertIsInstance(forminfo['eidmap']['X'], int)
         self.assertIsInstance(forminfo['eidmap']['Y'], int)
@@ -296,6 +296,11 @@
         self.commit()
         self.assertEquals(vreg.property_value('ui.language'), 'en')
 
+    def test_login_not_available_to_authenticated(self):
+        req = self.request()
+        ex = self.assertRaises(Unauthorized, self.app_publish, req, 'login')
+        self.assertEquals(str(ex), 'log out first')
+
     def test_fb_login_concept(self):
         """see data/views.py"""
         self.set_option('auth-mode', 'cookie')
@@ -312,29 +317,29 @@
     # authentication tests ####################################################
 
     def test_http_auth_no_anon(self):
-        req, origcnx = self.init_authentication('http')
+        req, origsession = self.init_authentication('http')
         self.assertAuthFailure(req)
-        self.assertRaises(ExplicitLogin, self.app_publish, req, 'login')
+        self.assertRaises(AuthenticationError, self.app_publish, req, 'login')
         self.assertEquals(req.cnx, None)
-        authstr = base64.encodestring('%s:%s' % (origcnx.login, origcnx.authinfo['password']))
+        authstr = base64.encodestring('%s:%s' % (origsession.login, origsession.authinfo['password']))
         req._headers['Authorization'] = 'basic %s' % authstr
-        self.assertAuthSuccess(req, origcnx)
-        self.assertEquals(req.cnx.authinfo, {'password': origcnx.authinfo['password']})
-        self.assertRaises(AuthenticationError, self.app_publish, req, 'logout')
+        self.assertAuthSuccess(req, origsession)
+        self.assertEquals(req.session.authinfo, {'password': origsession.authinfo['password']})
+        self.assertRaises(LogOut, self.app_publish, req, 'logout')
         self.assertEquals(len(self.open_sessions), 0)
 
     def test_cookie_auth_no_anon(self):
-        req, origcnx = self.init_authentication('cookie')
+        req, origsession = self.init_authentication('cookie')
         self.assertAuthFailure(req)
         form = self.app_publish(req, 'login')
         self.failUnless('__login' in form)
         self.failUnless('__password' in form)
         self.assertEquals(req.cnx, None)
-        req.form['__login'] = origcnx.login
-        req.form['__password'] = origcnx.authinfo['password']
-        self.assertAuthSuccess(req, origcnx)
-        self.assertEquals(req.cnx.authinfo, {'password': origcnx.authinfo['password']})
-        self.assertRaises(AuthenticationError, self.app_publish, req, 'logout')
+        req.form['__login'] = origsession.login
+        req.form['__password'] = origsession.authinfo['password']
+        self.assertAuthSuccess(req, origsession)
+        self.assertEquals(req.session.authinfo, {'password': origsession.authinfo['password']})
+        self.assertRaises(LogOut, self.app_publish, req, 'logout')
         self.assertEquals(len(self.open_sessions), 0)
 
     def test_login_by_email(self):
@@ -344,71 +349,72 @@
                      'WHERE U login %(login)s', {'address': address, 'login': login})
         self.commit()
         # option allow-email-login not set
-        req, origcnx = self.init_authentication('cookie')
+        req, origsession = self.init_authentication('cookie')
         req.form['__login'] = address
-        req.form['__password'] = origcnx.authinfo['password']
+        req.form['__password'] = origsession.authinfo['password']
         self.assertAuthFailure(req)
         # option allow-email-login set
-        origcnx.login = address
+        origsession.login = address
         self.set_option('allow-email-login', True)
         req.form['__login'] = address
-        req.form['__password'] = origcnx.authinfo['password']
-        self.assertAuthSuccess(req, origcnx)
-        self.assertEquals(req.cnx.authinfo, {'password': origcnx.authinfo['password']})
-        self.assertRaises(AuthenticationError, self.app_publish, req, 'logout')
+        req.form['__password'] = origsession.authinfo['password']
+        self.assertAuthSuccess(req, origsession)
+        self.assertEquals(req.session.authinfo, {'password': origsession.authinfo['password']})
+        self.assertRaises(LogOut, self.app_publish, req, 'logout')
         self.assertEquals(len(self.open_sessions), 0)
 
     def _reset_cookie(self, req):
         # preparing the suite of the test
         # set session id in cookie
         cookie = Cookie.SimpleCookie()
-        cookie['__session'] = req.cnx.sessionid
+        cookie['__session'] = req.session.sessionid
         req._headers['Cookie'] = cookie['__session'].OutputString()
         clear_cache(req, 'get_authorization')
-        # reset cnx as if it was a new incoming request
-        req.cnx = None
+        # reset session as if it was a new incoming request
+        req.session = req.cnx = None
 
     def _test_auth_anon(self, req):
         self.app.connect(req)
-        acnx = req.cnx
+        asession = req.session
         self.assertEquals(len(self.open_sessions), 1)
-        self.assertEquals(acnx.login, 'anon')
-        self.assertEquals(acnx.authinfo['password'], 'anon')
-        self.failUnless(acnx.anonymous_connection)
+        self.assertEquals(asession.login, 'anon')
+        self.assertEquals(asession.authinfo['password'], 'anon')
+        self.failUnless(asession.anonymous_session)
         self._reset_cookie(req)
 
     def _test_anon_auth_fail(self, req):
         self.assertEquals(len(self.open_sessions), 1)
         self.app.connect(req)
         self.assertEquals(req.message, 'authentication failure')
-        self.assertEquals(req.cnx.anonymous_connection, True)
+        self.assertEquals(req.session.anonymous_session, True)
         self.assertEquals(len(self.open_sessions), 1)
         self._reset_cookie(req)
 
     def test_http_auth_anon_allowed(self):
-        req, origcnx = self.init_authentication('http', 'anon')
+        req, origsession = self.init_authentication('http', 'anon')
         self._test_auth_anon(req)
         authstr = base64.encodestring('toto:pouet')
         req._headers['Authorization'] = 'basic %s' % authstr
         self._test_anon_auth_fail(req)
-        authstr = base64.encodestring('%s:%s' % (origcnx.login, origcnx.authinfo['password']))
+        authstr = base64.encodestring('%s:%s' % (origsession.login, origsession.authinfo['password']))
         req._headers['Authorization'] = 'basic %s' % authstr
-        self.assertAuthSuccess(req, origcnx)
-        self.assertEquals(req.cnx.authinfo, {'password': origcnx.authinfo['password']})
-        self.assertRaises(AuthenticationError, self.app_publish, req, 'logout')
+        self.assertAuthSuccess(req, origsession)
+        self.assertEquals(req.session.authinfo, {'password': origsession.authinfo['password']})
+        self.assertRaises(LogOut, self.app_publish, req, 'logout')
         self.assertEquals(len(self.open_sessions), 0)
 
     def test_cookie_auth_anon_allowed(self):
-        req, origcnx = self.init_authentication('cookie', 'anon')
+        req, origsession = self.init_authentication('cookie', 'anon')
         self._test_auth_anon(req)
         req.form['__login'] = 'toto'
         req.form['__password'] = 'pouet'
         self._test_anon_auth_fail(req)
-        req.form['__login'] = origcnx.login
-        req.form['__password'] = origcnx.authinfo['password']
-        self.assertAuthSuccess(req, origcnx)
-        self.assertEquals(req.cnx.authinfo, {'password': origcnx.authinfo['password']})
-        self.assertRaises(AuthenticationError, self.app_publish, req, 'logout')
+        req.form['__login'] = origsession.login
+        req.form['__password'] = origsession.authinfo['password']
+        self.assertAuthSuccess(req, origsession)
+        self.assertEquals(req.session.authinfo,
+                          {'password': origsession.authinfo['password']})
+        self.assertRaises(LogOut, self.app_publish, req, 'logout')
         self.assertEquals(len(self.open_sessions), 0)
 
     def test_non_regr_optional_first_var(self):
--- a/web/test/unittest_pdf.py	Thu May 06 08:24:46 2010 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,58 +0,0 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-#
-# This file is part of CubicWeb.
-#
-# CubicWeb is free software: you can redistribute it and/or modify it under the
-# terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option)
-# any later version.
-#
-# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-import os.path as osp
-from tempfile import NamedTemporaryFile
-from subprocess import Popen as sub
-from xml.etree.cElementTree import ElementTree, fromstring, tostring, dump
-
-from logilab.common.testlib import TestCase, unittest_main
-
-from cubicweb.utils import can_do_pdf_conversion
-from cubicweb.ext.xhtml2fo import ReportTransformer
-
-DATADIR = osp.join(osp.dirname(__file__), 'data')
-
-class PDFTC(TestCase):
-
-    def test_xhtml_to_fop_to_pdf(self):
-        if not can_do_pdf_conversion():
-            self.skip('dependencies not available : check pysixt and fop')
-        xmltree = ElementTree()
-        xmltree.parse(osp.join(DATADIR, 'sample1.xml'))
-        foptree = ReportTransformer(u'contentmain').transform(xmltree)
-        # next
-        foptmp = NamedTemporaryFile()
-        foptree.write(foptmp)
-        foptmp.flush()
-        pdftmp = NamedTemporaryFile()
-        fopproc = sub(['/usr/bin/fop', foptmp.name, pdftmp.name])
-        fopproc.wait()
-        del foptmp
-        if fopproc.returncode:
-            self.skip('fop returned status %s' % fopproc.returncode)
-        pdftmp.seek(0) # a bit superstitious
-        reference = open(osp.join(DATADIR, 'sample1.pdf'), 'r').read()
-        output = pdftmp.read()
-        # XXX almost equals due to ID, creation date, so it seems to fail
-        self.assertEquals( len(output), len(reference) )
-        # cut begin & end 'cause they contain variyng data
-        self.assertTextEquals(output[150:1500], reference[150:1500])
-
-if __name__ == '__main__':
-    unittest_main()
-
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/test/unittest_session.py	Mon Jul 19 15:36:16 2010 +0200
@@ -0,0 +1,37 @@
+# -*- coding: iso-8859-1 -*-
+"""unit tests for cubicweb.web.application
+
+:organization: Logilab
+:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
+"""
+from cubicweb.devtools.testlib import CubicWebTC
+
+class SessionTC(CubicWebTC):
+
+    def test_auto_reconnection(self):
+        sm = self.app.session_handler.session_manager
+        # make as if the web session had been opened by the session manager
+        sm._sessions[self.cnx.sessionid] = self.websession
+        sessionid = self.websession.sessionid
+        self.assertEquals(len(sm._sessions), 1)
+        self.assertEquals(self.websession.sessionid, self.websession.cnx.sessionid)
+        # pretend the repo session has expired
+        self.repo.close(sessionid)
+        try:
+            # fake an incoming http query with sessionid in session cookie
+            # don't use self.request(), which tries to call req.set_session
+            req = self.requestcls(self.vreg)
+            websession = sm.get_session(req, sessionid)
+            self.assertEquals(len(sm._sessions), 1)
+            self.assertIs(websession, self.websession)
+            self.assertEquals(websession.sessionid, sessionid)
+            self.assertNotEquals(websession.sessionid, websession.cnx.sessionid)
+        finally:
+            # avoid error in tearDown by telling this connection is closed...
+            self.cnx._closed = True
+
+if __name__ == '__main__':
+    from logilab.common.testlib import unittest_main
+    unittest_main()
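
For readers unfamiliar with the web session layer, the behaviour exercised by the
test above is roughly the following (a minimal sketch; SessionManager and the
attribute names are simplifications, not the exact CubicWeb classes)::

    class SessionManager(object):
        """sketch: keep web sessions alive across repository reconnections"""

        def __init__(self, repo):
            self.repo = repo
            self._sessions = {}            # web sessionid -> web session object

        def get_session(self, req, sessionid):
            session = self._sessions[sessionid]   # KeyError -> force a new login
            try:
                session.cnx.user(req)              # probe the repo connection
            except Exception:                      # e.g. BadConnectionId
                # reopen a repository connection but keep the same web session,
                # so the web sessionid stays stable while cnx.sessionid changes
                session.cnx = self.repo.connect(session.login,
                                                **session.authinfo)
            return session
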
--- a/web/test/unittest_uicfg.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/test/unittest_uicfg.py	Mon Jul 19 15:36:16 2010 +0200
@@ -15,6 +15,7 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+from logilab.common.testlib import tag
 from cubicweb.devtools.testlib import CubicWebTC
 from cubicweb.web import uicfg
 
@@ -25,6 +26,57 @@
     def test_default_actionbox_appearsin_addmenu_config(self):
         self.failIf(abaa.etype_get('TrInfo', 'wf_info_for', 'object', 'CWUser'))
 
+
+
+class DefinitionOrderTC(CubicWebTC):
+    """This test checks that when multiple definitions could match a key,
+    only the most specific one applies"""
+
+    def setUp(self):
+
+        new_def = (
+                    (('*', 'login', '*'),
+                         {'formtype':'main', 'section':'hidden'}),
+                    (('*', 'login', '*'),
+                         {'formtype':'muledit', 'section':'hidden'}),
+                    (('CWUser', 'login', '*'),
+                         {'formtype':'main', 'section':'attributes'}),
+                    (('CWUser', 'login', '*'),
+                         {'formtype':'muledit', 'section':'attributes'}),
+                    (('CWUser', 'login', 'String'),
+                         {'formtype':'main', 'section':'inlined'}),
+                    (('CWUser', 'login', 'String'),
+                         {'formtype':'inlined', 'section':'attributes'}),
+                    )
+        self._old_def = []
+
+        for key, kwargs in new_def:
+            nkey = key[0], key[1], key[2], 'subject'
+            self._old_def.append((nkey, uicfg.autoform_section._tagdefs.get(nkey)))
+            uicfg.autoform_section.tag_subject_of(key, **kwargs)
+
+        super(DefinitionOrderTC, self).setUp()
+
+
+    @tag('uicfg')
+    def test_definition_order_hidden(self):
+        result = uicfg.autoform_section.get('CWUser', 'login', 'String', 'subject')
+        expected = set(['main_inlined', 'muledit_attributes', 'inlined_attributes'])
+        self.assertSetEquals(result, expected)
+
+    def tearDown(self):
+        super(DefinitionOrderTC, self).tearDown()
+        for key, tags in self._old_def:
+            if tags is None:
+                uicfg.autoform_section.del_rtag(*key)
+            else:
+                for tag in tags:
+                    formtype, section = tag.split('_')
+                    uicfg.autoform_section.tag_subject_of(key[:3], formtype=formtype,
+                                                          section=section)
+
+        uicfg.autoform_section.clear()
+        uicfg.autoform_section.init(self.repo.vreg.schema)
+
 if __name__ == '__main__':
     from logilab.common.testlib import unittest_main
     unittest_main()
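
The ordering test above relies on uicfg specificity: a tag registered for a fully
qualified ('CWUser', 'login', 'String') key is expected to override one registered
with '*' wildcards for the same formtype. In instance code this looks like the
following (section values taken from the test, the rules themselves illustrative)::

    from cubicweb.web import uicfg

    afs = uicfg.autoform_section
    # generic rule: hide the relation in the main form everywhere...
    afs.tag_subject_of(('*', 'login', '*'),
                       formtype='main', section='hidden')
    # ...but this more specific rule wins for CWUser login attributes
    afs.tag_subject_of(('CWUser', 'login', 'String'),
                       formtype='main', section='inlined')
    # once initialised against the schema, lookup merges one tag per formtype:
    #   afs.get('CWUser', 'login', 'String', 'subject')
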
--- a/web/test/unittest_urlpublisher.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/test/unittest_urlpublisher.py	Mon Jul 19 15:36:16 2010 +0200
@@ -39,7 +39,7 @@
         req = self.request()
         b = req.create_entity('BlogEntry', title=u'hell\'o', content=u'blabla')
         c = req.create_entity('Tag', name=u'yo') # take care: Tag's name normalized to lower case
-        self.execute('SET C tags B WHERE C eid %(c)s, B eid %(b)s', {'c':c.eid, 'b':b.eid}, 'b')
+        self.execute('SET C tags B WHERE C eid %(c)s, B eid %(b)s', {'c':c.eid, 'b':b.eid})
 
     def process(self, url):
         req = self.req = self.request()
--- a/web/test/unittest_views_basecontrollers.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/test/unittest_views_basecontrollers.py	Mon Jul 19 15:36:16 2010 +0200
@@ -83,7 +83,7 @@
             'in_group-subject:'+eid:  groups,
             }
         path, params = self.expect_redirect_publish(req, 'edit')
-        e = self.execute('Any X WHERE X eid %(x)s', {'x': user.eid}, 'x').get_entity(0, 0)
+        e = self.execute('Any X WHERE X eid %(x)s', {'x': user.eid}).get_entity(0, 0)
         self.assertEquals(e.firstname, u'Sylvain')
         self.assertEquals(e.surname, u'Th\xe9nault')
         self.assertEquals(e.login, user.login)
@@ -123,7 +123,7 @@
             'surname-subject:'+eid:   u'Sylvain',
             }
         path, params = self.expect_redirect_publish(req, 'edit')
-        e = self.execute('Any X WHERE X eid %(x)s', {'x': user.eid}, 'x').get_entity(0, 0)
+        e = self.execute('Any X WHERE X eid %(x)s', {'x': user.eid}).get_entity(0, 0)
         self.assertEquals(e.login, user.login)
         self.assertEquals(e.firstname, u'Th\xe9nault')
         self.assertEquals(e.surname, u'Sylvain')
@@ -231,7 +231,7 @@
                     'described_by_test-subject:X': u(feid),
                 }
         ex = self.assertRaises(ValidationError, self.ctrl_publish, req)
-        self.assertEquals(ex.errors, {'amount-subject': 'value [0;100] constraint failed for value -10'})
+        self.assertEquals(ex.errors, {'amount-subject': 'value must be >= 0'})
         req = self.request()
         req.form = {'eid': ['X'],
                     '__type:X': 'Salesterm',
@@ -240,7 +240,7 @@
                     'described_by_test-subject:X': u(feid),
                     }
         ex = self.assertRaises(ValidationError, self.ctrl_publish, req)
-        self.assertEquals(ex.errors, {'amount-subject': 'value [0;100] constraint failed for value 110'})
+        self.assertEquals(ex.errors, {'amount-subject': 'value must be <= 100'})
         req = self.request()
         req.form = {'eid': ['X'],
                     '__type:X': 'Salesterm',
@@ -259,7 +259,7 @@
         tmpgroup = self.request().create_entity('CWGroup', name=u"test")
         user = self.user()
         req = self.request(**req_form(user))
-        req.set_session_data('pending_insert', set([(user.eid, 'in_group', tmpgroup.eid)]))
+        req.session.data['pending_insert'] = set([(user.eid, 'in_group', tmpgroup.eid)])
         path, params = self.expect_redirect_publish(req, 'edit')
         usergroups = [gname for gname, in
                       self.execute('Any N WHERE G name N, U in_group G, U eid %(u)s', {'u': user.eid})]
@@ -278,7 +278,7 @@
         self.assertUnorderedIterableEquals(usergroups, ['managers', 'test'])
         # now try to delete the relation
         req = self.request(**req_form(user))
-        req.set_session_data('pending_delete', set([(user.eid, 'in_group', groupeid)]))
+        req.session.data['pending_delete'] = set([(user.eid, 'in_group', groupeid)])
         path, params = self.expect_redirect_publish(req, 'edit')
         usergroups = [gname for gname, in
                       self.execute('Any N WHERE G name N, U in_group G, U eid %(u)s', {'u': user.eid})]
@@ -358,7 +358,7 @@
         self.assertIn('_cwmsgid', params)
         eid = req.create_entity('EmailAddress', address=u'hop@logilab.fr').eid
         self.execute('SET X use_email E WHERE E eid %(e)s, X eid %(x)s',
-                     {'x': self.session.user.eid, 'e': eid}, 'x')
+                     {'x': self.session.user.eid, 'e': eid})
         self.commit()
         req = req
         req.form = {'eid': u(eid), '__type:%s'%eid: 'EmailAddress',
@@ -397,7 +397,7 @@
             }
         try:
             path, params = self.expect_redirect_publish(req, 'edit')
-            e = self.execute('Any X WHERE X eid %(x)s', {'x': cwetypeeid}, 'x').get_entity(0, 0)
+            e = self.execute('Any X WHERE X eid %(x)s', {'x': cwetypeeid}).get_entity(0, 0)
             self.assertEquals(e.name, 'CWEType')
             self.assertEquals(sorted(g.eid for g in e.read_permission), groupeids)
         finally:
@@ -419,7 +419,7 @@
         path, params = self.expect_redirect_publish(req, 'edit')
         self.failUnless(path.startswith('blogentry/'))
         eid = path.split('/')[1]
-        e = self.execute('Any C, T WHERE C eid %(x)s, C content T', {'x': eid}, 'x').get_entity(0, 0)
+        e = self.execute('Any C, T WHERE C eid %(x)s, C content T', {'x': eid}).get_entity(0, 0)
         self.assertEquals(e.title, '"13:03:40"')
         self.assertEquals(e.content, '"13:03:43"')
 
@@ -566,17 +566,21 @@
 
     def test_remote_add_existing_tag(self):
         self.remote_call('tag_entity', self.john.eid, ['python'])
-        self.assertUnorderedIterableEquals([tname for tname, in self.execute('Any N WHERE T is Tag, T name N')],
-                             ['python', 'cubicweb'])
-        self.assertEquals(self.execute('Any N WHERE T tags P, P is CWUser, T name N').rows,
-                          [['python']])
+        self.assertUnorderedIterableEquals(
+            [tname for tname, in self.execute('Any N WHERE T is Tag, T name N')],
+            ['python', 'cubicweb'])
+        self.assertEquals(
+            self.execute('Any N WHERE T tags P, P is CWUser, T name N').rows,
+            [['python']])
 
     def test_remote_add_new_tag(self):
         self.remote_call('tag_entity', self.john.eid, ['javascript'])
-        self.assertUnorderedIterableEquals([tname for tname, in self.execute('Any N WHERE T is Tag, T name N')],
-                             ['python', 'cubicweb', 'javascript'])
-        self.assertEquals(self.execute('Any N WHERE T tags P, P is CWUser, T name N').rows,
-                          [['javascript']])
+        self.assertUnorderedIterableEquals(
+            [tname for tname, in self.execute('Any N WHERE T is Tag, T name N')],
+            ['python', 'cubicweb', 'javascript'])
+        self.assertEquals(
+            self.execute('Any N WHERE T tags P, P is CWUser, T name N').rows,
+            [['javascript']])
 
     def test_pending_insertion(self):
         res, req = self.remote_call('add_pending_inserts', [['12', 'tags', '13']])
--- a/web/test/unittest_views_basetemplates.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/test/unittest_views_basetemplates.py	Mon Jul 19 15:36:16 2010 +0200
@@ -15,9 +15,6 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""
-
-"""
 from cubicweb.devtools.testlib import CubicWebTC
 from cubicweb.devtools.htmlparser import DTDValidator
 
@@ -26,7 +23,10 @@
 
     def _login_labels(self):
         valid = self.content_type_validators.get('text/html', DTDValidator)()
+        req = self.request()
+        req.cnx.anonymous_connection = True
         page = valid.parse_string(self.vreg['views'].main_template(self.request(), 'login'))
+        req.cnx.anonymous_connection = False
         return page.find_tag('label')
 
     def test_label(self):
@@ -34,3 +34,7 @@
         self.assertEquals(self._login_labels(), ['login or email', 'password'])
         self.set_option('allow-email-login', 'no')
         self.assertEquals(self._login_labels(), ['login', 'password'])
+
+if __name__ == '__main__':
+    from logilab.common.testlib import unittest_main
+    unittest_main()
--- a/web/test/unittest_views_baseviews.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/test/unittest_views_baseviews.py	Mon Jul 19 15:36:16 2010 +0200
@@ -103,7 +103,7 @@
         req = self.request()
         e = req.create_entity("State", name=u'<toto>', description=u'loo"ong blabla')
         rset = req.execute('Any X, D, CD, NOW - CD WHERE X is State, X description D, X creation_date CD, X eid %(x)s',
-                            {'x': e.eid}, 'x')
+                           {'x': e.eid})
         view = self.vreg['views'].select('table', req, rset=rset)
         return e, rset, view
 
--- a/web/test/unittest_views_navigation.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/test/unittest_views_navigation.py	Mon Jul 19 15:36:16 2010 +0200
@@ -15,14 +15,13 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""cubicweb.web.views.navigation unit tests
-
-"""
+"""cubicweb.web.views.navigation unit tests"""
 
 from logilab.common.testlib import unittest_main, mock_object
+
 from cubicweb.devtools.testlib import CubicWebTC
-
-from cubicweb.web.views.navigation import PageNavigation, SortedNavigation
+from cubicweb.web.views.navigation import (PageNavigation, SortedNavigation,
+                                           PageNavigationSelect)
 from cubicweb.web.views.ibreadcrumbs import BreadCrumbEntityVComponent
 
 BreadCrumbEntityVComponent.visible = True
@@ -41,15 +40,24 @@
 
     def test_navigation_selection_ordered(self):
         req = self.request()
-        rset = self.execute('Any X,N ORDERBY N WHERE X name N')
-        navcomp = self.vreg['components'].select('navigation', req, rset=rset)
+        rset = self.execute('Any X,N ORDERBY N LIMIT 40 WHERE X name N')
+        navcomp = self.vreg['components'].select('navigation', req, rset=rset, page_size=20)
         self.assertIsInstance(navcomp, SortedNavigation)
         req.set_search_state('W:X:Y:Z')
-        navcomp = self.vreg['components'].select('navigation', req, rset=rset)
+        navcomp = self.vreg['components'].select('navigation', req, rset=rset, page_size=20)
         self.assertIsInstance(navcomp, SortedNavigation)
         req.set_search_state('normal')
         html = navcomp.render()
 
+    def test_navigation_selection_large_rset(self):
+        req = self.request()
+        rset = self.execute('Any X,N LIMIT 120 WHERE X name N')
+        navcomp = self.vreg['components'].select('navigation', req, rset=rset, page_size=20)
+        self.assertIsInstance(navcomp, PageNavigationSelect)
+        rset = self.execute('Any X,N ORDERBY N LIMIT 120 WHERE X name N')
+        navcomp = self.vreg['components'].select('navigation', req, rset=rset, page_size=20)
+        self.assertIsInstance(navcomp, PageNavigationSelect)
+
     def test_navigation_selection_not_enough(self):
         req = self.request()
         rset = self.execute('Any X,N LIMIT 10 WHERE X name N')
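
The navigation tests now pass page_size explicitly and expect PageNavigationSelect
for large result sets. Selecting and rendering the component from a view would look
roughly like this (the view class is illustrative, the page_size value is just the
one used in the tests)::

    from cubicweb.view import EntityView

    class PagedListView(EntityView):       # illustrative view, not from the diff
        __regid__ = 'mypagedlist'

        def call(self):
            navcomp = self._cw.vreg['components'].select_or_none(
                'navigation', self._cw, rset=self.cw_rset, page_size=20)
            if navcomp is not None:
                self.w(navcomp.render())
            self.wview('list', self.cw_rset)
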
--- a/web/test/unittest_views_searchrestriction.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/test/unittest_views_searchrestriction.py	Mon Jul 19 15:36:16 2010 +0200
@@ -48,17 +48,17 @@
     def test_1(self):
         self.assertEquals(self._generate(self.select, 'in_state', 'subject', 'name'),
                           "DISTINCT Any A,C ORDERBY C WHERE B in_group P, P name 'managers', "
-                          "B in_state A, A name C, B is CWUser")
+                          "B in_state A, B is CWUser, A name C")
 
     def test_2(self):
         self.assertEquals(self._generate(self.select, 'tags', 'object', 'name'),
                           "DISTINCT Any A,C ORDERBY C WHERE B in_group P, P name 'managers', "
-                          "A tags B, A name C, B is CWUser")
+                          "A tags B, B is CWUser, A name C")
 
     def test_3(self):
         self.assertEquals(self._generate(self.select, 'created_by', 'subject', 'login'),
                           "DISTINCT Any A,C ORDERBY C WHERE B in_group P, P name 'managers', "
-                          "B created_by A, A login C, B is CWUser")
+                          "B created_by A, B is CWUser, A login C")
 
     def test_4(self):
         self.assertEquals(self._generate(self.parse('Any X WHERE X is CWUser'), 'created_by', 'subject', 'login'),
@@ -73,7 +73,7 @@
                             'V in_state VS, VS name "published", T created_by U')
         self.assertEquals(self._generate(select, 'created_by', 'subject', 'login'),
                           "DISTINCT Any A,B ORDERBY B WHERE T created_by U, "
-                          "T created_by A, A login B, T is Bookmark")
+                          "T created_by A, T is Bookmark, A login B")
 
     def test_nonregr2(self):
         #'DISTINCT Any X,TMP,N WHERE P name TMP, X version_of P, P is Project, X is Version, not X in_state S,S name "published", X num N ORDERBY TMP,N'
@@ -85,7 +85,7 @@
         try:
             self.assertEquals(self._generate(select, 'in_state', 'subject', 'name'),
                               "DISTINCT Any A,B ORDERBY B WHERE V is CWUser, "
-                              "NOT V in_state VS, VS name 'published', "
+                              "NOT EXISTS(V in_state VS), VS name 'published', "
                               "V in_state A, A name B")
         finally:
             for rdefs in rschema.rdefs.values():
--- a/web/test/unittest_viewselector.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/test/unittest_viewselector.py	Mon Jul 19 15:36:16 2010 +0200
@@ -16,8 +16,8 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""XXX rename, split, reorganize this
-"""
+"""XXX rename, split, reorganize this"""
+
 from logilab.common.testlib import unittest_main
 
 from cubicweb.devtools.testlib import CubicWebTC
@@ -112,7 +112,7 @@
                               ('list', baseviews.ListView),
                               ('oneline', baseviews.OneLineView),
                               ('owlabox', owl.OWLABOXView),
-                              ('primary', primary.PrimaryView),
+                              ('primary', cwuser.CWGroupPrimaryView),
                               ('rsetxml', xmlrss.XMLRsetView),
                               ('rss', xmlrss.RSSView),
                               ('sameetypelist', baseviews.SameETypeListView),
@@ -136,7 +136,7 @@
                               ('list', baseviews.ListView),
                               ('oneline', baseviews.OneLineView),
                               ('owlabox', owl.OWLABOXView),
-                              ('primary', primary.PrimaryView),
+                              ('primary', cwuser.CWGroupPrimaryView),
                               ('rsetxml', xmlrss.XMLRsetView),
                               ('rss', xmlrss.RSSView),
                               ('sameetypelist', baseviews.SameETypeListView),
--- a/web/uicfg.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/uicfg.py	Mon Jul 19 15:36:16 2010 +0200
@@ -80,18 +80,13 @@
 primaryview_section = RelationTags('primaryview_section',
                                    init_primaryview_section,
                                    frozenset(('attributes', 'relations',
-                                               'sideboxes', 'hidden')))
+                                              'sideboxes', 'hidden')))
 
 
 class DisplayCtrlRelationTags(RelationTagsDict):
     def __init__(self, *args, **kwargs):
         super(DisplayCtrlRelationTags, self).__init__(*args, **kwargs)
-        self._counter = 0
-
-    def tag_relation(self, key, tag):
-        tag = super(DisplayCtrlRelationTags, self).tag_relation(key, tag)
-        self._counter += 1
-        tag.setdefault('order', self._counter)
+        self.counter = 0
 
     def tag_subject_of(self, key, tag):
         subj, rtype, obj = key
@@ -117,7 +112,8 @@
         sschema = '*'
         label = '%s_%s' % (rschema, role)
     rtag.setdefault((sschema, rschema, oschema, role), 'label', label)
-    rtag.setdefault((sschema, rschema, oschema, role), 'order', rtag._counter)
+    rtag.counter += 1
+    rtag.setdefault((sschema, rschema, oschema, role), 'order', rtag.counter)
 
 primaryview_display_ctrl = DisplayCtrlRelationTags('primaryview_display_ctrl',
                                                    init_primaryview_display_ctrl)
@@ -282,8 +278,19 @@
         rtags.add('%s_%s' % (formtype, section))
         return rtags
 
-    def init_get(self, *key):
-        return super(AutoformSectionRelationTags, self).get(*key)
+    def init_get(self, stype, rtype, otype, tagged):
+        key = (stype, rtype, otype, tagged)
+        rtags = {}
+        for key in self._get_keys(stype, rtype, otype, tagged):
+            tags = self._tagdefs.get(key, ())
+            for tag in tags:
+                assert '_' in tag, (tag, tags)
+                section, value = tag.split('_', 1)
+                rtags[section] = value
+        cls = self.tag_container_cls
+        rtags = cls('_'.join([section,value]) for section,value in rtags.iteritems())
+        return rtags
+
 
     def get(self, *key):
         # overriden to avoid recomputing done in parent classes
--- a/web/views/actions.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/views/actions.py	Mon Jul 19 15:36:16 2010 +0200
@@ -252,7 +252,7 @@
     __select__ = action.Action.__select__ & one_line_rset() & non_final_entity()
 
     submenu = _('addrelated')
-    order = 20
+    order = 17
 
     def fill_menu(self, box, menu):
         # when there is only one item in the sub-menu, replace the sub-menu by
@@ -260,16 +260,24 @@
         menu.label_prefix = self._cw._('add')
         super(AddRelatedActions, self).fill_menu(box, menu)
 
+    def redirect_params(self, entity):
+        return {'__redirectpath': entity.rest_path(), # should not be url quoted!
+                '__redirectvid': self._cw.form.get('vid', '')}
+
     def actual_actions(self):
         entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0)
         eschema = entity.e_schema
-        for rschema, teschema, x in self.add_related_schemas(entity):
-            if x == 'subject':
-                label = 'add %s %s %s %s' % (eschema, rschema, teschema, x)
-                url = self.linkto_url(entity, rschema, teschema, 'object')
+        params = self.redirect_params(entity)
+        for rschema, teschema, role in self.add_related_schemas(entity):
+            if rschema.role_rdef(eschema, teschema, role).role_cardinality(role) in '1?':
+                if entity.related(rschema, role):
+                    continue
+            if role == 'subject':
+                label = 'add %s %s %s %s' % (eschema, rschema, teschema, role)
+                url = self.linkto_url(entity, rschema, teschema, 'object', **params)
             else:
-                label = 'add %s %s %s %s' % (teschema, rschema, eschema, x)
-                url = self.linkto_url(entity, rschema, teschema, 'subject')
+                label = 'add %s %s %s %s' % (teschema, rschema, eschema, role)
+                url = self.linkto_url(entity, rschema, teschema, 'subject', **params)
             yield self.build_action(self._cw._(label), url)
 
     def add_related_schemas(self, entity):
@@ -305,11 +313,9 @@
                     if teschema.may_have_permission('add', req):
                         yield rschema, teschema, role
 
-    def linkto_url(self, entity, rtype, etype, target):
+    def linkto_url(self, entity, rtype, etype, target, **kwargs):
         return self._cw.build_url('add/%s' % etype,
-                                  __linkto='%s:%s:%s' % (rtype, entity.eid, target),
-                                  __redirectpath=entity.rest_path(), # should not be url quoted!
-                                  __redirectvid=self._cw.form.get('vid', ''))
+                                  __linkto='%s:%s:%s' % (rtype, entity.eid, target), **kwargs)
 
 
 class ViewSameCWEType(action.Action):
--- a/web/views/authentication.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/views/authentication.py	Mon Jul 19 15:36:16 2010 +0200
@@ -18,14 +18,18 @@
 """user authentication component
 
 """
+from __future__ import with_statement
+
 __docformat__ = "restructuredtext en"
 
+from threading import Lock
+
 from logilab.common.decorators import clear_cache
 
 from cubicweb import AuthenticationError, BadConnectionId
 from cubicweb.view import Component
 from cubicweb.dbapi import repo_connect, ConnectionProperties
-from cubicweb.web import ExplicitLogin, InvalidSession
+from cubicweb.web import InvalidSession
 from cubicweb.web.application import AbstractAuthenticationManager
 
 class NoAuthInfo(Exception): pass
@@ -41,9 +45,10 @@
         """
         raise NotImplementedError()
 
-    def authenticated(self, req, cnx, retreiver):
+    def authenticated(self, retreiver, req, cnx, login, authinfo):
         """callback when return authentication information have opened a
-        repository connection successfully
+        repository connection successfully. Note that req has no session
+        attached yet, hence req.execute isn't available.
         """
         pass
 
@@ -71,50 +76,58 @@
         self.log_queries = vreg.config['query-log-file']
         self.authinforetreivers = sorted(vreg['webauth'].possible_objects(vreg),
                                     key=lambda x: x.order)
+        # 2-tuple (login, password); login is None when no anonymous access
+        # is configured
         self.anoninfo = vreg.config.anonymous_user()
+        if self.anoninfo[0]:
+            self.anoninfo = (self.anoninfo[0], {'password': self.anoninfo[1]})
 
     def validate_session(self, req, session):
-        """check session validity, and return eventually hijacked session
+        """check session validity, reconnecting it to the repository if the
+        associated connection expired on the repository side (hence the
+        necessity for this method). Return the connected user on success.
 
-        :raise InvalidSession:
-          if session is corrupted for a reason or another and should be closed
+        raise :exc:`InvalidSession` if the session is corrupted for one reason
+        or another and should be closed
         """
         # with this authentication manager, session is actually a dbapi
         # connection
-        cnx = session
         login = req.get_authorization()[0]
+        # check session.login and not user.login, since in case of login by
+        # email, login and cnx.login are the email while user.login is the
+        # actual user login
+        if login and session.login != login:
+            raise InvalidSession('login mismatch')
         try:
-            # calling cnx.user() check connection validity, raise
-            # BadConnectionId on failure
-            user = cnx.user(req)
-            # check cnx.login and not user.login, since in case of login by
-            # email, login and cnx.login are the email while user.login is the
-            # actual user login
-            if login and cnx.login != login:
-                cnx.close()
-                raise InvalidSession('login mismatch')
-        except BadConnectionId:
-            # check if a connection should be automatically restablished
-            if (login is None or login == cnx.login):
-                cnx = self._authenticate(req, cnx.login, cnx.authinfo)
+            lock = session.reconnection_lock
+        except AttributeError:
+            lock = session.reconnection_lock = Lock()
+        # needs to be locked to avoid duplicate reconnections on concurrent
+        # requests
+        with lock:
+            cnx = session.cnx
+            try:
+                # calling cnx.user() check connection validity, raise
+                # BadConnectionId on failure
                 user = cnx.user(req)
-                # backport session's data
-                cnx.data = session.data
-            else:
-                raise InvalidSession('bad connection id')
-        # associate the connection to the current request
-        req.set_connection(cnx, user)
-        return cnx
+            except BadConnectionId:
+                # check if a connection should be automatically re-established
+                if (login is None or login == session.login):
+                    cnx = self._authenticate(session.login, session.authinfo)
+                    user = cnx.user(req)
+                    session.cnx = cnx
+                else:
+                    raise InvalidSession('bad connection id')
+        return user
 
     def authenticate(self, req):
-        """authenticate user and return corresponding user object
+        """authenticate user using connection information found in the request,
+        and return the corresponding :class:`~cubicweb.dbapi.Connection` instance,
+        as well as the login and authentication information dictionary used to open
+        the connection.
 
-        :raise ExplicitLogin: if authentication is required (no authentication
-        info found or wrong user/password)
-
-        Note: this method is violating AuthenticationManager interface by
-        returning a session instance instead of the user. This is expected by
-        the InMemoryRepositorySessionManager.
+        raise :exc:`cubicweb.AuthenticationError` if authentication failed
+        (no authentication info found or wrong user/password)
         """
         for retreiver in self.authinforetreivers:
             try:
@@ -122,44 +135,28 @@
             except NoAuthInfo:
                 continue
             try:
-                cnx = self._authenticate(req, login, authinfo)
-            except ExplicitLogin:
+                cnx = self._authenticate(login, authinfo)
+            except AuthenticationError:
                 continue # the next one may succeed
             for retreiver_ in self.authinforetreivers:
-                retreiver_.authenticated(req, cnx, retreiver)
-            break
-        else:
-            # false if no authentication info found, eg this is not an
-            # authentication failure
-            if 'login' in locals():
-                req.set_message(req._('authentication failure'))
-            cnx = self._open_anonymous_connection(req)
-        return cnx
+                retreiver_.authenticated(retreiver, req, cnx, login, authinfo)
+            return cnx, login, authinfo
+        # false if no authentication info found, eg this is not an
+        # authentication failure
+        if 'login' in locals():
+            req.set_message(req._('authentication failure'))
+        login, authinfo = self.anoninfo
+        if login:
+            cnx = self._authenticate(login, authinfo)
+            cnx.anonymous_connection = True
+            return cnx, login, authinfo
+        raise AuthenticationError()
 
-    def _authenticate(self, req, login, authinfo):
+    def _authenticate(self, login, authinfo):
         cnxprops = ConnectionProperties(self.vreg.config.repo_method,
                                         close=False, log=self.log_queries)
-        try:
-            cnx = repo_connect(self.repo, login, cnxprops=cnxprops, **authinfo)
-        except AuthenticationError:
-            raise ExplicitLogin()
-        self._init_cnx(cnx, login, authinfo)
-        # associate the connection to the current request
-        req.set_connection(cnx)
+        cnx = repo_connect(self.repo, login, cnxprops=cnxprops, **authinfo)
+        # decorate connection
+        cnx.vreg = self.vreg
         return cnx
 
-    def _open_anonymous_connection(self, req):
-        # restore an anonymous connection if possible
-        login, password = self.anoninfo
-        if login:
-            cnx = self._authenticate(req, login, {'password': password})
-            cnx.anonymous_connection = True
-            return cnx
-        raise ExplicitLogin()
-
-    def _init_cnx(self, cnx, login, authinfo):
-        # decorate connection
-        cnx.vreg = self.vreg
-        cnx.login = login
-        cnx.authinfo = authinfo
-
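
The rewritten validate_session serialises reconnection behind a lazily created
per-session lock, so concurrent requests hitting the same expired connection reopen
it only once. Stripped of CubicWeb specifics, the pattern is the following sketch
(probe and reconnect are placeholders for cnx.user() and _authenticate())::

    from threading import Lock

    def ensure_connection(session, probe, reconnect):
        """probe(cnx) raises when the connection is dead; reconnect() returns
        a fresh one. Only one thread at a time may swap session.cnx."""
        try:
            lock = session.reconnection_lock
        except AttributeError:
            lock = session.reconnection_lock = Lock()
        with lock:
            try:
                probe(session.cnx)
            except Exception:
                session.cnx = reconnect(session.login, session.authinfo)
        return session.cnx
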
--- a/web/views/autoform.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/views/autoform.py	Mon Jul 19 15:36:16 2010 +0200
@@ -16,16 +16,13 @@
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """
-The automatic entity form
--------------------------
-
 .. autodocstring:: cubicweb.web.views.autoform::AutomaticEntityForm
 
 Configuration through uicfg
 ```````````````````````````
 
 It is possible to manage which and how an entity's attributes and relations
-will be edited in the various context where the automatic entity form is used
+will be edited in the various contexts where the automatic entity form is used
 by using proper uicfg tags.
 
 The details of the uicfg syntax can be found in the :ref:`uicfg` chapter.
@@ -53,7 +50,7 @@
 
 section may be one of:
 
-* 'hidden', don't display (not even in an hidden input, right?)
+* 'hidden', don't display (not even in a hidden input)
 
 * 'attributes', display in the attributes section
 
@@ -104,7 +101,11 @@
    autoform_field_kwargs.tag_attribute(('RQLExpression', 'expression'),
                                        {'widget': fw.TextInput})
 
+.. note::
 
+   the widget argument can be either a class or an instance (the latter
+   case is convenient for passing widget-specific initialisation
+   options)
 
 Overriding permissions
 ^^^^^^^^^^^^^^^^^^^^^^
@@ -124,8 +125,10 @@
 
 from logilab.mtconverter import xml_escape
 from logilab.common.decorators import iclassmethod, cached
+from logilab.common.deprecation import deprecated
 
 from cubicweb import typed_eid, neg_role, uilib
+from cubicweb.vregistry import classid
 from cubicweb.schema import display_name
 from cubicweb.view import EntityView
 from cubicweb.selectors import (
@@ -235,13 +238,29 @@
             self.peid, self.rtype, entity.eid)
         self.render_form(i18nctx, divonclick=divonclick, **kwargs)
 
+    def _get_removejs(self):
+        """
+        Don't display the remove link in edition form if the
+        cardinality is 1. Handled in InlineEntityCreationFormView for
+        creation form.
+        """
+        entity = self._entity()
+        if isinstance(self.peid, int):
+            pentity = self._cw.entity_from_eid(self.peid)
+            petype = pentity.e_schema.type
+            rdef = entity.e_schema.rdef(self.rtype, neg_role(self.role), petype)
+            card = rdef.role_cardinality(self.role)
+            if card == '1': # don't display remove link
+                return None
+        return self.removejs and self.removejs % (
+            self.peid, self.rtype, entity.eid)
+
     def render_form(self, i18nctx, **kwargs):
         """fetch and render the form"""
         entity = self._entity()
         divid = '%s-%s-%s' % (self.peid, self.rtype, entity.eid)
         title = self.form_title(entity, i18nctx)
-        removejs = self.removejs and self.removejs % (
-            self.peid, self.rtype, entity.eid)
+        removejs = self._get_removejs()
         countkey = '%s_count' % self.rtype
         try:
             self._cw.data[countkey] += 1
@@ -293,19 +312,20 @@
         # the 'add a new xxx' link disappears. If the user then cancels the
         # addition, we have to make this link appear again. This is done by
         # giving the 'add new' link id to removeInlineForm.
-        if card not in '?1':
+        if card == '?':
+            divid = "addNew%s%s%s:%s" % (self.etype, self.rtype, self.role, self.peid)
+            return "removeInlineForm('%%s', '%%s', '%s', '%%s', '%s')" % (
+                self.role, divid)
+        elif card in '+*':
             return "removeInlineForm('%%s', '%%s', '%s', '%%s')" % self.role
-        divid = "addNew%s%s%s:%s" % (
-            self.etype, self.rtype, self.role, self.peid)
-        return "removeInlineForm('%%s', '%%s', '%s', '%%s', '%s')" % (
-            self.role, divid)
+        # don't do anything for card == '1'
 
     @cached
     def _entity(self):
         try:
             cls = self._cw.vreg['etypes'].etype_class(self.etype)
         except:
-            self.w(self._cw._('no such entity type %s') % etype)
+            self.w(self._cw._('no such entity type %s') % self.etype)
             return
         entity = cls(self._cw)
         entity.eid = self._cw.varmaker.next()
@@ -365,7 +385,7 @@
     This is where are stored relations being added while editing
     an entity. This used to be stored in a temporary cookie.
     """
-    pending = req.get_session_data('pending_insert') or ()
+    pending = req.session.data.get('pending_insert', ())
     return ['%s:%s:%s' % (subj, rel, obj) for subj, rel, obj in pending
             if eid is None or eid in (subj, obj)]
 
@@ -375,7 +395,7 @@
     This is where are stored relations being removed while editing
     an entity. This used to be stored in a temporary cookie.
     """
-    pending = req.get_session_data('pending_delete') or ()
+    pending = req.session.data.get('pending_delete', ())
     return ['%s:%s:%s' % (subj, rel, obj) for subj, rel, obj in pending
             if eid is None or eid in (subj, obj)]
 
@@ -398,7 +418,7 @@
     execute = req.execute
     for subj, rtype, obj in parse_relations_descr(rdefs):
         rql = 'DELETE X %s Y where X eid %%(x)s, Y eid %%(y)s' % rtype
-        execute(rql, {'x': subj, 'y': obj}, ('x', 'y'))
+        execute(rql, {'x': subj, 'y': obj})
     req.set_message(req._('relations deleted'))
 
 def insert_relations(req, rdefs):
@@ -406,7 +426,7 @@
     execute = req.execute
     for subj, rtype, obj in parse_relations_descr(rdefs):
         rql = 'SET X %s Y where X eid %%(x)s, Y eid %%(y)s' % rtype
-        execute(rql, {'x': subj, 'y': obj}, ('x', 'y'))
+        execute(rql, {'x': subj, 'y': obj})
 
 
 class GenericRelationsWidget(fw.FieldWidget):
@@ -621,13 +641,13 @@
 # The automatic entity form ####################################################
 
 class AutomaticEntityForm(forms.EntityFieldsForm):
-    """AutomaticEntityForm is an automagic form to edit any entity. It is
-    designed to be fully generated from schema but highly configurable through
-    :ref:`uicfg`.
+    """AutomaticEntityForm is an automagic form to edit any entity. It
+    is designed to be fully generated from schema but highly
+    configurable through uicfg.
 
     Of course, as for other forms, you can also customise it by specifying
     various standard form parameters on selection, overriding, or
-    adding/removing fields in a selected instances.
+    adding/removing fields in selected instances.
     """
     __regid__ = 'edition'
 
@@ -642,6 +662,19 @@
     # set this to a list of [(relation, role)] if you want to explictily tell
     # which relations should be edited
     display_fields = None
+    # action on the form tag
+    _default_form_action_path = 'validateform'
+
+    # pre 3.8.3 compat
+    def set_action(self, action):
+        self._action = action
+    def get_action(self):
+        try:
+            return self._action
+        except AttributeError:
+            return self._cw.build_url(self._default_form_action_path)
+    action = property(deprecated('[3.9] use form.form_action()')(get_action),
+                      set_action)
 
     @iclassmethod
     def field_by_name(cls_or_self, name, role=None, eschema=None):
@@ -712,28 +745,13 @@
             return None
         return self.maxrelitems + 1
 
-    def action(self):
-        """return the form's action attribute. Default to validateform if not
-        explicitly overriden.
-        """
-        try:
-            return self._action
-        except AttributeError:
-            return self._cw.build_url('validateform')
-
-    def set_action(self, value):
-        """override default action"""
-        self._action = value
-
-    action = property(action, set_action)
-
     # autoform specific fields #################################################
 
     def _generic_relations_field(self):
         try:
             srels_by_cat = self.srelations_by_category('generic', 'add', strict=True)
             warn('[3.6] %s: srelations_by_category is deprecated, use uicfg or '
-                 'override editable_relations instead' % classid(form),
+                 'override editable_relations instead' % classid(self),
                  DeprecationWarning)
         except AttributeError:
             srels_by_cat = self.editable_relations()
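
The note added to the autoform documentation (widgets may be given as a class or as
an instance) matters as soon as the widget needs constructor options; a small usage
sketch, with the second attribute chosen purely for illustration::

    from cubicweb.web import uicfg, formwidgets as fw

    affk = uicfg.autoform_field_kwargs
    # widget given as a class: default construction is enough
    affk.tag_attribute(('RQLExpression', 'expression'), {'widget': fw.TextInput})
    # widget given as an instance: pass widget-specific options up front
    affk.tag_attribute(('Person', 'biography'),
                       {'widget': fw.TextArea(attrs={'rows': 20, 'cols': 80})})
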
--- a/web/views/basecomponents.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/views/basecomponents.py	Mon Jul 19 15:36:16 2010 +0200
@@ -19,7 +19,6 @@
 
 * the rql input form
 * the logged user link
-* pdf view link
 
 """
 __docformat__ = "restructuredtext en"
--- a/web/views/basecontrollers.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/views/basecontrollers.py	Mon Jul 19 15:36:16 2010 +0200
@@ -18,9 +18,8 @@
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """Set of base controllers, which are directly plugged into the application
 object to handle publication.
-
+"""
 
-"""
 __docformat__ = "restructuredtext en"
 
 from smtplib import SMTP
@@ -28,15 +27,15 @@
 from logilab.common.decorators import cached
 from logilab.common.date import strptime
 
-from cubicweb import (NoSelectableObject, ValidationError, ObjectNotFound,
-                      typed_eid)
+from cubicweb import (NoSelectableObject, ObjectNotFound, ValidationError,
+                      AuthenticationError, typed_eid)
 from cubicweb.utils import CubicWebJsonEncoder
-from cubicweb.selectors import authenticated_user, match_form_params
+from cubicweb.selectors import authenticated_user, anonymous_user, match_form_params
 from cubicweb.mail import format_mail
-from cubicweb.web import ExplicitLogin, Redirect, RemoteCallFailed, json_dumps, json
+from cubicweb.web import Redirect, RemoteCallFailed, DirectResponse, json_dumps, json
 from cubicweb.web.controller import Controller
-from cubicweb.web.views import vid_from_rset
-from cubicweb.web.views.formrenderers import FormRenderer
+from cubicweb.web.views import vid_from_rset, formrenderers
+
 try:
     from cubicweb.web.facet import (FilterRQLBuilder, get_facet,
                                     prepare_facets_rqlst)
@@ -69,7 +68,7 @@
     user's session data
     """
     def wrapper(self, *args, **kwargs):
-        data = self._cw.get_session_data(self._cw.pageid)
+        data = self._cw.session.data.get(self._cw.pageid)
         if data is None:
             raise RemoteCallFailed(self._cw._('pageid-not-found'))
         return func(self, *args, **kwargs)
@@ -78,12 +77,13 @@
 
 class LoginController(Controller):
     __regid__ = 'login'
+    __select__ = anonymous_user()
 
     def publish(self, rset=None):
         """log in the instance"""
         if self._cw.vreg.config['auth-mode'] == 'http':
             # HTTP authentication
-            raise ExplicitLogin()
+            raise AuthenticationError()
         else:
             # Cookie authentication
             return self.appli.need_login_content(self._cw)
@@ -129,7 +129,10 @@
         req = self._cw
         if rset is None and not hasattr(req, '_rql_processed'):
             req._rql_processed = True
-            rset = self.process_rql(req.form.get('rql'))
+            if req.cnx:
+                rset = self.process_rql(req.form.get('rql'))
+            else:
+                rset = None
         if rset and rset.rowcount == 1 and '__method' in req.form:
             entity = rset.get_entity(0, 0)
             try:
@@ -166,10 +169,6 @@
         if view.add_to_breadcrumbs and not view.binary:
             self._cw.update_breadcrumbs()
 
-    def validate_cache(self, view):
-        view.set_http_cache_headers()
-        self._cw.validate_cache()
-
     def execute_linkto(self, eid=None):
         """XXX __linkto parameter may cause security issue
 
@@ -190,14 +189,14 @@
             else:
                 rql = 'SET Y %s X WHERE X eid %%(x)s, Y eid %%(y)s' % rtype
             for teid in eids:
-                req.execute(rql, {'x': eid, 'y': typed_eid(teid)}, ('x', 'y'))
+                req.execute(rql, {'x': eid, 'y': typed_eid(teid)})
 
 
 def _validation_error(req, ex):
     req.cnx.rollback()
     # XXX necessary to remove existant validation error?
     # imo (syt), it's not necessary
-    req.get_session_data(req.form.get('__errorurl'), pop=True)
+    req.session.data.pop(req.form.get('__errorurl'), None)
     foreid = ex.entity
     eidmap = req.data.get('eidmap', {})
     for var, eid in eidmap.items():
@@ -249,7 +248,6 @@
         self._cw.set_content_type('text/html')
         jsargs = json.dumps((status, args, entity), cls=CubicWebJsonEncoder)
         return """<script type="text/javascript">
- wp = window.parent;
  window.parent.handleFormValidationResponse('%s', %s, %s, %s, %s);
 </script>""" %  (domid, callback, errback, jsargs, cbargs)
 
@@ -294,7 +292,7 @@
             raise RemoteCallFailed(repr(exc))
         try:
             result = func(*args)
-        except RemoteCallFailed:
+        except (RemoteCallFailed, DirectResponse):
             raise
         except Exception, ex:
             self.exception('an exception occured while calling js_%s(%s): %s',
@@ -312,6 +310,9 @@
         for name, value in zip(names, values):
             # remove possible __action_xxx inputs
             if name.startswith('__action'):
+                if action is None:
+                    # strip '__action_' to get the actual action name
+                    action = name[9:]
                 continue
             # form.setdefault(name, []).append(value)
             if name in form:
@@ -327,12 +328,12 @@
             form['__action_%s' % action] = u'whatever'
         return form
 
-    def _exec(self, rql, args=None, eidkey=None, rocheck=True):
+    def _exec(self, rql, args=None, rocheck=True):
         """json mode: execute RQL and return resultset as json"""
         if rocheck:
             self._cw.ensure_ro_rql(rql)
         try:
-            return self._cw.execute(rql, args, eidkey)
+            return self._cw.execute(rql, args)
         except Exception, ex:
             self.exception("error in _exec(rql=%s): %s", rql, ex)
             return None
@@ -390,7 +391,7 @@
         form = self._cw.vreg['forms'].select('edition', self._cw, entity=entity)
         form.build_context()
         vfield = form.field_by_name('value')
-        renderer = FormRenderer(self._cw)
+        renderer = formrenderers.FormRenderer(self._cw)
         return vfield.render(form, renderer, tabindex=tabindex) \
                + renderer.render_help(form, vfield)
 
@@ -484,7 +485,7 @@
     @check_pageid
     @jsonize
     def js_user_callback(self, cbname):
-        page_data = self._cw.get_session_data(self._cw.pageid, {})
+        page_data = self._cw.session.data.get(self._cw.pageid, {})
         try:
             cb = page_data[cbname]
         except KeyError:
@@ -513,7 +514,7 @@
         self._cw.unregister_callback(self._cw.pageid, cbname)
 
     def js_unload_page_data(self):
-        self._cw.del_session_data(self._cw.pageid)
+        self._cw.session.data.pop(self._cw.pageid, None)
 
     def js_cancel_edition(self, errorurl):
         """cancelling edition from javascript
@@ -538,12 +539,12 @@
             cookies[statename] = nodeeid
             self._cw.set_cookie(cookies, statename)
         else:
-            marked = set(filter(None, treestate.value.split(';')))
+            marked = set(filter(None, treestate.value.split(':')))
             if nodeeid in marked:
                 marked.remove(nodeeid)
             else:
                 marked.add(nodeeid)
-            cookies[statename] = ';'.join(marked)
+            cookies[statename] = ':'.join(marked)
             self._cw.set_cookie(cookies, statename)
 
     @jsonize
@@ -558,15 +559,13 @@
 
     def _add_pending(self, eidfrom, rel, eidto, kind):
         key = 'pending_%s' % kind
-        pendings = self._cw.get_session_data(key, set())
+        pendings = self._cw.session.data.setdefault(key, set())
         pendings.add( (typed_eid(eidfrom), rel, typed_eid(eidto)) )
-        self._cw.set_session_data(key, pendings)
 
     def _remove_pending(self, eidfrom, rel, eidto, kind):
         key = 'pending_%s' % kind
-        pendings = self._cw.get_session_data(key)
+        pendings = self._cw.session.data[key]
         pendings.remove( (typed_eid(eidfrom), rel, typed_eid(eidto)) )
-        self._cw.set_session_data(key, pendings)
 
     def js_remove_pending_insert(self, (eidfrom, rel, eidto)):
         self._remove_pending(eidfrom, rel, eidto, 'insert')
@@ -652,7 +651,7 @@
 
     def redirect(self):
         req = self._cw
-        breadcrumbs = req.get_session_data('breadcrumbs', None)
+        breadcrumbs = req.session.data.get('breadcrumbs', None)
         if breadcrumbs is not None and len(breadcrumbs) > 1:
             url = req.rebuild_url(breadcrumbs[-2],
                                   __message=req._('transaction undoed'))
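
Several hunks in this changeset replace the req.get_session_data / set_session_data
helpers with direct access to the req.session.data dictionary. The pending-insert
helper shows the shape of the migration (a sketch of the two spellings, not a new
API)::

    def add_pending_insert(req, eidfrom, rtype, eidto):
        # old spelling: fetch, mutate, then store back through accessors
        #   pendings = req.get_session_data('pending_insert', set())
        #   pendings.add((eidfrom, rtype, eidto))
        #   req.set_session_data('pending_insert', pendings)
        # new spelling: session data is a plain dict, mutated in place
        pendings = req.session.data.setdefault('pending_insert', set())
        pendings.add((eidfrom, rtype, eidto))
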
--- a/web/views/basetemplates.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/views/basetemplates.py	Mon Jul 19 15:36:16 2010 +0200
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
@@ -16,18 +15,17 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""default templates for CubicWeb web client
+"""default templates for CubicWeb web client"""
 
-"""
 __docformat__ = "restructuredtext en"
 
 from logilab.mtconverter import xml_escape
 from logilab.common.deprecation import class_renamed
 
 from cubicweb.appobject import objectify_selector
-from cubicweb.selectors import match_kwargs
+from cubicweb.selectors import match_kwargs, no_cnx, anonymous_user
 from cubicweb.view import View, MainTemplate, NOINDEX, NOFOLLOW
-from cubicweb.utils import UStringIO, can_do_pdf_conversion
+from cubicweb.utils import UStringIO
 from cubicweb.schema import display_name
 from cubicweb.web import component, formfields as ff, formwidgets as fw
 from cubicweb.web.views import forms
@@ -60,6 +58,7 @@
 
 class LogInTemplate(LogInOutTemplate):
     __regid__ = 'login'
+    __select__ = anonymous_user()
     title = 'log in'
 
     def content(self, w):
@@ -80,6 +79,7 @@
                 xml_escape(indexurl),
                 self._cw._('go back to the index page')))
 
+
 @objectify_selector
 def templatable_view(cls, req, rset, *args, **kwargs):
     view = kwargs.pop('view', None)
@@ -91,7 +91,6 @@
         return 0
     return view.templatable
 
-
 class NonTemplatableViewTemplate(MainTemplate):
     """main template for any non templatable views (xml, binaries, etc.)"""
     __regid__ = 'main-template'
@@ -205,9 +204,9 @@
 
 
 class ErrorTemplate(TheMainTemplate):
-    """fallback template if an internal error occured during displaying the
-    main template. This template may be called for authentication error,
-    which means that req.cnx and req.user may not be set.
+    """fallback template if an internal error occured during displaying the main
+    template. This template may be called for authentication error, which means
+    that req.cnx and req.user may not be set.
     """
     __regid__ = 'error-template'
 
@@ -281,61 +280,6 @@
             self.w(u'</td>\n')
             self.w(u'</tr></table>\n')
 
-if can_do_pdf_conversion():
-    try:
-        from xml.etree.cElementTree import ElementTree
-    except ImportError: #python2.4
-        from elementtree import ElementTree
-    from subprocess import Popen as sub
-    from StringIO import StringIO
-    from tempfile import NamedTemporaryFile
-    from cubicweb.ext.xhtml2fo import ReportTransformer
-
-
-    class PdfViewComponent(component.EntityVComponent):
-        __regid__ = 'pdfview'
-
-        context = 'ctxtoolbar'
-
-        def cell_call(self, row, col, view):
-            entity = self.cw_rset.get_entity(row, col)
-            url = entity.absolute_url(vid=view.__regid__, __template='pdf-main-template')
-            iconurl = self._cw.build_url('data/pdf_icon.gif')
-            label = self._cw._('Download page as pdf')
-            self.w(u'<a href="%s" title="%s" class="toolbarButton"><img src="%s" alt="%s"/></a>' %
-                   (xml_escape(url), label, xml_escape(iconurl), label))
-
-    class PdfMainTemplate(TheMainTemplate):
-        __regid__ = 'pdf-main-template'
-
-        def call(self, view):
-            """build the standard view, then when it's all done, convert xhtml to pdf
-            """
-            super(PdfMainTemplate, self).call(view)
-            section = self._cw.form.pop('section', 'contentmain')
-            pdf = self.to_pdf(self._stream, section)
-            self._cw.set_content_type('application/pdf', filename='report.pdf')
-            self.binary = True
-            self.w = None
-            self.set_stream()
-            # pylint needs help
-            self.w(pdf)
-
-        def to_pdf(self, stream, section):
-            # XXX see ticket/345282
-            stream = stream.getvalue().replace('&nbsp;', '&#160;').encode('utf-8')
-            xmltree = ElementTree()
-            xmltree.parse(StringIO(stream))
-            foptree = ReportTransformer(section).transform(xmltree)
-            foptmp = NamedTemporaryFile()
-            pdftmp = NamedTemporaryFile()
-            foptree.write(foptmp)
-            foptmp.flush()
-            fopproc = sub(['/usr/bin/fop', foptmp.name, pdftmp.name])
-            fopproc.wait()
-            pdftmp.seek(0)
-            pdf = pdftmp.read()
-            return pdf
 
 # page parts templates ########################################################
 
@@ -418,7 +362,7 @@
         self.w(u'<td id="lastcolumn">')
         self.w(u'</td>\n')
         self.w(u'</tr></table>\n')
-        if self._cw.cnx.anonymous_connection:
+        if self._cw.session.anonymous_session:
             self.wview('logform', rset=self.cw_rset, id='popupLoginBox',
                        klass='hidden', title=False, showmessage=False)
 
@@ -502,9 +446,10 @@
     form_buttons = [fw.SubmitButton(label=_('log in'),
                                     attrs={'class': 'loginButton'})]
 
-    @property
-    def action(self):
-        return xml_escape(login_form_url(self._cw))
+    def form_action(self):
+        if self.action is None:
+            return login_form_url(self._cw)
+        return super(LogForm, self).form_action()
 
 
 class LogFormView(View):
@@ -536,7 +481,7 @@
         if cw.vreg.config['allow-email-login']:
             label = cw._('login or email')
         else:
-            label = cw._('login')
+            label = cw.pgettext('CWUser', 'login')
         form.field_by_name('__login').label = label
         self.w(form.render(table_class='', display_progress_div=False))
         cw.html_headers.add_onload('jQuery("#__login:visible").focus()')
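
The login form now goes through form_action() instead of an action property, so the
target can fall back to the computed login URL while still honouring an explicitly
set action. A subclass needing a different target would plausibly look like this
(MyLogForm and the URL are illustrative, not part of the changeset)::

    from cubicweb.web.views.basetemplates import LogForm

    class MyLogForm(LogForm):
        def form_action(self):
            # send credentials to a non-default controller (hypothetical path)
            return self._cw.build_url('custom-login')
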
--- a/web/views/baseviews.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/views/baseviews.py	Mon Jul 19 15:36:16 2010 +0200
@@ -21,13 +21,13 @@
 * primary, sidebox
 * oneline, incontext, outofcontext, text
 * list
-
+"""
 
-"""
 __docformat__ = "restructuredtext en"
 _ = unicode
 
 from datetime import timedelta
+from warnings import warn
 
 from rql import nodes
 
@@ -308,6 +308,18 @@
     __regid__ = 'simplelist'
     redirect_vid = 'incontext'
 
+    def call(self, subvid=None, **kwargs):
+        """display a list of entities by calling their <item_vid> view
+
+        :param listid: the DOM id to use for the root element
+        """
+        if subvid is None and 'vid' in kwargs:
+            warn("should give a 'subvid' argument instead of 'vid'",
+                 DeprecationWarning, stacklevel=2)
+        else:
+            kwargs['vid'] = subvid
+        return super(SimpleListView, self).call(**kwargs)
+
 
 class SameETypeListView(EntityView):
     """list of entities of the same type, when asked explicitly for same etype list
@@ -344,10 +356,15 @@
     __regid__ = 'csv'
     redirect_vid = 'incontext'
 
-    def call(self, **kwargs):
+    def call(self, subvid=None, **kwargs):
+        if subvid is None and 'vid' in kwargs:
+            warn("should give a 'subvid' argument instead of 'vid'",
+                 DeprecationWarning, stacklevel=2)
+        else:
+            kwargs['vid'] = subvid
         rset = self.cw_rset
         for i in xrange(len(rset)):
-            self.cell_call(i, 0, vid=kwargs.get('vid'))
+            self.cell_call(i, 0, **kwargs)
             if i < rset.rowcount-1:
                 self.w(u", ")
 
--- a/web/views/bookmark.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/views/bookmark.py	Mon Jul 19 15:36:16 2010 +0200
@@ -125,7 +125,7 @@
                 else:
                     # we can't edit shared bookmarks we don't own
                     bookmarksrql = 'Bookmark B WHERE B bookmarked_by U, B owned_by U, U eid %(x)s'
-                    erset = req.execute(bookmarksrql, {'x': ueid}, 'x',
+                    erset = req.execute(bookmarksrql, {'x': ueid},
                                         build_descr=False)
                     bookmarksrql %= {'x': ueid}
                 if erset:
--- a/web/views/boxes.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/views/boxes.py	Mon Jul 19 15:36:16 2010 +0200
@@ -15,8 +15,7 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""
-generic boxes for CubicWeb web client:
+"""Generic boxes for CubicWeb web client:
 
 * actions box
 * possible views box
@@ -24,8 +23,8 @@
 additional (disabled by default) boxes
 * schema box
 * startup views box
+"""
 
-"""
 __docformat__ = "restructuredtext en"
 _ = unicode
 
@@ -185,7 +184,6 @@
         for view in self._cw.vreg['views'].possible_views(self._cw, None):
             if view.category == 'startupview':
                 box.append(self.box_action(view))
-
         if not box.is_empty():
             box.render(self.w)
 
@@ -201,20 +199,5 @@
         if title:
             self.w(u'<div class="sideBoxTitle"><span>%s</span></div>' % title)
         self.w(u'<div class="%s"><div class="sideBoxBody">' % boxclass)
-        # if not too much entities, show them all in a list
-        maxrelated = self._cw.property_value('navigation.related-limit')
-        if self.cw_rset.rowcount <= maxrelated:
-            if len(self.cw_rset) == 1:
-                self.wview('incontext', self.cw_rset, row=0)
-            elif 1 < len(self.cw_rset) < 5:
-                self.wview('csv', self.cw_rset)
-            else:
-                self.wview('simplelist', self.cw_rset)
-        # else show links to display related entities
-        else:
-            self.cw_rset.limit(maxrelated)
-            rql = self.cw_rset.printable_rql(encoded=False)
-            self.wview('simplelist', self.cw_rset)
-            self.w(u'[<a href="%s">%s</a>]' % (self._cw.build_url(rql=rql),
-                                               self._cw._('see them all')))
+        self.wview('autolimited', self.cw_rset, **self.cw_extra_kwargs)
         self.w(u'</div>\n</div>\n')
--- a/web/views/cwproperties.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/views/cwproperties.py	Mon Jul 19 15:36:16 2010 +0200
@@ -27,12 +27,14 @@
 
 from cubicweb import UnknownProperty
 from cubicweb.selectors import (one_line_rset, none_rset, implements,
-                                match_user_groups, objectify_selector)
+                                match_user_groups, objectify_selector,
+                                logged_user_in_rset)
 from cubicweb.view import StartupView
 from cubicweb.web import uicfg, stdmsgs
 from cubicweb.web.form import FormViewMixIn
 from cubicweb.web.formfields import FIELDS, StringField
-from cubicweb.web.formwidgets import Select, TextInput, Button, SubmitButton, FieldWidget
+from cubicweb.web.formwidgets import (Select, TextInput, Button, SubmitButton,
+                                      FieldWidget)
 from cubicweb.web.views import primary, formrenderers
 
 uicfg.primaryview_section.tag_object_of(('*', 'for_user', '*'), 'hidden')
@@ -41,7 +43,6 @@
 # groups
 _('navigation')
 _('ui')
-_('actions')
 _('boxes')
 _('components')
 _('contentnavigation')
@@ -120,9 +121,11 @@
         # user's preference but not site's configuration
         for key in vreg.user_property_keys(self.__regid__=='systempropertiesform'):
             parts = key.split('.')
-            if parts[0] in vreg:
+            if parts[0] in vreg and len(parts) >= 3:
                 # appobject configuration
-                reg, oid, propid = parts
+                reg = parts[0]
+                propid = parts[-1]
+                oid = '.'.join(parts[1:-1])
                 groupedopts.setdefault(reg, {}).setdefault(oid, []).append(key)
             else:
                 mainopts.setdefault(parts[0], []).append(key)
@@ -234,17 +237,12 @@
         return subform
 
 
-@objectify_selector
-def is_user_prefs(cls, req, rset=None, row=None, col=0, **kwargs):
-    return req.user.eid == rset[row or 0][col]
-
-
 class CWPropertiesForm(SystemCWPropertiesForm):
     """user's preferences properties edition form"""
     __regid__ = 'propertiesform'
     __select__ = (
         (none_rset() & match_user_groups('users','managers'))
-        | (one_line_rset() & match_user_groups('users') & is_user_prefs())
+        | (one_line_rset() & match_user_groups('users') & logged_user_in_rset())
         | (one_line_rset() & match_user_groups('managers') & implements('CWUser'))
         )
 
@@ -384,7 +382,7 @@
                 w(u'%s' % self.render_label(form, field))
             error = form.field_error(field)
             if error:
-                w(u'<span class="error">%s</span>' % err)
+                w(u'<span class="error">%s</span>' % error)
             w(u'%s' % self.render_help(form, field))
             w(u'<div class="prefinput">')
             w(field.render(form, self))
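
The key-splitting change above lets appobject identifiers contain dots: the registry name is the first segment, the property id the last, and everything in between is the object id. A standalone sketch of that parsing rule (the function name and sample keys are made up):

    def split_prop_key(key, registries):
        # registry first, property id last, dotted object id in between
        parts = key.split('.')
        if parts[0] in registries and len(parts) >= 3:
            return parts[0], '.'.join(parts[1:-1]), parts[-1]
        return None, None, key

    print(split_prop_key('boxes.edit_box.visible',
                         ('boxes', 'contentnavigation')))
    # -> ('boxes', 'edit_box', 'visible')
    print(split_prop_key('contentnavigation.breadcrumbs.top.visible',
                         ('boxes', 'contentnavigation')))
    # -> ('contentnavigation', 'breadcrumbs.top', 'visible')
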
--- a/web/views/cwuser.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/views/cwuser.py	Mon Jul 19 15:36:16 2010 +0200
@@ -15,9 +15,8 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""Specific views for users
+"""Specific views for users and groups"""
 
-"""
 __docformat__ = "restructuredtext en"
 
 from logilab.mtconverter import xml_escape
@@ -25,6 +24,7 @@
 from cubicweb.selectors import one_line_rset, implements, match_user_groups
 from cubicweb.view import EntityView
 from cubicweb.web import action, uicfg
+from cubicweb.web.views import tabs
 
 _pvs = uicfg.primaryview_section
 _pvs.tag_attribute(('CWUser', 'login'), 'hidden')
@@ -84,3 +84,65 @@
         if emailaddr:
             self.w(u'<foaf:mbox>%s</foaf:mbox>\n' % xml_escape(emailaddr))
         self.w(u'</foaf:Person>\n')
+
+
+# group views ##################################################################
+
+_pvs.tag_object_of(('CWUser', 'in_group', 'CWGroup'), 'hidden')
+_pvs.tag_object_of(('*', 'require_group', 'CWGroup'), 'hidden')
+
+
+class CWGroupPrimaryView(tabs.TabbedPrimaryView):
+    __select__ = implements('CWGroup')
+    tabs = [_('cwgroup-main'), _('cwgroup-permissions')]
+    default_tab = 'cwgroup-main'
+
+
+class CWGroupMainTab(tabs.PrimaryTab):
+    __regid__ = 'cwgroup-main'
+    __select__ = tabs.PrimaryTab.__select__ & implements('CWGroup')
+
+    def render_entity_attributes(self, entity):
+        rql = 'Any U, FN, LN, CD, LL ORDERBY L WHERE U in_group G, ' \
+              'U login L, U firstname FN, U surname LN, U creation_date CD, ' \
+              'U last_login_time LL, G eid %(x)s'
+        rset = self._cw.execute(rql, {'x': entity.eid})
+        headers = (_(u'user'), _(u'first name'), _(u'last name'),
+                   _(u'creation date'), _(u'last login time'))
+        self.wview('editable-table', rset, 'null', displayfilter=True,
+                   displaycols=range(5), mainindex=0, headers=headers)
+
+class CWGroupPermTab(EntityView):
+    __regid__ = 'cwgroup-permissions'
+    __select__ = implements('CWGroup')
+
+    def cell_call(self, row, col):
+        self._cw.add_css(('cubicweb.schema.css','cubicweb.acl.css'))
+        access_types = ('read', 'delete', 'add', 'update')
+        w = self.w
+        entity = self.cw_rset.get_entity(row, col)
+        objtype_access = {'CWEType': ('read', 'delete', 'add', 'update'),
+                          'CWRelation': ('add', 'delete')}
+        rql_cwetype = 'DISTINCT Any X WHERE X %s_permission CWG, X is CWEType, ' \
+                      'CWG eid %%(e)s'
+        rql_cwrelation = 'DISTINCT Any RT WHERE X %s_permission CWG, X is CWRelation, ' \
+                         'X relation_type RT, CWG eid %%(e)s'
+        self.render_objtype_access(entity, 'CWEType', objtype_access, rql_cwetype)
+        self.render_objtype_access(entity, 'CWRelation', objtype_access, rql_cwrelation)
+
+    def render_objtype_access(self, entity, objtype, objtype_access, rql):
+        self.w(u'<h4>%s</h4>' % self._cw._(objtype))
+        for access_type in objtype_access[objtype]:
+            rset = self._cw.execute(rql % access_type, {'e': entity.eid})
+            if rset:
+                self.w(u'<div>%s:</div>' % self._cw.__(access_type + '_permission'))
+                self.w(u'<div>%s</div><br/>' % self._cw.view('csv', rset, 'null'))
+
+class CWGroupInContextView(EntityView):
+    __regid__ = 'incontext'
+    __select__ = implements('CWGroup')
+
+    def cell_call(self, row, col):
+        entity = self.cw_rset.complete_entity(row, col)
+        self.w(u'<a href="%s" class="%s">%s</a>' % (
+            entity.absolute_url(), entity.name, entity.printable_value('name')))
--- a/web/views/debug.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/views/debug.py	Mon Jul 19 15:36:16 2010 +0200
@@ -15,10 +15,8 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""management and error screens
+"""management and error screens"""
 
-
-"""
 __docformat__ = "restructuredtext en"
 
 from time import strftime, localtime
@@ -45,6 +43,7 @@
     __select__ = none_rset() & match_user_groups('managers')
 
     title = _('server information')
+    cache_max_age = 0
 
     def call(self, **kwargs):
         req = self._cw
@@ -128,6 +127,7 @@
     __regid__ = 'registry'
     __select__ = StartupView.__select__ & match_user_groups('managers')
     title = _('registry')
+    cache_max_age = 0
 
     def call(self, **kwargs):
         self.w(u'<h1>%s</h1>' % _("Registry's content"))
@@ -150,6 +150,7 @@
     __regid__ = 'gc'
     __select__ = StartupView.__select__ & match_user_groups('managers')
     title = _('memory leak debugging')
+    cache_max_age = 0
 
     def call(self, **kwargs):
         from cubicweb._gcdebug import gc_info
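
The debug views above now set `cache_max_age = 0` so the page is recomputed on every request. A hedged sketch of a cube-side view opting out of HTTP caching the same way ('mycube-status' is a made-up identifier):

    from cubicweb.view import StartupView

    class StatusView(StartupView):
        __regid__ = 'mycube-status'
        cache_max_age = 0   # max-age of zero: never served from the HTTP cache
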
--- a/web/views/editcontroller.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/views/editcontroller.py	Mon Jul 19 15:36:16 2010 +0200
@@ -262,13 +262,13 @@
             rql = 'DELETE %s %s %s WHERE X eid %%(x)s, Y eid %%(y)s' % (
                 subjvar, rschema, objvar)
             for reid in origvalues.difference(values):
-                self.relations_rql.append((rql, {'x': eid, 'y': reid}, ('x', 'y')))
+                self.relations_rql.append((rql, {'x': eid, 'y': reid}))
         seteids = values.difference(origvalues)
         if seteids:
             rql = 'SET %s %s %s WHERE X eid %%(x)s, Y eid %%(y)s' % (
                 subjvar, rschema, objvar)
             for reid in seteids:
-                self.relations_rql.append((rql, {'x': eid, 'y': reid}, ('x', 'y')))
+                self.relations_rql.append((rql, {'x': eid, 'y': reid}))
 
     def delete_entities(self, eidtypes):
         """delete entities from the repository"""
--- a/web/views/editforms.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/views/editforms.py	Mon Jul 19 15:36:16 2010 +0200
@@ -17,8 +17,8 @@
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """Set of HTML automatic forms to create, delete, copy or edit a single entity
 or a list of entities of the same type
+"""
 
-"""
 __docformat__ = "restructuredtext en"
 _ = unicode
 
@@ -27,10 +27,11 @@
 from logilab.mtconverter import xml_escape
 from logilab.common.decorators import cached
 
+from cubicweb import tags
 from cubicweb.selectors import (match_kwargs, one_line_rset, non_final_entity,
                                 specified_etype_implements, implements, yes)
 from cubicweb.view import EntityView
-from cubicweb import tags
+from cubicweb.schema import display_name
 from cubicweb.web import uicfg, stdmsgs, eid_param, dumps, \
      formfields as ff, formwidgets as fw
 from cubicweb.web.form import FormViewMixIn, FieldNotFound
@@ -306,7 +307,8 @@
         self._cw.add_js('cubicweb.edition.js')
         self._cw.add_css('cubicweb.form.css')
         if default is None:
-            default = xml_escape(self._cw._('<no value>'))
+            default = xml_escape(self._cw._('<%s not specified>')
+                                 % display_name(self._cw, rtype, role))
         schema = self._cw.vreg.schema
         entity = self.cw_rset.get_entity(row, col)
         rschema = schema.rschema(rtype)
@@ -338,16 +340,13 @@
                            self._build_renderer(entity, rtype, role))
 
     def should_edit_attribute(self, entity, rschema, form):
-        rtype = str(rschema)
-        rdef = entity.e_schema.rdef(rtype)
-        afs = uicfg.autoform_section.etype_get(
-            entity.__regid__, rtype, 'subject', rdef.object)
-        if 'main_hidden' in afs or not entity.has_perm('update'):
+        if not entity.has_perm('update'):
             return False
+        rdef = entity.e_schema.rdef(rschema)
         if not rdef.has_perm(self._cw, 'update', eid=entity.eid):
             return False
         try:
-            form.field_by_name(rtype, 'subject', entity.e_schema)
+            form.field_by_name(str(rschema), 'subject', entity.e_schema)
         except FieldNotFound:
             return False
         return True
@@ -435,15 +434,27 @@
     _onclick = (u"loadInlineEditionForm(%(eid)s, '%(rtype)s', '%(role)s', "
                 "'%(divid)s', %(reload)s, '%(vid)s', '%(default)s', '%(lzone)s');")
 
+    def should_edit_attribute(self, entity, rschema, form):
+        rdef = entity.e_schema.rdef(rschema)
+        afs = uicfg.autoform_section.etype_get(
+            entity.__regid__, rschema, 'subject', rdef.object)
+        if 'main_hidden' in afs:
+            return False
+        return super(AutoClickAndEditFormView, self).should_edit_attribute(
+            entity, rschema, form)
+
     def should_edit_relation(self, entity, rschema, role, rvid):
         eschema = entity.e_schema
-        rtype = str(rschema)
-        # XXX check autoform_section. what if 'generic'?
-        dispctrl = _pvdc.etype_get(eschema, rtype, role)
+        dispctrl = _pvdc.etype_get(eschema, rschema, role)
         vid = dispctrl.get('vid', 'reledit')
         if vid != 'reledit': # reledit explicitly disabled
             return False
-        if eschema.rdef(rschema, role).composite == role:
+        rdef = eschema.rdef(rschema, role)
+        if rdef.composite == role:
+            return False
+        afs = uicfg.autoform_section.etype_get(
+            entity.__regid__, rschema, role, rdef.object)
+        if 'main_hidden' in afs:
             return False
         return super(AutoClickAndEditFormView, self).should_edit_relation(
             entity, rschema, role, rvid)
--- a/web/views/editviews.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/views/editviews.py	Mon Jul 19 15:36:16 2010 +0200
@@ -126,5 +126,5 @@
             text, data = captcha.captcha(self._cw.vreg.config['captcha-font-file'],
                                          self._cw.vreg.config['captcha-font-size'])
             key = self._cw.form.get('captchakey', 'captcha')
-            self._cw.set_session_data(key, text)
+            self._cw.session.data[key] = text
             self.w(data.read())
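
`set_session_data` gives way here to the plain dict exposed as `session.data`. A short hedged comparison, where `req` stands for the usual request object and is not defined in this snippet:

    req.session.data['captcha'] = text        # replaces req.set_session_data('captcha', text)
    stored = req.session.data.get('captcha')  # read back with plain dict access
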
--- a/web/views/emailaddress.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/views/emailaddress.py	Mon Jul 19 15:36:16 2010 +0200
@@ -25,8 +25,13 @@
 from cubicweb.schema import display_name
 from cubicweb.selectors import implements
 from cubicweb import Unauthorized
+from cubicweb.web import uicfg
 from cubicweb.web.views import baseviews, primary
 
+_pvs = uicfg.primaryview_section
+_pvs.tag_subject_of(('*', 'use_email', '*'), 'attributes')
+_pvs.tag_subject_of(('*', 'primary_email', '*'), 'hidden')
+
 class EmailAddressPrimaryView(primary.PrimaryView):
     __select__ = implements('EmailAddress')
 
--- a/web/views/embedding.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/views/embedding.py	Mon Jul 19 15:36:16 2010 +0200
@@ -17,9 +17,8 @@
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """Objects interacting together to provides the external page embeding
 functionality.
-
+"""
 
-"""
 __docformat__ = "restructuredtext en"
 
 import re
--- a/web/views/formrenderers.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/views/formrenderers.py	Mon Jul 19 15:36:16 2010 +0200
@@ -174,12 +174,8 @@
             enctype = 'multipart/form-data'
         else:
             enctype = 'application/x-www-form-urlencoded'
-        if form.action is None:
-            action = self._cw.build_url('edit')
-        else:
-            action = form.action
         tag = ('<form action="%s" method="post" enctype="%s"' % (
-            xml_escape(action or '#'), enctype))
+            xml_escape(form.form_action() or '#'), enctype))
         if form.domid:
             tag += ' id="%s"' % form.domid
         if form.onsubmit:
@@ -401,10 +397,6 @@
     _options = FormRenderer._options + ('main_form_title',)
     main_form_title = _('main informations')
 
-    def render(self, form, values):
-        rendered = super(EntityFormRenderer, self).render(form, values)
-        return rendered + u'</div>' # close extra div introducted by open_form
-
     def open_form(self, form, values):
         attrs_fs_label = ''
         if self.main_form_title:
@@ -413,6 +405,13 @@
         attrs_fs_label += '<div class="formBody">'
         return attrs_fs_label + super(EntityFormRenderer, self).open_form(form, values)
 
+    def close_form(self, form, values):
+        """seems dumb but important for consistency w/ open_form, and necessary
+        for form renderers overriding open_form to output something other than,
+        or more than, a <form>
+        """
+        return super(EntityFormRenderer, self).close_form(form, values) + '</div>'
+
     def render_buttons(self, w, form):
         if len(form.form_buttons) == 3:
             w("""<table width="100%%">
@@ -448,14 +447,20 @@
                 values['divid'], self._cw._('click on the box to cancel the deletion')))
         w(u'<div class="iformBody">')
         eschema = form.edited_entity.e_schema
-        values['removemsg'] = self._cw._('remove-inlined-entity-form')
-        w(u'<div class="iformTitle"><span>%(title)s</span> '
-          '#<span class="icounter">%(counter)s</span> '
-          '[<a href="javascript: %(removejs)s;noop();">%(removemsg)s</a>]</div>'
-          % values)
+        if values['removejs']:
+            values['removemsg'] = self._cw._('remove-inlined-entity-form')
+            w(u'<div class="iformTitle"><span>%(title)s</span> '
+              '#<span class="icounter">%(counter)s</span> '
+              '[<a href="javascript: %(removejs)s;noop();">%(removemsg)s</a>]</div>'
+              % values)
+        else:
+            w(u'<div class="iformTitle"><span>%(title)s</span> '
+              '#<span class="icounter">%(counter)s</span></div>'
+              % values)
+        # XXX that stinks
         # cleanup values
         for key in ('title', 'removejs', 'removemsg'):
-            values.pop(key)
+            values.pop(key, None)
         self.render_fields(w, form, values)
         w(u'</div></div>')
         return '\n'.join(data)
--- a/web/views/forms.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/views/forms.py	Mon Jul 19 15:36:16 2010 +0200
@@ -25,8 +25,8 @@
    using a form renderer. No display is actually done here, though you'll find
    some attributes of form that are used to control the rendering process.
 
-Besides the automagic form we'll see later, they are barely two form
-classes in |cubicweb|:
+Besides the automagic form we'll see later, there are roughly two main
+form classes in |cubicweb|:
 
 .. autoclass:: cubicweb.web.views.forms.FieldsForm
 .. autoclass:: cubicweb.web.views.forms.EntityFieldsForm
@@ -194,6 +194,16 @@
             for field in field.actual_fields(self):
                 field.form_init(self)
 
+    _default_form_action_path = 'edit'
+    def form_action(self):
+        try:
+            action = self.get_action() # avoid spurious warning w/ autoform bw compat property
+        except AttributeError:
+            action = self.action
+        if action is None:
+            return self._cw.build_url(self._default_form_action_path)
+        return action
+
     @deprecated('[3.6] use .add_hidden(name, value, **kwargs)')
     def form_add_hidden(self, name, value=None, **kwargs):
         return self.add_hidden(name, value, **kwargs)
@@ -222,8 +232,6 @@
     __regid__ = 'base'
     __select__ = (match_kwargs('entity')
                   | (one_line_rset() & non_final_entity()))
-
-    internal_fields = FieldsForm.internal_fields + ('__type', 'eid', '__maineid')
     domid = 'entityForm'
 
     @iclassmethod
--- a/web/views/idownloadable.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/views/idownloadable.py	Mon Jul 19 15:36:16 2010 +0200
@@ -28,7 +28,7 @@
                                 implements, match_context_prop)
 from cubicweb.interfaces import IDownloadable
 from cubicweb.mttransforms import ENGINE
-from cubicweb.web.box import EntityBoxTemplate
+from cubicweb.web import box, httpcache
 from cubicweb.web.views import primary, baseviews
 
 
@@ -54,7 +54,7 @@
     w(u'</div></div>\n')
 
 
-class DownloadBox(EntityBoxTemplate):
+class DownloadBox(box.EntityBoxTemplate):
     __regid__ = 'download_box'
     # no download box for images
     # XXX primary_view selector ?
@@ -77,6 +77,7 @@
     templatable = False
     content_type = 'application/octet-stream'
     binary = True
+    http_cache_manager = httpcache.EntityHTTPCacheManager
     add_to_breadcrumbs = False
 
     def set_request_content_type(self):
@@ -95,6 +96,8 @@
     def call(self):
         self.w(self.cw_rset.complete_entity(0, 0).download_data())
 
+    def last_modified(self):
+        return self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0).modification_date
 
 class DownloadLinkView(EntityView):
     """view displaying a link to download the file"""
--- a/web/views/iprogress.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/views/iprogress.py	Mon Jul 19 15:36:16 2010 +0200
@@ -30,9 +30,9 @@
 from cubicweb.interfaces import IProgress, IMileStone
 from cubicweb.schema import display_name
 from cubicweb.view import EntityView
-
+from cubicweb.web.views.tableview import EntityAttributesTableView
 
-class ProgressTableView(EntityView):
+class ProgressTableView(EntityAttributesTableView):
     """The progress table view is able to display progress information
     of any object implement IMileStone.
 
@@ -52,26 +52,13 @@
     __regid__ = 'progress_table_view'
     title = _('task progression')
     __select__ = implements(IMileStone)
+    table_css = "progress"
+    css_files = ('cubicweb.iprogress.css',)
 
     # default columns of the table
     columns = (_('project'), _('milestone'), _('state'), _('eta_date'),
                _('cost'), _('progress'), _('todo_by'))
 
-
-    def call(self, columns=None):
-        """displays all versions in a table"""
-        self._cw.add_css('cubicweb.iprogress.css')
-        _ = self._cw._
-        self.columns = columns or self.columns
-        ecls = self._cw.vreg['etypes'].etype_class(self.cw_rset.description[0][0])
-        self.w(u'<table class="progress">')
-        self.table_header(ecls)
-        self.w(u'<tbody>')
-        for row in xrange(self.cw_rset.rowcount):
-            self.cell_call(row=row, col=0)
-        self.w(u'</tbody>')
-        self.w(u'</table>')
-
     def cell_call(self, row, col):
         _ = self._cw._
         entity = self.cw_rset.get_entity(row, col)
@@ -104,20 +91,6 @@
         """use entity's type as label"""
         return display_name(self._cw, ecls.__regid__)
 
-    def table_header(self, ecls):
-        """builds the table's header"""
-        self.w(u'<thead><tr>')
-        _ = self._cw._
-        for column in self.columns:
-            meth = getattr(self, 'header_for_%s' % column, None)
-            if meth:
-                colname = meth(ecls)
-            else:
-                colname = _(column)
-            self.w(u'<th>%s</th>' % xml_escape(colname))
-        self.w(u'</tr></thead>\n')
-
-
     ## cell management ########################################################
     def build_project_cell(self, entity):
         """``project`` column cell renderer"""
--- a/web/views/management.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/views/management.py	Mon Jul 19 15:36:16 2010 +0200
@@ -15,10 +15,8 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""security management and error screens
+"""security management and error screens"""
 
-
-"""
 __docformat__ = "restructuredtext en"
 _ = unicode
 
@@ -29,56 +27,14 @@
 from cubicweb.uilib import html_traceback, rest_traceback
 from cubicweb.web import formwidgets as wdgs
 from cubicweb.web.formfields import guess_field
+from cubicweb.web.views.schema import SecurityViewMixIn
+
+from yams.buildobjs import EntityType
 
 SUBMIT_MSGID = _('Submit bug report')
 MAIL_SUBMIT_MSGID = _('Submit bug report by mail')
 
-
-class SecurityViewMixIn(object):
-    """display security information for a given schema """
-
-    def schema_definition(self, eschema, link=True,  access_types=None):
-        w = self.w
-        _ = self._cw._
-        if not access_types:
-            access_types = eschema.ACTIONS
-        w(u'<table class="schemaInfo">')
-        w(u'<tr><th>%s</th><th>%s</th><th>%s</th></tr>' % (
-            _("permission"), _('granted to groups'), _('rql expressions')))
-        for access_type in access_types:
-            w(u'<tr>')
-            w(u'<td>%s</td>' % self._cw.__('%s_perm' % access_type))
-            groups = eschema.get_groups(access_type)
-            l = []
-            groups = [(_(group), group) for group in groups]
-            for trad, group in sorted(groups):
-                if link:
-                    # XXX we should get a group entity and call its absolute_url
-                    # method
-                    l.append(u'<a href="%s" class="%s">%s</a><br/>' % (
-                    self._cw.build_url('cwgroup/%s' % group), group, trad))
-                else:
-                    l.append(u'<div class="%s">%s</div>' % (group, trad))
-            w(u'<td>%s</td>' % u''.join(l))
-            rqlexprs = eschema.get_rqlexprs(access_type)
-            w(u'<td>%s</td>' % u'<br/><br/>'.join(expr.expression for expr in rqlexprs))
-            w(u'</tr>\n')
-        w(u'</table>')
-
-    def has_schema_modified_permissions(self, eschema, access_types):
-        """ return True if eschema's actual permissions are diffrents
-        from the default ones
-        """
-        for access_type in access_types:
-            if eschema.get_rqlexprs(access_type):
-                return True
-            if eschema.get_groups(access_type) != \
-                    frozenset(eschema.get_default_groups()[access_type]):
-                return True
-        return False
-
-
-class SecurityManagementView(EntityView, SecurityViewMixIn):
+class SecurityManagementView(SecurityViewMixIn, EntityView):
     """display security information for a given entity"""
     __regid__ = 'security'
     __select__ = EntityView.__select__ & authenticated_user()
@@ -101,7 +57,7 @@
              xml_escape(entity.dc_title())))
         # first show permissions defined by the schema
         self.w('<h2>%s</h2>' % _('schema\'s permissions definitions'))
-        self.schema_definition(entity.e_schema)
+        self.permissions_table(entity.e_schema)
         self.w('<h2>%s</h2>' % _('manage security'))
         # ownership information
         if self._cw.vreg.schema.rschema('owned_by').has_perm(self._cw, 'add',
@@ -253,7 +209,7 @@
         # creates a bug submission link if submit-mail is set
         if self._cw.vreg.config['submit-mail']:
             form = self._cw.vreg['forms'].select('base', self._cw, rset=None,
-                                             mainform=False)
+                                                 mainform=False)
             binfo = text_error_description(ex, excinfo, req, eversion, cversions)
             form.add_hidden('description', binfo,
                             # we must use a text area to keep line breaks
--- a/web/views/navigation.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/views/navigation.py	Mon Jul 19 15:36:16 2010 +0200
@@ -15,9 +15,8 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""navigation components definition for CubicWeb web client
+"""navigation components definition for CubicWeb web client"""
 
-"""
 __docformat__ = "restructuredtext en"
 _ = unicode
 
@@ -38,29 +37,52 @@
 
     def call(self):
         """displays a resultset by page"""
-        w = self.w
-        req = self._cw
+        params = dict(self._cw.form)
+        self.clean_params(params)
+        basepath = self._cw.relative_path(includeparams=False)
+        self.w(u'<div class="pagination">')
+        self.w(u'%s&#160;' % self.previous_link(basepath, params))
+        self.w(u'[&#160;%s&#160;]' %
+               u'&#160;| '.join(self.iter_page_links(basepath, params)))
+        self.w(u'&#160;%s' % self.next_link(basepath, params))
+        self.w(u'</div>')
+
+    def index_display(self, start, stop):
+        return u'%s - %s' % (start+1, stop+1)
+
+    def iter_page_links(self, basepath, params):
         rset = self.cw_rset
         page_size = self.page_size
         start = 0
-        blocklist = []
-        params = dict(req.form)
-        self.clean_params(params)
-        basepath = req.relative_path(includeparams=False)
         while start < rset.rowcount:
             stop = min(start + page_size - 1, rset.rowcount - 1)
-            blocklist.append(self.page_link(basepath, params, start, stop,
-                                            self.index_display(start, stop)))
+            yield self.page_link(basepath, params, start, stop,
+                                 self.index_display(start, stop))
             start = stop + 1
+
+
+class PageNavigationSelect(PageNavigation):
+    """displays a resultset by page like PageNavigation, but in a <select>,
+    which is better when there are a lot of results.
+    """
+    __select__ = paginated_rset(4)
+
+    page_link_templ = u'<option value="%s" title="%s">%s</option>'
+    selected_page_link_templ = u'<option value="%s" selected="selected" title="%s">%s</option>'
+    def call(self):
+        params = dict(self._cw.form)
+        self.clean_params(params)
+        basepath = self._cw.relative_path(includeparams=False)
+        w = self.w
         w(u'<div class="pagination">')
         w(u'%s&#160;' % self.previous_link(basepath, params))
-        w(u'[&#160;%s&#160;]' % u'&#160;| '.join(blocklist))
+        w(u'<select onchange="javascript: document.location=this.options[this.selectedIndex].value">')
+        for option in self.iter_page_links(basepath, params):
+            w(option)
+        w(u'</select>')
         w(u'&#160;%s' % self.next_link(basepath, params))
         w(u'</div>')
 
-    def index_display(self, start, stop):
-        return u'%s - %s' % (start+1, stop+1)
-
 
 class SortedNavigation(NavigationComponent):
     """sorted navigation apply if navigation is needed (according to page size)
@@ -215,7 +237,7 @@
     if w is None:
         w = view.w
     nav = req.vreg['components'].select_or_none(
-        'navigation', req, rset=rset, page_size=page_size)
+        'navigation', req, rset=rset, page_size=page_size, view=view)
     if nav:
         if w is None:
             w = view.w
--- a/web/views/primary.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/views/primary.py	Mon Jul 19 15:36:16 2010 +0200
@@ -82,7 +82,7 @@
             self.w(u'</td><td>')
             self.w(u'<div class="primaryRight">')
             if hasattr(self, 'render_side_related'):
-                warn('render_side_related is deprecated')
+                warn('[3.2] render_side_related is deprecated')
                 self.render_side_related(entity, [])
             self.render_side_boxes(boxes)
             self.w(u'</div>')
@@ -95,7 +95,7 @@
             try:
                 comp.render(w=self.w, row=self.cw_row, view=self)
             except NotImplementedError:
-                warn('component %s doesnt implement cell_call, please update'
+                warn('[3.2] component %s does not implement cell_call, please update'
                      % comp.__class__, DeprecationWarning)
                 comp.render(w=self.w, view=self)
         self.w(u'</div>')
@@ -126,7 +126,7 @@
         """default implementation return an empty string"""
         return u''
 
-    def render_entity_attributes(self, entity, siderelations=None):
+    def render_entity_attributes(self, entity):
         display_attributes = []
         for rschema, _, role, dispctrl in self._section_def(entity, 'attributes'):
             vid = dispctrl.get('vid', 'reledit')
@@ -152,7 +152,7 @@
                     self._render_attribute(rschema, value, role=role, table=True)
             self.w(u'</table>')
 
-    def render_entity_relations(self, entity, siderelations=None):
+    def render_entity_relations(self, entity):
         for rschema, tschemas, role, dispctrl in self._section_def(entity, 'relations'):
             rset = self._relation_rset(entity, rschema, role, dispctrl)
             if rset:
@@ -176,9 +176,10 @@
                     warn('[3.5] box views should now be defined as a 4-uple (label, rset, vid, dispctrl), '
                          'please update %s' % self.__class__.__name__,
                          DeprecationWarning)
-                    label, rset, vid  = box
+                    label, rset, vid = box
+                    dispctrl = {}
                 self.w(u'<div class="sideBox">')
-                self.wview(vid, rset, title=label)
+                self.wview(vid, rset, title=label, initargs={'dispctrl': dispctrl})
                 self.w(u'</div>')
             else:
                 try:
@@ -265,30 +266,33 @@
     __regid__ = 'autolimited'
 
     def call(self, **kwargs):
-        # nb: rset retreived using entity.related with limit + 1 if any
-        # because of that, we known that rset.printable_rql() will return
-        # rql with no limit set anyway (since it's handled manually)
+        # nb: rset is retrieved using entity.related with limit + 1 if any.
+        # Because of that, we know that rset.printable_rql() will return rql
+        # with no limit set anyway (since it's handled manually)
         if 'dispctrl' in self.cw_extra_kwargs:
             limit = self.cw_extra_kwargs['dispctrl'].get('limit')
+            subvid = self.cw_extra_kwargs['dispctrl'].get('subvid', 'incontext')
         else:
             limit = None
+            subvid = 'incontext'
         if limit is None or self.cw_rset.rowcount <= limit:
             if self.cw_rset.rowcount == 1:
-                self.wview('incontext', self.cw_rset, row=0)
+                self.wview(subvid, self.cw_rset, row=0)
             elif 1 < self.cw_rset.rowcount <= 5:
-                self.wview('csv', self.cw_rset)
+                self.wview('csv', self.cw_rset, subvid=subvid)
             else:
                 self.w(u'<div>')
-                self.wview('simplelist', self.cw_rset)
+                self.wview('simplelist', self.cw_rset, subvid=subvid)
                 self.w(u'</div>')
         # else show links to display related entities
         else:
             rql = self.cw_rset.printable_rql()
             self.cw_rset.limit(limit) # remove extra entity
             self.w(u'<div>')
-            self.wview('simplelist', self.cw_rset)
-            self.w(u'[<a href="%s">%s</a>]' % (self._cw.build_url(rql=rql),
-                                               self._cw._('see them all')))
+            self.wview('simplelist', self.cw_rset, subvid=subvid)
+            self.w(u'[<a href="%s">%s</a>]' % (
+                xml_escape(self._cw.build_url(rql=rql, vid=subvid)),
+                self._cw._('see them all')))
             self.w(u'</div>')
 
 
@@ -310,18 +314,7 @@
 
 _pvs = uicfg.primaryview_section
 for rtype in ('eid', 'creation_date', 'modification_date', 'cwuri',
-              'is', 'is_instance_of', 'identity',
-              'owned_by', 'created_by', 'in_state',
-              'wf_info_for', 'by_transition', 'from_state', 'to_state',
-              'require_permission', 'from_entity', 'to_entity',
-              'see_also'):
+              'is', 'is_instance_of', 'identity', 'owned_by', 'created_by',
+              'require_permission', 'see_also'):
     _pvs.tag_subject_of(('*', rtype, '*'), 'hidden')
     _pvs.tag_object_of(('*', rtype, '*'), 'hidden')
-
-_pvs.tag_subject_of(('*', 'use_email', '*'), 'attributes')
-_pvs.tag_subject_of(('*', 'primary_email', '*'), 'hidden')
-
-for attr in ('name', 'final'):
-    _pvs.tag_attribute(('CWEType', attr), 'hidden')
-for attr in ('name', 'final', 'symmetric', 'inlined'):
-    _pvs.tag_attribute(('CWRType', attr), 'hidden')
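
The 'autolimited' view above reads `limit` and `subvid` from the `dispctrl` entry of `cw_extra_kwargs`, so cubes can tune the related-entities boxes declaratively. A hedged sketch using `uicfg.primaryview_display_ctrl` as seen in the schema.py hunk below; the `Blog`/`entry_of` schema names are made up:

    from cubicweb.web import uicfg

    _pvdc = uicfg.primaryview_display_ctrl
    # show at most 5 related entries, each rendered with the 'outofcontext' view
    _pvdc.tag_object_of(('*', 'entry_of', 'Blog'),
                        {'limit': 5, 'subvid': 'outofcontext'})
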
--- a/web/views/schema.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/views/schema.py	Mon Jul 19 15:36:16 2010 +0200
@@ -22,19 +22,20 @@
 
 from itertools import cycle
 
+from logilab.common.ureports import Section, Table
 from logilab.mtconverter import xml_escape
 from yams import BASE_TYPES, schema2dot as s2d
+from yams.buildobjs import DEFAULT_ATTRPERMS
 
 from cubicweb.selectors import (implements, yes, match_user_groups,
-                                has_related_entities)
+                                has_related_entities, authenticated_user)
 from cubicweb.schema import (META_RTYPES, SCHEMA_TYPES, SYSTEM_RTYPES,
                              WORKFLOW_TYPES, INTERNAL_TYPES)
-from cubicweb.schemaviewer import SchemaViewer
 from cubicweb.view import EntityView, StartupView
 from cubicweb import tags, uilib
-from cubicweb.web import action, facet, uicfg
+from cubicweb.web import action, facet, uicfg, schemaviewer
 from cubicweb.web.views import TmpFileViewMixin
-from cubicweb.web.views import primary, baseviews, tabs, management
+from cubicweb.web.views import primary, baseviews, tabs, tableview, iprogress
 
 ALWAYS_SKIP_TYPES = BASE_TYPES | SCHEMA_TYPES
 SKIP_TYPES  = (ALWAYS_SKIP_TYPES | META_RTYPES | SYSTEM_RTYPES | WORKFLOW_TYPES
@@ -47,59 +48,143 @@
     return ALWAYS_SKIP_TYPES
 
 _pvs = uicfg.primaryview_section
+_pvdc = uicfg.primaryview_display_ctrl
+
 for _action in ('read', 'add', 'update', 'delete'):
     _pvs.tag_subject_of(('*', '%s_permission' % _action, '*'), 'hidden')
     _pvs.tag_object_of(('*', '%s_permission' % _action, '*'), 'hidden')
 
+for _etype in ('CWEType', 'CWRType', 'CWAttribute', 'CWRelation'):
+    _pvdc.tag_attribute((_etype, 'description'), {'showlabel': False})
+
+_pvs.tag_attribute(('CWEType', 'name'), 'hidden')
+_pvs.tag_attribute(('CWEType', 'final'), 'hidden')
+_pvs.tag_object_of(('*', 'workflow_of', 'CWEType'), 'hidden')
+_pvs.tag_subject_of(('CWEType', 'default_workflow', '*'), 'hidden')
+_pvs.tag_object_of(('*', 'specializes', 'CWEType'), 'hidden')
+_pvs.tag_subject_of(('CWEType', 'specializes', '*'), 'hidden')
+_pvs.tag_object_of(('*', 'from_entity', 'CWEType'), 'hidden')
+_pvs.tag_object_of(('*', 'to_entity', 'CWEType'), 'hidden')
+
+_pvs.tag_attribute(('CWRType', 'name'), 'hidden')
+_pvs.tag_attribute(('CWRType', 'final'), 'hidden')
+_pvs.tag_object_of(('*', 'relation_type', 'CWRType'), 'hidden')
+
+_pvs.tag_subject_of(('CWAttribute', 'constrained_by', '*'), 'hidden')
+_pvs.tag_subject_of(('CWRelation', 'constrained_by', '*'), 'hidden')
+
+
+class SecurityViewMixIn(object):
+    """mixin providing methods to display security information for an entity,
+    relation or relation definition schema
+    """
+
+    def permissions_table(self, erschema, permissions=None):
+        self._cw.add_css('cubicweb.acl.css')
+        w = self.w
+        _ = self._cw._
+        w(u'<table class="schemaInfo">')
+        w(u'<tr><th>%s</th><th>%s</th><th>%s</th></tr>' % (
+            _("permission"), _('granted to groups'), _('rql expressions')))
+        for action in erschema.ACTIONS:
+            w(u'<tr><td>%s</td><td>' % _(action))
+            if permissions is None:
+                groups = erschema.get_groups(action)
+                rqlexprs = sorted(e.expression for e in erschema.get_rqlexprs(action))
+            else:
+                groups = permissions[action][0]
+                rqlexprs = permissions[action][1]
+            # XXX get group entity and call its incontext view
+            groups = [u'<a class="%s" href="%s">%s</a>' % (
+                group, self._cw.build_url('cwgroup/%s' % group), label)
+                      for group, label in sorted((_(g), g) for g in groups)]
+            w(u'<br/>'.join(groups))
+            w(u'</td><td>')
+            w(u'<br/>'.join(rqlexprs))
+            w(u'</td></tr>\n')
+        w(u'</table>')
+
+    def grouped_permissions_table(self, rschema):
+        # group relation definitions with identical permissions
+        perms = {}
+        for rdef in rschema.rdefs.itervalues():
+            rdef_perms = []
+            for action in ('read', 'add', 'delete'):
+                groups = sorted(rdef.get_groups(action))
+                exprs = sorted(e.expression for e in rdef.get_rqlexprs(action))
+                rdef_perms.append( (action, (tuple(groups), tuple(exprs))) )
+            rdef_perms = tuple(rdef_perms)
+            if rdef_perms in perms:
+                perms[rdef_perms].append( (rdef.subject, rdef.object) )
+            else:
+                perms[rdef_perms] = [(rdef.subject, rdef.object)]
+        # set layout permissions in a table for each group of relation
+        # definition
+        w = self.w
+        w(u'<div style="margin: 0px 1.5em">')
+        tmpl = u'<strong>%s</strong> %s <strong>%s</strong>'
+        for perm, rdefs in perms.iteritems():
+            w(u'<div>%s</div>' % u', '.join(
+                tmpl % (_(s.type), _(rschema.type), _(o.type)) for s, o in rdefs))
+            # accessing rdef from previous loop by design: only used to get
+            # ACTIONS
+            self.permissions_table(rdef, dict(perm))
+        w(u'</div>')
+
+
 # global schema view ###########################################################
 
 class SchemaView(tabs.TabsMixin, StartupView):
+    """display schema information (graphically, listing tables...) in tabs"""
     __regid__ = 'schema'
     title = _('instance schema')
-    tabs = [_('schema-text'), _('schema-image')]
-    default_tab = 'schema-text'
+    tabs = [_('schema-diagram'), _('schema-entity-types'),
+            _('schema-relation-types'), _('schema-security')]
+    default_tab = 'schema-diagram'
 
     def call(self):
-        """display schema information"""
-        self._cw.add_js('cubicweb.ajax.js')
-        self._cw.add_css(('cubicweb.schema.css','cubicweb.acl.css'))
         self.w(u'<h1>%s</h1>' % _('Schema of the data model'))
         self.render_tabs(self.tabs, self.default_tab)
 
 
-class SchemaTabImageView(StartupView):
-    __regid__ = 'schema-image'
+class SchemaImageTab(StartupView):
+    __regid__ = 'schema-diagram'
 
     def call(self):
         self.w(_(u'<div>This schema of the data model <em>excludes</em> the '
                  u'meta-data, but you can also display a <a href="%s">complete '
                  u'schema with meta-data</a>.</div>')
                % xml_escape(self._cw.build_url('view', vid='schemagraph', skipmeta=0)))
+        self.w(u'<div><a href="%s">%s</a></div>' %
+               (self._cw.build_url('view', vid='owl'),
+                self._cw._(u'Download schema as OWL')))
         self.w(u'<img src="%s" alt="%s"/>\n' % (
             xml_escape(self._cw.build_url('view', vid='schemagraph', skipmeta=1)),
             self._cw._("graphical representation of the instance'schema")))
 
 
-class SchemaTabTextView(StartupView):
-    __regid__ = 'schema-text'
+class SchemaETypeTab(StartupView):
+    __regid__ = 'schema-entity-types'
 
     def call(self):
-        rset = self._cw.execute('Any X ORDERBY N WHERE X is CWEType, X name N, '
-                                'X final FALSE')
-        self.wview('table', rset, displayfilter=True)
+        self.wview('table', self._cw.execute(
+            'Any X ORDERBY N WHERE X is CWEType, X name N, X final FALSE'))
 
 
-class ManagerSchemaPermissionsView(StartupView, management.SecurityViewMixIn):
+class SchemaRTypeTab(StartupView):
+    __regid__ = 'schema-relation-types'
+
+    def call(self):
+        self.wview('table', self._cw.execute(
+            'Any X ORDERBY N WHERE X is CWRType, X name N, X final FALSE'))
+
+
+class SchemaPermissionsTab(SecurityViewMixIn, StartupView):
     __regid__ = 'schema-security'
     __select__ = StartupView.__select__ & match_user_groups('managers')
 
     def call(self, display_relations=True):
-        self._cw.add_css('cubicweb.acl.css')
         skiptypes = skip_types(self._cw)
-        formparams = {}
-        formparams['sec'] = self.__regid__
-        if not skiptypes:
-            formparams['skipmeta'] = u'0'
         schema = self._cw.vreg.schema
         # compute entities
         entities = sorted(eschema for eschema in schema.entities()
@@ -114,249 +199,422 @@
             relations = []
         # index
         _ = self._cw._
-        self.w(u'<div id="schema_security"><a id="index" href="index"/>')
-        self.w(u'<h2 class="schema">%s</h2>' % _('index').capitalize())
-        self.w(u'<h4>%s</h4>' %   _('Entities').capitalize())
+        url = xml_escape(self._cw.build_url('schema'))
+        self.w(u'<div id="schema_security">')
+        self.w(u'<h2 class="schema">%s</h2>' % _('Index'))
+        self.w(u'<h4 id="entities">%s</h4>' % _('Entity types'))
         ents = []
         for eschema in sorted(entities):
-            url = xml_escape(self._cw.build_url('schema', **formparams))
-            ents.append(u'<a class="grey" href="%s#%s">%s</a> (%s)' % (
-                url,  eschema.type, eschema.type, _(eschema.type)))
+            ents.append(u'<a class="grey" href="%s#%s">%s</a>' % (
+                url,  eschema.type, eschema.type))
         self.w(u', '.join(ents))
-        self.w(u'<h4>%s</h4>' % (_('relations').capitalize()))
+        self.w(u'<h4 id="relations">%s</h4>' % _('Relation types'))
         rels = []
         for rschema in sorted(relations):
-            url = xml_escape(self._cw.build_url('schema', **formparams))
-            rels.append(u'<a class="grey" href="%s#%s">%s</a> (%s), ' %  (
-                url , rschema.type, rschema.type, _(rschema.type)))
-        self.w(u', '.join(ents))
-        # entities
-        self.display_entities(entities, formparams)
-        # relations
+            rels.append(u'<a class="grey" href="%s#%s">%s</a>' %  (
+                url , rschema.type, rschema.type))
+        self.w(u', '.join(rels))
+        # permissions tables
+        self.display_entities(entities)
         if relations:
-            self.display_relations(relations, formparams)
+            self.display_relations(relations)
         self.w(u'</div>')
 
-    def display_entities(self, entities, formparams):
+    def has_non_default_perms(self, rdef):
+        """return True if the given *attribute* relation definition has custom
+        permissions
+        """
+        for action in rdef.ACTIONS:
+            def_rqlexprs = []
+            def_groups = []
+            for perm in DEFAULT_ATTRPERMS[action]:
+                if not isinstance(perm, basestring):
+                    def_rqlexprs.append(perm.expression)
+                else:
+                    def_groups.append(perm)
+            rqlexprs = [rql.expression for rql in rdef.get_rqlexprs(action)]
+            groups = rdef.get_groups(action)
+            if groups != frozenset(def_groups) or \
+                frozenset(rqlexprs) != frozenset(def_rqlexprs):
+                return True
+        return False
+
+    def display_entities(self, entities):
         _ = self._cw._
-        self.w(u'<a id="entities" href="entities"/>')
-        self.w(u'<h2 class="schema">%s</h2>' % _('permissions for entities').capitalize())
+        url = xml_escape(self._cw.build_url('schema'))
+        self.w(u'<h2 id="entities" class="schema">%s</h2>' % _('Permissions for entity types'))
         for eschema in entities:
-            self.w(u'<a id="%s" href="%s"/>' %  (eschema.type, eschema.type))
-            self.w(u'<h3 class="schema">%s (%s) ' % (eschema.type, _(eschema.type)))
-            url = xml_escape(self._cw.build_url('schema', **formparams) + '#index')
-            self.w(u'<a href="%s"><img src="%s" alt="%s"/></a>' % (
+            self.w(u'<h3 id="%s" class="schema"><a href="%s">%s (%s)</a> ' % (
+                eschema.type, self._cw.build_url('cwetype/%s' % eschema.type),
+                eschema.type, _(eschema.type)))
+            self.w(u'<a href="%s#schema_security"><img src="%s" alt="%s"/></a>' % (
                 url,  self._cw.external_resource('UP_ICON'), _('up')))
             self.w(u'</h3>')
             self.w(u'<div style="margin: 0px 1.5em">')
-            self._cw.vreg.schema_definition(eschema, link=False)
+            self.permissions_table(eschema)
             # display entity attributes only if they have some permissions modified
             modified_attrs = []
             for attr, etype in  eschema.attribute_definitions():
-                if self.has_schema_modified_permissions(attr, attr.ACTIONS):
-                    modified_attrs.append(attr)
-            if  modified_attrs:
-                self.w(u'<h4>%s</h4>' % _('attributes with modified permissions:').capitalize())
+                rdef = eschema.rdef(attr)
+                if attr not in META_RTYPES and self.has_non_default_perms(rdef):
+                    modified_attrs.append(rdef)
+            if modified_attrs:
+                self.w(u'<h4>%s</h4>' % _('Attributes with non default permissions:'))
                 self.w(u'</div>')
                 self.w(u'<div style="margin: 0px 6em">')
-                for attr in  modified_attrs:
-                    self.w(u'<h4 class="schema">%s (%s)</h4> ' % (attr.type, _(attr.type)))
-                    self._cw.vreg.schema_definition(attr, link=False)
+                for rdef in modified_attrs:
+                    attrtype = str(rdef.rtype)
+                    self.w(u'<h4 class="schema">%s (%s)</h4> ' % (attrtype, _(attrtype)))
+                    self.permissions_table(rdef)
             self.w(u'</div>')
 
-    def display_relations(self, relations, formparams):
+    def display_relations(self, relations):
         _ = self._cw._
-        self.w(u'<a id="relations" href="relations"/>')
-        self.w(u'<h2 class="schema">%s </h2>' % _('permissions for relations').capitalize())
+        url = xml_escape(self._cw.build_url('schema'))
+        self.w(u'<h2 id="relations" class="schema">%s</h2>' % _('Permissions for relations'))
         for rschema in relations:
-            self.w(u'<a id="%s" href="%s"/>' %  (rschema.type, rschema.type))
-            self.w(u'<h3 class="schema">%s (%s) ' % (rschema.type, _(rschema.type)))
-            url = xml_escape(self._cw.build_url('schema', **formparams) + '#index')
-            self.w(u'<a href="%s"><img src="%s" alt="%s"/></a>' % (
+            self.w(u'<h3 id="%s" class="schema"><a href="%s">%s (%s)</a> ' % (
+                rschema.type, self._cw.build_url('cwrtype/%s' % rschema.type),
+                rschema.type, _(rschema.type)))
+            self.w(u'<a href="%s#schema_security"><img src="%s" alt="%s"/></a>' % (
                 url,  self._cw.external_resource('UP_ICON'), _('up')))
             self.w(u'</h3>')
-            self.w(u'<div style="margin: 0px 1.5em">')
-            subjects = [str(subj) for subj in rschema.subjects()]
-            self.w(u'<div><strong>%s</strong> %s (%s)</div>' % (
-                _('subject_plural:'),
-                ', '.join(str(subj) for subj in rschema.subjects()),
-                ', '.join(_(str(subj)) for subj in rschema.subjects())))
-            self.w(u'<div><strong>%s</strong> %s (%s)</div>' % (
-                _('object_plural:'),
-                ', '.join(str(obj) for obj in rschema.objects()),
-                ', '.join(_(str(obj)) for obj in rschema.objects())))
-            self._cw.vreg.schema_definition(rschema, link=False)
-            self.w(u'</div>')
-
-
-class SchemaUreportsView(StartupView):
-    __regid__ = 'schema-block'
-
-    def call(self):
-        viewer = SchemaViewer(self._cw)
-        layout = viewer.visit_schema(self._cw.vreg.schema, display_relations=True,
-                                     skiptypes=skip_types(self._cw))
-        self.w(uilib.ureport_as_html(layout))
-
-
-# CWAttribute / CWRelation #####################################################
-
-class CWRDEFPrimaryView(primary.PrimaryView):
-    __select__ = implements('CWAttribute', 'CWRelation')
-    cache_max_age = 60*60*2 # stay in http cache for 2 hours by default
-
-    def render_entity_title(self, entity):
-        self.w(u'<h1><span class="etype">%s</span> %s</h1>'
-               % (entity.dc_type().capitalize(),
-                  xml_escape(entity.dc_long_title())))
+            self.grouped_permissions_table(rschema)
 
 
 # CWEType ######################################################################
 
+# register msgid generated in entity relations tables
+_('i18ncard_1'), _('i18ncard_?'), _('i18ncard_+'), _('i18ncard_*')
+
+class CWETypePrimaryView(tabs.TabbedPrimaryView):
+    __select__ = implements('CWEType')
+    tabs = [_('cwetype-description'), _('cwetype-box'), _('cwetype-workflow'),
+            _('cwetype-views'), _('cwetype-permissions')]
+    default_tab = 'cwetype-description'
+
+
+class CWETypeDescriptionTab(tabs.PrimaryTab):
+    __regid__ = 'cwetype-description'
+    __select__ = tabs.PrimaryTab.__select__ & implements('CWEType')
+
+    def render_entity_attributes(self, entity):
+        super(CWETypeDescriptionTab, self).render_entity_attributes(entity)
+        _ = self._cw._
+        # inheritance
+        if entity.specializes:
+            self.w(u'<div>%s' % _('Parent classes:'))
+            self.wview('csv', entity.related('specializes', 'subject'))
+            self.w(u'</div>')
+        if entity.reverse_specializes:
+            self.w(u'<div>%s' % _('Sub-classes:'))
+            self.wview('csv', entity.related('specializes', 'object'))
+            self.w(u'</div>')
+        # entity schema image
+        self.w(u'<img src="%s" alt="%s"/>' % (
+            xml_escape(entity.absolute_url(vid='schemagraph')),
+            xml_escape(_('graphical schema for %s') % entity.name)))
+        # entity schema attributes
+        self.w(u'<h2>%s</h2>' % _('CWAttribute_plural'))
+        rset = self._cw.execute(
+            'Any A,ON,D,C,A,DE,A, IDX,FTI,I18N,R,O,RN,S ORDERBY AA '
+            'WHERE A is CWAttribute, A from_entity S, S eid %(x)s, '
+            'A ordernum AA, A defaultval D, A description DE, A cardinality C, '
+            'A fulltextindexed FTI, A internationalizable I18N, A indexed IDX, '
+            'A relation_type R, R name RN, A to_entity O, O name ON',
+            {'x': entity.eid})
+        self.wview('table', rset, 'null',
+                   cellvids={0: 'rdef-name-cell',
+                             3: 'etype-attr-cardinality-cell',
+                             4: 'rdef-constraints-cell',
+                             6: 'rdef-options-cell'},
+                   headers=(_(u'name'), _(u'type'),
+                            _(u'default value'), _(u'required'),
+                            _(u'constraints'), _(u'description'), _('options')))
+        # entity schema relations
+        self.w(u'<h2>%s</h2>' % _('CWRelation_plural'))
+        cellvids = {0: 'rdef-name-cell',
+                    2: 'etype-rel-cardinality-cell',
+                    3: 'rdef-constraints-cell',
+                    4: 'rdef-options-cell'}
+        headers= [_(u'name'), _(u'object type'), _(u'cardinality'),
+                  _(u'constraints'), _(u'options')]
+        rset = self._cw.execute(
+            'Any A,TT,"i18ncard_"+SUBSTRING(C,1,1),A,A, K,TTN,R,RN ORDERBY RN '
+            'WHERE A is CWRelation, A from_entity S, S eid %(x)s, '
+            'A composite K, A cardinality C, '
+            'A relation_type R, R name RN, A to_entity TT, TT name TTN',
+            {'x': entity.eid})
+        if rset:
+            self.w(u'<h5>%s %s</h5>' % (entity.name, _('is subject of:')))
+            self.wview('table', rset, cellvids=cellvids, headers=headers)
+        rset = self._cw.execute(
+            'Any A,TT,"i18ncard_"+SUBSTRING(C,1,1),A,A, K,TTN,R,RN ORDERBY RN '
+            'WHERE A is CWRelation, A to_entity O, O eid %(x)s, '
+            'A composite K, A cardinality C, '
+            'A relation_type R, R name RN, A from_entity TT, TT name TTN',
+            {'x': entity.eid})
+        if rset:
+            cellvids[0] = 'rdef-object-name-cell'
+            headers[1] = _(u'subject type')
+            self.w(u'<h5>%s %s</h5>' % (entity.name, _('is object of:')))
+            self.wview('table', rset, cellvids=cellvids, headers=headers)
+
+
+class CWETypeAttributeCardinalityCell(baseviews.FinalView):
+    __regid__ = 'etype-attr-cardinality-cell'
+
+    def cell_call(self, row, col):
+        if self.cw_rset.rows[row][col][0] == '1':
+            self.w(self._cw._(u'yes'))
+        else:
+            self.w(self._cw._(u'no'))
+
+
+class CWETypeRelationCardinalityCell(baseviews.FinalView):
+    __regid__ = 'etype-rel-cardinality-cell'
+
+    def cell_call(self, row, col):
+        self.w(self._cw._(self.cw_rset.rows[row][col]))
+
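The 'i18ncard_*' msgids registered at the top of this section exist only so that the computed string selected by the RQL above ("i18ncard_"+SUBSTRING(C,1,1)) has a translation; this cell view simply runs it through _(). A minimal sketch with a made-up catalogue (the real labels come from the instance's .po files, not from this dict):

    # illustrative only: a made-up message catalogue standing in for the .po files
    catalogue = {'i18ncard_1': 'exactly one', 'i18ncard_?': 'at most one',
                 'i18ncard_+': 'at least one', 'i18ncard_*': 'any number'}
    card = '?*'                        # cardinality stored on the relation definition
    msgid = 'i18ncard_' + card[0]      # what "i18ncard_"+SUBSTRING(C,1,1) computes
    print catalogue.get(msgid, msgid)  # what 'etype-rel-cardinality-cell' displays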
+
+class CWETypeBoxTab(EntityView):
+    __regid__ = 'cwetype-box'
+    __select__ = implements('CWEType')
+
+    def cell_call(self, row, col):
+        viewer = schemaviewer.SchemaViewer(self._cw)
+        entity = self.cw_rset.get_entity(row, col)
+        eschema = self._cw.vreg.schema.eschema(entity.name)
+        layout = viewer.visit_entityschema(eschema)
+        self.w(uilib.ureport_as_html(layout))
+        self.w(u'<br class="clear"/>')
+
+
+class CWETypePermTab(SecurityViewMixIn, EntityView):
+    __regid__ = 'cwetype-permissions'
+    __select__ = implements('CWEType') & authenticated_user()
+
+    def cell_call(self, row, col):
+        entity = self.cw_rset.get_entity(row, col)
+        eschema = self._cw.vreg.schema.eschema(entity.name)
+        self.w(u'<h4>%s</h4>' % _('This entity type permissions:').capitalize())
+        self.permissions_table(eschema)
+        self.w(u'<div style="margin: 0px 1.5em">')
+        self.w(u'<h4>%s</h4>' % _('Attributes permissions:').capitalize())
+        for attr, etype in  eschema.attribute_definitions():
+            if attr not in META_RTYPES:
+                rdef = eschema.rdef(attr)
+                attrtype = str(rdef.rtype)
+                self.w(u'<h4 class="schema">%s (%s)</h4> ' % (attrtype, _(attrtype)))
+                self.permissions_table(rdef)
+        self.w(u'</div>')
+
+
+class CWETypeWorkflowTab(EntityView):
+    __regid__ = 'cwetype-workflow'
+    __select__ = (implements('CWEType')
+                  & has_related_entities('workflow_of', 'object'))
+
+    def cell_call(self, row, col):
+        entity = self.cw_rset.get_entity(row, col)
+        if entity.default_workflow:
+            wf = entity.default_workflow[0]
+            if len(entity.reverse_workflow_of) > 1:
+                self.w(u'<h1>%s (%s)</h1>'
+                       % (wf.name, self._cw._('default_workflow')))
+            self.display_workflow(wf)
+            defaultwfeid = wf.eid
+        else:
+            self.w(u'<div class="error">%s</div>'
+                   % self._cw._('There is no default workflow'))
+            defaultwfeid = None
+        for altwf in entity.reverse_workflow_of:
+            if altwf.eid == defaultwfeid:
+                continue
+            self.w(u'<h1>%s</h1>' % altwf.name)
+            self.display_workflow(altwf)
+
+    def display_workflow(self, wf):
+        self.w(wf.view('wfgraph'))
+        self.w('<a href="%s">%s</a>' % (
+            wf.absolute_url(), self._cw._('more info about this workflow')))
+
+
+class CWETypeViewsTab(EntityView):
+    """possible views for this entity type"""
+    __regid__ = 'cwetype-views'
+    __select__ = EntityView.__select__ & implements('CWEType')
+
+    def cell_call(self, row, col):
+        entity = self.cw_rset.get_entity(row, col)
+        _ = self._cw._
+        self.w('<div>%s</div>' % _('Non exhaustive list of views that may '
+                                   'apply to entities of this type'))
+        views = [(view.content_type, view.__regid__, _(view.title))
+                 for view in self.possible_views(entity.name)]
+        self.wview('pyvaltable', pyvalue=sorted(views),
+                   headers=(_(u'content type'), _(u'view identifier'),
+                            _(u'view title')))
+
+    def possible_views(self, etype):
+        rset = self._cw.etype_rset(etype)
+        return [v for v in self._cw.vreg['views'].possible_views(self._cw, rset)
+                if v.category != 'startupview']
+
+
 class CWETypeOneLineView(baseviews.OneLineView):
     __select__ = implements('CWEType')
 
     def cell_call(self, row, col, **kwargs):
         entity = self.cw_rset.get_entity(row, col)
-        final = entity.final
-        if final:
+        if entity.final:
             self.w(u'<em class="finalentity">')
         super(CWETypeOneLineView, self).cell_call(row, col, **kwargs)
-        if final:
+        if entity.final:
             self.w(u'</em>')
 
 
-class CWETypePrimaryView(tabs.TabsMixin, primary.PrimaryView):
-    __select__ = implements('CWEType')
-    title = _('in memory entity schema')
-    main_related_section = False
-    tabs = [_('cwetype-schema-text'), _('cwetype-schema-image'),
-            _('cwetype-schema-permissions'), _('cwetype-workflow')]
-    default_tab = 'cwetype-schema-text'
-
-    def render_entity(self, entity):
-        self.render_entity_title(entity)
-        self.w(u'<div>%s</div>' % entity.description)
-        self.render_tabs(self.tabs, self.default_tab, entity)
-
-
-class CWETypeSTextView(EntityView):
-    __regid__ = 'cwetype-schema-text'
-    __select__ = EntityView.__select__ & implements('CWEType')
-
-    def cell_call(self, row, col):
-        entity = self.cw_rset.get_entity(row, col)
-        self.w(u'<h2>%s</h2>' % _('Attributes'))
-        rset = self._cw.execute('Any N,F,D,I,J,DE,A '
-                                'ORDERBY AA WHERE A is CWAttribute, '
-                                'A ordernum AA, A defaultval D, '
-                                'A description DE, '
-                                'A fulltextindexed I, A internationalizable J, '
-                                'A relation_type R, R name N, '
-                                'A to_entity O, O name F, '
-                                'A from_entity S, S eid %(x)s',
-                                {'x': entity.eid})
-        self.wview('editable-table', rset, 'null', displayfilter=True)
-        self.w(u'<h2>%s</h2>' % _('Relations'))
-        rset = self._cw.execute(
-            'Any R,C,TT,K,D,A,RN,TTN ORDERBY RN '
-            'WHERE A is CWRelation, A description D, A composite K, '
-            'A relation_type R, R name RN, A to_entity TT, TT name TTN, '
-            'A cardinality C, A from_entity S, S eid %(x)s',
-            {'x': entity.eid})
-        self.wview('editable-table', rset, 'null', displayfilter=True,
-                   displaycols=range(6), mainindex=5)
-        rset = self._cw.execute(
-            'Any R,C,TT,K,D,A,RN,TTN ORDERBY RN '
-            'WHERE A is CWRelation, A description D, A composite K, '
-            'A relation_type R, R name RN, A from_entity TT, TT name TTN, '
-            'A cardinality C, A to_entity O, O eid %(x)s',
-            {'x': entity.eid})
-        self.wview('editable-table', rset, 'null', displayfilter=True,
-                   displaycols=range(6), mainindex=5)
-
-
-class CWETypeSImageView(EntityView):
-    __regid__ = 'cwetype-schema-image'
-    __select__ = EntityView.__select__ & implements('CWEType')
-
-    def cell_call(self, row, col):
-        entity = self.cw_rset.get_entity(row, col)
-        url = entity.absolute_url(vid='schemagraph')
-        self.w(u'<img src="%s" alt="%s"/>' % (
-            xml_escape(url),
-            xml_escape(self._cw._('graphical schema for %s') % entity.name)))
-
-
-class CWETypeSPermView(EntityView):
-    __regid__ = 'cwetype-schema-permissions'
-    __select__ = EntityView.__select__ & implements('CWEType')
-
-    def cell_call(self, row, col):
-        entity = self.cw_rset.get_entity(row, col)
-        _ = self._cw._
-        self.w(u'<h2>%s</h2>' % _('Add permissions'))
-        rset = self._cw.execute('Any P WHERE X add_permission P, '
-                                'X eid %(x)s',
-                                {'x': entity.eid})
-        self.wview('outofcontext', rset, 'null')
-        self.w(u'<h2>%s</h2>' % _('Read permissions'))
-        rset = self._cw.execute('Any P WHERE X read_permission P, '
-                                'X eid %(x)s',
-                                {'x': entity.eid})
-        self.wview('outofcontext', rset, 'null')
-        self.w(u'<h2>%s</h2>' % _('Update permissions'))
-        rset = self._cw.execute('Any P WHERE X update_permission P, '
-                                'X eid %(x)s',
-                                {'x': entity.eid})
-        self.wview('outofcontext', rset, 'null')
-        self.w(u'<h2>%s</h2>' % _('Delete permissions'))
-        rset = self._cw.execute('Any P WHERE X delete_permission P, '
-                                'X eid %(x)s',
-                                {'x': entity.eid})
-        self.wview('outofcontext', rset, 'null')
-
-
-class CWETypeSWorkflowView(EntityView):
-    __regid__ = 'cwetype-workflow'
-    __select__ = (EntityView.__select__ & implements('CWEType') &
-                  has_related_entities('workflow_of', 'object'))
-
-    def cell_call(self, row, col):
-        entity = self.cw_rset.get_entity(row, col)
-        if entity.default_workflow:
-            wf = entity.default_workflow[0]
-            self.w(u'<h1>%s (%s)</h1>' % (wf.name, self._cw._('default')))
-            self.wf_image(wf)
-        for altwf in entity.reverse_workflow_of:
-            if altwf.eid == wf.eid:
-                continue
-            self.w(u'<h1>%s</h1>' % altwf.name)
-            self.wf_image(altwf)
-
-    def wf_image(self, wf):
-        self.w(u'<img src="%s" alt="%s"/>' % (
-            xml_escape(wf.absolute_url(vid='wfgraph')),
-            xml_escape(self._cw._('graphical representation of %s') % wf.name)))
-
-
 # CWRType ######################################################################
 
-class CWRTypeSchemaView(primary.PrimaryView):
+class CWRTypePrimaryView(tabs.TabbedPrimaryView):
+    __select__ = implements('CWRType')
+    tabs = [_('cwrtype-description'), _('cwrtype-permissions')]
+    default_tab = 'cwrtype-description'
+
+
+class CWRTypeDescriptionTab(tabs.PrimaryTab):
+    __regid__ = 'cwrtype-description'
     __select__ = implements('CWRType')
-    title = _('in memory relation schema')
-    main_related_section = False
+
+    def render_entity_attributes(self, entity):
+        super(CWRTypeDescriptionTab, self).render_entity_attributes(entity)
+        _ = self._cw._
+        if not entity.final:
+            msg = _('graphical schema for %s') % entity.name
+            self.w(tags.img(src=entity.absolute_url(vid='schemagraph'),
+                            alt=msg))
+        rset = self._cw.execute('Any R,C,R,R, RT WHERE '
+                                'R relation_type RT, RT eid %(x)s, '
+                                'R cardinality C', {'x': entity.eid})
+        self.wview('table', rset, 'null',
+                   headers=(_(u'relation'),  _(u'cardinality'), _(u'constraints'),
+                            _(u'options')),
+                   cellvids={2: 'rdef-constraints-cell',
+                             3: 'rdef-options-cell'})
+
+
+class CWRTypePermTab(SecurityViewMixIn, EntityView):
+    __regid__ = 'cwrtype-permissions'
+    __select__ = implements('CWRType') & authenticated_user()
+
+    def cell_call(self, row, col):
+        entity = self.cw_rset.get_entity(row, col)
+        rschema = self._cw.vreg.schema.rschema(entity.name)
+        self.grouped_permissions_table(rschema)
+
+
+# CWAttribute / CWRelation #####################################################
+
+class RDEFPrimaryView(tabs.TabbedPrimaryView):
+    __select__ = implements('CWRelation', 'CWAttribute')
+    tabs = [_('rdef-description'), _('rdef-permissions')]
+    default_tab = 'rdef-description'
+
+
+class RDEFDescriptionTab(tabs.PrimaryTab):
+    __regid__ = 'rdef-description'
+    __select__ = implements('CWRelation', 'CWAttribute')
 
     def render_entity_attributes(self, entity):
-        super(CWRTypeSchemaView, self).render_entity_attributes(entity)
-        rschema = self._cw.vreg.schema.rschema(entity.name)
-        viewer = SchemaViewer(self._cw)
-        layout = viewer.visit_relationschema(rschema, title=False)
-        self.w(uilib.ureport_as_html(layout))
-        if not rschema.final:
-            msg = self._cw._('graphical schema for %s') % entity.name
-            self.w(tags.img(src=entity.absolute_url(vid='schemagraph'),
-                            alt=msg))
+        super(RDEFDescriptionTab, self).render_entity_attributes(entity)
+        rdef = entity.yams_schema()
+        if rdef.constraints:
+            self.w(u'<h4>%s</h4>' % self._cw._('constrained_by'))
+            self.w(entity.view('rdef-constraints-cell'))
+
+
+class RDEFPermTab(SecurityViewMixIn, EntityView):
+    __regid__ = 'rdef-permissions'
+    __select__ = implements('CWRelation', 'CWAttribute') & authenticated_user()
+
+    def cell_call(self, row, col):
+        self.permissions_table(self.cw_rset.get_entity(row, col).yams_schema())
+
+
+class RDEFNameView(tableview.CellView):
+    """display relation name and its translation only in a cell view, link to
+    relation definition's primary view (for use in entity type relations table
+    for instance)
+    """
+    __regid__ = 'rdef-name-cell'
+    __select__ = implements('CWRelation', 'CWAttribute')
+
+    def cell_call(self, row, col):
+        entity = self.cw_rset.get_entity(row, col)
+        rtype = entity.relation_type[0].name
+        # XXX use context entity + pgettext
+        self.w(u'<a href="%s">%s</a> (%s)' % (
+            entity.absolute_url(), rtype, self._cw._(rtype)))
+
+class RDEFObjectNameView(tableview.CellView):
+    """same as RDEFNameView but when the context is the object entity
+    """
+    __regid__ = 'rdef-object-name-cell'
+    __select__ = implements('CWRelation', 'CWAttribute')
+
+    def cell_call(self, row, col):
+        entity = self.cw_rset.get_entity(row, col)
+        rtype = entity.relation_type[0].name
+        # XXX use context entity + pgettext
+        self.w(u'<a href="%s">%s</a> (%s)' % (
+            entity.absolute_url(), rtype, self._cw.__(rtype + '_object')))
+
+class RDEFConstraintsCell(EntityView):
+    __regid__ = 'rdef-constraints-cell'
+    __select__ = implements('CWAttribute', 'CWRelation')
+
+    def cell_call(self, row, col):
+        entity = self.cw_rset.get_entity(row, col)
+        rschema = self._cw.vreg.schema.rschema(entity.rtype.name)
+        rdef = rschema.rdefs[(entity.stype.name, entity.otype.name)]
+        constraints = [xml_escape(unicode(c)) for c in getattr(rdef, 'constraints')]
+        self.w(u'<br/>'.join(constraints))
+
+class CWAttributeOptionsCell(EntityView):
+    __regid__ = 'rdef-options-cell'
+    __select__ = implements('CWAttribute')
+
+    def cell_call(self, row, col):
+        entity = self.cw_rset.get_entity(row, col)
+        options = []
+        if entity.indexed:
+            options.append(self._cw._('indexed'))
+        if entity.fulltextindexed:
+            options.append(self._cw._('fulltextindexed'))
+        if entity.internationalizable:
+            options.append(self._cw._('internationalizable'))
+        self.w(u','.join(options))
+
+class CWRelationOptionsCell(EntityView):
+    __regid__ = 'rdef-options-cell'
+    __select__ = implements('CWRelation',)
+
+    def cell_call(self, row, col):
+        entity = self.cw_rset.get_entity(row, col)
+        rtype = entity.rtype
+        options = []
+        if rtype.symmetric:
+            options.append(self._cw._('symmetric'))
+        if rtype.inlined:
+            options.append(self._cw._('inlined'))
+        if rtype.fulltext_container:
+            options.append('%s=%s' % (self._cw._('fulltext_container'),
+                                      self._cw._(rtype.fulltext_container)))
+        if entity.composite:
+            options.append('%s=%s' % (self._cw._('composite'),
+                                      self._cw._(entity.composite)))
+        self.w(u','.join(options))
 
 
 # schema images ###############################################################
--- a/web/views/sessions.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/views/sessions.py	Mon Jul 19 15:36:16 2010 +0200
@@ -23,6 +23,7 @@
 
 from cubicweb.web import InvalidSession
 from cubicweb.web.application import AbstractSessionManager
+from cubicweb.dbapi import DBAPISession
 
 
 class InMemoryRepositorySessionManager(AbstractSessionManager):
@@ -53,37 +54,40 @@
         if self.has_expired(session):
             self.close_session(session)
             raise InvalidSession()
-        # give an opportunity to auth manager to hijack the session (necessary
-        # with the RepositoryAuthenticationManager in case the connection to the
-        # repository has expired)
         try:
-            session = self.authmanager.validate_session(req, session)
-            # necessary in case session has been hijacked
-            self._sessions[session.sessionid] = session
+            user = self.authmanager.validate_session(req, session)
         except InvalidSession:
             # invalid session
-            del self._sessions[sessionid]
+            self.close_session(session)
             raise
+        # associate the connection to the current request
+        req.set_session(session, user)
         return session
 
     def open_session(self, req):
-        """open and return a new session for the given request
+        """open and return a new session for the given request. The session is
+        also bound to the request.
 
-        :raise ExplicitLogin: if authentication is required
+        raise :exc:`cubicweb.AuthenticationError` if authentication failed
+        (no authentication info found or wrong user/password)
         """
-        session = self.authmanager.authenticate(req)
+        cnx, login, authinfo = self.authmanager.authenticate(req)
+        session = DBAPISession(cnx, login, authinfo)
         self._sessions[session.sessionid] = session
+        # associate the connection to the current request
+        req.set_session(session)
         return session
 
     def close_session(self, session):
         """close session on logout or on invalid session detected (expired out,
         corrupted...)
         """
-        self.info('closing http session %s' % session)
+        self.info('closing http session %s' % session.sessionid)
         del self._sessions[session.sessionid]
         try:
-            session.close()
+            session.cnx.close()
         except:
             # already closed, may occur if the repository session expired but
             # not the web session
             pass
+        session.cnx = None
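A minimal sketch of the session manager contract this hunk moves to, seen from a hypothetical caller (the helper function and its name are assumptions; get_session/open_session and the exceptions are from the code above):

    from cubicweb.web import InvalidSession

    def bound_session(sessionmanager, req, sessionid=None):
        # hypothetical helper: open_session() now builds the DBAPISession itself
        # and binds it to the request; get_session() re-binds it on each hit
        if sessionid is not None:
            try:
                return sessionmanager.get_session(req, sessionid)
            except InvalidSession:
                pass  # expired or closed on the repository side, re-authenticate
        # raises cubicweb.AuthenticationError when no/bad credentials are found
        return sessionmanager.open_session(req)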
--- a/web/views/sparql.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/views/sparql.py	Mon Jul 19 15:36:16 2010 +0200
@@ -20,8 +20,8 @@
 """
 __docformat__ = "restructuredtext en"
 
-import rql
 from yams import xy
+from rql import TypeResolverException
 
 from lxml import etree
 from lxml.builder import E
@@ -51,24 +51,26 @@
 class SparqlFormView(form.FormViewMixIn, StartupView):
     __regid__ = 'sparql'
     def call(self):
-        form = self._cw.vreg.select('forms', 'sparql', self._cw)
+        form = self._cw.vreg['forms'].select('sparql', self._cw)
         self.w(form.render())
         sparql = self._cw.form.get('sparql')
         vid = self._cw.form.get('resultvid', 'table')
         if sparql:
             try:
                 qinfo = Sparql2rqlTranslator(self._cw.vreg.schema).translate(sparql)
-            except rql.TypeResolverException:
-                self.w(self._cw._('can not resolve entity types:') + u' ' + unicode('ex'))
+            except TypeResolverException, exc:
+                self.w(self._cw._('can not resolve entity types:') + u' ' + unicode(exc))
             except UnsupportedQuery:
                 self.w(self._cw._('we are not yet ready to handle this query'))
-            except xy.UnsupportedVocabulary, ex:
-                self.w(self._cw._('unknown vocabulary:') + u' ' + unicode('ex'))
-            if vid == 'sparqlxml':
-                url = self._cw.build_url('view', rql=qinfo.finalize(), vid=vid)
-                raise Redirect(url)
-            rset = self._cw.execute(qinfo.finalize())
-            self.wview(vid, rset, 'null')
+            except xy.UnsupportedVocabulary, exc:
+                self.w(self._cw._('unknown vocabulary:') + u' ' + unicode(exc))
+            else:
+                rql, args = qinfo.finalize()
+                if vid == 'sparqlxml':
+                    url = self._cw.build_url('view', rql=(rql,args), vid=vid)
+                    raise Redirect(url)
+                rset = self._cw.execute(rql, args)
+                self.wview(vid, rset, 'null')
 
 
 ## sparql resultset views #####################################################
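For reference, a minimal sketch of the translate-then-execute flow the view now uses (the helper and the import path are assumptions; translate(), finalize() and the exception types are from the code above):

    from cubicweb.spa2rql import Sparql2rqlTranslator  # assumed import location

    def sparql_rset(req, sparql):
        # hypothetical helper mirroring SparqlFormView.call(): translate() may raise
        # TypeResolverException, UnsupportedQuery or xy.UnsupportedVocabulary, and
        # finalize() now returns an (rql, args) pair instead of a bare RQL string
        qinfo = Sparql2rqlTranslator(req.vreg.schema).translate(sparql)
        rql, args = qinfo.finalize()
        return req.execute(rql, args)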
--- a/web/views/startup.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/views/startup.py	Mon Jul 19 15:36:16 2010 +0200
@@ -17,8 +17,8 @@
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """Set of HTML startup views. A startup view is global, e.g. doesn't
 apply to a result set.
+"""
 
-"""
 __docformat__ = "restructuredtext en"
 _ = unicode
 
@@ -97,7 +97,8 @@
         self.startupviews_table()
 
     def startupviews_table(self):
-        for v in self._cw.vreg['views'].possible_views(self._cw, None):
+        views = self._cw.vreg['views'].possible_views(self._cw, None)
+        for v in sorted(views, key=lambda x: self._cw._(x.title)):
             if v.category != 'startupview' or v.__regid__ in ('index', 'tree', 'manage'):
                 continue
             self.w('<p><a href="%s">%s</a></p>' % (
--- a/web/views/tableview.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/views/tableview.py	Mon Jul 19 15:36:16 2010 +0200
@@ -15,10 +15,7 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""generic table view, including filtering abilities
-
-
-"""
+"""generic table view, including filtering abilities"""
 __docformat__ = "restructuredtext en"
 
 try:
@@ -49,7 +46,7 @@
     finalview = 'final'
 
     def form_filter(self, divid, displaycols, displayactions, displayfilter,
-                    hidden=True):
+                    paginate, hidden=True):
         rqlst = self.cw_rset.syntax_tree()
         # union not yet supported
         if len(rqlst.children) != 1:
@@ -63,7 +60,8 @@
         wdgs = [wdg for wdg in wdgs if wdg is not None]
         if wdgs:
             self._generate_form(divid, baserql, wdgs, hidden,
-                               vidargs={'displaycols': displaycols,
+                               vidargs={'paginate': paginate,
+                                        'displaycols': displaycols,
                                         'displayactions': displayactions,
                                         'displayfilter': displayfilter})
             return self.show_hide_actions(divid, not hidden)
@@ -79,7 +77,7 @@
         # drop False / None values from vidargs
         vidargs = dict((k, v) for k, v in vidargs.iteritems() if v)
         w(u'<form method="post" cubicweb:facetargs="%s" action="">' %
-          xml_escape(dumps([divid, 'table', False, vidargs])))
+          xml_escape(dumps([divid, self.__regid__, False, vidargs])))
         w(u'<fieldset id="%sForm" class="%s">' % (divid, hidden and 'hidden' or ''))
         w(u'<input type="hidden" name="divid" value="%s" />' % divid)
         w(u'<input type="hidden" name="fromformfilter" value="1" />')
@@ -109,17 +107,20 @@
                 continue
         return None
 
-    def displaycols(self, displaycols):
+    def displaycols(self, displaycols, headers):
         if displaycols is None:
             if 'displaycols' in self._cw.form:
                 displaycols = [int(idx) for idx in self._cw.form['displaycols']]
+            elif headers is not None:
+                displaycols = range(len(headers))
             else:
                 displaycols = range(len(self.cw_rset.syntax_tree().children[0].selection))
         return displaycols
 
     def call(self, title=None, subvid=None, displayfilter=None, headers=None,
              displaycols=None, displayactions=None, actions=(), divid=None,
-             cellvids=None, cellattrs=None, mainindex=None):
+             cellvids=None, cellattrs=None, mainindex=None,
+             paginate=False, page_size=None):
         """Produces a table displaying a composite query
 
         :param title: title added before table
@@ -149,7 +150,7 @@
                     hidden = False
             if displayactions is None and 'displayactions' in req.form:
                 displayactions = True
-        displaycols = self.displaycols(displaycols)
+        displaycols = self.displaycols(displaycols, headers)
         fromformfilter = 'fromformfilter' in req.form
         # if fromformfilter is true, this is an ajax call and we only want to
         # replace the inner div, so don't regenerate everything under the if
@@ -162,23 +163,23 @@
                 self.w(u'<h2 class="tableTitle">%s</h2>\n' % title)
             if displayfilter:
                 actions += self.form_filter(divid, displaycols, displayfilter,
-                                            displayactions)
+                                            displayactions, paginate)
         elif displayfilter:
             actions += self.show_hide_actions(divid, True)
-        self.w(u'<div id="%s"' % divid)
+        self.w(u'<div id="%s">' % divid)
         if displayactions:
             actionsbycat = self._cw.vreg['actions'].possible_actions(req, self.cw_rset)
             for action in actionsbycat.get('mainactions', ()):
                 for action in action.actual_actions():
                     actions.append( (action.url(), req._(action.title),
                                      action.html_class(), None) )
-            self.w(u' cubicweb:displayactions="1">') # close <div tag
-        else:
-            self.w(u'>') # close <div tag
         # render actions menu
         if actions:
             self.render_actions(divid, actions)
         # render table
+        if paginate:
+            self.divid = divid # XXX iirk (see usage in page_navigation_url)
+            self.paginate(page_size=page_size, show_all_option=False)
         table = TableWidget(self)
         for column in self.get_columns(computed_labels, displaycols, headers,
                                        subvid, cellvids, cellattrs, mainindex):
@@ -188,6 +189,16 @@
         if not fromformfilter:
             self.w(u'</div>\n')
 
+    def page_navigation_url(self, navcomp, path, params):
+        if hasattr(self, 'divid'):
+            divid = self.divid
+        else:
+            divid = params.get('divid', 'pageContent')
+        rql = params.pop('rql', self.cw_rset.printable_rql())
+        # latest 'true' used for 'swap' mode
+        return 'javascript: replacePageChunk(%s, %s, %s, %s, true)' % (
+            dumps(divid), dumps(rql), dumps(self.__regid__), dumps(params))
+
     def show_hide_actions(self, divid, currentlydisplayed=False):
         showhide = u';'.join(toggle_action('%s%s' % (divid, what))[11:]
                              for what in ('Form', 'Show', 'Hide', 'Actions'))
@@ -222,7 +233,7 @@
             # compute column header
             if headers is not None:
                 label = headers[displaycols.index(colindex)]
-            if colindex == mainindex:
+            if colindex == mainindex and label is not None:
                 label += ' (%s)' % self.cw_rset.rowcount
             column = TableColumn(label, colindex)
             coltype = self.cw_rset.description[0][colindex]
@@ -285,8 +296,7 @@
         """
         etype, val = self.cw_rset.description[row][col], self.cw_rset[row][col]
         if val is not None and not self._cw.vreg.schema.eschema(etype).final:
-            e = self.cw_rset.get_entity(row, col)
-            e.view(cellvid or 'outofcontext', w=self.w)
+            self.wview(cellvid or 'outofcontext', self.cw_rset, row=row, col=col)
         elif val is None:
             # This is usually caused by a left outer join and in that case,
             # regular views will most certainly fail if they don't have
@@ -309,17 +319,21 @@
       displayed with default restrictions set
     """
     __regid__ = 'initialtable'
-    __select__ = nonempty_rset() & match_form_params('actualrql')
+    __select__ = nonempty_rset()
     # should not be displayed in possible view since it expects some specific
     # parameters
     title = None
 
     def call(self, title=None, subvid=None, headers=None, divid=None,
-             displaycols=None, displayactions=None, mainindex=None):
+             paginate=False, displaycols=None, displayactions=None, mainindex=None):
         """Dumps a table displaying a composite query"""
-        actrql = self._cw.form['actualrql']
-        self._cw.ensure_ro_rql(actrql)
-        displaycols = self.displaycols(displaycols)
+        try:
+            actrql = self._cw.form['actualrql']
+        except KeyError:
+            actrql = self.cw_rset.printable_rql()
+        else:
+            self._cw.ensure_ro_rql(actrql)
+        displaycols = self.displaycols(displaycols, headers)
         if displayactions is None and 'displayactions' in self._cw.form:
             displayactions = True
         if divid is None and 'divid' in self._cw.form:
@@ -333,7 +347,8 @@
         if mainindex is None:
             mainindex = self.main_var_index()
         if mainindex is not None:
-            actions = self.form_filter(divid, displaycols, displayactions, True)
+            actions = self.form_filter(divid, displaycols, displayactions,
+                                       paginate, True)
         else:
             actions = ()
         if not subvid and 'subvid' in self._cw.form:
@@ -348,3 +363,67 @@
 class EditableInitialTableTableView(InitialTableView):
     __regid__ = 'editable-initialtable'
     finalview = 'editable-final'
+
+
+class EntityAttributesTableView(EntityView):
+    """This table displays entity attributes in a table and allow to set a
+    specific method to help building cell content for each attribute as well as
+    column header.
+
+    Table will render entity cell by using the appropriate build_COLNAME_cell
+    methods if defined otherwise cell content will be entity.COLNAME.
+
+    Table will render column header using the method header_for_COLNAME if
+    defined otherwise COLNAME will be used.
+    """
+    __abstract__ = True
+    columns = ()
+    table_css = "listing"
+    css_files = ()
+
+    def call(self, columns=None):
+        if self.css_files:
+            self._cw.add_css(self.css_files)
+        _ = self._cw._
+        self.columns = columns or self.columns
+        ecls = self._cw.vreg['etypes'].etype_class(self.cw_rset.description[0][0])
+        self.w(u'<table class="%s">' % self.table_css)
+        self.table_header(ecls)
+        self.w(u'<tbody>')
+        for row in xrange(self.cw_rset.rowcount):
+            self.cell_call(row=row, col=0)
+        self.w(u'</tbody>')
+        self.w(u'</table>')
+
+    def cell_call(self, row, col):
+        _ = self._cw._
+        entity = self.cw_rset.get_entity(row, col)
+        infos = {}
+        for col in self.columns:
+            meth = getattr(self, 'build_%s_cell' % col, None)
+            # find the build method or try to find matching attribute
+            if meth:
+                content = meth(entity)
+            else:
+                content = entity.printable_value(col)
+            infos[col] = content
+        self.w(u"""<tr onmouseover="addElementClass(this, 'highlighted');"
+            onmouseout="removeElementClass(this, 'highlighted')">""")
+        line = u''.join(u'<td>%%(%s)s</td>' % col for col in self.columns)
+        self.w(line % infos)
+        self.w(u'</tr>\n')
+
+    def table_header(self, ecls):
+        """builds the table's header"""
+        self.w(u'<thead><tr>')
+        _ = self._cw._
+        for column in self.columns:
+            meth = getattr(self, 'header_for_%s' % column, None)
+            if meth:
+                colname = meth(ecls)
+            else:
+                colname = _(column)
+            self.w(u'<th>%s</th>' % xml_escape(colname))
+        self.w(u'</tr></thead>\n')
+
+
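To illustrate the hooks described in the docstring, a minimal hypothetical subclass (the 'Ticket' entity type, its attributes and the __regid__ are made-up names):

    class TicketAttributesTable(EntityAttributesTableView):
        # hypothetical concrete table using the build_COLNAME_cell /
        # header_for_COLNAME hooks documented above
        __regid__ = 'ticket-attributes-table'
        __select__ = implements('Ticket')
        columns = ('title', 'priority')

        def build_title_cell(self, entity):
            # custom cell content instead of entity.printable_value('title')
            return u'<a href="%s">%s</a>' % (entity.absolute_url(),
                                             xml_escape(entity.title))

        def header_for_priority(self, ecls):
            # custom column header instead of the plain translation of 'priority'
            return self._cw._('Priority')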
--- a/web/views/treeview.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/views/treeview.py	Mon Jul 19 15:36:16 2010 +0200
@@ -15,9 +15,8 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""Set of tree-building widgets, based on jQuery treeview plugin
+"""Set of tree-building widgets, based on jQuery treeview plugin"""
 
-"""
 __docformat__ = "restructuredtext en"
 
 from logilab.mtconverter import xml_escape
@@ -143,7 +142,7 @@
         cookies = self._cw.get_cookie()
         treestate = cookies.get(treecookiename(treeid))
         if treestate:
-            return str(eeid) in treestate.value.split(';')
+            return str(eeid) in treestate.value.split(':')
         return self.default_branch_state_is_open
 
     def cell_call(self, row, col, treeid, vid='oneline', parentvid='treeview',
--- a/web/views/urlpublishing.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/views/urlpublishing.py	Mon Jul 19 15:36:16 2010 +0200
@@ -142,8 +142,7 @@
         if len(parts) != 1:
             raise PathDontMatch()
         try:
-            rset = req.execute('Any X WHERE X eid %(x)s',
-                               {'x': typed_eid(parts[0])}, 'x')
+            rset = req.execute('Any X WHERE X eid %(x)s', {'x': typed_eid(parts[0])})
         except ValueError:
             raise PathDontMatch()
         if rset.rowcount == 0:
@@ -190,7 +189,7 @@
         rql = u'Any X WHERE X is %s, X %s %%(x)s' % (etype, attrname)
         if attrname == 'eid':
             try:
-                rset = req.execute(rql, {'x': typed_eid(value)}, 'x')
+                rset = req.execute(rql, {'x': typed_eid(value)})
             except (ValueError, TypeResolverException):
                 # conflicting eid/type
                 raise PathDontMatch()
--- a/web/views/urlrewrite.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/views/urlrewrite.py	Mon Jul 19 15:36:16 2010 +0200
@@ -15,9 +15,8 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""Rules based url rewriter component, to get configurable RESTful urls.
+"""Rules based url rewriter component, to get configurable RESTful urls"""
 
-"""
 import re
 
 from cubicweb import typed_eid
--- a/web/views/workflow.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/views/workflow.py	Mon Jul 19 15:36:16 2010 +0200
@@ -19,11 +19,14 @@
 
 * IWorkflowable views and forms
 * workflow entities views (State, Transition, TrInfo)
+"""
 
-"""
 __docformat__ = "restructuredtext en"
 _ = unicode
 
+import tempfile
+import os
+
 from logilab.mtconverter import xml_escape
 from logilab.common.graph import escape, GraphGenerator, DotBackend
 
@@ -31,17 +34,24 @@
 from cubicweb.selectors import (implements, has_related_entities, one_line_rset,
                                 relation_possible, match_form_params,
                                 implements, score_entity)
+from cubicweb.utils import make_uid
 from cubicweb.interfaces import IWorkflowable
 from cubicweb.view import EntityView
 from cubicweb.schema import display_name
 from cubicweb.web import uicfg, stdmsgs, action, component, form, action
 from cubicweb.web import formfields as ff, formwidgets as fwdgs
 from cubicweb.web.views import TmpFileViewMixin, forms, primary, autoform
+from cubicweb.web.views.tabs import TabbedPrimaryView, PrimaryTab
 
 _pvs = uicfg.primaryview_section
 _pvs.tag_subject_of(('Workflow', 'initial_state', '*'), 'hidden')
 _pvs.tag_object_of(('*', 'state_of', 'Workflow'), 'hidden')
 _pvs.tag_object_of(('*', 'transition_of', 'Workflow'), 'hidden')
+_pvs.tag_object_of(('*', 'wf_info_for', '*'), 'hidden')
+for rtype in ('in_state', 'by_transition', 'from_state', 'to_state'):
+    _pvs.tag_subject_of(('*', rtype, '*'), 'hidden')
+    _pvs.tag_object_of(('*', rtype, '*'), 'hidden')
 
 _abaa = uicfg.actionbox_appearsin_addmenu
 _abaa.tag_subject_of(('BaseTransition', 'condition', 'RQLExpression'), False)
@@ -142,7 +152,7 @@
             headers = (_('from_state'), _('to_state'), _('comment'), _('date'))
         rql = '%s %s, X eid %%(x)s' % (sel, rql)
         try:
-            rset = self._cw.execute(rql, {'x': eid}, 'x')
+            rset = self._cw.execute(rql, {'x': eid})
         except Unauthorized:
             return
         if rset:
@@ -200,6 +210,7 @@
 _pvs.tag_subject_of(('Workflow', 'initial_state', '*'), 'hidden')
 _pvs.tag_object_of(('*', 'state_of', 'Workflow'), 'hidden')
 _pvs.tag_object_of(('*', 'transition_of', 'Workflow'), 'hidden')
+_pvs.tag_object_of(('*', 'default_workflow', 'Workflow'), 'hidden')
 
 _abaa = uicfg.actionbox_appearsin_addmenu
 _abaa.tag_subject_of(('BaseTransition', 'condition', 'RQLExpression'), False)
@@ -211,14 +222,10 @@
 _abaa.tag_object_of(('Transition', 'transition_of', 'Workflow'), True)
 _abaa.tag_object_of(('WorkflowTransition', 'transition_of', 'Workflow'), True)
 
-class WorkflowPrimaryView(primary.PrimaryView):
+class WorkflowPrimaryView(TabbedPrimaryView):
     __select__ = implements('Workflow')
-
-    def render_entity_attributes(self, entity):
-        self.w(entity.view('reledit', rtype='description'))
-        self.w(u'<img src="%s" alt="%s"/>' % (
-            xml_escape(entity.absolute_url(vid='wfgraph')),
-            xml_escape(self._cw._('graphical workflow for %s') % entity.name)))
+    tabs = [  _('wf_tab_info'), _('wfgraph'),]
+    default_tab = 'wf_tab_info'
 
 
 class CellView(view.EntityView):
@@ -238,6 +245,59 @@
         self.w(xml_escape(self._cw.view('textincontext', self.cw_rset,
                                         row=row, col=col)))
 
+class WorkflowTabTextView(PrimaryTab):
+    __regid__ = 'wf_tab_info'
+    __select__ = PrimaryTab.__select__ & one_line_rset() & implements('Workflow')
+
+    def render_entity_attributes(self, entity):
+        _ = self._cw._
+        self.w(u'<div>%s</div>' % (entity.printable_value('description')))
+        self.w(u'<span>%s%s</span>' % (_("workflow_of").capitalize(), _(" :")))
+        html = []
+        for e in  entity.workflow_of:
+            view = e.view('outofcontext')
+            if entity.eid == e.default_workflow[0].eid:
+                view += u' <span>[%s]</span>' % _('default_workflow')
+            html.append(view)
+        self.w(', '.join(v for v in html))
+        self.w(u'<h2>%s</h2>' % _("Transition_plural"))
+        rset = self._cw.execute(
+            'Any T,T,DS,T,TT ORDERBY TN WHERE T transition_of WF, WF eid %(x)s,'
+            'T type TT, T name TN, T destination_state DS?', {'x': entity.eid})
+        self.wview('editable-table', rset, 'null',
+                   cellvids={ 1: 'trfromstates', 2: 'outofcontext', 3:'trsecurity',},
+                   headers = (_('Transition'),  _('from_state'),
+                              _('to_state'), _('permissions'), _('type') ),
+                   )
+
+
+class TransitionSecurityTextView(view.EntityView):
+    __regid__ = 'trsecurity'
+    __select__ = implements('Transition')
+
+    def cell_call(self, row, col):
+        _ = self._cw._
+        entity = self.cw_rset.get_entity(self.cw_row, self.cw_col)
+        if entity.require_group:
+            self.w(u'<div>%s%s %s</div>' %
+                   (_('groups'), _(" :"),
+                    u', '.join((g.view('incontext') for g
+                               in entity.require_group))))
+        if entity.condition:
+            self.w(u'<div>%s%s %s</div>' %
+                   ( _('conditions'), _(" :"),
+                     u'<br/>'.join((e.dc_title() for e
+                                in entity.condition))))
+
+class TransitionAllowedTextView(view.EntityView):
+    __regid__ = 'trfromstates'
+    __select__ = implements('Transition')
+
+    def cell_call(self, row, col):
+        entity = self.cw_rset.get_entity(self.cw_row, self.cw_col)
+        self.w(u', '.join((e.view('outofcontext') for e
+                           in entity.reverse_allowed_transition)))
+
 
 # workflow entity types edition ################################################
 
@@ -297,24 +357,18 @@
     def node_properties(self, stateortransition):
         """return default DOT drawing options for a state or transition"""
         props = {'label': stateortransition.printable_value('name'),
-                 'fontname': 'Courier'}
+                 'fontname': 'Courier', 'fontsize':10,
+                 'href': stateortransition.absolute_url(),
+                 }
         if hasattr(stateortransition, 'state_of'):
             props['shape'] = 'box'
             props['style'] = 'filled'
             if stateortransition.reverse_initial_state:
-                props['color'] = '#88CC88'
+                props['fillcolor'] = '#88CC88'
         else:
             props['shape'] = 'ellipse'
             descr = []
             tr = stateortransition
-            if tr.require_group:
-                descr.append('%s %s'% (
-                    self._('groups:'),
-                    ','.join(g.printable_value('name') for g in tr.require_group)))
-            if tr.condition:
-                descr.append('%s %s'% (
-                    self._('condition:'),
-                    ' | '.join(e.expression for e in tr.condition)))
             if descr:
                 props['label'] += escape('\n'.join(descr))
         return props
@@ -344,17 +398,43 @@
                 yield transition.eid, outgoingstate.eid, transition
 
 
-class WorkflowImageView(TmpFileViewMixin, view.EntityView):
+class WorkflowGraphView(view.EntityView):
     __regid__ = 'wfgraph'
-    __select__ = implements('Workflow')
-    content_type = 'image/png'
+    __select__ = EntityView.__select__ & one_line_rset() & implements('Workflow')
 
-    def _generate(self, tmpfile):
-        """display schema information for an entity"""
-        entity = self.cw_rset.get_entity(self.cw_row, self.cw_col)
+    def cell_call(self, row, col):
+        entity = self.cw_rset.get_entity(row, col)
         visitor = WorkflowVisitor(entity)
         prophdlr = WorkflowDotPropsHandler(self._cw)
-        generator = GraphGenerator(DotBackend('workflow', 'LR',
-                                              ratio='compress', size='30,12'))
-        return generator.generate(visitor, prophdlr, tmpfile)
+        wfname = 'workflow%s' % str(entity.eid)
+        generator = GraphGenerator(DotBackend(wfname, None,
+                                              ratio='compress', size='30,10'))
+        # map file
+        pmap, mapfile = tempfile.mkstemp(".map", wfname)
+        os.close(pmap)
+        # image file
+        fd, tmpfile = tempfile.mkstemp('.png')
+        os.close(fd)
+        generator.generate(visitor, prophdlr, tmpfile, mapfile)
+        filekeyid = make_uid()
+        self._cw.session.data[filekeyid] = tmpfile
+        self.w(u'<img src="%s" alt="%s" usemap="#%s" />' % (
+            xml_escape(entity.absolute_url(vid='tmppng', tmpfile=filekeyid)),
+            xml_escape(self._cw._('graphical workflow for %s') % entity.name),
+            wfname))
+        stream = open(mapfile, 'r').read()
+        stream = stream.decode(self._cw.encoding)
+        self.w(stream)
+        os.unlink(mapfile)
 
+
+class TmpPngView(TmpFileViewMixin, view.EntityView):
+    __regid__ = 'tmppng'
+    __select__ = match_form_params('tmpfile')
+    content_type = 'image/png'
+    binary = True
+
+    def cell_call(self, row=0, col=0):
+        tmpfile = self._cw.session.data[self._cw.form['tmpfile']]
+        self.w(open(tmpfile, 'rb').read())
+        os.unlink(tmpfile)
--- a/web/wdoc/ChangeLog_en	Thu May 06 08:24:46 2010 +0200
+++ b/web/wdoc/ChangeLog_en	Mon Jul 19 15:36:16 2010 +0200
@@ -4,6 +4,41 @@
 .. _SPARQL: http://www.w3.org/TR/rdf-sparql-query/
 .. _schema: schema
 .. _OWL: http://www.w3.org/TR/owl-features/
+.. _pdfexport: http://www.cubicweb.org/project/cubicweb-pdfexport
+
+2010-06-11  --  3.8.4
+   * support for full text prefix search on instances using postgres > 8.4 as
+     the database: try it with a search such as 'cubic*'
+
+
+2010-04-20  --  3.8.0
+   * nicer schema_ and workflow views (clickable images!)
+
+   * more power to undo, though not yet complete (support for entity updates
+     is still missing, coming soon...)
+
+   * the pdf export functionality has moved to its own cube. If it is no
+     longer available on this site and you found it useful, ask your site
+     administrator to install the pdfexport_ cube.
+
+
+2010-03-16  --  3.7.0
+   * experimental support for undoing deletions. If you are not offered the
+     option to *undo* the deletion of one or several entities, ask your site
+     administrator to activate the feature.
+
+
+2010-02-10  --  3.6.0
+   * nice 'demo widget' to edit a bookmark's path, i.e. a relative url, split
+     into path and parameters and dealing nicely with url encodings. Try
+     editing your bookmarks!
+
+   * a whole lot of refactoring, but you should hopefully not see any of it
+     from the outside
+
+2009-09-17  --  3.5.0
+
+    * selectable workflows: authorized users may change the workflow used
+      by some workflowable entities
+
 
 2009-08-07  --  3.4.0
 
--- a/web/wdoc/ChangeLog_fr	Thu May 06 08:24:46 2010 +0200
+++ b/web/wdoc/ChangeLog_fr	Mon Jul 19 15:36:16 2010 +0200
@@ -4,6 +4,49 @@
 .. _SPARQL: http://www.w3.org/TR/rdf-sparql-query/
 .. _schema: schema
 .. _OWL: http://www.w3.org/TR/owl-features/
+.. _pdfexport: http://www.cubicweb.org/project/cubicweb-pdfexport
+
+2010-06-11  --  3.8.4
+   * support de la recherche plein texte par préfixe pour les instances
+     utilisant postgres > 8.4 : essayez en cherchant par ex. 'cubic*'
+
+2010-04-20  --  3.8.0
+
+   * amélioration des vues de schema_ et des vues de workflows
+     (images cliquables !)
+
+   * meilleur support du "undo", mais il manque toujours le support
+     de la modification d'entité (bientôt...)
+
+   * la fonctionnalité d'export PDF a été déplacée dans son propre
+     cube. Si cette fonctionnalité n'est plus disponible sur ce site et
+     que vous la trouvez utile, demandez à l'administrateur
+     d'installer le cube pdfexport_.
+
+
+2010-03-16  --  3.7.0
+
+   * support expérimental pour l'annulation ("undo") de la
+     suppression. Si, après une suppression d'une ou plusieurs
+     entités, on ne vous propose pas d'annuler l'opération, demandez à
+     l'administrateur d'activer la fonctionnalité
+
+
+2010-02-10  --  3.6.0
+
+   * nouveau widget (de démonstration :) pour éditer le chemin des
+     signets. Celui-ci, une url relative finalement, est décomposé en
+     chemin et paramètres que vous pouvez éditer individuellement et
+     surtout lisiblement, car la gestion de l'échappement de l'url est
+     gérée de manière transparente
+
+   * beaucoup de refactoring, mais vous ne devriez rien remarquer :)
+
+2009-09-17  --  3.5.0
+
+    * workflow sélectionnable : les utilisateurs autorisés peuvent
+      changer le workflow à utiliser pour les entités le supportant
+
 
 2009-08-07  --  3.4.0
 
--- a/web/webconfig.py	Thu May 06 08:24:46 2010 +0200
+++ b/web/webconfig.py	Mon Jul 19 15:36:16 2010 +0200
@@ -75,7 +75,7 @@
     """the WebConfiguration is a singleton object handling instance's
     configuration and preferences
     """
-    cubicweb_appobject_path = CubicWebConfiguration.cubicweb_appobject_path | set(['web/views'])
+    cubicweb_appobject_path = CubicWebConfiguration.cubicweb_appobject_path | set([join('web', 'views')])
     cube_appobject_path = CubicWebConfiguration.cube_appobject_path | set(['views'])
 
     options = merge_options(CubicWebConfiguration.options + (
@@ -83,20 +83,20 @@
          {'type' : 'string',
           'default': None,
           'help': 'login of the CubicWeb user account to use for anonymous user (if you want to allow anonymous)',
-          'group': 'main', 'inputlevel': 1,
+          'group': 'web', 'level': 1,
           }),
         ('anonymous-password',
          {'type' : 'string',
           'default': None,
           'help': 'password of the CubicWeb user account to use for anonymous user, '
           'if anonymous-user is set',
-          'group': 'main', 'inputlevel': 1,
+          'group': 'web', 'level': 1,
           }),
         ('query-log-file',
          {'type' : 'string',
           'default': None,
           'help': 'web instance query log file',
-          'group': 'main', 'inputlevel': 3,
+          'group': 'web', 'level': 3,
           }),
         # web configuration
         ('https-url',
@@ -110,20 +110,20 @@
           'differentiate between http vs https access. For instance: \n'\
           'RewriteRule ^/demo/(.*) http://127.0.0.1:8080/https/$1 [L,P]\n'\
           'where the cubicweb web server is listening on port 8080.',
-          'group': 'main', 'inputlevel': 3,
+          'group': 'main', 'level': 3,
           }),
         ('auth-mode',
          {'type' : 'choice',
           'choices' : ('cookie', 'http'),
           'default': 'cookie',
           'help': 'authentication mode (cookie / http)',
-          'group': 'web', 'inputlevel': 3,
+          'group': 'web', 'level': 3,
           }),
         ('realm',
          {'type' : 'string',
           'default': 'cubicweb',
           'help': 'realm to use on HTTP authentication mode',
-          'group': 'web', 'inputlevel': 3,
+          'group': 'web', 'level': 3,
           }),
         ('http-session-time',
          {'type' : 'time',
@@ -131,7 +131,7 @@
           'help': "duration of the cookie used to store session identifier. "
           "If 0, the cookie will expire when the user exist its browser. "
           "Should be 0 or greater than repository\'s session-time.",
-          'group': 'web', 'inputlevel': 3,
+          'group': 'web', 'level': 2,
           }),
         ('cleanup-session-time',
          {'type' : 'time',
@@ -142,7 +142,7 @@
           'So even if http-session-time is 0 and the user doesn\'t close his '
           'browser, he will have to reauthenticate after this time of '
           'inactivity. Defaults to 24h.',
-          'group': 'web', 'inputlevel': 3,
+          'group': 'web', 'level': 3,
           }),
         ('cleanup-anonymous-session-time',
          {'type' : 'time',
@@ -150,14 +150,14 @@
           'help': 'Same as cleanup-session-time but specific to anonymous '
           'sessions. You can have a much smaller timeout here since it will be '
           'transparent to the user. Defaults to 5min.',
-          'group': 'web', 'inputlevel': 3,
+          'group': 'web', 'level': 3,
           }),
         ('force-html-content-type',
          {'type' : 'yn',
           'default': False,
           'help': 'force text/html content type for your html pages instead of cubicweb user-agent based'\
           'deduction of an appropriate content type',
-          'group': 'web', 'inputlevel': 3,
+          'group': 'web', 'level': 3,
           }),
         ('embed-allowed',
          {'type' : 'regexp',
@@ -165,14 +165,14 @@
           'help': 'regular expression matching URLs that may be embedded. \
 leave it blank if you don\'t want the embedding feature, or set it to ".*" \
 if you want to allow everything',
-          'group': 'web', 'inputlevel': 3,
+          'group': 'web', 'level': 3,
           }),
         ('submit-mail',
          {'type' : 'string',
           'default': None,
           'help': ('Mail used as recipient to report bug in this instance, '
                    'if you want this feature on'),
-          'group': 'web', 'inputlevel': 2,
+          'group': 'web', 'level': 2,
           }),
 
         ('language-negociation',
@@ -180,14 +180,14 @@
           'default': True,
           'help': 'use Accept-Language http header to try to set user '\
           'interface\'s language according to browser defined preferences',
-          'group': 'web', 'inputlevel': 2,
+          'group': 'web', 'level': 2,
           }),
 
         ('print-traceback',
          {'type' : 'yn',
           'default': CubicWebConfiguration.mode != 'system',
           'help': 'print the traceback on the error page when an error occurred',
-          'group': 'web', 'inputlevel': 2,
+          'group': 'web', 'level': 2,
           }),
 
         ('captcha-font-file',
@@ -195,14 +195,14 @@
           'default': join(CubicWebConfiguration.shared_dir(), 'data', 'porkys.ttf'),
           'help': 'True type font to use for captcha image generation (you \
 must have the python imaging library installed to use captcha)',
-          'group': 'web', 'inputlevel': 3,
+          'group': 'web', 'level': 3,
           }),
         ('captcha-font-size',
          {'type' : 'int',
           'default': 25,
           'help': 'Font size to use for captcha image generation (you must \
 have the python imaging library installed to use captcha)',
-          'group': 'web', 'inputlevel': 3,
+          'group': 'web', 'level': 3,
           }),
 
         ))
--- a/wsgi/handler.py	Thu May 06 08:24:46 2010 +0200
+++ b/wsgi/handler.py	Mon Jul 19 15:36:16 2010 +0200
@@ -22,8 +22,7 @@
 __docformat__ = "restructuredtext en"
 
 from cubicweb import AuthenticationError
-from cubicweb.web import (NotFound, Redirect, DirectResponse, StatusResponse,
-                          ExplicitLogin)
+from cubicweb.web import Redirect, DirectResponse, StatusResponse, LogOut
 from cubicweb.web.application import CubicWebPublisher
 from cubicweb.wsgi.request import CubicWebWsgiRequest
 
@@ -126,8 +125,6 @@
             req.set_header('WWW-Authenticate', [('Basic', {'realm' : realm })], raw=False)
         try:
             self.appli.connect(req)
-        except AuthenticationError:
-            return self.request_auth(req)
         except Redirect, ex:
             return self.redirect(req, ex.location)
         path = req.path
@@ -139,12 +136,9 @@
             return WSGIResponse(200, req, ex.response)
         except StatusResponse, ex:
             return WSGIResponse(ex.status, req, ex.content)
-        except NotFound:
-            result = self.appli.notfound_content(req)
-            return WSGIResponse(404, req, result)
-        except ExplicitLogin:  # must be before AuthenticationError
+        except AuthenticationError:
             return self.request_auth(req)
-        except AuthenticationError:
+        except LogOut:
             if self.config['auth-mode'] == 'cookie':
                 # in cookie mode redirecting to the index view is enough :
                 # either anonymous connection is allowed and the page will
--- a/wsgi/request.py	Thu May 06 08:24:46 2010 +0200
+++ b/wsgi/request.py	Mon Jul 19 15:36:16 2010 +0200
@@ -51,9 +51,9 @@
         post, files = self.get_posted_data()
         super(CubicWebWsgiRequest, self).__init__(vreg, https, post)
         if files is not None:
-            for fdef in files.itervalues():
-                fdef[0] = unicode(fdef[0], self.encoding)
-            self.form.update(files)
+            for key, (name, _, stream) in files.iteritems():
+                name = unicode(name, self.encoding)
+                self.form[key] = (name, stream)
         # prepare output headers
         self.headers_out = {}
 
--- a/xy.py	Thu May 06 08:24:46 2010 +0200
+++ b/xy.py	Mon Jul 19 15:36:16 2010 +0200
@@ -15,19 +15,21 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""map standard cubicweb schema to xml vocabularies
-
-"""
+"""map standard cubicweb schema to xml vocabularies"""
 
 from yams import xy
 
+xy.register_prefix('http://www.w3.org/1999/02/22-rdf-syntax-ns#', 'rdf')
 xy.register_prefix('http://purl.org/dc/elements/1.1/', 'dc')
-xy.register_prefix('http://xmlns.com/foaf/0.1/', 'foaf')
-xy.register_prefix('http://usefulinc.com/ns/doap#', 'doap')
+xy.register_prefix('http://xmlns.com/foaf/0.1/',       'foaf')
+xy.register_prefix('http://usefulinc.com/ns/doap#',    'doap')
+xy.register_prefix('http://rdfs.org/sioc/ns#',         'sioc')
+xy.register_prefix('http://www.w3.org/2002/07/owl#',   'owl')
+xy.register_prefix('http://purl.org/dc/terms/',        'dcterms')
 
 xy.add_equivalence('creation_date', 'dc:date')
 xy.add_equivalence('created_by', 'dc:creator')
 xy.add_equivalence('description', 'dc:description')
 xy.add_equivalence('CWUser', 'foaf:Person')
-xy.add_equivalence('CWUser login', 'dc:title')
+xy.add_equivalence('CWUser login', 'foaf:Person dc:title')
 xy.add_equivalence('CWUser surname', 'foaf:Person foaf:name')
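A cube can extend this mapping with the same two calls; a minimal hypothetical example (the BlogEntry entity type and its SIOC mapping are assumptions about a third-party cube, not part of this changeset):

    # hypothetical cube-side additions reusing the API shown above
    from yams import xy

    xy.add_equivalence('BlogEntry', 'sioc:Post')
    xy.add_equivalence('BlogEntry title', 'sioc:Post dc:title')
    xy.add_equivalence('BlogEntry content', 'sioc:Post sioc:content')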