oldstable is 3.13.X
authorSylvain Thénault <sylvain.thenault@logilab.fr>
Fri, 09 Dec 2011 12:08:27 +0100
brancholdstable
changeset 8123 a4e667270dd4
parent 7863 d8bb8f631d41 (current diff)
parent 8118 7b2c7f3d3703 (diff)
child 8176 eff4fe02ec64
oldstable is 3.13.X
devtools/livetest.py
--- a/.hgignore	Mon Sep 26 18:37:23 2011 +0200
+++ b/.hgignore	Fri Dec 09 12:08:27 2011 +0100
@@ -14,4 +14,6 @@
 .*/data/database/.*\.sqlite
 .*/data/database/.*\.config
 .*/data/database/tmpdb.*
-
+^doc/html/
+^doc/doctrees/
+^doc/book/en/devweb/js_api/
--- a/.hgtags	Mon Sep 26 18:37:23 2011 +0200
+++ b/.hgtags	Fri Dec 09 12:08:27 2011 +0100
@@ -204,11 +204,34 @@
 6dfe78a0797ccc34962510f8c2a57f63d65ce41e cubicweb-debian-version-3.12.5-1
 a18dac758150fe9c1f9e4958d898717c32a8f679 cubicweb-version-3.12.6
 105767487c7075dbcce36474f1af0485985cbf2c cubicweb-debian-version-3.12.6-1
+b661ef475260ca7d9ea5c36ba2cc86e95e5b17d3 cubicweb-version-3.13.0
+a96137858f571711678954477da6f7f435870cea cubicweb-debian-version-3.13.0-1
 628fe57ce746c1dac87fb1b078b2026057df894e cubicweb-version-3.12.7
 a07517985136bbbfa6610c428a1b42cd04cd530b cubicweb-debian-version-3.12.7-1
 50122a47ce4fb2ecbf3cf20ed2777f4276c93609 cubicweb-version-3.12.8
 cf49ed55685a810d8d73585330ad1a57cc76260d cubicweb-debian-version-3.12.8-1
 cb2990aaa63cbfe593bcf3afdbb9071e4c76815a cubicweb-version-3.12.9
 92464e39134c70e4ddbe6cd78a6e3338a3b88b05 cubicweb-debian-version-3.12.9-1
+7d84317ef185a10c5eb78e6086f2297d2f4bd1e3 cubicweb-version-3.13.1
+cc0578049cbe8b1d40009728e36c17e45da1fc6b cubicweb-debian-version-3.13.1-1
+f9227b9d61835f03163b8133a96da35db37a0c8d cubicweb-version-3.13.2
+9ad5411199e00b2611366439b82f35d7d3285423 cubicweb-debian-version-3.13.2-1
+0e82e7e5a34f57d7239c7a42e48ba4d5e53abab2 cubicweb-version-3.13.3
+fb48c55cb80234bc0164c9bcc0e2cfc428836e5f cubicweb-debian-version-3.13.3-1
+223ecf0620b6c87d997f8011aca0d9f0ee4750af cubicweb-version-3.13.4
+52f26475d764129c5559b2d80fd57e6ea1bdd6ba cubicweb-debian-version-3.13.4-1
+a62f24e1497e953fbaed5894f6064a64f7ac0be3 cubicweb-version-3.10.x
+20d9c550c57eb6f9adcb0cfab1c11b6b8793afb6 cubicweb-version-3.13.5
+2e9dd7d945557c210d3b79153c65f6885e755315 cubicweb-debian-version-3.13.5-1
 074c848a3712a77737d9a1bfbb618c75f5c0cbfa cubicweb-version-3.12.10
 9dfd21fa0a8b9f121a08866ad3e2ebd1dd06790d cubicweb-debian-version-3.12.10-1
+17c007ad845abbac82e12146abab32a634657574 cubicweb-version-3.13.6
+8a8949ca5351d48c5cf795ccdff06c1d4aab2ce0 cubicweb-debian-version-3.13.6-1
+68e8c81fa96d6bcd21cc17bc9832d388ce05a9eb cubicweb-version-3.13.7
+2f93ce32febe2f82565994fbd454f331f76ca883 cubicweb-debian-version-3.13.7-1
+249bd41693392d4716686f05c6b84628cd14dfcd cubicweb-version-3.13.8
+43f83f5d0a4d57a06e9a4990bc957fcfa691eec3 cubicweb-debian-version-3.13.8-1
+07afe32945aa275052747f78ef1f55858aaf6fa9 cubicweb-version-3.13.9
+0a3cb5e60d57a7a9851371b4ae487094ec2bf614 cubicweb-debian-version-3.13.9-1
+2ad4e5173c73a43804c265207bcabb8940bd42f4 cubicweb-version-3.13.10
+2eab9a5a6bf8e3b0cf706bee8cdf697759c0a33a cubicweb-debian-version-3.13.10-1
--- a/MANIFEST.in	Mon Sep 26 18:37:23 2011 +0200
+++ b/MANIFEST.in	Fri Dec 09 12:08:27 2011 +0100
@@ -5,7 +5,7 @@
 include bin/cubicweb-*
 include man/cubicweb-ctl.1
 
-recursive-include doc README makefile *.conf *.css *.py *.rst *.txt *.html *.png *.svg *.zargo *.dia
+recursive-include doc README makefile *.conf *.js *.css *.py *.rst *.txt *.html *.png *.svg *.zargo *.dia
 
 recursive-include misc *.py *.png *.display
 
@@ -32,5 +32,6 @@
 
 prune doc/book/en/.static
 prune doc/book/fr/.static
+prune doc/html/_sources/
 prune misc/cwfs
 prune goa
--- a/README	Mon Sep 26 18:37:23 2011 +0200
+++ b/README	Fri Dec 09 12:08:27 2011 +0100
@@ -5,20 +5,21 @@
 developped at Logilab.
 
 This package contains:
-* a repository server
-* a RQL command line client to the repository
-* an adaptative modpython interface to the server
-* a bunch of other management tools
+
+- a repository server
+- a RQL command line client to the repository
+- an adaptative modpython interface to the server
+- a bunch of other management tools
 
 Install
 -------
 
-More details at http://www.cubicweb.org/doc/en/admin/setup
+More details at http://docs.cubicweb.org/admin/setup
 
 Getting started
 ---------------
 
-Execute:
+Execute::
 
  apt-get install cubicweb cubicweb-dev cubicweb-blog
  cubicweb-ctl create blog myblog
--- a/__init__.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/__init__.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
--- a/__pkginfo__.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/__pkginfo__.py	Fri Dec 09 12:08:27 2011 +0100
@@ -22,7 +22,7 @@
 
 modname = distname = "cubicweb"
 
-numversion = (3, 12, 10)
+numversion = (3, 13, 10)
 version = '.'.join(str(num) for num in numversion)
 
 description = "a repository of entities / relations for knowledge management"
@@ -40,10 +40,10 @@
 ]
 
 __depends__ = {
-    'logilab-common': '>= 0.55.2',
+    'logilab-common': '>= 0.56.2',
     'logilab-mtconverter': '>= 0.8.0',
     'rql': '>= 0.28.0',
-    'yams': '>= 0.32.0',
+    'yams': '>= 0.33.0',
     'docutils': '>= 0.6',
     #gettext                    # for xgettext, msgcat, etc...
     # web dependancies
--- a/appobject.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/appobject.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -180,12 +180,13 @@
         return self.__class__.__name__
 
     def search_selector(self, selector):
-        """search for the given selector or selector instance in the selectors
-        tree. Return it of None if not found
+        """search for the given selector, selector instance or tuple of
+        selectors in the selectors tree. Return None if not found.
         """
         if self is selector:
             return self
-        if isinstance(selector, type) and isinstance(self, selector):
+        if (isinstance(selector, type) or isinstance(selector, tuple)) and \
+               isinstance(self, selector):
             return self
         return None
 
@@ -240,7 +241,7 @@
         for selector in selectors:
             try:
                 selector = _instantiate_selector(selector)
-            except:
+            except Exception:
                 pass
             #assert isinstance(selector, Selector), selector
             if isinstance(selector, cls):
@@ -250,8 +251,8 @@
         return merged_selectors
 
     def search_selector(self, selector):
-        """search for the given selector or selector instance in the selectors
-        tree. Return it of None if not found
+        """search for the given selector or selector instance (or tuple of
+        selectors) in the selectors tree. Return None if not found
         """
         for childselector in self.selectors:
             if childselector is selector:
@@ -259,7 +260,8 @@
             found = childselector.search_selector(selector)
             if found is not None:
                 return found
-        return None
+        # if not found in children, maybe we are looking for self?
+        return super(MultiSelector, self).search_selector(selector)
 
 
 class AndSelector(MultiSelector):
@@ -322,7 +324,7 @@
     selected according to a context (usually at least a request and a result
     set).
 
-    The following attributes should be set on concret appobject classes:
+    The following attributes should be set on concrete appobject classes:
 
     :attr:`__registry__`
       name of the registry for this object (string like 'views',
@@ -413,7 +415,7 @@
         appobject is returned without any transformation.
         """
         try: # XXX < 3.6 bw compat
-            pdefs = cls.property_defs
+            pdefs = cls.property_defs # pylint: disable=E1101
         except AttributeError:
             pdefs = getattr(cls, 'cw_property_defs', {})
         else:
--- a/crypto.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/crypto.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -15,9 +15,7 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""Simple cryptographic routines, based on python-crypto.
-
-"""
+"""Simple cryptographic routines, based on python-crypto."""
 __docformat__ = "restructuredtext en"
 
 from pickle import dumps, loads
--- a/cwconfig.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/cwconfig.py	Fri Dec 09 12:08:27 2011 +0100
@@ -22,6 +22,9 @@
 Resource mode
 -------------
 
+Standard resource mode
+```````````````````````````
+
 A resource *mode* is a predefined set of settings for various resources
 directories, such as cubes, instances, etc. to ease development with the
 framework. There are two running modes with *CubicWeb*:
@@ -30,7 +33,7 @@
   usually requiring root access):
 
   - instances are stored in :file:`<INSTALL_PREFIX>/etc/cubicweb.d`
-  - temporary files (such as pid file) in :file:`/var/run/cubicweb`
+  - temporary files (such as pid file) in :file:`<INSTALL_PREFIX>/var/run/cubicweb`
 
   where `<INSTALL_PREFIX>` is the detected installation prefix ('/usr/local' for
   instance).
@@ -42,6 +45,25 @@
 
 
 
+
+.. _CubicwebWithinVirtualEnv:
+
+Within virtual environment
+```````````````````````````
+
+If you are not administrator of your machine or if you need to play with some
+specific version of |cubicweb| you can use `virtualenv`_, a tool to create
+isolated Python environments.  Since version 3.9 |cubicweb| is **`virtualenv`
+friendly** and won't write any file outside the virtualenv directory.
+
+- instances are stored in :file:`<VIRTUAL_ENV>/etc/cubicweb.d`
+- temporary files (such as pid file) in :file:`<VIRTUAL_ENV>/var/run/cubicweb`
+
+.. _`virtualenv`: http://pypi.python.org/pypi/virtualenv
+
+Custom resource location
+````````````````````````````````
+
 Notice that each resource path may be explicitly set using an environment
 variable if the default doesn't suit your needs. Here are the default resource
 directories that are affected according to mode:
@@ -49,8 +71,8 @@
 * **system**: ::
 
         CW_INSTANCES_DIR = <INSTALL_PREFIX>/etc/cubicweb.d/
-        CW_INSTANCES_DATA_DIR = /var/lib/cubicweb/instances/
-        CW_RUNTIME_DIR = /var/run/cubicweb/
+        CW_INSTANCES_DATA_DIR = <INSTALL_PREFIX>/var/lib/cubicweb/instances/
+        CW_RUNTIME_DIR = <INSTALL_PREFIX>/var/run/cubicweb/
 
 * **user**: ::
 
@@ -60,10 +82,13 @@
 
 Cubes search path is also affected, see the :ref:`Cube` section.
 
-By default, the mode automatically set to `user` if a :file:`.hg` directory is found
-in the cubicweb package, else it's set to `system`. You can force this by setting
-the :envvar:`CW_MODE` environment variable to either `user` or `system` so you can
-easily:
+Setting Cubicweb Mode
+`````````````````````
+
+By default, the mode is set to 'system' for standard installation. The mode is
+set to 'user' if `cubicweb is used from a mercurial repository`_. You can force
+this by setting the :envvar:`CW_MODE` environment variable to either 'user' or
+'system' so you can easily:
 
 * use system wide installation but user specific instances and all, without root
   privileges on the system (`export CW_MODE=user`)
@@ -74,7 +99,15 @@
 If you've a doubt about the mode you're currently running, check the first line
 outputed by the :command:`cubicweb-ctl list` command.
 
-Also, if cubicweb is a mercurial checkout located in `<CW_SOFTWARE_ROOT>`:
+.. _`cubicweb is used from a mercurial repository`: CubicwebDevelopmentMod_
+
+.. _CubicwebDevelopmentMod:
+
+Development Mode
+`````````````````````
+If a :file:`.hg` directory is found in the cubicweb package, there are specific resource rules.
+
+`<CW_SOFTWARE_ROOT>` is the mercurial checkout of cubicweb:
 
 * main cubes directory is `<CW_SOFTWARE_ROOT>/../cubes`. You can specify
   another one with :envvar:`CW_INSTANCES_DIR` environment variable or simply
@@ -144,7 +177,8 @@
 from threading import Lock
 from os.path import (exists, join, expanduser, abspath, normpath,
                      basename, isdir, dirname, splitext)
-from warnings import warn
+from warnings import warn, filterwarnings
+
 from logilab.common.decorators import cached, classproperty
 from logilab.common.deprecation import deprecated
 from logilab.common.logging_ext import set_log_methods, init_log
@@ -618,7 +652,7 @@
                 try:
                     __import__('cubes.%s.ccplugin' % cube)
                     cls.info('loaded cubicweb-ctl plugin from %s', cube)
-                except:
+                except Exception:
                     cls.exception('while loading plugin %s', pluginfile)
             elif exists(oldpluginfile):
                 warn('[3.6] %s: ecplugin module should be renamed to ccplugin' % cube,
@@ -626,12 +660,12 @@
                 try:
                     __import__('cubes.%s.ecplugin' % cube)
                     cls.info('loaded cubicweb-ctl plugin from %s', cube)
-                except:
+                except Exception:
                     cls.exception('while loading plugin %s', oldpluginfile)
             elif exists(initfile):
                 try:
                     __import__('cubes.%s' % cube)
-                except:
+                except Exception:
                     cls.exception('while loading cube %s', cube)
             else:
                 cls.warning('no __init__ file in cube %s', cube)
@@ -696,6 +730,9 @@
         return vregpath
 
     def __init__(self, debugmode=False):
+        if debugmode:
+            # in python 2.7, DeprecationWarning are not shown anymore by default
+            filterwarnings('default', category=DeprecationWarning)
         register_stored_procedures()
         self._cubes = None
         super(CubicWebNoAppConfiguration, self).__init__()
@@ -826,6 +863,13 @@
         """
         return [self.cube_dir(p) for p in self.cubes()]
 
+    # these are overridden by set_log_methods below
+    # only defining here to prevent pylint from complaining
+    @classmethod
+    def debug(cls, msg, *a, **kw):
+        pass
+    info = warning = error = critical = exception = debug
+
 
 class CubicWebConfiguration(CubicWebNoAppConfiguration):
     """base class for cubicweb server and web configurations"""
@@ -849,6 +893,9 @@
     # wouldn't be possible otherwise
     repairing = False
 
+    # set by upgrade command
+    verbosity = 0
+
     options = CubicWebNoAppConfiguration.options + (
         ('log-file',
          {'type' : 'string',
@@ -1068,13 +1115,13 @@
 
     @cached
     def instance_md5_version(self):
-        import hashlib
+        from hashlib import md5 # pylint: disable=E0611
         infos = []
         for pkg in sorted(self.cubes()):
             version = self.cube_version(pkg)
             infos.append('%s-%s' % (pkg, version))
         infos.append('cubicweb-%s' % str(self.cubicweb_version()))
-        return hashlib.md5(';'.join(infos)).hexdigest()
+        return md5(';'.join(infos)).hexdigest()
 
     def load_configuration(self):
         """load instance's configuration files"""
@@ -1118,7 +1165,7 @@
 
     def _gettext_init(self):
         """set language for gettext"""
-        from gettext import translation
+        from cubicweb.gettext import translation
         path = join(self.apphome, 'i18n')
         for language in self.available_languages():
             self.info("loading language %s", language)
@@ -1184,13 +1231,6 @@
             SMTP_LOCK.release()
         return True
 
-    # these are overridden by set_log_methods below
-    # only defining here to prevent pylint from complaining
-    @classmethod
-    def debug(cls, msg, *a, **kw):
-        pass
-    info = warning = error = critical = exception = debug 
-
 set_log_methods(CubicWebNoAppConfiguration,
                 logging.getLogger('cubicweb.configuration'))
 
@@ -1235,6 +1275,7 @@
 
     class LIMIT_SIZE(FunctionDescr):
         supported_backends = ('postgres', 'sqlite',)
+        minargs = maxargs = 3
         rtype = 'String'
 
         def st_description(self, funcnode, mainindex, tr):
@@ -1245,6 +1286,7 @@
 
     class TEXT_LIMIT_SIZE(LIMIT_SIZE):
         supported_backends = ('mysql', 'postgres', 'sqlite',)
+        minargs = maxargs = 2
 
     register_function(TEXT_LIMIT_SIZE)
 
@@ -1297,7 +1339,7 @@
             try:
                 return Binary(fpath)
             except OSError, ex:
-                self.critical("can't open %s: %s", fpath, ex)
+                source.critical("can't open %s: %s", fpath, ex)
                 return None
 
     register_function(FSPATH)
--- a/cwctl.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/cwctl.py	Fri Dec 09 12:08:27 2011 +0100
@@ -554,7 +554,7 @@
         pid = int(open(pidf).read().strip())
         try:
             kill(pid, signal.SIGTERM)
-        except:
+        except Exception:
             print >> sys.stderr, "process %s seems already dead." % pid
         else:
             try:
@@ -564,7 +564,7 @@
                 print >> sys.stderr, 'trying SIGKILL'
                 try:
                     kill(pid, signal.SIGKILL)
-                except:
+                except Exception:
                     # probably dead now
                     pass
                 wait_process_end(pid)
@@ -728,11 +728,9 @@
         config = cwcfg.config_for(appid)
         config.repairing = True # notice we're not starting the server
         config.verbosity = self.config.verbosity
-        try:
-            config.set_sources_mode(self.config.ext_sources or ('migration',))
-        except AttributeError:
-            # not a server config
-            pass
+        set_sources_mode = getattr(config, 'set_sources_mode', None)
+        if set_sources_mode is not None:
+            set_sources_mode(self.config.ext_sources or ('migration',))
         # get instance and installed versions for the server and the componants
         mih = config.migration_handler()
         repo = mih.repo_connect()
@@ -802,6 +800,28 @@
                 return False
         return True
 
+
+class ListVersionsInstanceCommand(InstanceCommand):
+    """List versions used by an instance.
+
+    <instance>...
+      identifiers of the instances to list versions for.
+    """
+    name = 'versions'
+
+    def versions_instance(self, appid):
+        from logilab.common.changelog import Version
+        config = cwcfg.config_for(appid)
+        # should not raise error if db versions don't match fs versions
+        config.repairing = True
+        if hasattr(config, 'set_sources_mode'):
+            config.set_sources_mode(('migration',))
+        repo = config.migration_handler().repo_connect()
+        vcconf = repo.get_versions()
+        for key in sorted(vcconf):
+            print key+': %s.%s.%s' % vcconf[key]
+
+
 class ShellCommand(Command):
     """Run an interactive migration shell on an instance. This is a python shell
     with enhanced migration commands predefined in the namespace. An additional
@@ -964,6 +984,7 @@
                StartInstanceCommand, StopInstanceCommand, RestartInstanceCommand,
                ReloadConfigurationCommand, StatusCommand,
                UpgradeInstanceCommand,
+               ListVersionsInstanceCommand,
                ShellCommand,
                RecompileInstanceCatalogsCommand,
                ListInstancesCommand, ListCubesCommand,
--- a/cwvreg.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/cwvreg.py	Fri Dec 09 12:08:27 2011 +0100
@@ -194,17 +194,18 @@
 _ = unicode
 
 from warnings import warn
+from datetime import datetime, date, time, timedelta
 
 from logilab.common.decorators import cached, clear_cache
 from logilab.common.deprecation import deprecated, class_deprecated
 from logilab.common.modutils import cleanup_sys_modules
 
 from rql import RQLHelper
+from yams.constraints import BASE_CONVERTERS
 
 from cubicweb import (ETYPE_NAME_MAP, Binary, UnknownProperty, UnknownEid,
                       ObjectNotFound, NoSelectableObject, RegistryNotFound,
                       CW_EVENT_MANAGER)
-from cubicweb.utils import dump_class
 from cubicweb.vregistry import VRegistry, Registry, class_regid, classid
 from cubicweb.rtags import RTAGS
 
@@ -368,7 +369,10 @@
         # make a copy event if cls.__regid__ == etype, else we may have pb for
         # client application using multiple connections to different
         # repositories (eg shingouz)
-        cls = dump_class(cls, etype)
+        # __autogenerated__ attribute is just a marker
+        cls = type(str(etype), (cls,), {'__autogenerated__': True,
+                                        '__doc__': cls.__doc__,
+                                        '__module__': cls.__module__})
         cls.__regid__ = etype
         cls.__initialize__(self.schema)
         return cls
@@ -412,10 +416,8 @@
                      if not isinstance(view, class_deprecated)]
             try:
                 view = self._select_best(views, req, rset=rset, **kwargs)
-                if view.linkable():
+                if view is not None and view.linkable():
                     yield view
-            except NoSelectableObject:
-                continue
             except Exception:
                 self.exception('error while trying to select %s view for %s',
                                vid, rset)
@@ -849,24 +851,15 @@
         return self['views'].select(__vid, req, rset=rset, **kwargs)
 
 
-import decimal
-from datetime import datetime, date, time, timedelta
-
-YAMS_TO_PY = { # XXX unify with yams.constraints.BASE_CONVERTERS?
-    'String' :  unicode,
-    'Bytes':    Binary,
-    'Password': str,
-
-    'Boolean':  bool,
-    'Int':      int,
-    'Float':    float,
-    'Decimal':  decimal.Decimal,
-
+# XXX unify with yams.constraints.BASE_CONVERTERS?
+YAMS_TO_PY = BASE_CONVERTERS.copy()
+YAMS_TO_PY.update({
+    'Bytes':      Binary,
     'Date':       date,
     'Datetime':   datetime,
     'TZDatetime': datetime,
     'Time':       time,
     'TZTime':     time,
     'Interval':   timedelta,
-    }
+    })
 
--- a/dataimport.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/dataimport.py	Fri Dec 09 12:08:27 2011 +0100
@@ -445,14 +445,14 @@
         ObjectStore.__init__(self)
         if session is None:
             sys.exit('please provide a session of run this script with cubicweb-ctl shell and pass cnx as session')
-        if not hasattr(session, 'set_pool'):
+        if not hasattr(session, 'set_cnxset'):
             # connection
             cnx = session
             session = session.request()
-            session.set_pool = lambda : None
+            session.set_cnxset = lambda : None
             commit = commit or cnx.commit
         else:
-            session.set_pool()
+            session.set_cnxset()
         self.session = session
         self._commit = commit or session.commit
 
@@ -462,7 +462,7 @@
 
     def commit(self):
         txuuid = self._commit()
-        self.session.set_pool()
+        self.session.set_cnxset()
         return txuuid
 
     def rql(self, *args):
@@ -554,7 +554,7 @@
             self.tell("Run import function '%s'..." % func_name)
             try:
                 func(self)
-            except:
+            except Exception:
                 if self.catcherrors:
                     self.record_error(func_name, 'While calling %s' % func.__name__)
                 else:
@@ -642,7 +642,9 @@
         for k, v in kwargs.iteritems():
             kwargs[k] = getattr(v, 'eid', v)
         entity, rels = self.metagen.base_etype_dicts(etype)
+        # make a copy to keep cached entity pristine
         entity = copy(entity)
+        entity.cw_edited = copy(entity.cw_edited)
         entity.cw_clear_relation_cache()
         self.metagen.init_entity(entity)
         entity.cw_edited.update(kwargs, skipsec=False)
--- a/dbapi.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/dbapi.py	Fri Dec 09 12:08:27 2011 +0100
@@ -254,6 +254,8 @@
     def anonymous_session(self):
         return not self.cnx or self.cnx.anonymous_connection
 
+    def __repr__(self):
+        return '<DBAPISession %r>' % self.sessionid
 
 class DBAPIRequest(RequestSessionBase):
 
@@ -292,7 +294,7 @@
             self.user = user
             self.set_entity_cache(user)
 
-    def execute(self, *args, **kwargs):
+    def execute(self, *args, **kwargs): # pylint: disable=E0202
         """overriden when session is set. By default raise authentication error
         so authentication is requested.
         """
@@ -301,7 +303,7 @@
     def set_default_language(self, vreg):
         try:
             self.lang = vreg.property_value('ui.language')
-        except: # property may not be registered
+        except Exception: # property may not be registered
             self.lang = 'en'
         # use req.__ to translate a message without registering it to the catalog
         try:
@@ -311,7 +313,7 @@
         except KeyError:
             # this occurs usually during test execution
             self._ = self.__ = unicode
-            self.pgettext = lambda x, y: y
+            self.pgettext = lambda x, y: unicode(y)
         self.debug('request default language: %s', self.lang)
 
     # entities cache management ###############################################
@@ -347,9 +349,9 @@
 
     # server session compat layer #############################################
 
-    def describe(self, eid):
+    def describe(self, eid, asdict=False):
         """return a tuple (type, sourceuri, extid) for the entity with id <eid>"""
-        return self.cnx.describe(eid)
+        return self.cnx.describe(eid, asdict)
 
     def source_defs(self):
         """return the definition of sources used by the repository."""
@@ -483,7 +485,7 @@
 def check_not_closed(func):
     def decorator(self, *args, **kwargs):
         if self._closed is not None:
-            raise ProgrammingError('Closed connection')
+            raise ProgrammingError('Closed connection %s' % self.sessionid)
         return func(self, *args, **kwargs)
     return decorator
 
@@ -532,7 +534,7 @@
         if self._closed is None and self._close_on_del:
             try:
                 self.close()
-            except:
+            except Exception:
                 pass
 
     # connection initialization methods ########################################
@@ -621,7 +623,8 @@
         """
         return self._repo.check_session(self.sessionid)
 
-    def _txid(self, cursor=None): # XXX could now handle various isolation level!
+    def _txid(self, cursor=None): # pylint: disable=E0202
+        # XXX could now handle various isolation level!
         # return a dict as bw compat trick
         return {'txid': currentThread().getName()}
 
@@ -675,8 +678,15 @@
         return self._repo.get_option_value(option, foreid)
 
     @check_not_closed
-    def describe(self, eid):
-        return self._repo.describe(self.sessionid, eid, **self._txid())
+    def describe(self, eid, asdict=False):
+        metas = self._repo.describe(self.sessionid, eid, **self._txid())
+        if len(metas) == 3: # backward compat
+            metas = list(metas)
+            metas.append(metas[1])
+        if asdict:
+            return dict(zip(('type', 'source', 'extid', 'asource'), metas))
+        # XXX :-1 for cw compat, use asdict=True for full information
+        return metas[:-1]
 
     # db-api like interface ####################################################
 
--- a/debian/changelog	Mon Sep 26 18:37:23 2011 +0200
+++ b/debian/changelog	Fri Dec 09 12:08:27 2011 +0100
@@ -1,3 +1,63 @@
+cubicweb (3.13.10-1) unstable; urgency=low
+
+  * new upstream release
+
+ -- Sylvain Thénault <sylvain.thenault@logilab.fr>  Thu, 08 Dec 2011 13:22:05 +0100
+
+cubicweb (3.13.9-1) unstable; urgency=low
+
+  * new upstream release
+
+ -- Sylvain Thénault <sylvain.thenault@logilab.fr>  Fri, 21 Oct 2011 11:03:45 +0200
+
+cubicweb (3.13.8-1) unstable; urgency=low
+
+  * new upstream release
+
+ -- Sylvain Thénault <sylvain.thenault@logilab.fr>  Fri, 07 Oct 2011 16:20:35 +0200
+
+cubicweb (3.13.7-1) unstable; urgency=low
+
+  * new upstream release
+
+ -- Sylvain Thénault <sylvain.thenault@logilab.fr>  Thu, 29 Sep 2011 14:08:07 +0200
+
+cubicweb (3.13.6-1) unstable; urgency=low
+
+  * new upstream release
+
+ -- Sylvain Thénault <sylvain.thenault@logilab.fr>  Mon, 26 Sep 2011 18:36:00 +0200
+
+cubicweb (3.13.5-1) unstable; urgency=low
+
+  * new upstream release
+
+ -- Sylvain Thénault <sylvain.thenault@logilab.fr>  Thu, 08 Sep 2011 16:53:13 +0200
+
+cubicweb (3.13.4-1) unstable; urgency=low
+
+  * new upstream release
+
+ -- Sylvain Thénault <sylvain.thenault@logilab.fr>  Fri, 05 Aug 2011 12:22:11 +0200
+
+cubicweb (3.13.3-1) unstable; urgency=low
+
+  * new upstream release
+
+ -- Sylvain Thénault <sylvain.thenault@logilab.fr>  Wed, 27 Jul 2011 19:06:16 +0200
+
+cubicweb (3.13.2-1) unstable; urgency=low
+
+  * new upstream release
+
+ -- Sylvain Thénault <sylvain.thenault@logilab.fr>  Wed, 20 Jul 2011 17:15:22 +0200
+
+cubicweb (3.13.1-1) unstable; urgency=low
+
+  * new upstream release
+
+ -- Sylvain Thénault <sylvain.thenault@logilab.fr>  Tue, 12 Jul 2011 12:23:54 +0200
+
 cubicweb (3.12.10-1) unstable; urgency=low
 
   * new upstream release
@@ -10,6 +70,12 @@
 
  -- Sylvain Thénault <sylvain.thenault@logilab.fr>  Tue, 12 Jul 2011 11:30:10 +0200
 
+cubicweb (3.13.0-1) unstable; urgency=low
+
+  * new upstream release
+
+ -- Sylvain Thénault <sylvain.thenault@logilab.fr>  Thu, 09 Jun 2011 20:18:41 +0200
+
 cubicweb (3.12.8-1) unstable; urgency=low
 
   * new upstream release
--- a/debian/control	Mon Sep 26 18:37:23 2011 +0200
+++ b/debian/control	Fri Dec 09 12:08:27 2011 +0100
@@ -7,9 +7,9 @@
            Adrien Di Mascio <Adrien.DiMascio@logilab.fr>,
            Aurélien Campéas <aurelien.campeas@logilab.fr>,
            Nicolas Chauvat <nicolas.chauvat@logilab.fr>
-Build-Depends: debhelper (>= 7), python (>= 2.5), python-central (>= 0.5)
+Build-Depends: debhelper (>= 7), python (>= 2.5), python-central (>= 0.5), python-sphinx
 # for the documentation:
-# python-sphinx, python-logilab-common, python-unittest2,
+# python-sphinx, python-logilab-common, python-unittest2, logilab-doctools, logilab-xml
 Standards-Version: 3.9.1
 Homepage: http://www.cubicweb.org
 XS-Python-Version: >= 2.5, << 2.7
@@ -35,7 +35,7 @@
 Conflicts: cubicweb-multisources
 Replaces: cubicweb-multisources
 Provides: cubicweb-multisources
-Depends: ${misc:Depends}, ${python:Depends}, cubicweb-common (= ${source:Version}), cubicweb-ctl (= ${source:Version}), python-logilab-database (>= 1.5.0), cubicweb-postgresql-support | cubicweb-mysql-support | python-pysqlite2
+Depends: ${misc:Depends}, ${python:Depends}, cubicweb-common (= ${source:Version}), cubicweb-ctl (= ${source:Version}), python-logilab-database (>= 1.5.0), cubicweb-postgresql-support | cubicweb-mysql-support | python-pysqlite2, python-logilab-common (>= 0.56.2)
 Recommends: pyro (<< 4.0.0), cubicweb-documentation (= ${source:Version})
 Description: server part of the CubicWeb framework
  CubicWeb is a semantic web application framework.
@@ -70,7 +70,7 @@
 Architecture: all
 XB-Python-Version: ${python:Versions}
 Provides: cubicweb-web-frontend
-Depends: ${misc:Depends}, ${python:Depends}, cubicweb-web (= ${source:Version}), cubicweb-ctl (= ${source:Version}), python-twisted-web
+Depends: ${misc:Depends}, ${python:Depends}, cubicweb-web (= ${source:Version}), cubicweb-ctl (= ${source:Version}), python-twisted-web, python-logilab-common (>= 0.56.2)
 Recommends: pyro (<< 4.0.0), cubicweb-documentation (= ${source:Version})
 Description: twisted-based web interface for the CubicWeb framework
  CubicWeb is a semantic web application framework.
@@ -99,7 +99,7 @@
 Package: cubicweb-common
 Architecture: all
 XB-Python-Version: ${python:Versions}
-Depends: ${misc:Depends}, ${python:Depends}, graphviz, gettext, python-logilab-mtconverter (>= 0.8.0), python-logilab-common (>= 0.55.2), python-yams (>= 0.32.0), python-rql (>= 0.28.0), python-lxml
+Depends: ${misc:Depends}, ${python:Depends}, graphviz, gettext, python-logilab-mtconverter (>= 0.8.0), python-logilab-common (>= 0.55.2), python-yams (>= 0.33.0), python-rql (>= 0.28.0), python-lxml
 Recommends: python-simpletal (>= 4.0), python-crypto
 Conflicts: cubicweb-core
 Replaces: cubicweb-core
--- a/debian/cubicweb-ctl.cubicweb.init	Mon Sep 26 18:37:23 2011 +0200
+++ b/debian/cubicweb-ctl.cubicweb.init	Fri Dec 09 12:08:27 2011 +0100
@@ -4,8 +4,8 @@
 # Provides:          cubicweb
 # Required-Start:    $remote_fs $syslog $local_fs $network
 # Required-Stop:     $remote_fs $syslog $local_fs $network
-# Should-Start:      $postgresql $pyro-nsd
-# Should-Stop:       $postgresql $pyro-nsd
+# Should-Start:      postgresql pyro-nsd
+# Should-Stop:       postgresql pyro-nsd
 # Default-Start:     2 3 4 5
 # Default-Stop:      0 1 6
 # Short-Description: Start cubicweb application at boot time
--- a/debian/rules	Mon Sep 26 18:37:23 2011 +0200
+++ b/debian/rules	Fri Dec 09 12:08:27 2011 +0100
@@ -10,14 +10,11 @@
 build: build-stamp
 build-stamp:
 	dh_testdir
-	# XXX doesn't work if logilab-doctools, logilab-xml are not in build depends
-	# and I can't get pbuilder find them in its chroot :(
-	# cd doc && make
-	# FIXME cleanup and use sphinx-build as build-depends ?
 	NO_SETUPTOOLS=1 python setup.py build
-	# XXX uncomment this and associated build-depends in control
-	#when necessary sphinx version is in all built distribution
-	#PYTHONPATH=$(CURDIR)/.. $(MAKE) -C doc/book/en all
+	# documentation build is now made optional since it can break for old
+	# distributions and we don't want to block a new release of CubicWeb
+	# because of documentation issues.
+	-PYTHONPATH=$(CURDIR)/.. $(MAKE) -C doc/book/en all
 	touch build-stamp
 
 clean:
@@ -73,7 +70,7 @@
 	dh_installman -i
 	dh_installchangelogs -i
 	dh_link -i
-	dh_compress -i -X.py -X.ini -X.xml
+	dh_compress -i -X.py -X.ini -X.xml -X.js -X.rst
 	dh_fixperms -i
 	dh_installdeb -i
 	dh_gencontrol  -i
--- a/devtools/__init__.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/devtools/__init__.py	Fri Dec 09 12:08:27 2011 +0100
@@ -28,15 +28,17 @@
 import pickle
 import glob
 import warnings
+from hashlib import sha1 # pylint: disable=E0611
 from datetime import timedelta
 from os.path import (abspath, join, exists, basename, dirname, normpath, split,
                      isfile, isabs, splitext, isdir, expanduser)
 from functools import partial
-import hashlib
 
 from logilab.common.date import strptime
 from logilab.common.decorators import cached, clear_cache
-from cubicweb import CW_SOFTWARE_ROOT, ConfigurationError, schema, cwconfig, BadConnectionId
+
+from cubicweb import ConfigurationError, ExecutionError, BadConnectionId
+from cubicweb import CW_SOFTWARE_ROOT, schema, cwconfig
 from cubicweb.server.serverconfig import ServerConfiguration
 from cubicweb.etwist.twconfig import TwistedConfiguration
 
@@ -91,7 +93,7 @@
     """ Idea: this is less costly than a full re-creation of the repo object.
     off:
     * session are closed,
-    * pools are closed
+    * cnxsets are closed
     * system source is shutdown
     """
     if not repo._needs_refresh:
@@ -102,8 +104,8 @@
                 repo.close(sessionid)
             except BadConnectionId: #this is strange ? thread issue ?
                 print 'XXX unknown session', sessionid
-        for pool in repo.pools:
-            pool.close(True)
+        for cnxset in repo.cnxsets:
+            cnxset.close(True)
         repo.system_source.shutdown()
         repo._needs_refresh = True
         repo._has_started = False
@@ -111,12 +113,12 @@
 def turn_repo_on(repo):
     """Idea: this is less costly than a full re-creation of the repo object.
     on:
-    * pools are connected
+    * cnxsets are connected
     * cache are cleared
     """
     if repo._needs_refresh:
-        for pool in repo.pools:
-            pool.reconnect()
+        for cnxset in repo.cnxsets:
+            cnxset.reconnect()
         repo._type_source_cache = {}
         repo._extid_cache = {}
         repo.querier._rql_cache = {}
@@ -197,7 +199,10 @@
         directory from wich tests are launched or by specifying an alternative
         sources file using self.sourcefile.
         """
-        sources = super(TestServerConfiguration, self).sources()
+        try:
+            sources = super(TestServerConfiguration, self).sources()
+        except ExecutionError:
+            sources = {}
         if not sources:
             sources = DEFAULT_SOURCES
         if 'admin' not in sources:
@@ -207,9 +212,6 @@
     # web config methods needed here for cases when we use this config as a web
     # config
 
-    def instance_md5_version(self):
-        return ''
-
     def default_base_url(self):
         return BASE_URL
 
@@ -258,8 +260,9 @@
     Example usage::
 
       class MyTests(CubicWebTC):
-          _config = RealDatabseConfiguration('myapp',
-                                             sourcefile='/path/to/sources')
+          _config = RealDatabaseConfiguration('myapp',
+                                              sourcefile='/path/to/sources')
+
           def test_something(self):
               rset = self.execute('Any X WHERE X is CWUser')
               self.view('foaf', rset)
@@ -475,12 +478,11 @@
             repo = self.get_repo(startup=True)
             cnx = self.get_cnx()
             session = repo._sessions[cnx.sessionid]
-            session.set_pool()
+            session.set_cnxset()
             _commit = session.commit
-            def always_pooled_commit():
-                _commit()
-                session.set_pool()
-            session.commit = always_pooled_commit
+            def keep_cnxset_commit():
+                _commit(free_cnxset=False)
+            session.commit = keep_cnxset_commit
             pre_setup_func(session, self.config)
             session.commit()
             cnx.close()
@@ -576,7 +578,7 @@
                 templcursor.close()
                 cnx.close()
             init_repository(self.config, interactive=False)
-        except:
+        except BaseException:
             if self.dbcnx is not None:
                 self.dbcnx.rollback()
             print >> sys.stderr, 'building', self.dbname, 'failed'
@@ -596,7 +598,7 @@
 
     @property
     def _config_id(self):
-        return hashlib.sha1(self.config.apphome).hexdigest()[:10]
+        return sha1(self.config.apphome).hexdigest()[:10]
 
     def _backup_name(self, db_id): # merge me with parent
         backup_name = '_'.join(('cache', self._config_id, self.dbname, db_id))
@@ -656,6 +658,25 @@
 class SQLiteTestDataBaseHandler(TestDataBaseHandler):
     DRIVER = 'sqlite'
 
+    __TMPDB = set()
+
+    @classmethod
+    def _cleanup_all_tmpdb(cls):
+        for dbpath in cls.__TMPDB:
+            cls._cleanup_database(dbpath)
+
+
+
+    def __init__(self, *args, **kwargs):
+        super(SQLiteTestDataBaseHandler, self).__init__(*args, **kwargs)
+        # use a dedicated base for each process.
+        if 'global-db-name' not in self.system_source:
+            self.system_source['global-db-name'] = self.system_source['db-name']
+            process_db = self.system_source['db-name'] + str(os.getpid())
+            self.system_source['db-name'] = process_db
+        process_db = self.absolute_dbfile() # update db-name to absolute path
+        self.__TMPDB.add(process_db)
+
     @staticmethod
     def _cleanup_database(dbfile):
         try:
@@ -664,6 +685,10 @@
         except OSError:
             pass
 
+    @property
+    def dbname(self):
+        return self.system_source['global-db-name']
+
     def absolute_dbfile(self):
         """absolute path of current database file"""
         dbfile = join(self._ensure_test_backup_db_dir(),
@@ -671,7 +696,6 @@
         self.config.sources()['system']['db-name'] = dbfile
         return dbfile
 
-
     def process_cache_entry(self, directory, dbname, db_id, entry):
         return entry.get('sqlite')
 
@@ -706,6 +730,9 @@
         self._cleanup_database(self.absolute_dbfile())
         init_repository(self.config, interactive=False)
 
+import atexit
+atexit.register(SQLiteTestDataBaseHandler._cleanup_all_tmpdb)
+
 
 def install_sqlite_patch(querier):
     """This patch hotfixes the following sqlite bug :
@@ -726,13 +753,13 @@
                             value = value.rsplit('.', 1)[0]
                             try:
                                 row[cellindex] = strptime(value, '%Y-%m-%d %H:%M:%S')
-                            except:
+                            except Exception:
                                 row[cellindex] = strptime(value, '%Y-%m-%d')
                         if vtype == 'Time' and type(value) is unicode:
                             found_date = True
                             try:
                                 row[cellindex] = strptime(value, '%H:%M:%S')
-                            except:
+                            except Exception:
                                 # DateTime used as Time?
                                 row[cellindex] = strptime(value, '%Y-%m-%d %H:%M:%S')
                         if vtype == 'Interval' and type(value) is int:
--- a/devtools/devctl.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/devtools/devctl.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -88,6 +88,9 @@
             continue
         if not hasattr(mod, '__file__'):
             continue
+        if mod.__file__ is None:
+            # odd/rare but real
+            continue
         for path in config.vregistry_path():
             if mod.__file__.startswith(path):
                 del sys.modules[name]
@@ -155,6 +158,9 @@
             add_msg(w, cstrtype)
     done = set()
     for eschema in sorted(schema.entities()):
+        if eschema.type in libschema:
+            done.add(eschema.description)
+    for eschema in sorted(schema.entities()):
         etype = eschema.type
         if etype not in libschema:
             add_msg(w, etype)
@@ -203,15 +209,19 @@
     w('# (no object form for final or symmetric relation types)\n')
     w('\n')
     for rschema in sorted(schema.relations()):
+        if rschema.type in libschema:
+            done.add(rschema.type)
+            done.add(rschema.description)
+    for rschema in sorted(schema.relations()):
         rtype = rschema.type
         if rtype not in libschema:
             # bw compat, necessary until all translation of relation are done
             # properly...
             add_msg(w, rtype)
+            done.add(rtype)
             if rschema.description and rschema.description not in done:
-                done.add(rschema.description)
                 add_msg(w, rschema.description)
-            done.add(rtype)
+            done.add(rschema.description)
             librschema = None
         else:
             librschema = libschema.rschema(rtype)
@@ -221,7 +231,7 @@
             for subjschema in rschema.subjects():
                 if not subjschema in libsubjects:
                     add_msg(w, rtype, subjschema.type)
-        if not (schema.rschema(rtype).final or rschema.symmetric):
+        if not (rschema.final or rschema.symmetric):
             if rschema not in NO_I18NCONTEXT:
                 libobjects = librschema and librschema.objects() or ()
                 for objschema in rschema.objects():
@@ -231,6 +241,14 @@
                 # bw compat, necessary until all translation of relation are
                 # done properly...
                 add_msg(w, '%s_object' % rtype)
+        for rdef in rschema.rdefs.itervalues():
+            if not rdef.description or rdef.description in done:
+                continue
+            if (librschema is None or
+                (rdef.subject, rdef.object) not in librschema.rdefs or
+                librschema.rdefs[(rdef.subject, rdef.object)].description != rdef.description):
+                add_msg(w, rdef.description)
+            done.add(rdef.description)
     for objid in _iter_vreg_objids(vreg, vregdone):
         add_msg(w, '%s_description' % objid)
         add_msg(w, objid)
--- a/devtools/fake.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/devtools/fake.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -63,8 +63,8 @@
         self._session_data = {}
         self._headers_in = Headers()
 
-    def set_cookie(self, cookie, key, maxage=300, expires=None):
-        super(FakeRequest, self).set_cookie(cookie, key, maxage=300, expires=None)
+    def set_cookie(self, name, value, maxage=300, expires=None, secure=False):
+        super(FakeRequest, self).set_cookie(name, value, maxage, expires, secure)
         cookie = self.get_response_header('Set-Cookie')
         self._headers_in.setHeader('Cookie', cookie)
 
@@ -138,13 +138,15 @@
 
 
 class FakeSession(RequestSessionBase):
-    read_security = write_security = True
-    set_read_security = set_write_security = lambda *args, **kwargs: None
 
-    def __init__(self, repo=None, user=None):
+    def __init__(self, repo=None, user=None, vreg=None):
         self.repo = repo
-        self.vreg = getattr(self.repo, 'vreg', CubicWebVRegistry(FakeConfig(), initlog=False))
-        self.pool = FakePool()
+        if vreg is None:
+            vreg = getattr(self.repo, 'vreg', None)
+        if vreg is None:
+            vreg = CubicWebVRegistry(FakeConfig(), initlog=False)
+        self.vreg = vreg
+        self.cnxset = FakeConnectionsSet()
         self.user = user or FakeUser()
         self.is_internal_session = False
         self.transaction_data = {}
@@ -162,6 +164,13 @@
     def set_entity_cache(self, entity):
         pass
 
+    # for use with enabled_security context manager
+    read_security = write_security = True
+    def init_security(self, *args):
+        return None, None
+    def reset_security(self, *args):
+        return
+
 class FakeRepo(object):
     querier = None
     def __init__(self, schema, vreg=None, config=None):
@@ -201,6 +210,6 @@
         self.uri = uri
 
 
-class FakePool(object):
+class FakeConnectionsSet(object):
     def source(self, uri):
         return FakeSource(uri)
--- a/devtools/fill.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/devtools/fill.py	Fri Dec 09 12:08:27 2011 +0100
@@ -275,9 +275,6 @@
     :param choice_func: a function that takes an entity type, an attrname and
                         returns acceptable values for this attribute
     """
-    # XXX HACK, remove or fix asap
-    if etype in set(('String', 'Int', 'Float', 'Boolean', 'Date', 'CWGroup', 'CWUser')):
-        return []
     queries = []
     for index in xrange(entity_num):
         restrictions = []
@@ -355,7 +352,7 @@
         if objtype:
             rql += ', %s is %s' % (selectvar, objtype)
         rset = cursor.execute(rql)
-    except:
+    except Exception:
         print "could restrict eid_list with given constraints (%r)" % constraints
         return []
     return set(eid for eid, in rset.rows)
--- a/devtools/livetest.py	Mon Sep 26 18:37:23 2011 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,194 +0,0 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-#
-# This file is part of CubicWeb.
-#
-# CubicWeb is free software: you can redistribute it and/or modify it under the
-# terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option)
-# any later version.
-#
-# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""provide utilies for web (live) unit testing
-
-"""
-
-import os
-import socket
-import logging
-from os.path import join, dirname, normpath, abspath
-from StringIO import StringIO
-
-#from twisted.application import service, strports
-# from twisted.internet import reactor, task
-from twisted.web2 import channel
-from twisted.web2 import server
-from twisted.web2 import static
-from twisted.internet import reactor
-from twisted.internet.error import CannotListenError
-
-from logilab.common.testlib import TestCase
-
-from cubicweb.dbapi import in_memory_repo_cnx
-from cubicweb.etwist.server import CubicWebRootResource
-from cubicweb.devtools import BaseApptestConfiguration, init_test_database
-
-
-
-def get_starturl(port=7777, login=None, passwd=None):
-    if login:
-        return 'http://%s:%s/view?login=%s&password=%s' % (socket.gethostname(), port, login, passwd)
-    else:
-        return 'http://%s:%s/' % (socket.gethostname(), port)
-
-
-class LivetestResource(CubicWebRootResource):
-    """redefines main resource to search for data files in several directories"""
-
-    def locateChild(self, request, segments):
-        """Indicate which resource to use to process down the URL's path"""
-        if len(segments) and segments[0] == 'data':
-            # Anything in data/ is treated as static files
-            datadir = self.config.locate_resource(segments[1])[0]
-            if datadir:
-                return static.File(str(datadir), segments[1:])
-        # Otherwise we use this single resource
-        return self, ()
-
-
-
-class LivetestConfiguration(BaseApptestConfiguration):
-    init_repository = False
-
-    def __init__(self, cube=None, sourcefile=None, pyro_name=None,
-                 log_threshold=logging.CRITICAL):
-        BaseApptestConfiguration.__init__(self, cube, log_threshold=log_threshold)
-        self.appid = pyro_name or cube
-        # don't change this, else some symlink problems may arise in some
-        # environment (e.g. mine (syt) ;o)
-        # XXX I'm afraid this test will prevent to run test from a production
-        # environment
-        self._sources = None
-        # instance cube test
-        if cube is not None:
-            self.apphome = self.cube_dir(cube)
-        elif 'web' in os.getcwd().split(os.sep):
-            # web test
-            self.apphome = join(normpath(join(dirname(__file__), '..')), 'web')
-        else:
-            # cube test
-            self.apphome = abspath('..')
-        self.sourcefile = sourcefile
-        self.global_set_option('realm', '')
-        self.use_pyro = pyro_name is not None
-
-    def pyro_enabled(self):
-        if self.use_pyro:
-            return True
-        else:
-            return False
-
-
-
-def make_site(cube, options=None):
-    from cubicweb.etwist import twconfig # trigger configuration registration
-    config = LivetestConfiguration(cube, options.sourcefile,
-                                   pyro_name=options.pyro_name,
-                                   log_threshold=logging.DEBUG)
-    init_test_database(config=config)
-    # if '-n' in sys.argv: # debug mode
-    cubicweb = LivetestResource(config, debug=True)
-    toplevel = cubicweb
-    website = server.Site(toplevel)
-    cube_dir = config.cube_dir(cube)
-    source = config.sources()['system']
-    for port in xrange(7777, 7798):
-        try:
-            reactor.listenTCP(port, channel.HTTPFactory(website))
-            saveconf(cube_dir, port, source['db-user'], source['db-password'])
-            break
-        except CannotListenError:
-            print "port %s already in use, I will try another one" % port
-    else:
-        raise
-    cubicweb.base_url = get_starturl(port=port)
-    print "you can go here : %s" % cubicweb.base_url
-
-def runserver():
-    reactor.run()
-
-def saveconf(templhome, port, user, passwd):
-    import pickle
-    conffile = file(join(templhome, 'test', 'livetest.conf'), 'w')
-
-    pickle.dump((port, user, passwd, get_starturl(port, user, passwd)),
-                conffile)
-    conffile.close()
-
-
-def loadconf(filename='livetest.conf'):
-    import pickle
-    return pickle.load(file(filename))
-
-
-def execute_scenario(filename, **kwargs):
-    """based on twill.parse.execute_file, but inserts cubicweb extensions"""
-    from twill.parse import _execute_script
-    stream = StringIO('extend_with cubicweb.devtools.cubicwebtwill\n' + file(filename).read())
-    kwargs['source'] = filename
-    _execute_script(stream, **kwargs)
-
-
-def hijack_twill_output(new_output):
-    from twill import commands as twc
-    from twill import browser as twb
-    twc.OUT = new_output
-    twb.OUT = new_output
-
-
-class LiveTestCase(TestCase):
-
-    sourcefile = None
-    cube = ''
-    def setUp(self):
-        assert self.cube, "You must specify a cube in your testcase"
-        # twill can be quite verbose ...
-        self.twill_output = StringIO()
-        hijack_twill_output(self.twill_output)
-        # build a config, and get a connection
-        self.config = LivetestConfiguration(self.cube, self.sourcefile)
-        _, user, passwd, _ = loadconf()
-        self.repo, self.cnx = in_memory_repo_cnx(self.config, user, password=passwd)
-        self.setup_db(self.cnx)
-
-    def tearDown(self):
-        self.teardown_db(self.cnx)
-
-
-    def setup_db(self, cnx):
-        """override setup_db() to setup your environment"""
-
-    def teardown_db(self, cnx):
-        """override teardown_db() to clean up your environment"""
-
-    def get_loggedurl(self):
-        port, user, passwd, logged_url = loadconf()
-        return logged_url
-
-    def get_anonurl(self):
-        port, _, _, _ = loadconf()
-        return 'http://%s:%s/view?login=anon&password=anon' % (
-            socket.gethostname(), port)
-
-    # convenience
-    execute_scenario = staticmethod(execute_scenario)
-
-
-if __name__ == '__main__':
-    runserver()
--- a/devtools/qunit.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/devtools/qunit.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,3 +1,21 @@
+# copyright 2010-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+
 import os, os.path as osp
 import signal
 from tempfile import mkdtemp, NamedTemporaryFile, TemporaryFile
--- a/devtools/repotest.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/devtools/repotest.py	Fri Dec 09 12:08:27 2011 +0100
@@ -148,8 +148,7 @@
 from cubicweb.server.sources.rql2sql import SQLGenerator, remove_unused_solutions
 
 class RQLGeneratorTC(TestCase):
-    schema = backend = None # set this in concret test
-
+    schema = backend = None # set this in concrete class
 
     @classmethod
     def setUpClass(cls):
@@ -197,7 +196,7 @@
 
 
 class BaseQuerierTC(TestCase):
-    repo = None # set this in concret test
+    repo = None # set this in concrete class
 
     def setUp(self):
         self.o = self.repo.querier
@@ -205,7 +204,7 @@
         self.ueid = self.session.user.eid
         assert self.ueid != -1
         self.repo._type_source_cache = {} # clear cache
-        self.pool = self.session.set_pool()
+        self.cnxset = self.session.set_cnxset()
         self.maxeid = self.get_max_eid()
         do_monkey_patch()
         self._dumb_sessions = []
@@ -213,7 +212,7 @@
     def get_max_eid(self):
         return self.session.execute('Any MAX(X)')[0][0]
     def cleanup(self):
-        self.session.set_pool()
+        self.session.set_cnxset()
         self.session.execute('DELETE Any X WHERE X eid > %s' % self.maxeid)
 
     def tearDown(self):
@@ -225,7 +224,7 @@
         for session in self._dumb_sessions:
             session.rollback()
             session.close()
-        self.repo._free_pool(self.pool)
+        self.repo._free_cnxset(self.cnxset)
         assert self.session.user.eid != -1
 
     def set_debug(self, debug):
@@ -263,7 +262,8 @@
         u = self.repo._build_user(self.session, self.session.user.eid)
         u._groups = set(groups)
         s = Session(u, self.repo)
-        s._threaddata.pool = self.pool
+        s._threaddata.cnxset = self.cnxset
+        s._threaddata.ctx_count = 1
         # register session to ensure it gets closed
         self._dumb_sessions.append(s)
         return s
@@ -273,7 +273,7 @@
 
     def commit(self):
         self.session.commit()
-        self.session.set_pool()
+        self.session.set_cnxset()
 
 
 class BasePlannerTC(BaseQuerierTC):
@@ -287,7 +287,7 @@
         # XXX source_defs
         self.o = self.repo.querier
         self.session = self.repo._sessions.values()[0]
-        self.pool = self.session.set_pool()
+        self.cnxset = self.session.set_cnxset()
         self.schema = self.o.schema
         self.sources = self.o._repo.sources
         self.system = self.sources[-1]
@@ -311,7 +311,7 @@
             del self.repo.sources_by_uri[source.uri]
         undo_monkey_patch()
         for session in self._dumb_sessions:
-            session._threaddata.pool = None
+            session._threaddata.cnxset = None
             session.close()
 
     def _prepare_plan(self, rql, kwargs=None):
@@ -328,9 +328,10 @@
 
 # monkey patch some methods to get predicatable results #######################
 
-from cubicweb.rqlrewrite import RQLRewriter
-_orig_insert_snippets = RQLRewriter.insert_snippets
-_orig_build_variantes = RQLRewriter.build_variantes
+from cubicweb import rqlrewrite
+_orig_iter_relations = rqlrewrite.iter_relations
+_orig_insert_snippets = rqlrewrite.RQLRewriter.insert_snippets
+_orig_build_variantes = rqlrewrite.RQLRewriter.build_variantes
 
 def _insert_snippets(self, snippets, varexistsmap=None):
     _orig_insert_snippets(self, sorted(snippets, snippet_cmp), varexistsmap)
@@ -414,9 +415,13 @@
 def _syntax_tree_search(*args, **kwargs):
     return deepcopy(_orig_syntax_tree_search(*args, **kwargs))
 
+def _ordered_iter_relations(stinfo):
+    return sorted(_orig_iter_relations(stinfo), key=lambda x:x.r_type)
+
 def do_monkey_patch():
-    RQLRewriter.insert_snippets = _insert_snippets
-    RQLRewriter.build_variantes = _build_variantes
+    rqlrewrite.iter_relations = _ordered_iter_relations
+    rqlrewrite.RQLRewriter.insert_snippets = _insert_snippets
+    rqlrewrite.RQLRewriter.build_variantes = _build_variantes
     ExecutionPlan._check_permissions = _check_permissions
     ExecutionPlan.tablesinorder = None
     ExecutionPlan.init_temp_table = _init_temp_table
@@ -425,8 +430,9 @@
     PyroRQLSource.syntax_tree_search = _syntax_tree_search
 
 def undo_monkey_patch():
-    RQLRewriter.insert_snippets = _orig_insert_snippets
-    RQLRewriter.build_variantes = _orig_build_variantes
+    rqlrewrite.iter_relations = _orig_iter_relations
+    rqlrewrite.RQLRewriter.insert_snippets = _orig_insert_snippets
+    rqlrewrite.RQLRewriter.build_variantes = _orig_build_variantes
     ExecutionPlan._check_permissions = _orig_check_permissions
     ExecutionPlan.init_temp_table = _orig_init_temp_table
     PartPlanInformation.merge_input_maps = _orig_merge_input_maps
--- a/devtools/stresstester.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/devtools/stresstester.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -38,7 +38,7 @@
   -o / --report-output <filename>
      Write profiler report into <filename> rather than on stdout
 
-Copyright (c) 2003-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
+Copyright (c) 2003-2011 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
 http://www.logilab.fr/ -- mailto:contact@logilab.fr
 """
 
@@ -73,9 +73,7 @@
                 start = clock()
                 try:
                     cursor.execute(query)
-                except KeyboardInterrupt:
-                    raise
-                except:
+                except Exception:
                     TB_LOCK.acquire()
                     traceback.print_exc()
                     TB_LOCK.release()
--- a/devtools/testlib.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/devtools/testlib.py	Fri Dec 09 12:08:27 2011 +0100
@@ -233,7 +233,7 @@
         # web resources
         try:
             config.global_set_option('embed-allowed', re.compile('.*'))
-        except: # not in server only configuration
+        except Exception: # not in server only configuration
             pass
 
     #XXX this doesn't need to a be classmethod anymore
@@ -274,7 +274,7 @@
     def session(self):
         """return current server side session (using default manager account)"""
         session = self.repo._sessions[self.cnx.sessionid]
-        session.set_pool()
+        session.set_cnxset()
         return session
 
     @property
@@ -458,7 +458,7 @@
         try:
             return self.cnx.commit()
         finally:
-            self.session.set_pool() # ensure pool still set after commit
+            self.session.set_cnxset() # ensure cnxset still set after commit
 
     @nocoverage
     def rollback(self):
@@ -467,7 +467,7 @@
         except dbapi.ProgrammingError:
             pass # connection closed
         finally:
-            self.session.set_pool() # ensure pool still set after commit
+            self.session.set_cnxset() # ensure cnxset still set after commit
 
     # # server side db api #######################################################
 
@@ -475,7 +475,7 @@
         if eid_key is not None:
             warn('[3.8] eid_key is deprecated, you can safely remove this argument',
                  DeprecationWarning, stacklevel=2)
-        self.session.set_pool()
+        self.session.set_cnxset()
         return self.session.execute(rql, args)
 
     # other utilities #########################################################
@@ -493,13 +493,17 @@
 
     def assertModificationDateGreater(self, entity, olddate):
         entity.cw_attr_cache.pop('modification_date', None)
-        self.failUnless(entity.modification_date > olddate)
+        self.assertTrue(entity.modification_date > olddate)
 
     def assertItemsEqual(self, it1, it2, *args, **kwargs):
         it1 = set(getattr(x, 'eid', x) for x in it1)
         it2 = set(getattr(x, 'eid', x) for x in it2)
         super(CubicWebTC, self).assertItemsEqual(it1, it2, *args, **kwargs)
 
+    def assertMessageEqual(self, req, params, expected_msg):
+        msg = req.session.data[params['_cwmsgid']]
+        self.assertEqual(msg, expected_msg)
+
     # workflow utilities #######################################################
 
     def assertPossibleTransitions(self, entity, expected):
@@ -568,6 +572,8 @@
             if views:
                 try:
                     view = viewsvreg._select_best(views, req, rset=rset)
+                    if view is None:
+                        raise NoSelectableObject((req,), {'rset':rset}, views)
                     if view.linkable():
                         yield view
                     else:
@@ -722,7 +728,7 @@
         self.assertEqual(session.login, origsession.login)
         self.assertEqual(session.anonymous_session, False)
         self.assertEqual(path, 'view')
-        self.assertEqual(params, {'__message': 'welcome %s !' % req.user.login})
+        self.assertMessageEqual(req, params, 'welcome %s !' % req.user.login)
 
     def assertAuthFailure(self, req, nbsessions=0):
         self.app.connect(req)
@@ -806,15 +812,13 @@
         """
         try:
             output = viewfunc(**kwargs)
-        except (SystemExit, KeyboardInterrupt):
-            raise
-        except:
+        except Exception:
             # hijack exception: generative tests stop when the exception
             # is not an AssertionError
             klass, exc, tcbk = sys.exc_info()
             try:
                 msg = '[%s in %s] %s' % (klass, view.__regid__, exc)
-            except:
+            except Exception:
                 msg = '[%s in %s] undisplayable exception' % (klass, view.__regid__)
             raise AssertionError, msg, tcbk
         return self._check_html(output, view, template)
@@ -856,9 +860,7 @@
     def assertWellFormed(self, validator, content, context=None):
         try:
             return validator.parse_string(content)
-        except (SystemExit, KeyboardInterrupt):
-            raise
-        except:
+        except Exception:
             # hijack exception: generative tests stop when the exception
             # is not an AssertionError
             klass, exc, tcbk = sys.exc_info()
@@ -870,7 +872,7 @@
 
             try:
                 str_exc = str(exc)
-            except:
+            except Exception:
                 str_exc = 'undisplayable exception'
             msg += str_exc
             if content is not None:
@@ -1165,34 +1167,34 @@
         pass
 
 
-def vreg_instrumentize(testclass):
-    # XXX broken
-    from cubicweb.devtools.apptest import TestEnvironment
-    env = testclass._env = TestEnvironment('data', configcls=testclass.configcls)
-    for reg in env.vreg.values():
-        reg._selected = {}
-        try:
-            orig_select_best = reg.__class__.__orig_select_best
-        except:
-            orig_select_best = reg.__class__._select_best
-        def instr_select_best(self, *args, **kwargs):
-            selected = orig_select_best(self, *args, **kwargs)
-            try:
-                self._selected[selected.__class__] += 1
-            except KeyError:
-                self._selected[selected.__class__] = 1
-            except AttributeError:
-                pass # occurs on reg used to restore database
-            return selected
-        reg.__class__._select_best = instr_select_best
-        reg.__class__.__orig_select_best = orig_select_best
+# def vreg_instrumentize(testclass):
+#     # XXX broken
+#     from cubicweb.devtools.apptest import TestEnvironment
+#     env = testclass._env = TestEnvironment('data', configcls=testclass.configcls)
+#     for reg in env.vreg.values():
+#         reg._selected = {}
+#         try:
+#             orig_select_best = reg.__class__.__orig_select_best
+#         except Exception:
+#             orig_select_best = reg.__class__._select_best
+#         def instr_select_best(self, *args, **kwargs):
+#             selected = orig_select_best(self, *args, **kwargs)
+#             try:
+#                 self._selected[selected.__class__] += 1
+#             except KeyError:
+#                 self._selected[selected.__class__] = 1
+#             except AttributeError:
+#                 pass # occurs on reg used to restore database
+#             return selected
+#         reg.__class__._select_best = instr_select_best
+#         reg.__class__.__orig_select_best = orig_select_best
 
 
-def print_untested_objects(testclass, skipregs=('hooks', 'etypes')):
-    for regname, reg in testclass._env.vreg.iteritems():
-        if regname in skipregs:
-            continue
-        for appobjects in reg.itervalues():
-            for appobject in appobjects:
-                if not reg._selected.get(appobject):
-                    print 'not tested', regname, appobject
+# def print_untested_objects(testclass, skipregs=('hooks', 'etypes')):
+#     for regname, reg in testclass._env.vreg.iteritems():
+#         if regname in skipregs:
+#             continue
+#         for appobjects in reg.itervalues():
+#             for appobject in appobjects:
+#                 if not reg._selected.get(appobject):
+#                     print 'not tested', regname, appobject
--- a/doc/book/en/admin/index.rst	Mon Sep 26 18:37:23 2011 +0200
+++ b/doc/book/en/admin/index.rst	Fri Dec 09 12:08:27 2011 +0100
@@ -23,7 +23,6 @@
    multisources
    ldap
    pyro
-   gae
    migration
    additional-tips
    rql-logs
--- a/doc/book/en/admin/instance-config.rst	Mon Sep 26 18:37:23 2011 +0200
+++ b/doc/book/en/admin/instance-config.rst	Fri Dec 09 12:08:27 2011 +0100
@@ -43,18 +43,33 @@
 use apache (for example) for redirection and the variable `main.https-url`
 of configuration file.
 
+For this to work you have to activate the following apache modules :
+
+* rewrite
+* proxy
+* http_proxy
+
+The command on Debian based systems for that is ::
+
+  a2enmod rewrite http_proxy proxy
+  /etc/init.d/apache2 restart
+
 :Example:
 
    For an apache redirection of a site accessible via `http://localhost/demo`
    and `https://localhost/demo` and actually running on port 8080, it
    takes to the http:::
 
+     ProxyPreserveHost On
+     RewriteEngine On
      RewriteCond %{REQUEST_URI} ^/demo
      RewriteRule ^/demo$ /demo/
      RewriteRule ^/demo/(.*) http://127.0.0.1:8080/$1 [L,P]
 
    and for the https:::
 
+     ProxyPreserveHost On
+     RewriteEngine On
      RewriteCond %{REQUEST_URI} ^/ demo
      RewriteRule ^/demo$/demo/
      RewriteRule ^/demo/(.*) http://127.0.0.1:8080/https/$1 [L,P]
@@ -65,6 +80,11 @@
      base-url = http://localhost/demo
      https-url = https://localhost/demo
 
+Notice that if you simply want a site accessible through https, not *both* http
+and https, simply set `base-url` to the https url and the first section into your
+apache configuration (as you would have to do for an http configuration with an
+apache front-end).
+
 Setting up the web client
 -------------------------
 :`web.embed-allowed`:
--- a/doc/book/en/admin/ldap.rst	Mon Sep 26 18:37:23 2011 +0200
+++ b/doc/book/en/admin/ldap.rst	Fri Dec 09 12:08:27 2011 +0100
@@ -27,7 +27,7 @@
 Credential checks are _always_ done against the LDAP server.
 
 The base functionality for this is in
-cubicweb/server/sources/ldapuser.py.
+:file:`cubicweb/server/sources/ldapuser.py`.
 
 Configurations options
 ----------------------
@@ -39,14 +39,14 @@
 
 LDAP server connection options:
 
-* host: may contain port information using <host>:<port> notation.
-* protocol (choices are ldap, ldaps, ldapi)
-* auth-mode (choices are simple, cram_md5, digest_md5, gssapi, support
+* `host`, may contain port information using <host>:<port> notation.
+* `protocol`, choices are ldap, ldaps, ldapi
+* `auth-mode`, (choices are simple, cram_md5, digest_md5, gssapi, support
   for the later being partial as of now)
-* auth-realm, realm to use when using gssapi/kerberos authentication
-* data-cnx-dn, user dn to use to open data connection to the ldap (eg
+* `auth-realm`, realm to use when using gssapi/kerberos authentication
+* `data-cnx-dn`, user dn to use to open data connection to the ldap (eg
   used to respond to rql queries)
-* data-cnx-password, password to use to open data connection to the
+* `data-cnx-password`, password to use to open data connection to the
   ldap (eg used to respond to rql queries)
 
 If the LDAP server accepts anonymous binds, then it is possible to
@@ -55,16 +55,30 @@
 
 LDAP schema mapping:
 
-* user-base-dn, base DN to lookup for users
-* user-scope, user search scope
-* user-classes, classes of user
-* user-attrs-map, map from ldap user attributes to cubicweb attributes
-* user-login-attr, attribute used as login on authentication
+* `user-base-dn`, base DN to lookup for users
+* `user-scope`, user search scope
+* `user-classes`, classes of user
+* `user-attrs-map`, map from ldap user attributes to cubicweb attributes
+* `user-login-attr`, attribute used as login on authentication
 
 LDAP source internal configuration:
 
-* user-default-group, name of a group in which ldap users will be by
+* `user-default-group`, name of a group in which ldap users will be by
   default. You can set multiple groups by separating them by a comma
-* synchronization-interval, interval between synchronization with the
+* `synchronization-interval`, interval between synchronization with the
   ldap directory in seconds (default to once a day)
-* life time of query cache in minutes (default to two hours).
+* `cache-life-time`, life time of query cache in minutes (default to two hours).
+
+Other notes
+-----------
+
+* Yes, cubicweb is able to start if ldap cannot be reached, even on c-c start,
+  though that will slow down the instance, since it will indefinitly attempt
+  to connect to the ldap on each query on users.
+
+* Changing the name of the ldap server in your script is fine, changing the base
+  DN isn't since it's used to identify already known users from others
+
+* You can use the :class:`CWSourceHostConfig` to have variants for a source
+  configuration according to the host the instance is running on. To do so go on
+  the source's view from the sources management view.
--- a/doc/book/en/admin/pyro.rst	Mon Sep 26 18:37:23 2011 +0200
+++ b/doc/book/en/admin/pyro.rst	Fri Dec 09 12:08:27 2011 +0100
@@ -1,14 +1,19 @@
 Working with a distributed client (using Pyro)
 ==============================================
 
+.. _UsingPyro:
+
 In some circumstances, it is practical to split the repository and
-web-client parts of the application, for load-balancing reasons. Or
+web-client parts of the application for load-balancing reasons. Or
 one wants to access the repository from independant scripts to consult
 or update the database.
 
+Prerequisites
+-------------
+
 For this to work, several steps have to be taken in order.
 
-You must first ensure that the apropriate software is installed and
+You must first ensure that the appropriate software is installed and
 running (see ref:`setup`)::
 
   pyro-nsd -x -p 6969
@@ -21,19 +26,40 @@
 
   pyro-instance-id=myinstancename
 
-Finally, the client (for instance in the case of a script) must
-connect specifically, as in the following example code:
+Connect to the CubicWeb repository from a python script
+-------------------------------------------------------
+
+Assuming pyro-nsd is running and your instance is configured with ``pyro-server=yes``,
+you will be able to use :mod:`cubicweb.dbapi` api to initiate the connection.
+
+.. note::
+    Regardless of whether your instance is pyro activated or not, you can still
+    achieve this by using cubicweb-ctl shell scripts in a simpler way, as by default
+    it creates a repository 'in-memory' instead of connecting through pyro. That
+    also means you've to be on the host where the instance is running.
+
+Finally, the client (for instance a python script) must connect specifically
+as in the following example code:
 
 .. sourcecode:: python
 
     from cubicweb import dbapi
 
-    def pyro_connect(instname, login, password, pyro_ns_host):
-        cnx = dbapi.connect(instname, login, password, pyro_ns_host)
-        cnx.load_appobjects()
-        return cnx
+    cnx = dbapi.connect(database='instance-id', user='admin', password='admin')
+    cnx.load_appobjects()
+    cur = cnx.cursor()
+    for name in (u'Personal', u'Professional', u'Computers'):
+        cur.execute('INSERT Tag T: T name %(n)s', {'n': name})
+    cnx.commit()
 
-The 'cnx.load_appobjects()' line is optional. Without it you will get
-data through the connection roughly as you would from a DBAPI
-connection. With it, provided the cubicweb-client part is installed
-and accessible, you get the ORM goodies.
+Calling :meth:`cubicweb.dbapi.load_appobjects` will populate the `cubicweb
+registries`_ with the application objects installed on the host where the script
+runs. You'll then be allowed to use the ORM goodies and custom entity methods and
+views. Of course this is optional, without it you can still get the repository
+data through the connection, but in a rough way: only RQL cursors will be
+available, e.g. you can't even build entity objects from the result set.
+
+
+
+.. _cubicweb registries: VRegistryIntro_
+
--- a/doc/book/en/admin/setup-windows.rst	Mon Sep 26 18:37:23 2011 +0200
+++ b/doc/book/en/admin/setup-windows.rst	Fri Dec 09 12:08:27 2011 +0100
@@ -8,13 +8,12 @@
 Setting up a Windows development environment is not too complicated
 but it requires a series of small steps.
 
-We proposed an example of a typical |cubicweb| installation on Windows
+We propose an example of a typical |cubicweb| installation on Windows
 from sources. We assume everything goes into ``C:\\`` and for any
 package, without version specification, "the latest is
 the greatest".
 
-Take into the mind that adjusting the installation drive should be
-straightforward.
+Mind that adjusting the installation drive should be straightforward.
 
 
 
@@ -24,9 +23,9 @@
 |cubicweb| requires some base elements that must be installed to run
 correctly. So, first of all, you must install them :
 
-* python >= 2.5 and < 3 
-  (`Download Python <http://www.python.org/download/>`_). 
-  You can also consider the Python(x,y) distribution 
+* python >= 2.5 and < 3
+  (`Download Python <http://www.python.org/download/>`_).
+  You can also consider the Python(x,y) distribution
   (`Download Python(x,y) <http://code.google.com/p/pythonxy/wiki/Downloads>`_)
   as it makes things easier for Windows user by wrapping in a single installer
   python 2.5 plus numerous useful third-party modules and
@@ -34,24 +33,24 @@
   IDE for Python under Windows).
 
 * `Twisted <http://twistedmatrix.com/trac/>`_ is an event-driven
-  networking engine 
+  networking engine
   (`Download Twisted <http://twistedmatrix.com/trac/>`_)
 
 * `lxml <http://codespeak.net/lxml/>`_ library
-  (version >=2.2.1) allows working with XML and HTML 
+  (version >=2.2.1) allows working with XML and HTML
   (`Download lxml <http://pypi.python.org/pypi/lxml/2.2.1>`_)
 
-* `Postgresql 8.4 <http://www.postgresql.org/>`_, 
-  an object-relational database system 
-  (`Download Postgresql <http://www.enterprisedb.com/products/pgdownload.do#windows>`_) 
-  and its python drivers 
+* `Postgresql 8.4 <http://www.postgresql.org/>`_,
+  an object-relational database system
+  (`Download Postgresql <http://www.enterprisedb.com/products/pgdownload.do#windows>`_)
+  and its python drivers
   (`Download psycopg <http://www.stickpeople.com/projects/python/win-psycopg/#Version2>`_)
 
-* A recent version of `gettext` 
+* A recent version of `gettext`
   (`Download gettext <http://download.logilab.org/pub/gettext/gettext-0.17-win32-setup.exe>`_).
 
-* `rql <http://www.logilab.org/project/rql>`_, 
-  the recent version of the Relationship Query Language parser 
+* `rql <http://www.logilab.org/project/rql>`_,
+  the recent version of the Relationship Query Language parser
   (`Download rql <http://download.logilab.org/pub/rql/rql-0.26.3.win32-py2.5.exe>`_).
 
 Install optional elements
@@ -60,22 +59,22 @@
 We recommend you to install the following elements. They are not
 mandatory but they activate very interesting features in |cubicweb|:
 
-* `Simplejson <http://pypi.python.org/pypi/simplejson/>`_ 
-  must be installed if you have python <= 2.5 
+* `Simplejson <http://pypi.python.org/pypi/simplejson/>`_
+  must be installed if you have python <= 2.5
   (`Download simplejson <http://www.osuch.org/python-simplejson%3Awin32>`_).
   It is included in the Standard library from Python >= 2.6.
 
-* `Pyro <http://www.xs4all.nl/~irmen/pyro3/>`_ 
+* `Pyro <http://www.xs4all.nl/~irmen/pyro3/>`_
   enables remote access to cubicweb repository instances.
   It also allows the client and the server not running on the same machine
   (`Download Pyro <http://www.xs4all.nl/~irmen/pyro3/download/>`_).
 
-* `python-ldap <http://pypi.python.org/pypi/python-ldap>`_ 
+* `python-ldap <http://pypi.python.org/pypi/python-ldap>`_
   provides access to LDAP/Active directory directories
   (`Download python-ldap <http://www.osuch.org/python-ldap>`_).
 
-* `graphviz <http://www.graphviz.org/>`_ 
-  which allow schema drawings.  
+* `graphviz <http://www.graphviz.org/>`_
+  which allow schema drawings.
   (`Download graphviz <http://www.graphviz.org/Download_windows.php>`_).
   It is quite recommended (albeit not mandatory).
 
@@ -88,28 +87,27 @@
 Some additional tools could be useful to develop :ref:`cubes <AvailableCubes>`
 with the framework.
 
-* `mercurial <http://mercurial.selenic.com/>`_ and its standard
-  windows GUI (`TortoiseHG <http://tortoisehg.bitbucket.org/>`_) 
-  allow you to get the source code of |cubicweb| from control version
-  repositories. So you will be able to get the latest development
-  version in an easy way 
+* `mercurial <http://mercurial.selenic.com/>`_ and its standard windows GUI
+  (`TortoiseHG <http://tortoisehg.bitbucket.org/>`_) allow you to get the source
+  code of |cubicweb| from control version repositories. So you will be able to
+  get the latest development version and pre-release bugfixes in an easy way
   (`Download mercurial <http://bitbucket.org/tortoisehg/stable/wiki/download>`_).
 
 * You can also consider the ssh client `Putty` in order to peruse
   mercurial over ssh (`Download <http://www.putty.org/>`_).
 
 * If you are an Eclipse user, mercurial can be integrated using the
-  `MercurialEclipse` plugin 
+  `MercurialEclipse` plugin
   (`Home page <http://www.vectrace.com/mercurialeclipse/>`_).
 
 Getting the sources
 -------------------
 
-There are tow ways to get the sources of |cubicweb| and its
+There are two ways to get the sources of |cubicweb| and its
 :ref:`cubes <AvailableCubes>`:
 
-* download the latest release (:ref:`SourceInstallation`) 
-* get the development version using Mercurial 
+* download the latest release (:ref:`SourceInstallation`)
+* get the development version using Mercurial
   (:ref:`MercurialInstallation`)
 
 Environment variables
@@ -123,8 +121,8 @@
 it. That opens a small window allowing edition of user-related and system-wide
 variables.
 
-We will consider only user variables. First, the ``PATH`` variable. Assuming 
-you are logged as user *Jane*, add the following paths, separated by 
+We will consider only user variables. First, the ``PATH`` variable. Assuming
+you are logged as user *Jane*, add the following paths, separated by
 semi-colons::
 
   C:\Documents and Settings\Jane\My Documents\Python\cubicweb\cubicweb\bin
@@ -154,3 +152,6 @@
 Then start the service with::
 
   net start cubicweb-my_instance
+
+In case this does not work, you should be able to see error reports in
+the application log, using the windows event log viewer.
--- a/doc/book/en/admin/setup.rst	Mon Sep 26 18:37:23 2011 +0200
+++ b/doc/book/en/admin/setup.rst	Fri Dec 09 12:08:27 2011 +0100
@@ -91,10 +91,10 @@
   :ref:`cubicweb with other database <DatabaseInstallation>` using the following
   virtual packages :
 
-  * `cubicweb-postgresql-support` contains the necessary dependency for
+  * `cubicweb-postgresql-support` contains the necessary dependencies for
     using :ref:`cubicweb with postgresql datatabase <PostgresqlConfiguration>`
 
-  * `cubicweb-mysql-support` contains the necessary dependency for using
+  * `cubicweb-mysql-support` contains the necessary dependencies for using
     :ref:`cubicweb with mysql database <MySqlConfiguration>`.
 
 .. _`list of sources`: http://wiki.debian.org/SourcesList
@@ -144,9 +144,9 @@
 .. _`virtualenv`: http://virtualenv.openplans.org/
 
 A working compilation chain is needed to build the modules that include C
-extensions. If you definitively wont, installing `Lxml
-<http://codespeak.net/lxml/>`_, `Twisted <http://twistedmatrix.com/trac/>`_ and
-`libgecode <http://www.gecode.org/>`_ will help.
+extensions. If you definitively wont, installing `Lxml <http://lxml.de/>`_,
+`Twisted Web <http://twistedmatrix.com/trac/wiki/Downloads/>`_ and `libgecode
+<http://www.gecode.org/>`_ will help.
 
 To install |cubicweb| and its dependencies, just run::
 
--- a/doc/book/en/annexes/faq.rst	Mon Sep 26 18:37:23 2011 +0200
+++ b/doc/book/en/annexes/faq.rst	Fri Dec 09 12:08:27 2011 +0100
@@ -148,25 +148,12 @@
     to anonymous user, which will automatically execute what is
     decribed above.
 
-How to load data from a script ?
---------------------------------
-
-The following script aims at loading data within a script assuming pyro-nsd is
-running and your instance is configured with ``pyro-server=yes``, otherwise
-you would not be able to use dbapi.
-
-.. sourcecode:: python
+How to load data from a python script ?
+---------------------------------------
+Please, refer to the `Pyro chapter`_.
 
-    from cubicweb import dbapi
+.. _`Pyro chapter`: UsingPyro_
 
-    cnx = dbapi.connect(database='instance-id', user='admin', password='admin')
-    cur = cnx.cursor()
-    for name in (u'Personal', u'Professional', u'Computers'):
-        cur.execute('INSERT Tag T: T name %(n)s', {'n': name})
-    cnx.commit()
-
-Wether your instance as pyro activated or not, you can still acheive this by
-using cubicweb-ctl shell scripts.
 
 How to format an entity date attribute ?
 ----------------------------------------
--- a/doc/book/en/annexes/rql/implementation.rst	Mon Sep 26 18:37:23 2011 +0200
+++ b/doc/book/en/annexes/rql/implementation.rst	Fri Dec 09 12:08:27 2011 +0100
@@ -11,7 +11,7 @@
 expression.
 ::
 
-     statement:: = (select | delete | insert | update) ';'
+     statement ::= (select | delete | insert | update) ';'
 
 
      # select specific rules
@@ -130,18 +130,7 @@
   with an OR. I do not think that the negation is supported on this type of
   relation (XXX to be confirmed).
 
-- Relations defining the variables must be left to those using them.  For
-  example::
-
-     Point P where P abs X, P ord Y, P value X+Y
-
-  is valid, but::
-
-     Point P where P abs X, P value X+Y, P ord Y
-
-  is not.
-
-- missing proper explicit type conversion, COALESCE and certainly other things...
+- missing COALESCE and certainly other things...
 
 - writing an rql query requires knowledge of the used schema (with real relation
   names and entities, not those viewed in the user interface). On the other
--- a/doc/book/en/annexes/rql/language.rst	Mon Sep 26 18:37:23 2011 +0200
+++ b/doc/book/en/annexes/rql/language.rst	Fri Dec 09 12:08:27 2011 +0100
@@ -5,288 +5,584 @@
 RQL syntax
 ----------
 
+.. _RQLKeywords:
+
 Reserved keywords
 ~~~~~~~~~~~~~~~~~
-The keywords are not case sensitive.
 
 ::
 
   AND, ASC, BEING, DELETE, DESC, DISTINCT, EXISTS, FALSE, GROUPBY,
-  HAVING, ILIKE, IN, INSERT, LIKE, LIMIT, NOT, NOW, NULL, OFFSET,
+  HAVING, ILIKE, INSERT, LIKE, LIMIT, NOT, NOW, NULL, OFFSET,
   OR, ORDERBY, SET, TODAY, TRUE, UNION, WHERE, WITH
 
+The keywords are not case sensitive. You should not use them when defining your
+schema, or as RQL variable names.
 
-Variables and Typing
+
+.. _RQLCase:
+
+Case
+~~~~
+
+* Variables should be all upper-cased.
+
+* Relation should be all lower-cased and match exactly names of relations defined
+  in the schema.
+
+* Entity types should start with an upper cased letter and be followed by at least
+  a lower cased letter.
+
+
+.. _RQLVariables:
+
+Variables and typing
 ~~~~~~~~~~~~~~~~~~~~
 
-With RQL, we do not distinguish between entities and attributes. The
-value of an attribute is considered an entity of a particular type (see
-below), linked to one (real) entity by a relation called the name of
-the attribute.
+Entities and values to browse and/or select are represented in the query by
+*variables* that must be written in capital letters.
+
+With RQL, we do not distinguish between entities and attributes. The value of an
+attribute is considered as an entity of a particular type (see below), linked to
+one (real) entity by a relation called the name of the attribute, where the
+entity is the subject and the attribute the object.
+
+The possible type(s) for each variable is derived from the schema according to
+the constraints expressed above and thanks to the relations between each
+variable.
 
-Entities and values to browse and/or select are represented in
-the query by *variables* that must be written in capital letters.
+We can restrict the possible types for a variable using the special relation
+**is** in the restrictions.
+
 
-There is a special type **Any**, referring to a non specific type.
+Virtual relations
+~~~~~~~~~~~~~~~~~
 
-We can restrict the possible types for a variable using the
-special relation **is** in the constraints.
+Those relations may only be used in RQL query but are not actual attributes of
+your entities.
+
+* `has_text`: relation to use to query the full text index (only for entities
+  having fulltextindexed attributes).
 
-The possible type(s) for each variable is derived from the schema
-according to the constraints expressed above and thanks to the relations
-between each variable.
+* `identity`: relation to use to tell that a RQL variable is the same as another
+  when you've to use two different variables for querying purpose. On the
+  opposite it's also useful together with the :ref:`NOT` operator to tell that two
+  variables should not identify the same entity
+
 
-Built-in types
-``````````````
+.. _RQLLiterals:
+
+Literal expressions
+~~~~~~~~~~~~~~~~~~~
+
+Base types supported by RQL are those supported by the yams schema. Literal values
+are expressed as explained below:
 
-The base types supported are string (between double or single quotes),
-integers or floats (the separator is '.'), dates and
-boolean. We expect to receive a schema in which types String,
-Int, Float, Date and Boolean are defined.
+* string should be between double or single quotes. If the value contains a
+  quote, it should be preceded by a backslash '\'
+
+* floats separator is dot '.'
+
+* boolean values are :keyword:`TRUE` and :keyword:`FALSE` keywords
 
-* `String` (literal: between double or single quotes).
-* `Int`, `Float` (separator being'.').
-* `Date`, `Datetime`, `Time` (literal: string YYYY/MM/DD [hh:mm] or keywords
-  `TODAY` and `NOW`).
-* `Boolean` (keywords `TRUE` and `FALSE`).
-* `Keyword` NULL.
+* date and time should be expressed as a string with ISO notation : YYYY/MM/DD
+  [hh:mm], or using keywords :keyword:`TODAY` and :keyword:`NOW`
 
+You may also use the :keyword:`NULL` keyword, meaning 'unspecified'.
+
+
+.. _RQLOperators:
 
 Operators
 ~~~~~~~~~
 
-Logical Operators
+.. _RQLLogicalOperators:
+
+Logical operators
 `````````````````
 ::
 
      AND, OR, NOT, ','
 
-  ',' is equivalent to 'AND' but with the smallest among the priority
-  of logical operators (see :ref:`PriorityOperators`).
+',' is equivalent to 'AND' but with the smallest among the priority of logical
+operators (see :ref:`RQLOperatorsPriority`).
+
+.. _RQLMathematicalOperators:
+
+Mathematical operators
+``````````````````````
 
-Mathematical Operators
-``````````````````````
-::
++----------+---------------------+-----------+--------+
+| Operator |    Description      | Example   | Result |
++==========+=====================+===========+========+
+|  `+`     | addition            | 2 + 3     | 5      |
++----------+---------------------+-----------+--------+
+|  `-`     | subtraction         | 2 - 3     | -1     |
++----------+---------------------+-----------+--------+
+|  `*`     | multiplication      | 2 * 3     | 6      |
++----------+---------------------+-----------+--------+
+|  /       | division            | 4 / 2     | 2      |
++----------+---------------------+-----------+--------+
+|  %       | modulo (remainder)  | 5 % 4     | 1      |
++----------+---------------------+-----------+--------+
+|  ^       | exponentiation      | 2.0 ^ 3.0 | 8      |
++----------+---------------------+-----------+--------+
+|  &       | bitwise AND         | 91 & 15   | 11     |
++----------+---------------------+-----------+--------+
+|  |       | bitwise OR          | 32 | 3    | 35     |
++----------+---------------------+-----------+--------+
+|  #       | bitwise XOR         | 17 # 5    | 20     |
++----------+---------------------+-----------+--------+
+|  ~       | bitwise NOT         | ~1        | -2     |
++----------+---------------------+-----------+--------+
+|  <<      | bitwise shift left  | 1 << 4    | 16     |
++----------+---------------------+-----------+--------+
+|  >>      | bitwise shift right | 8 >> 2    | 2      |
++----------+---------------------+-----------+--------+
 
-     +, -, *, /
+
+Notice that integer division may truncate results depending on the backend's
+behaviour. For instance, postgresql does.
+
+
+.. _RQLComparisonOperators:
 
 Comparison operators
 ````````````````````
-::
+ ::
 
-     =, <, <=, >=, >, ~=, IN, LIKE, ILIKE
-
-* Syntax to use comparison operator:
+     =, !=, <, <=, >=, >, IN
 
-    `VARIABLE relation operator VALUE`
 
-* The operator `=` is the default operator and can be omitted.
+The syntax to use comparison operators is:
 
-* `relation` name is always attended
+    `VARIABLE attribute <operator> VALUE`
 
-* The operator `LIKE` equivalent to `~=` can be used with the
-  special character `%` in a string to indicate that the chain
-  must start or finish by a prefix/suffix:
-  ::
+The `=` operator is the default operator and can be omitted, i.e. :
+
+    `VARIABLE attribute = VALUE`
 
-     Any X WHERE X name ~= 'Th%'
-     Any X WHERE X name LIKE '%lt'
+is equivalent to
 
-* The operator `ILIKE` is the case insensitive version of `LIKE`.
+    `VARIABLE attribute VALUE`
 
-* The operator `IN` provides a list of possible values:
-  ::
+
+The operator `IN` provides a list of possible values: ::
 
     Any X WHERE X name IN ('chauvat', 'fayolle', 'di mascio', 'thenault')
 
 
-.. XXX nico: "A trick <> 'bar'" wouldn't it be more convenient than "NOT A trick 'bar'" ?
+.. _RQLStringOperators:
+
+String operators
+````````````````
+::
+
+  LIKE, ILIKE, ~=, REGEXP
+
+The :keyword:`LIKE` string operator can be used with the special character `%` in
+a string as wild-card: ::
+
+     # match every entity whose name starts with 'Th'
+     Any X WHERE X name ~= 'Th%'
+     # match every entity whose name endswith 'lt'
+     Any X WHERE X name LIKE '%lt'
+     # match every entity whose name contains a 'l' and a 't'
+     Any X WHERE X name LIKE '%l%t%'
 
-.. _PriorityOperators:
+:keyword:`ILIKE` is the case insensitive version of :keyword:`LIKE`. It's not
+available on all backends (e.g. sqlite doesn't support it). If not available for
+your backend, :keyword:`ILIKE` will behave like :keyword:`LIKE`.
+
+`~=` is a shortcut version of :keyword:`ILIKE`, or of :keyword:`LIKE` when the
+former is not available on the back-end.
+
+
+The :keyword:`REGEXP` is an alternative to :keyword:`LIKE` that supports POSIX
+regular expressions::
+
+   # match entities whose title starts with a digit
+   Any X WHERE X title REGEXP "^[0-9].*"
+
+
+The underlying SQL operator used is back-end-dependent :
+
+- the ``~`` operator is used for postgresql,
+- the ``REGEXP`` operator for mysql and sqlite.
+
+Other back-ends are not supported yet.
+
+
+.. _RQLOperatorsPriority:
 
 Operators priority
 ``````````````````
 
-#. "(", ")"
-#. '*', '/'
-#. '+', '-'
-#. 'NOT'
-#. 'AND'
-#. 'OR'
-#. ','
+#. `(`, `)`
+#. `^`, `<<`, `>>`
+#. `*`, `/`, `%`, `&`
+#. `+`, `-`, `|`, `#`
+#. `NOT`
+#. `AND`
+#. `OR`
+#. `,`
 
 
+.. _RQLSearchQuery:
+
 Search Query
 ~~~~~~~~~~~~
 
-   [ `DISTINCT`] <entity type> V1 (, V2) \ *
-   [ `GROUPBY` V1 (V2) \*] [ `ORDERBY` <orderterms>]
+Simplified grammar of search query: ::
+
+   [ `DISTINCT`] `Any` V1 (, V2) \*
+   [ `GROUPBY` V1 (, V2) \*] [ `ORDERBY` <orderterms>]
    [ `LIMIT` <value>] [ `OFFSET` <value>]
-   [ `WHERE` <restriction>]
-   [ `WITH` V1 (, V2) \ * BEING (<query>)]
-   [ `HAVING` <restriction>]
+   [ `WHERE` <triplet restrictions>]
+   [ `WITH` V1 (, V2)\* BEING (<query>)]
+   [ `HAVING` <other restrictions>]
    [ `UNION` <query>]
 
-:entity type:
-   Type of selected variables.
-   The special type `Any` is equivalent to not specify a type.
-:restriction:
-   list of conditions to test successively
-     `V1 relation V2 | <static value>`
-:orderterms:
-   Definition of the selection order: variable or column number followed by
-   sorting method ( `ASC`, `DESC`), ASC is the default.
-:note for grouped queries:
-   For grouped queries (e.g., a clause `GROUPBY`), all
-   selected variables must be aggregated or grouped.
+Selection
+`````````
+
+The first occurring clause is the selection of terms that should be in the result
+set.  Terms may be variables, literals, function calls, arithmetic, etc., and
+terms are separated by commas.
+
+There will be as many columns in the result set as terms in this clause,
+respecting their order.
+
+Syntax for function call is somewhat intuitive, for instance: ::
+
+    Any UPPER(N) WHERE P firstname N
+
+
+Grouping and aggregating
+````````````````````````
+
+The :keyword:`GROUPBY` keyword is followed by a list of terms on which results
+should be grouped. They are usually used with aggregate functions, responsible
+for aggregating values for each group (see :ref:`RQLAggregateFunctions`).
+
+For grouped queries, all selected variables must be either aggregated (i.e. used
+by an aggregate function) or grouped (i.e. listed in the :keyword:`GROUPBY`
+clause).
+
+
+Sorting
+```````
+
+The :keyword:`ORDERBY` keyword is followed by the definition of the selection
+order: a variable or column number followed by the sorting method
+(:keyword:`ASC`, :keyword:`DESC`). If the sorting method is not specified, then
+the sorting is ascending (:keyword:`ASC`).
+
+
+Pagination
+``````````
+
+The :keyword:`LIMIT` and :keyword:`OFFSET` keywords may be respectively used to
+limit the number of results and to tell from which result line to start (for
+instance, use `LIMIT 20` to get the first 20 results, then `LIMIT 20 OFFSET 20`
+to get the next 20).
 
 
-Sorting and groups
-``````````````````
+Restrictions
+````````````
+
+The :keyword:`WHERE` keyword introduces one of the "main" parts of the query,
+where you "define" variables and add some restrictions telling what you're
+interested in.
+
+It's a list of triplets "subject relation object", e.g. `V1 relation
+(V2 | <static value>)`. Triplets are separated using :ref:`RQLLogicalOperators`.
 
-- For grouped queries (e.g. with a GROUPBY clause), all
-  selected variables should be grouped at the right of the keyword.
+.. note::
+
+  About the negation operator (:keyword:`NOT`):
+
+  * "NOT X relation Y" is equivalent to "NOT EXISTS(X relation Y)"
+
+  * `Any X WHERE NOT X owned_by U` means "entities that have no relation
+    `owned_by`".
+
+  * `Any X WHERE NOT X owned_by U, U login "syt"` means "the entity have no
+     relation `owned_by` with the user syt". They may have a relation "owned_by"
+     with another user.
 
-- If the sorting method (SORT_METHOD) is not specified, then the sorting is
-  ascendant (`ASC`).
+In this clause, you can also use :keyword:`EXISTS` when you want to know if some
+expression is true and do not need the complete set of elements that make it
+true. Testing for existence is much faster than fetching the complete set of
+results, especially when you think about using `OR` against several expressions. For instance
+if you want to retrieve versions which are in state "ready" or tagged by
+"priority", you should write :
+
+::
+
+    Any X ORDERBY PN,N
+    WHERE X num N, X version_of P, P name PN,
+          EXISTS(X in_state S, S name "ready")
+          OR EXISTS(T tags X, T name "priority")
+
+not
 
-- Aggregate Functions: COUNT, MIN, MAX, AVG, SUM, GROUP_CONCAT
+::
+
+    Any X ORDERBY PN,N
+    WHERE X num N, X version_of P, P name PN,
+          (X in_state S, S name "ready")
+          OR (T tags X, T name "priority")
 
-.. note::
-   Aggregate functions will return None if there is no result row.
+Both queries aren't at all equivalent :
+
+* the former will retrieve all versions, then check for each one whether it is
+  in the matching state or tagged by the expected tag,
+
+* the latter will retrieve all versions, states and tags (cartesian product!),
+  compute the join and then exclude each row which is not in the matching state
+  or tagged by the expected tag. This implies that you won't get any result if
+  the version has no `in_state` or `tags` relation at all.
 
 
-Having
-```````
+You can also use the question mark (`?`) to mark optional relations which allow
+you to select entities related **or not** to another. It is a concept similar
+to the `Left outer join`_:
+
+    the result of a left outer join (or simply left join) for table A and B
+    always contains all records of the "left" table (A), even if the
+    join-condition does not find any matching record in the "right" table (B).
+
+You must use the `?` behind a variable to specify that the relation toward it
+is optional. For instance:
+
+- Bugs of a project attached or not to a version ::
+
+       Any X, V WHERE X concerns P, P eid 42, X corrected_in V?
+
+  You will get a result set containing all the project's tickets, with either the
+  version in which it's corrected or None for tickets not related to a version.
+
+
+- All cards and the project they document if any ::
+
+       Any C, P WHERE C is Card, P? documented_by C
+
+Notice you may also use outer join:
 
-The HAVING clause, as in SQL, has been originally introduced to restrict a query
+- on the RHS of attribute relation, e.g. ::
+
+       Any X WHERE X ref XR, Y name XR?
+
+  so that Y is outer joined on X by ref/name attributes comparison
+
+
+- on any side of an `HAVING` expression, e.g. ::
+
+       Any X WHERE X creation_date XC, Y creation_date YC
+       HAVING YEAR(XC)=YEAR(YC)?
+
+  so that Y is outer joined on X by comparison of the year extracted from their
+  creation date. ::
+
+       Any X WHERE X creation_date XC, Y creation_date YC
+       HAVING YEAR(XC)?=YEAR(YC)
+
+  would outer join X on Y instead.
+
+
+Having restrictions
+```````````````````
+
+The :keyword:`HAVING` clause, as in SQL, may be used to restrict a query
 according to value returned by an aggregate function, e.g.::
 
     Any X GROUPBY X WHERE X relation Y HAVING COUNT(Y) > 10
 
-It may however be used for something else...
-
-In the WHERE clause, we are limited to 3-expression, such thing can't be
-expressed directly as in the SQL's way. But this can be expressed using HAVING
-comparison expression.
-
-For instance, let's say you want to get people whose uppercased first name equals
-to another person uppercased first name::
+It may however be used for something else: in the :keyword:`WHERE` clause, we are
+limited to triplet expressions, so some things may not be expressed there. Let's
+take an example: if you want to get people whose upper-cased first name equals
+another person's upper-cased first name, there is no proper way to express this
+using triplets, so you should use something like: ::
 
-    Person X WHERE X firstname XFN, Y firstname YFN HAVING X > Y, UPPER(XFN) = UPPER(YFN)
-
-This open some new possibilities. Another example::
+    Any X WHERE X firstname XFN, Y firstname YFN, NOT X identity Y HAVING UPPER(XFN) = UPPER(YFN)
 
-    Person X WHERE X birthday XB HAVING YEAR(XB) = 2000
+Another example: imagine you want people born in 2000: ::
 
-That lets you use transformation functions not only in selection but for
-restriction as well and to by-pass limitation of the WHERE clause, which was the
-major flaw in the RQL language.
+    Any X WHERE X birthday XB HAVING YEAR(XB) = 2000
 
 Notice that while we would like this to work without the HAVING clause, this
 can't be currently be done because it introduces an ambiguity in RQL's grammar
 that can't be handled by Yapps_, the parser's generator we're using.
 
-Negation
-````````
+
+Sub-queries
+```````````
+
+The :keyword:`WITH` keyword introduces the sub-queries clause. Each sub-query
+has the form:
+
+  V1(,V2) BEING (rql query)
 
-* A query such as `Document X WHERE NOT X owned_by U` means "the documents have
-  no relation `owned_by`".
+Variables at the left of the :keyword:`BEING` keyword define the variables into
+which results from the sub-query will be mapped in the outer query.
+Sub-queries are separated from each other using a comma.
 
-* But the query `Document X WHERE NOT X owned_by U, U login "syt"` means "the
-  documents have no relation `owned_by` with the user syt". They may have a
-  relation "owned_by" with another user.
+Let's say we want to retrieve for each project its number of versions and its
+number of tickets. Due to the nature of relational algebra behind the scene, this
+can't be achieved using a single query. You have to write something along the
+line of: ::
+
+  Any X, VC, TC WHERE X identity XX
+  WITH X, VC BEING (Any X, COUNT(V) GROUPBY X WHERE V version_of X),
+       XX, TC BEING (Any X, COUNT(T) GROUPBY X WHERE T ticket_of X)
 
-Identity
-````````
+Notice that we can't reuse a same variable name as alias for two different
+sub-queries, hence the usage of 'X' and 'XX' in this example, which are then
+unified using the special `identity` relation (see :ref:`XXX`).
+
+.. warning::
+
+  Sub-queries define a new variable scope, so even if a variable has the same name
+  in the outer query and in the sub-query, they technically **aren't** the same
+  variable. So ::
 
-You can use the special relation `identity` in a query to
-add an identity constraint between two variables. This is equivalent
-to ``is`` in python::
+     Any W, REF WITH W, REF BEING
+         (Any W, REF WHERE W is Workcase, W ref REF,
+                           W concerned_by D, D name "Logilab")
+  could be written:
 
-   Any A WHERE A comments B, A identity B
+     Any W, REF WITH W, REF BEING
+        (Any W1, REF1 WHERE W1 is Workcase, W1 ref REF1,
+                            W1 concerned_by D, D name "Logilab")
 
-return all objects that comment themselves. The relation `identity` is
-especially useful when defining the rules for securities with `RQLExpressions`.
+  Also, when a variable is coming from a sub-query, you currently can't reference
+  its attributes or inlined relations in the outer query; you have to fetch them
+  in the sub-query. For instance, let's say we want to sort by project name in
+  our first example, we would have to write ::
 
 
-Limit / offset
-``````````````
-::
+    Any X, VC, TC ORDERBY XN WHERE X identity XX
+    WITH X, XN, VC BEING (Any X, COUNT(V) GROUPBY X,XN WHERE V version_of X, X name XN),
+         XX, TC BEING (Any X, COUNT(T) GROUPBY X WHERE T ticket_of X)
+
+  instead of ::
 
-    Any P ORDERBY N LIMIT 5 OFFSET 10 WHERE P is Person, P firstname N
+    Any X, VC, TC ORDERBY XN WHERE X identity XX, X name XN,
+    WITH X, XN, VC BEING (Any X, COUNT(V) GROUPBY X WHERE V version_of X),
+         XX, TC BEING (Any X, COUNT(T) GROUPBY X WHERE T ticket_of X)
+
+  which would result in a SQL execution error.
 
 
-Exists
-```````
+Union
+`````
 
-You can use `EXISTS` when you want to know if some expression is true and do not
-need the complete set of elements that make it true. Testing for existence is
-much faster than fetching the complete set of results.
+You may get a result set containing the concatenation of several queries using
+the :keyword:`UNION`. The selection of each query should have the same number of
+columns.
 
 ::
 
-    Any X ORDERBY PN,N
-    WHERE X num N, X version_of P, P name PN,
-          EXISTS(X in_state S, S name IN ("dev", "ready"))
-          OR EXISTS(T tags X, T name "priority")
-
-
-Optional relations
-``````````````````
-
-It is a similar concept that the `Left outer join`_:
-
-    the result of a left outer join (or simply left join) for table A and B
-    always contains all records of the "left" table (A), even if the
-    join-condition does not find any matching record in the "right" table (B).
-
-* They allow you to select entities related or not to another.
-
-* You must use the `?` behind the variable to specify that the relation
-  toward it is optional:
-
-   - Anomalies of a project attached or not to a version ::
-
-       Any X, V WHERE X concerns P, P eid 42, X corrected_in V?
-
-   - All cards and the project they document if necessary ::
-
-       Any C, P WHERE C is Card, P? documented_by C
-
-    Any T,P,V WHERE T is Ticket, T concerns P, T done_in V?
+    (Any X, XN WHERE X is Person, X surname XN) UNION (Any X,XN WHERE X is Company, X name XN)
 
 
-Subqueries
-``````````
-::
+.. _RQLFunctions:
+
+Available functions
+~~~~~~~~~~~~~~~~~~~
+
+Below is the list of aggregate and transformation functions that are supported
+natively by the framework. Notice that cubes may define additional functions.
+
+.. _RQLAggregateFunctions:
+
+Aggregate functions
+```````````````````
++--------------------+----------------------------------------------------------+
+| :func:`COUNT`      | return the number of rows                                |
++--------------------+----------------------------------------------------------+
+| :func:`MIN`        | return the minimum value                                 |
++--------------------+----------------------------------------------------------+
+| :func:`MAX`        | return the maximum value                                 |
++--------------------+----------------------------------------------------------+
+| :func:`AVG`        | return the average value                                 |
++--------------------+----------------------------------------------------------+
+| :func:`SUM`        | return the sum of values                                 |
++--------------------+----------------------------------------------------------+
+| :func:`COMMA_JOIN` | return each value separated by a comma (for string only) |
++--------------------+----------------------------------------------------------+
+
+All aggregate functions above take a single argument. Take care some aggregate
+functions (e.g. :keyword:`MAX`, :keyword:`MIN`) may return `None` if there is no
+result row.
+
+.. _RQLStringFunctions:
+
+String transformation functions
+```````````````````````````````
 
-    (Any X WHERE X is Person) UNION (Any X WHERE X is Company)
++-------------------------+-----------------------------------------------------------------+
+| :func:`UPPER(String)`   | upper case the string                                           |
++-------------------------+-----------------------------------------------------------------+
+| :func:`LOWER(String)`   | lower case the string                                           |
++-------------------------+-----------------------------------------------------------------+
+| :func:`LENGTH(String)`  | return the length of the string                                 |
++-------------------------+-----------------------------------------------------------------+
+| :func:`SUBSTRING(       | extract from the string a string starting at given index and of |
+|  String, start, length)`| given length                                                    |
++-------------------------+-----------------------------------------------------------------+
+| :func:`LIMIT_SIZE(      | if the length of the string is greater than given max size,     |
+|  String, max size)`     | strip it and add ellipsis ("..."). The resulting string will    |
+|                         | hence have max size + 3 characters                              |
++-------------------------+-----------------------------------------------------------------+
+| :func:`TEXT_LIMIT_SIZE( | similar to the above, but allow to specify the MIME type of the |
+|  String, format,        | text contained by the string. Supported formats are text/html,  |
+|  max size)`             | text/xhtml and text/xml. All others will be considered as plain |
+|                         | text. For non plain text format, sgml tags will be first removed|
+|                         | before limiting the string.                                     |
++-------------------------+-----------------------------------------------------------------+
+
+.. _RQLDateFunctions:
+
+Date extraction functions
+`````````````````````````
+
++--------------------------+----------------------------------------+
+| :func:`YEAR(Date)`       | return the year of a date or datetime  |
++--------------------------+----------------------------------------+
+| :func:`MONTH(Date)`      | return the month of a date or datetime |
++--------------------------+----------------------------------------+
+| :func:`DAY(Date)`        | return the day of a date or datetime   |
++--------------------------+----------------------------------------+
+| :func:`HOUR(Datetime)`   | return the hour of a datetime          |
++--------------------------+----------------------------------------+
+| :func:`MINUTE(Datetime)` | return the minute of a datetime        |
++--------------------------+----------------------------------------+
+| :func:`SECOND(Datetime)` | return the second of a datetime        |
++--------------------------+----------------------------------------+
+
+.. _RQLOtherFunctions:
+
+Other functions
+```````````````
++-----------------------+--------------------------------------------------------------------+
+| :func:`ABS(num)`      |  return the absolute value of a number                             |
++-----------------------+--------------------------------------------------------------------+
+| :func:`RANDOM()`      | return a pseudo-random value from 0.0 to 1.0                       |
++-----------------------+--------------------------------------------------------------------+
+| :func:`FSPATH(X)`     | expect X to be an attribute whose value is stored in a             |
+|                       | :class:`BFSStorage` and return its path on the file system         |
++-----------------------+--------------------------------------------------------------------+
+| :func:`FTKIRANK(X)`   | expect X to be an entity used in a has_text relation, and return a |
+|                       | number corresponding to the rank order of each resulting entity    |
++-----------------------+--------------------------------------------------------------------+
+| :func:`CAST(Type, X)` | expect X to be an attribute and return it casted into the given    |
+|                       | final type                                                         |
++-----------------------+--------------------------------------------------------------------+
 
 
-     DISTINCT Any W, REF
-        WITH W, REF BEING
-            (
-              (Any W, REF WHERE W is Workcase, W ref REF,
-                                 W concerned_by D, D name "Logilab")
-               UNION
-              (Any W, REF WHERE W is Workcase, W ref REF, '
-                                W split_into WP, WP name "WP1")
-            )
-
-Function calls
-``````````````
-::
-
-    Any UPPER(N) WHERE P firstname N
-    Any LOWER(N) WHERE P firstname N
-
-Functions available on string: `UPPER`, `LOWER`
-
-.. XXX retrieve available function automatically
-
-For a performance issue, you can enrich the RQL dialect by RDMS (Relational database management system) functions.
-
+.. _RQLExamples:
 
 Examples
 ~~~~~~~~
@@ -349,6 +645,8 @@
         Any X where X is in (FirstType, SecondType)
 
 
+.. _RQLInsertQuery:
+
 Insertion query
 ~~~~~~~~~~~~~~~
 
@@ -380,6 +678,8 @@
 
         INSERT Person X: X name 'foo', X friend  Y WHERE name 'nice'
 
+.. _RQLSetQuery:
+
 Update and relation creation queries
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
@@ -401,6 +701,8 @@
         SET X know Y  WHERE X friend Y
 
 
+.. _RQLDeleteQuery:
+
 Deletion query
 ~~~~~~~~~~~~~~
 
@@ -421,22 +723,6 @@
         DELETE X friend Y WHERE X is Person, X name 'foo'
 
 
-Virtual RQL relations
-~~~~~~~~~~~~~~~~~~~~~
-
-Those relations may only be used in RQL query and are not actual
-attributes of your entities.
-
-* `has_text`: relation to use to query the full text index (only for
-  entities having fulltextindexed attributes).
-
-* `identity`: `Identity`_ relation to use to tell that a RQL variable should be
-  the same as another (but you've to use two different rql variables
-  for querying purpose)
-
-* `is`: relation to enforce possible types for a variable
-
-
-
 .. _Yapps: http://theory.stanford.edu/~amitp/yapps/
 .. _Left outer join: http://en.wikipedia.org/wiki/Join_(SQL)#Left_outer_join
+
--- a/doc/book/en/conf.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/doc/book/en/conf.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -66,7 +66,7 @@
 
 # General substitutions.
 project = 'CubicWeb'
-copyright = '2001-2010, Logilab'
+copyright = '2001-2011, Logilab'
 
 # The default replacements for |version| and |release|, also used in various
 # other places throughout the built documents.
--- a/doc/book/en/devrepo/datamodel/define-workflows.rst	Mon Sep 26 18:37:23 2011 +0200
+++ b/doc/book/en/devrepo/datamodel/define-workflows.rst	Fri Dec 09 12:08:27 2011 +0100
@@ -13,7 +13,7 @@
 possible transitions from one state to another state.
 
 We will define a simple workflow for a blog, with only the following two states:
-`submitted` and `published`. You may want to take a look at :ref:`_TutosBase` if
+`submitted` and `published`. You may want to take a look at :ref:`TutosBase` if
 you want to quickly setup an instance running a blog.
 
 Setting up a workflow
--- a/doc/book/en/devrepo/repo/hooks.rst	Mon Sep 26 18:37:23 2011 +0200
+++ b/doc/book/en/devrepo/repo/hooks.rst	Fri Dec 09 12:08:27 2011 +0100
@@ -110,7 +110,7 @@
             parents.add(parent.eid)
 
 
-    class CheckSubsidiaryCycleOp(DataOperationMixIn, Operation):
+    class CheckSubsidiaryCycleOp(Operation):
 
         def precommit_event(self):
             check_cycle(self.session, self.eidto, 'subsidiary_of')
@@ -145,7 +145,7 @@
        def __call__(self):
            CheckSubsidiaryCycleOp.get_instance(self._cw).add_data(self.eidto)
 
-   class CheckSubsidiaryCycleOp(Operation):
+   class CheckSubsidiaryCycleOp(DataOperationMixIn, Operation):
 
        def precommit_event(self):
            for eid in self.get_data():
--- a/doc/book/en/devrepo/repo/sessions.rst	Mon Sep 26 18:37:23 2011 +0200
+++ b/doc/book/en/devrepo/repo/sessions.rst	Fri Dec 09 12:08:27 2011 +0100
@@ -59,11 +59,11 @@
       other credentials elements (calling `authentication_information`),
       giving the request object each time
 
-      * the default retriever (bizarrely named
-        `LoginPaswordRetreiver`) will in turn defer login and password
-        fetching to the request object (which, depending on the
-        authentication mode (`cookie` or `http`), will do the
-        appropriate things and return a login and a password)
+      * the default retriever (oddly named `LoginPasswordRetreiver`)
+        will in turn defer login and password fetching to the request
+        object (which, depending on the authentication mode (`cookie`
+        or `http`), will do the appropriate things and return a login
+        and a password)
 
     * the authentication manager, on success, asks the `Repository`
       object to connect with the found credentials (using `connect`)
@@ -74,10 +74,10 @@
         from which a regular `Session` object is made; it returns the
         session id
 
-        * the source in turn will defer work to an authentifier class
-          that define the ultimate `authenticate` method (for instance
-          the native source will query the database against the
-          provided credentials)
+        * the source in turn will delegate work to an authentifier
+          class that defines the ultimate `authenticate` method (for
+          instance the native source will query the database against
+          the provided credentials)
 
     * the authentication manager, on success, will call back _all_
       retrievers with `authenticated` and return its authentication
@@ -99,9 +99,9 @@
 each side: some middleware will do pre-authentication and under the
 right circumstances add a new HTTP `x-foo-user` header to the query
 before it reaches the CubicWeb instance. For a concrete example of
-this, see the `apachekerberos`_ cube.
+this, see the `trustedauth`_ cube.
 
-.. _`apachekerberos`: http://www.cubicweb.org/project/cubicweb-apachekerberos
+.. _`trustedauth`: http://www.cubicweb.org/project/cubicweb-trustedauth
 
 Repository authentication plugins
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
--- a/doc/book/en/devrepo/testing.rst	Mon Sep 26 18:37:23 2011 +0200
+++ b/doc/book/en/devrepo/testing.rst	Fri Dec 09 12:08:27 2011 +0100
@@ -337,13 +337,12 @@
                                             sourcefile='/path/to/realdb_sources')
 
         def test_blog_rss(self):
-	    req = self.request()
-	    rset = req.execute('Any B ORDERBY D DESC WHERE B is BlogEntry, '
-	                       'B created_by U, U login "logilab", B creation_date D')
+            req = self.request()
+            rset = req.execute('Any B ORDERBY D DESC WHERE B is BlogEntry, '
+                'B created_by U, U login "logilab", B creation_date D')
             self.view('rss', rset)
 
 
-
 Testing with other cubes
 ------------------------
 
--- a/doc/book/en/devweb/edition/dissection.rst	Mon Sep 26 18:37:23 2011 +0200
+++ b/doc/book/en/devweb/edition/dissection.rst	Fri Dec 09 12:08:27 2011 +0100
@@ -1,8 +1,8 @@
 
 .. _form_dissection:
 
-Dissection of a form
---------------------
+Dissection of an entity form
+----------------------------
 
 This is done (again) with a vanilla instance of the `tracker`_
 cube. We will populate the database with a bunch of entities and see
@@ -10,44 +10,6 @@
 
 .. _`tracker`: http://www.cubicweb.org/project/cubicweb-tracker
 
-Patching the session object
-~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-In order to play interactively with web side application objects, we
-have to cheat a bit: we will decorate the session object with some
-missing artifacts that should belong to a web request object. With
-that we can instantiate and render forms interactively.
-
-The function below does the minimum to allow going through this
-exercice. Some attributes or methods may be missing for other
-purposes. It is nevertheless not complicated to enhance it if need
-arises.
-
-.. sourcecode:: python
-
- def monkey_patch_session(session):
-     """ useful to use the cw shell session object
-     with web appobjects, which expect more than a plain
-     data repository session
-     """
-     # for autoform selection
-     session.json_request = False
-     session.url = lambda: u'http://perdu.com'
-     session.session = session
-     session.form = {}
-     session.list_form_param = lambda *args: []
-     # for render
-     session.use_fckeditor = lambda: False
-     session._ressources = []
-     session.add_js = session.add_css = lambda *args: session._ressources.append(args)
-     session.external_resource = lambda x:{}
-     session._tabcount = 0
-     def next_tabindex():
-         session._tabcount += 1
-         return session._tabcount
-     session.next_tabindex = next_tabindex
-     return session
-
 Populating the database
 ~~~~~~~~~~~~~~~~~~~~~~~
 
@@ -71,10 +33,17 @@
 
 .. sourcecode:: python
 
- >>> monkey_patch_session(session)
- >>> form = session.vreg['forms'].select('edition', session, rset=rql('Ticket T'))
+ >>> cnx.use_web_compatible_requests('http://fakeurl.com')
+ >>> req = cnx.request()
+ >>> form = req.vreg['forms'].select('edition', req, rset=rql('Ticket T'))
  >>> html = form.render()
 
+.. note::
+
+  In order to play interactively with web side application objects, we have to
+  cheat a bit to have request object that will looks like HTTP request object, by
+  calling :meth:`use_web_compatible_requests()` on the connection.
+
 This creates an automatic entity form. The ``.render()`` call yields
 an html (unicode) string. The html output is shown below (with
 internal fieldset omitted).
--- a/doc/book/en/devweb/edition/form.rst	Mon Sep 26 18:37:23 2011 +0200
+++ b/doc/book/en/devweb/edition/form.rst	Fri Dec 09 12:08:27 2011 +0100
@@ -48,9 +48,10 @@
   'sparql': [<class 'cubicweb.web.views.sparql.SparqlForm'>]}
 
 
-The two most important form families here (for all pracitcal purposes)
-are `base` and `edition`. Most of the time one wants alterations of
-the AutomaticEntityForm (from the `edition` category).
+The two most important form families here (for all practical purposes) are `base`
+and `edition`. Most of the time one wants alterations of the
+:class:`AutomaticEntityForm` to generate custom forms to handle edition of an
+entity.
 
 The Automatic Entity Form
 ~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -214,6 +215,158 @@
 unpublished versions defined in the project (sorted by number) for
 which the current user is allowed to establish the relation.
 
+
+Building self-posted form with custom fields/widgets
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Sometimes you want a form that is not related to entity edition. For those,
+you'll have to handle form posting by yourself. Here is a complete example on how
+to achieve this (and more).
+
+Imagine you want a form that selects a month period. There are no proper
+field/widget to handle this in CubicWeb, so let's start by defining them:
+
+.. sourcecode:: python
+
+    # let's have the whole import list at the beginning, even those necessary for
+    # subsequent snippets
+    from logilab.common import date
+    from logilab.mtconverter import xml_escape
+    from cubicweb.view import View
+    from cubicweb.selectors import match_kwargs
+    from cubicweb.web import RequestError, ProcessFormError
+    from cubicweb.web import formfields as fields, formwidgets as wdgs
+    from cubicweb.web.views import forms, calendar
+
+    class MonthSelect(wdgs.Select):
+        """Custom widget to display month and year. Expect value to be given as a
+        date instance.
+        """
+
+        def format_value(self, form, field, value):
+            return u'%s/%s' % (value.year, value.month)
+
+        def process_field_data(self, form, field):
+            val = super(MonthSelect, self).process_field_data(form, field)
+            try:
+                year, month = val.split('/')
+                year = int(year)
+                month = int(month)
+                return date.date(year, month, 1)
+            except ValueError:
+                raise ProcessFormError(
+                    form._cw._('badly formated date string %s') % val)
+
+
+    class MonthPeriodField(fields.CompoundField):
+        """custom field composed of two subfields, 'begin_month' and 'end_month'.
+
+        It expects to be used on form that has 'mindate' and 'maxdate' in its
+        extra arguments, telling the range of month to display.
+        """
+
+        def __init__(self, *args, **kwargs):
+            kwargs.setdefault('widget', wdgs.IntervalWidget())
+            super(MonthPeriodField, self).__init__(
+                [fields.StringField(name='begin_month',
+                                    choices=self.get_range, sort=False,
+                                    value=self.get_mindate,
+                                    widget=MonthSelect()),
+                 fields.StringField(name='end_month',
+                                    choices=self.get_range, sort=False,
+                                    value=self.get_maxdate,
+                                    widget=MonthSelect())], *args, **kwargs)
+
+        @staticmethod
+        def get_range(form, field):
+            mindate = date.todate(form.cw_extra_kwargs['mindate'])
+            maxdate = date.todate(form.cw_extra_kwargs['maxdate'])
+            assert mindate <= maxdate
+            _ = form._cw._
+            months = []
+            while mindate <= maxdate:
+                label = '%s %s' % (_(calendar.MONTHNAMES[mindate.month - 1]),
+                                   mindate.year)
+                value = field.widget.format_value(form, field, mindate)
+                months.append( (label, value) )
+                mindate = date.next_month(mindate)
+            return months
+
+        @staticmethod
+        def get_mindate(form, field):
+            return form.cw_extra_kwargs['mindate']
+
+        @staticmethod
+        def get_maxdate(form, field):
+            return form.cw_extra_kwargs['maxdate']
+
+        def process_posted(self, form):
+            for field, value in super(MonthPeriodField, self).process_posted(form):
+                if field.name == 'end_month':
+                    value = date.last_day(value)
+                yield field, value
+
+
+Here we first define a widget that will be used to select the beginning and the
+end of the period, displaying months like '<month> YYYY' but using 'YYYY/mm' as
+actual value.
+
+We then define a field that will actually hold two fields, one for the beginning
+and another for the end of the period. Each subfield uses the widget we defined
+earlier, and the outer field itself uses the standard
+:class:`IntervalWidget`. The field adds some logic:
+
+* a vocabulary generation function `get_range`, used to populate each sub-field
+
+* two 'value' functions `get_mindate` and `get_maxdate`, used to tell to
+  subfields which value they should consider on form initialization
+
+* overriding of `process_posted`, called when the form is being posted, so that
+  the end of the period is properly set to the last day of the month.
+
+Now, we can define a very simple form:
+
+.. sourcecode:: python
+
+    class MonthPeriodSelectorForm(forms.FieldsForm):
+        __regid__ = 'myform'
+        __select__ = match_kwargs('mindate', 'maxdate')
+
+        form_buttons = [wdgs.SubmitButton()]
+        form_renderer_id = 'onerowtable'
+        period = MonthPeriodField()
+
+
+where we simply add our field, set a submit button and use a very simple renderer
+(try others!). Also we specify a selector that ensures the form will have the
+arguments necessary for our field.
+
+Now, we need a view that will wrap the form and handle post when it occurs,
+simply displaying posted values in the page:
+
+.. sourcecode:: python
+
+    class SelfPostingForm(View):
+        __regid__ = 'myformview'
+
+        def call(self):
+            mindate, maxdate = date.date(2010, 1, 1), date.date(2012, 1, 1)
+            form = self._cw.vreg['forms'].select(
+                'myform', self._cw, mindate=mindate, maxdate=maxdate, action='')
+            try:
+                posted = form.process_posted()
+                self.w(u'<p>posted values %s</p>' % xml_escape(repr(posted)))
+            except RequestError: # no specified period asked
+                pass
+            form.render(w=self.w, formvalues=self._cw.form)
+
+
+Notice usage of the :meth:`process_posted` method, that will return a dictionary
+of typed values (because they have been processed by the field). In our case, when
+the form is posted you should see a dictionary with 'begin_month' and 'end_month'
+as keys with the selected dates as values (as python `date` objects).
+
+
 APIs
 ~~~~
 
--- a/doc/book/en/devweb/resource.rst	Mon Sep 26 18:37:23 2011 +0200
+++ b/doc/book/en/devweb/resource.rst	Fri Dec 09 12:08:27 2011 +0100
@@ -8,7 +8,7 @@
 Static files handling
 ---------------------
 
-.. automethod:: cubicweb.web.webconfig.WebConfiguration.static_directory
+.. autoattribute:: cubicweb.web.webconfig.WebConfiguration.static_directory
 .. automethod:: cubicweb.web.webconfig.WebConfiguration.static_file_exists
 .. automethod:: cubicweb.web.webconfig.WebConfiguration.static_file_open
 .. automethod:: cubicweb.web.webconfig.WebConfiguration.static_file_add
--- a/doc/book/en/devweb/views/baseviews.rst	Mon Sep 26 18:37:23 2011 +0200
+++ b/doc/book/en/devweb/views/baseviews.rst	Fri Dec 09 12:08:27 2011 +0100
@@ -1,137 +1,17 @@
-.. -*- coding: utf-8 -*-
-
 Base views
 ----------
 
-*CubicWeb* provides a lot of standard views, that can be found in
+|cubicweb| provides a lot of standard views, that can be found in
 :mod:`cubicweb.web.views` sub-modules.
 
 A certain number of views are used to build the web interface, which apply to one
-or more entities. As other appobject, Their identifier is what distinguish them
+or more entities. As other appobjects, their identifier is what distinguishes them
 from each others. The most generic ones, found in
 :mod:`cubicweb.web.views.baseviews`, are described below.
 
-HTML views
-~~~~~~~~~~
-
-Special views
-`````````````
-
-*noresult*
-    This view is the default view used when no result has been found
-    (e.g. empty result set).
-
-*final*
-    Display the value of a cell without trasnformation (in case of a non final
-    entity, we see the eid). Applicable on any result set.
-
-.. note::
-
-   `final` entities are merely attributes.
-
-*null*
-    This view is the default view used when nothing needs to be rendered.
-    It is always applicable.
-
-
-Entity views
-````````````
-
-*incontext, outofcontext*
-
-    Those are used to display a link to an entity, whose label depends on the
-    entity having to be displayed in or out of context (of another entity): some
-    entities make sense in the context of another entity. For instance, the
-    `Version` of a `Project` in forge. So one may expect that 'incontext' will
-    be called when display a version from within the context of a project, while
-    'outofcontext"' will be called in other cases. In our example, the
-    'incontext' view of the version would be something like '0.1.2', while the
-    'outofcontext' view would include the project name, e.g. 'baz 0.1.2' (since
-    only a version number without the associated project doesn't make sense if
-    you don't know yet that you're talking about the famous 'baz' project. |cubicweb|
-    tries to make guess and call 'incontext'/'outofcontext' nicely. When it can't
-    know, the 'oneline' view should be used.
-
-    By default it respectively produces the result of `textincontext` and
-    `textoutofcontext` wrapped in a link leading to the primary view of the
-    entity.
-
-
-*oneline*
-
-    This view is used when we can't tell if the entity should be considered as
-    displayed in or out of context. By default it produces the result of `text`
-    in a link leading to the primary view of the entity.
+You'll probably want to customize one or more of the described views, which are
+default, generic implementations.
 
 
-List
-`````
-
-*list*
-
-    This view displays a list of entities by creating a HTML list (`<ul>`) and
-    call the view `listitem` for each entity of the result set. The 'list' view
-    will generate html like:
-
-    .. sourcecode:: html
-
-      <ul class="section">
-        <li>"result of 'subvid' view for a row</li>
-        ...
-      </ul>
-
-
-*simplelist*
-
-  This view is not 'ul' based, and rely on div behaviour to separate items. html
-  will look like
-
-    .. sourcecode:: html
-
-      <div class="section">"result of 'subvid' view for a row</div>
-      ...
-
-
-  It relies on base :class:`~cubicweb.view.View` class implementation of the
-  :meth:`call` method to insert those <div>.
-
-
-*sameetypelist*
+.. automodule:: cubicweb.web.views.baseviews
 
-    This view displays a list of entities of the same type, in HTML section
-    (`<div>`) and call the view `sameetypelistitem` for each entity of the result
-    set. It's designed to get a more adapted global list when displayed entities
-    are all of the same type.
-
-
-*csv*
-
-    This view displays each entity in a coma separated list. It is NOT related to
-    the well-known text file format.
-
-
-Those list view can be given a 'subvid' arguments, telling the view to use of
-each item in the list. When not specified, the value of the 'redirect_vid'
-attribute of :class:`ListItemView` (for 'listview') or of :class:`SimpleListView`
-will be used. This default to 'outofcontext' for 'list' / 'incontext' for
-'simplelist'
-
-
-Text entity views
-~~~~~~~~~~~~~~~~~
-
-Basic html view have some variantsto be used when generating raw text, not html
-(for notifications for instance).
-
-*text*
-
-    This is the simplest text view for an entity. By default it returns the
-    result of the `.dc_title` method, which is cut to fit the
-    `navigation.short-line-size` property if necessary.
-
-*textincontext, textoutofcontext*
-
-    Similar to the `text` view, but called when an entity is considered out or in
-    context (see description of incontext/outofcontext html views for more
-    information on this). By default it returns respectively the result of the
-    methods `.dc_title()` and `.dc_long_title()` of the entity.
--- a/doc/book/en/devweb/views/primary.rst	Mon Sep 26 18:37:23 2011 +0200
+++ b/doc/book/en/devweb/views/primary.rst	Fri Dec 09 12:08:27 2011 +0100
@@ -10,11 +10,11 @@
 It is automatically selected on a one line result set containing an
 entity.
 
-This view is supposed to render a maximum of informations about the
+It lives in the :mod:`cubicweb.web.views.primary` module.
+
+The *primary* view is supposed to render a maximum of information about the
 entity.
 
-It lives in the :mod:`cubicweb.web.views.primary` module.
-
 .. _primary_view_layout:
 
 Layout
@@ -139,8 +139,6 @@
 that can't be done using rql for instance.
 
 
-
-
 .. sourcecode:: python
 
    pv_section = uicfg.primaryview_section
@@ -163,62 +161,8 @@
    ``tag_subject_of``. To avoid warnings during execution, they should be set to
    ``'*'``.
 
-Rendering methods and attributes
-````````````````````````````````
 
-The basic layout of a primary view is as in the
-:ref:`primary_view_layout` section. This layout is actually drawn by
-the `render_entity` method.
-
-The methods you may want to modify while customizing a ``PrimaryView``
-are:
-
-*render_entity_title(self, entity)*
-    Renders the entity title, by default using entity's :meth:`dc_title()` method.
-
-*render_entity_attributes(self, entity)*
-    Renders all attributes and relations in the 'attributes' section . The
-    :attr:`skip_none` attribute controls the display of `None` valued attributes.
-
-*render_entity_relations(self, entity)*
-    Renders all relations in the 'relations' section.
-
-*render_side_boxes(self, entity, boxes)*
-    Renders side boxes on the right side of the content. This will generate a box
-    for each relation in the 'sidebox' section, as well as explicit box
-    appobjects selectable in this context.
-
-The placement of relations in the relations section or in side boxes
-can be controlled through the :ref:`primary_view_configuration` mechanism.
-
-*content_navigation_components(self, context)*
-    This method is applicable only for entity type implementing the interface
-    `IPrevNext`. This interface is for entities which can be linked to a previous
-    and/or next entity. This method will render the navigation links between
-    entities of this type, either at the top or at the bottom of the page
-    given the context (navcontent{top|bottom}).
-
-Also, please note that by setting the following attributes in your
-subclass, you can already customize some of the rendering:
-
-*show_attr_label*
-    Renders the attribute label next to the attribute value if set to `True`.
-    Otherwise, does only display the attribute value.
-
-*show_rel_label*
-    Renders the relation label next to the relation value if set to `True`.
-    Otherwise, does only display the relation value.
-
-*skip_none*
-    Does not render an attribute value that is None if set to `True`.
-
-*main_related_section*
-    Renders the relations of the entity if set to `True`.
-
-A good practice is for you to identify the content of your entity type for which
-the default rendering does not answer your need so that you can focus on the specific
-method (from the list above) that needs to be modified. We do not advise you to
-overwrite ``render_entity`` unless you want a completely different layout.
+.. automodule:: cubicweb.web.views.primary
 
 
 Example of customization and creation
@@ -329,3 +273,4 @@
 
 .. image:: ../../images/lax-book_10-blog-with-two-entries_en.png
    :alt: a blog and all its entries
+
--- a/doc/book/en/devweb/views/reledit.rst	Mon Sep 26 18:37:23 2011 +0200
+++ b/doc/book/en/devweb/views/reledit.rst	Fri Dec 09 12:08:27 2011 +0100
@@ -116,7 +116,34 @@
 which always work.
 
 
+Disable `reledit`
+*****************
+
+By default, `reledit` is available on attributes and relations displayed in
+the 'attribute' section of the default primary view.  If you want to disable
+it for some attribute or relation, you have to use `uicfg`:
+
+.. sourcecode:: python
+
+    import uicfg.primaryview_display_ctrl as _pvdc
+    _pvdc.tag_attribute(('Company', 'name'), {'vid': 'incontext'})
+
+To deactivate it everywhere it's used automatically, you may use the code snippet
+below somewhere in your cube's views:
+
+.. sourcecode:: python
+
+    from cubicweb.web.views import reledit
+
+    class DeactivatedAutoClickAndEditFormView(reledit.AutoClickAndEditFormView):
+	def _should_edit_attribute(self, rschema):
+	    return False
+
+	def _should_edit_relation(self, rschema, role):
+	    return False
+
+    def registration_callback(vreg):
+	vreg.register_and_replace(DeactivatedAutoClickAndEditFormView,
+				  reledit.AutoClickAndEditFormView)
 
 
-
-
--- a/doc/book/en/devweb/views/startup.rst	Mon Sep 26 18:37:23 2011 +0200
+++ b/doc/book/en/devweb/views/startup.rst	Fri Dec 09 12:08:27 2011 +0100
@@ -1,15 +1,18 @@
 Startup views
 -------------
 
-(:mod:`cubicweb.web.views.startup`)
+Startup views are views requiring no context, from which you usually start
+browsing (for instance the index page). The usual selectors are
+:class:`~cubicweb.selectors.none_rset` or :class:`~cubicweb.selectors.yes`.
 
-The usual selectors are no_rset or yes. These views don't apply to a
-result set.
+You'll find here a description of startup views provided by the framework.
 
-*index*
-    This view defines the home page of your application. It does not require
-    a result set to apply to.
+.. automodule:: cubicweb.web.views.startup
+
+
+Other startup views:
 
 *schema*
     A view dedicated to the display of the schema of the instance
 
+.. XXX to be continued
\ No newline at end of file
--- a/doc/book/en/intro/concepts.rst	Mon Sep 26 18:37:23 2011 +0200
+++ b/doc/book/en/intro/concepts.rst	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,3 @@
-
 .. -*- coding: utf-8 -*-
 
 .. _Concepts:
@@ -31,7 +30,7 @@
 .. note::
 
  The command :command:`cubicweb-ctl list` displays the list of cubes
-installed on your system.
+ installed on your system.
 
 .. _`CubicWeb.org Forge`: http://www.cubicweb.org/project/
 .. _`cubicweb-blog`: http://www.cubicweb.org/project/cubicweb-blog
--- a/doc/tools/pyjsrest.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/doc/tools/pyjsrest.py	Fri Dec 09 12:08:27 2011 +0100
@@ -102,7 +102,7 @@
     for fileid in INDEX_IN_ORDER:
         try:
             index.remove(fileid)
-        except:
+        except Exception:
             raise Exception(
         'Bad file id %s referenced in INDEX_IN_ORDER in %s, '
         'fix this please' % (fileid, __file__))
--- a/entities/adapters.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/entities/adapters.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2010-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -137,7 +137,7 @@
                 value = entity.printable_value(rschema, format='text/plain')
             except TransformError:
                 continue
-            except:
+            except Exception:
                 self.exception("can't add value of %s to text index for entity %s",
                                rschema, entity.eid)
                 continue
@@ -366,8 +366,8 @@
 class IProgressAdapter(EntityAdapter):
     """something that has a cost, a state and a progression.
 
-    You should at least override progress_info an in_progress methods on concret
-    implementations.
+    You should at least override progress_info and in_progress methods on
+    concrete implementations.
     """
     __needs_bw_compat__ = True
     __regid__ = 'IProgress'
--- a/entities/authobjs.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/entities/authobjs.py	Fri Dec 09 12:08:27 2011 +0100
@@ -98,7 +98,7 @@
             prop = self._cw.execute(
                 'CWProperty X WHERE X pkey %(k)s, X for_user U, U eid %(u)s',
                 {'k': pkey, 'u': self.eid}).get_entity(0, 0)
-        except:
+        except Exception:
             kwargs = dict(pkey=unicode(pkey), value=value)
             if self.is_in_group('managers'):
                 kwargs['for_user'] = self
--- a/entities/lib.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/entities/lib.py	Fri Dec 09 12:08:27 2011 +0100
@@ -40,6 +40,7 @@
 class EmailAddress(AnyEntity):
     __regid__ = 'EmailAddress'
     fetch_attrs, fetch_order = fetch_config(['address', 'alias'])
+    rest_attr = 'eid'
 
     def dc_title(self):
         if self.alias:
--- a/entities/test/unittest_wfobjs.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/entities/test/unittest_wfobjs.py	Fri Dec 09 12:08:27 2011 +0100
@@ -165,7 +165,7 @@
         user = self.user()
         iworkflowable = user.cw_adapt_to('IWorkflowable')
         iworkflowable.fire_transition('deactivate', comment=u'deactivate user')
-        user.clear_all_caches()
+        user.cw_clear_all_caches()
         self.assertEqual(iworkflowable.state, 'deactivated')
         self._test_manager_deactivate(user)
         trinfo = self._test_manager_deactivate(user)
@@ -192,7 +192,7 @@
         self.commit()
         iworkflowable.fire_transition('wake up')
         self.commit()
-        user.clear_all_caches()
+        user.cw_clear_all_caches()
         self.assertEqual(iworkflowable.state, 'deactivated')
 
     # XXX test managers can change state without matching transition
@@ -274,14 +274,14 @@
         self.assertEqual(iworkflowable.subworkflow_input_transition(), None)
         iworkflowable.fire_transition('swftr1', u'go')
         self.commit()
-        group.clear_all_caches()
+        group.cw_clear_all_caches()
         self.assertEqual(iworkflowable.current_state.eid, swfstate1.eid)
         self.assertEqual(iworkflowable.current_workflow.eid, swf.eid)
         self.assertEqual(iworkflowable.main_workflow.eid, mwf.eid)
         self.assertEqual(iworkflowable.subworkflow_input_transition().eid, swftr1.eid)
         iworkflowable.fire_transition('tr1', u'go')
         self.commit()
-        group.clear_all_caches()
+        group.cw_clear_all_caches()
         self.assertEqual(iworkflowable.current_state.eid, state2.eid)
         self.assertEqual(iworkflowable.current_workflow.eid, mwf.eid)
         self.assertEqual(iworkflowable.main_workflow.eid, mwf.eid)
@@ -295,10 +295,10 @@
         # force back to state1
         iworkflowable.change_state('state1', u'gadget')
         iworkflowable.fire_transition('swftr1', u'au')
-        group.clear_all_caches()
+        group.cw_clear_all_caches()
         iworkflowable.fire_transition('tr2', u'chapeau')
         self.commit()
-        group.clear_all_caches()
+        group.cw_clear_all_caches()
         self.assertEqual(iworkflowable.current_state.eid, state3.eid)
         self.assertEqual(iworkflowable.current_workflow.eid, mwf.eid)
         self.assertEqual(iworkflowable.main_workflow.eid, mwf.eid)
@@ -390,7 +390,7 @@
                                  ):
             iworkflowable.fire_transition(trans)
             self.commit()
-            group.clear_all_caches()
+            group.cw_clear_all_caches()
             self.assertEqual(iworkflowable.state, nextstate)
 
 
@@ -408,11 +408,11 @@
         wf.add_state('asleep', initial=True)
         self.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s',
                      {'wf': wf.eid, 'x': self.member.eid})
-        self.member.clear_all_caches()
+        self.member.cw_clear_all_caches()
         iworkflowable = self.member.cw_adapt_to('IWorkflowable')
         self.assertEqual(iworkflowable.state, 'activated')# no change before commit
         self.commit()
-        self.member.clear_all_caches()
+        self.member.cw_clear_all_caches()
         self.assertEqual(iworkflowable.current_workflow.eid, wf.eid)
         self.assertEqual(iworkflowable.state, 'asleep')
         self.assertEqual(iworkflowable.workflow_history, ())
@@ -429,7 +429,7 @@
         self.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s',
                      {'wf': wf.eid, 'x': self.member.eid})
         self.commit()
-        self.member.clear_all_caches()
+        self.member.cw_clear_all_caches()
         self.assertEqual(iworkflowable.current_workflow.eid, wf.eid)
         self.assertEqual(iworkflowable.state, 'asleep')
         self.assertEqual(parse_hist(iworkflowable.workflow_history),
@@ -472,10 +472,10 @@
         self.commit()
         self.execute('DELETE X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s',
                      {'wf': wf.eid, 'x': self.member.eid})
-        self.member.clear_all_caches()
+        self.member.cw_clear_all_caches()
         self.assertEqual(iworkflowable.state, 'asleep')# no change before commit
         self.commit()
-        self.member.clear_all_caches()
+        self.member.cw_clear_all_caches()
         self.assertEqual(iworkflowable.current_workflow.name, "default user workflow")
         self.assertEqual(iworkflowable.state, 'activated')
         self.assertEqual(parse_hist(iworkflowable.workflow_history),
@@ -504,13 +504,13 @@
         self.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s',
                      {'wf': wf.eid, 'x': user.eid})
         self.commit()
-        user.clear_all_caches()
+        user.cw_clear_all_caches()
         self.assertEqual(iworkflowable.state, 'asleep')
         self.assertEqual([t.name for t in iworkflowable.possible_transitions()],
                           ['rest'])
         iworkflowable.fire_transition('rest')
         self.commit()
-        user.clear_all_caches()
+        user.cw_clear_all_caches()
         self.assertEqual(iworkflowable.state, 'asleep')
         self.assertEqual([t.name for t in iworkflowable.possible_transitions()],
                           ['rest'])
@@ -520,7 +520,7 @@
         self.commit()
         iworkflowable.fire_transition('rest')
         self.commit()
-        user.clear_all_caches()
+        user.cw_clear_all_caches()
         self.assertEqual(iworkflowable.state, 'dead')
         self.assertEqual(parse_hist(iworkflowable.workflow_history),
                           [('asleep', 'asleep', 'rest', None),
@@ -557,7 +557,7 @@
     def setUp(self):
         CubicWebTC.setUp(self)
         self.wf = self.session.user.cw_adapt_to('IWorkflowable').current_workflow
-        self.session.set_pool()
+        self.session.set_cnxset()
         self.s_activated = self.wf.state_by_name('activated').eid
         self.s_deactivated = self.wf.state_by_name('deactivated').eid
         self.s_dummy = self.wf.add_state(u'dummy').eid
@@ -629,13 +629,13 @@
         iworkflowable = user.cw_adapt_to('IWorkflowable')
         iworkflowable.fire_transition('deactivate')
         cnx.commit()
-        session.set_pool()
+        session.set_cnxset()
         with self.assertRaises(ValidationError) as cm:
             iworkflowable.fire_transition('deactivate')
         self.assertEqual(self._cleanup_msg(cm.exception.errors['by_transition-subject']),
                                             u"transition isn't allowed from")
         cnx.rollback()
-        session.set_pool()
+        session.set_cnxset()
         # get back now
         iworkflowable.fire_transition('activate')
         cnx.commit()
--- a/entities/wfobjs.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/entities/wfobjs.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -326,8 +326,8 @@
             result[ep.subwf_state.eid] = ep.destination and ep.destination.eid
         return result
 
-    def clear_all_caches(self):
-        super(WorkflowTransition, self).clear_all_caches()
+    def cw_clear_all_caches(self):
+        super(WorkflowTransition, self).cw_clear_all_caches()
         clear_cache(self, 'exit_points')
 
 
--- a/entity.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/entity.py	Fri Dec 09 12:08:27 2011 +0100
@@ -395,8 +395,10 @@
 
     @cached
     def cw_metainformation(self):
-        res = dict(zip(('type', 'source', 'extid'), self._cw.describe(self.eid)))
-        res['source'] = self._cw.source_defs()[res['source']]
+        res = self._cw.describe(self.eid, asdict=True)
+        # use 'asource' and not 'source' since this is the actual source,
+        # while 'source' is the physical source (where it's stored)
+        res['source'] = self._cw.source_defs()[res.pop('asource')]
         return res
 
     def cw_check_perm(self, action):
@@ -431,9 +433,11 @@
         use_ext_id = False
         if 'base_url' not in kwargs and \
                getattr(self._cw, 'search_state', ('normal',))[0] == 'normal':
-            baseurl = self.cw_metainformation()['source'].get('base-url')
-            if baseurl:
-                kwargs['base_url'] = baseurl
+            sourcemeta = self.cw_metainformation()['source']
+            if sourcemeta.get('use-cwuri-as-url'):
+                return self.cwuri # XXX consider kwargs?
+            if sourcemeta.get('base-url'):
+                kwargs['base_url'] = sourcemeta['base-url']
                 use_ext_id = True
         if method in (None, 'view'):
             try:
@@ -718,12 +722,21 @@
                         self.cw_attr_cache[name] = value = None
             return value
 
-    def related(self, rtype, role='subject', limit=None, entities=False): # XXX .cw_related
+    def related(self, rtype, role='subject', limit=None, entities=False, # XXX .cw_related
+                safe=False):
         """returns a resultset of related entities
 
-        :param role: is the role played by 'self' in the relation ('subject' or 'object')
-        :param limit: resultset's maximum size
-        :param entities: if True, the entites are returned; if False, a result set is returned
+        :param rtype:
+          the name of the relation, aka relation type
+        :param role:
+          the role played by 'self' in the relation ('subject' or 'object')
+        :param limit:
+          resultset's maximum size
+        :param entities:
+          if True, the entites are returned; if False, a result set is returned
+        :param safe:
+          if True, an empty rset/list of entities will be returned in case of
+          :exc:`Unauthorized`, else (the default), the exception is propagated
         """
         try:
             return self._cw_relation_cache(rtype, role, entities, limit)
@@ -734,7 +747,12 @@
                 return []
             return self._cw.empty_rset()
         rql = self.cw_related_rql(rtype, role)
-        rset = self._cw.execute(rql, {'x': self.eid})
+        try:
+            rset = self._cw.execute(rql, {'x': self.eid})
+        except Unauthorized:
+            if not safe:
+                raise
+            rset = self._cw.empty_rset()
         self.cw_set_relation_cache(rtype, role, rset)
         return self.related(rtype, role, limit, entities)
 
@@ -773,7 +791,7 @@
                                        rql.split(' WHERE ', 1)[1])
         elif not ' ORDERBY ' in rql:
             args = rql.split(' WHERE ', 1)
-            # if modification_date already retreived, we should use it instead
+            # if modification_date already retrieved, we should use it instead
             # of adding another variable for sort. This should be be problematic
             # but it's actually with sqlserver, see ticket #694445
             if 'X modification_date ' in args[1]:
@@ -942,7 +960,7 @@
             assert role
             self._cw_related_cache.pop('%s_%s' % (rtype, role), None)
 
-    def clear_all_caches(self): # XXX cw_clear_all_caches
+    def cw_clear_all_caches(self):
         """flush all caches on this entity. Further attributes/relations access
         will triggers new database queries to get back values.
 
@@ -1024,6 +1042,10 @@
 
     # deprecated stuff #########################################################
 
+    @deprecated('[3.13] use entity.cw_clear_all_caches()')
+    def clear_all_caches(self):
+        return self.cw_clear_all_caches()
+
     @deprecated('[3.9] use entity.cw_attr_value(attr)')
     def get_value(self, name):
         return self.cw_attr_value(name)
--- a/etwist/http.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/etwist/http.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,7 +1,7 @@
 """twisted server for CubicWeb web instances
 
 :organization: Logilab
-:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
+:copyright: 2001-2011 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
 :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
 :license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
 """
@@ -25,25 +25,14 @@
     def _init_headers(self):
         if self._headers_out is None:
             return
-
-        # initialize cookies
-        cookies = self._headers_out.getHeader('set-cookie') or []
-        for cookie in cookies:
-            self._twreq.addCookie(cookie.name, cookie.value, cookie.expires,
-                                  cookie.domain, cookie.path, #TODO max-age
-                                  comment = cookie.comment, secure=cookie.secure)
-        self._headers_out.removeHeader('set-cookie')
-
-        # initialize other headers
-        for k, v in self._headers_out.getAllRawHeaders():
-            self._twreq.setHeader(k, v[0])
-
+        # initialize headers
+        for k, values in self._headers_out.getAllRawHeaders():
+            self._twreq.responseHeaders.setRawHeaders(k, values)
         # add content-length if not present
         if (self._headers_out.getHeader('content-length') is None
             and self._stream is not None):
            self._twreq.setHeader('content-length', len(self._stream))
 
-
     def _finalize(self):
         # we must set code before writing anything, else it's too late
         if self._code is not None:
--- a/etwist/request.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/etwist/request.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
--- a/etwist/server.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/etwist/server.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -17,14 +17,18 @@
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """twisted server for CubicWeb web instances"""
 
+from __future__ import with_statement
+
 __docformat__ = "restructuredtext en"
 
 import sys
 import os
+import os.path as osp
 import select
-import errno
 import traceback
 import threading
+import re
+from hashlib import md5 # pylint: disable=E0611
 from os.path import join
 from time import mktime
 from datetime import date, timedelta
@@ -41,7 +45,8 @@
 
 from logilab.common.decorators import monkeypatch
 
-from cubicweb import AuthenticationError, ConfigurationError, CW_EVENT_MANAGER
+from cubicweb import (AuthenticationError, ConfigurationError,
+                      CW_EVENT_MANAGER, CubicWebException)
 from cubicweb.utils import json_dumps
 from cubicweb.web import Redirect, DirectResponse, StatusResponse, LogOut
 from cubicweb.web.application import CubicWebPublisher
@@ -70,13 +75,85 @@
                             code=http.FORBIDDEN,
                             stream='Access forbidden')
 
-class File(static.File):
-    """Prevent from listing directories"""
+
+class NoListingFile(static.File):
+    def __init__(self, config, path=None):
+        if path is None:
+            path = config.static_directory
+        static.File.__init__(self, path)
+        self.config = config
+
+    def set_expires(self, request):
+        if not self.config.debugmode:
+            # XXX: Don't provide additional resource information to error responses
+            #
+            # the HTTP RFC recommends not going further than 1 year ahead
+            expires = date.today() + timedelta(days=6*30)
+            request.setHeader('Expires', generateDateTime(mktime(expires.timetuple())))
+
     def directoryListing(self):
         return ForbiddenDirectoryLister()
 
 
-class LongTimeExpiringFile(File):
+class DataLookupDirectory(NoListingFile):
+    def __init__(self, config, path):
+        self.md5_version = config.instance_md5_version()
+        NoListingFile.__init__(self, config, path)
+        self.here = path
+        self._defineChildResources()
+        if self.config.debugmode:
+            self.data_modconcat_basepath = '/data/??'
+        else:
+            self.data_modconcat_basepath = '/data/%s/??' % self.md5_version
+
+    def _defineChildResources(self):
+        self.putChild(self.md5_version, self)
+
+    def getChild(self, path, request):
+        if not path:
+            uri = request.uri
+            if uri.startswith('/https/'):
+                uri = uri[6:]
+            if uri.startswith(self.data_modconcat_basepath):
+                resource_relpath = uri[len(self.data_modconcat_basepath):]
+                if resource_relpath:
+                    paths = resource_relpath.split(',')
+                    try:
+                        self.set_expires(request)
+                        return ConcatFiles(self.config, paths)
+                    except ConcatFileNotFoundError:
+                        return self.childNotFound
+            return self.directoryListing()
+        childpath = join(self.here, path)
+        dirpath, rid = self.config.locate_resource(childpath)
+        if dirpath is None:
+            # resource not found
+            return self.childNotFound
+        filepath = os.path.join(dirpath, rid)
+        if os.path.isdir(filepath):
+            resource = DataLookupDirectory(self.config, childpath)
+            # cache resource for this segment path to avoid recomputing
+            # directory lookup
+            self.putChild(path, resource)
+            return resource
+        else:
+            self.set_expires(request)
+            return NoListingFile(self.config, filepath)
+
+
+class FCKEditorResource(NoListingFile):
+
+    def getChild(self, path, request):
+        pre_path = request.path.split('/')[1:]
+        if pre_path[0] == 'https':
+            pre_path.pop(0)
+            uiprops = self.config.https_uiprops
+        else:
+            uiprops = self.config.uiprops
+        return static.File(osp.join(uiprops['FCKEDITOR_PATH'], path))
+
+
+class LongTimeExpiringFile(DataLookupDirectory):
     """overrides static.File and sets a far future ``Expires`` date
     on the resouce.
 
@@ -88,28 +165,84 @@
       etc.
 
     """
-    def render(self, request):
-        # XXX: Don't provide additional resource information to error responses
-        #
-        # the HTTP RFC recommands not going further than 1 year ahead
-        expires = date.today() + timedelta(days=6*30)
-        request.setHeader('Expires', generateDateTime(mktime(expires.timetuple())))
-        return File.render(self, request)
+    def _defineChildResources(self):
+        pass
+
+
+class ConcatFileNotFoundError(CubicWebException):
+    pass
+
+
+class ConcatFiles(LongTimeExpiringFile):
+    def __init__(self, config, paths):
+        _, ext = osp.splitext(paths[0])
+        self._resources = {}
+        # create a unique / predictable filename. We don't consider cubes
+        # version since uicache is cleared at server startup, and file's dates
+        # are checked in debug mode
+        fname = 'cache_concat_' + md5(';'.join(paths)).hexdigest() + ext
+        filepath = osp.join(config.appdatahome, 'uicache', fname)
+        LongTimeExpiringFile.__init__(self, config, filepath)
+        self._concat_cached_filepath(filepath, paths)
+
+    def _resource(self, path):
+        try:
+            return self._resources[path]
+        except KeyError:
+            self._resources[path] = self.config.locate_resource(path)
+            return self._resources[path]
+
+    def _concat_cached_filepath(self, filepath, paths):
+        if not self._up_to_date(filepath, paths):
+            with open(filepath, 'wb') as f:
+                for path in paths:
+                    dirpath, rid = self._resource(path)
+                    if rid is None:
+                        # In production mode log an error, do not return a 404
+                        # XXX the erroneous content is cached anyway
+                        LOGGER.error('concatenated data url error: %r file '
+                                     'does not exist', path)
+                        if self.config.debugmode:
+                            raise ConcatFileNotFoundError(path)
+                    else:
+                        for line in open(osp.join(dirpath, rid)):
+                            f.write(line)
+                        f.write('\n')
+
+    def _up_to_date(self, filepath, paths):
+        """
+        The concat-file is considered up-to-date if it exists.
+        In debug mode, an additional check is performed to make sure that
+        concat-file is more recent than all concatenated files
+        """
+        if not osp.isfile(filepath):
+            return False
+        if self.config.debugmode:
+            concat_lastmod = os.stat(filepath).st_mtime
+            for path in paths:
+                dirpath, rid = self._resource(path)
+                if rid is None:
+                    raise ConcatFileNotFoundError(path)
+                path = osp.join(dirpath, rid)
+                if os.stat(path).st_mtime > concat_lastmod:
+                    return False
+        return True
 
 
 class CubicWebRootResource(resource.Resource):
     def __init__(self, config, vreg=None):
+        resource.Resource.__init__(self)
         self.config = config
         # instantiate publisher here and not in init_publisher to get some
         # checks done before daemonization (eg versions consistency)
         self.appli = CubicWebPublisher(config, vreg=vreg)
         self.base_url = config['base-url']
         self.https_url = config['https-url']
-        self.children = {}
-        self.static_directories = set(('data%s' % config.instance_md5_version(),
-                                       'data', 'static', 'fckeditor'))
         global MAX_POST_LENGTH
         MAX_POST_LENGTH = config['max-post-length']
+        self.putChild('static', NoListingFile(config))
+        self.putChild('fckeditor', FCKEditorResource(self.config, ''))
+        self.putChild('data', DataLookupDirectory(self.config, ''))
 
     def init_publisher(self):
         config = self.config
@@ -152,38 +285,6 @@
 
     def getChild(self, path, request):
         """Indicate which resource to use to process down the URL's path"""
-        pre_path = request.path.split('/')[1:]
-        if pre_path[0] == 'https':
-            pre_path.pop(0)
-            uiprops = self.config.https_uiprops
-        else:
-            uiprops = self.config.uiprops
-        directory = pre_path[0]
-        # Anything in data/, static/, fckeditor/ and the generated versioned
-        # data directory is treated as static files
-        if directory in self.static_directories:
-            # take care fckeditor may appears as root directory or as a data
-            # subdirectory
-            if directory == 'static':
-                return File(self.config.static_directory)
-            if directory == 'fckeditor':
-                return File(uiprops['FCKEDITOR_PATH'])
-            if directory != 'data':
-                # versioned directory, use specific file with http cache
-                # headers so their are cached for a very long time
-                cls = LongTimeExpiringFile
-            else:
-                cls = File
-            if path == 'fckeditor':
-                return cls(uiprops['FCKEDITOR_PATH'])
-            if path == directory: # recurse
-                return self
-            datadir, path = self.config.locate_resource(path)
-            if datadir is None:
-                return self # recurse
-            self.debug('static file %s from %s', path, datadir)
-            return cls(join(datadir, path))
-        # Otherwise we use this single resource
         return self
 
     def render(self, request):
@@ -208,7 +309,7 @@
             # so we deferred that part to the cubicweb thread
             request.process_multipart()
             return self._render_request(request)
-        except:
+        except Exception:
             errorstream = StringIO()
             traceback.print_exc(file=errorstream)
             return HTTPResponse(stream='<pre>%s</pre>' % errorstream.getvalue(),
@@ -302,6 +403,13 @@
                             stream=content, code=code,
                             headers=request.headers_out)
 
+    # these are overridden by set_log_methods below
+    # only defining here to prevent pylint from complaining
+    @classmethod
+    def debug(cls, msg, *a, **kw):
+        pass
+    info = warning = error = critical = exception = debug
+
 
 JSON_PATHS = set(('json',))
 FRAME_POST_PATHS = set(('validateform',))
@@ -409,6 +517,7 @@
     # serve it via standard HTTP on port set in the configuration
     port = config['port'] or 8080
     interface = config['interface']
+    reactor.suggestThreadPoolSize(config['webserver-threadpool-size'])
     reactor.listenTCP(port, website, interface=interface)
     if not config.debugmode:
         if sys.platform == 'win32':
@@ -421,12 +530,8 @@
             return whichproc # parent process
     root_resource.init_publisher() # before changing uid
     if config['uid'] is not None:
-        try:
-            uid = int(config['uid'])
-        except ValueError:
-            from pwd import getpwnam
-            uid = getpwnam(config['uid']).pw_uid
-        os.setuid(uid)
+        from logilab.common.daemon import setugid
+        setugid(config['uid'])
     root_resource.start_service()
     LOGGER.info('instance started on %s', root_resource.base_url)
     # avoid annoying warnign if not in Main Thread
--- a/etwist/test/unittest_server.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/etwist/test/unittest_server.py	Fri Dec 09 12:08:27 2011 +0100
@@ -15,8 +15,12 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+
+import os, os.path as osp, glob
+
 from cubicweb.devtools.testlib import CubicWebTC
-from cubicweb.etwist.server import host_prefixed_baseurl
+from cubicweb.etwist.server import (host_prefixed_baseurl, ConcatFiles,
+                                    ConcatFileNotFoundError)
 
 
 class HostPrefixedBaseURLTC(CubicWebTC):
@@ -50,3 +54,30 @@
         self._check('http://localhost:8080/hg/', 'code.cubicweb.org',
                     'http://localhost:8080/hg/')
 
+
+class ConcatFilesTC(CubicWebTC):
+
+    def tearDown(self):
+        super(ConcatFilesTC, self).tearDown()
+        self._cleanup_concat_cache()
+        self.config.debugmode = False
+
+    def _cleanup_concat_cache(self):
+        uicachedir = osp.join(self.config.apphome, 'uicache')
+        for fname in glob.glob(osp.join(uicachedir, 'cache_concat_*')):
+            os.unlink(osp.join(uicachedir, fname))
+
+    def test_cache(self):
+        concat = ConcatFiles(self.config, ('cubicweb.ajax.js', 'jquery.js'))
+        self.failUnless(osp.isfile(concat.path))
+
+    def test_404(self):
+        # when not in debug mode, should not crash
+        ConcatFiles(self.config, ('cubicweb.ajax.js', 'dummy.js'))
+        # in debug mode, raise error
+        self.config.debugmode = True
+        try:
+            self.assertRaises(ConcatFileNotFoundError, ConcatFiles, self.config,
+                              ('cubicweb.ajax.js', 'dummy.js'))
+        finally:
+            self.config.debugmode = False
--- a/etwist/twconfig.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/etwist/twconfig.py	Fri Dec 09 12:08:27 2011 +0100
@@ -90,6 +90,13 @@
           'help': 'run a pyro server',
           'group': 'main', 'level': 1,
           }),
+        ('webserver-threadpool-size',
+         {'type': 'int',
+          'default': 4,
+          'help': "size of twisted's reactor threadpool. It should probably be not too \
+much greater than connection-poolsize",
+          'group': 'web', 'level': 3,
+          }),
         ) + WebConfiguration.options)
 
     def server_file(self):
--- a/etwist/twctl.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/etwist/twctl.py	Fri Dec 09 12:08:27 2011 +0100
@@ -17,6 +17,10 @@
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """cubicweb-clt handlers for twisted"""
 
+from os.path import join
+
+from logilab.common.shellutils import rm
+
 from cubicweb.toolsutils import CommandHandler
 from cubicweb.web.webctl import WebCreateHandler
 
@@ -32,6 +36,9 @@
 
     def start_server(self, config):
         from cubicweb.etwist import server
+        config.info('clear ui caches')
+        for cachedir in ('uicache', 'uicachehttps'):
+            rm(join(config.appdatahome, cachedir, '*'))
         return server.run(config)
 
 class TWStopHandler(CommandHandler):
--- a/ext/rest.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/ext/rest.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -71,7 +71,7 @@
     try:
         try:
             eid_num, rest = text.split(u':', 1)
-        except:
+        except ValueError:
             eid_num, rest = text, '#'+text
         eid_num = int(eid_num)
         if eid_num < 0:
@@ -105,11 +105,17 @@
     else:
         rql, vid = text, None
     _cw.ensure_ro_rql(rql)
-    rset = _cw.execute(rql, {'userid': _cw.user.eid})
-    if vid is None:
-        vid = vid_from_rset(_cw, rset, _cw.vreg.schema)
-    view = _cw.vreg['views'].select(vid, _cw, rset=rset)
-    content = view.render()
+    try:
+        rset = _cw.execute(rql, {'userid': _cw.user.eid})
+        if rset:
+            if vid is None:
+                vid = vid_from_rset(_cw, rset, _cw.vreg.schema)
+        else:
+            vid = 'noresult'
+        view = _cw.vreg['views'].select(vid, _cw, rset=rset)
+        content = view.render()
+    except Exception, exc:
+        content = 'an error occured while interpreting this rql directive: %r' % exc
     set_classes(options)
     return [nodes.raw('', content, format='html')], []
 
@@ -181,7 +187,7 @@
 try:
     from pygments import highlight
     from pygments.lexers import get_lexer_by_name
-    from pygments.formatters import HtmlFormatter
+    from pygments.formatters.html import HtmlFormatter
 except ImportError:
     pygments_directive = None
 else:
@@ -200,7 +206,7 @@
             context = state.document.settings.context
             context._cw.add_css('pygments.css')
         except AttributeError:
-            # used outside cubicweb
+            # used outside cubicweb XXX use hasattr instead
             pass
         return [nodes.raw('', parsed, format='html')]
 
--- a/ext/test/unittest_rest.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/ext/test/unittest_rest.py	Fri Dec 09 12:08:27 2011 +0100
@@ -63,6 +63,16 @@
         self.assert_(out.endswith('<a href="http://testing.fr/cubicweb/cwuser/anon" title="">anon</a>'
                                   '</td></tr></tbody></table></div>\n</div>\n</p>\n'))
 
+    def test_rql_role_with_vid_empty_rset(self):
+        context = self.context()
+        out = rest_publish(context, ':rql:`Any X WHERE X is CWUser, X login "nono":table`')
+        self.assert_(out.endswith('<p><div class="searchMessage"><strong>No result matching query</strong></div>\n</p>\n'))
+
+    def test_rql_role_with_unknown_vid(self):
+        context = self.context()
+        out = rest_publish(context, ':rql:`Any X WHERE X is CWUser:toto`')
+        self.assert_(out.startswith("<p>an error occured while interpreting this rql directive: ObjectNotFound(u'toto',)</p>"))
+
     def test_rql_role_without_vid(self):
         context = self.context()
         out = rest_publish(context, ':rql:`Any X WHERE X is CWUser`')
--- a/hooks/__init__.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/hooks/__init__.py	Fri Dec 09 12:08:27 2011 +0100
@@ -46,28 +46,26 @@
                 session.commit()
             finally:
                 session.close()
-        self.repo.looping_task(60*60*24, cleanup_old_transactions, self.repo)
+        if self.repo.config['undo-support']:
+            self.repo.looping_task(60*60*24, cleanup_old_transactions,
+                                   self.repo)
         def update_feeds(repo):
-            session = repo.internal_session()
-            try:
-                # don't iter on repo.sources which doesn't include copy based
-                # sources (the one we're looking for)
-                for source in repo.sources_by_eid.itervalues():
-                    if (not source.copy_based_source
-                        or not repo.config.source_enabled(source)
-                        or not source.config['synchronize']):
-                        continue
-                    try:
-                        stats = source.pull_data(session)
-                        if stats.get('created'):
-                            source.info('added %s entities', len(stats['created']))
-                        if stats.get('updated'):
-                            source.info('updated %s entities', len(stats['updated']))
-                        session.commit()
-                    except Exception, exc:
-                        session.exception('while trying to update feed %s', source)
-                        session.rollback()
-                    session.set_pool()
-            finally:
-                session.close()
+            # don't iter on repo.sources which doesn't include copy based
+            # sources (the one we're looking for)
+            for source in repo.sources_by_eid.itervalues():
+                if (not source.copy_based_source
+                    or not repo.config.source_enabled(source)
+                    or not source.config['synchronize']):
+                    continue
+                session = repo.internal_session(safe=True)
+                try:
+                    stats = source.pull_data(session)
+                    if stats.get('created'):
+                        source.info('added %s entities', len(stats['created']))
+                    if stats.get('updated'):
+                        source.info('updated %s entities', len(stats['updated']))
+                except Exception, exc:
+                    session.exception('while trying to update feed %s', source)
+                finally:
+                    session.close()
         self.repo.looping_task(60, update_feeds, self.repo)
--- a/hooks/metadata.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/hooks/metadata.py	Fri Dec 09 12:08:27 2011 +0100
@@ -23,6 +23,7 @@
 
 from cubicweb.selectors import is_instance
 from cubicweb.server import hook
+from cubicweb.server.edition import EditedEntity
 
 
 class MetaDataHook(hook.Hook):
@@ -41,8 +42,10 @@
     def __call__(self):
         timestamp = datetime.now()
         edited = self.entity.cw_edited
-        edited.setdefault('creation_date', timestamp)
-        edited.setdefault('modification_date', timestamp)
+        if not edited.get('creation_date'):
+            edited['creation_date'] = timestamp
+        if not edited.get('modification_date'):
+            edited['modification_date'] = timestamp
         if not self._cw.get_shared_data('do-not-insert-cwuri'):
             cwuri = u'%s%s' % (self._cw.base_url(), self.entity.eid)
             edited.setdefault('cwuri', cwuri)
@@ -68,8 +71,9 @@
     def precommit_event(self):
         session = self.session
         relations = [(eid, session.user.eid) for eid in self.get_data()
-                # don't consider entities that have been created and
-                # deleted in the same transaction
+                # don't consider entities that have been created and deleted in
+                # the same transaction, nor ones where created_by has been
+                # explicitly set
                 if not session.deleted_in_transaction(eid) and \
                    not session.entity_from_eid(eid).created_by]
         session.add_relations([('created_by', relations)])
@@ -141,3 +145,87 @@
             session.repo.system_source.index_entity(
                 session, session.entity_from_eid(self.eidto))
 
+
+
+# entity source handling #######################################################
+
+class ChangeEntityUpdateCaches(hook.Operation):
+    oldsource = newsource = entity = None # make pylint happy
+
+    def postcommit_event(self):
+        self.oldsource.reset_caches()
+        repo = self.session.repo
+        entity = self.entity
+        extid = entity.cw_metainformation()['extid']
+        repo._type_source_cache[entity.eid] = (
+            entity.__regid__, self.newsource.uri, None, self.newsource.uri)
+        if self.oldsource.copy_based_source:
+            uri = 'system'
+        else:
+            uri = self.oldsource.uri
+        repo._extid_cache[(extid, uri)] = -entity.eid
+
+class ChangeEntitySourceDeleteHook(MetaDataHook):
+    """support for moving an entity from an external source by watching 'Any
+    cw_source CWSource' relation
+    """
+
+    __regid__ = 'cw.metadata.source-change'
+    __select__ = MetaDataHook.__select__ & hook.match_rtype('cw_source')
+    events = ('before_delete_relation',)
+
+    def __call__(self):
+        if (self._cw.deleted_in_transaction(self.eidfrom)
+            or self._cw.deleted_in_transaction(self.eidto)):
+            return
+        schange = self._cw.transaction_data.setdefault('cw_source_change', {})
+        schange[self.eidfrom] = self.eidto
+
+
+class ChangeEntitySourceAddHook(MetaDataHook):
+    __regid__ = 'cw.metadata.source-change'
+    __select__ = MetaDataHook.__select__ & hook.match_rtype('cw_source')
+    events = ('before_add_relation',)
+
+    def __call__(self):
+        schange = self._cw.transaction_data.get('cw_source_change')
+        if schange is not None and self.eidfrom in schange:
+            newsource = self._cw.entity_from_eid(self.eidto)
+            if newsource.name != 'system':
+                raise Exception('changing source to something else than the '
+                                'system source is unsupported')
+            syssource = newsource.repo_source
+            oldsource = self._cw.entity_from_eid(schange[self.eidfrom])
+            entity = self._cw.entity_from_eid(self.eidfrom)
+            # copy entity if necessary
+            if not oldsource.repo_source.copy_based_source:
+                entity.complete(skip_bytes=False)
+                if not entity.creation_date:
+                    entity.cw_attr_cache['creation_date'] = datetime.now()
+                if not entity.modification_date:
+                    entity.cw_attr_cache['modification_date'] = datetime.now()
+                entity.cw_attr_cache['cwuri'] = u'%s%s' % (self._cw.base_url(), entity.eid)
+                for rschema, attrschema in entity.e_schema.attribute_definitions():
+                    if attrschema == 'Password' and \
+                       rschema.rdef(entity.e_schema, attrschema).cardinality[0] == '1':
+                        from logilab.common.shellutils import generate_password
+                        entity.cw_attr_cache[rschema.type] = generate_password()
+                entity.cw_edited = EditedEntity(entity, **entity.cw_attr_cache)
+                syssource.add_entity(self._cw, entity)
+            # we don't want the moved entity to be reimported later.  To
+            # distinguish this state, the trick is to change the associated
+            # record in the 'entities' system table with eid=-eid while leaving
+            # other fields unchanged, and to add a new record with eid=eid,
+            # source='system'. External sources will then consider the case
+            # where `extid2eid` returns a negative eid as 'this entity was known
+            # but has been moved, ignore it'.
+            self._cw.system_sql('UPDATE entities SET eid=-eid WHERE eid=%(eid)s',
+                                {'eid': self.eidfrom})
+            attrs = {'type': entity.__regid__, 'eid': entity.eid, 'extid': None,
+                     'source': 'system', 'asource': 'system',
+                     'mtime': datetime.now()}
+            self._cw.system_sql(syssource.sqlgen.insert('entities', attrs), attrs)
+            # register an operation to update repository/sources caches
+            ChangeEntityUpdateCaches(self._cw, entity=entity,
+                                     oldsource=oldsource.repo_source,
+                                     newsource=syssource)
--- a/hooks/notification.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/hooks/notification.py	Fri Dec 09 12:08:27 2011 +0100
@@ -28,6 +28,8 @@
 
 class RenderAndSendNotificationView(hook.Operation):
     """delay rendering of notification view until precommit"""
+    view = None # make pylint happy
+
     def precommit_event(self):
         view = self.view
         if view.cw_rset is not None and not view.cw_rset:
@@ -191,7 +193,7 @@
     def _call(self):
         try:
             title = self.entity.dc_title()
-        except:
+        except Exception:
             # may raise an error during deletion process, for instance due to
             # missing required relation
             title = '#%s' % self.entity.eid
--- a/hooks/syncschema.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/hooks/syncschema.py	Fri Dec 09 12:08:27 2011 +0100
@@ -42,12 +42,15 @@
 TYPE_CONVERTER = { # XXX
     'Boolean': bool,
     'Int': int,
+    'BigInt': int,
     'Float': float,
     'Password': str,
     'String': unicode,
     'Date' : unicode,
     'Datetime' : unicode,
     'Time' : unicode,
+    'TZDatetime' : unicode,
+    'TZTime' : unicode,
     }
 
 # core entity and relation types which can't be removed
@@ -84,7 +87,7 @@
         session.system_sql(str('ALTER TABLE %s ADD %s integer'
                                % (table, column)), rollback_on_failure=False)
         session.info('added column %s to table %s', column, table)
-    except:
+    except Exception:
         # silent exception here, if this error has not been raised because the
         # column already exists, index creation will fail anyway
         session.exception('error while adding column %s to table %s',
@@ -92,7 +95,7 @@
     # create index before alter table which may expectingly fail during test
     # (sqlite) while index creation should never fail (test for index existence
     # is done by the dbhelper)
-    session.pool.source('system').create_index(session, table, column)
+    session.cnxset.source('system').create_index(session, table, column)
     session.info('added index on %s(%s)', table, column)
 
 
@@ -218,8 +221,8 @@
             cwuser_cls = self.session.vreg['etypes'].etype_class('CWUser')
             for session in repo._sessions.values():
                 session.user.__class__ = cwuser_cls
-        except:
-            self.critical('error while setting schmea', exc_info=True)
+        except Exception:
+            self.critical('error while setting schema', exc_info=True)
 
     def rollback_event(self):
         self.precommit_event()
@@ -243,6 +246,7 @@
       CWAttribute entities
     * add owned_by relation by creating the necessary CWRelation entity
     """
+    entity = None # make pylint happy
 
     def precommit_event(self):
         session = self.session
@@ -252,7 +256,7 @@
                                description=entity.description)
         eschema = schema.add_entity_type(etype)
         # create the necessary table
-        tablesql = y2sql.eschema2sql(session.pool.source('system').dbhelper,
+        tablesql = y2sql.eschema2sql(session.cnxset.source('system').dbhelper,
                                      eschema, prefix=SQL_PREFIX)
         for sql in tablesql.split(';'):
             if sql.strip():
@@ -289,7 +293,7 @@
         self.session.vreg.schema.rename_entity_type(oldname, newname)
         # we need sql to operate physical changes on the system database
         sqlexec = self.session.system_sql
-        dbhelper= self.session.pool.source('system').dbhelper
+        dbhelper= self.session.cnxset.source('system').dbhelper
         sql = dbhelper.sql_rename_table(SQL_PREFIX+oldname,
                                         SQL_PREFIX+newname)
         sqlexec(sql)
@@ -433,7 +437,7 @@
         # update the in-memory schema first
         rdefdef = self.init_rdef(**props)
         # then make necessary changes to the system source database
-        syssource = session.pool.source('system')
+        syssource = session.cnxset.source('system')
         attrtype = y2sql.type_from_constraints(
             syssource.dbhelper, rdefdef.object, rdefdef.constraints)
         # XXX should be moved somehow into lgdb: sqlite doesn't support to
@@ -606,7 +610,7 @@
         self.oldvalues = dict( (attr, getattr(rdef, attr)) for attr in self.values)
         rdef.update(self.values)
         # then make necessary changes to the system source database
-        syssource = session.pool.source('system')
+        syssource = session.cnxset.source('system')
         if 'indexed' in self.values:
             syssource.update_rdef_indexed(session, rdef)
             self.indexed_changed = True
@@ -624,7 +628,7 @@
         # revert changes on in memory schema
         self.rdef.update(self.oldvalues)
         # revert changes on database
-        syssource = self.session.pool.source('system')
+        syssource = self.session.cnxset.source('system')
         if self.indexed_changed:
             syssource.update_rdef_indexed(self.session, self.rdef)
         if self.null_allowed_changed:
@@ -652,7 +656,7 @@
         rdef.constraints.remove(self.oldcstr)
         # then update database: alter the physical schema on size/unique
         # constraint changes
-        syssource = session.pool.source('system')
+        syssource = session.cnxset.source('system')
         cstrtype = self.oldcstr.type()
         if cstrtype == 'SizeConstraint':
             syssource.update_rdef_column(session, rdef)
@@ -668,7 +672,7 @@
         if self.oldcstr is not None:
             self.rdef.constraints.append(self.oldcstr)
         # revert changes on database
-        syssource = self.session.pool.source('system')
+        syssource = self.session.cnxset.source('system')
         if self.size_cstr_changed:
             syssource.update_rdef_column(self.session, self.rdef)
         if self.unique_changed:
@@ -699,7 +703,7 @@
         rdef.constraints.append(newcstr)
         # then update database: alter the physical schema on size/unique
         # constraint changes
-        syssource = session.pool.source('system')
+        syssource = session.cnxset.source('system')
         if cstrtype == 'SizeConstraint' and (oldcstr is None or
                                              oldcstr.max != newcstr.max):
             syssource.update_rdef_column(session, rdef)
@@ -716,7 +720,7 @@
         prefix = SQL_PREFIX
         table = '%s%s' % (prefix, self.entity.constraint_of[0].name)
         cols = ['%s%s' % (prefix, r.name) for r in self.entity.relations]
-        dbhelper= session.pool.source('system').dbhelper
+        dbhelper= session.cnxset.source('system').dbhelper
         sqls = dbhelper.sqls_create_multicol_unique_index(table, cols)
         for sql in sqls:
             session.system_sql(sql)
@@ -736,7 +740,7 @@
         session = self.session
         prefix = SQL_PREFIX
         table = '%s%s' % (prefix, self.entity.type)
-        dbhelper= session.pool.source('system').dbhelper
+        dbhelper= session.cnxset.source('system').dbhelper
         cols = ['%s%s' % (prefix, c) for c in self.cols]
         sqls = dbhelper.sqls_drop_multicol_unique_index(table, cols)
         for sql in sqls:
@@ -756,6 +760,8 @@
 
 class MemSchemaCWETypeDel(MemSchemaOperation):
     """actually remove the entity type from the instance's schema"""
+    etype = None # make pylint happy
+
     def postcommit_event(self):
         # del_entity_type also removes entity's relations
         self.session.vreg.schema.del_entity_type(self.etype)
@@ -763,6 +769,8 @@
 
 class MemSchemaCWRTypeAdd(MemSchemaOperation):
     """actually add the relation type to the instance's schema"""
+    rtypedef = None # make pylint happy
+
     def precommit_event(self):
         self.session.vreg.schema.add_relation_type(self.rtypedef)
 
@@ -772,6 +780,8 @@
 
 class MemSchemaCWRTypeDel(MemSchemaOperation):
     """actually remove the relation type from the instance's schema"""
+    rtype = None # make pylint happy
+
     def postcommit_event(self):
         try:
             self.session.vreg.schema.del_relation_type(self.rtype)
@@ -783,9 +793,10 @@
 class MemSchemaPermissionAdd(MemSchemaOperation):
     """synchronize schema when a *_permission relation has been added on a group
     """
+    eid = action = group_eid = expr = None # make pylint happy
 
     def precommit_event(self):
-        """the observed connections pool has been commited"""
+        """the observed connections.cnxset has been commited"""
         try:
             erschema = self.session.vreg.schema.schema_by_eid(self.eid)
         except KeyError:
@@ -793,7 +804,7 @@
             self.warning('no schema for %s', self.eid)
             return
         perms = list(erschema.action_permissions(self.action))
-        if hasattr(self, 'group_eid'):
+        if self.group_eid is not None:
             perm = self.session.entity_from_eid(self.group_eid).name
         else:
             perm = erschema.rql_expression(self.expr)
@@ -814,7 +825,7 @@
     """
 
     def precommit_event(self):
-        """the observed connections pool has been commited"""
+        """the observed connections set has been commited"""
         try:
             erschema = self.session.vreg.schema.schema_by_eid(self.eid)
         except KeyError:
@@ -827,7 +838,7 @@
                self.action in ('delete', 'add'): # XXX 3.6.1 migration
             return
         perms = list(erschema.action_permissions(self.action))
-        if hasattr(self, 'group_eid'):
+        if self.group_eid is not None:
             perm = self.session.entity_from_eid(self.group_eid).name
         else:
             perm = erschema.rql_expression(self.expr)
@@ -842,6 +853,7 @@
 
 
 class MemSchemaSpecializesAdd(MemSchemaOperation):
+    etypeeid = parentetypeeid = None # make pylint happy
 
     def precommit_event(self):
         eschema = self.session.vreg.schema.schema_by_eid(self.etypeeid)
@@ -853,6 +865,7 @@
 
 
 class MemSchemaSpecializesDel(MemSchemaOperation):
+    etypeeid = parentetypeeid = None # make pylint happy
 
     def precommit_event(self):
         try:
@@ -1228,7 +1241,7 @@
                         source.fti_index_entities(session, [container])
         if to_reindex:
             # Transaction has already been committed
-            session.pool.commit()
+            session.cnxset.commit()
 
 
 
--- a/hooks/syncsession.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/hooks/syncsession.py	Fri Dec 09 12:08:27 2011 +0100
@@ -40,7 +40,8 @@
 
 class _GroupOperation(hook.Operation):
     """base class for group operation"""
-    geid = None
+    cnxuser = None # make pylint happy
+
     def __init__(self, session, *args, **kwargs):
         """override to get the group name before actual groups manipulation:
 
@@ -55,8 +56,9 @@
 
 class _DeleteGroupOp(_GroupOperation):
     """synchronize user when a in_group relation has been deleted"""
+
     def postcommit_event(self):
-        """the observed connections pool has been commited"""
+        """the observed connections set has been commited"""
         groups = self.cnxuser.groups
         try:
             groups.remove(self.group)
@@ -67,7 +69,7 @@
 class _AddGroupOp(_GroupOperation):
     """synchronize user when a in_group relation has been added"""
     def postcommit_event(self):
-        """the observed connections pool has been commited"""
+        """the observed connections set has been commited"""
         groups = self.cnxuser.groups
         if self.group in groups:
             self.warning('user %s already in group %s', self.cnxuser,
@@ -97,7 +99,7 @@
         hook.Operation.__init__(self, session)
 
     def postcommit_event(self):
-        """the observed connections pool has been commited"""
+        """the observed connections set has been commited"""
         try:
             self.session.repo.close(self.cnxid)
         except BadConnectionId:
@@ -117,12 +119,12 @@
 
 # CWProperty hooks #############################################################
 
-
 class _DelCWPropertyOp(hook.Operation):
     """a user's custom properties has been deleted"""
+    cwpropdict = key = None # make pylint happy
 
     def postcommit_event(self):
-        """the observed connections pool has been commited"""
+        """the observed connections set has been commited"""
         try:
             del self.cwpropdict[self.key]
         except KeyError:
@@ -131,17 +133,19 @@
 
 class _ChangeCWPropertyOp(hook.Operation):
     """a user's custom properties has been added/changed"""
+    cwpropdict = key = value = None # make pylint happy
 
     def postcommit_event(self):
-        """the observed connections pool has been commited"""
+        """the observed connections set has been commited"""
         self.cwpropdict[self.key] = self.value
 
 
 class _AddCWPropertyOp(hook.Operation):
     """a user's custom properties has been added/changed"""
+    cwprop = None # make pylint happy
 
     def postcommit_event(self):
-        """the observed connections pool has been commited"""
+        """the observed connections set has been commited"""
         cwprop = self.cwprop
         if not cwprop.for_user:
             self.session.vreg['propertyvalues'][cwprop.pkey] = cwprop.value
--- a/hooks/syncsources.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/hooks/syncsources.py	Fri Dec 09 12:08:27 2011 +0100
@@ -19,6 +19,7 @@
 
 from socket import gethostname
 
+from logilab.common.decorators import clear_cache
 from yams.schema import role_name
 
 from cubicweb import ValidationError
@@ -30,7 +31,10 @@
     category = 'cw.sources'
 
 
+# repo sources synchronization #################################################
+
 class SourceAddedOp(hook.Operation):
+    entity = None # make pylint happy
     def postcommit_event(self):
         self.session.repo.add_source(self.entity)
 
@@ -51,7 +55,8 @@
 
 
 class SourceRemovedOp(hook.Operation):
-    def precommit_event(self):
+    uri = None # make pylint happy
+    def postcommit_event(self):
         self.session.repo.remove_source(self.uri)
 
 class SourceRemovedHook(SourceHook):
@@ -64,25 +69,59 @@
         SourceRemovedOp(self._cw, uri=self.entity.name)
 
 
-class SourceUpdatedOp(hook.DataOperationMixIn, hook.Operation):
+class SourceConfigUpdatedOp(hook.DataOperationMixIn, hook.Operation):
 
     def precommit_event(self):
         self.__processed = []
         for source in self.get_data():
-            conf = source.repo_source.check_config(source)
-            self.__processed.append( (source, conf) )
+            if not self.session.deleted_in_transaction(source.eid):
+                conf = source.repo_source.check_config(source)
+                self.__processed.append( (source, conf) )
 
     def postcommit_event(self):
         for source, conf in self.__processed:
             source.repo_source.update_config(source, conf)
 
+
+class SourceRenamedOp(hook.LateOperation):
+    oldname = newname = None # make pylint happy
+
+    def precommit_event(self):
+        source = self.session.repo.sources_by_uri[self.oldname]
+        if source.copy_based_source:
+            sql = 'UPDATE entities SET asource=%(newname)s WHERE asource=%(oldname)s'
+        else:
+            sql = 'UPDATE entities SET source=%(newname)s, asource=%(newname)s WHERE source=%(oldname)s'
+        self.session.system_sql(sql, {'oldname': self.oldname,
+                                      'newname': self.newname})
+
+    def postcommit_event(self):
+        repo = self.session.repo
+        # XXX race condition
+        source = repo.sources_by_uri.pop(self.oldname)
+        source.uri = self.newname
+        source.public_config['uri'] = self.newname
+        repo.sources_by_uri[self.newname] = source
+        repo._type_source_cache.clear()
+        clear_cache(repo, 'source_defs')
+        if not source.copy_based_source:
+            repo._extid_cache.clear()
+            repo._clear_planning_caches()
+            for cnxset in repo.cnxsets:
+                cnxset.source_cnxs[self.oldname] = cnxset.source_cnxs.pop(self.oldname)
+
+
 class SourceUpdatedHook(SourceHook):
     __regid__ = 'cw.sources.configupdate'
     __select__ = SourceHook.__select__ & is_instance('CWSource')
-    events = ('after_update_entity',)
+    events = ('before_update_entity',)
     def __call__(self):
         if 'config' in self.entity.cw_edited:
-            SourceUpdatedOp.get_instance(self._cw).add_data(self.entity)
+            SourceConfigUpdatedOp.get_instance(self._cw).add_data(self.entity)
+        if 'name' in self.entity.cw_edited:
+            oldname, newname = self.entity.cw_edited.oldnewvalue('name')
+            SourceRenamedOp(self._cw, oldname=oldname, newname=newname)
+
 
 class SourceHostConfigUpdatedHook(SourceHook):
     __regid__ = 'cw.sources.hostconfigupdate'
@@ -94,21 +133,23 @@
                    not 'config' in self.entity.cw_edited:
                 return
             try:
-                SourceUpdatedOp.get_instance(self._cw).add_data(self.entity.cwsource)
+                SourceConfigUpdatedOp.get_instance(self._cw).add_data(self.entity.cwsource)
             except IndexError:
                 # XXX no source linked to the host config yet
                 pass
 
 
-# source mapping synchronization. Expect cw_for_source/cw_schema are immutable
-# relations (i.e. can't change from a source or schema to another).
+# source mapping synchronization ###############################################
+#
+# Expect cw_for_source/cw_schema are immutable relations (i.e. can't change from
+# a source or schema to another).
 
-class SourceMappingDeleteHook(SourceHook):
+class SourceMappingImmutableHook(SourceHook):
     """check cw_for_source and cw_schema are immutable relations
 
     XXX empty delete perms would be enough?
     """
-    __regid__ = 'cw.sources.delschemaconfig'
+    __regid__ = 'cw.sources.mapping.immutable'
     __select__ = SourceHook.__select__ & hook.match_rtype('cw_for_source', 'cw_schema')
     events = ('before_add_relation',)
     def __call__(self):
--- a/hooks/test/unittest_syncschema.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/hooks/test/unittest_syncschema.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -36,9 +36,9 @@
         self.__class__.schema_eids = schema_eids_idx(self.repo.schema)
 
     def index_exists(self, etype, attr, unique=False):
-        self.session.set_pool()
-        dbhelper = self.session.pool.source('system').dbhelper
-        sqlcursor = self.session.pool['system']
+        self.session.set_cnxset()
+        dbhelper = self.session.cnxset.source('system').dbhelper
+        sqlcursor = self.session.cnxset['system']
         return dbhelper.index_exists(sqlcursor, SQL_PREFIX + etype, SQL_PREFIX + attr, unique=unique)
 
     def _set_perms(self, eid):
@@ -57,9 +57,9 @@
 
     def test_base(self):
         schema = self.repo.schema
-        self.session.set_pool()
-        dbhelper = self.session.pool.source('system').dbhelper
-        sqlcursor = self.session.pool['system']
+        self.session.set_cnxset()
+        dbhelper = self.session.cnxset.source('system').dbhelper
+        sqlcursor = self.session.cnxset['system']
         self.failIf(schema.has_entity('Societe2'))
         self.failIf(schema.has_entity('concerne2'))
         # schema should be update on insertion (after commit)
@@ -170,9 +170,9 @@
     # schema modification hooks tests #########################################
 
     def test_uninline_relation(self):
-        self.session.set_pool()
-        dbhelper = self.session.pool.source('system').dbhelper
-        sqlcursor = self.session.pool['system']
+        self.session.set_cnxset()
+        dbhelper = self.session.cnxset.source('system').dbhelper
+        sqlcursor = self.session.cnxset['system']
         self.failUnless(self.schema['state_of'].inlined)
         try:
             self.execute('SET X inlined FALSE WHERE X name "state_of"')
@@ -182,7 +182,7 @@
             self.failIf(self.index_exists('State', 'state_of'))
             rset = self.execute('Any X, Y WHERE X state_of Y')
             self.assertEqual(len(rset), 2) # user states
-        except:
+        except Exception:
             import traceback
             traceback.print_exc()
         finally:
@@ -195,9 +195,9 @@
             self.assertEqual(len(rset), 2)
 
     def test_indexed_change(self):
-        self.session.set_pool()
-        dbhelper = self.session.pool.source('system').dbhelper
-        sqlcursor = self.session.pool['system']
+        self.session.set_cnxset()
+        dbhelper = self.session.cnxset.source('system').dbhelper
+        sqlcursor = self.session.cnxset['system']
         try:
             self.execute('SET X indexed FALSE WHERE X relation_type R, R name "name"')
             self.failUnless(self.schema['name'].rdef('Workflow', 'String').indexed)
@@ -214,9 +214,9 @@
             self.failUnless(self.index_exists('Workflow', 'name'))
 
     def test_unique_change(self):
-        self.session.set_pool()
-        dbhelper = self.session.pool.source('system').dbhelper
-        sqlcursor = self.session.pool['system']
+        self.session.set_cnxset()
+        dbhelper = self.session.cnxset.source('system').dbhelper
+        sqlcursor = self.session.cnxset['system']
         try:
             self.execute('INSERT CWConstraint X: X cstrtype CT, DEF constrained_by X '
                          'WHERE CT name "UniqueConstraint", DEF relation_type RT, DEF from_entity E,'
--- a/hooks/workflow.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/hooks/workflow.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -45,6 +45,7 @@
 
 class _SetInitialStateOp(hook.Operation):
     """make initial state be a default state"""
+    entity = None # make pylint happy
 
     def precommit_event(self):
         session = self.session
@@ -61,6 +62,7 @@
 
 class _FireAutotransitionOp(hook.Operation):
     """try to fire auto transition after state changes"""
+    entity = None # make pylint happy
 
     def precommit_event(self):
         entity = self.entity
@@ -73,6 +75,7 @@
 
 class _WorkflowChangedOp(hook.Operation):
     """fix entity current state when changing its workflow"""
+    eid = wfeid = None # make pylint happy
 
     def precommit_event(self):
         # notice that enforcement that new workflow apply to the entity's type is
@@ -109,6 +112,7 @@
 
 
 class _CheckTrExitPoint(hook.Operation):
+    treid = None # make pylint happy
 
     def precommit_event(self):
         tr = self.session.entity_from_eid(self.treid)
@@ -122,6 +126,7 @@
 
 
 class _SubWorkflowExitOp(hook.Operation):
+    forentity = trinfo = None # make pylint happy
 
     def precommit_event(self):
         session = self.session
@@ -148,7 +153,7 @@
 
 class WorkflowHook(hook.Hook):
     __abstract__ = True
-    category = 'workflow'
+    category = 'metadata'
 
 
 class SetInitialStateHook(WorkflowHook):
@@ -160,21 +165,15 @@
         _SetInitialStateOp(self._cw, entity=self.entity)
 
 
-class PrepareStateChangeHook(WorkflowHook):
-    """record previous state information"""
-    __regid__ = 'cwdelstate'
-    __select__ = WorkflowHook.__select__ & hook.match_rtype('in_state')
-    events = ('before_delete_relation',)
+class FireTransitionHook(WorkflowHook):
+    """check the transition is allowed and add missing information into the
+    TrInfo entity.
 
-    def __call__(self):
-        self._cw.transaction_data.setdefault('pendingrelations', []).append(
-            (self.eidfrom, self.rtype, self.eidto))
-
-
-class FireTransitionHook(WorkflowHook):
-    """check the transition is allowed, add missing information. Expect that:
+    Expect that:
     * wf_info_for inlined relation is set
     * by_transition or to_state (managers only) inlined relation is set
+
+    Check for automatic transition to be fired at the end
     """
     __regid__ = 'wffiretransition'
     __select__ = WorkflowHook.__select__ & is_instance('TrInfo')
@@ -273,7 +272,7 @@
 
 
 class FiredTransitionHook(WorkflowHook):
-    """change related entity state"""
+    """change related entity state and handle exit of subworkflow"""
     __regid__ = 'wffiretransition'
     __select__ = WorkflowHook.__select__ & is_instance('TrInfo')
     events = ('after_add_entity',)
@@ -296,6 +295,7 @@
     __regid__ = 'wfcheckinstate'
     __select__ = WorkflowHook.__select__ & hook.match_rtype('in_state')
     events = ('before_add_relation',)
+    category = 'integrity'
 
     def __call__(self):
         session = self._cw
--- a/i18n/de.po	Mon Sep 26 18:37:23 2011 +0200
+++ b/i18n/de.po	Fri Dec 09 12:08:27 2011 +0100
@@ -162,6 +162,11 @@
 msgstr ""
 
 #, python-format
+msgid ""
+"'%s' action for in_state relation should at least have 'linkattr=name' option"
+msgstr ""
+
+#, python-format
 msgid "'%s' action requires 'linkattr' option"
 msgstr ""
 
@@ -220,6 +225,9 @@
 "können ein <a href=\"%s\">vollständiges Schema</a> mit Meta-Daten anzeigen.</"
 "div>"
 
+msgid "<no relation>"
+msgstr ""
+
 msgid "<not specified>"
 msgstr "<nicht spezifiziert>"
 
@@ -256,6 +264,12 @@
 msgid "BaseTransition_plural"
 msgstr "Übergänge (abstrakt)"
 
+msgid "BigInt"
+msgstr ""
+
+msgid "BigInt_plural"
+msgstr ""
+
 msgid "Bookmark"
 msgstr "Lesezeichen"
 
@@ -1073,12 +1087,6 @@
 msgid "add a CWRType"
 msgstr "einen Relationstyp hinzufügen"
 
-msgid "add a CWSource"
-msgstr ""
-
-msgid "add a CWSourceSchemaConfig"
-msgstr ""
-
 msgctxt "inlined:CWUser.use_email.subject"
 msgid "add a EmailAddress"
 msgstr "Email-Adresse hinzufügen"
@@ -1153,6 +1161,10 @@
 msgid "allowed transitions from this state"
 msgstr "erlaubte Übergänge von diesem Zustand"
 
+#, python-format
+msgid "allowed values for \"action\" are %s"
+msgstr ""
+
 msgid "allowed_transition"
 msgstr "erlaubter Ãœbergang"
 
@@ -1235,6 +1247,9 @@
 msgid "auto"
 msgstr "automatisch"
 
+msgid "autocomputed attribute used to ensure transition coherency"
+msgstr ""
+
 msgid "automatic"
 msgstr "automatisch"
 
@@ -1787,10 +1802,10 @@
 msgid "ctxcomponents_edit_box_description"
 msgstr "Box mit verfügbaren Aktionen für die angezeigten Daten"
 
-msgid "ctxcomponents_facet.filters"
+msgid "ctxcomponents_facet.filterbox"
 msgstr "Filter"
 
-msgid "ctxcomponents_facet.filters_description"
+msgid "ctxcomponents_facet.filterbox_description"
 msgstr "Box mit Filter für aktuelle Suchergebnis-Funktionalität"
 
 msgid "ctxcomponents_logo"
@@ -1896,10 +1911,6 @@
 msgid "cw_schema_object"
 msgstr ""
 
-msgctxt "CWAttribute"
-msgid "cw_schema_object"
-msgstr ""
-
 msgctxt "CWEType"
 msgid "cw_schema_object"
 msgstr ""
@@ -1957,6 +1968,9 @@
 msgid "data directory url"
 msgstr "URL des Daten-Pools"
 
+msgid "data model schema"
+msgstr "Schema der Website"
+
 msgid "data sources"
 msgstr ""
 
@@ -2274,9 +2288,6 @@
 msgid "eid"
 msgstr ""
 
-msgid "email address to use for notification"
-msgstr "E-Mail-Adresse für Mitteilungen."
-
 msgid "emails successfully sent"
 msgstr "E-Mails erfolgreich versandt."
 
@@ -2388,6 +2399,9 @@
 msgid "external page"
 msgstr "externe Seite"
 
+msgid "facet-loading-msg"
+msgstr ""
+
 msgid "facet.filters"
 msgstr ""
 
@@ -2572,9 +2586,6 @@
 "generische Relation, die anzeigt, dass eine Entität mit einer anderen Web-"
 "Ressource identisch ist (siehe http://www.w3.org/TR/owl-ref/#sameAs-def)."
 
-msgid "go back to the index page"
-msgstr "Zurück zur Index-Seite"
-
 msgid "granted to groups"
 msgstr "an Gruppen gewährt"
 
@@ -2605,6 +2616,18 @@
 msgid "groups"
 msgstr "Gruppen"
 
+msgid "groups allowed to add entities/relations of this type"
+msgstr ""
+
+msgid "groups allowed to delete entities/relations of this type"
+msgstr ""
+
+msgid "groups allowed to read entities/relations of this type"
+msgstr ""
+
+msgid "groups allowed to update entities/relations of this type"
+msgstr ""
+
 msgid "groups grant permissions to the user"
 msgstr "die Gruppen geben dem Nutzer Rechte"
 
@@ -2727,6 +2750,13 @@
 msgid "in_state_object"
 msgstr "Zustand von"
 
+msgid "in_synchronization"
+msgstr ""
+
+msgctxt "CWSource"
+msgid "in_synchronization"
+msgstr ""
+
 msgid "incontext"
 msgstr "im Kontext"
 
@@ -3184,6 +3214,15 @@
 msgid "no associated permissions"
 msgstr "keine entsprechende Berechtigung"
 
+msgid "no content next link"
+msgstr ""
+
+msgid "no content prev link"
+msgstr ""
+
+msgid "no edited fields specified"
+msgstr ""
+
 #, python-format
 msgid "no edited fields specified for entity %s"
 msgstr "kein Eingabefeld spezifiziert Für Entität %s"
@@ -3581,6 +3620,18 @@
 msgid "right"
 msgstr "rechts"
 
+msgid "rql expression allowing to add entities/relations of this type"
+msgstr ""
+
+msgid "rql expression allowing to delete entities/relations of this type"
+msgstr ""
+
+msgid "rql expression allowing to read entities/relations of this type"
+msgstr ""
+
+msgid "rql expression allowing to update entities/relations of this type"
+msgstr ""
+
 msgid "rql expressions"
 msgstr "RQL-Ausdrücke"
 
@@ -3617,9 +3668,6 @@
 msgid "searching for"
 msgstr "Suche nach"
 
-msgid "secondary"
-msgstr "sekundär"
-
 msgid "security"
 msgstr "Sicherheit"
 
@@ -3708,9 +3756,6 @@
 msgid "site documentation"
 msgstr "Dokumentation der Website"
 
-msgid "site schema"
-msgstr "Schema der Website"
-
 msgid "site title"
 msgstr "Titel der Website"
 
@@ -3722,9 +3767,6 @@
 msgid "siteinfo"
 msgstr ""
 
-msgid "some errors occurred:"
-msgstr "Einige Fehler sind aufgetreten"
-
 msgid "some later transaction(s) touch entity, undo them first"
 msgstr ""
 "Eine oder mehrere frühere Transaktion(en) betreffen die Tntität. Machen Sie "
@@ -3762,6 +3804,11 @@
 msgid "specifying %s is mandatory"
 msgstr ""
 
+msgid ""
+"start timestamp of the currently in synchronization, or NULL when no "
+"synchronization in progress."
+msgstr ""
+
 msgid "startup views"
 msgstr "Start-Ansichten"
 
@@ -3935,6 +3982,12 @@
 msgstr ""
 "Der Wert \"%s\" wird bereits benutzt, bitte verwenden Sie einen anderen Wert"
 
+msgid "there is no next page"
+msgstr ""
+
+msgid "there is no previous page"
+msgstr ""
+
 msgid "this action is not reversible!"
 msgstr "Achtung! Diese Aktion ist unumkehrbar."
 
@@ -4035,7 +4088,7 @@
 msgstr ""
 
 msgid "transaction undone"
-msgstr "Transaktion rückgängig gemacht"
+msgstr ""
 
 #, python-format
 msgid "transition %(tr)s isn't allowed from %(st)s"
@@ -4290,6 +4343,9 @@
 msgid "user preferences"
 msgstr "Nutzereinstellungen"
 
+msgid "user's email account"
+msgstr ""
+
 msgid "users"
 msgstr "Nutzer"
 
@@ -4319,30 +4375,30 @@
 msgid "value"
 msgstr "Wert"
 
+#, python-format
+msgid "value %(value)s must be %(op)s %(boundary)s"
+msgstr ""
+
+#, python-format
+msgid "value %(value)s must be <= %(boundary)s"
+msgstr ""
+
+#, python-format
+msgid "value %(value)s must be >= %(boundary)s"
+msgstr ""
+
 msgid "value associated to this key is not editable manually"
 msgstr ""
 "Der mit diesem Schlüssele verbundene Wert kann n icht manuell geändert "
 "werden."
 
 #, python-format
-msgid "value must be %(op)s %(boundary)s"
-msgstr "Der Wert muss %(op)s %(boundary)s sein."
-
-#, python-format
-msgid "value must be <= %(boundary)s"
-msgstr "Der Wert muss <= %(boundary)s sein."
+msgid "value should have maximum size of %s but found %s"
+msgstr ""
 
 #, python-format
-msgid "value must be >= %(boundary)s"
-msgstr "Der Wert muss >= %(boundary)s sein."
-
-#, python-format
-msgid "value should have maximum size of %s"
-msgstr "Der Wert darf höchstens %s betragen."
-
-#, python-format
-msgid "value should have minimum size of %s"
-msgstr "Der Wert muss mindestens %s betragen."
+msgid "value should have minimum size of %s but found %s"
+msgstr ""
 
 msgid "vcard"
 msgstr "VCard"
@@ -4488,76 +4544,3 @@
 #, python-format
 msgid "you should un-inline relation %s which is supported and may be crossed "
 msgstr ""
-
-#~ msgid "Attributes with non default permissions:"
-#~ msgstr "Attribute mit nicht-standard-Berechtigungen"
-
-#~ msgid "Entity types"
-#~ msgstr "Entitätstypen"
-
-#~ msgid "Index"
-#~ msgstr "Index"
-
-#~ msgid "Permissions for entity types"
-#~ msgstr "Berechtigungen für Entitätstypen"
-
-#~ msgid "Permissions for relations"
-#~ msgstr "Berechtigungen für Relationen"
-
-#~ msgid "Relation types"
-#~ msgstr "Relationstypen"
-
-#~ msgid "am/pm calendar (month)"
-#~ msgstr "am/pm Kalender (Monat)"
-
-#~ msgid "am/pm calendar (semester)"
-#~ msgstr "am/pm Kalender (Halbjahr)"
-
-#~ msgid "am/pm calendar (week)"
-#~ msgstr "am/pm Kalender (Woche)"
-
-#~ msgid "am/pm calendar (year)"
-#~ msgstr "am/pm Kalender (Jahr)"
-
-#~ msgid "application entities"
-#~ msgstr "Anwendungs-Entitäten"
-
-#~ msgid "calendar (month)"
-#~ msgstr "Kalender (monatlich)"
-
-#~ msgid "calendar (semester)"
-#~ msgstr "Kalender (halbjährlich)"
-
-#~ msgid "calendar (week)"
-#~ msgstr "Kalender (wöchentlich)"
-
-#~ msgid "calendar (year)"
-#~ msgstr "Kalender (jährlich)"
-
-#~ msgid ""
-#~ "can't set inlined=%(inlined)s, %(stype)s %(rtype)s %(otype)s has "
-#~ "cardinality=%(card)s"
-#~ msgstr ""
-#~ "Kann 'inlined' = %(inlined)s nicht zuweisen, %(stype)s %(rtype)s %(otype)"
-#~ "s hat die Kardinalität %(card)s"
-
-#~ msgid "create an index page"
-#~ msgstr "Eine Index-Seite anlegen"
-
-#~ msgid "edit the index page"
-#~ msgstr "Index-Seite bearbeiten"
-
-#~ msgid "schema entities"
-#~ msgstr "Entitäten, die das Schema definieren"
-
-#~ msgid "schema-security"
-#~ msgstr "Rechte"
-
-#~ msgid "system entities"
-#~ msgstr "System-Entitäten"
-
-#~ msgid "timestamp of the latest source synchronization."
-#~ msgstr "Zeitstempel der letzten Synchronisierung mit der Quelle."
-
-#~ msgid "up"
-#~ msgstr "nach oben"
--- a/i18n/en.po	Mon Sep 26 18:37:23 2011 +0200
+++ b/i18n/en.po	Fri Dec 09 12:08:27 2011 +0100
@@ -5,7 +5,7 @@
 msgstr ""
 "Project-Id-Version: 2.0\n"
 "POT-Creation-Date: 2006-01-12 17:35+CET\n"
-"PO-Revision-Date: 2010-09-15 14:55+0200\n"
+"PO-Revision-Date: 2011-04-29 12:57+0200\n"
 "Last-Translator: Sylvain Thenault <sylvain.thenault@logilab.fr>\n"
 "Language-Team: English <devel@logilab.fr.org>\n"
 "Language: en\n"
@@ -154,6 +154,11 @@
 msgstr ""
 
 #, python-format
+msgid ""
+"'%s' action for in_state relation should at least have 'linkattr=name' option"
+msgstr ""
+
+#, python-format
 msgid "'%s' action requires 'linkattr' option"
 msgstr ""
 
@@ -209,6 +214,9 @@
 "can also display a <a href=\"%s\">complete schema with meta-data</a>.</div>"
 msgstr ""
 
+msgid "<no relation>"
+msgstr ""
+
 msgid "<not specified>"
 msgstr ""
 
@@ -245,6 +253,12 @@
 msgid "BaseTransition_plural"
 msgstr "Transitions (abstract)"
 
+msgid "BigInt"
+msgstr "Big integer"
+
+msgid "BigInt_plural"
+msgstr "Big integers"
+
 msgid "Bookmark"
 msgstr "Bookmark"
 
@@ -503,7 +517,7 @@
 msgstr "Interval"
 
 msgid "IntervalBoundConstraint"
-msgstr "interval constraint"
+msgstr "Interval constraint"
 
 msgid "Interval_plural"
 msgstr "Intervals"
@@ -1033,12 +1047,6 @@
 msgid "add a CWRType"
 msgstr "add a relation type"
 
-msgid "add a CWSource"
-msgstr "add a source"
-
-msgid "add a CWSourceSchemaConfig"
-msgstr "add an item to mapping "
-
 msgctxt "inlined:CWUser.use_email.subject"
 msgid "add a EmailAddress"
 msgstr "add an email address"
@@ -1111,6 +1119,10 @@
 msgid "allowed transitions from this state"
 msgstr ""
 
+#, python-format
+msgid "allowed values for \"action\" are %s"
+msgstr ""
+
 msgid "allowed_transition"
 msgstr "allowed transition"
 
@@ -1190,6 +1202,9 @@
 msgid "auto"
 msgstr "automatic"
 
+msgid "autocomputed attribute used to ensure transition coherency"
+msgstr ""
+
 msgid "automatic"
 msgstr ""
 
@@ -1742,10 +1757,10 @@
 msgid "ctxcomponents_edit_box_description"
 msgstr "box listing the applicable actions on the displayed data"
 
-msgid "ctxcomponents_facet.filters"
+msgid "ctxcomponents_facet.filterbox"
 msgstr "facets box"
 
-msgid "ctxcomponents_facet.filters_description"
+msgid "ctxcomponents_facet.filterbox_description"
 msgstr "box providing filter within current search results functionality"
 
 msgid "ctxcomponents_logo"
@@ -1851,10 +1866,6 @@
 msgid "cw_schema_object"
 msgstr "mapped by"
 
-msgctxt "CWAttribute"
-msgid "cw_schema_object"
-msgstr "mapped by"
-
 msgctxt "CWEType"
 msgid "cw_schema_object"
 msgstr "mapped by"
@@ -1912,6 +1923,9 @@
 msgid "data directory url"
 msgstr ""
 
+msgid "data model schema"
+msgstr ""
+
 msgid "data sources"
 msgstr ""
 
@@ -2219,9 +2233,6 @@
 msgid "eid"
 msgstr ""
 
-msgid "email address to use for notification"
-msgstr ""
-
 msgid "emails successfully sent"
 msgstr ""
 
@@ -2330,6 +2341,9 @@
 msgid "external page"
 msgstr ""
 
+msgid "facet-loading-msg"
+msgstr "processing, please wait"
+
 msgid "facet.filters"
 msgstr "filter"
 
@@ -2512,9 +2526,6 @@
 "object as a local one: http://www.w3.org/TR/owl-ref/#sameAs-def"
 msgstr ""
 
-msgid "go back to the index page"
-msgstr ""
-
 msgid "granted to groups"
 msgstr ""
 
@@ -2540,6 +2551,18 @@
 msgid "groups"
 msgstr ""
 
+msgid "groups allowed to add entities/relations of this type"
+msgstr ""
+
+msgid "groups allowed to delete entities/relations of this type"
+msgstr ""
+
+msgid "groups allowed to read entities/relations of this type"
+msgstr ""
+
+msgid "groups allowed to update entities/relations of this type"
+msgstr ""
+
 msgid "groups grant permissions to the user"
 msgstr ""
 
@@ -2654,6 +2677,13 @@
 msgid "in_state_object"
 msgstr "state of"
 
+msgid "in_synchronization"
+msgstr "in synchronization"
+
+msgctxt "CWSource"
+msgid "in_synchronization"
+msgstr "in synchronization"
+
 msgid "incontext"
 msgstr "in-context"
 
@@ -3095,6 +3125,15 @@
 msgid "no associated permissions"
 msgstr ""
 
+msgid "no content next link"
+msgstr ""
+
+msgid "no content prev link"
+msgstr ""
+
+msgid "no edited fields specified"
+msgstr ""
+
 #, python-format
 msgid "no edited fields specified for entity %s"
 msgstr ""
@@ -3488,6 +3527,18 @@
 msgid "right"
 msgstr ""
 
+msgid "rql expression allowing to add entities/relations of this type"
+msgstr ""
+
+msgid "rql expression allowing to delete entities/relations of this type"
+msgstr ""
+
+msgid "rql expression allowing to read entities/relations of this type"
+msgstr ""
+
+msgid "rql expression allowing to update entities/relations of this type"
+msgstr ""
+
 msgid "rql expressions"
 msgstr ""
 
@@ -3524,9 +3575,6 @@
 msgid "searching for"
 msgstr ""
 
-msgid "secondary"
-msgstr ""
-
 msgid "security"
 msgstr ""
 
@@ -3612,9 +3660,6 @@
 msgid "site documentation"
 msgstr ""
 
-msgid "site schema"
-msgstr ""
-
 msgid "site title"
 msgstr ""
 
@@ -3624,9 +3669,6 @@
 msgid "siteinfo"
 msgstr "site information"
 
-msgid "some errors occurred:"
-msgstr ""
-
 msgid "some later transaction(s) touch entity, undo them first"
 msgstr ""
 
@@ -3662,6 +3704,11 @@
 msgid "specifying %s is mandatory"
 msgstr ""
 
+msgid ""
+"start timestamp of the currently in synchronization, or NULL when no "
+"synchronization in progress."
+msgstr ""
+
 msgid "startup views"
 msgstr ""
 
@@ -3830,6 +3877,12 @@
 msgid "the value \"%s\" is already used, use another one"
 msgstr ""
 
+msgid "there is no next page"
+msgstr ""
+
+msgid "there is no previous page"
+msgstr ""
+
 msgid "this action is not reversible!"
 msgstr ""
 
@@ -4176,6 +4229,9 @@
 msgid "user preferences"
 msgstr ""
 
+msgid "user's email account"
+msgstr ""
+
 msgid "users"
 msgstr ""
 
@@ -4205,27 +4261,27 @@
 msgid "value"
 msgstr ""
 
+#, python-format
+msgid "value %(value)s must be %(op)s %(boundary)s"
+msgstr ""
+
+#, python-format
+msgid "value %(value)s must be <= %(boundary)s"
+msgstr ""
+
+#, python-format
+msgid "value %(value)s must be >= %(boundary)s"
+msgstr ""
+
 msgid "value associated to this key is not editable manually"
 msgstr ""
 
 #, python-format
-msgid "value must be %(op)s %(boundary)s"
-msgstr ""
-
-#, python-format
-msgid "value must be <= %(boundary)s"
+msgid "value should have maximum size of %s but found %s"
 msgstr ""
 
 #, python-format
-msgid "value must be >= %(boundary)s"
-msgstr ""
-
-#, python-format
-msgid "value should have maximum size of %s"
-msgstr ""
-
-#, python-format
-msgid "value should have minimum size of %s"
+msgid "value should have minimum size of %s but found %s"
 msgstr ""
 
 msgid "vcard"
@@ -4370,10 +4426,3 @@
 #, python-format
 msgid "you should un-inline relation %s which is supported and may be crossed "
 msgstr ""
-
-#~ msgctxt "CWAttribute"
-#~ msgid "relations_object"
-#~ msgstr "constrained by"
-
-#~ msgid "schema-security"
-#~ msgstr "permissions"
--- a/i18n/es.po	Mon Sep 26 18:37:23 2011 +0200
+++ b/i18n/es.po	Fri Dec 09 12:08:27 2011 +0100
@@ -163,6 +163,11 @@
 msgstr "la acción '%s'  no acepta opciones"
 
 #, python-format
+msgid ""
+"'%s' action for in_state relation should at least have 'linkattr=name' option"
+msgstr ""
+
+#, python-format
 msgid "'%s' action requires 'linkattr' option"
 msgstr "la acción '%s' requiere una opción 'linkattr'"
 
@@ -221,6 +226,9 @@
 "pero se puede ver a un <a href=\"%s\">modelo completo con meta-datos</a>.</"
 "div>"
 
+msgid "<no relation>"
+msgstr ""
+
 msgid "<not specified>"
 msgstr "<no especificado>"
 
@@ -257,6 +265,12 @@
 msgid "BaseTransition_plural"
 msgstr "Transiciones (abstractas)"
 
+msgid "BigInt"
+msgstr ""
+
+msgid "BigInt_plural"
+msgstr ""
+
 msgid "Bookmark"
 msgstr "Favorito"
 
@@ -1083,12 +1097,6 @@
 msgid "add a CWRType"
 msgstr "Agregar un tipo de relación"
 
-msgid "add a CWSource"
-msgstr "agregar una fuente"
-
-msgid "add a CWSourceSchemaConfig"
-msgstr "agregar una parte de mapeo"
-
 msgctxt "inlined:CWUser.use_email.subject"
 msgid "add a EmailAddress"
 msgstr "Agregar correo electrónico"
@@ -1163,6 +1171,10 @@
 msgid "allowed transitions from this state"
 msgstr "transiciones autorizadas desde este estado"
 
+#, python-format
+msgid "allowed values for \"action\" are %s"
+msgstr ""
+
 msgid "allowed_transition"
 msgstr "transiciones autorizadas"
 
@@ -1246,6 +1258,9 @@
 msgid "auto"
 msgstr "Automático"
 
+msgid "autocomputed attribute used to ensure transition coherency"
+msgstr ""
+
 msgid "automatic"
 msgstr "Automático"
 
@@ -1812,10 +1827,10 @@
 msgid "ctxcomponents_edit_box_description"
 msgstr "Muestra las acciones posibles a ejecutar para los datos seleccionados"
 
-msgid "ctxcomponents_facet.filters"
+msgid "ctxcomponents_facet.filterbox"
 msgstr "Filtros"
 
-msgid "ctxcomponents_facet.filters_description"
+msgid "ctxcomponents_facet.filterbox_description"
 msgstr "Muestra los filtros aplicables a una búsqueda realizada"
 
 msgid "ctxcomponents_logo"
@@ -1925,10 +1940,6 @@
 msgid "cw_schema_object"
 msgstr "mapeado por"
 
-msgctxt "CWAttribute"
-msgid "cw_schema_object"
-msgstr "mapeado por"
-
 msgctxt "CWEType"
 msgid "cw_schema_object"
 msgstr "mapeado por"
@@ -1986,6 +1997,9 @@
 msgid "data directory url"
 msgstr "Url del repertorio de datos"
 
+msgid "data model schema"
+msgstr "Esquema del Sistema"
+
 msgid "data sources"
 msgstr "fuente de datos"
 
@@ -2313,9 +2327,6 @@
 msgid "eid"
 msgstr "eid"
 
-msgid "email address to use for notification"
-msgstr "Dirección electrónica a utilizarse para notificar"
-
 msgid "emails successfully sent"
 msgstr "Mensajes enviados con éxito"
 
@@ -2431,6 +2442,9 @@
 msgid "external page"
 msgstr "Página externa"
 
+msgid "facet-loading-msg"
+msgstr ""
+
 msgid "facet.filters"
 msgstr "Filtros"
 
@@ -2615,9 +2629,6 @@
 "Relación genérica que indicar que una entidad es idéntica a otro recurso web "
 "(ver http://www.w3.org/TR/owl-ref/#sameAs-def)."
 
-msgid "go back to the index page"
-msgstr "Regresar a la página de inicio"
-
 msgid "granted to groups"
 msgstr "Otorgado a los grupos"
 
@@ -2647,6 +2658,18 @@
 msgid "groups"
 msgstr "Grupos"
 
+msgid "groups allowed to add entities/relations of this type"
+msgstr ""
+
+msgid "groups allowed to delete entities/relations of this type"
+msgstr ""
+
+msgid "groups allowed to read entities/relations of this type"
+msgstr ""
+
+msgid "groups allowed to update entities/relations of this type"
+msgstr ""
+
 msgid "groups grant permissions to the user"
 msgstr "Los grupos otorgan los permisos al usuario"
 
@@ -2770,6 +2793,13 @@
 msgid "in_state_object"
 msgstr "Estado de"
 
+msgid "in_synchronization"
+msgstr ""
+
+msgctxt "CWSource"
+msgid "in_synchronization"
+msgstr ""
+
 msgid "incontext"
 msgstr "En el contexto"
 
@@ -3152,11 +3182,11 @@
 
 msgctxt "CWSource"
 msgid "name"
-msgstr "nombre"
+msgstr ""
 
 msgctxt "State"
 msgid "name"
-msgstr "Nombre"
+msgstr "nombre"
 
 msgctxt "Transition"
 msgid "name"
@@ -3225,6 +3255,15 @@
 msgid "no associated permissions"
 msgstr "No existe permiso asociado"
 
+msgid "no content next link"
+msgstr ""
+
+msgid "no content prev link"
+msgstr ""
+
+msgid "no edited fields specified"
+msgstr ""
+
 #, python-format
 msgid "no edited fields specified for entity %s"
 msgstr "Ningún campo editable especificado para la entidad %s"
@@ -3631,6 +3670,18 @@
 msgid "right"
 msgstr "Derecha"
 
+msgid "rql expression allowing to add entities/relations of this type"
+msgstr ""
+
+msgid "rql expression allowing to delete entities/relations of this type"
+msgstr ""
+
+msgid "rql expression allowing to read entities/relations of this type"
+msgstr ""
+
+msgid "rql expression allowing to update entities/relations of this type"
+msgstr ""
+
 msgid "rql expressions"
 msgstr "Expresiones RQL"
 
@@ -3667,9 +3718,6 @@
 msgid "searching for"
 msgstr "Buscando"
 
-msgid "secondary"
-msgstr "Secundaria"
-
 msgid "security"
 msgstr "Seguridad"
 
@@ -3759,9 +3807,6 @@
 msgid "site documentation"
 msgstr "Documentación Sistema"
 
-msgid "site schema"
-msgstr "Esquema del Sistema"
-
 msgid "site title"
 msgstr "Nombre del Sistema"
 
@@ -3771,9 +3816,6 @@
 msgid "siteinfo"
 msgstr "información"
 
-msgid "some errors occurred:"
-msgstr "Algunos errores encontrados :"
-
 msgid "some later transaction(s) touch entity, undo them first"
 msgstr ""
 "Las transacciones más recientes modificaron esta entidad, anúlelas primero"
@@ -3812,6 +3854,11 @@
 msgid "specifying %s is mandatory"
 msgstr "especificar %s es obligatorio"
 
+msgid ""
+"start timestamp of the currently in synchronization, or NULL when no "
+"synchronization in progress."
+msgstr ""
+
 msgid "startup views"
 msgstr "Vistas de inicio"
 
@@ -3985,6 +4032,12 @@
 msgid "the value \"%s\" is already used, use another one"
 msgstr "El valor \"%s\" ya esta en uso, favor de utilizar otro"
 
+msgid "there is no next page"
+msgstr ""
+
+msgid "there is no previous page"
+msgstr ""
+
 msgid "this action is not reversible!"
 msgstr "Esta acción es irreversible!."
 
@@ -4085,7 +4138,7 @@
 msgstr "n° de transición"
 
 msgid "transaction undone"
-msgstr "Transacciones Anuladas"
+msgstr ""
 
 #, python-format
 msgid "transition %(tr)s isn't allowed from %(st)s"
@@ -4340,6 +4393,9 @@
 msgid "user preferences"
 msgstr "Preferencias"
 
+msgid "user's email account"
+msgstr ""
+
 msgid "users"
 msgstr "Usuarios"
 
@@ -4369,28 +4425,28 @@
 msgid "value"
 msgstr "Vampr"
 
+#, python-format
+msgid "value %(value)s must be %(op)s %(boundary)s"
+msgstr ""
+
+#, python-format
+msgid "value %(value)s must be <= %(boundary)s"
+msgstr ""
+
+#, python-format
+msgid "value %(value)s must be >= %(boundary)s"
+msgstr ""
+
 msgid "value associated to this key is not editable manually"
 msgstr "El valor asociado a este elemento no es editable manualmente"
 
 #, python-format
-msgid "value must be %(op)s %(boundary)s"
-msgstr "El valor debe ser %(op)s %(boundary)s"
-
-#, python-format
-msgid "value must be <= %(boundary)s"
-msgstr "El valor debe ser <= %(boundary)s"
+msgid "value should have maximum size of %s but found %s"
+msgstr ""
 
 #, python-format
-msgid "value must be >= %(boundary)s"
-msgstr "El valor debe ser >= %(boundary)s"
-
-#, python-format
-msgid "value should have maximum size of %s"
-msgstr "El valor no debe exceder de %s"
-
-#, python-format
-msgid "value should have minimum size of %s"
-msgstr "El valor no puede ser menor a %s"
+msgid "value should have minimum size of %s but found %s"
+msgstr ""
 
 msgid "vcard"
 msgstr "vcard"
@@ -4539,6 +4595,3 @@
 msgstr ""
 "usted debe  quitar la puesta en línea de la relación %s que es aceptada y "
 "puede ser cruzada"
-
-#~ msgid "add a %s"
-#~ msgstr "agregar un %s"
--- a/i18n/fr.po	Mon Sep 26 18:37:23 2011 +0200
+++ b/i18n/fr.po	Fri Dec 09 12:08:27 2011 +0100
@@ -162,6 +162,13 @@
 msgstr "l'action '%s' ne prend pas d'option"
 
 #, python-format
+msgid ""
+"'%s' action for in_state relation should at least have 'linkattr=name' option"
+msgstr ""
+"l'action '%s' pour la relation in_state doit au moins avoir l'option "
+"'linkattr=name'"
+
+#, python-format
 msgid "'%s' action requires 'linkattr' option"
 msgstr "l'action '%s' nécessite une option 'linkattr'"
 
@@ -219,6 +226,9 @@
 "<div>Ce schéma du modèle de données <em>exclue</em> les méta-données, mais "
 "vous pouvez afficher un <a href=\"%s\">schéma complet</a>.</div>"
 
+msgid "<no relation>"
+msgstr "<pas de relation>"
+
 msgid "<not specified>"
 msgstr "<non spécifié>"
 
@@ -255,6 +265,12 @@
 msgid "BaseTransition_plural"
 msgstr "Transitions (abstraites)"
 
+msgid "BigInt"
+msgstr "Entier long"
+
+msgid "BigInt_plural"
+msgstr "Entiers longs"
+
 msgid "Bookmark"
 msgstr "Signet"
 
@@ -1081,12 +1097,6 @@
 msgid "add a CWRType"
 msgstr "ajouter un type de relation"
 
-msgid "add a CWSource"
-msgstr "ajouter une source"
-
-msgid "add a CWSourceSchemaConfig"
-msgstr "ajouter une partie de mapping"
-
 msgctxt "inlined:CWUser.use_email.subject"
 msgid "add a EmailAddress"
 msgstr "ajouter une adresse électronique"
@@ -1161,6 +1171,10 @@
 msgid "allowed transitions from this state"
 msgstr "transitions autorisées depuis cet état"
 
+#, python-format
+msgid "allowed values for \"action\" are %s"
+msgstr "les valeurs autorisées pour \"action\" sont %s"
+
 msgid "allowed_transition"
 msgstr "transitions autorisées"
 
@@ -1244,6 +1258,10 @@
 msgid "auto"
 msgstr "automatique"
 
+msgid "autocomputed attribute used to ensure transition coherency"
+msgstr ""
+"attribut calculé automatiquement pour assurer la cohérence de la transition"
+
 msgid "automatic"
 msgstr "automatique"
 
@@ -1815,10 +1833,10 @@
 msgstr ""
 "boîte affichant les différentes actions possibles sur les données affichées"
 
-msgid "ctxcomponents_facet.filters"
+msgid "ctxcomponents_facet.filterbox"
 msgstr "boîte à facettes"
 
-msgid "ctxcomponents_facet.filters_description"
+msgid "ctxcomponents_facet.filterbox_description"
 msgstr ""
 "boîte permettant de filtrer parmi les résultats d'une recherche à l'aide de "
 "facettes"
@@ -1928,10 +1946,6 @@
 msgid "cw_schema_object"
 msgstr "mappé par"
 
-msgctxt "CWAttribute"
-msgid "cw_schema_object"
-msgstr "mappé par"
-
 msgctxt "CWEType"
 msgid "cw_schema_object"
 msgstr "mappé par"
@@ -1989,6 +2003,9 @@
 msgid "data directory url"
 msgstr "url du répertoire de données"
 
+msgid "data model schema"
+msgstr "schéma du modèle de données"
+
 msgid "data sources"
 msgstr "sources de données"
 
@@ -2312,9 +2329,6 @@
 msgid "eid"
 msgstr "eid"
 
-msgid "email address to use for notification"
-msgstr "adresse email à utiliser pour la notification"
-
 msgid "emails successfully sent"
 msgstr "courriels envoyés avec succès"
 
@@ -2429,6 +2443,9 @@
 msgid "external page"
 msgstr "page externe"
 
+msgid "facet-loading-msg"
+msgstr "en cours de traitement, merci de patienter"
+
 msgid "facet.filters"
 msgstr "facettes"
 
@@ -2613,9 +2630,6 @@
 "relation générique permettant d'indiquer qu'une entité est identique à une "
 "autre ressource web (voir http://www.w3.org/TR/owl-ref/#sameAs-def)."
 
-msgid "go back to the index page"
-msgstr "retourner sur la page d'accueil"
-
 msgid "granted to groups"
 msgstr "accordée aux groupes"
 
@@ -2646,6 +2660,18 @@
 msgid "groups"
 msgstr "groupes"
 
+msgid "groups allowed to add entities/relations of this type"
+msgstr "groupes autorisés à ajouter des entités/relations de ce type"
+
+msgid "groups allowed to delete entities/relations of this type"
+msgstr "groupes autorisés à supprimer des entités/relations de ce type"
+
+msgid "groups allowed to read entities/relations of this type"
+msgstr "groupes autorisés à lire des entités/relations de ce type"
+
+msgid "groups allowed to update entities/relations of this type"
+msgstr "groupes autorisés à mettre à jour des entités/relations de ce type"
+
 msgid "groups grant permissions to the user"
 msgstr "les groupes donnent des permissions à l'utilisateur"
 
@@ -2768,6 +2794,13 @@
 msgid "in_state_object"
 msgstr "état de"
 
+msgid "in_synchronization"
+msgstr "en cours de synchronisation"
+
+msgctxt "CWSource"
+msgid "in_synchronization"
+msgstr "en cours de synchronisation"
+
 msgid "incontext"
 msgstr "dans le contexte"
 
@@ -3207,7 +3240,7 @@
 msgstr "nombre d'entités dans la vue primaire"
 
 msgid "navigation.short-line-size"
-msgstr "description courtes"
+msgstr "taille des descriptions courtes"
 
 msgid "navtop"
 msgstr "haut de page du contenu principal"
@@ -3224,6 +3257,15 @@
 msgid "no associated permissions"
 msgstr "aucune permission associée"
 
+msgid "no content next link"
+msgstr "pas de lien 'suivant'"
+
+msgid "no content prev link"
+msgstr "pas de lien 'précédent'"
+
+msgid "no edited fields specified"
+msgstr "aucun champ à éditer spécifié"
+
 #, python-format
 msgid "no edited fields specified for entity %s"
 msgstr "aucun champ à éditer spécifié pour l'entité %s"
@@ -3632,6 +3674,19 @@
 msgid "right"
 msgstr "droite"
 
+msgid "rql expression allowing to add entities/relations of this type"
+msgstr "expression rql autorisant à ajouter des entités/relations de ce type"
+
+msgid "rql expression allowing to delete entities/relations of this type"
+msgstr "expression rql autorisant à supprimer des entités/relations de ce type"
+
+msgid "rql expression allowing to read entities/relations of this type"
+msgstr "expression rql autorisant à lire des entités/relations de ce type"
+
+msgid "rql expression allowing to update entities/relations of this type"
+msgstr ""
+"expression rql autorisant à mettre à jour des entités/relations de ce type"
+
 msgid "rql expressions"
 msgstr "conditions rql"
 
@@ -3668,9 +3723,6 @@
 msgid "searching for"
 msgstr "Recherche de"
 
-msgid "secondary"
-msgstr "secondaire"
-
 msgid "security"
 msgstr "sécurité"
 
@@ -3759,9 +3811,6 @@
 msgid "site documentation"
 msgstr "documentation du site"
 
-msgid "site schema"
-msgstr "schéma du site"
-
 msgid "site title"
 msgstr "titre du site"
 
@@ -3771,9 +3820,6 @@
 msgid "siteinfo"
 msgstr "informations"
 
-msgid "some errors occurred:"
-msgstr "des erreurs sont survenues"
-
 msgid "some later transaction(s) touch entity, undo them first"
 msgstr ""
 "des transactions plus récentes modifient cette entité, annulez les d'abord"
@@ -3813,6 +3859,12 @@
 msgid "specifying %s is mandatory"
 msgstr "spécifier %s est obligatoire"
 
+msgid ""
+"start timestamp of the currently in synchronization, or NULL when no "
+"synchronization in progress."
+msgstr ""
+"horodate de départ de la synchronisation en cours, ou NULL s'il n'y en a pas."
+
 msgid "startup views"
 msgstr "vues de départ"
 
@@ -3985,6 +4037,12 @@
 msgid "the value \"%s\" is already used, use another one"
 msgstr "la valeur \"%s\" est déjà utilisée, veuillez utiliser une autre valeur"
 
+msgid "there is no next page"
+msgstr "il n'y a pas de page suivante"
+
+msgid "there is no previous page"
+msgstr "il n'y a pas de page précédente"
+
 msgid "this action is not reversible!"
 msgstr ""
 "Attention ! Cette opération va détruire les données de façon irréversible."
@@ -4339,6 +4397,9 @@
 msgid "user preferences"
 msgstr "préférences utilisateur"
 
+msgid "user's email account"
+msgstr "email de l'utilisateur"
+
 msgid "users"
 msgstr "utilisateurs"
 
@@ -4368,28 +4429,28 @@
 msgid "value"
 msgstr "valeur"
 
+#, python-format
+msgid "value %(value)s must be %(op)s %(boundary)s"
+msgstr "la valeur %(value)s doit être %(op)s %(boundary)s"
+
+#, python-format
+msgid "value %(value)s must be <= %(boundary)s"
+msgstr "la valeur %(value)s doit être <= %(boundary)s"
+
+#, python-format
+msgid "value %(value)s must be >= %(boundary)s"
+msgstr "la valeur %(value)s doit être >= %(boundary)s"
+
 msgid "value associated to this key is not editable manually"
 msgstr "la valeur associée à cette clé n'est pas éditable manuellement"
 
 #, python-format
-msgid "value must be %(op)s %(boundary)s"
-msgstr "la valeur doit être %(op)s %(boundary)s"
-
-#, python-format
-msgid "value must be <= %(boundary)s"
-msgstr "la valeur doit être <= %(boundary)s"
+msgid "value should have maximum size of %s but found %s"
+msgstr "la taille maximum est %s mais cette valeur est de taille %s"
 
 #, python-format
-msgid "value must be >= %(boundary)s"
-msgstr "la valeur doit être >= %(boundary)s"
-
-#, python-format
-msgid "value should have maximum size of %s"
-msgstr "la valeur doit être de taille %s au maximum"
-
-#, python-format
-msgid "value should have minimum size of %s"
-msgstr "la valeur doit être de taille %s au minimum"
+msgid "value should have minimum size of %s but found %s"
+msgstr "la taille minimum est %s mais cette valeur est de taille %s"
 
 msgid "vcard"
 msgstr "vcard"
@@ -4539,66 +4600,3 @@
 msgstr ""
 "vous devriez enlevé la mise en ligne de la relation %s qui est supportée et "
 "peut-être croisée"
-
-#~ msgid "Attributes with non default permissions:"
-#~ msgstr "Attributs ayant des permissions non-standard"
-
-#~ msgid "Entity types"
-#~ msgstr "Types d'entités"
-
-#~ msgid "Permissions for entity types"
-#~ msgstr "Permissions pour les types d'entités"
-
-#~ msgid "Permissions for relations"
-#~ msgstr "Permissions pour les relations"
-
-#~ msgid "Relation types"
-#~ msgstr "Types de relation"
-
-#~ msgid "add a %s"
-#~ msgstr "ajouter un %s"
-
-#~ msgid "am/pm calendar (month)"
-#~ msgstr "calendrier am/pm (mois)"
-
-#~ msgid "am/pm calendar (semester)"
-#~ msgstr "calendrier am/pm (semestre)"
-
-#~ msgid "am/pm calendar (week)"
-#~ msgstr "calendrier am/pm (semaine)"
-
-#~ msgid "am/pm calendar (year)"
-#~ msgstr "calendrier am/pm (année)"
-
-#~ msgid "application entities"
-#~ msgstr "entités applicatives"
-
-#~ msgid "calendar (month)"
-#~ msgstr "calendrier (mensuel)"
-
-#~ msgid "calendar (semester)"
-#~ msgstr "calendrier (semestriel)"
-
-#~ msgid "calendar (week)"
-#~ msgstr "calendrier (hebdo)"
-
-#~ msgid "calendar (year)"
-#~ msgstr "calendrier (annuel)"
-
-#~ msgid "create an index page"
-#~ msgstr "créer une page d'accueil"
-
-#~ msgid "edit the index page"
-#~ msgstr "éditer la page d'accueil"
-
-#~ msgid "schema entities"
-#~ msgstr "entités définissant le schéma"
-
-#~ msgid "schema-security"
-#~ msgstr "permissions"
-
-#~ msgid "system entities"
-#~ msgstr "entités systèmes"
-
-#~ msgid "timestamp of the latest source synchronization."
-#~ msgstr "date de la dernière synchronisation avec la source."
--- a/mail.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/mail.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -21,10 +21,10 @@
 
 from base64 import b64encode, b64decode
 from time import time
-from email.MIMEMultipart import MIMEMultipart
-from email.MIMEText import MIMEText
-from email.MIMEImage import MIMEImage
-from email.Header import Header
+from email.mime.multipart import MIMEMultipart
+from email.mime.text import MIMEText
+from email.mime.image import MIMEImage
+from email.header import Header
 try:
     from socket import gethostname
 except ImportError:
@@ -67,7 +67,7 @@
         values = b64decode(str(values + '='*padding), '.-')
         values = dict(v.split('=') for v in values.split('&'))
         fromappid, host = qualif.split('.', 1)
-    except:
+    except Exception:
         return None
     if appid != fromappid or host != gethostname():
         return None
@@ -156,6 +156,10 @@
 
     msgid_timestamp = True
 
+    # to be defined on concrete sub-classes
+    content = None # body of the mail
+    message = None # action verb of the subject
+
     # this is usually the method to call
     def render_and_send(self, **kwargs):
         """generate and send an email message for this view"""
--- a/md5crypt.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/md5crypt.py	Fri Dec 09 12:08:27 2011 +0100
@@ -41,7 +41,7 @@
 MAGIC = '$1$'                        # Magic string
 ITOA64 = "./0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
 
-import hashlib as md5
+from hashlib import md5 # pylint: disable=E0611
 
 def to64 (v, n):
     ret = ''
@@ -63,7 +63,7 @@
     salt = salt.split('$', 1)[0]
     salt = salt[:8]
     ctx = pw + magic + salt
-    final = md5.md5(pw + salt + pw).digest()
+    final = md5(pw + salt + pw).digest()
     for pl in xrange(len(pw), 0, -16):
         if pl > 16:
             ctx = ctx + final[:16]
@@ -77,7 +77,7 @@
         else:
             ctx = ctx + pw[0]
         i = i >> 1
-    final = md5.md5(ctx).digest()
+    final = md5(ctx).digest()
     # The following is supposed to make
     # things run slower.
     # my question: WTF???
@@ -95,7 +95,7 @@
             ctx1 = ctx1 + final[:16]
         else:
             ctx1 = ctx1 + pw
-        final = md5.md5(ctx1).digest()
+        final = md5(ctx1).digest()
     # Final xform
     passwd = ''
     passwd = passwd + to64((int(ord(final[0])) << 16)
--- a/migration.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/migration.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -201,8 +201,8 @@
         if not ask_confirm or self.confirm(msg):
             return meth(*args, **kwargs)
 
-    def confirm(self, question, shell=True, abort=True, retry=False, pdb=False,
-                default='y'):
+    def confirm(self, question, # pylint: disable=E0202
+                shell=True, abort=True, retry=False, pdb=False, default='y'):
         """ask for confirmation and return true on positive answer
 
         if `retry` is true the r[etry] answer may return 2
@@ -269,7 +269,10 @@
         def unicode_raw_input(prompt):
             return unicode(raw_input(prompt), sys.stdin.encoding)
         interact(banner, readfunc=unicode_raw_input, local=local_ctx)
-        readline.write_history_file(histfile)
+        try:
+            readline.write_history_file(histfile)
+        except IOError:
+            pass
         # delete instance's confirm attribute to avoid questions
         del self.confirm
         self.need_wrap = True
@@ -411,7 +414,7 @@
         basecubes = [c for c in origcubes if not c in toremove]
         self.config._cubes = tuple(self.config.expand_cubes(basecubes))
         removed = [p for p in origcubes if not p in self.config._cubes]
-        if not cube in removed:
+        if not cube in removed and cube in origcubes:
             raise ConfigurationError("can't remove cube %s, "
                                      "used as a dependency" % cube)
         return removed
@@ -488,7 +491,7 @@
                     try:
                         oper, version = constraint.split()
                         self.reverse_dependencies[name].add( (oper, version, cube) )
-                    except:
+                    except Exception:
                         self.warnings.append(
                             'cube %s depends on %s but constraint badly '
                             'formatted: %s' % (cube, name, constraint))
--- a/misc/cwdesklets/rqlsensor/__init__.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/misc/cwdesklets/rqlsensor/__init__.py	Fri Dec 09 12:08:27 2011 +0100
@@ -15,9 +15,6 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""
-
-"""
 import webbrowser
 reload(webbrowser)
 
@@ -89,7 +86,7 @@
         cursor = cnx.cursor()
         try:
             rset = cursor.execute(rql)
-        except:
+        except Exception:
             del self._v_cnx
             raise
         self._urls = []
@@ -101,7 +98,7 @@
             output.set('resultbg[%s]' % i, 'black')
             try:
                 self._urls.append(base % 'Any X WHERE X eid %s' % line[0])
-            except:
+            except Exception:
                 self._urls.append('')
             i += 1
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/misc/migration/3.12.9_Any.py	Fri Dec 09 12:08:27 2011 +0100
@@ -0,0 +1,1 @@
+sync_schema_props_perms('cw_source')
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/misc/migration/3.13.0_Any.py	Fri Dec 09 12:08:27 2011 +0100
@@ -0,0 +1,3 @@
+sync_schema_props_perms('cw_source', syncprops=False)
+if schema['BigInt'].eid is None:
+    add_entity_type('BigInt')
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/misc/migration/3.13.3_Any.py	Fri Dec 09 12:08:27 2011 +0100
@@ -0,0 +1,2 @@
+drop_relation_definition('CWSourceSchemaConfig', 'cw_schema', 'CWAttribute')
+sync_schema_props_perms('cw_schema')
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/misc/migration/3.13.6_Any.py	Fri Dec 09 12:08:27 2011 +0100
@@ -0,0 +1,1 @@
+sync_schema_props_perms('CWSourceSchemaConfig')
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/misc/migration/3.13.8_Any.py	Fri Dec 09 12:08:27 2011 +0100
@@ -0,0 +1,5 @@
+change_attribute_type('CWCache', 'timestamp', 'TZDatetime')
+change_attribute_type('CWUser', 'last_login_time', 'TZDatetime')
+change_attribute_type('CWSource', 'latest_retrieval', 'TZDatetime')
+drop_attribute('CWSource', 'synchronizing')
+add_attribute('CWSource', 'in_synchronization')
--- a/misc/migration/bootstrapmigration_repository.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/misc/migration/bootstrapmigration_repository.py	Fri Dec 09 12:08:27 2011 +0100
@@ -35,6 +35,12 @@
     ss.execschemarql(rql, rdef, ss.rdef2rql(rdef, CSTRMAP, groupmap=None))
     commit(ask_confirm=False)
 
+if applcubicwebversion <= (3, 13, 0) and cubicwebversion >= (3, 13, 1):
+    sql('ALTER TABLE entities ADD asource VARCHAR(64)')
+    sql('UPDATE entities SET asource=cw_name  '
+        'FROM cw_CWSource, cw_source_relation '
+        'WHERE entities.eid=cw_source_relation.eid_from AND cw_source_relation.eid_to=cw_CWSource.cw_eid')
+
 if applcubicwebversion == (3, 6, 0) and cubicwebversion >= (3, 6, 0):
     CSTRMAP = dict(rql('Any T, X WHERE X is CWConstraintType, X name T',
                        ask_confirm=False))
@@ -49,7 +55,7 @@
 elif applcubicwebversion < (3, 6, 0) and cubicwebversion >= (3, 6, 0):
     CSTRMAP = dict(rql('Any T, X WHERE X is CWConstraintType, X name T',
                        ask_confirm=False))
-    session.set_pool()
+    session.set_cnxset()
     permsdict = ss.deserialize_ertype_permissions(session)
 
     with hooks_control(session, session.HOOKS_ALLOW_ALL, 'integrity'):
--- a/misc/scripts/drop_external_entities.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/misc/scripts/drop_external_entities.py	Fri Dec 09 12:08:27 2011 +0100
@@ -3,7 +3,7 @@
 
 sql("DELETE FROM entities WHERE type='Int'")
 
-ecnx = session.pool.connection(source)
+ecnx = session.cnxset.connection(source)
 for e in rql('Any X WHERE X cw_source S, S name %(name)s', {'name': source}).entities():
     meta = e.cw_metainformation()
     assert meta['source']['uri'] == source
@@ -15,7 +15,7 @@
     if suri != 'system':
         try:
             print 'deleting', e.__regid__, e.eid, suri, e.dc_title().encode('utf8')
-            repo.delete_info(session, e, suri, meta['extid'], scleanup=True)
+            repo.delete_info(session, e, suri, scleanup=e.eid)
         except UnknownEid:
             print '  cant delete', e.__regid__, e.eid, meta
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/misc/scripts/pyroforge2datafeed.py	Fri Dec 09 12:08:27 2011 +0100
@@ -0,0 +1,134 @@
+"""turn a pyro source into a datafeed source
+
+Once this script is run, execute c-c db-check to cleanup relation tables.
+"""
+import sys
+
+try:
+    source_name, = __args__
+    source = repo.sources_by_uri[source_name]
+except ValueError:
+    print('you should specify the source name as script argument (i.e. after --'
+          ' on the command line)')
+    sys.exit(1)
+except KeyError:
+    print '%s is not an active source' % source_name
+    sys.exit(1)
+
+# check source is reachable before doing anything
+try:
+    source.get_connection()._repo
+except AttributeError:
+    print '%s is not reachable. Fix this before running this script' % source_name
+    sys.exit(1)
+
+raw_input('Ensure you have shutdown all instances of this application before continuing.'
+          ' Type enter when ready.')
+
+system_source = repo.system_source
+
+from base64 import b64encode
+from cubicweb.server.edition import EditedEntity
+
+DONT_GET_BACK_ETYPES = set(( # XXX edit as desired
+        'State',
+        'RecipeStep', 'RecipeStepInput', 'RecipeStepOutput',
+        'RecipeTransition', 'RecipeTransitionCondition',
+        'NarvalConditionExpression', 'Recipe',
+        # XXX TestConfig
+        ))
+
+
+session.mode = 'write' # hold on the connections set
+
+print '******************** backport entity content ***************************'
+
+from cubicweb.server import debugged
+todelete = {}
+host = source.config['base-url'].split('://')[1]
+for entity in rql('Any X WHERE X cw_source S, S eid %(s)s', {'s': source.eid}).entities():
+        etype = entity.__regid__
+        if not source.support_entity(etype):
+            print "source doesn't support %s, delete %s" % (etype, entity.eid)
+        elif etype in DONT_GET_BACK_ETYPES:
+            print 'ignore %s, delete %s' % (etype, entity.eid)
+        else:
+            try:
+                entity.complete()
+                if not host in entity.cwuri:
+                    print 'SKIP foreign entity', entity.cwuri, source.config['base-url']
+                    continue
+            except Exception:
+                print '%s %s much probably deleted, delete it (extid %s)' % (
+                    etype, entity.eid, entity.cw_metainformation()['extid'])
+            else:
+                print 'get back', etype, entity.eid
+                entity.cw_edited = EditedEntity(entity, **entity.cw_attr_cache)
+                system_source.add_entity(session, entity)
+                sql("UPDATE entities SET asource=%(asource)s, source='system', extid=%(extid)s "
+                    "WHERE eid=%(eid)s", {'asource': source_name,
+                                          'extid': b64encode(entity.cwuri),
+                                          'eid': entity.eid})
+                continue
+        todelete.setdefault(etype, []).append(entity)
+
+# only cleanup entities table, remaining stuff should be cleaned by a c-c
+# db-check to be run after this script
+for entities in todelete.values():
+    system_source.delete_info_multi(session, entities, source_name)
+
+
+print '******************** backport mapping **********************************'
+session.disable_hook_categories('cw.sources')
+mapping = []
+for mappart in rql('Any X,SCH WHERE X cw_schema SCH, X cw_for_source S, S eid %(s)s',
+                   {'s': source.eid}).entities():
+    schemaent = mappart.cw_schema[0]
+    if schemaent.__regid__ != 'CWEType':
+        assert schemaent.__regid__ == 'CWRType'
+        sch = schema._eid_index[schemaent.eid]
+        for rdef in sch.rdefs.values():
+            if not source.support_entity(rdef.subject) \
+                    or not source.support_entity(rdef.object):
+                continue
+            if rdef.subject in DONT_GET_BACK_ETYPES \
+                    and rdef.object in DONT_GET_BACK_ETYPES:
+                print 'dont map', rdef
+                continue
+            if rdef.subject in DONT_GET_BACK_ETYPES:
+                options = u'action=link\nlinkattr=name'
+                roles = 'object',
+            elif rdef.object in DONT_GET_BACK_ETYPES:
+                options = u'action=link\nlinkattr=name'
+                roles = 'subject',
+            else:
+                options = u'action=copy'
+                if rdef.rtype in ('use_environment',):
+                    roles = 'object',
+                else:
+                    roles = 'subject',
+            print 'map', rdef, options, roles
+            for role in roles:
+                mapping.append( (
+                        (str(rdef.subject), str(rdef.rtype), str(rdef.object)),
+                        options + '\nrole=%s' % role) )
+    mappart.cw_delete()
+
+source_ent = rql('CWSource S WHERE S eid %(s)s', {'s': source.eid}).get_entity(0, 0)
+source_ent.init_mapping(mapping)
+
+# change source properties
+config = u'''synchronize=yes
+synchronization-interval=10min
+delete-entities=no
+'''
+rql('SET X type "datafeed", X parser "cw.entityxml", X url %(url)s, X config %(config)s '
+    'WHERE X eid %(x)s',
+    {'x': source.eid, 'config': config,
+     'url': source.config['base-url']+'/project'})
+
+
+commit()
+
+from cubes.apycot import recipes
+recipes.create_quick_recipe(session)
--- a/mttransforms.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/mttransforms.py	Fri Dec 09 12:08:27 2011 +0100
@@ -99,10 +99,10 @@
 
     def patch_convert(cls):
         def _convert(self, trdata, origconvert=cls._convert):
-            try:
-                trdata.appobject._cw.add_css('pygments.css')
-            except AttributeError: # session has no add_css, only http request
-                pass
+            add_css = getattr(trdata.appobject._cw, 'add_css', None)
+            if add_css is not None:
+                # session has no add_css, only http request
+                add_css('pygments.css')
             return origconvert(self, trdata)
         cls._convert = _convert
     patch_convert(pygmentstransforms.PygmentsHTMLTransform)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/pylintext.py	Fri Dec 09 12:08:27 2011 +0100
@@ -0,0 +1,41 @@
+"""https://pastebin.logilab.fr/show/860/"""
+
+from logilab.astng import MANAGER, nodes, scoped_nodes
+
+def turn_function_to_class(node):
+    """turn a Function node into a Class node (in-place)"""
+    node.__class__ = scoped_nodes.Class
+    node.bases = ()
+    # remove return nodes so that we don't get warned about 'return outside
+    # function' by pylint
+    for rnode in node.nodes_of_class(nodes.Return):
+        rnode.parent.body.remove(rnode)
+    # that seems to be enough :)
+
+
+def cubicweb_transform(module):
+    # handle objectify_selector decorator. Only look at module level functions,
+    # should be enough
+    for assnodes in module.locals.values():
+        for node in assnodes:
+            if isinstance(node, scoped_nodes.Function) and node.decorators:
+                for decorator in node.decorators.nodes:
+                    for infered in decorator.infer():
+                        if infered.name == 'objectify_selector':
+                            turn_function_to_class(node)
+                            break
+                    else:
+                        continue
+                    break
+    # add yams base types into 'yams.buildobjs', astng doesn't grasp globals()
+    # magic in there
+    if module.name == 'yams.buildobjs':
+        from yams import BASE_TYPES
+        for etype in BASE_TYPES:
+            module.locals[etype] = [scoped_nodes.Class(etype, None)]
+
+MANAGER.register_transformer(cubicweb_transform)
+
+def register(linter):
+    """called when loaded by pylint --load-plugins, nothing to do here"""
+
--- a/req.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/req.py	Fri Dec 09 12:08:27 2011 +0100
@@ -29,7 +29,7 @@
 from logilab.common.deprecation import deprecated
 from logilab.common.date import ustrftime, strptime, todate, todatetime
 
-from cubicweb import Unauthorized, RegistryException, typed_eid
+from cubicweb import Unauthorized, NoSelectableObject, typed_eid
 from cubicweb.rset import ResultSet
 
 ONESECOND = timedelta(0, 1, 0)
@@ -66,7 +66,7 @@
         self.vreg = vreg
         try:
             encoding = vreg.property_value('ui.encoding')
-        except: # no vreg or property not registered
+        except Exception: # no vreg or property not registered
             encoding = 'utf-8'
         self.encoding = encoding
         # cache result of execution for (rql expr / eids),
@@ -336,7 +336,7 @@
             initargs.update(kwargs)
         try:
             view =  self.vreg[__registry].select(__vid, self, rset=rset, **initargs)
-        except RegistryException:
+        except NoSelectableObject:
             if __fallback_oid is None:
                 raise
             view =  self.vreg[__registry].select(__fallback_oid, self,
@@ -409,7 +409,7 @@
 
     # abstract methods to override according to the web front-end #############
 
-    def describe(self, eid):
+    def describe(self, eid, asdict=False):
         """return a tuple (type, sourceuri, extid) for the entity with id <eid>"""
         raise NotImplementedError
 
--- a/rqlrewrite.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/rqlrewrite.py	Fri Dec 09 12:08:27 2011 +0100
@@ -119,6 +119,10 @@
     return newsolutions
 
 
+def iter_relations(stinfo):
+    # this is a function so that tests may return relations in a predictable order
+    return stinfo['relations'] - stinfo['rhsrelations']
+
 class Unsupported(Exception):
     """raised when an rql expression can't be inserted in some rql query
     because it create an unresolvable query (eg no solutions found)
@@ -337,43 +341,58 @@
         """introduce the given snippet in a subquery"""
         subselect = stmts.Select()
         snippetrqlst = n.Exists(transformedsnippet.copy(subselect))
+        get_rschema = self.schema.rschema
         aliases = []
-        rels_done = set()
-        for i, (selectvar, snippetvar) in enumerate(varmap):
+        done = set()
+        for i, (selectvar, _) in enumerate(varmap):
+            need_null_test = False
             subselectvar = subselect.get_variable(selectvar)
             subselect.append_selected(n.VariableRef(subselectvar))
             aliases.append(selectvar)
-            vi = self.varinfos[i]
-            need_null_test = False
-            stinfo = vi['stinfo']
-            for rel in stinfo['relations']:
-                if rel in rels_done:
-                    continue
-                rels_done.add(rel)
-                rschema = self.schema.rschema(rel.r_type)
-                if rschema.final or (rschema.inlined and
-                                     not rel in stinfo['rhsrelations']):
-                    rel.children[0].name = selectvar # XXX explain why
-                    subselect.add_restriction(rel.copy(subselect))
-                    for vref in rel.children[1].iget_nodes(n.VariableRef):
-                        if isinstance(vref.variable, n.ColumnAlias):
-                            # XXX could probably be handled by generating the
-                            # subquery into the detected subquery
-                            raise BadSchemaDefinition(
-                                "cant insert security because of usage two inlined "
-                                "relations in this query. You should probably at "
-                                "least uninline %s" % rel.r_type)
-                        subselect.append_selected(vref.copy(subselect))
-                        aliases.append(vref.name)
-                    self.select.remove_node(rel)
-                    # when some inlined relation has to be copied in the
-                    # subquery, we need to test that either value is NULL or
-                    # that the snippet condition is satisfied
-                    if rschema.inlined and rel.optional:
-                        need_null_test = True
+            todo = [(selectvar, self.varinfos[i]['stinfo'])]
+            while todo:
+                varname, stinfo = todo.pop()
+                done.add(varname)
+                for rel in iter_relations(stinfo):
+                    if rel in done:
+                        continue
+                    done.add(rel)
+                    rschema = get_rschema(rel.r_type)
+                    if rschema.final or rschema.inlined:
+                        rel.children[0].name = varname # XXX explain why
+                        subselect.add_restriction(rel.copy(subselect))
+                        for vref in rel.children[1].iget_nodes(n.VariableRef):
+                            if isinstance(vref.variable, n.ColumnAlias):
+                                # XXX could probably be handled by generating the
+                                # subquery into the detected subquery
+                                raise BadSchemaDefinition(
+                                    "cant insert security because of usage two inlined "
+                                    "relations in this query. You should probably at "
+                                    "least uninline %s" % rel.r_type)
+                            subselect.append_selected(vref.copy(subselect))
+                            aliases.append(vref.name)
+                        self.select.remove_node(rel)
+                        # when some inlined relation has to be copied in the
+                        # subquery and that relation is optional, we need to
+                        # test that either value is NULL or that the snippet
+                        # condition is satisfied
+                        if varname == selectvar and rel.optional and rschema.inlined:
+                            need_null_test = True
+                        # also, if some attributes or inlined relation of the
+                        # object variable are accessed, we need to get all those
+                        # from the subquery as well
+                        if vref.name not in done and rschema.inlined:
+                            # we can use vref here, defined in the for loop above
+                            ostinfo = vref.variable.stinfo
+                            for orel in iter_relations(ostinfo):
+                                orschema = get_rschema(orel.r_type)
+                                if orschema.final or orschema.inlined:
+                                    todo.append( (vref.name, ostinfo) )
+                                    break
             if need_null_test:
                 snippetrqlst = n.Or(
-                    n.make_relation(subselectvar, 'is', (None, None), n.Constant,
+                    n.make_relation(subselect.get_variable(selectvar), 'is',
+                                    (None, None), n.Constant,
                                     operator='='),
                     snippetrqlst)
         subselect.add_restriction(snippetrqlst)
@@ -619,7 +638,7 @@
 
     def visit_mathexpression(self, node):
         cmp_ = n.MathExpression(node.operator)
-        for c in cmp.children:
+        for c in node.children:
             cmp_.append(c.accept(self))
         return cmp_
 
--- a/rset.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/rset.py	Fri Dec 09 12:08:27 2011 +0100
@@ -351,7 +351,8 @@
                     if offset <= entity.cw_row < stop:
                         entity.cw_row = entity.cw_row - offset
                     else:
-                        self.req.drop_entity_cache(entity.eid)
+                        entity.cw_rset = entity.as_rset()
+                        entity.cw_row = entity.cw_col = 0
         else:
             rset = self.copy(rows, descr)
             if not offset:
@@ -475,43 +476,57 @@
         entity.eid = eid
         # cache entity
         req.set_entity_cache(entity)
-        eschema = entity.e_schema
         # try to complete the entity if there are some additional columns
         if len(rowvalues) > 1:
-            rqlst = self.syntax_tree()
-            if rqlst.TYPE == 'select':
-                # UNION query, find the subquery from which this entity has been
-                # found
-                select, col = rqlst.locate_subquery(col, etype, self.args)
+            eschema = entity.e_schema
+            eid_col, attr_cols, rel_cols = self._rset_structure(eschema, col)
+            entity.eid = rowvalues[eid_col]
+            for attr, col_idx in attr_cols.items():
+                entity.cw_attr_cache[attr] = rowvalues[col_idx]
+            for (rtype, role), col_idx in rel_cols.items():
+                value = rowvalues[col_idx]
+                if value is None:
+                    if role == 'subject':
+                        rql = 'Any Y WHERE X %s Y, X eid %s'
+                    else:
+                        rql = 'Any Y WHERE Y %s X, X eid %s'
+                    rrset = ResultSet([], rql % (rtype, entity.eid))
+                    rrset.req = req
+                else:
+                    rrset = self._build_entity(row, col_idx).as_rset()
+                entity.cw_set_relation_cache(rtype, role, rrset)
+        return entity
+
+    @cached
+    def _rset_structure(self, eschema, entity_col):
+        eid_col = col = entity_col
+        rqlst = self.syntax_tree()
+        attr_cols = {}
+        rel_cols = {}
+        if rqlst.TYPE == 'select':
+            # UNION query, find the subquery from which this entity has been
+            # found
+            select, col = rqlst.locate_subquery(entity_col, eschema.type, self.args)
+        else:
+            select = rqlst
+        # take care, due to outer join support, we may find None
+        # values for non final relation
+        for i, attr, role in attr_desc_iterator(select, col, entity_col):
+            if role == 'subject':
+                rschema = eschema.subjrels[attr]
             else:
-                select = rqlst
-            # take care, due to outer join support, we may find None
-            # values for non final relation
-            for i, attr, role in attr_desc_iterator(select, col, entity.cw_col):
-                if role == 'subject':
-                    rschema = eschema.subjrels[attr]
-                    if rschema.final:
-                        if attr == 'eid':
-                            entity.eid = rowvalues[i]
-                        else:
-                            entity.cw_attr_cache[attr] = rowvalues[i]
-                        continue
+                rschema = eschema.objrels[attr]
+            if rschema.final:
+                if attr == 'eid':
+                    eid_col = i
                 else:
-                    rschema = eschema.objrels[attr]
+                    attr_cols[attr] = i
+            else:
                 rdef = eschema.rdef(attr, role)
                 # only keep value if it can't be multivalued
                 if rdef.role_cardinality(role) in '1?':
-                    if rowvalues[i] is None:
-                        if role == 'subject':
-                            rql = 'Any Y WHERE X %s Y, X eid %s'
-                        else:
-                            rql = 'Any Y WHERE Y %s X, X eid %s'
-                        rrset = ResultSet([], rql % (attr, entity.eid))
-                        rrset.req = req
-                    else:
-                        rrset = self._build_entity(row, i).as_rset()
-                    entity.cw_set_relation_cache(attr, role, rrset)
-        return entity
+                    rel_cols[(attr, role)] = i
+        return eid_col, attr_cols, rel_cols
 
     @cached
     def syntax_tree(self):
@@ -680,7 +695,7 @@
             continue
         if rootvar.name == rootmainvar.name:
             continue
-        if select is not rootselect:
+        if select is not rootselect and isinstance(rootvar, nodes.ColumnAlias):
             term = select.selection[root.subquery_selection_index(select, i)]
         var = _get_variable(term)
         if var is None:
--- a/schema.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/schema.py	Fri Dec 09 12:08:27 2011 +0100
@@ -544,10 +544,11 @@
         rschema = self.add_relation_type(ybo.RelationType('identity'))
         rschema.final = False
 
+    etype_name_re = r'[A-Z][A-Za-z0-9]*[a-z]+[A-Za-z0-9]*$'
     def add_entity_type(self, edef):
         edef.name = edef.name.encode()
         edef.name = bw_normalize_etype(edef.name)
-        if not re.match(r'[A-Z][A-Za-z0-9]*[a-z]+[0-9]*$', edef.name):
+        if not re.match(self.etype_name_re, edef.name):
             raise BadSchemaDefinition(
                 '%r is not a valid name for an entity type. It should start '
                 'with an upper cased letter and be followed by at least a '
@@ -665,6 +666,8 @@
     # these are overridden by set_log_methods below
     # only defining here to prevent pylint from complaining
     info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None
+    # to be defined in concrete classes
+    full_rql = None
 
     def __init__(self, expression, mainvars, eid):
         self.eid = eid # eid of the entity representing this rql expression
--- a/schemas/base.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/schemas/base.py	Fri Dec 09 12:08:27 2011 +0100
@@ -21,7 +21,9 @@
 _ = unicode
 
 from yams.buildobjs import (EntityType, RelationType, RelationDefinition,
-                            SubjectRelation, String, Datetime, Password, Interval)
+                            SubjectRelation,
+                            String, TZDatetime, Datetime, Password, Interval,
+                            Boolean)
 from cubicweb.schema import (
     RQLConstraint, WorkflowableEntityType, ERQLExpression, RRQLExpression,
     PUB_SYSTEM_ENTITY_PERMS, PUB_SYSTEM_REL_PERMS, PUB_SYSTEM_ATTR_PERMS)
@@ -40,13 +42,7 @@
     upassword = Password(required=True) # password is a reserved word for mysql
     firstname = String(maxsize=64)
     surname   = String(maxsize=64)
-    last_login_time  = Datetime(description=_('last connection date'))
-    # allowing an email to be the primary email of multiple entities is necessary for
-    # test at least :-/
-    primary_email = SubjectRelation('EmailAddress', cardinality='??',
-                                    description=_('email address to use for notification'))
-    use_email     = SubjectRelation('EmailAddress', cardinality='*?', composite='subject')
-
+    last_login_time = TZDatetime(description=_('last connection date'))
     in_group = SubjectRelation('CWGroup', cardinality='+*',
                                constraints=[RQLConstraint('NOT O name "owners"')],
                                description=_('groups grant permissions to the user'))
@@ -70,17 +66,35 @@
 to indicate which is the preferred form.'))
 
 class use_email(RelationType):
-    """ """
+    fulltext_container = 'subject'
+
+
+class use_email_relation(RelationDefinition):
+    """user's email account"""
+    name = "use_email"
     __permissions__ = {
         'read':   ('managers', 'users', 'guests',),
         'add':    ('managers', RRQLExpression('U has_update_permission S'),),
         'delete': ('managers', RRQLExpression('U has_update_permission S'),),
         }
-    fulltext_container = 'subject'
+    subject = "CWUser"
+    object = "EmailAddress"
+    cardinality = '*?'
+    composite = 'subject'
+
 
-class primary_email(RelationType):
+class primary_email(RelationDefinition):
     """the prefered email"""
-    __permissions__ = use_email.__permissions__
+    __permissions__ = {
+        'read':   ('managers', 'users', 'guests',),
+        'add':    ('managers', RRQLExpression('U has_update_permission S'),),
+        'delete': ('managers', RRQLExpression('U has_update_permission S'),),
+        }
+    subject = "CWUser"
+    object = "EmailAddress"
+    cardinality = '??'
+    constraints= [RQLConstraint('S use_email O')]
+
 
 class prefered_form(RelationType):
     __permissions__ = {
@@ -238,7 +252,7 @@
 
     name = String(required=True, unique=True, maxsize=128,
                   description=_('name of the cache'))
-    timestamp = Datetime(default='NOW')
+    timestamp = TZDatetime(default='NOW')
 
 
 class CWSource(EntityType):
@@ -264,7 +278,8 @@
     # may changes when sources are specified
     url = String(description=_('URLs from which content will be imported. You can put one url per line'))
     parser = String(description=_('parser to use to extract entities from content retrieved at given URLs.'))
-    latest_retrieval = Datetime(description=_('latest synchronization time'))
+    latest_retrieval = TZDatetime(description=_('latest synchronization time'))
+    in_synchronization = TZDatetime(description=_('start timestamp of the currently in synchronization, or NULL when no synchronization in progress.'))
 
 
 ENTITY_MANAGERS_PERMISSIONS = {
@@ -307,8 +322,8 @@
 class cw_source(RelationDefinition):
     __permissions__ = {
         'read':   ('managers', 'users', 'guests'),
-        'add':    (),
-        'delete': (),
+        'add':    ('managers',),
+        'delete': ('managers',),
         }
     subject = '*'
     object = 'CWSource'
@@ -317,17 +332,31 @@
 
 class CWSourceSchemaConfig(EntityType):
     __permissions__ = ENTITY_MANAGERS_PERMISSIONS
-    __unique_together__ = [('cw_for_source', 'cw_schema')]
     cw_for_source = SubjectRelation(
         'CWSource', inlined=True, cardinality='1*', composite='object',
         __permissions__=RELATION_MANAGERS_PERMISSIONS)
-    cw_schema = SubjectRelation(
-        ('CWEType', 'CWRType', 'CWAttribute', 'CWRelation'),
-        inlined=True, cardinality='1*', composite='object',
-        __permissions__=RELATION_MANAGERS_PERMISSIONS)
     options = String(description=_('allowed options depends on the source type'))
 
 
+class rtype_cw_schema(RelationDefinition):
+    __permissions__ = RELATION_MANAGERS_PERMISSIONS
+    name = 'cw_schema'
+    subject = 'CWSourceSchemaConfig'
+    object = ('CWEType', 'CWRType')
+    inlined = True
+    cardinality = '1*'
+    composite = 'object'
+    constraints = [RQLConstraint('NOT O final TRUE')]
+
+class rdef_cw_schema(RelationDefinition):
+    __permissions__ = RELATION_MANAGERS_PERMISSIONS
+    name = 'cw_schema'
+    subject = 'CWSourceSchemaConfig'
+    object = 'CWRelation'
+    inlined = True
+    cardinality = '1*'
+    composite = 'object'
+
 # "abtract" relation types, no definition in cubicweb itself ###################
 
 class identical_to(RelationType):
--- a/selectors.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/selectors.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -255,12 +255,19 @@
 
     * if `entity` is specified, return score for this entity's class
 
-    * elif `row` is specified, return score for the class of the entity
-      found in the specified cell, using column specified by `col` or 0
+    * elif `rset`, `select` and `filtered_variable` are specified, return score
+      for the possible classes for variable in the given rql :class:`Select`
+      node
+
+    * elif `rset` and `row` are specified, return score for the class of the
+      entity found in the specified cell, using column specified by `col` or 0
 
-    * else return the sum of scores for each entity class found in the column
-      specified specified by the `col` argument or in column 0 if not specified,
-      unless:
+    * elif `rset` is specified return score for each entity class found in the
+      column specified by the `col` argument or in column 0 if not
+      specified
+
+    When there are several classes to be evaluated, return the sum of scores for
+    each entity class unless:
 
       - `once_is_enough` is False (the default) and some entity class is scored
         to 0, in which case 0 is returned
@@ -276,32 +283,37 @@
         self.accept_none = accept_none
 
     @lltrace
-    def __call__(self, cls, req, rset=None, row=None, col=0, accept_none=None,
+    def __call__(self, cls, req, rset=None, row=None, col=0, entity=None,
+                 select=None, filtered_variable=None,
+                 accept_none=None,
                  **kwargs):
-        if kwargs.get('entity'):
-            return self.score_class(kwargs['entity'].__class__, req)
+        if entity is not None:
+            return self.score_class(entity.__class__, req)
         if not rset:
             return 0
-        score = 0
-        if row is None:
+        if select is not None and filtered_variable is not None:
+            etypes = set(sol[filtered_variable.name] for sol in select.solutions)
+        elif row is None:
             if accept_none is None:
                 accept_none = self.accept_none
-            if not accept_none:
-                if any(rset[i][col] is None for i in xrange(len(rset))):
-                    return 0
-            for etype in rset.column_types(col):
-                if etype is None: # outer join
-                    return 0
-                escore = self.score(cls, req, etype)
-                if not escore and not self.once_is_enough:
-                    return 0
-                elif self.once_is_enough:
-                    return escore
-                score += escore
+            if not accept_none and \
+                   any(rset[i][col] is None for i in xrange(len(rset))):
+                return 0
+            etypes = rset.column_types(col)
         else:
             etype = rset.description[row][col]
-            if etype is not None:
-                score = self.score(cls, req, etype)
+            # may have None in rset.description on outer join
+            if etype is None or rset.rows[row][col] is None:
+                return 0
+            etypes = (etype,)
+        score = 0
+        for etype in etypes:
+            escore = self.score(cls, req, etype)
+            if not escore and not self.once_is_enough:
+                return 0
+            elif self.once_is_enough:
+                return escore
+            score += escore
         return score
 
     def score(self, cls, req, etype):
@@ -909,6 +921,7 @@
 
     # hack hack hack
     def __call__(self, cls, req, **kwargs):
+        # hack hack hack
         if self.strict:
             return EntitySelector.__call__(self, cls, req, **kwargs)
         return EClassSelector.__call__(self, cls, req, **kwargs)
--- a/server/__init__.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/server/__init__.py	Fri Dec 09 12:08:27 2011 +0100
@@ -210,9 +210,9 @@
     handler = config.migration_handler(schema, interactive=False,
                                        cnx=cnx, repo=repo)
     # install additional driver specific sql files
-    handler.install_custom_sql_scripts(join(CW_SOFTWARE_ROOT, 'schemas'), driver)
-    for directory in reversed(config.cubes_path()):
-        handler.install_custom_sql_scripts(join(directory, 'schema'), driver)
+    handler.cmd_install_custom_sql_scripts()
+    for cube in reversed(config.cubes()):
+        handler.cmd_install_custom_sql_scripts(cube)
     # serialize the schema
     initialize_schema(config, schema, handler)
     # yoo !
@@ -231,8 +231,7 @@
     from cubicweb.server.schemaserial import serialize_schema
     from cubicweb.server.session import hooks_control
     session = mhandler.session
-    paths = [p for p in config.cubes_path() + [config.apphome]
-             if exists(join(p, 'migration'))]
+    cubes = config.cubes()
     # deactivate every hooks but those responsible to set metadata
     # so, NO INTEGRITY CHECKS are done, to have quicker db creation.
     # Active integrity is kept else we may pb such as two default
@@ -240,18 +239,22 @@
     with hooks_control(session, session.HOOKS_DENY_ALL, 'metadata',
                        'activeintegrity'):
         # execute cubicweb's pre<event> script
-        mhandler.exec_event_script('pre%s' % event)
+        mhandler.cmd_exec_event_script('pre%s' % event)
         # execute cubes pre<event> script if any
-        for path in reversed(paths):
-            mhandler.exec_event_script('pre%s' % event, path)
+        for cube in reversed(cubes):
+            mhandler.cmd_exec_event_script('pre%s' % event, cube)
+        # execute instance's pre<event> script (useful in tests)
+        mhandler.cmd_exec_event_script('pre%s' % event, apphome=True)
         # enter instance'schema into the database
-        session.set_pool()
+        session.set_cnxset()
         serialize_schema(session, schema)
         # execute cubicweb's post<event> script
-        mhandler.exec_event_script('post%s' % event)
+        mhandler.cmd_exec_event_script('post%s' % event)
         # execute cubes'post<event> script if any
-        for path in reversed(paths):
-            mhandler.exec_event_script('post%s' % event, path)
+        for cube in reversed(cubes):
+            mhandler.cmd_exec_event_script('post%s' % event, cube)
+        # execute instance's post<event> script (useful in tests)
+        mhandler.cmd_exec_event_script('post%s' % event, apphome=True)
 
 
 # sqlite'stored procedures have to be registered at connection opening time
--- a/server/checkintegrity.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/server/checkintegrity.py	Fri Dec 09 12:08:27 2011 +0100
@@ -47,7 +47,7 @@
     sqlcursor.execute('SELECT type, source FROM entities WHERE eid=%s' % eid)
     try:
         etype, source = sqlcursor.fetchone()
-    except:
+    except Exception:
         eids[eid] = False
         return False
     if source and source != 'system':
@@ -58,7 +58,7 @@
                                {'x': eid}):
                 eids[eid] = True
                 return True
-        except: # TypeResolverError, Unauthorized...
+        except Exception: # TypeResolverError, Unauthorized...
             pass
         eids[eid] = False
         return False
@@ -101,7 +101,7 @@
     # deactivate modification_date hook since we don't want them
     # to be updated due to the reindexation
     repo = session.repo
-    cursor = session.pool['system']
+    cursor = session.cnxset['system']
     dbhelper = session.repo.system_source.dbhelper
     if not dbhelper.has_fti_table(cursor):
         print 'no text index table'
@@ -188,6 +188,18 @@
             if fix:
                 session.system_sql('DELETE FROM entities WHERE eid=%s;' % eid)
             notify_fixed(fix)
+    session.system_sql('INSERT INTO cw_source_relation (eid_from, eid_to) '
+                       'SELECT e.eid, s.cw_eid FROM entities as e, cw_CWSource as s '
+                       'WHERE s.cw_name=e.asource AND NOT EXISTS(SELECT 1 FROM cw_source_relation as cs '
+                       '  WHERE cs.eid_from=e.eid AND cs.eid_to=s.cw_eid)')
+    session.system_sql('INSERT INTO is_relation (eid_from, eid_to) '
+                       'SELECT e.eid, s.cw_eid FROM entities as e, cw_CWEType as s '
+                       'WHERE s.cw_name=e.type AND NOT EXISTS(SELECT 1 FROM is_relation as cs '
+                       '  WHERE cs.eid_from=e.eid AND cs.eid_to=s.cw_eid)')
+    session.system_sql('INSERT INTO is_instance_of_relation (eid_from, eid_to) '
+                       'SELECT e.eid, s.cw_eid FROM entities as e, cw_CWEType as s '
+                       'WHERE s.cw_name=e.type AND NOT EXISTS(SELECT 1 FROM is_instance_of_relation as cs '
+                       '  WHERE cs.eid_from=e.eid AND cs.eid_to=s.cw_eid)')
     print 'Checking entities tables'
     for eschema in schema.entities():
         if eschema.final:
@@ -283,10 +295,10 @@
                     rql = 'Any X WHERE NOT Y %s X, X is %s' % (rschema, etype)
                 for entity in session.execute(rql).entities():
                     print >> sys.stderr, '%s #%s is missing mandatory %s relation %s' % (
-                        entity.__regid__, entity.eid, role, rschema)
+                        entity.__regid__, entity.eid, role, rschema),
                     if fix:
                         #if entity.cw_describe()['source']['uri'] == 'system': XXX
-                        entity.delete()
+                        entity.cw_delete()
                     notify_fixed(fix)
 
 
@@ -304,9 +316,9 @@
                     rschema, rdef.subject)
                 for entity in session.execute(rql).entities():
                     print >> sys.stderr, '%s #%s is missing mandatory attribute %s' % (
-                        entity.__regid__, entity.eid, rschema)
+                        entity.__regid__, entity.eid, rschema),
                     if fix:
-                        entity.delete()
+                        entity.cw_delete()
                     notify_fixed(fix)
 
 
@@ -333,22 +345,6 @@
                                        % (table, column, eidcolumn, eid),
                                        {'v': default})
                 notify_fixed(fix)
-    cursor = session.system_sql('SELECT MIN(%s) FROM %sCWUser;' % (eidcolumn,
-                                                                  SQL_PREFIX))
-    default_user_eid = cursor.fetchone()[0]
-    assert default_user_eid is not None, 'no user defined !'
-    for rel, default in ( ('owned_by', default_user_eid), ):
-        cursor = session.system_sql("SELECT eid, type FROM entities "
-                                    "WHERE source='system' AND NOT EXISTS "
-                                    "(SELECT 1 FROM %s_relation WHERE eid_from=eid);"
-                                    % rel)
-        for eid, etype in cursor.fetchall():
-            msg = '  %s with eid %s has no %s relation'
-            print >> sys.stderr, msg % (etype, eid, rel),
-            if fix:
-                session.system_sql('INSERT INTO %s_relation VALUES (%s, %s) ;'
-                                   % (rel, eid, default))
-            notify_fixed(fix)
 
 
 def check(repo, cnx, checks, reindex, fix, withpb=True):
@@ -356,11 +352,11 @@
     using given user and password to locally connect to the repository
     (no running cubicweb server needed)
     """
-    session = repo._get_session(cnx.sessionid, setpool=True)
+    session = repo._get_session(cnx.sessionid, setcnxset=True)
     # yo, launch checks
     if checks:
         eids_cache = {}
-        with security_enabled(session, read=False): # ensure no read security
+        with security_enabled(session, read=False, write=False): # ensure no read security
             for check in checks:
                 check_func = globals()['check_%s' % check]
                 check_func(repo.schema, session, eids_cache, fix=fix)
@@ -372,6 +368,6 @@
             print 'WARNING: Diagnostic run, nothing has been corrected'
     if reindex:
         cnx.rollback()
-        session.set_pool()
+        session.set_cnxset()
         reindex_entities(repo.schema, session, withpb=withpb)
         cnx.commit()
--- a/server/edition.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/server/edition.py	Fri Dec 09 12:08:27 2011 +0100
@@ -68,6 +68,11 @@
         super(EditedEntity, self).__delitem__(attr)
         self.entity.cw_attr_cache.pop(attr, None)
 
+    def __copy__(self):
+        # default copy protocol fails in EditedEntity.__setitem__ because
+        # copied entity has no skip_security attribute at this point
+        return EditedEntity(self.entity, **self)
+
     def pop(self, attr, *args):
         # don't update skip_security by design (think to storage api)
         assert not self.saved, 'too late to modify edited attributes'
--- a/server/hook.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/server/hook.py	Fri Dec 09 12:08:27 2011 +0100
@@ -69,12 +69,19 @@
 ~~~~~~~~~~
 
 Operations are subclasses of the :class:`~cubicweb.server.hook.Operation` class
-that may be created by hooks and scheduled to happen just before (or after) the
-`precommit`, `postcommit` or `rollback` event. Hooks are being fired immediately
-on data operations, and it is sometime necessary to delay the actual work down
-to a time where all other hooks have run. Also while the order of execution of
-hooks is data dependant (and thus hard to predict), it is possible to force an
-order on operations.
+that may be created by hooks and scheduled to happen on `precommit`,
+`postcommit` or `rollback` event (i.e. respectively before/after a commit or
+before a rollback of a transaction).
+
+Hooks are being fired immediately on data operations, and it is sometimes
+necessary to delay the actual work down to a time where we can expect all
+information to be there, or when all other hooks have run (though take care
+since operations may themselves trigger hooks). Also while the order of
+execution of hooks is data dependent (and thus hard to predict), it is possible
+to force an order on operations.
+
+So, for such cases where you may miss some information that may be set later in
+the transaction, you should instantiate an operation in the hook.
 
 Operations may be used to:
 
@@ -248,7 +255,7 @@
 from logging import getLogger
 from itertools import chain
 
-from logilab.common.decorators import classproperty
+from logilab.common.decorators import classproperty, cached
 from logilab.common.deprecation import deprecated, class_renamed
 from logilab.common.logging_ext import set_log_methods
 
@@ -257,7 +264,7 @@
 from cubicweb.cwvreg import CWRegistry, VRegistry
 from cubicweb.selectors import (objectify_selector, lltrace, ExpectedValueSelector,
                                 is_instance)
-from cubicweb.appobject import AppObject
+from cubicweb.appobject import AppObject, NotSelector, OrSelector
 from cubicweb.server.session import security_enabled
 
 ENTITIES_HOOKS = set(('before_add_entity',    'after_add_entity',
@@ -318,15 +325,83 @@
             else:
                 entities = []
                 eids_from_to = []
+            pruned = self.get_pruned_hooks(session, event,
+                                           entities, eids_from_to, kwargs)
             # by default, hooks are executed with security turned off
             with security_enabled(session, read=False):
                 for _kwargs in _iter_kwargs(entities, eids_from_to, kwargs):
-                    hooks = sorted(self.possible_objects(session, **_kwargs),
+                    hooks = sorted(self.filtered_possible_objects(pruned, session, **_kwargs),
                                    key=lambda x: x.order)
                     with security_enabled(session, write=False):
                         for hook in hooks:
-                            #print hook.category, hook.__regid__
-                            hook()
+                            hook()
+
+    def get_pruned_hooks(self, session, event, entities, eids_from_to, kwargs):
+        """return a set of hooks that should not be considered by filtered_possible_objects
+
+        the idea is to make a first pass over all the hooks in the
+        registry and to mark put some of them in a pruned list. The
+        pruned hooks are the one which:
+
+        * are disabled at the session level
+        * have a match_rtype or an is_instance selector which does not
+          match the rtype / etype of the relations / entities for
+          which we are calling the hooks. This works because the
+          repository calls the hooks grouped by rtype or by etype when
+          using the entities or eids_to_from keyword arguments
+
+        Only hooks with a simple selector or an AndSelector of simple
+        selectors are considered for disabling.
+
+        """
+        if 'entity' in kwargs:
+            entities = [kwargs['entity']]
+        if len(entities):
+            look_for_selector = is_instance
+            etype = entities[0].__regid__
+        elif 'rtype' in kwargs:
+            look_for_selector = match_rtype
+            etype = None
+        else: # nothing to prune, how did we get there ???
+            return set()
+        cache_key = (event, kwargs.get('rtype'), etype)
+        pruned = session.pruned_hooks_cache.get(cache_key)
+        if pruned is not None:
+            return pruned
+        pruned = set()
+        session.pruned_hooks_cache[cache_key] = pruned
+        if look_for_selector is not None:
+            for id, hooks in self.iteritems():
+                for hook in hooks:
+                    enabled_cat, main_filter = hook.filterable_selectors()
+                    if enabled_cat is not None:
+                        if not enabled_cat(hook, session):
+                            pruned.add(hook)
+                            continue
+                    if main_filter is not None:
+                        if isinstance(main_filter, match_rtype) and \
+                           (main_filter.frometypes is not None  or \
+                            main_filter.toetypes is not None):
+                            continue
+                        first_kwargs = _iter_kwargs(entities, eids_from_to, kwargs).next()
+                        if not main_filter(hook, session, **first_kwargs):
+                            pruned.add(hook)
+        return pruned
+
+
+    def filtered_possible_objects(self, pruned, *args, **kwargs):
+        for appobjects in self.itervalues():
+            if pruned:
+                filtered_objects = [obj for obj in appobjects if obj not in pruned]
+                if not filtered_objects:
+                    continue
+            else:
+                filtered_objects = appobjects
+            obj = self._select_best(filtered_objects,
+                                    *args, **kwargs)
+            if obj is None:
+                continue
+            yield obj
 
 class HooksManager(object):
     def __init__(self, vreg):
@@ -462,8 +537,17 @@
     # XXX deprecated
     enabled = True
     # stop pylint from complaining about missing attributes in Hooks classes
-    eidfrom = eidto = entity = rtype = None
+    eidfrom = eidto = entity = rtype = repo = None
 
+    @classmethod
+    @cached
+    def filterable_selectors(cls):
+        search = cls.__select__.search_selector
+        if search((NotSelector, OrSelector)):
+            return None, None
+        enabled_cat = search(enabled_category)
+        main_filter = search((is_instance, match_rtype))
+        return enabled_cat, main_filter
 
     @classmethod
     def check_events(cls):
@@ -496,7 +580,7 @@
             warn('[3.6] %s: accepts is deprecated, define proper __select__'
                  % classid(cls), DeprecationWarning)
             rtypes = []
-            for ertype in cls.accepts:
+            for ertype in cls.accepts: # pylint: disable=E1101
                 if ertype.islower():
                     rtypes.append(ertype)
                 else:
@@ -517,6 +601,7 @@
         if hasattr(self, 'call'):
             warn('[3.6] %s: call is deprecated, implement __call__'
                  % classid(self.__class__), DeprecationWarning)
+            # pylint: disable=E1101
             if self.event.endswith('_relation'):
                 self.call(self._cw, self.eidfrom, self.rtype, self.eidto)
             elif 'delete' in self.event:
@@ -544,7 +629,7 @@
     Notice there are no default behaviour defined when a watched relation is
     deleted, you'll have to handle this by yourself.
 
-    You usually want to use the :class:`match_rtype_sets` selector on concret
+    You usually want to use the :class:`match_rtype_sets` selector on concrete
     classes.
     """
     events = ('after_add_relation',)
@@ -653,8 +738,8 @@
     operation. These keyword arguments will be accessible as attributes from the
     operation instance.
 
-    An operation is triggered on connections pool events related to
-    commit / rollback transations. Possible events are:
+    An operation is triggered on connections set events related to commit /
+    rollback transactions. Possible events are:
 
     * `precommit`:
 
@@ -724,11 +809,11 @@
         if event == 'postcommit_event' and hasattr(self, 'commit_event'):
             warn('[3.10] %s: commit_event method has been replaced by postcommit_event'
                  % classid(self.__class__), DeprecationWarning)
-            self.commit_event()
+            self.commit_event() # pylint: disable=E1101
         getattr(self, event)()
 
     def precommit_event(self):
-        """the observed connections pool is preparing a commit"""
+        """the observed connections set is preparing a commit"""
 
     def revertprecommit_event(self):
         """an error went when pre-commiting this operation or a later one
@@ -738,14 +823,13 @@
         """
 
     def rollback_event(self):
-        """the observed connections pool has been rollbacked
+        """the observed connections set has been rollbacked
 
-        do nothing by default, the operation will just be removed from the pool
-        operation list
+        do nothing by default
         """
 
     def postcommit_event(self):
-        """the observed connections pool has committed"""
+        """the observed connections set has committed"""
 
     @property
     @deprecated('[3.6] use self.session.user')
@@ -1009,6 +1093,9 @@
 
 
 class RQLPrecommitOperation(Operation):
+    # to be defined in concrete classes
+    rqls = None
+
     def precommit_event(self):
         execute = self.session.execute
         for rql in self.rqls:
@@ -1028,7 +1115,7 @@
     data_key = 'neweids'
 
     def rollback_event(self):
-        """the observed connections pool has been rollbacked,
+        """the observed connections set has been rollbacked,
         remove inserted eid from repository type/source cache
         """
         try:
@@ -1042,7 +1129,7 @@
     """
     data_key = 'pendingeids'
     def postcommit_event(self):
-        """the observed connections pool has been rollbacked,
+        """the observed connections set has committed,
         remove inserted eid from repository type/source cache
         """
         try:
--- a/server/migractions.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/server/migractions.py	Fri Dec 09 12:08:27 2011 +0100
@@ -50,7 +50,7 @@
 from yams.schema2sql import eschema2sql, rschema2sql
 from yams.schema import RelationDefinitionSchema
 
-from cubicweb import AuthenticationError, ExecutionError
+from cubicweb import CW_SOFTWARE_ROOT, AuthenticationError, ExecutionError
 from cubicweb.selectors import is_instance
 from cubicweb.schema import (ETYPE_NAME_MAP, META_RTYPES, VIRTUAL_RTYPES,
                              PURE_VIRTUAL_RTYPES,
@@ -153,7 +153,7 @@
                   migrscript, funcname, *args, **kwargs)
         except ExecutionError, err:
             print >> sys.stderr, "-> %s" % err
-        except:
+        except BaseException:
             self.rollback()
             raise
 
@@ -201,7 +201,6 @@
                 versions = repo.get_versions()
                 for cube, version in versions.iteritems():
                     version_file.write('%s %s\n' % (cube, version))
-                    
             if not failed:
                 bkup = tarfile.open(backupfile, 'w|gz')
                 for filename in os.listdir(tmpdir):
@@ -242,7 +241,7 @@
                 written_format = format_file.readline().strip()
                 if written_format in ('portable', 'native'):
                     format = written_format
-        self.config.open_connections_pools = False
+        self.config.init_cnxset_pool = False
         repo = self.repo_connect()
         for source in repo.sources:
             if systemonly and source.uri != 'system':
@@ -255,7 +254,7 @@
                     raise SystemExit(1)
         shutil.rmtree(tmpdir)
         # call hooks
-        repo.open_connections_pools()
+        repo.init_cnxset_pool()
         repo.hm.call_hooks('server_restore', repo=repo, timestamp=backupfile)
         print '-> database restored.'
 
@@ -288,7 +287,7 @@
                 except (KeyboardInterrupt, EOFError):
                     print 'aborting...'
                     sys.exit(0)
-            self.session.keep_pool_mode('transaction')
+            self.session.keep_cnxset_mode('transaction')
             self.session.data['rebuild-infered'] = False
             return self._cnx
 
@@ -296,10 +295,10 @@
     def session(self):
         if self.config is not None:
             session = self.repo._get_session(self.cnx.sessionid)
-            if session.pool is None:
+            if session.cnxset is None:
                 session.set_read_security(False)
                 session.set_write_security(False)
-            session.set_pool()
+            session.set_cnxset()
             return session
         # no access to session on remote instance
         return None
@@ -308,13 +307,13 @@
         if hasattr(self, '_cnx'):
             self._cnx.commit()
         if self.session:
-            self.session.set_pool()
+            self.session.set_cnxset()
 
     def rollback(self):
         if hasattr(self, '_cnx'):
             self._cnx.rollback()
         if self.session:
-            self.session.set_pool()
+            self.session.set_cnxset()
 
     def rqlexecall(self, rqliter, ask_confirm=False):
         for rql, kwargs in rqliter:
@@ -351,10 +350,17 @@
         """cached constraint types mapping"""
         return ss.cstrtype_mapping(self._cw)
 
-    def exec_event_script(self, event, cubepath=None, funcname=None,
-                          *args, **kwargs):
-        if cubepath:
+    def cmd_exec_event_script(self, event, cube=None, funcname=None,
+                              *args, **kwargs):
+        """execute a cube's event script `migration/<event>.py` where event
+        is one of 'precreate', 'postcreate', 'preremove' and 'postremove'.
+        """
+        assert event in ('precreate', 'postcreate', 'preremove', 'postremove')
+        if cube:
+            cubepath = self.config.cube_dir(cube)
             apc = osp.join(cubepath, 'migration', '%s.py' % event)
+        elif kwargs.pop('apphome', False):
+            apc = osp.join(self.config.apphome, 'migration', '%s.py' % event)
         else:
             apc = osp.join(self.config.migration_scripts_dir(), '%s.py' % event)
         if osp.exists(apc):
@@ -373,19 +379,31 @@
                 if self.config.free_wheel:
                     self.cmd_reactivate_verification_hooks()
 
-    def install_custom_sql_scripts(self, directory, driver):
+    def cmd_install_custom_sql_scripts(self, cube=None):
+        """install a cube's custom sql scripts `schema/*.<driver>.sql` where
+        <driver> depends on the instance main database backend (eg 'postgres',
+        'mysql'...)
+        """
+        driver = self.repo.system_source.dbdriver
+        if cube is None:
+            directory = osp.join(CW_SOFTWARE_ROOT, 'schemas')
+        else:
+            directory = osp.join(self.config.cube_dir(cube), 'schema')
+        sql_scripts = []
         for fpath in glob(osp.join(directory, '*.sql.%s' % driver)):
             newname = osp.basename(fpath).replace('.sql.%s' % driver,
                                                   '.%s.sql' % driver)
             warn('[3.5.6] rename %s into %s' % (fpath, newname),
                  DeprecationWarning)
+            sql_scripts.append(fpath)
+        sql_scripts += glob(osp.join(directory, '*.%s.sql' % driver))
+        for fpath in sql_scripts:
             print '-> installing', fpath
-            sqlexec(open(fpath).read(), self.session.system_sql, False,
-                    delimiter=';;')
-        for fpath in glob(osp.join(directory, '*.%s.sql' % driver)):
-            print '-> installing', fpath
-            sqlexec(open(fpath).read(), self.session.system_sql, False,
-                    delimiter=';;')
+            try:
+                sqlexec(open(fpath).read(), self.session.system_sql, False,
+                        delimiter=';;')
+            except Exception, exc:
+                print '-> ERROR:', exc, ', skipping', fpath
 
     # schema synchronization internals ########################################
 
@@ -657,10 +675,9 @@
         new = set()
         # execute pre-create files
         driver = self.repo.system_source.dbdriver
-        for pack in reversed(newcubes):
-            cubedir = self.config.cube_dir(pack)
-            self.install_custom_sql_scripts(osp.join(cubedir, 'schema'), driver)
-            self.exec_event_script('precreate', cubedir)
+        for cube in reversed(newcubes):
+            self.cmd_install_custom_sql_scripts(cube)
+            self.cmd_exec_event_script('precreate', cube)
         # add new entity and relation types
         for rschema in newcubes_schema.relations():
             if not rschema in self.repo.schema:
@@ -683,8 +700,8 @@
                 self.cmd_add_relation_definition(str(fromtype), rschema.type,
                                                  str(totype))
         # execute post-create files
-        for pack in reversed(newcubes):
-            self.exec_event_script('postcreate', self.config.cube_dir(pack))
+        for cube in reversed(newcubes):
+            self.cmd_exec_event_script('postcreate', cube)
             self.commit()
 
     def cmd_remove_cube(self, cube, removedeps=False):
@@ -696,8 +713,8 @@
         removedcubes_schema = self.config.load_schema(construction_mode='non-strict')
         reposchema = self.repo.schema
         # execute pre-remove files
-        for pack in reversed(removedcubes):
-            self.exec_event_script('preremove', self.config.cube_dir(pack))
+        for cube in reversed(removedcubes):
+            self.cmd_exec_event_script('preremove', cube)
         # remove cubes'entity and relation types
         for rschema in fsschema.relations():
             if not rschema in removedcubes_schema and rschema in reposchema:
@@ -718,7 +735,7 @@
                             str(fromtype), rschema.type, str(totype))
         # execute post-remove files
         for cube in reversed(removedcubes):
-            self.exec_event_script('postremove', self.config.cube_dir(cube))
+            self.cmd_exec_event_script('postremove', cube)
             self.rqlexec('DELETE CWProperty X WHERE X pkey %(pk)s',
                          {'pk': u'system.version.'+cube}, ask_confirm=False)
             self.commit()
@@ -1364,7 +1381,7 @@
             prop = self.rqlexec(
                 'CWProperty X WHERE X pkey %(k)s, NOT X for_user U',
                 {'k': pkey}, ask_confirm=False).get_entity(0, 0)
-        except:
+        except Exception:
             self.cmd_create_entity('CWProperty', pkey=unicode(pkey), value=value)
         else:
             prop.set_attributes(value=value)
@@ -1375,7 +1392,7 @@
     def _cw(self):
         session = self.session
         if session is not None:
-            session.set_pool()
+            session.set_cnxset()
             return session
         return self.cnx.request()
 
@@ -1482,14 +1499,14 @@
         if not ask_confirm or self.confirm('Execute sql: %s ?' % sql):
             try:
                 cu = self.session.system_sql(sql, args)
-            except:
+            except Exception:
                 ex = sys.exc_info()[1]
                 if self.confirm('Error: %s\nabort?' % ex, pdb=True):
                     raise
                 return
             try:
                 return cu.fetchall()
-            except:
+            except Exception:
                 # no result to fetch
                 return
 
@@ -1530,15 +1547,16 @@
         """
         rschema = self.repo.schema.rschema(attr)
         oldtype = rschema.objects(etype)[0]
-        rdefeid = rschema.rproperty(etype, oldtype, 'eid')
+        rdefeid = rschema.rdef(etype, oldtype).eid
+        allownull = rschema.rdef(etype, oldtype).cardinality[0] != '1'
         sql = ("UPDATE cw_CWAttribute "
                "SET cw_to_entity=(SELECT cw_eid FROM cw_CWEType WHERE cw_name='%s')"
                "WHERE cw_eid=%s") % (newtype, rdefeid)
         self.sqlexec(sql, ask_confirm=False)
         dbhelper = self.repo.system_source.dbhelper
         sqltype = dbhelper.TYPE_MAPPING[newtype]
-        sql = 'ALTER TABLE cw_%s ALTER COLUMN cw_%s TYPE %s' % (etype, attr, sqltype)
-        self.sqlexec(sql, ask_confirm=False)
+        cursor = self.session.cnxset[self.repo.system_source.uri]
+        dbhelper.change_col_type(cursor, 'cw_%s'  % etype, 'cw_%s' % attr, sqltype, allownull)
         if commit:
             self.commit()
 
@@ -1561,8 +1579,7 @@
         This may be useful on accidental desync between the repository schema
         and a sql database
         """
-        dbhelper = self.repo.system_source.dbhelper
-        tablesql = rschema2sql(dbhelper, self.repo.schema.rschema(rtype))
+        tablesql = rschema2sql(self.repo.schema.rschema(rtype))
         for sql in tablesql.split(';'):
             if sql.strip():
                 self.sqlexec(sql)
--- a/server/msplanner.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/server/msplanner.py	Fri Dec 09 12:08:27 2011 +0100
@@ -291,6 +291,8 @@
         self.sourcesterms = self._sourcesterms = {}
         # source : {relation: set(child variable and constant)}
         self._crossrelations = {}
+        # term : set(sources)
+        self._discarded_sources = {}
         # dictionary of variables and constants which are linked to each other
         # using a non final relation supported by multiple sources (crossed or
         # not).
@@ -370,7 +372,7 @@
                     eid = const.eval(self.plan.args)
                     source = self._session.source_from_eid(eid)
                     if (source is self.system_source
-                        or (hasrel and
+                        or (hasrel and varobj._q_invariant and
                             not any(source.support_relation(r.r_type)
                                     for r in varobj.stinfo['relations']
                                     if not r is rel))):
@@ -539,6 +541,7 @@
                         if invariant and source is self.system_source:
                             continue
                         self._remove_source_term(source, lhs)
+                        self._discarded_sources.setdefault(lhs, set()).add(source)
                     usesys = self.system_source not in sources
                 else:
                     for source, terms in sourcesterms.items():
@@ -546,6 +549,7 @@
                             if invariant and source is self.system_source:
                                 continue
                             self._remove_source_term(source, lhs)
+                            self._discarded_sources.setdefault(lhs, set()).add(source)
                     usesys = self.system_source in sources
                 if rel is None or (len(var.stinfo['relations']) == 2 and
                                    not var.stinfo['selected']):
@@ -697,6 +701,12 @@
                                                     rel in self._crossrelations[s]))
         if invalid_sources:
             self._remove_sources(term, invalid_sources)
+            discarded = self._discarded_sources.get(term)
+            if discarded is not None and not any(x[0] for x in (termsources-invalid_sources)
+                                                 if not x[0] in discarded):
+            raise BadRQLQuery('relation %s cannot be crossed but %s and %s should '
+                              'come from different sources' %
+                              (rel.r_type, term.as_string(), oterm.as_string()))
             # if term is a rewritten const, we can apply the same changes to
             # all other consts inserted from the same original variable
             for const in self._const_vars.get(term, ()):
@@ -1438,7 +1448,7 @@
                                                          for step in steps
                                                          for select in step.union.children):
                 if temptable:
-                    step = IntersectFetchStep(plan) # XXX not implemented
+                    raise NotImplementedError('oops') # IntersectFetchStep(plan)
                 else:
                     step = IntersectStep(plan)
             else:
@@ -1623,17 +1633,7 @@
     def visit_relation(self, node, newroot, terms):
         if not node.is_types_restriction():
             if not node in terms and node in self.skip and self.solindices.issubset(self.skip[node]):
-                if not self.schema.rschema(node.r_type).final:
-                    # can't really skip the relation if one variable is selected
-                    # and only referenced by this relation
-                    for vref in node.iget_nodes(VariableRef):
-                        stinfo = vref.variable.stinfo
-                        if stinfo['selected'] and len(stinfo['relations']) == 1:
-                            break
-                    else:
-                        return None, node
-                else:
-                    return None, node
+                return None, node
             if not self._relation_supported(node):
                 raise UnsupportedBranch()
         # don't copy type restriction unless this is the only supported relation
@@ -1650,7 +1650,7 @@
         self._pending_vrefs = []
         try:
             res = self.visit_default(node, newroot, terms)[0]
-        except:
+        except Exception:
             # when a relation isn't supported, we should dereference potentially
             # introduced variable refs
             for vref in self._pending_vrefs:
--- a/server/pool.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/server/pool.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -15,19 +15,18 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""CubicWeb server connections pool : the repository has a limited number of
-connections pools, each of them dealing with a set of connections on each source
-used by the repository. A connections pools (`ConnectionsPool`) is an
-abstraction for a group of connection to each source.
+"""CubicWeb server connections set : the repository has a limited number of
+:class:`ConnectionsSet` (defined in configuration, default to 4). Each of them
+hold a connection for each source used by the repository.
 """
 
 __docformat__ = "restructuredtext en"
 
 import sys
 
-class ConnectionsPool(object):
+class ConnectionsSet(object):
     """handle connections on a set of sources, at some point associated to a
-    user session
+    :class:`Session`
     """
 
     def __init__(self, sources):
@@ -74,40 +73,40 @@
             # catch exceptions, rollback other sources anyway
             try:
                 cnx.rollback()
-            except:
+            except Exception:
                 source.critical('rollback error', exc_info=sys.exc_info())
                 # error on rollback, the connection is much probably in a really
                 # bad state. Replace it by a new one.
                 self.reconnect(source)
 
     def close(self, i_know_what_i_do=False):
-        """close all connections in the pool"""
+        """close all connections in the set"""
         if i_know_what_i_do is not True: # unexpected closing safety belt
-            raise RuntimeError('pool shouldn\'t be closed')
+            raise RuntimeError('connections set shouldn\'t be closed')
         for cu in self._cursors.values():
             try:
                 cu.close()
-            except:
+            except Exception:
                 continue
         for _, cnx in self.source_cnxs.values():
             try:
                 cnx.close()
-            except:
+            except Exception:
                 continue
 
     # internals ###############################################################
 
-    def pool_set(self):
-        """pool is being set"""
+    def cnxset_set(self):
+        """connections set is being set on a session"""
         self.check_connections()
 
-    def pool_reset(self):
-        """pool is being reseted"""
+    def cnxset_freed(self):
+        """connections set is being freed from a session"""
         for source, cnx in self.source_cnxs.values():
-            source.pool_reset(cnx)
+            source.cnxset_freed(cnx)
 
     def sources(self):
-        """return the source objects handled by this pool"""
+        """return the source objects handled by this connections set"""
         # implementation details of flying insert requires the system source
         # first
         yield self.source_cnxs['system'][0]
@@ -136,7 +135,7 @@
             try:
                 # properly close existing connection if any
                 self.source_cnxs[source.uri][1].close()
-            except:
+            except Exception:
                 pass
             source.info('trying to reconnect')
             self.source_cnxs[source.uri] = (source, source.get_connection())
--- a/server/querier.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/server/querier.py	Fri Dec 09 12:08:27 2011 +0100
@@ -169,7 +169,7 @@
         # session executing the query
         self.session = session
         # quick reference to the system source
-        self.syssource = session.pool.source('system')
+        self.syssource = session.cnxset.source('system')
         # execution steps
         self.steps = []
         # index of temporary tables created during execution
@@ -666,7 +666,7 @@
         if server.DEBUG & (server.DBG_RQL | server.DBG_SQL):
             if server.DEBUG & (server.DBG_MORE | server.DBG_SQL):
                 print '*'*80
-            print 'querier input', rql, args
+            print 'querier input', repr(rql), repr(args)
         # parse the query and binds variables
         cachekey = rql
         try:
@@ -734,8 +734,8 @@
             # transaction must been rollbacked
             #
             # notes:
-            # * we should not reset the pool here, since we don't want the
-            #   session to loose its pool during processing
+            # * we should not reset the connections set here, since we don't want the
+            #   session to lose it during processing
             # * don't rollback if we're in the commit process, will be handled
             #   by the session
             if session.commit_state is None:
--- a/server/repository.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/server/repository.py	Fri Dec 09 12:08:27 2011 +0100
@@ -154,13 +154,13 @@
         self.sources_by_uri = {'system': self.system_source}
         # querier helper, need to be created after sources initialization
         self.querier = querier.QuerierHelper(self, self.schema)
-        # cache eid -> type / source
+        # cache eid -> (type, physical source, extid, actual source)
         self._type_source_cache = {}
         # cache (extid, source uri) -> eid
         self._extid_cache = {}
-        # open some connections pools
-        if config.open_connections_pools:
-            self.open_connections_pools()
+        # open some connections set
+        if config.init_cnxset_pool:
+            self.init_cnxset_pool()
         @onevent('after-registry-reload', self)
         def fix_user_classes(self):
             usercls = self.vreg['etypes'].etype_class('CWUser')
@@ -168,10 +168,10 @@
                 if not isinstance(session.user, InternalManager):
                     session.user.__class__ = usercls
 
-    def open_connections_pools(self):
+    def init_cnxset_pool(self):
         config = self.config
-        self._available_pools = Queue.Queue()
-        self._available_pools.put_nowait(pool.ConnectionsPool(self.sources))
+        self._cnxsets_pool = Queue.Queue()
+        self._cnxsets_pool.put_nowait(pool.ConnectionsSet(self.sources))
         if config.quick_start:
             # quick start, usually only to get a minimal repository to get cubes
             # information (eg dump/restore/...)
@@ -213,14 +213,14 @@
             # configurate tsearch according to postgres version
             for source in self.sources:
                 source.init_creating()
-        # close initialization pool and reopen fresh ones for proper
+        # close initialization connections set and reopen fresh ones for proper
         # initialization now that we know cubes
-        self._get_pool().close(True)
-        # list of available pools (we can't iterate on Queue instance)
-        self.pools = []
+        self._get_cnxset().close(True)
+        # list of available_cnxsets (we can't iterate on Queue instance)
+        self.cnxsets = []
         for i in xrange(config['connections-pool-size']):
-            self.pools.append(pool.ConnectionsPool(self.sources))
-            self._available_pools.put_nowait(self.pools[-1])
+            self.cnxsets.append(pool.ConnectionsSet(self.sources))
+            self._cnxsets_pool.put_nowait(self.cnxsets[-1])
         if config.quick_start:
             config.init_cubes(self.get_cubes())
         self.hm = hook.HooksManager(self.vreg)
@@ -244,7 +244,7 @@
                     self.sources_by_eid[sourceent.eid] = self.system_source
                     self.system_source.init(True, sourceent)
                     continue
-                self.add_source(sourceent, add_to_pools=False)
+                self.add_source(sourceent, add_to_cnxsets=False)
         finally:
             session.close()
 
@@ -253,7 +253,7 @@
                       'can_cross_relation', 'rel_type_sources'):
             clear_cache(self, cache)
 
-    def add_source(self, sourceent, add_to_pools=True):
+    def add_source(self, sourceent, add_to_cnxsets=True):
         source = self.get_source(sourceent.type, sourceent.name,
                                  sourceent.host_config, sourceent.eid)
         self.sources_by_eid[sourceent.eid] = source
@@ -261,15 +261,15 @@
         if self.config.source_enabled(source):
             # call source's init method to complete their initialisation if
             # needed (for instance looking for persistent configuration using an
-            # internal session, which is not possible until pools have been
+            # internal session, which is not possible until connections sets have been
             # initialized)
             source.init(True, sourceent)
             if not source.copy_based_source:
                 self.sources.append(source)
                 self.querier.set_planner()
-                if add_to_pools:
-                    for pool in self.pools:
-                        pool.add_source(source)
+                if add_to_cnxsets:
+                    for cnxset in self.cnxsets:
+                       cnxset.add_source(source)
         else:
             source.init(False, sourceent)
         self._clear_planning_caches()
@@ -280,8 +280,8 @@
         if self.config.source_enabled(source) and not source.copy_based_source:
             self.sources.remove(source)
             self.querier.set_planner()
-            for pool in self.pools:
-                pool.remove_source(source)
+            for cnxset in self.cnxsets:
+                cnxset.remove_source(source)
         self._clear_planning_caches()
 
     def get_source(self, type, uri, source_config, eid=None):
@@ -344,7 +344,7 @@
             self.looping_task(cleanup_session_interval, self.clean_sessions)
         assert isinstance(self._looping_tasks, list), 'already started'
         for i, (interval, func, args) in enumerate(self._looping_tasks):
-            self._looping_tasks[i] = task = utils.LoopTask(interval, func, args)
+            self._looping_tasks[i] = task = utils.LoopTask(self, interval, func, args)
             self.info('starting task %s with interval %.2fs', task.name,
                       interval)
             task.start()
@@ -368,25 +368,25 @@
         t.start()
 
     #@locked
-    def _get_pool(self):
+    def _get_cnxset(self):
         try:
-            return self._available_pools.get(True, timeout=5)
+            return self._cnxsets_pool.get(True, timeout=5)
         except Queue.Empty:
-            raise Exception('no pool available after 5 secs, probably either a '
+            raise Exception('no connections set available after 5 secs, probably either a '
                             'bug in code (too many uncommited/rollbacked '
                             'connections) or too much load on the server (in '
                             'which case you can try to set a bigger '
-                            'connections pools size)')
+                            'connections pool size)')
 
-    def _free_pool(self, pool):
-        self._available_pools.put_nowait(pool)
+    def _free_cnxset(self, cnxset):
+        self._cnxsets_pool.put_nowait(cnxset)
 
     def pinfo(self):
-        # XXX: session.pool is accessed from a local storage, would be interesting
-        #      to see if there is a pool set in any thread specific data)
-        return '%s: %s (%s)' % (self._available_pools.qsize(),
+        # XXX: session.cnxset is accessed from a local storage, would be interesting
+        #      to see if there is a cnxset set in any thread specific data)
+        return '%s: %s (%s)' % (self._cnxsets_pool.qsize(),
                                 ','.join(session.user.login for session in self._sessions.values()
-                                         if session.pool),
+                                         if session.cnxset),
                                 threading.currentThread())
     def shutdown(self):
         """called on server stop event to properly close opened sessions and
@@ -409,12 +409,12 @@
                 or self.config.quick_start):
             self.hm.call_hooks('server_shutdown', repo=self)
         self.close_sessions()
-        while not self._available_pools.empty():
-            pool = self._available_pools.get_nowait()
+        while not self._cnxsets_pool.empty():
+            cnxset = self._cnxsets_pool.get_nowait()
             try:
-                pool.close(True)
-            except:
-                self.exception('error while closing %s' % pool)
+                cnxset.close(True)
+            except Exception:
+                self.exception('error while closing %s' % cnxset)
                 continue
         if self.pyro_registered:
             if self._use_pyrons():
@@ -496,7 +496,7 @@
         results['nb_open_sessions'] = len(self._sessions)
         results['nb_active_threads'] = threading.activeCount()
         results['looping_tasks'] = ', '.join(str(t) for t in self._looping_tasks)
-        results['available_pools'] = self._available_pools.qsize()
+        results['available_cnxsets'] = self._cnxsets_pool.qsize()
         results['threads'] = ', '.join(sorted(str(t) for t in threading.enumerate()))
         return results
 
@@ -505,12 +505,7 @@
 
         This is a public method, not requiring a session id.
         """
-        try:
-            # necessary to support pickling used by pyro
-            self.schema.__hashmode__ = 'pickle'
-            return self.schema
-        finally:
-            self.schema.__hashmode__ = None
+        return self.schema
 
     def get_cubes(self):
         """Return the list of cubes used by this instance.
@@ -535,12 +530,12 @@
         # XXX we may want to check we don't give sensible information
         if foreid is None:
             return self.config[option]
-        _, sourceuri, extid = self.type_and_source_from_eid(foreid)
+        _, sourceuri, extid, _ = self.type_and_source_from_eid(foreid)
         if sourceuri == 'system':
             return self.config[option]
-        pool = self._get_pool()
+        cnxset = self._get_cnxset()
         try:
-            cnx = pool.connection(sourceuri)
+            cnx = cnxset.connection(sourceuri)
             # needed to check connection is valid and usable by the current
             # thread
             newcnx = self.sources_by_uri[sourceuri].check_connection(cnx)
@@ -548,7 +543,7 @@
                 cnx = newcnx
             return cnx.get_option_value(option, extid)
         finally:
-            self._free_pool(pool)
+            self._free_cnxset(cnxset)
 
     @cached
     def get_versions(self, checkversions=False):
@@ -721,7 +716,7 @@
         * build_descr is a flag indicating if the description should be
           built on select queries
         """
-        session = self._get_session(sessionid, setpool=True, txid=txid)
+        session = self._get_session(sessionid, setcnxset=True, txid=txid)
         try:
             try:
                 rset = self.querier.execute(session, rqlstring, args,
@@ -747,21 +742,23 @@
                 self.exception('unexpected error while executing %s with %s', rqlstring, args)
                 raise
         finally:
-            session.reset_pool()
+            session.free_cnxset()
 
     def describe(self, sessionid, eid, txid=None):
-        """return a tuple (type, source, extid) for the entity with id <eid>"""
-        session = self._get_session(sessionid, setpool=True, txid=txid)
+        """return a tuple `(type, physical source uri, extid, actual source
+        uri)` for the entity of the given `eid`
+        """
+        session = self._get_session(sessionid, setcnxset=True, txid=txid)
         try:
             return self.type_and_source_from_eid(eid, session)
         finally:
-            session.reset_pool()
+            session.free_cnxset()
 
     def check_session(self, sessionid):
         """raise `BadConnectionId` if the connection is no more valid, else
         return its latest activity timestamp.
         """
-        return self._get_session(sessionid, setpool=False).timestamp
+        return self._get_session(sessionid, setcnxset=False).timestamp
 
     def get_shared_data(self, sessionid, key, default=None, pop=False, txdata=False):
         """return value associated to key in the session's data dictionary or
@@ -772,7 +769,7 @@
         If key isn't defined in the dictionnary, value specified by the
         `default` argument will be returned.
         """
-        session = self._get_session(sessionid, setpool=False)
+        session = self._get_session(sessionid, setcnxset=False)
         return session.get_shared_data(key, default, pop, txdata)
 
     def set_shared_data(self, sessionid, key, value, txdata=False):
@@ -782,7 +779,7 @@
         transaction's data which are cleared on commit/rollback of the current
         transaction.
         """
-        session = self._get_session(sessionid, setpool=False)
+        session = self._get_session(sessionid, setcnxset=False)
         session.set_shared_data(key, value, txdata)
 
     def commit(self, sessionid, txid=None):
@@ -794,7 +791,7 @@
             return session.commit()
         except (ValidationError, Unauthorized):
             raise
-        except:
+        except Exception:
             self.exception('unexpected error')
             raise
 
@@ -805,16 +802,16 @@
             session = self._get_session(sessionid)
             session.set_tx_data(txid)
             session.rollback()
-        except:
+        except Exception:
             self.exception('unexpected error')
             raise
 
     def close(self, sessionid, txid=None, checkshuttingdown=True):
         """close the session with the given id"""
-        session = self._get_session(sessionid, setpool=True, txid=txid,
+        session = self._get_session(sessionid, setcnxset=True, txid=txid,
                                     checkshuttingdown=checkshuttingdown)
         # operation uncommited before close are rollbacked before hook is called
-        session.rollback(reset_pool=False)
+        session.rollback(free_cnxset=False)
         self.hm.call_hooks('session_close', session)
         # commit session at this point in case write operation has been done
         # during `session_close` hooks
@@ -829,7 +826,7 @@
         * update user information on each user's request (i.e. groups and
           custom properties)
         """
-        session = self._get_session(sessionid, setpool=False)
+        session = self._get_session(sessionid, setcnxset=False)
         if props is not None:
             self.set_session_props(sessionid, props)
         user = session.user
@@ -841,43 +838,43 @@
         * update user information on each user's request (i.e. groups and
           custom properties)
         """
-        session = self._get_session(sessionid, setpool=False)
+        session = self._get_session(sessionid, setcnxset=False)
         for prop, value in props.items():
             session.change_property(prop, value)
 
     def undoable_transactions(self, sessionid, ueid=None, txid=None,
                               **actionfilters):
         """See :class:`cubicweb.dbapi.Connection.undoable_transactions`"""
-        session = self._get_session(sessionid, setpool=True, txid=txid)
+        session = self._get_session(sessionid, setcnxset=True, txid=txid)
         try:
             return self.system_source.undoable_transactions(session, ueid,
                                                             **actionfilters)
         finally:
-            session.reset_pool()
+            session.free_cnxset()
 
     def transaction_info(self, sessionid, txuuid, txid=None):
         """See :class:`cubicweb.dbapi.Connection.transaction_info`"""
-        session = self._get_session(sessionid, setpool=True, txid=txid)
+        session = self._get_session(sessionid, setcnxset=True, txid=txid)
         try:
             return self.system_source.tx_info(session, txuuid)
         finally:
-            session.reset_pool()
+            session.free_cnxset()
 
     def transaction_actions(self, sessionid, txuuid, public=True, txid=None):
         """See :class:`cubicweb.dbapi.Connection.transaction_actions`"""
-        session = self._get_session(sessionid, setpool=True, txid=txid)
+        session = self._get_session(sessionid, setcnxset=True, txid=txid)
         try:
             return self.system_source.tx_actions(session, txuuid, public)
         finally:
-            session.reset_pool()
+            session.free_cnxset()
 
     def undo_transaction(self, sessionid, txuuid, txid=None):
         """See :class:`cubicweb.dbapi.Connection.undo_transaction`"""
-        session = self._get_session(sessionid, setpool=True, txid=txid)
+        session = self._get_session(sessionid, setcnxset=True, txid=txid)
         try:
             return self.system_source.undo_transaction(session, txuuid)
         finally:
-            session.reset_pool()
+            session.free_cnxset()
 
     # public (inter-repository) interface #####################################
 
@@ -893,7 +890,7 @@
           deleted since the given timestamp
         """
         session = self.internal_session()
-        updatetime = datetime.now()
+        updatetime = datetime.utcnow()
         try:
             modentities, delentities = self.system_source.modified_entities(
                 session, etypes, mtime)
@@ -908,7 +905,7 @@
         for sessionid in self._sessions.keys():
             try:
                 self.close(sessionid, checkshuttingdown=False)
-            except:
+            except Exception: # XXX BaseException?
                 self.exception('error while closing session %s' % sessionid)
 
     def clean_sessions(self):
@@ -925,18 +922,20 @@
                 nbclosed += 1
         return nbclosed
 
-    def internal_session(self, cnxprops=None):
-        """return a dbapi like connection/cursor using internal user which
-        have every rights on the repository. You'll *have to* commit/rollback
-        or close (rollback implicitly) the session once the job's done, else
-        you'll leak connections pool up to the time where no more pool is
-        available, causing irremediable freeze...
+    def internal_session(self, cnxprops=None, safe=False):
+        """return a dbapi like connection/cursor using internal user which have
+        every rights on the repository. The `safe` argument is a boolean flag
+        telling if integrity hooks should be activated or not.
+
+        *YOU HAVE TO* commit/rollback or close (rollback implicitly) the
+        session once the job's done, else you'll leak connections set up to the
+        time where no one is available, causing irremediable freeze...
         """
-        session = InternalSession(self, cnxprops)
-        session.set_pool()
+        session = InternalSession(self, cnxprops, safe)
+        session.set_cnxset()
         return session
 
-    def _get_session(self, sessionid, setpool=False, txid=None,
+    def _get_session(self, sessionid, setcnxset=False, txid=None,
                      checkshuttingdown=True):
         """return the user associated to the given session identifier"""
         if checkshuttingdown and self.shutting_down:
@@ -945,9 +944,9 @@
             session = self._sessions[sessionid]
         except KeyError:
             raise BadConnectionId('No such session %s' % sessionid)
-        if setpool:
-            session.set_tx_data(txid) # must be done before set_pool
-            session.set_pool()
+        if setcnxset:
+            session.set_tx_data(txid) # must be done before set_cnxset
+            session.set_cnxset()
         return session
 
     # data sources handling ###################################################
@@ -955,7 +954,9 @@
     # * correspondance between eid and local id (i.e. specific to a given source)
 
     def type_and_source_from_eid(self, eid, session=None):
-        """return a tuple (type, source, extid) for the entity with id <eid>"""
+        """return a tuple `(type, physical source uri, extid, actual source
+        uri)` for the entity of the given `eid`
+        """
         try:
             eid = typed_eid(eid)
         except ValueError:
@@ -965,19 +966,19 @@
         except KeyError:
             if session is None:
                 session = self.internal_session()
-                reset_pool = True
+                free_cnxset = True
             else:
-                reset_pool = False
+                free_cnxset = False
             try:
-                etype, uri, extid = self.system_source.eid_type_source(session,
-                                                                       eid)
+                etype, uri, extid, auri = self.system_source.eid_type_source(
+                    session, eid)
             finally:
-                if reset_pool:
-                    session.reset_pool()
-        self._type_source_cache[eid] = (etype, uri, extid)
-        if uri != 'system':
-            self._extid_cache[(extid, uri)] = eid
-        return etype, uri, extid
+                if free_cnxset:
+                    session.free_cnxset()
+            self._type_source_cache[eid] = (etype, uri, extid, auri)
+            if uri != 'system':
+                self._extid_cache[(extid, uri)] = eid
+            return etype, uri, extid, auri
 
     def clear_caches(self, eids):
         etcache = self._type_source_cache
@@ -985,7 +986,7 @@
         rqlcache = self.querier._rql_cache
         for eid in eids:
             try:
-                etype, uri, extid = etcache.pop(typed_eid(eid)) # may be a string in some cases
+                etype, uri, extid, auri = etcache.pop(typed_eid(eid)) # may be a string in some cases
                 rqlcache.pop('%s X WHERE X eid %s' % (etype, eid), None)
                 extidcache.pop((extid, uri), None)
             except KeyError:
@@ -1019,31 +1020,52 @@
 
     def eid2extid(self, source, eid, session=None):
         """get local id from an eid"""
-        etype, uri, extid = self.type_and_source_from_eid(eid, session)
+        etype, uri, extid, _ = self.type_and_source_from_eid(eid, session)
         if source.uri != uri:
             # eid not from the given source
             raise UnknownEid(eid)
         return extid
 
     def extid2eid(self, source, extid, etype, session=None, insert=True,
-                  sourceparams=None):
-        """get eid from a local id. An eid is attributed if no record is found"""
+                  complete=True, commit=True, sourceparams=None):
+        """Return eid from a local id. If the eid is a negative integer, that
+        means the entity is known but has been copied back to the system source
+        hence should be ignored.
+
+        If no record is found, ie the entity is not known yet:
+
+        1. an eid is attributed
+
+        2. the source's :meth:`before_entity_insertion` method is called to
+           build the entity instance
+
+        3. unless source's :attr:`should_call_hooks` tell otherwise,
+          'before_add_entity' hooks are called
+
+        4. record is added into the system source
+
+        5. the source's :meth:`after_entity_insertion` method is called to
+           complete building of the entity instance
+
+        6. unless source's :attr:`should_call_hooks` tell otherwise,
+          'after_add_entity' hooks are called
+        """
         uri = 'system' if source.copy_based_source else source.uri
         cachekey = (extid, uri)
         try:
             return self._extid_cache[cachekey]
         except KeyError:
             pass
-        reset_pool = False
+        free_cnxset = False
         if session is None:
             session = self.internal_session()
-            reset_pool = True
+            free_cnxset = True
         eid = self.system_source.extid2eid(session, uri, extid)
         if eid is not None:
             self._extid_cache[cachekey] = eid
-            self._type_source_cache[eid] = (etype, uri, extid)
-            if reset_pool:
-                session.reset_pool()
+            self._type_source_cache[eid] = (etype, uri, extid, source.uri)
+            if free_cnxset:
+                session.free_cnxset()
             return eid
         if not insert:
             return
@@ -1055,24 +1077,25 @@
         # processing a commit, we have to use another one
         if not session.is_internal_session:
             session = self.internal_session()
-            reset_pool = True
+            free_cnxset = True
         try:
             eid = self.system_source.create_eid(session)
             self._extid_cache[cachekey] = eid
-            self._type_source_cache[eid] = (etype, uri, extid)
+            self._type_source_cache[eid] = (etype, uri, extid, source.uri)
             entity = source.before_entity_insertion(
                 session, extid, etype, eid, sourceparams)
             if source.should_call_hooks:
                 self.hm.call_hooks('before_add_entity', session, entity=entity)
-            # XXX call add_info with complete=False ?
-            self.add_info(session, entity, source, extid)
+            self.add_info(session, entity, source, extid, complete=complete)
             source.after_entity_insertion(session, extid, entity, sourceparams)
             if source.should_call_hooks:
                 self.hm.call_hooks('after_add_entity', session, entity=entity)
-            session.commit(reset_pool)
+            if commit or free_cnxset:
+                session.commit(free_cnxset)
             return eid
-        except:
-            session.rollback(reset_pool)
+        except Exception:
+            if commit or free_cnxset:
+                session.rollback(free_cnxset)
             raise
 
     def add_info(self, session, entity, source, extid=None, complete=True):
@@ -1083,22 +1106,32 @@
         hook.CleanupNewEidsCacheOp.get_instance(session).add_data(entity.eid)
         self.system_source.add_info(session, entity, source, extid, complete)
 
-    def delete_info(self, session, entity, sourceuri, extid, scleanup=None):
+    def delete_info(self, session, entity, sourceuri, scleanup=None):
         """called by external source when some entity known by the system source
         has been deleted in the external source
         """
         # mark eid as being deleted in session info and setup cache update
         # operation
         hook.CleanupDeletedEidsCacheOp.get_instance(session).add_data(entity.eid)
-        self._delete_info(session, entity, sourceuri, extid, scleanup)
+        self._delete_info(session, entity, sourceuri, scleanup)
 
-    def _delete_info(self, session, entity, sourceuri, extid, scleanup=None):
+    def _delete_info(self, session, entity, sourceuri, scleanup=None):
         """delete system information on deletion of an entity:
+
         * delete all remaining relations from/to this entity
+
         * call delete info on the system source which will transfer record from
           the entities table to the deleted_entities table
+
+        When scleanup is specified, it's expected to be the source's eid, in
+        which case we'll specify the target's relation source so that this
+        source is ignored. E.g. we want to delete relations stored locally, as
+        the deletion information comes from the external source, it's its
+        responsibility to have cleaned up its own relations.
         """
         pendingrtypes = session.transaction_data.get('pendingrtypes', ())
+        if scleanup is not None:
+            source = self.sources_by_eid[scleanup]
         # delete remaining relations: if user can delete the entity, he can
         # delete all its relations without security checking
         with security_enabled(session, read=False, write=False):
@@ -1114,6 +1147,13 @@
                 else:
                     rql = 'DELETE Y %s X WHERE X eid %%(x)s' % rtype
                 if scleanup is not None:
+                    # if the relation can't be crossed, nothing to cleanup (we
+                    # would get a BadRQLQuery from the multi-sources planner).
+                    # This may still leave some junk if the mapping has changed
+                    # at some point, but one can still run db-check to catch
+                    # those
+                    if not source in self.can_cross_relation(rtype):
+                        continue
                     # source cleaning: only delete relations stored locally
                     # (here, scleanup
                     rql += ', NOT (Y cw_source S, S eid %(seid)s)'
@@ -1121,6 +1161,8 @@
                     session.execute(rql, {'x': eid, 'seid': scleanup},
                                     build_descr=False)
                 except Exception:
+                    if self.config.mode == 'test':
+                        raise
                     self.exception('error while cascading delete for entity %s '
                                    'from %s. RQL: %s', entity, sourceuri, rql)
         self.system_source.delete_info_multi(session, [entity], sourceuri)
@@ -1130,11 +1172,12 @@
         the same etype and belinging to the same source.
         """
         pendingrtypes = session.transaction_data.get('pendingrtypes', ())
+        if scleanup is not None:
+            source = self.sources_by_eid[scleanup]
         # delete remaining relations: if user can delete the entity, he can
         # delete all its relations without security checking
         with security_enabled(session, read=False, write=False):
-            eids = [_e.eid for _e in entities]
-            in_eids = ','.join((str(eid) for eid in eids))
+            in_eids = ','.join([str(_e.eid) for _e in entities])
             for rschema, _, role in entities[0].e_schema.relation_definitions():
                 rtype = rschema.type
                 if rtype in schema.VIRTUAL_RTYPES or rtype in pendingrtypes:
@@ -1146,11 +1189,20 @@
                 else:
                     rql = 'DELETE Y %s X WHERE X eid IN (%s)' % (rtype, in_eids)
                 if scleanup is not None:
+                    # if the relation can't be crossed, nothing to cleanup (we
+                    # would get a BadRQLQuery from the multi-sources planner).
+                    # This may still leave some junk if the mapping has changed
+                    # at some point, but one can still run db-check to catch
+                    # those
+                    if not source in self.can_cross_relation(rtype):
+                        continue
                     # source cleaning: only delete relations stored locally
                     rql += ', NOT (Y cw_source S, S eid %(seid)s)'
                 try:
                     session.execute(rql, {'seid': scleanup}, build_descr=False)
                 except Exception:
+                    if self.config.mode == 'test':
+                        raise
                     self.exception('error while cascading delete for entity %s '
                                    'from %s. RQL: %s', entities, sourceuri, rql)
         self.system_source.delete_info_multi(session, entities, sourceuri)
@@ -1195,7 +1247,8 @@
                 suri = 'system'
             extid = source.get_extid(entity)
             self._extid_cache[(str(extid), suri)] = entity.eid
-        self._type_source_cache[entity.eid] = (entity.__regid__, suri, extid)
+        self._type_source_cache[entity.eid] = (entity.__regid__, suri, extid,
+                                               source.uri)
         return extid
 
     def glob_add_entity(self, session, edited):
@@ -1356,7 +1409,7 @@
         # in setdefault, this should not be changed without profiling.
 
         for eid in eids:
-            etype, sourceuri, extid = self.type_and_source_from_eid(eid, session)
+            etype, sourceuri, extid, _ = self.type_and_source_from_eid(eid, session)
             # XXX should cache entity's cw_metainformation
             entity = session.entity_from_eid(eid, etype)
             try:
@@ -1369,7 +1422,11 @@
             source = self.sources_by_uri[sourceuri]
             if source.should_call_hooks:
                 self.hm.call_hooks('before_delete_entity', session, entities=entities)
-            self._delete_info_multi(session, entities, sourceuri)
+            if session.deleted_in_transaction(source.eid):
+                # source is being deleted, think to give scleanup argument
+                self._delete_info_multi(session, entities, sourceuri, scleanup=source.eid)
+            else:
+                self._delete_info_multi(session, entities, sourceuri)
             source.delete_entities(session, entities)
             if source.should_call_hooks:
                 self.hm.call_hooks('after_delete_entity', session, entities=entities)
--- a/server/rqlannotation.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/server/rqlannotation.py	Fri Dec 09 12:08:27 2011 +0100
@@ -109,8 +109,9 @@
                         ostinfo = rhs.children[0].variable.stinfo
                     else:
                         ostinfo = lhs.variable.stinfo
-                    if not any(orel for orel in ostinfo['relations']
-                               if orel.optional and orel is not rel):
+                    if not (ostinfo.get('optcomparisons') or
+                            any(orel for orel in ostinfo['relations']
+                                if orel.optional and orel is not rel)):
                         break
             if rschema.final or (onlhs and rschema.inlined):
                 if rschema.type != 'has_text':
@@ -202,8 +203,8 @@
     # since introduced duplicates will be removed
     if scope.stmt.distinct and diffscope_rels:
         return iter(_sort(diffscope_rels)).next()
-    # XXX  could use a relation for a different scope if it can't generate
-    # duplicates, so we would have to check cardinality
+    # XXX could use a relation from a different scope if it can't generate
+    # duplicates, so we would have to check cardinality
     raise CantSelectPrincipal()
 
 def _select_main_var(relations):
@@ -211,16 +212,22 @@
     relation for the rhs variable
     """
     principal = None
+    others = []
     # sort for test predictability
     for rel in sorted(relations, key=lambda x: (x.children[0].name, x.r_type)):
         # only equality relation with a variable as rhs may be principal
         if rel.operator() not in ('=', 'IS') \
                or not isinstance(rel.children[1].children[0], VariableRef) or rel.neged(strict=True):
             continue
+        if rel.optional:
+            others.append(rel)
+            continue
         if rel.scope is rel.stmt:
             return rel
         principal = rel
     if principal is None:
+        if others:
+            return others[0]
         raise BadRQLQuery('unable to find principal in %s' % ', '.join(
             r.as_string() for r in relations))
     return principal
--- a/server/schemaserial.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/server/schemaserial.py	Fri Dec 09 12:08:27 2011 +0100
@@ -88,7 +88,7 @@
     repo = session.repo
     dbhelper = repo.system_source.dbhelper
     # XXX bw compat (3.6 migration)
-    sqlcu = session.pool['system']
+    sqlcu = session.cnxset['system']
     sqlcu.execute("SELECT * FROM cw_CWRType WHERE cw_name='symetric'")
     if sqlcu.fetchall():
         sql = dbhelper.sql_rename_col('cw_CWRType', 'cw_symetric', 'cw_symmetric',
@@ -135,11 +135,11 @@
             try:
                 sqlexec('UPDATE deleted_entities SET type=%(n)s WHERE type=%(x)s',
                         {'x': etype, 'n': netype})
-            except:
+            except Exception:
                 pass
             tocleanup = [eid]
-            tocleanup += (eid for eid, (eidetype, uri, extid) in repo._type_source_cache.items()
-                          if etype == eidetype)
+            tocleanup += (eid for eid, cached in repo._type_source_cache.iteritems()
+                          if etype == cached[0])
             repo.clear_caches(tocleanup)
             session.commit(False)
             if needcopy:
--- a/server/server.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/server/server.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -129,6 +129,13 @@
         signal.signal(signal.SIGINT, lambda x, y, s=self: s.quit())
         signal.signal(signal.SIGTERM, lambda x, y, s=self: s.quit())
 
+
+    # these are overridden by set_log_methods below
+    # only defining here to prevent pylint from complaining
+    @classmethod
+    def info(cls, msg, *a, **kw):
+        pass
+
 from logging import getLogger
 from cubicweb import set_log_methods
 LOGGER = getLogger('cubicweb.reposerver')
--- a/server/serverconfig.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/server/serverconfig.py	Fri Dec 09 12:08:27 2011 +0100
@@ -130,7 +130,7 @@
         ('connections-pool-size',
          {'type' : 'int',
           'default': 4,
-          'help': 'size of the connections pools. Each source supporting multiple \
+          'help': 'size of the connections pool. Each source supporting multiple \
 connections will have this number of opened connections.',
           'group': 'main', 'level': 3,
           }),
@@ -209,9 +209,9 @@
           }),
         ) + CubicWebConfiguration.options)
 
-    # should we open connections pools (eg connect to sources). This is usually
-    # necessary...
-    open_connections_pools = True
+    # should we init the connections pool (eg connect to sources). This is
+    # usually necessary...
+    init_cnxset_pool = True
 
     # read the schema from the database
     read_instance_schema = True
@@ -255,7 +255,7 @@
     # configuration file (#16102)
     @cached
     def read_sources_file(self):
-        return read_config(self.sources_file())
+        return read_config(self.sources_file(), raise_if_unreadable=True)
 
     def sources(self):
         """return a dictionnaries containing sources definitions indexed by
--- a/server/serverctl.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/server/serverctl.py	Fri Dec 09 12:08:27 2011 +0100
@@ -24,6 +24,7 @@
 # completion). So import locally in command helpers.
 import sys
 import os
+import logging
 
 from logilab.common import nullobject
 from logilab.common.configuration import Configuration
@@ -122,11 +123,10 @@
                             interactive=interactive)
     # disable autocommit (isolation_level(1)) because DROP and
     # CREATE DATABASE can't be executed in a transaction
-    try:
-        cnx.set_isolation_level(0)
-    except AttributeError:
+    set_isolation_level = getattr(cnx, 'set_isolation_level', None)
+    if set_isolation_level is not None:
         # set_isolation_level() is psycopg specific
-        pass
+        set_isolation_level(0)
     return cnx
 
 def repo_cnx(config):
@@ -248,7 +248,7 @@
                         cursor.execute, 'DROP USER %s' % user) is not ERROR:
                         print '-> user %s dropped.' % user
                 cnx.commit()
-            except:
+            except BaseException:
                 cnx.rollback()
                 raise
 
@@ -363,21 +363,28 @@
                 createdb(helper, source, dbcnx, cursor)
                 dbcnx.commit()
                 print '-> database %s created.' % dbname
-            except:
+            except BaseException:
                 dbcnx.rollback()
                 raise
         cnx = system_source_cnx(source, special_privs='CREATE LANGUAGE',
                                 interactive=not automatic)
         cursor = cnx.cursor()
         helper.init_fti_extensions(cursor)
+        cnx.commit()
         # postgres specific stuff
         if driver == 'postgres':
-            # install plpythonu/plpgsql language if not installed by the cube
-            langs = sys.platform == 'win32' and ('plpgsql',) or ('plpythonu', 'plpgsql')
+            # install plpythonu/plpgsql languages
+            langs = ('plpythonu', 'plpgsql')
             for extlang in langs:
-                helper.create_language(cursor, extlang)
-        cursor.close()
-        cnx.commit()
+                if automatic or ASK.confirm('Create language %s ?' % extlang):
+                    try:
+                        helper.create_language(cursor, extlang)
+                    except Exception, exc:
+                        print '-> ERROR:', exc
+                        print '-> could not create language %s, some stored procedures might be unusable' % extlang
+                        cnx.rollback()
+                    else:
+                        cnx.commit()
         print '-> database for instance %s created and necessary extensions installed.' % appid
         print
         if automatic:
@@ -560,6 +567,7 @@
     """
     name = 'reset-admin-pwd'
     arguments = '<instance>'
+    min_args = max_args = 1
     options = (
         ('password',
          {'short': 'p', 'type' : 'string', 'metavar' : '<new-password>',
@@ -643,14 +651,13 @@
         )
 
     def run(self, args):
-        from logilab.common.daemon import daemonize
+        from logilab.common.daemon import daemonize, setugid
         from cubicweb.cwctl import init_cmdline_log_threshold
         from cubicweb.server.server import RepositoryServer
         appid = args[0]
         debug = self['debug']
         if sys.platform == 'win32' and not debug:
-            from logging import getLogger
-            logger = getLogger('cubicweb.ctl')
+            logger = logging.getLogger('cubicweb.ctl')
             logger.info('Forcing debug mode on win32 platform')
             debug = True
         config = ServerConfiguration.config_for(appid, debugmode=debug)
@@ -668,12 +675,7 @@
             return
         uid = config['uid']
         if uid is not None:
-            try:
-                uid = int(uid)
-            except ValueError:
-                from pwd import getpwnam
-                uid = getpwnam(uid).pw_uid
-            os.setuid(uid)
+            setugid(uid)
         server.install_sig_handlers()
         server.connect(config['host'], 0)
         server.run()
@@ -982,7 +984,7 @@
         appid = args[0]
         config = ServerConfiguration.config_for(appid)
         repo, cnx = repo_cnx(config)
-        session = repo._get_session(cnx.sessionid, setpool=True)
+        session = repo._get_session(cnx.sessionid, setcnxset=True)
         reindex_entities(repo.schema, session)
         cnx.commit()
 
@@ -1007,11 +1009,43 @@
         mih.cmd_synchronize_schema()
 
 
+class SynchronizeSourceCommand(Command):
+    """Force a source synchronization.
+
+    <instance>
+      the identifier of the instance
+    <source>
+      the name of the source to synchronize.
+    """
+    name = 'source-sync'
+    arguments = '<instance> <source>'
+    min_args = max_args = 2
+
+    def run(self, args):
+        config = ServerConfiguration.config_for(args[0])
+        config.global_set_option('log-file', None)
+        config.log_format = '%(levelname)s %(name)s: %(message)s'
+        logger = logging.getLogger('cubicweb.sources')
+        logger.setLevel(logging.INFO)
+        # only retrieve cnx to trigger authentication, close it right away
+        repo, cnx = repo_cnx(config)
+        cnx.close()
+        try:
+            source = repo.sources_by_uri[args[1]]
+        except KeyError:
+            raise ExecutionError('no source named %r' % args[1])
+        session = repo.internal_session()
+        stats = source.pull_data(session, force=True, raise_on_error=True)
+        for key, val in stats.iteritems():
+            if val:
+                print key, ':', val
+
+
 for cmdclass in (CreateInstanceDBCommand, InitInstanceCommand,
                  GrantUserOnInstanceCommand, ResetAdminPasswordCommand,
                  StartRepositoryCommand,
                  DBDumpCommand, DBRestoreCommand, DBCopyCommand,
                  AddSourceCommand, CheckRepositoryCommand, RebuildFTICommand,
-                 SynchronizeInstanceSchemaCommand,
+                 SynchronizeInstanceSchemaCommand, SynchronizeSourceCommand
                  ):
     CWCTL.register(cmdclass)
--- a/server/session.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/server/session.py	Fri Dec 09 12:08:27 2011 +0100
@@ -61,6 +61,18 @@
         description.append(term.get_type(solution, args))
     return description
 
+def selection_idx_type(i, rqlst, args):
+    """try to return type of term at index `i` of the rqlst's selection"""
+    for select in rqlst.children:
+        term = select.selection[i]
+        for solution in select.solutions:
+            try:
+                ttype = term.get_type(solution, args)
+                if ttype is not None:
+                    return ttype
+            except CoercionError:
+                return None
+
 @objectify_selector
 def is_user_session(cls, req, **kwargs):
     """repository side only selector returning 1 if the session is a regular
@@ -125,21 +137,13 @@
         self.categories = categories
 
     def __enter__(self):
-        self.oldmode = self.session.set_hooks_mode(self.mode)
-        if self.mode is self.session.HOOKS_DENY_ALL:
-            self.changes = self.session.enable_hook_categories(*self.categories)
-        else:
-            self.changes = self.session.disable_hook_categories(*self.categories)
+        self.oldmode, self.changes = self.session.init_hooks_mode_categories(
+            self.mode, self.categories)
 
     def __exit__(self, exctype, exc, traceback):
-        if self.changes:
-            if self.mode is self.session.HOOKS_DENY_ALL:
-                self.session.disable_hook_categories(*self.changes)
-            else:
-                self.session.enable_hook_categories(*self.changes)
-        self.session.set_hooks_mode(self.oldmode)
+        self.session.reset_hooks_mode_categories(self.oldmode, self.mode, self.changes)
 
-INDENT = ''
+
 class security_enabled(object):
     """context manager to control security w/ session.execute, since by
     default security is disabled on queries executed on the repository
@@ -151,33 +155,90 @@
         self.write = write
 
     def __enter__(self):
-#        global INDENT
-        if self.read is not None:
-            self.oldread = self.session.set_read_security(self.read)
-#            print INDENT + 'read', self.read, self.oldread
-        if self.write is not None:
-            self.oldwrite = self.session.set_write_security(self.write)
-#            print INDENT + 'write', self.write, self.oldwrite
-#        INDENT += '  '
+        self.oldread, self.oldwrite = self.session.init_security(
+            self.read, self.write)
 
     def __exit__(self, exctype, exc, traceback):
-#        global INDENT
-#        INDENT = INDENT[:-2]
-        if self.read is not None:
-            self.session.set_read_security(self.oldread)
-#            print INDENT + 'reset read to', self.oldread
-        if self.write is not None:
-            self.session.set_write_security(self.oldwrite)
-#            print INDENT + 'reset write to', self.oldwrite
+        self.session.reset_security(self.oldread, self.oldwrite)
 
 
 class TransactionData(object):
     def __init__(self, txid):
         self.transactionid = txid
+        self.ctx_count = 0
+
 
 class Session(RequestSessionBase):
-    """tie session id, user, connections pool and other session data all
-    together
+    """Repository usersession, tie a session id, user, connections set and
+    other session data all together.
+
+    About session storage / transactions
+    ------------------------------------
+
+    Here is a description of internal session attributes. Besides :attr:`data`
+    and :attr:`transaction_data`, you should not have to use attributes
+    described here but higher level APIs.
+
+      :attr:`data` is a dictionary containing shared data, used to communicate
+      extra information between the client and the repository
+
+      :attr:`_tx_data` is a dictionary of :class:`TransactionData` instance, one
+      for each running transaction. The key is the transaction id. By default
+      the transaction id is the thread name but it can be otherwise (per dbapi
+      cursor for instance, or per thread name *from another process*).
+
+      :attr:`__threaddata` is a thread local storage whose `txdata` attribute
+      refers to the proper instance of :class:`TransactionData` according to the
+      transaction.
+
+      :attr:`_threads_in_transaction` is a set of (thread, connections set)
+      referencing threads that currently hold a connections set for the session.
+
+    You should not have to use neither :attr:`_txdata` nor :attr:`__threaddata`,
+    simply access transaction data transparently through the :attr:`_threaddata`
+    property. Also, you usually don't have to access it directly since current
+    transaction's data may be accessed/modified through properties / methods:
+
+      :attr:`transaction_data`, similarly to :attr:`data`, is a dictionary
+      containing some shared data that should be cleared at the end of the
+      transaction. Hooks and operations may put arbitrary data in there, and
+      this may also be used as a communication channel between the client and
+      the repository.
+
+      :attr:`cnxset`, the connections set to use to execute queries on sources.
+      During a transaction, the connections set may be freed so that it may be
+      used by another session as long as no writing is done. This means we can
+      have multiple sessions with a reasonably low connections set pool size.
+
+      :attr:`mode`, string telling the connections set handling mode, may be one
+      of 'read' (connections set may be freed), 'write' (some write was done in
+      the connections set, it can't be freed before end of the transaction),
+      'transaction' (we want to keep the connections set during all the
+      transaction, with or without writing)
+
+      :attr:`pending_operations`, ordered list of operations to be processed on
+      commit/rollback
+
+      :attr:`commit_state`, describing the transaction commit state, may be one
+      of None (not yet committing), 'precommit' (calling precommit event on
+      operations), 'postcommit' (calling postcommit event on operations),
+      'uncommitable' (some :exc:`ValidationError` or :exc:`Unauthorized` error
+      has been raised during the transaction and so it must be rolled back).
+
+      :attr:`read_security` and :attr:`write_security`, boolean flags telling if
+      read/write security is currently activated.
+
+      :attr:`hooks_mode`, may be either `HOOKS_ALLOW_ALL` or `HOOKS_DENY_ALL`.
+
+      :attr:`enabled_hook_categories`, when :attr:`hooks_mode` is
+      `HOOKS_DENY_ALL`, this set contains hooks categories that are enabled.
+
+      :attr:`disabled_hook_categories`, when :attr:`hooks_mode` is
+      `HOOKS_ALLOW_ALL`, this set contains hooks categories that are disabled.
+
+
+      :attr:`running_dbapi_query`, boolean flag telling if the executing query
+      is coming from a dbapi connection or is a query from within the repository
     """
     is_internal_session = False
 
@@ -246,7 +307,10 @@
         """return a fake request/session using specified user"""
         session = Session(user, self.repo)
         threaddata = session._threaddata
-        threaddata.pool = self.pool
+        threaddata.cnxset = self.cnxset
+        # we attributed a connections set; update ctx_count so it won't be
+        # freed while it is still in use
+        threaddata.ctx_count = 1
         # share pending_operations, else operation added in the hi-jacked
         # session such as SendMailOp won't ever be processed
         threaddata.pending_operations = self.pending_operations
@@ -388,14 +452,14 @@
         """return a sql cursor on the system database"""
         if sql.split(None, 1)[0].upper() != 'SELECT':
             self.mode = 'write'
-        source = self.pool.source('system')
+        source = self.cnxset.source('system')
         try:
             return source.doexec(self, sql, args, rollback=rollback_on_failure)
         except (source.OperationalError, source.InterfaceError):
             if not rollback_on_failure:
                 raise
             source.warning("trying to reconnect")
-            self.pool.reconnect(source)
+            self.cnxset.reconnect(source)
             return source.doexec(self, sql, args, rollback=rollback_on_failure)
 
     def set_language(self, language):
@@ -446,6 +510,29 @@
     def security_enabled(self, read=False, write=False):
         return security_enabled(self, read=read, write=write)
 
+    def init_security(self, read, write):
+        if read is None:
+            oldread = None
+        else:
+            oldread = self.set_read_security(read)
+        if write is None:
+            oldwrite = None
+        else:
+            oldwrite = self.set_write_security(write)
+        self._threaddata.ctx_count += 1
+        return oldread, oldwrite
+
+    def reset_security(self, read, write):
+        txstore = self._threaddata
+        txstore.ctx_count -= 1
+        if txstore.ctx_count == 0:
+            self._clear_thread_storage(txstore)
+        else:
+            if read is not None:
+                self.set_read_security(read)
+            if write is not None:
+                self.set_write_security(write)
+
     @property
     def read_security(self):
         """return a boolean telling if read security is activated or not"""
@@ -498,7 +585,7 @@
             return self.DEFAULT_SECURITY
         try:
             return txstore.write_security
-        except:
+        except AttributeError:
             txstore.write_security = self.DEFAULT_SECURITY
             return txstore.write_security
 
@@ -546,6 +633,30 @@
         self._threaddata.hooks_mode = mode
         return oldmode
 
+    def init_hooks_mode_categories(self, mode, categories):
+        oldmode = self.set_hooks_mode(mode)
+        if mode is self.HOOKS_DENY_ALL:
+            changes = self.enable_hook_categories(*categories)
+        else:
+            changes = self.disable_hook_categories(*categories)
+        self._threaddata.ctx_count += 1
+        return oldmode, changes
+
+    def reset_hooks_mode_categories(self, oldmode, mode, categories):
+        txstore = self._threaddata
+        txstore.ctx_count -= 1
+        if txstore.ctx_count == 0:
+            self._clear_thread_storage(txstore)
+        else:
+            try:
+                if categories:
+                    if mode is self.HOOKS_DENY_ALL:
+                        return self.disable_hook_categories(*categories)
+                    else:
+                        return self.enable_hook_categories(*categories)
+            finally:
+                self.set_hooks_mode(oldmode)
+
     @property
     def disabled_hook_categories(self):
         try:
@@ -569,17 +680,18 @@
         - on HOOKS_ALLOW_ALL mode, ensure those categories are disabled
         """
         changes = set()
+        self.pruned_hooks_cache.clear()
         if self.hooks_mode is self.HOOKS_DENY_ALL:
-            enablecats = self.enabled_hook_categories
+            enabledcats = self.enabled_hook_categories
             for category in categories:
-                if category in enablecats:
-                    enablecats.remove(category)
+                if category in enabledcats:
+                    enabledcats.remove(category)
                     changes.add(category)
         else:
-            disablecats = self.disabled_hook_categories
+            disabledcats = self.disabled_hook_categories
             for category in categories:
-                if category not in disablecats:
-                    disablecats.add(category)
+                if category not in disabledcats:
+                    disabledcats.add(category)
                     changes.add(category)
         return tuple(changes)
 
@@ -590,17 +702,18 @@
         - on HOOKS_ALLOW_ALL mode, ensure those categories are not disabled
         """
         changes = set()
+        self.pruned_hooks_cache.clear()
         if self.hooks_mode is self.HOOKS_DENY_ALL:
-            enablecats = self.enabled_hook_categories
+            enabledcats = self.enabled_hook_categories
             for category in categories:
-                if category not in enablecats:
-                    enablecats.add(category)
+                if category not in enabledcats:
+                    enabledcats.add(category)
                     changes.add(category)
         else:
-            disablecats = self.disabled_hook_categories
+            disabledcats = self.disabled_hook_categories
             for category in categories:
-                if category in self.disabled_hook_categories:
-                    disablecats.remove(category)
+                if category in disabledcats:
+                    disabledcats.remove(category)
                     changes.add(category)
         return tuple(changes)
 
@@ -620,19 +733,19 @@
 
     # connection management ###################################################
 
-    def keep_pool_mode(self, mode):
-        """set pool_mode, e.g. how the session will keep its pool:
+    def keep_cnxset_mode(self, mode):
+        """set `mode`, e.g. how the session will keep its connections set:
 
-        * if mode == 'write', the pool is freed after each ready query, but kept
-          until the transaction's end (eg commit or rollback) when a write query
-          is detected (eg INSERT/SET/DELETE queries)
+        * if mode == 'write', the connections set is freed after each ready
+          query, but kept until the transaction's end (eg commit or rollback)
+          when a write query is detected (eg INSERT/SET/DELETE queries)
 
-        * if mode == 'transaction', the pool is only freed after the
+        * if mode == 'transaction', the connections set is only freed after the
           transaction's end
 
-        notice that a repository has a limited set of pools, and a session has to
-        wait for a free pool to run any rql query (unless it already has a pool
-        set).
+        notice that a repository has a limited set of connections sets, and a
+        session has to wait for a free connections set to run any rql query
+        (unless it already has one set).
         """
         assert mode in ('transaction', 'write')
         if mode == 'transaction':
@@ -655,56 +768,58 @@
     commit_state = property(get_commit_state, set_commit_state)
 
     @property
-    def pool(self):
-        """connections pool, set according to transaction mode for each query"""
+    def cnxset(self):
+        """connections set, set according to transaction mode for each query"""
         if self._closed:
-            self.reset_pool(True)
-            raise Exception('try to access pool on a closed session')
-        return getattr(self._threaddata, 'pool', None)
+            self.free_cnxset(True)
+            raise Exception('try to access connections set on a closed session %s' % self.id)
+        return getattr(self._threaddata, 'cnxset', None)
 
-    def set_pool(self):
-        """the session need a pool to execute some queries"""
+    def set_cnxset(self):
+        """the session need a connections set to execute some queries"""
         with self._closed_lock:
             if self._closed:
-                self.reset_pool(True)
-                raise Exception('try to set pool on a closed session')
-            if self.pool is None:
-                # get pool first to avoid race-condition
-                self._threaddata.pool = pool = self.repo._get_pool()
+                self.free_cnxset(True)
+                raise Exception('try to set connections set on a closed session %s' % self.id)
+            if self.cnxset is None:
+                # get connections set first to avoid race-condition
+                self._threaddata.cnxset = cnxset = self.repo._get_cnxset()
+                self._threaddata.ctx_count += 1
                 try:
-                    pool.pool_set()
-                except:
-                    self._threaddata.pool = None
-                    self.repo._free_pool(pool)
+                    cnxset.cnxset_set()
+                except Exception:
+                    self._threaddata.cnxset = None
+                    self.repo._free_cnxset(cnxset)
                     raise
                 self._threads_in_transaction.add(
-                    (threading.currentThread(), pool) )
-            return self._threaddata.pool
+                    (threading.currentThread(), cnxset) )
+            return self._threaddata.cnxset
 
-    def _free_thread_pool(self, thread, pool, force_close=False):
+    def _free_thread_cnxset(self, thread, cnxset, force_close=False):
         try:
-            self._threads_in_transaction.remove( (thread, pool) )
+            self._threads_in_transaction.remove( (thread, cnxset) )
         except KeyError:
-            # race condition on pool freeing (freed by commit or rollback vs
+            # race condition on cnxset freeing (freed by commit or rollback vs
             # close)
             pass
         else:
             if force_close:
-                pool.reconnect()
+                cnxset.reconnect()
             else:
-                pool.pool_reset()
-            # free pool once everything is done to avoid race-condition
-            self.repo._free_pool(pool)
+                cnxset.cnxset_freed()
+            # free cnxset once everything is done to avoid race-condition
+            self.repo._free_cnxset(cnxset)
 
-    def reset_pool(self, ignoremode=False):
-        """the session is no longer using its pool, at least for some time"""
-        # pool may be none if no operation has been done since last commit
+    def free_cnxset(self, ignoremode=False):
+        """the session is no longer using its connections set, at least for some time"""
+        # cnxset may be none if no operation has been done since last commit
         # or rollback
-        pool = getattr(self._threaddata, 'pool', None)
-        if pool is not None and (ignoremode or self.mode == 'read'):
+        cnxset = getattr(self._threaddata, 'cnxset', None)
+        if cnxset is not None and (ignoremode or self.mode == 'read'):
             # even in read mode, we must release the current transaction
-            self._free_thread_pool(threading.currentThread(), pool)
-            del self._threaddata.pool
+            self._free_thread_cnxset(threading.currentThread(), cnxset)
+            del self._threaddata.cnxset
+            self._threaddata.ctx_count -= 1
 
     def _touch(self):
         """update latest session usage timestamp and reset mode to read"""
@@ -770,9 +885,13 @@
     def source_defs(self):
         return self.repo.source_defs()
 
-    def describe(self, eid):
+    def describe(self, eid, asdict=False):
         """return a tuple (type, sourceuri, extid) for the entity with id <eid>"""
-        return self.repo.type_and_source_from_eid(eid, self)
+        metas = self.repo.type_and_source_from_eid(eid, self)
+        if asdict:
+            return dict(zip(('type', 'source', 'extid', 'asource'), metas))
+        # XXX :-1 for cw compat, use asdict=True for full information
+        return metas[:-1]
 
     # db-api like interface ###################################################
 
@@ -793,9 +912,9 @@
         rset.req = self
         return rset
 
-    def _clear_thread_data(self, reset_pool=True):
-        """remove everything from the thread local storage, except pool
-        which is explicitly removed by reset_pool, and mode which is set anyway
+    def _clear_thread_data(self, free_cnxset=True):
+        """remove everything from the thread local storage, except connections set
+        which is explicitly removed by free_cnxset, and mode which is set anyway
         by _touch
         """
         try:
@@ -803,23 +922,38 @@
         except AttributeError:
             pass
         else:
-            if reset_pool:
-                self._tx_data.pop(txstore.transactionid, None)
-                try:
-                    del self.__threaddata.txdata
-                except AttributeError:
-                    pass
+            if free_cnxset:
+                self.free_cnxset()
+                if txstore.ctx_count == 0:
+                    self._clear_thread_storage(txstore)
+                else:
+                    self._clear_tx_storage(txstore)
             else:
-                for name in ('commit_state', 'transaction_data',
-                             'pending_operations', '_rewriter'):
-                    try:
-                        delattr(txstore, name)
-                    except AttributeError:
-                        continue
+                self._clear_tx_storage(txstore)
+
+    def _clear_thread_storage(self, txstore):
+        self._tx_data.pop(txstore.transactionid, None)
+        try:
+            del self.__threaddata.txdata
+        except AttributeError:
+            pass
 
-    def commit(self, reset_pool=True):
+    def _clear_tx_storage(self, txstore):
+        for name in ('commit_state', 'transaction_data',
+                     'pending_operations', '_rewriter',
+                     'pruned_hooks_cache'):
+            try:
+                delattr(txstore, name)
+            except AttributeError:
+                continue
+
+    def commit(self, free_cnxset=True, reset_pool=None):
         """commit the current session's transaction"""
-        if self.pool is None:
+        if reset_pool is not None:
+            warn('[3.13] use free_cnxset argument instead for reset_pool',
+                 DeprecationWarning, stacklevel=2)
+            free_cnxset = reset_pool
+        if self.cnxset is None:
             assert not self.pending_operations
             self._clear_thread_data()
             self._touch()
@@ -847,7 +981,7 @@
                         operation.handle_event('precommit_event')
                     self.pending_operations[:] = processed
                     self.debug('precommit session %s done', self.id)
-                except:
+                except BaseException:
                     # if error on [pre]commit:
                     #
                     # * set .failed = True on the operation causing the failure
@@ -862,37 +996,41 @@
                     for operation in reversed(processed):
                         try:
                             operation.handle_event('revertprecommit_event')
-                        except:
+                        except BaseException:
                             self.critical('error while reverting precommit',
                                           exc_info=True)
                     # XXX use slice notation since self.pending_operations is a
                     # read-only property.
                     self.pending_operations[:] = processed + self.pending_operations
-                    self.rollback(reset_pool)
+                    self.rollback(free_cnxset)
                     raise
-                self.pool.commit()
+                self.cnxset.commit()
                 self.commit_state = 'postcommit'
                 while self.pending_operations:
                     operation = self.pending_operations.pop(0)
                     operation.processed = 'postcommit'
                     try:
                         operation.handle_event('postcommit_event')
-                    except:
+                    except BaseException:
                         self.critical('error while postcommit',
                                       exc_info=sys.exc_info())
                 self.debug('postcommit session %s done', self.id)
                 return self.transaction_uuid(set=False)
         finally:
             self._touch()
-            if reset_pool:
-                self.reset_pool(ignoremode=True)
-            self._clear_thread_data(reset_pool)
+            if free_cnxset:
+                self.free_cnxset(ignoremode=True)
+            self._clear_thread_data(free_cnxset)
 
-    def rollback(self, reset_pool=True):
+    def rollback(self, free_cnxset=True, reset_pool=None):
         """rollback the current session's transaction"""
-        # don't use self.pool, rollback may be called with _closed == True
-        pool = getattr(self._threaddata, 'pool', None)
-        if pool is None:
+        if reset_pool is not None:
+            warn('[3.13] use free_cnxset argument instead for reset_pool',
+                 DeprecationWarning, stacklevel=2)
+            free_cnxset = reset_pool
+        # don't use self.cnxset, rollback may be called with _closed == True
+        cnxset = getattr(self._threaddata, 'cnxset', None)
+        if cnxset is None:
             self._clear_thread_data()
             self._touch()
             self.debug('rollback session %s done (no db activity)', self.id)
@@ -904,23 +1042,23 @@
                     try:
                         operation = self.pending_operations.pop(0)
                         operation.handle_event('rollback_event')
-                    except:
+                    except BaseException:
                         self.critical('rollback error', exc_info=sys.exc_info())
                         continue
-                pool.rollback()
+                cnxset.rollback()
                 self.debug('rollback for session %s done', self.id)
         finally:
             self._touch()
-            if reset_pool:
-                self.reset_pool(ignoremode=True)
-            self._clear_thread_data(reset_pool)
+            if free_cnxset:
+                self.free_cnxset(ignoremode=True)
+            self._clear_thread_data(free_cnxset)
 
     def close(self):
-        """do not close pool on session close, since they are shared now"""
+        """do not close connections set on session close, since they are shared now"""
         with self._closed_lock:
             self._closed = True
         # copy since _threads_in_transaction maybe modified while waiting
-        for thread, pool in self._threads_in_transaction.copy():
+        for thread, cnxset in self._threads_in_transaction.copy():
             if thread is threading.currentThread():
                 continue
             self.info('waiting for thread %s', thread)
@@ -930,12 +1068,12 @@
             for i in xrange(10):
                 thread.join(1)
                 if not (thread.isAlive() and
-                        (thread, pool) in self._threads_in_transaction):
+                        (thread, cnxset) in self._threads_in_transaction):
                     break
             else:
                 self.error('thread %s still alive after 10 seconds, will close '
                            'session anyway', thread)
-                self._free_thread_pool(thread, pool, force_close=True)
+                self._free_thread_cnxset(thread, cnxset, force_close=True)
         self.rollback()
         del self.__threaddata
         del self._tx_data
@@ -962,9 +1100,16 @@
             self._threaddata.pending_operations = []
             return self._threaddata.pending_operations
 
+    @property
+    def pruned_hooks_cache(self):
+        try:
+            return self._threaddata.pruned_hooks_cache
+        except AttributeError:
+            self._threaddata.pruned_hooks_cache = {}
+            return self._threaddata.pruned_hooks_cache
+
     def add_operation(self, operation, index=None):
-        """add an observer"""
-        assert self.commit_state != 'commit'
+        """add an operation"""
         if index is None:
             self.pending_operations.append(operation)
         else:
@@ -1023,20 +1168,13 @@
         unstables = rqlst.get_variable_indices()
         basedescr = []
         todetermine = []
-        sampleselect = rqlst.children[0]
-        samplesols = sampleselect.solutions[0]
-        for i, term in enumerate(sampleselect.selection):
-            try:
-                ttype = term.get_type(samplesols, args)
-            except CoercionError:
+        for i in xrange(len(rqlst.children[0].selection)):
+            ttype = selection_idx_type(i, rqlst, args)
+            if ttype is None or ttype == 'Any':
                 ttype = None
                 isfinal = True
             else:
-                if ttype is None or ttype == 'Any':
-                    ttype = None
-                    isfinal = True
-                else:
-                    isfinal = ttype in BASE_TYPES
+                isfinal = ttype in BASE_TYPES
             if ttype is None or i in unstables:
                 basedescr.append(None)
                 todetermine.append( (i, isfinal) )
@@ -1049,7 +1187,8 @@
     def _build_descr(self, result, basedescription, todetermine):
         description = []
         etype_from_eid = self.describe
-        for row in result:
+        todel = []
+        for i, row in enumerate(result):
             row_descr = basedescription[:]
             for index, isfinal in todetermine:
                 value = row[index]
@@ -1063,10 +1202,14 @@
                     try:
                         row_descr[index] = etype_from_eid(value)[0]
                     except UnknownEid:
-                        self.critical('wrong eid %s in repository, you should '
-                                      'db-check the database' % value)
-                        row_descr[index] = row[index] = None
-            description.append(tuple(row_descr))
+                        self.error('wrong eid %s in repository, you should '
+                                   'db-check the database' % value)
+                        todel.append(i)
+                        break
+            else:
+                description.append(tuple(row_descr))
+        for i in reversed(todel):
+            del result[i]
         return description
 
     # deprecated ###############################################################
@@ -1075,6 +1218,18 @@
     def schema_rproperty(self, rtype, eidfrom, eidto, rprop):
         return getattr(self.rtype_eids_rdef(rtype, eidfrom, eidto), rprop)
 
+    @property
+    @deprecated("[3.13] use .cnxset attribute instead of .pool")
+    def pool(self):
+        return self.cnxset
+
+    @deprecated("[3.13] use .set_cnxset() method instead of .set_pool()")
+    def set_pool(self):
+        return self.set_cnxset()
+
+    @deprecated("[3.13] use .free_cnxset() method instead of .reset_pool()")
+    def reset_pool(self):
+        return self.free_cnxset()
 
     @deprecated("[3.7] execute is now unsafe by default in hooks/operation. You"
                 " can also control security with the security_enabled context "
@@ -1133,20 +1288,21 @@
     is_internal_session = True
     running_dbapi_query = False
 
-    def __init__(self, repo, cnxprops=None):
+    def __init__(self, repo, cnxprops=None, safe=False):
         super(InternalSession, self).__init__(InternalManager(), repo, cnxprops,
                                               _id='internal')
         self.user._cw = self # XXX remove when "vreg = user._cw.vreg" hack in entity.py is gone
         self.cnxtype = 'inmemory'
-        self.disable_hook_categories('integrity')
+        if not safe:
+            self.disable_hook_categories('integrity')
 
     @property
-    def pool(self):
-        """connections pool, set according to transaction mode for each query"""
+    def cnxset(self):
+        """connections set, set according to transaction mode for each query"""
         if self.repo.shutting_down:
-            self.reset_pool(True)
+            self.free_cnxset(True)
             raise ShuttingDown('repository is shutting down')
-        return getattr(self._threaddata, 'pool', None)
+        return getattr(self._threaddata, 'cnxset', None)
 
 
 class InternalManager(object):
--- a/server/sources/__init__.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/server/sources/__init__.py	Fri Dec 09 12:08:27 2011 +0100
@@ -25,6 +25,7 @@
 from logging import getLogger
 
 from logilab.common import configuration
+from logilab.common.deprecation import deprecated
 
 from yams.schema import role_name
 
@@ -36,11 +37,11 @@
 
 def dbg_st_search(uri, union, varmap, args, cachekey=None, prefix='rql for'):
     if server.DEBUG & server.DBG_RQL:
-        print '  %s %s source: %s' % (prefix, uri, union.as_string())
+        print '  %s %s source: %s' % (prefix, uri, repr(union.as_string()))
         if varmap:
             print '    using varmap', varmap
         if server.DEBUG & server.DBG_MORE:
-            print '    args', args
+            print '    args', repr(args)
             print '    cache key', cachekey
             print '    solutions', ','.join(str(s.solutions)
                                             for s in union.children)
@@ -64,13 +65,13 @@
         self.ttl = timedelta(seconds=ttl)
 
     def __setitem__(self, key, value):
-        dict.__setitem__(self, key, (datetime.now(), value))
+        dict.__setitem__(self, key, (datetime.utcnow(), value))
 
     def __getitem__(self, key):
         return dict.__getitem__(self, key)[1]
 
     def clear_expired(self):
-        now_ = datetime.now()
+        now_ = datetime.utcnow()
         ttl = self.ttl
         for key, (timestamp, value) in self.items():
             if now_ - timestamp > ttl:
@@ -110,15 +111,24 @@
     # force deactivation (configuration error for instance)
     disabled = False
 
+    # boolean telling if cwuri of entities from this source is the url that
+    # should be used as entity's absolute url
+    use_cwuri_as_url = False
+
     # source configuration options
     options = ()
 
+    # these are overridden by set_log_methods below
+    # only defining here to prevent pylint from complaining
+    info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None
+
     def __init__(self, repo, source_config, eid=None):
         self.repo = repo
         self.set_schema(repo.schema)
         self.support_relations['identity'] = False
         self.eid = eid
         self.public_config = source_config.copy()
+        self.public_config.setdefault('use-cwuri-as-url', self.use_cwuri_as_url)
         self.remove_sensitive_information(self.public_config)
         self.uri = source_config.pop('uri')
         set_log_methods(self, getLogger('cubicweb.sources.'+self.uri))
@@ -171,7 +181,7 @@
         # cw < 3.10 bw compat
         try:
             processed['adapter'] = confdict['adapter']
-        except:
+        except KeyError:
             pass
         # check for unknown options
         if confdict and not confdict.keys() == ['adapter']:
@@ -213,7 +223,7 @@
         """
         pass
 
-    PUBLIC_KEYS = ('type', 'uri')
+    PUBLIC_KEYS = ('type', 'uri', 'use-cwuri-as-url')
     def remove_sensitive_information(self, sourcedef):
         """remove sensitive information such as login / password from source
         definition
@@ -230,23 +240,23 @@
 
     def check_connection(self, cnx):
         """Check connection validity, return None if the connection is still
-        valid else a new connection (called when the pool using the given
-        connection is being attached to a session). Do nothing by default.
+        valid else a new connection (called when the connections set using the
+        given connection is being attached to a session). Do nothing by default.
         """
         pass
 
-    def close_pool_connections(self):
-        for pool in self.repo.pools:
-            pool._cursors.pop(self.uri, None)
-            pool.source_cnxs[self.uri][1].close()
+    def close_source_connections(self):
+        for cnxset in self.repo.cnxsets:
+            cnxset._cursors.pop(self.uri, None)
+            cnxset.source_cnxs[self.uri][1].close()
 
-    def open_pool_connections(self):
-        for pool in self.repo.pools:
-            pool.source_cnxs[self.uri] = (self, self.get_connection())
+    def open_source_connections(self):
+        for cnxset in self.repo.cnxsets:
+            cnxset.source_cnxs[self.uri] = (self, self.get_connection())
 
-    def pool_reset(self, cnx):
-        """the pool using the given connection is being reseted from its current
-        attached session
+    def cnxset_freed(self, cnx):
+        """the connections set holding the given connection is being reseted
+        from its current attached session.
 
         do nothing by default
         """
@@ -264,12 +274,6 @@
 
     # external source api ######################################################
 
-    def eid2extid(self, eid, session=None):
-        return self.repo.eid2extid(self, eid, session)
-
-    def extid2eid(self, value, etype, session=None, **kwargs):
-        return self.repo.extid2eid(self, value, etype, session, **kwargs)
-
     def support_entity(self, etype, write=False):
         """return true if the given entity's type is handled by this adapter
         if write is true, return true only if it's a RW support
@@ -404,7 +408,7 @@
         .executemany().
         """
         res = self.syntax_tree_search(session, union, args, varmap=varmap)
-        session.pool.source('system').manual_insert(res, table, session)
+        session.cnxset.source('system').manual_insert(res, table, session)
 
     # write modification api ###################################################
     # read-only sources don't have to implement methods below
@@ -517,6 +521,15 @@
         pass
 
 
+    @deprecated('[3.13] use repo.eid2extid(source, eid, session)')
+    def eid2extid(self, eid, session=None):
+        return self.repo.eid2extid(self, eid, session)
+
+    @deprecated('[3.13] use extid2eid(source, value, etype, session, **kwargs)')
+    def extid2eid(self, value, etype, session=None, **kwargs):
+        return self.repo.extid2eid(self, value, etype, session, **kwargs)
+
+
 class TrFunc(object):
     """lower, upper"""
     def __init__(self, trname, index, attrname=None):
--- a/server/sources/datafeed.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/server/sources/datafeed.py	Fri Dec 09 12:08:27 2011 +0100
@@ -18,15 +18,24 @@
 """datafeed sources: copy data from an external data stream into the system
 database
 """
+from __future__ import with_statement
+
+import urllib2
+import StringIO
 from datetime import datetime, timedelta
 from base64 import b64decode
+from cookielib import CookieJar
 
-from cubicweb import RegistryNotFound, ObjectNotFound, ValidationError
+from lxml import etree
+
+from cubicweb import RegistryNotFound, ObjectNotFound, ValidationError, UnknownEid
 from cubicweb.server.sources import AbstractSource
 from cubicweb.appobject import AppObject
 
+
 class DataFeedSource(AbstractSource):
     copy_based_source = True
+    use_cwuri_as_url = True
 
     options = (
         ('synchronize',
@@ -46,6 +55,15 @@
                    'external source (default to 5 minutes, must be >= 1 min).'),
           'group': 'datafeed-source', 'level': 2,
           }),
+        ('max-lock-lifetime',
+         {'type' : 'time',
+          'default': '1h',
+          'help': ('Maximum time allowed for a synchronization to be run. '
+                   'Exceeded that time, the synchronization will be considered '
+                   'as having failed and not properly released the lock, hence '
+                   'it won\'t be considered as still active'),
+          'group': 'datafeed-source', 'level': 2,
+          }),
         ('delete-entities',
          {'type' : 'yn',
           'default': True,
@@ -71,7 +89,7 @@
 
     def _entity_update(self, source_entity):
         source_entity.complete()
-        self.parser = source_entity.parser
+        self.parser_id = source_entity.parser
         self.latest_retrieval = source_entity.latest_retrieval
         self.urls = [url.strip() for url in source_entity.url.splitlines()
                      if url.strip()]
@@ -81,6 +99,7 @@
         properly typed with defaults set
         """
         self.synchro_interval = timedelta(seconds=typedconfig['synchronization-interval'])
+        self.max_lock_lifetime = timedelta(seconds=typedconfig['max-lock-lifetime'])
         if source_entity is not None:
             self._entity_update(source_entity)
         self.config = typedconfig
@@ -88,12 +107,12 @@
     def init(self, activated, source_entity):
         if activated:
             self._entity_update(source_entity)
-        self.parser = source_entity.parser
+        self.parser_id = source_entity.parser
         self.load_mapping(source_entity._cw)
 
     def _get_parser(self, session, **kwargs):
         return self.repo.vreg['parsers'].select(
-            self.parser, session, source=self, **kwargs)
+            self.parser_id, session, source=self, **kwargs)
 
     def load_mapping(self, session):
         self.mapping = {}
@@ -121,27 +140,54 @@
             return False
         return datetime.utcnow() < (self.latest_retrieval + self.synchro_interval)
 
+    def update_latest_retrieval(self, session):
+        self.latest_retrieval = datetime.utcnow()
+        session.execute('SET X latest_retrieval %(date)s WHERE X eid %(x)s',
+                        {'x': self.eid, 'date': self.latest_retrieval})
+
+    def acquire_synchronization_lock(self, session):
+        # XXX race condition until WHERE of SET queries is executed using
+        # 'SELECT FOR UPDATE'
+        now = datetime.utcnow()
+        if not session.execute(
+            'SET X in_synchronization %(now)s WHERE X eid %(x)s, '
+            'X in_synchronization NULL OR X in_synchronization < %(maxdt)s',
+            {'x': self.eid, 'now': now, 'maxdt': now - self.max_lock_lifetime}):
+            self.error('concurrent synchronization detected, skip pull')
+            session.commit(free_cnxset=False)
+            return False
+        session.commit(free_cnxset=False)
+        return True
+
+    def release_synchronization_lock(self, session):
+        session.set_cnxset()
+        session.execute('SET X in_synchronization NULL WHERE X eid %(x)s',
+                        {'x': self.eid})
+        session.commit()
+
     def pull_data(self, session, force=False, raise_on_error=False):
+        """Launch synchronization of the source if needed.
+
+        This method is responsible to handle commit/rollback on the given
+        session.
+        """
         if not force and self.fresh():
             return {}
+        if not self.acquire_synchronization_lock(session):
+            return {}
+        try:
+            with session.transaction(free_cnxset=False):
+                return self._pull_data(session, force, raise_on_error)
+        finally:
+            self.release_synchronization_lock(session)
+
+    def _pull_data(self, session, force=False, raise_on_error=False):
         if self.config['delete-entities']:
             myuris = self.source_cwuris(session)
         else:
             myuris = None
         parser = self._get_parser(session, sourceuris=myuris)
-        error = False
-        self.info('pulling data for source %s', self.uri)
-        for url in self.urls:
-            try:
-                if parser.process(url):
-                    error = True
-            except IOError, exc:
-                if raise_on_error:
-                    raise
-                self.error('could not pull data while processing %s: %s',
-                           url, exc)
-                error = True
-        if error:
+        if self.process_urls(parser, self.urls, raise_on_error):
             self.warning("some error occured, don't attempt to delete entities")
         elif self.config['delete-entities'] and myuris:
             byetype = {}
@@ -151,11 +197,30 @@
             for etype, eids in byetype.iteritems():
                 session.execute('DELETE %s X WHERE X eid IN (%s)'
                                 % (etype, ','.join(eids)))
-        self.latest_retrieval = datetime.utcnow()
-        session.execute('SET X latest_retrieval %(date)s WHERE X eid %(x)s',
-                        {'x': self.eid, 'date': self.latest_retrieval})
+        self.update_latest_retrieval(session)
         return parser.stats
 
+    def process_urls(self, parser, urls, raise_on_error=False):
+        error = False
+        for url in urls:
+            self.info('pulling data from %s', url)
+            try:
+                if parser.process(url, raise_on_error):
+                    error = True
+            except IOError, exc:
+                if raise_on_error:
+                    raise
+                self.error('could not pull data while processing %s: %s',
+                           url, exc)
+                error = True
+            except Exception, exc:
+                if raise_on_error:
+                    raise
+                self.exception('error while processing %s: %s',
+                               url, exc)
+                error = True
+        return error
+
     def before_entity_insertion(self, session, lid, etype, eid, sourceparams):
         """called by the repository when an eid has been attributed for an
         entity stored here but the entity has not been inserted in the system
@@ -166,12 +231,9 @@
         """
         entity = super(DataFeedSource, self).before_entity_insertion(
             session, lid, etype, eid, sourceparams)
-        entity.cw_edited['cwuri'] = unicode(lid)
+        entity.cw_edited['cwuri'] = lid.decode('utf-8')
         entity.cw_edited.set_defaults()
         sourceparams['parser'].before_entity_copy(entity, sourceparams)
-        # avoid query to search full-text indexed attributes
-        for attr in entity.e_schema.indexable_attributes():
-            entity.cw_edited.setdefault(attr, u'')
         return entity
 
     def after_entity_insertion(self, session, lid, entity, sourceparams):
@@ -195,8 +257,8 @@
 class DataFeedParser(AppObject):
     __registry__ = 'parsers'
 
-    def __init__(self, session, source, sourceuris=None):
-        self._cw = session
+    def __init__(self, session, source, sourceuris=None, **kwargs):
+        super(DataFeedParser, self).__init__(session, **kwargs)
         self.source = source
         self.sourceuris = sourceuris
         self.stats = {'created': set(),
@@ -213,14 +275,44 @@
         raise ValidationError(schemacfg.eid, {None: msg})
 
     def extid2entity(self, uri, etype, **sourceparams):
+        """return an entity for the given uri. May return None if it should be
+        skipped
+        """
+        session = self._cw
+        # if cwsource is specified and repository has a source with the same
+        # name, call extid2eid on that source so entity will be properly seen as
+        # coming from this source
+        source_uri = sourceparams.pop('cwsource', None)
+        if source_uri is not None and source_uri != 'system':
+            source = session.repo.sources_by_uri.get(source_uri, self.source)
+        else:
+            source = self.source
         sourceparams['parser'] = self
-        eid = self.source.extid2eid(str(uri), etype, self._cw,
-                                    sourceparams=sourceparams)
+        if isinstance(uri, unicode):
+            uri = uri.encode('utf-8')
+        try:
+            eid = session.repo.extid2eid(source, str(uri), etype, session,
+                                         complete=False, commit=False,
+                                         sourceparams=sourceparams)
+        except ValidationError, ex:
+            self.source.error('error while creating %s: %s', etype, ex)
+            return None
+        if eid < 0:
+            # entity has been moved away from its original source
+            #
+            # Don't give etype to entity_from_eid so we get UnknownEid if the
+            # entity has been removed
+            try:
+                entity = session.entity_from_eid(-eid)
+            except UnknownEid:
+                return None
+            self.notify_updated(entity) # avoid later update from the source's data
+            return entity
         if self.sourceuris is not None:
             self.sourceuris.pop(str(uri), None)
-        return self._cw.entity_from_eid(eid, etype)
+        return session.entity_from_eid(eid, etype)
 
-    def process(self, url):
+    def process(self, url, partialcommit=True):
         """main callback: process the url"""
         raise NotImplementedError
 
@@ -238,3 +330,66 @@
 
     def notify_updated(self, entity):
         return self.stats['updated'].add(entity.eid)
+
+
+class DataFeedXMLParser(DataFeedParser):
+
+    def process(self, url, raise_on_error=False, partialcommit=True):
+        """IDataFeedParser main entry point"""
+        try:
+            parsed = self.parse(url)
+        except Exception, ex:
+            if raise_on_error:
+                raise
+            self.source.error(str(ex))
+            return True
+        error = False
+        for args in parsed:
+            try:
+                self.process_item(*args)
+                if partialcommit:
+                    # commit+set_cnxset instead of commit(free_cnxset=False) to give
+                    # others a chance to get our connections set
+                    self._cw.commit()
+                    self._cw.set_cnxset()
+            except ValidationError, exc:
+                if raise_on_error:
+                    raise
+                if partialcommit:
+                    self.source.error('Skipping %s because of validation error %s' % (args, exc))
+                    self._cw.rollback()
+                    self._cw.set_cnxset()
+                    error = True
+                else:
+                    raise
+        return error
+
+    def parse(self, url):
+        if url.startswith('http'):
+            from cubicweb.sobjects.parsers import URL_MAPPING
+            for mappedurl in URL_MAPPING:
+                if url.startswith(mappedurl):
+                    url = url.replace(mappedurl, URL_MAPPING[mappedurl], 1)
+                    break
+            self.source.info('GET %s', url)
+            stream = _OPENER.open(url)
+        elif url.startswith('file://'):
+            stream = open(url[7:])
+        else:
+            stream = StringIO.StringIO(url)
+        return self.parse_etree(etree.parse(stream).getroot())
+
+    def parse_etree(self, document):
+        return [(document,)]
+
+    def process_item(self, *args):
+        raise NotImplementedError
+
+# use a cookie enabled opener to use session cookie if any
+_OPENER = urllib2.build_opener()
+try:
+    from logilab.common import urllib2ext
+    _OPENER.add_handler(urllib2ext.HTTPGssapiAuthHandler())
+except ImportError: # python-kerberos not available
+    pass
+_OPENER.add_handler(urllib2.HTTPCookieProcessor(CookieJar()))
--- a/server/sources/extlite.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/server/sources/extlite.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -102,19 +102,19 @@
 
     def backup(self, backupfile, confirm):
         """method called to create a backup of the source's data"""
-        self.close_pool_connections()
+        self.close_source_connections()
         try:
             self.sqladapter.backup_to_file(backupfile, confirm)
         finally:
-            self.open_pool_connections()
+            self.open_source_connections()
 
     def restore(self, backupfile, confirm, drop):
         """method called to restore a backup of source's data"""
-        self.close_pool_connections()
+        self.close_source_connections()
         try:
             self.sqladapter.restore_from_file(backupfile, confirm, drop)
         finally:
-            self.open_pool_connections()
+            self.open_source_connections()
 
     @property
     def _sqlcnx(self):
@@ -174,15 +174,15 @@
 
     def check_connection(self, cnx):
         """check connection validity, return None if the connection is still valid
-        else a new connection (called when the pool using the given connection is
+        else a new connection (called when the connections set holding the given connection is
         being attached to a session)
 
         always return the connection to reset eventually cached cursor
         """
         return cnx
 
-    def pool_reset(self, cnx):
-        """the pool using the given connection is being reseted from its current
+    def cnxset_freed(self, cnx):
+        """the connections set holding the given connection is being freed from its current
         attached session: release the connection lock if the connection wrapper
         has a connection set
         """
@@ -286,7 +286,7 @@
         """
         if server.DEBUG:
             print 'exec', query, args
-        cursor = session.pool[self.uri]
+        cursor = session.cnxset[self.uri]
         try:
             # str(query) to avoid error if it's an unicode string
             cursor.execute(str(query), args)
@@ -294,9 +294,9 @@
             self.critical("sql: %r\n args: %s\ndbms message: %r",
                           query, args, ex.args[0])
             try:
-                session.pool.connection(self.uri).rollback()
+                session.cnxset.connection(self.uri).rollback()
                 self.critical('transaction has been rollbacked')
-            except:
+            except Exception:
                 pass
             raise
         return cursor
--- a/server/sources/ldapuser.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/server/sources/ldapuser.py	Fri Dec 09 12:08:27 2011 +0100
@@ -273,7 +273,7 @@
         if self._conn is None:
             try:
                 self._connect()
-            except:
+            except Exception:
                 self.exception('unable to connect to ldap:')
         return ConnectionWrapper(self._conn)
 
@@ -310,7 +310,11 @@
         except Exception:
             self.error('while trying to authenticate %s', user, exc_info=True)
             raise AuthenticationError()
-        return self.extid2eid(user['dn'], 'CWUser', session)
+        eid = self.repo.extid2eid(self, user['dn'], 'CWUser', session)
+        if eid < 0:
+            # user has been moved away from this source
+            raise AuthenticationError()
+        return eid
 
     def ldap_name(self, var):
         if var.stinfo['relations']:
@@ -392,7 +396,7 @@
                     break
         assert mainvars, rqlst
         columns, globtransforms = self.prepare_columns(mainvars, rqlst)
-        eidfilters = []
+        eidfilters = [lambda x: x > 0]
         allresults = []
         generator = RQL2LDAPFilter(self, session, args, mainvars)
         for mainvar in mainvars:
@@ -419,7 +423,7 @@
             filteredres = []
             for resdict in res:
                 # get sure the entity exists in the system table
-                eid = self.extid2eid(resdict['dn'], 'CWUser', session)
+                eid = self.repo.extid2eid(self, resdict['dn'], 'CWUser', session)
                 for eidfilter in eidfilters:
                     if not eidfilter(eid):
                         break
@@ -524,21 +528,21 @@
         """make an ldap query"""
         self.debug('ldap search %s %s %s %s %s', self.uri, base, scope,
                    searchstr, list(attrs))
-        # XXX for now, we do not have connection pool support for LDAP, so
+        # XXX for now, we do not have connections set support for LDAP, so
         # this is always self._conn
-        cnx = session.pool.connection(self.uri).cnx
+        cnx = session.cnxset.connection(self.uri).cnx
         try:
             res = cnx.search_s(base, scope, searchstr, attrs)
         except ldap.PARTIAL_RESULTS:
             res = cnx.result(all=0)[1]
         except ldap.NO_SUCH_OBJECT:
             self.info('ldap NO SUCH OBJECT')
-            eid = self.extid2eid(base, 'CWUser', session, insert=False)
+            eid = self.repo.extid2eid(self, base, 'CWUser', session, insert=False)
             if eid:
                 self.warning('deleting ldap user with eid %s and dn %s',
                              eid, base)
                 entity = session.entity_from_eid(eid, 'CWUser')
-                self.repo.delete_info(session, entity, self.uri, base)
+                self.repo.delete_info(session, entity, self.uri)
                 self.reset_caches()
             return []
         # except ldap.REFERRAL, e:
@@ -566,7 +570,7 @@
                     try:
                         for i in range(len(value)):
                             value[i] = unicode(value[i], 'utf8')
-                    except:
+                    except Exception:
                         pass
                 if isinstance(value, list) and len(value) == 1:
                     rec_dict[key] = value = value[0]
@@ -642,6 +646,7 @@
     """generate an LDAP filter for a rql query"""
     def __init__(self, source, session, args=None, mainvars=()):
         self.source = source
+        self.repo = source.repo
         self._ldap_attrs = source.user_rev_attrs
         self._base_filters = source.base_filters
         self._session = session
@@ -747,7 +752,7 @@
                           }[rhs.operator]
                 self._eidfilters.append(filter)
                 return
-            dn = self.source.eid2extid(eid, self._session)
+            dn = self.repo.eid2extid(self.source, eid, self._session)
             raise GotDN(dn)
         try:
             filter = '(%s%s)' % (self._ldap_attrs[relation.r_type],
--- a/server/sources/native.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/server/sources/native.py	Fri Dec 09 12:08:27 2011 +0100
@@ -313,9 +313,9 @@
             self.dbhelper.dbname = abspath(self.dbhelper.dbname)
             self.get_connection = lambda: ConnectionWrapper(self)
             self.check_connection = lambda cnx: cnx
-            def pool_reset(cnx):
+            def cnxset_freed(cnx):
                 cnx.close()
-            self.pool_reset = pool_reset
+            self.cnxset_freed = cnxset_freed
         if self.dbdriver == 'sqlite':
             self._create_eid = None
             self.create_eid = self._create_eid_sqlite
@@ -355,21 +355,21 @@
         """execute the query and return its result"""
         return self.process_result(self.doexec(session, sql, args))
 
-    def init_creating(self, pool=None):
+    def init_creating(self, cnxset=None):
         # check full text index availibility
         if self.do_fti:
-            if pool is None:
-                _pool = self.repo._get_pool()
-                _pool.pool_set()
+            if cnxset is None:
+                _cnxset = self.repo._get_cnxset()
+                _cnxset.cnxset_set()
             else:
-                _pool = pool
-            if not self.dbhelper.has_fti_table(_pool['system']):
+                _cnxset = cnxset
+            if not self.dbhelper.has_fti_table(_cnxset['system']):
                 if not self.repo.config.creating:
                     self.critical('no text index table')
                 self.do_fti = False
-            if pool is None:
-                _pool.pool_reset()
-                self.repo._free_pool(_pool)
+            if cnxset is None:
+                _cnxset.cnxset_freed()
+                self.repo._free_cnxset(_cnxset)
 
     def backup(self, backupfile, confirm, format='native'):
         """method called to create a backup of the source's data"""
@@ -377,25 +377,25 @@
             self.repo.fill_schema()
             self.set_schema(self.repo.schema)
             helper = DatabaseIndependentBackupRestore(self)
-            self.close_pool_connections()
+            self.close_source_connections()
             try:
                 helper.backup(backupfile)
             finally:
-                self.open_pool_connections()
+                self.open_source_connections()
         elif format == 'native':
-            self.close_pool_connections()
+            self.close_source_connections()
             try:
                 self.backup_to_file(backupfile, confirm)
             finally:
-                self.open_pool_connections()
+                self.open_source_connections()
         else:
             raise ValueError('Unknown format %r' % format)
 
 
     def restore(self, backupfile, confirm, drop, format='native'):
         """method called to restore a backup of source's data"""
-        if self.repo.config.open_connections_pools:
-            self.close_pool_connections()
+        if self.repo.config.init_cnxset_pool:
+            self.close_source_connections()
         try:
             if format == 'portable':
                 helper = DatabaseIndependentBackupRestore(self)
@@ -405,12 +405,18 @@
             else:
                 raise ValueError('Unknown format %r' % format)
         finally:
-            if self.repo.config.open_connections_pools:
-                self.open_pool_connections()
+            if self.repo.config.init_cnxset_pool:
+                self.open_source_connections()
 
 
     def init(self, activated, source_entity):
-        self.init_creating(source_entity._cw.pool)
+        self.init_creating(source_entity._cw.cnxset)
+        try:
+            # test if 'asource' column exists
+            query = self.dbhelper.sql_add_limit_offset('SELECT asource FROM entities', 1)
+            source_entity._cw.system_sql(query)
+        except Exception, ex:
+            self.eid_type_source = self.eid_type_source_pre_131
 
     def shutdown(self):
         if self._eid_creation_cnx:
@@ -532,13 +538,13 @@
                 raise
             # FIXME: better detection of deconnection pb
             self.warning("trying to reconnect")
-            session.pool.reconnect(self)
+            session.cnxset.reconnect(self)
             cursor = self.doexec(session, sql, args)
         except (self.DbapiError,), exc:
             # We get this one with pyodbc and SQL Server when connection was reset
             if exc.args[0] == '08S01' and session.mode != 'write':
                 self.warning("trying to reconnect")
-                session.pool.reconnect(self)
+                session.cnxset.reconnect(self)
                 cursor = self.doexec(session, sql, args)
             else:
                 raise
@@ -585,7 +591,7 @@
             for table in temptables:
                 try:
                     self.doexec(session,'DROP TABLE %s' % table)
-                except:
+                except Exception:
                     pass
                 try:
                     del self._temp_table_data[table]
@@ -727,9 +733,9 @@
         """Execute a query.
         it's a function just so that it shows up in profiling
         """
-        cursor = session.pool[self.uri]
+        cursor = session.cnxset[self.uri]
         if server.DEBUG & server.DBG_SQL:
-            cnx = session.pool.connection(self.uri)
+            cnx = session.cnxset.connection(self.uri)
             # getattr to get the actual connection if cnx is a ConnectionWrapper
             # instance
             print 'exec', query, args, getattr(cnx, '_cnx', cnx)
@@ -744,7 +750,7 @@
                               query, args, ex.args[0])
             if rollback:
                 try:
-                    session.pool.connection(self.uri).rollback()
+                    session.cnxset.connection(self.uri).rollback()
                     if self.repo.config.mode != 'test':
                         self.critical('transaction has been rollbacked')
                 except Exception, ex:
@@ -773,7 +779,7 @@
         """
         if server.DEBUG & server.DBG_SQL:
             print 'execmany', query, 'with', len(args), 'arguments'
-        cursor = session.pool[self.uri]
+        cursor = session.cnxset[self.uri]
         try:
             # str(query) to avoid error if it's an unicode string
             cursor.executemany(str(query), args)
@@ -784,10 +790,10 @@
                 self.critical("sql many: %r\n args: %s\ndbms message: %r",
                               query, args, ex.args[0])
             try:
-                session.pool.connection(self.uri).rollback()
+                session.cnxset.connection(self.uri).rollback()
                 if self.repo.config.mode != 'test':
                     self.critical('transaction has been rollbacked')
-            except:
+            except Exception:
                 pass
             raise
 
@@ -802,7 +808,7 @@
             self.error("backend can't alter %s.%s to %s%s", table, column, coltype,
                        not allownull and 'NOT NULL' or '')
             return
-        self.dbhelper.change_col_type(LogCursor(session.pool[self.uri]),
+        self.dbhelper.change_col_type(LogCursor(session.cnxset[self.uri]),
                                       table, column, coltype, allownull)
         self.info('altered %s.%s: now %s%s', table, column, coltype,
                   not allownull and 'NOT NULL' or '')
@@ -817,7 +823,7 @@
             return
         table, column = rdef_table_column(rdef)
         coltype, allownull = rdef_physical_info(self.dbhelper, rdef)
-        self.dbhelper.set_null_allowed(LogCursor(session.pool[self.uri]),
+        self.dbhelper.set_null_allowed(LogCursor(session.cnxset[self.uri]),
                                        table, column, coltype, allownull)
 
     def update_rdef_indexed(self, session, rdef):
@@ -835,29 +841,49 @@
             self.drop_index(session, table, column, unique=True)
 
     def create_index(self, session, table, column, unique=False):
-        cursor = LogCursor(session.pool[self.uri])
+        cursor = LogCursor(session.cnxset[self.uri])
         self.dbhelper.create_index(cursor, table, column, unique)
 
     def drop_index(self, session, table, column, unique=False):
-        cursor = LogCursor(session.pool[self.uri])
+        cursor = LogCursor(session.cnxset[self.uri])
         self.dbhelper.drop_index(cursor, table, column, unique)
 
     # system source interface #################################################
 
-    def eid_type_source(self, session, eid):
-        """return a tuple (type, source, extid) for the entity with id <eid>"""
-        sql = 'SELECT type, source, extid FROM entities WHERE eid=%s' % eid
+    def _eid_type_source(self, session, eid, sql, _retry=True):
         try:
             res = self.doexec(session, sql).fetchone()
-        except:
-            assert session.pool, 'session has no pool set'
-            raise UnknownEid(eid)
-        if res is None:
-            raise UnknownEid(eid)
-        if res[-1] is not None:
+            if res is not None:
+                return res
+        except (self.OperationalError, self.InterfaceError):
+            if session.mode == 'read' and _retry:
+                self.warning("trying to reconnect (eid_type_source())")
+                session.cnxset.reconnect(self)
+                return self._eid_type_source(session, eid, sql, _retry=False)
+        except Exception:
+            assert session.cnxset, 'session has no connections set'
+            self.exception('failed to query entities table for eid %s', eid)
+        raise UnknownEid(eid)
+
+    def eid_type_source(self, session, eid): # pylint: disable=E0202
+        """return a tuple (type, source, extid) for the entity with id <eid>"""
+        sql = 'SELECT type, source, extid, asource FROM entities WHERE eid=%s' % eid
+        res = self._eid_type_source(session, eid, sql)
+        if res[-2] is not None:
             if not isinstance(res, list):
                 res = list(res)
+            res[-2] = b64decode(res[-2])
+        return res
+
+    def eid_type_source_pre_131(self, session, eid):
+        """return a tuple (type, source, extid) for the entity with id <eid>"""
+        sql = 'SELECT type, source, extid FROM entities WHERE eid=%s' % eid
+        res = self._eid_type_source(session, eid, sql)
+        if not isinstance(res, list):
+            res = list(res)
+        if res[-1] is not None:
             res[-1] = b64decode(res[-1])
+        res.append(res[1])
         return res
 
     def extid2eid(self, session, source_uri, extid):
@@ -874,7 +900,7 @@
             result = cursor.fetchone()
             if result:
                 return result[0]
-        except:
+        except Exception:
             pass
         return None
 
@@ -898,13 +924,13 @@
             return cursor.fetchone()[0]
 
 
-    def create_eid(self, session):
+    def create_eid(self, session): # pylint: disable=E0202
         # lock needed to prevent 'Connection is busy with results for another
         # command (0)' errors with SQLServer
         with self._eid_cnx_lock:
-            return self._create_eid()
+            return self._create_eid() # pylint: disable=E1102
 
-    def _create_eid(self):
+    def _create_eid(self): # pylint: disable=E0202
         # internal function doing the eid creation without locking.
         # needed for the recursive handling of disconnections (otherwise we
         # deadlock on self._eid_cnx_lock
@@ -920,16 +946,16 @@
             # FIXME: better detection of deconnection pb
             self.warning("trying to reconnect create eid connection")
             self._eid_creation_cnx = None
-            return self._create_eid()
+            return self._create_eid() # pylint: disable=E1102
         except (self.DbapiError,), exc:
             # We get this one with pyodbc and SQL Server when connection was reset
             if exc.args[0] == '08S01':
                 self.warning("trying to reconnect create eid connection")
                 self._eid_creation_cnx = None
-                return self._create_eid()
+                return self._create_eid() # pylint: disable=E1102
             else:
                 raise
-        except: # WTF?
+        except Exception: # WTF?
             cnx.rollback()
             self._eid_creation_cnx = None
             self.exception('create eid failed in an unforeseen way on SQL statement %s', sql)
@@ -946,7 +972,7 @@
             extid = b64encode(extid)
         uri = 'system' if source.copy_based_source else source.uri
         attrs = {'type': entity.__regid__, 'eid': entity.eid, 'extid': extid,
-                 'source': uri, 'mtime': datetime.now()}
+                 'source': uri, 'asource': source.uri, 'mtime': datetime.utcnow()}
         self.doexec(session, self.sqlgen.insert('entities', attrs), attrs)
         # insert core relations: is, is_instance_of and cw_source
         try:
@@ -976,7 +1002,7 @@
             self.index_entity(session, entity=entity)
         # update entities.mtime.
         # XXX Only if entity.__regid__ in self.multisources_etypes?
-        attrs = {'eid': entity.eid, 'mtime': datetime.now()}
+        attrs = {'eid': entity.eid, 'mtime': datetime.utcnow()}
         self.doexec(session, self.sqlgen.update('entities', attrs, ['eid']), attrs)
 
     def delete_info_multi(self, session, entities, uri):
@@ -993,7 +1019,7 @@
         if entities[0].__regid__ not in self.multisources_etypes:
             return
         attrs = {'type': entities[0].__regid__,
-                 'source': uri, 'dtime': datetime.now()}
+                 'source': uri, 'dtime': datetime.utcnow()}
         for entity in entities:
             extid = entity.cw_metainformation()['extid']
             if extid is not None:
@@ -1127,7 +1153,7 @@
         important note: while undoing of a transaction, only hooks in the
         'integrity', 'activeintegrity' and 'undo' categories are called.
         """
-        # set mode so pool isn't released subsquently until commit/rollback
+        # set mode so connections set isn't released subsequently until commit/rollback
         session.mode = 'write'
         errors = []
         session.transaction_data['undoing_uuid'] = txuuid
@@ -1147,7 +1173,7 @@
         table when some undoable transaction is started
         """
         ueid = session.user.eid
-        attrs = {'tx_uuid': uuid, 'tx_user': ueid, 'tx_time': datetime.now()}
+        attrs = {'tx_uuid': uuid, 'tx_user': ueid, 'tx_time': datetime.utcnow()}
         self.doexec(session, self.sqlgen.insert('transactions', attrs), attrs)
 
     def _save_attrs(self, session, entity, attrs):
@@ -1372,7 +1398,7 @@
     def fti_unindex_entities(self, session, entities):
         """remove text content for entities from the full text index
         """
-        cursor = session.pool['system']
+        cursor = session.cnxset['system']
         cursor_unindex_object = self.dbhelper.cursor_unindex_object
         try:
             for entity in entities:
@@ -1385,7 +1411,7 @@
         """add text content of created/modified entities to the full text index
         """
         cursor_index_object = self.dbhelper.cursor_index_object
-        cursor = session.pool['system']
+        cursor = session.cnxset['system']
         try:
             # use cursor_index_object, not cursor_reindex_object since
             # unindexing done in the FTIndexEntityOp
@@ -1434,6 +1460,7 @@
   eid INTEGER PRIMARY KEY NOT NULL,
   type VARCHAR(64) NOT NULL,
   source VARCHAR(64) NOT NULL,
+  asource VARCHAR(64) NOT NULL,
   mtime %s NOT NULL,
   extid VARCHAR(256)
 );;
@@ -1625,7 +1652,7 @@
         return self._source.get_connection()
 
     def backup(self, backupfile):
-        archive=zipfile.ZipFile(backupfile, 'w')
+        archive=zipfile.ZipFile(backupfile, 'w', allowZip64=True)
         self.cnx = self.get_connection()
         try:
             self.cursor = self.cnx.cursor()
@@ -1655,7 +1682,6 @@
         prefix = 'cw_'
         for etype in self.schema.entities():
             eschema = self.schema.eschema(etype)
-            print etype, eschema.final
             if eschema.final:
                 continue
             etype_tables.append('%s%s'%(prefix, etype))
@@ -1721,7 +1747,7 @@
         return dumps((name, columns, rows), pickle.HIGHEST_PROTOCOL)
 
     def restore(self, backupfile):
-        archive = zipfile.ZipFile(backupfile, 'r')
+        archive = zipfile.ZipFile(backupfile, 'r', allowZip64=True)
         self.cnx = self.get_connection()
         self.cursor = self.cnx.cursor()
         sequences, tables, table_chunks = self.read_metadata(archive, backupfile)
--- a/server/sources/pyrorql.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/server/sources/pyrorql.py	Fri Dec 09 12:08:27 2011 +0100
@@ -191,7 +191,7 @@
                 self.support_entities[ertype] = 'write' in options
         else: # CWRType
             if ertype in ('is', 'is_instance_of', 'cw_source') or ertype in VIRTUAL_RTYPES:
-                msg = schemacfg._cw._('%s relation should not be in mapped') % rtype
+                msg = schemacfg._cw._('%s relation should not be in mapped') % ertype
                 raise ValidationError(schemacfg.eid, {role_name('cw_for_schema', 'subject'): msg})
             options = self._check_options(schemacfg, self.rtype_options)
             if 'dontcross' in options:
@@ -226,7 +226,7 @@
                         self.cross_relations.remove(ertype)
                 else:
                     self.dont_cross_relations.remove(ertype)
-        except:
+        except Exception:
             self.error('while updating mapping consequently to removal of %s',
                        schemacfg)
 
@@ -235,10 +235,12 @@
         if dexturi == 'system' or not (
             dexturi in self.repo.sources_by_uri or self._skip_externals):
             assert etype in self.support_entities, etype
-            return self.repo.extid2eid(self, str(extid), etype, session), True
-        if dexturi in self.repo.sources_by_uri:
+            eid = self.repo.extid2eid(self, str(extid), etype, session)
+            if eid > 0:
+                return eid, True
+        elif dexturi in self.repo.sources_by_uri:
             source = self.repo.sources_by_uri[dexturi]
-            cnx = session.pool.connection(source.uri)
+            cnx = session.cnxset.connection(source.uri)
             eid = source.local_eid(cnx, dextid, session)[0]
             return eid, False
         return None, None
@@ -273,20 +275,22 @@
                         entity = rset.get_entity(0, 0)
                         entity.complete(entity.e_schema.indexable_attributes())
                         source.index_entity(session, entity)
-                except:
+                except Exception:
                     self.exception('while updating %s with external id %s of source %s',
                                    etype, extid, self.uri)
                     continue
             for etype, extid in deleted:
                 try:
-                    eid = self.extid2eid(str(extid), etype, session,
-                                         insert=False)
+                    eid = self.repo.extid2eid(self, str(extid), etype, session,
+                                              insert=False)
                     # entity has been deleted from external repository but is not known here
                     if eid is not None:
                         entity = session.entity_from_eid(eid, etype)
-                        repo.delete_info(session, entity, self.uri, extid,
+                        repo.delete_info(session, entity, self.uri,
                                          scleanup=self.eid)
-                except:
+                except Exception:
+                    if self.repo.config.mode == 'test':
+                        raise
                     self.exception('while updating %s with external id %s of source %s',
                                    etype, extid, self.uri)
                     continue
@@ -322,7 +326,7 @@
         else a new connection
         """
         # we have to transfer manually thread ownership. This can be done safely
-        # since the pool to which belong the connection is affected to one
+        # since the connections set holding the connection is affected to one
         # session/thread and can't be called simultaneously
         try:
             cnx._repo._transferThread(threading.currentThread())
@@ -359,7 +363,7 @@
         if not args is None:
             args = args.copy()
         # get cached cursor anyway
-        cu = session.pool[self.uri]
+        cu = session.cnxset[self.uri]
         if cu is None:
             # this is a ConnectionWrapper instance
             msg = session._("can't connect to source %s, some data may be missing")
@@ -390,7 +394,7 @@
                     or uidtype(union, i, etype, args)):
                     needtranslation.append(i)
             if needtranslation:
-                cnx = session.pool.connection(self.uri)
+                cnx = session.cnxset.connection(self.uri)
                 for rowindex in xrange(rset.rowcount - 1, -1, -1):
                     row = rows[rowindex]
                     localrow = False
@@ -421,7 +425,7 @@
 
     def _entity_relations_and_kwargs(self, session, entity):
         relations = []
-        kwargs = {'x': self.eid2extid(entity.eid, session)}
+        kwargs = {'x': self.repo.eid2extid(self, entity.eid, session)}
         for key, val in entity.cw_attr_cache.iteritems():
             relations.append('X %s %%(%s)s' % (key, key))
             kwargs[key] = val
@@ -434,43 +438,52 @@
     def update_entity(self, session, entity):
         """update an entity in the source"""
         relations, kwargs = self._entity_relations_and_kwargs(session, entity)
-        cu = session.pool[self.uri]
+        cu = session.cnxset[self.uri]
         cu.execute('SET %s WHERE X eid %%(x)s' % ','.join(relations), kwargs)
         self._query_cache.clear()
-        entity.clear_all_caches()
+        entity.cw_clear_all_caches()
 
     def delete_entity(self, session, entity):
         """delete an entity from the source"""
-        cu = session.pool[self.uri]
+        if session.deleted_in_transaction(self.eid):
+            # source is being deleted, don't propagate
+            self._query_cache.clear()
+            return
+        cu = session.cnxset[self.uri]
         cu.execute('DELETE %s X WHERE X eid %%(x)s' % entity.__regid__,
-                   {'x': self.eid2extid(entity.eid, session)})
+                   {'x': self.repo.eid2extid(self, entity.eid, session)})
         self._query_cache.clear()
 
     def add_relation(self, session, subject, rtype, object):
         """add a relation to the source"""
-        cu = session.pool[self.uri]
+        cu = session.cnxset[self.uri]
         cu.execute('SET X %s Y WHERE X eid %%(x)s, Y eid %%(y)s' % rtype,
-                   {'x': self.eid2extid(subject, session),
-                    'y': self.eid2extid(object, session)})
+                   {'x': self.repo.eid2extid(self, subject, session),
+                    'y': self.repo.eid2extid(self, object, session)})
         self._query_cache.clear()
-        session.entity_from_eid(subject).clear_all_caches()
-        session.entity_from_eid(object).clear_all_caches()
+        session.entity_from_eid(subject).cw_clear_all_caches()
+        session.entity_from_eid(object).cw_clear_all_caches()
 
     def delete_relation(self, session, subject, rtype, object):
         """delete a relation from the source"""
-        cu = session.pool[self.uri]
+        if session.deleted_in_transaction(self.eid):
+            # source is being deleted, don't propagate
+            self._query_cache.clear()
+            return
+        cu = session.cnxset[self.uri]
         cu.execute('DELETE X %s Y WHERE X eid %%(x)s, Y eid %%(y)s' % rtype,
-                   {'x': self.eid2extid(subject, session),
-                    'y': self.eid2extid(object, session)})
+                   {'x': self.repo.eid2extid(self, subject, session),
+                    'y': self.repo.eid2extid(self, object, session)})
         self._query_cache.clear()
-        session.entity_from_eid(subject).clear_all_caches()
-        session.entity_from_eid(object).clear_all_caches()
+        session.entity_from_eid(subject).cw_clear_all_caches()
+        session.entity_from_eid(object).cw_clear_all_caches()
 
 
 class RQL2RQL(object):
     """translate a local rql query to be executed on a distant repository"""
     def __init__(self, source):
         self.source = source
+        self.repo = source.repo
         self.current_operator = None
 
     def _accept_children(self, node):
@@ -656,7 +669,7 @@
         value = const.eval(self.kwargs)
         try:
             return None, self._const_var[value]
-        except:
+        except Exception:
             var = self._varmaker.next()
             self.need_translation = True
             restr = '%s eid %s' % (var, self.visit_constant(const))
@@ -666,7 +679,7 @@
 
     def eid2extid(self, eid):
         try:
-            return self.source.eid2extid(eid, self._session)
+            return self.repo.eid2extid(self.source, eid, self._session)
         except UnknownEid:
             operator = self.current_operator
             if operator is not None and operator != '=':
--- a/server/sources/rql2sql.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/server/sources/rql2sql.py	Fri Dec 09 12:08:27 2011 +0100
@@ -56,6 +56,7 @@
 from logilab.database import FunctionDescr, SQL_FUNCTIONS_REGISTRY
 
 from rql import BadRQLQuery, CoercionError
+from rql.utils import common_parent
 from rql.stmts import Union, Select
 from rql.nodes import (SortTerm, VariableRef, Constant, Function, Variable, Or,
                        Not, Comparison, ColumnAlias, Relation, SubQuery, Exists)
@@ -669,7 +670,7 @@
             else:
                 tocheck.append(compnode)
         # tocheck hold a set of comparison not implying an aggregat function
-        # put them in fakehaving if the don't share an Or node as ancestor
+        # put them in fakehaving if they don't share an Or node as ancestor
         # with another comparison containing an aggregat function
         for compnode in tocheck:
             parents = set()
@@ -784,7 +785,20 @@
         sorts = select.orderby
         groups = select.groupby
         having = select.having
-        morerestr = extract_fake_having_terms(having)
+        for restr in extract_fake_having_terms(having):
+            scope = None
+            for vref in restr.get_nodes(VariableRef):
+                vscope = vref.variable.scope
+                if vscope is select:
+                    continue # ignore select scope, so restriction is added to
+                             # the inner most scope possible
+                if scope is None:
+                    scope = vscope
+                elif vscope is not scope:
+                    scope = common_parent(scope, vscope).scope
+            if scope is None:
+                scope = select
+            scope.add_restriction(restr)
         # remember selection, it may be changed and have to be restored
         origselection = select.selection[:]
         # check if the query will have union subquery, if it need sort term
@@ -829,7 +843,7 @@
         self._in_wrapping_query = False
         self._state = state
         try:
-            sql = self._solutions_sql(select, morerestr, sols, distinct,
+            sql = self._solutions_sql(select, sols, distinct,
                                       needalias or needwrap)
             # generate groups / having before wrapping query selection to get
             # correct column aliases
@@ -900,15 +914,13 @@
                 except KeyError:
                     continue
 
-    def _solutions_sql(self, select, morerestr, solutions, distinct, needalias):
+    def _solutions_sql(self, select, solutions, distinct, needalias):
         sqls = []
         for solution in solutions:
             self._state.reset(solution)
             # visit restriction subtree
             if select.where is not None:
                 self._state.add_restriction(select.where.accept(self))
-            for restriction in morerestr:
-                self._state.add_restriction(restriction.accept(self))
             sql = [self._selection_sql(select.selection, distinct, needalias)]
             if self._state.restrictions:
                 sql.append('WHERE %s' % ' AND '.join(self._state.restrictions))
@@ -1226,35 +1238,47 @@
 
 
     def _visit_outer_join_inlined_relation(self, relation, rschema):
-        leftvar, leftconst, rightvar, rightconst = relation_info(relation)
-        assert not (leftconst and rightconst), "doesn't make sense"
-        if relation.optional != 'right':
-            leftvar, rightvar = rightvar, leftvar
-            leftconst, rightconst = rightconst, leftconst
-        outertype = 'FULL' if relation.optional == 'both' else 'LEFT'
-        leftalias = self._var_table(leftvar)
+        lhsvar, lhsconst, rhsvar, rhsconst = relation_info(relation)
+        assert not (lhsconst and rhsconst), "doesn't make sense"
         attr = 'eid' if relation.r_type == 'identity' else relation.r_type
-        lhs, rhs = relation.get_variable_parts()
+        lhsalias = self._var_table(lhsvar)
+        rhsalias = rhsvar and self._var_table(rhsvar)
         try:
-            lhssql = self._varmap['%s.%s' % (lhs.name, attr)]
+            lhssql = self._varmap['%s.%s' % (lhsvar.name, attr)]
         except KeyError:
-            lhssql = '%s.%s%s' % (self._var_table(lhs.variable), SQL_PREFIX, attr)
-        if rightvar is not None:
-            rightalias = self._var_table(rightvar)
-            if rightalias is None:
-                if rightconst is not None:
-                    # inlined relation with invariant as rhs
-                    condition = '%s=%s' % (lhssql, rightconst.accept(self))
-                    if relation.r_type != 'identity':
-                        condition = '(%s OR %s IS NULL)' % (condition, lhssql)
-                    if not leftvar.stinfo.get('optrelations'):
-                        return condition
-                    self._state.add_outer_join_condition(leftalias, condition)
-                return
-        if leftalias is None:
-            leftalias = leftvar._q_sql.split('.', 1)[0]
-        self._state.replace_tables_by_outer_join(
-            leftalias, rightalias, outertype, '%s=%s' % (lhssql, rhs.accept(self)))
+            if lhsalias is None:
+                lhssql = lhsconst.accept(self)
+            else:
+                lhssql = '%s.%s%s' % (lhsalias, SQL_PREFIX, attr)
+        condition = '%s=%s' % (lhssql, (rhsconst or rhsvar).accept(self))
+        # this is not a typo, rhs optional variable means lhs outer join and vice-versa
+        if relation.optional == 'left':
+            lhsvar, rhsvar = rhsvar, lhsvar
+            lhsconst, rhsconst = rhsconst, lhsconst
+            lhsalias, rhsalias = rhsalias, lhsalias
+            outertype = 'LEFT'
+        elif relation.optional == 'both':
+            outertype = 'FULL'
+        else:
+            outertype = 'LEFT'
+        if rhsalias is None:
+            if rhsconst is not None:
+                # inlined relation with invariant as rhs
+                if relation.r_type != 'identity':
+                    condition = '(%s OR %s IS NULL)' % (condition, lhssql)
+                if not lhsvar.stinfo.get('optrelations'):
+                    return condition
+                self._state.add_outer_join_condition(lhsalias, condition)
+            return
+        if lhsalias is None:
+            if lhsconst is not None and not rhsvar.stinfo.get('optrelations'):
+                return condition
+            lhsalias = lhsvar._q_sql.split('.', 1)[0]
+        if lhsalias == rhsalias:
+            self._state.add_outer_join_condition(lhsalias, condition)
+        else:
+            self._state.replace_tables_by_outer_join(
+                lhsalias, rhsalias, outertype, condition)
         return ''
 
     def _visit_var_attr_relation(self, relation, rhs_vars):
@@ -1280,9 +1304,16 @@
                                                relation.r_type)
                 try:
                     self._state.ignore_varmap = True
-                    return '%s%s' % (lhssql, relation.children[1].accept(self))
+                    sql = lhssql + relation.children[1].accept(self)
                 finally:
                     self._state.ignore_varmap = False
+                if relation.optional == 'right':
+                    leftalias = self._var_table(principal.children[0].variable)
+                    rightalias = self._var_table(relation.children[0].variable)
+                    self._state.replace_tables_by_outer_join(
+                        leftalias, rightalias, 'LEFT', sql)
+                    return ''
+                return sql
         return ''
 
     def _visit_attribute_relation(self, rel):
@@ -1360,29 +1391,63 @@
 
     def visit_comparison(self, cmp):
         """generate SQL for a comparison"""
+        optional = getattr(cmp, 'optional', None) # rql < 0.30
         if len(cmp.children) == 2:
-            # XXX occurs ?
+            # simplified expression from HAVING clause
             lhs, rhs = cmp.children
         else:
             lhs = None
             rhs = cmp.children[0]
+            assert not optional
+        sql = None
         operator = cmp.operator
         if operator in ('LIKE', 'ILIKE'):
             if operator == 'ILIKE' and not self.dbhelper.ilike_support:
                 operator = ' LIKE '
             else:
                 operator = ' %s ' % operator
+        elif operator == 'REGEXP':
+            sql = ' %s' % self.dbhelper.sql_regexp_match_expression(rhs.accept(self))
         elif (operator == '=' and isinstance(rhs, Constant)
               and rhs.eval(self._args) is None):
             if lhs is None:
-                return ' IS NULL'
-            return '%s IS NULL' % lhs.accept(self)
+                sql = ' IS NULL'
+            else:
+                sql = '%s IS NULL' % lhs.accept(self)
         elif isinstance(rhs, Function) and rhs.name == 'IN':
             assert operator == '='
             operator = ' '
-        if lhs is None:
-            return '%s%s'% (operator, rhs.accept(self))
-        return '%s%s%s'% (lhs.accept(self), operator, rhs.accept(self))
+        if sql is None:
+            if lhs is None:
+                sql = '%s%s'% (operator, rhs.accept(self))
+            else:
+                sql = '%s%s%s'% (lhs.accept(self), operator, rhs.accept(self))
+        if optional is None:
+            return sql
+        leftvars = cmp.children[0].get_nodes(VariableRef)
+        assert len(leftvars) == 1
+        if leftvars[0].variable.stinfo['attrvar'] is None:
+            assert isinstance(leftvars[0].variable, ColumnAlias)
+            leftalias = leftvars[0].variable._q_sqltable
+        else:
+            leftalias = self._var_table(leftvars[0].variable.stinfo['attrvar'])
+        rightvars = cmp.children[1].get_nodes(VariableRef)
+        assert len(rightvars) == 1
+        if rightvars[0].variable.stinfo['attrvar'] is None:
+            assert isinstance(rightvars[0].variable, ColumnAlias)
+            rightalias = rightvars[0].variable._q_sqltable
+        else:
+            rightalias = self._var_table(rightvars[0].variable.stinfo['attrvar'])
+        if optional == 'right':
+            self._state.replace_tables_by_outer_join(
+                leftalias, rightalias, 'LEFT', sql)
+        elif optional == 'left':
+            self._state.replace_tables_by_outer_join(
+                rightalias, leftalias, 'LEFT', sql)
+        else:
+            self._state.replace_tables_by_outer_join(
+                leftalias, rightalias, 'FULL', sql)
+        return ''
 
     def visit_mathexpression(self, mexpr):
         """generate SQL for a mathematic expression"""
@@ -1397,6 +1462,10 @@
             pass
         return '(%s %s %s)'% (lhs.accept(self), operator, rhs.accept(self))
 
+    def visit_unaryexpression(self, uexpr):
+        """generate SQL for a unary expression"""
+        return '%s%s'% (uexpr.operator, uexpr.children[0].accept(self))
+
     def visit_function(self, func):
         """generate SQL name for a function"""
         if func.name == 'FTIRANK':
@@ -1422,15 +1491,17 @@
         if constant.type is None:
             return 'NULL'
         value = constant.value
-        if constant.type == 'Int' and  isinstance(constant.parent, SortTerm):
+        if constant.type == 'etype':
             return value
+        if constant.type == 'Int': # XXX Float?
+            return str(value)
         if constant.type in ('Date', 'Datetime'):
             rel = constant.relation()
             if rel is not None:
                 rel._q_needcast = value
             return self.keyword_map[value]()
         if constant.type == 'Boolean':
-            value = self.dbhelper.boolean_value(value)
+            return str(self.dbhelper.boolean_value(value))
         if constant.type == 'Substitute':
             try:
                 # we may found constant from simplified var in varmap
@@ -1584,8 +1655,14 @@
             scope = self._state.scopes[var.scope]
             self._state.add_table(sql.split('.', 1)[0], scope=scope)
         except KeyError:
-            sql = '%s.%s%s' % (self._var_table(var), SQL_PREFIX, rtype)
-            #self._state.done.add(var.name)
+            # rtype may be an attribute relation when called from
+            # _visit_var_attr_relation.  take care about 'eid' rtype, since in
+            # some case we may use the `entities` table, so in that case we've
+            # to properly use variable'sql
+            if rtype == 'eid':
+                sql = var.accept(self)
+            else:
+                sql = '%s.%s%s' % (self._var_table(var), SQL_PREFIX, rtype)
         return sql
 
     def _linked_var_sql(self, variable):
--- a/server/sources/storages.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/server/sources/storages.py	Fri Dec 09 12:08:27 2011 +0100
@@ -211,7 +211,7 @@
         """return the current fs_path of the tribute.
 
         Return None is the attr is not stored yet."""
-        sysource = entity._cw.pool.source('system')
+        sysource = entity._cw.cnxset.source('system')
         cu = sysource.doexec(entity._cw,
                              'SELECT cw_%s FROM cw_%s WHERE cw_eid=%s' % (
                              attr, entity.__regid__, entity.eid))
--- a/server/sqlutils.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/server/sqlutils.py	Fri Dec 09 12:08:27 2011 +0100
@@ -338,6 +338,17 @@
         return _limit_size(text, maxsize)
     cnx.create_function("TEXT_LIMIT_SIZE", 2, limit_size2)
 
+    from logilab.common.date import strptime
+    def weekday(ustr):
+        try:
+            dt = strptime(ustr, '%Y-%m-%d %H:%M:%S')
+        except:
+            dt =  strptime(ustr, '%Y-%m-%d')
+        # expect sunday to be 1, saturday 7 while weekday method return 0 for
+        # monday
+        return (dt.weekday() + 1) % 7
+    cnx.create_function("WEEKDAY", 1, weekday)
+
     import yams.constraints
     yams.constraints.patch_sqlite_decimal()
 
--- a/server/test/data/site_cubicweb.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/server/test/data/site_cubicweb.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -20,14 +20,10 @@
 from logilab.database.sqlite import register_sqlite_pyfunc
 from rql.utils import register_function
 
-try:
-    class DUMB_SORT(FunctionDescr):
-        pass
+class DUMB_SORT(FunctionDescr):
+    pass
 
-    register_function(DUMB_SORT)
-    def dumb_sort(something):
-        return something
-    register_sqlite_pyfunc(dumb_sort)
-except:
-    # already registered
-    pass
+register_function(DUMB_SORT)
+def dumb_sort(something):
+    return something
+register_sqlite_pyfunc(dumb_sort)
--- a/server/test/unittest_datafeed.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/server/test/unittest_datafeed.py	Fri Dec 09 12:08:27 2011 +0100
@@ -37,19 +37,21 @@
         self.assertEqual(dfsource.synchro_interval, timedelta(seconds=60))
         self.assertFalse(dfsource.fresh())
 
+
         class AParser(datafeed.DataFeedParser):
             __regid__ = 'testparser'
-            def process(self, url):
+            def process(self, url, raise_on_error=False):
                 entity = self.extid2entity('http://www.cubicweb.org/', 'Card',
-                                  item={'title': u'cubicweb.org',
-                                        'content': u'the cw web site'})
+                                           item={'title': u'cubicweb.org',
+                                                 'content': u'the cw web site'})
                 if not self.created_during_pull(entity):
                     self.notify_updated(entity)
             def before_entity_copy(self, entity, sourceparams):
                 entity.cw_edited.update(sourceparams['item'])
 
         with self.temporary_appobjects(AParser):
-            stats = dfsource.pull_data(self.session, force=True)
+            session = self.repo.internal_session()
+            stats = dfsource.pull_data(session, force=True)
             self.commit()
             # test import stats
             self.assertEqual(sorted(stats.keys()), ['created', 'updated'])
@@ -64,26 +66,29 @@
             self.assertEqual(entity.cw_source[0].name, 'myfeed')
             self.assertEqual(entity.cw_metainformation(),
                              {'type': 'Card',
-                              'source': {'uri': 'system', 'type': 'native'},
+                              'source': {'uri': 'myfeed', 'type': 'datafeed', 'use-cwuri-as-url': True},
                               'extid': 'http://www.cubicweb.org/'}
                              )
+            self.assertEqual(entity.absolute_url(), 'http://www.cubicweb.org/')
             # test repo cache keys
             self.assertEqual(self.repo._type_source_cache[entity.eid],
-                             ('Card', 'system', 'http://www.cubicweb.org/'))
+                             ('Card', 'system', 'http://www.cubicweb.org/', 'myfeed'))
             self.assertEqual(self.repo._extid_cache[('http://www.cubicweb.org/', 'system')],
                              entity.eid)
             # test repull
-            stats = dfsource.pull_data(self.session, force=True)
+            session.set_cnxset()
+            stats = dfsource.pull_data(session, force=True)
             self.assertEqual(stats['created'], set())
             self.assertEqual(stats['updated'], set((entity.eid,)))
             # test repull with caches reseted
             self.repo._type_source_cache.clear()
             self.repo._extid_cache.clear()
-            stats = dfsource.pull_data(self.session, force=True)
+            session.set_cnxset()
+            stats = dfsource.pull_data(session, force=True)
             self.assertEqual(stats['created'], set())
             self.assertEqual(stats['updated'], set((entity.eid,)))
             self.assertEqual(self.repo._type_source_cache[entity.eid],
-                             ('Card', 'system', 'http://www.cubicweb.org/'))
+                             ('Card', 'system', 'http://www.cubicweb.org/', 'myfeed'))
             self.assertEqual(self.repo._extid_cache[('http://www.cubicweb.org/', 'system')],
                              entity.eid)
 
@@ -93,6 +98,30 @@
         self.assertTrue(dfsource.latest_retrieval)
         self.assertTrue(dfsource.fresh())
 
+        # test_rename_source
+        req = self.request()
+        req.execute('SET S name "myrenamedfeed" WHERE S is CWSource, S name "myfeed"')
+        self.commit()
+        entity = self.execute('Card X').get_entity(0, 0)
+        self.assertEqual(entity.cwuri, 'http://www.cubicweb.org/')
+        self.assertEqual(entity.cw_source[0].name, 'myrenamedfeed')
+        self.assertEqual(entity.cw_metainformation(),
+                         {'type': 'Card',
+                          'source': {'uri': 'myrenamedfeed', 'type': 'datafeed', 'use-cwuri-as-url': True},
+                          'extid': 'http://www.cubicweb.org/'}
+                         )
+        self.assertEqual(self.repo._type_source_cache[entity.eid],
+                         ('Card', 'system', 'http://www.cubicweb.org/', 'myrenamedfeed'))
+        self.assertEqual(self.repo._extid_cache[('http://www.cubicweb.org/', 'system')],
+                         entity.eid)
+
+        # test_delete_source
+        req = self.request()
+        req.execute('DELETE CWSource S WHERE S name "myrenamedfeed"')
+        self.commit()
+        self.failIf(self.execute('Card X WHERE X title "cubicweb.org"'))
+        self.failIf(self.execute('Any X WHERE X has_text "cubicweb.org"'))
+
 if __name__ == '__main__':
     from logilab.common.testlib import unittest_main
     unittest_main()
--- a/server/test/unittest_hook.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/server/test/unittest_hook.py	Fri Dec 09 12:08:27 2011 +0100
@@ -23,7 +23,7 @@
 from logilab.common.testlib import TestCase, unittest_main, mock_object
 
 
-from cubicweb.devtools import TestServerConfiguration
+from cubicweb.devtools import TestServerConfiguration, fake
 from cubicweb.devtools.testlib import CubicWebTC
 from cubicweb.server import hook
 from cubicweb.hooks import integrity, syncschema
@@ -124,10 +124,8 @@
     def test_call_hook(self):
         self.o.register(AddAnyHook)
         dis = set()
-        cw = mock_object(vreg=self.vreg,
-                         set_read_security=lambda *a,**k: None,
-                         set_write_security=lambda *a,**k: None,
-                         is_hook_activated=lambda x, cls: cls.category not in dis)
+        cw = fake.FakeSession()
+        cw.is_hook_activated = lambda cls: cls.category not in dis
         self.assertRaises(HookCalled,
                           self.o.call_hooks, 'before_add_entity', cw)
         dis.add('cat1')
@@ -203,10 +201,10 @@
 #         self.assertEqual(self.called, [(1, 'concerne', 2), (3, 'concerne', 4)])
 
 
-#     def _before_relation_hook(self, pool, subject, r_type, object):
+#     def _before_relation_hook(self, cnxset, subject, r_type, object):
 #         self.called.append((subject, r_type, object))
 
-#     def _after_relation_hook(self, pool, subject, r_type, object):
+#     def _after_relation_hook(self, cnxset, subject, r_type, object):
 #         self.called.append((subject, r_type, object))
 
 
--- a/server/test/unittest_ldapuser.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/server/test/unittest_ldapuser.py	Fri Dec 09 12:08:27 2011 +0100
@@ -61,7 +61,7 @@
         # no such user
         raise AuthenticationError()
     # don't check upassword !
-    return self.extid2eid(user['dn'], 'CWUser', session)
+    return self.repo.extid2eid(self, user['dn'], 'CWUser', session)
 
 def setUpModule(*args):
     create_slapd_configuration(LDAPUserSourceTC.config)
@@ -137,7 +137,7 @@
 
     def test_authenticate(self):
         source = self.repo.sources_by_uri['ldapuser']
-        self.session.set_pool()
+        self.session.set_cnxset()
         self.assertRaises(AuthenticationError,
                           source.authenticate, self.session, 'toto', 'toto')
 
@@ -239,7 +239,7 @@
         iworkflowable.fire_transition('deactivate')
         try:
             cnx.commit()
-            adim.clear_all_caches()
+            adim.cw_clear_all_caches()
             self.assertEqual(adim.in_state[0].name, 'deactivated')
             trinfo = iworkflowable.latest_trinfo()
             self.assertEqual(trinfo.owned_by[0].login, SYT)
@@ -265,7 +265,7 @@
         self.failUnless(self.sexecute('Any X,Y WHERE X login %(syt)s, Y login "cochon"', {'syt': SYT}))
 
     def test_exists1(self):
-        self.session.set_pool()
+        self.session.set_cnxset()
         self.session.create_entity('CWGroup', name=u'bougloup1')
         self.session.create_entity('CWGroup', name=u'bougloup2')
         self.sexecute('SET U in_group G WHERE G name ~= "bougloup%", U login "admin"')
@@ -378,6 +378,27 @@
         rset = cu.execute('Any F WHERE X has_text "iaminguestsgrouponly", X firstname F')
         self.assertEqual(rset.rows, [[None]])
 
+    def test_copy_to_system_source(self):
+        source = self.repo.sources_by_uri['ldapuser']
+        eid = self.sexecute('CWUser X WHERE X login %(login)s', {'login': SYT})[0][0]
+        self.sexecute('SET X cw_source S WHERE X eid %(x)s, S name "system"', {'x': eid})
+        self.commit()
+        source.reset_caches()
+        rset = self.sexecute('CWUser X WHERE X login %(login)s', {'login': SYT})
+        self.assertEqual(len(rset), 1)
+        e = rset.get_entity(0, 0)
+        self.assertEqual(e.eid, eid)
+        self.assertEqual(e.cw_metainformation(), {'source': {'type': u'native', 'uri': u'system', 'use-cwuri-as-url': False},
+                                                  'type': 'CWUser',
+                                                  'extid': None})
+        self.assertEqual(e.cw_source[0].name, 'system')
+        self.failUnless(e.creation_date)
+        self.failUnless(e.modification_date)
+        # XXX test some password has been set
+        source.synchronize()
+        rset = self.sexecute('CWUser X WHERE X login %(login)s', {'login': SYT})
+        self.assertEqual(len(rset), 1)
+
     def test_nonregr1(self):
         self.sexecute('Any X,AA ORDERBY AA DESC WHERE E eid %(x)s, E owned_by X, '
                      'X modification_date AA',
@@ -465,8 +486,8 @@
         self._schema = repo.schema
         super(RQL2LDAPFilterTC, self).setUp()
         ldapsource = repo.sources[-1]
-        self.pool = repo._get_pool()
-        session = mock_object(pool=self.pool)
+        self.cnxset = repo._get_cnxset()
+        session = mock_object(cnxset=self.cnxset)
         self.o = RQL2LDAPFilter(ldapsource, session)
         self.ldapclasses = ''.join(ldapsource.base_filters)
 
--- a/server/test/unittest_migractions.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/server/test/unittest_migractions.py	Fri Dec 09 12:08:27 2011 +0100
@@ -338,7 +338,7 @@
     @tag('longrun')
     def test_sync_schema_props_perms(self):
         cursor = self.mh.session
-        cursor.set_pool()
+        cursor.set_cnxset()
         nbrqlexpr_start = cursor.execute('Any COUNT(X) WHERE X is RQLExpression')[0][0]
         migrschema['titre'].rdefs[('Personne', 'String')].order = 7
         migrschema['adel'].rdefs[('Personne', 'String')].order = 6
--- a/server/test/unittest_msplanner.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/server/test/unittest_msplanner.py	Fri Dec 09 12:08:27 2011 +0100
@@ -296,7 +296,7 @@
                    True)
 
     def test_not_relation_no_split_external(self):
-        repo._type_source_cache[999999] = ('Note', 'cards', 999999)
+        repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
         # similar to the above test but with an eid coming from the external source.
         # the same plan may be used, since we won't find any record in the system source
         # linking 9999999 to a state
@@ -313,13 +313,15 @@
                    True)
 
     def test_simplified_var(self):
-        repo._type_source_cache[999999] = ('Note', 'cards', 999999)
+        repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
+        # need access to source since X table has to be accessed because of the outer join
         self._test('Any U WHERE U in_group G, (G name IN ("managers", "logilab") OR (X require_permission P?, P name "bla", P require_group G)), X eid %(x)s, U eid %(u)s',
                    {'x': 999999, 'u': self.session.user.eid},
-                   {self.system: {'P': s[0], 'G': s[0], 'X': s[0],
+                   {self.system: {'P': s[0], 'G': s[0],
                                   'require_permission': s[0], 'in_group': s[0], 'P': s[0], 'require_group': s[0],
-                                  'u': s[0]}},
-                   False)
+                                  'u': s[0]},
+                    self.cards: {'X': s[0]}},
+                   True)
 
     def test_delete_relation1(self):
         ueid = self.session.user.eid
@@ -329,7 +331,7 @@
                    False)
 
     def test_crossed_relation_eid_1_needattr(self):
-        repo._type_source_cache[999999] = ('Note', 'system', 999999)
+        repo._type_source_cache[999999] = ('Note', 'system', 999999, 'system')
         ueid = self.session.user.eid
         self._test('Any Y,T WHERE X eid %(x)s, X multisource_crossed_rel Y, Y type T',
                    {'x': 999999,},
@@ -337,14 +339,14 @@
                    True)
 
     def test_crossed_relation_eid_1_invariant(self):
-        repo._type_source_cache[999999] = ('Note', 'system', 999999)
+        repo._type_source_cache[999999] = ('Note', 'system', 999999, 'system')
         self._test('Any Y WHERE X eid %(x)s, X multisource_crossed_rel Y',
                    {'x': 999999},
                    {self.system: {'Y': s[0], 'x': s[0]}},
                    False)
 
     def test_crossed_relation_eid_2_invariant(self):
-        repo._type_source_cache[999999] = ('Note', 'cards', 999999)
+        repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
         self._test('Any Y WHERE X eid %(x)s, X multisource_crossed_rel Y',
                    {'x': 999999,},
                    {self.cards: {'Y': s[0], 'multisource_crossed_rel': s[0], 'x': s[0]},
@@ -352,7 +354,7 @@
                    False)
 
     def test_version_crossed_depends_on_1(self):
-        repo._type_source_cache[999999] = ('Note', 'cards', 999999)
+        repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
         self._test('Any X,AD,AE WHERE E eid %(x)s, E multisource_crossed_rel X, X in_state AD, AD name AE',
                    {'x': 999999},
                    {self.cards: {'X': s[0], 'AD': s[0], 'multisource_crossed_rel': s[0], 'x': s[0]},
@@ -360,7 +362,7 @@
                    True)
 
     def test_version_crossed_depends_on_2(self):
-        repo._type_source_cache[999999] = ('Note', 'system', 999999)
+        repo._type_source_cache[999999] = ('Note', 'system', 999999, 'system')
         self._test('Any X,AD,AE WHERE E eid %(x)s, E multisource_crossed_rel X, X in_state AD, AD name AE',
                    {'x': 999999},
                    {self.cards: {'X': s[0], 'AD': s[0]},
@@ -368,8 +370,8 @@
                     True)
 
     def test_simplified_var_3(self):
-        repo._type_source_cache[999999] = ('Note', 'cards', 999999)
-        repo._type_source_cache[999998] = ('State', 'cards', 999998)
+        repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
+        repo._type_source_cache[999998] = ('State', 'cards', 999998, 'cards')
         self._test('Any S,T WHERE S eid %(s)s, N eid %(n)s, N type T, N is Note, S is State',
                    {'n': 999999, 's': 999998},
                    {self.cards: {'s': s[0], 'N': s[0]}}, False)
@@ -1210,12 +1212,12 @@
                                     [{'X': 'Note', 'S': 'State'}])],
                      [self.cards, self.system], None, {'X': 'table0.C0', 'S': 'table0.C1'}, []),
                      ('UnionStep', None, None,
-                      [('OneFetchStep', [('Any X,S,U WHERE X in_state S, X todo_by U, S is State, U is CWUser, X is Note',
+                      [('OneFetchStep', [('Any X,S,U WHERE X in_state S, X todo_by U, S is State, U is Personne, X is Affaire',
+                                          [{'X': 'Affaire', 'S': 'State', 'U': 'Personne'}])],
+                        None, None, [self.system], {}, []),
+                       ('OneFetchStep', [('Any X,S,U WHERE X todo_by U, S is State, U is CWUser, X is Note',
                                           [{'X': 'Note', 'S': 'State', 'U': 'CWUser'}])],
                         None, None, [self.system], {'X': 'table0.C0', 'S': 'table0.C1'}, []),
-                       ('OneFetchStep', [('Any X,S,U WHERE X in_state S, X todo_by U, S is State, U is Personne, X is Affaire',
-                                          [{'X': 'Affaire', 'S': 'State', 'U': 'Personne'}])],
-                        None, None, [self.system], {}, []),
                        ])
                     ])
 
@@ -1266,7 +1268,7 @@
                    {'x': ueid})
 
     def test_not_relation_no_split_external(self):
-        repo._type_source_cache[999999] = ('Note', 'cards', 999999)
+        repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
         # similar to the above test but with an eid coming from the external source.
         # the same plan may be used, since we won't find any record in the system source
         # linking 9999999 to a state
@@ -1297,7 +1299,7 @@
                      )])
 
     def test_external_attributes_and_relation(self):
-        repo._type_source_cache[999999] = ('Note', 'cards', 999999)
+        repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
         self._test('Any A,B,C,D WHERE A eid %(x)s,A creation_date B,A modification_date C, A todo_by D?',
                    [('FetchStep', [('Any A,B,C WHERE A eid 999999, A creation_date B, A modification_date C, A is Note',
                                     [{'A': 'Note', 'C': 'Datetime', 'B': 'Datetime'}])],
@@ -1312,12 +1314,31 @@
                    {'x': 999999})
 
 
-    def test_simplified_var(self):
+    def test_simplified_var_1(self):
         ueid = self.session.user.eid
-        repo._type_source_cache[999999] = ('Note', 'cards', 999999)
-        self._test('Any U WHERE U in_group G, (G name IN ("managers", "logilab") OR (X require_permission P?, P name "bla", P require_group G)), X eid %(x)s, U eid %(u)s',
-                   [('OneFetchStep', [('Any %s WHERE %s in_group G, (G name IN("managers", "logilab")) OR (X require_permission P?, P name "bla", P require_group G), X eid 999999' % (ueid, ueid),
-                                       [{'X': 'Note', 'G': 'CWGroup', 'P': 'CWPermission'}])],
+        repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
+        # need access to cards source since X table has to be accessed because of the outer join
+        self._test('Any U WHERE U in_group G, (G name IN ("managers", "logilab") OR '
+                   '(X require_permission P?, P name "bla", P require_group G)), X eid %(x)s, U eid %(u)s',
+                   [('FetchStep',
+                     [('Any 999999', [{}])], [self.cards],
+                     None, {u'%(x)s': 'table0.C0'}, []),
+                    ('OneFetchStep',
+                     [(u'Any 6 WHERE 6 in_group G, (G name IN("managers", "logilab")) OR '
+                       '(X require_permission P?, P name "bla", P require_group G), '
+                       'G is CWGroup, P is CWPermission, X is Note',
+                       [{'G': 'CWGroup', 'P': 'CWPermission', 'X': 'Note'}])],
+                     None, None, [self.system], {u'%(x)s': 'table0.C0'}, [])],
+                   {'x': 999999, 'u': ueid})
+
+    def test_simplified_var_2(self):
+        ueid = self.session.user.eid
+        repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
+        # no need access to source since X is invariant
+        self._test('Any U WHERE U in_group G, (G name IN ("managers", "logilab") OR '
+                   '(X require_permission P, P name "bla", P require_group G)), X eid %(x)s, U eid %(u)s',
+                   [('OneFetchStep', [('Any %s WHERE %s in_group G, (G name IN("managers", "logilab")) OR (999999 require_permission P, P name "bla", P require_group G)' % (ueid, ueid),
+                                       [{'G': 'CWGroup', 'P': 'CWPermission'}])],
                      None, None, [self.system], {}, [])],
                    {'x': 999999, 'u': ueid})
 
@@ -1529,7 +1550,7 @@
                    {'E': ueid})
 
     def test_eid_dont_cross_relation_1(self):
-        repo._type_source_cache[999999] = ('Personne', 'system', 999999)
+        repo._type_source_cache[999999] = ('Personne', 'system', 999999, 'system')
         self._test('Any Y,YT WHERE X eid %(x)s, X fiche Y, Y title YT',
                    [('OneFetchStep', [('Any Y,YT WHERE X eid 999999, X fiche Y, Y title YT',
                                        [{'X': 'Personne', 'Y': 'Card', 'YT': 'String'}])],
@@ -1537,7 +1558,7 @@
                    {'x': 999999})
 
     def test_eid_dont_cross_relation_2(self):
-        repo._type_source_cache[999999] = ('Note', 'cards', 999999)
+        repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
         self.cards.dont_cross_relations.add('concerne')
         try:
             self._test('Any Y,S,YT,X WHERE Y concerne X, Y in_state S, X eid 999999, Y ref YT',
@@ -1552,7 +1573,7 @@
     # external source w/ .cross_relations == ['multisource_crossed_rel'] ######
 
     def test_crossed_relation_eid_1_invariant(self):
-        repo._type_source_cache[999999] = ('Note', 'system', 999999)
+        repo._type_source_cache[999999] = ('Note', 'system', 999999, 'system')
         self._test('Any Y WHERE X eid %(x)s, X multisource_crossed_rel Y',
                    [('OneFetchStep', [('Any Y WHERE 999999 multisource_crossed_rel Y', [{u'Y': 'Note'}])],
                       None, None, [self.system], {}, [])
@@ -1560,7 +1581,7 @@
                    {'x': 999999,})
 
     def test_crossed_relation_eid_1_needattr(self):
-        repo._type_source_cache[999999] = ('Note', 'system', 999999)
+        repo._type_source_cache[999999] = ('Note', 'system', 999999, 'system')
         self._test('Any Y,T WHERE X eid %(x)s, X multisource_crossed_rel Y, Y type T',
                    [('FetchStep', [('Any Y,T WHERE Y type T, Y is Note', [{'T': 'String', 'Y': 'Note'}])],
                      [self.cards, self.system], None,
@@ -1573,7 +1594,7 @@
                    {'x': 999999,})
 
     def test_crossed_relation_eid_2_invariant(self):
-        repo._type_source_cache[999999] = ('Note', 'cards', 999999)
+        repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
         self._test('Any Y WHERE X eid %(x)s, X multisource_crossed_rel Y',
                    [('OneFetchStep', [('Any Y WHERE 999999 multisource_crossed_rel Y, Y is Note', [{'Y': 'Note'}])],
                       None, None, [self.cards, self.system], {}, [])
@@ -1581,7 +1602,7 @@
                    {'x': 999999,})
 
     def test_crossed_relation_eid_2_needattr(self):
-        repo._type_source_cache[999999] = ('Note', 'cards', 999999)
+        repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
         self._test('Any Y,T WHERE X eid %(x)s, X multisource_crossed_rel Y, Y type T',
                    [('OneFetchStep', [('Any Y,T WHERE 999999 multisource_crossed_rel Y, Y type T, Y is Note',
                                        [{'T': 'String', 'Y': 'Note'}])],
@@ -1591,7 +1612,7 @@
                    {'x': 999999,})
 
     def test_crossed_relation_eid_not_1(self):
-        repo._type_source_cache[999999] = ('Note', 'system', 999999)
+        repo._type_source_cache[999999] = ('Note', 'system', 999999, 'system')
         self._test('Any Y WHERE X eid %(x)s, NOT X multisource_crossed_rel Y',
                    [('FetchStep', [('Any Y WHERE Y is Note', [{'Y': 'Note'}])],
                      [self.cards, self.system], None, {'Y': 'table0.C0'}, []),
@@ -1608,7 +1629,7 @@
 #                    {'x': 999999,})
 
     def test_crossed_relation_base_XXXFIXME(self):
-        repo._type_source_cache[999999] = ('Note', 'system', 999999)
+        repo._type_source_cache[999999] = ('Note', 'system', 999999, 'system')
         self._test('Any X,Y,T WHERE X multisource_crossed_rel Y, Y type T, X type T',
                    [('FetchStep', [('Any X,T WHERE X type T, X is Note', [{'T': 'String', 'X': 'Note'}])],
                      [self.cards, self.system], None,
@@ -1697,8 +1718,8 @@
     # edition queries tests ###################################################
 
     def test_insert_simplified_var_1(self):
-        repo._type_source_cache[999999] = ('Note', 'cards', 999999)
-        repo._type_source_cache[999998] = ('State', 'system', None)
+        repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
+        repo._type_source_cache[999998] = ('State', 'system', None, 'system')
         self._test('INSERT Note X: X in_state S, X type T WHERE S eid %(s)s, N eid %(n)s, N type T',
                    [('InsertStep',
                      [('InsertRelationsStep',
@@ -1710,8 +1731,8 @@
                    {'n': 999999, 's': 999998})
 
     def test_insert_simplified_var_2(self):
-        repo._type_source_cache[999999] = ('Note', 'cards', 999999)
-        repo._type_source_cache[999998] = ('State', 'system', None)
+        repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
+        repo._type_source_cache[999998] = ('State', 'system', None, 'system')
         self._test('INSERT Note X: X in_state S, X type T, X migrated_from N WHERE S eid %(s)s, N eid %(n)s, N type T',
                    [('InsertStep',
                      [('InsertRelationsStep',
@@ -1724,8 +1745,8 @@
                    {'n': 999999, 's': 999998})
 
     def test_insert_simplified_var_3(self):
-        repo._type_source_cache[999999] = ('Note', 'cards', 999999)
-        repo._type_source_cache[999998] = ('State', 'cards', 999998)
+        repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
+        repo._type_source_cache[999998] = ('State', 'cards', 999998, 'cards')
         self._test('INSERT Note X: X in_state S, X type T WHERE S eid %(s)s, N eid %(n)s, N type T',
                    [('InsertStep',
                      [('InsertRelationsStep',
@@ -1737,8 +1758,8 @@
                    {'n': 999999, 's': 999998})
 
     def test_insert_simplified_var_4(self):
-        repo._type_source_cache[999999] = ('Note', 'cards', 999999)
-        repo._type_source_cache[999998] = ('State', 'system', None)
+        repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
+        repo._type_source_cache[999998] = ('State', 'system', None, 'system')
         self._test('INSERT Note X: X in_state S, X type "bla", X migrated_from N WHERE S eid %(s)s, N eid %(n)s',
                    [('InsertStep',
                       [('InsertRelationsStep', [])]
@@ -1746,8 +1767,8 @@
                    {'n': 999999, 's': 999998})
 
     def test_insert_simplified_var_5(self):
-        repo._type_source_cache[999999] = ('Note', 'cards', 999999)
-        repo._type_source_cache[999998] = ('State', 'system', None)
+        repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
+        repo._type_source_cache[999998] = ('State', 'system', None, 'system')
         self._test('INSERT Note X: X in_state S, X type "bla", X migrated_from N WHERE S eid %(s)s, N eid %(n)s, A concerne N',
                    [('InsertStep',
                      [('InsertRelationsStep',
@@ -1784,19 +1805,23 @@
                    {'x': ueid, 'y': ueid})
 
     def test_delete_relation3(self):
-        repo._type_source_cache[999999] = ('Note', 'cards', 999999)
-        self._test('DELETE Y multisource_inlined_rel X WHERE X eid %(x)s, NOT (Y cw_source S, S name %(source)s)',
-                   [('DeleteRelationsStep',
-                     [('OneFetchStep',
-                       [('Any Y,999999 WHERE Y multisource_inlined_rel 999999, NOT EXISTS(Y cw_source S, S name "cards"), S is CWSource, Y is IN(Card, Note)',
-                         [{'S': 'CWSource', 'Y': 'Card'}, {'S': 'CWSource', 'Y': 'Note'}])],
-                       None, None, [self.system], {},
-                       [])]
-                     )],
-                   {'x': 999999, 'source': 'cards'})
+        repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
+        self.assertRaises(
+            BadRQLQuery, self._test,
+            'DELETE Y multisource_inlined_rel X WHERE X eid %(x)s, '
+            'NOT (Y cw_source S, S name %(source)s)', [],
+            {'x': 999999, 'source': 'cards'})
+
+    def test_delete_relation4(self):
+        repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
+        self.assertRaises(
+            BadRQLQuery, self._test,
+            'DELETE X multisource_inlined_rel Y WHERE Y is Note, X eid %(x)s, '
+            'NOT (Y cw_source S, S name %(source)s)', [],
+            {'x': 999999, 'source': 'cards'})
 
     def test_delete_entity1(self):
-        repo._type_source_cache[999999] = ('Note', 'system', 999999)
+        repo._type_source_cache[999999] = ('Note', 'system', 999999, 'system')
         self._test('DELETE Note X WHERE X eid %(x)s, NOT Y multisource_rel X',
                    [('DeleteEntitiesStep',
                      [('OneFetchStep', [('Any 999999 WHERE NOT EXISTS(Y multisource_rel 999999), Y is IN(Card, Note)',
@@ -1807,7 +1832,7 @@
                    {'x': 999999})
 
     def test_delete_entity2(self):
-        repo._type_source_cache[999999] = ('Note', 'system', 999999)
+        repo._type_source_cache[999999] = ('Note', 'system', 999999, 'system')
         self._test('DELETE Note X WHERE X eid %(x)s, NOT X multisource_inlined_rel Y',
                    [('DeleteEntitiesStep',
                      [('OneFetchStep', [('Any X WHERE X eid 999999, NOT X multisource_inlined_rel Y, X is Note, Y is IN(Affaire, Note)',
@@ -1872,7 +1897,7 @@
 #                     ])
 
     def test_ldap_user_related_to_invariant_and_dont_cross_rel(self):
-        self.repo._type_source_cache[999999] = ('Note', 'cards', 999999)
+        self.repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
         self.cards.dont_cross_relations.add('created_by')
         try:
             self._test('Any X,XL WHERE E eid %(x)s, E created_by X, X login XL',
@@ -1893,7 +1918,7 @@
             self.cards.dont_cross_relations.remove('created_by')
 
     def test_ambigous_cross_relation(self):
-        self.repo._type_source_cache[999999] = ('Note', 'cards', 999999)
+        self.repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
         self.cards.support_relations['see_also'] = True
         self.cards.cross_relations.add('see_also')
         try:
@@ -2044,7 +2069,7 @@
                     ])
 
     def test_source_conflict_1(self):
-        self.repo._type_source_cache[999999] = ('Note', 'cards', 999999)
+        self.repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
         with self.assertRaises(BadRQLQuery) as cm:
             self._test('Any X WHERE X cw_source S, S name "system", X eid %(x)s',
                        [], {'x': 999999})
@@ -2067,7 +2092,7 @@
 
 
     def test_ambigous_cross_relation_source_specified(self):
-        self.repo._type_source_cache[999999] = ('Note', 'cards', 999999)
+        self.repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
         self.cards.support_relations['see_also'] = True
         self.cards.cross_relations.add('see_also')
         try:
@@ -2198,7 +2223,7 @@
                     ])
 
     def test_nonregr7(self):
-        repo._type_source_cache[999999] = ('Note', 'cards', 999999)
+        repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
         self._test('Any S,SUM(DUR),SUM(I),(SUM(I) - SUM(DUR)),MIN(DI),MAX(DI) GROUPBY S ORDERBY S WHERE A is Affaire, A duration DUR, A invoiced I, A modification_date DI, A in_state S, S name SN, (EXISTS(A concerne WP, W multisource_rel WP)) OR (EXISTS(A concerne W)), W eid %(n)s',
                    [('FetchStep', [('Any WP WHERE 999999 multisource_rel WP, WP is Note', [{'WP': 'Note'}])],
                      [self.cards], None, {'WP': u'table0.C0'}, []),
@@ -2208,7 +2233,7 @@
                    {'n': 999999})
 
     def test_nonregr8(self):
-        repo._type_source_cache[999999] = ('Note', 'cards', 999999)
+        repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
         self._test('Any X,Z WHERE X eid %(x)s, X multisource_rel Y, Z concerne X',
                    [('FetchStep', [('Any 999999 WHERE 999999 multisource_rel Y, Y is Note',
                                     [{'Y': 'Note'}])],
@@ -2223,8 +2248,8 @@
                    {'x': 999999})
 
     def test_nonregr9(self):
-        repo._type_source_cache[999999] = ('Note', 'cards', 999999)
-        repo._type_source_cache[999998] = ('Note', 'cards', 999998)
+        repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
+        repo._type_source_cache[999998] = ('Note', 'cards', 999998, 'cards')
         self._test('SET X migrated_from Y WHERE X eid %(x)s, Y multisource_rel Z, Z eid %(z)s, Y migrated_from Z',
                    [('FetchStep', [('Any Y WHERE Y multisource_rel 999998, Y is Note', [{'Y': 'Note'}])],
                      [self.cards], None, {'Y': u'table0.C0'}, []),
@@ -2236,7 +2261,7 @@
                    {'x': 999999, 'z': 999998})
 
     def test_nonregr10(self):
-        repo._type_source_cache[999999] = ('CWUser', 'ldap', 999999)
+        repo._type_source_cache[999999] = ('CWUser', 'ldap', 999999, 'ldap')
         self._test('Any X,AA,AB ORDERBY AA WHERE E eid %(x)s, E owned_by X, X login AA, X modification_date AB',
                    [('FetchStep',
                      [('Any X,AA,AB WHERE X login AA, X modification_date AB, X is CWUser',
@@ -2254,7 +2279,7 @@
                    {'x': 999999})
 
     def test_nonregr11(self):
-        repo._type_source_cache[999999] = ('Bookmark', 'system', 999999)
+        repo._type_source_cache[999999] = ('Bookmark', 'system', 999999, 'system')
         self._test('SET X bookmarked_by Y WHERE X eid %(x)s, Y login "hop"',
                    [('UpdateStep',
                      [('OneFetchStep', [('DISTINCT Any Y WHERE Y login "hop", Y is CWUser', [{'Y': 'CWUser'}])],
@@ -2263,7 +2288,7 @@
                    {'x': 999999})
 
     def test_nonregr12(self):
-        repo._type_source_cache[999999] = ('Note', 'cards', 999999)
+        repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
         self._test('Any X ORDERBY Z DESC WHERE X modification_date Z, E eid %(x)s, E see_also X',
                    [('FetchStep', [('Any X,Z WHERE X modification_date Z, X is Note',
                                     [{'X': 'Note', 'Z': 'Datetime'}])],
@@ -2347,38 +2372,38 @@
                    {'x': self.session.user.eid})
 
     def test_nonregr14_1(self):
-        repo._type_source_cache[999999] = ('CWUser', 'ldap', 999999)
+        repo._type_source_cache[999999] = ('CWUser', 'ldap', 999999, 'ldap')
         self._test('Any X WHERE X eid %(x)s, X owned_by U, U eid %(u)s',
                    [('OneFetchStep', [('Any 999999 WHERE 999999 owned_by 999999', [{}])],
                      None, None, [self.system], {}, [])],
                    {'x': 999999, 'u': 999999})
 
     def test_nonregr14_2(self):
-        repo._type_source_cache[999999] = ('CWUser', 'ldap', 999999)
-        repo._type_source_cache[999998] = ('Note', 'system', 999998)
+        repo._type_source_cache[999999] = ('CWUser', 'ldap', 999999, 'ldap')
+        repo._type_source_cache[999998] = ('Note', 'system', 999998, 'system')
         self._test('Any X WHERE X eid %(x)s, X owned_by U, U eid %(u)s',
                    [('OneFetchStep', [('Any 999998 WHERE 999998 owned_by 999999', [{}])],
                      None, None, [self.system], {}, [])],
                    {'x': 999998, 'u': 999999})
 
     def test_nonregr14_3(self):
-        repo._type_source_cache[999999] = ('CWUser', 'system', 999999)
-        repo._type_source_cache[999998] = ('CWUser', 'ldap', 999998)
+        repo._type_source_cache[999999] = ('CWUser', 'system', 999999, 'system')
+        repo._type_source_cache[999998] = ('CWUser', 'ldap', 999998, 'ldap')
         self._test('Any X WHERE X eid %(x)s, X owned_by U, U eid %(u)s',
                    [('OneFetchStep', [('Any 999998 WHERE 999998 owned_by 999999', [{}])],
                      None, None, [self.system], {}, [])],
                    {'x': 999998, 'u': 999999})
 
     def test_nonregr_identity_no_source_access_1(self):
-        repo._type_source_cache[999999] = ('CWUser', 'ldap', 999998)
+        repo._type_source_cache[999999] = ('CWUser', 'ldap', 999998, 'ldap')
         self._test('Any S WHERE S identity U, S eid %(s)s, U eid %(u)s',
                    [('OneFetchStep', [('Any 999999 WHERE 999999 identity 999999', [{}])],
                      None, None, [self.system], {}, [])],
                    {'s': 999999, 'u': 999999})
 
     def test_nonregr_identity_no_source_access_2(self):
-        repo._type_source_cache[999999] = ('EmailAddress', 'system', 999999)
-        repo._type_source_cache[999998] = ('CWUser', 'ldap', 999998)
+        repo._type_source_cache[999999] = ('EmailAddress', 'system', 999999, 'system')
+        repo._type_source_cache[999998] = ('CWUser', 'ldap', 999998, 'ldap')
         self._test('Any X WHERE O use_email X, ((EXISTS(O identity U)) OR (EXISTS(O in_group G, G name IN("managers", "staff")))) OR (EXISTS(O in_group G2, U in_group G2, NOT G2 name "users")), X eid %(x)s, U eid %(u)s',
                    [('OneFetchStep', [('Any 999999 WHERE O use_email 999999, ((EXISTS(O identity 999998)) OR (EXISTS(O in_group G, G name IN("managers", "staff")))) OR (EXISTS(O in_group G2, 999998 in_group G2, NOT G2 name "users"))',
                                        [{'G': 'CWGroup', 'G2': 'CWGroup', 'O': 'CWUser'}])],
@@ -2386,7 +2411,7 @@
                    {'x': 999999, 'u': 999998})
 
     def test_nonregr_similar_subquery(self):
-        repo._type_source_cache[999999] = ('Personne', 'system', 999999)
+        repo._type_source_cache[999999] = ('Personne', 'system', 999999, 'system')
         self._test('Any T,TD,U,T,UL WITH T,TD,U,UL BEING ('
                    '(Any T,TD,U,UL WHERE X eid %(x)s, T comments X, T content TD, T created_by U?, U login UL)'
                    ' UNION '
@@ -2435,6 +2460,21 @@
                      [])],
                    {'x': 999999})
 
+    def test_nonregr_dont_readd_already_processed_relation(self):
+        self._test('Any WO,D,SO WHERE WO is Note, D tags WO, WO in_state SO',
+                   [('FetchStep',
+                     [('Any WO,SO WHERE WO in_state SO, SO is State, WO is Note',
+                       [{'SO': 'State', 'WO': 'Note'}])],
+                     [self.cards, self.system], None,
+                     {'SO': 'table0.C1', 'WO': 'table0.C0'},
+                     []),
+                    ('OneFetchStep',
+                     [('Any WO,D,SO WHERE D tags WO, D is Tag, SO is State, WO is Note',
+                       [{'D': 'Tag', 'SO': 'State', 'WO': 'Note'}])],
+                     None, None, [self.system],
+                     {'SO': 'table0.C1', 'WO': 'table0.C0'},
+                     [])
+                    ])
 
 class MSPlannerTwoSameExternalSourcesTC(BasePlannerTC):
     """test planner related feature on a 3-sources repository:
@@ -2456,7 +2496,7 @@
 
 
     def test_linked_external_entities(self):
-        repo._type_source_cache[999999] = ('Tag', 'system', 999999)
+        repo._type_source_cache[999999] = ('Tag', 'system', 999999, 'system')
         self._test('Any X,XT WHERE X is Card, X title XT, T tags X, T eid %(t)s',
                    [('FetchStep',
                      [('Any X,XT WHERE X title XT, X is Card', [{'X': 'Card', 'XT': 'String'}])],
@@ -2472,7 +2512,7 @@
                    {'t': 999999})
 
     def test_version_depends_on(self):
-        self.repo._type_source_cache[999999] = ('Note', 'cards', 999999)
+        self.repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
         self._test('Any X,AD,AE WHERE E eid %(x)s, E migrated_from X, X in_state AD, AD name AE',
                    [('FetchStep', [('Any X,AD,AE WHERE X in_state AD, AD name AE, AD is State, X is Note',
                                     [{'AD': 'State', 'AE': 'String', 'X': 'Note'}])],
@@ -2488,7 +2528,7 @@
                    {'x': 999999})
 
     def test_version_crossed_depends_on_1(self):
-        self.repo._type_source_cache[999999] = ('Note', 'cards', 999999)
+        self.repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
         self._test('Any X,AD,AE WHERE E eid %(x)s, E multisource_crossed_rel X, X in_state AD, AD name AE',
                    [('FetchStep', [('Any X,AD,AE WHERE X in_state AD, AD name AE, AD is State, X is Note',
                                     [{'AD': 'State', 'AE': 'String', 'X': 'Note'}])],
@@ -2511,7 +2551,7 @@
                    {'x': 999999})
 
     def test_version_crossed_depends_on_2(self):
-        self.repo._type_source_cache[999999] = ('Note', 'system', 999999)
+        self.repo._type_source_cache[999999] = ('Note', 'system', 999999, 'system')
         self._test('Any X,AD,AE WHERE E eid %(x)s, E multisource_crossed_rel X, X in_state AD, AD name AE',
                    [('FetchStep', [('Any X,AD,AE WHERE X in_state AD, AD name AE, AD is State, X is Note',
                                     [{'AD': 'State', 'AE': 'String', 'X': 'Note'}])],
@@ -2587,7 +2627,7 @@
                        )
 
     def test_nonregr_dont_cross_rel_source_filtering_1(self):
-        self.repo._type_source_cache[999999] = ('Note', 'cards', 999999)
+        self.repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
         self._test('Any S WHERE E eid %(x)s, E in_state S, NOT S name "moved"',
                    [('OneFetchStep', [('Any S WHERE 999999 in_state S, NOT S name "moved", S is State',
                                        [{'S': 'State'}])],
@@ -2596,7 +2636,7 @@
                    {'x': 999999})
 
     def test_nonregr_dont_cross_rel_source_filtering_2(self):
-        self.repo._type_source_cache[999999] = ('Note', 'cards', 999999)
+        self.repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
         self._test('Any X,AA,AB WHERE E eid %(x)s, E in_state X, X name AA, X modification_date AB',
                    [('OneFetchStep', [('Any X,AA,AB WHERE 999999 in_state X, X name AA, X modification_date AB, X is State',
                                        [{'AA': 'String', 'AB': 'Datetime', 'X': 'State'}])],
@@ -2605,7 +2645,7 @@
                    {'x': 999999})
 
     def test_nonregr_eid_query(self):
-        self.repo._type_source_cache[999999] = ('Note', 'cards', 999999)
+        self.repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
         self._test('Any X WHERE X eid 999999',
                    [('OneFetchStep', [('Any 999999', [{}])],
                      None, None, [self.system], {}, []
@@ -2671,6 +2711,29 @@
                       ])
                     ])
 
+    def test_remove_from_deleted_source_1(self):
+        self.repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
+        self._test('Note X WHERE X eid 999999, NOT X cw_source Y',
+                   [('OneFetchStep',
+                     [('Any 999999 WHERE NOT EXISTS(999999 cw_source Y)',
+                       [{'Y': 'CWSource'}])],
+                     None, None, [self.system], {}, [])
+                    ])
+
+    def test_remove_from_deleted_source_2(self):
+        self.repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
+        self.repo._type_source_cache[999998] = ('Note', 'cards', 999998, 'cards')
+        self._test('Note X WHERE X eid IN (999998, 999999), NOT X cw_source Y',
+                   [('FetchStep',
+                     [('Any X WHERE X eid IN(999998, 999999), X is Note',
+                       [{'X': 'Note'}])],
+                     [self.cards], None, {'X': 'table0.C0'}, []),
+                    ('OneFetchStep',
+                     [('Any X WHERE NOT EXISTS(X cw_source Y, Y is CWSource), X is Note',
+                       [{'X': 'Note', 'Y': 'CWSource'}])],
+                         None, None, [self.system],{'X': 'table0.C0'}, [])
+                        ])
+
 
 class FakeVCSSource(AbstractSource):
     uri = 'ccc'
@@ -2707,17 +2770,17 @@
                     ])
 
     def test_fully_simplified_extsource(self):
-        self.repo._type_source_cache[999998] = ('Note', 'vcs', 999998)
-        self.repo._type_source_cache[999999] = ('Note', 'vcs', 999999)
+        self.repo._type_source_cache[999998] = ('Note', 'vcs', 999998, 'vcs')
+        self.repo._type_source_cache[999999] = ('Note', 'vcs', 999999, 'vcs')
         self._test('Any X, Y WHERE NOT X multisource_rel Y, X eid 999998, Y eid 999999',
                    [('OneFetchStep', [('Any 999998,999999 WHERE NOT EXISTS(999998 multisource_rel 999999)', [{}])],
                      None, None, [self.vcs], {}, [])
                     ])
 
     def test_nonregr_fully_simplified_extsource(self):
-        self.repo._type_source_cache[999998] = ('Note', 'vcs', 999998)
-        self.repo._type_source_cache[999999] = ('Note', 'vcs', 999999)
-        self.repo._type_source_cache[1000000] = ('Note', 'system', 1000000)
+        self.repo._type_source_cache[999998] = ('Note', 'vcs', 999998, 'vcs')
+        self.repo._type_source_cache[999999] = ('Note', 'vcs', 999999, 'vcs')
+        self.repo._type_source_cache[1000000] = ('Note', 'system', 1000000, 'system')
         self._test('DISTINCT Any T,FALSE,L,M WHERE L eid 1000000, M eid 999999, T eid 999998',
                    [('OneFetchStep', [('DISTINCT Any 999998,FALSE,1000000,999999', [{}])],
                      None, None, [self.system], {}, [])
--- a/server/test/unittest_multisources.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/server/test/unittest_multisources.py	Fri Dec 09 12:08:27 2011 +0100
@@ -30,7 +30,7 @@
 class ExternalSource2Configuration(TestServerConfiguration):
     sourcefile = 'sources_multi'
 
-MTIME = datetime.now() - timedelta(0, 10)
+MTIME = datetime.utcnow() - timedelta(0, 10)
 
 EXTERN_SOURCE_CFG = u'''
 pyro-ns-id = extern
@@ -160,11 +160,11 @@
         # since they are orderd by eid, we know the 3 first one is coming from the system source
         # and the others from external source
         self.assertEqual(rset.get_entity(0, 0).cw_metainformation(),
-                          {'source': {'type': 'native', 'uri': 'system'},
+                          {'source': {'type': 'native', 'uri': 'system', 'use-cwuri-as-url': False},
                            'type': u'Card', 'extid': None})
         externent = rset.get_entity(3, 0)
         metainf = externent.cw_metainformation()
-        self.assertEqual(metainf['source'], {'type': 'pyrorql', 'base-url': 'http://extern.org/', 'uri': 'extern'})
+        self.assertEqual(metainf['source'], {'type': 'pyrorql', 'base-url': 'http://extern.org/', 'uri': 'extern', 'use-cwuri-as-url': False})
         self.assertEqual(metainf['type'], 'Card')
         self.assert_(metainf['extid'])
         etype = self.sexecute('Any ETN WHERE X is ET, ET name ETN, X eid %(x)s',
@@ -381,6 +381,16 @@
     def test_nonregr3(self):
         self.sexecute('DELETE Card X WHERE X eid %(x)s, NOT X multisource_inlined_rel Y', {'x': self.ic1})
 
+    def test_nonregr4(self):
+        self.sexecute('Any X,S,U WHERE X in_state S, X todo_by U')
+
+    def test_delete_source(self):
+        req = self.request()
+        req.execute('DELETE CWSource S WHERE S name "extern"')
+        self.commit()
+        cu = self.session.system_sql("SELECT * FROM entities WHERE source='extern'")
+        self.failIf(cu.fetchall())
+
 if __name__ == '__main__':
     from logilab.common.testlib import unittest_main
     unittest_main()
--- a/server/test/unittest_querier.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/server/test/unittest_querier.py	Fri Dec 09 12:08:27 2011 +0100
@@ -311,6 +311,14 @@
         seid = self.execute('State X WHERE X name "deactivated"')[0][0]
         rset = self.execute('Any U,L,S GROUPBY U,L,S WHERE X in_state S, U login L, S eid %s' % seid)
 
+    def test_select_groupby_funccall(self):
+        rset = self.execute('Any YEAR(CD), COUNT(X) GROUPBY YEAR(CD) WHERE X is CWUser, X creation_date CD')
+        self.assertListEqual(rset.rows, [[date.today().year, 2]])
+
+    def test_select_groupby_colnumber(self):
+        rset = self.execute('Any YEAR(CD), COUNT(X) GROUPBY 1 WHERE X is CWUser, X creation_date CD')
+        self.assertListEqual(rset.rows, [[date.today().year, 2]])
+
     def test_select_complex_orderby(self):
         rset1 = self.execute('Any N ORDERBY N WHERE X name N')
         self.assertEqual(sorted(rset1.rows), rset1.rows)
@@ -435,7 +443,7 @@
         self.execute("INSERT Personne X: X nom 'foo', X datenaiss %(d)s",
                      {'d': datetime(2001, 2,3, 12,13)})
         test_data = [('YEAR', 2001), ('MONTH', 2), ('DAY', 3),
-                     ('HOUR', 12), ('MINUTE', 13)]
+                     ('HOUR', 12), ('MINUTE', 13), ('WEEKDAY', 6)]
         for funcname, result in test_data:
             rset = self.execute('Any %s(D) WHERE X is Personne, X datenaiss D'
                                 % funcname)
@@ -443,6 +451,15 @@
             self.assertEqual(rset.rows[0][0], result)
             self.assertEqual(rset.description, [('Int',)])
 
+    def test_regexp_based_pattern_matching(self):
+        peid1 = self.execute("INSERT Personne X: X nom 'bidule'")[0][0]
+        peid2 = self.execute("INSERT Personne X: X nom 'cidule'")[0][0]
+        rset = self.execute('Any X WHERE X is Personne, X nom REGEXP "^b"')
+        self.assertEqual(len(rset.rows), 1, rset.rows)
+        self.assertEqual(rset.rows[0][0], peid1)
+        rset = self.execute('Any X WHERE X is Personne, X nom REGEXP "idu"')
+        self.assertEqual(len(rset.rows), 2, rset.rows)
+
     def test_select_aggregat_count(self):
         rset = self.execute('Any COUNT(X)')
         self.assertEqual(len(rset.rows), 1)
@@ -768,7 +785,7 @@
     def test_select_boolean(self):
         rset = self.execute('Any N WHERE X is CWEType, X name N, X final %(val)s',
                             {'val': True})
-        self.assertEqual(sorted(r[0] for r in rset.rows), ['Boolean', 'Bytes',
+        self.assertEqual(sorted(r[0] for r in rset.rows), ['BigInt', 'Boolean', 'Bytes',
                                                            'Date', 'Datetime',
                                                            'Decimal', 'Float',
                                                            'Int', 'Interval',
@@ -776,13 +793,19 @@
                                                            'TZDatetime', 'TZTime',
                                                            'Time'])
         rset = self.execute('Any N WHERE X is CWEType, X name N, X final TRUE')
-        self.assertEqual(sorted(r[0] for r in rset.rows), ['Boolean', 'Bytes',
+        self.assertEqual(sorted(r[0] for r in rset.rows), ['BigInt', 'Boolean', 'Bytes',
                                                            'Date', 'Datetime',
                                                            'Decimal', 'Float',
                                                            'Int', 'Interval',
                                                            'Password', 'String',
                                                            'TZDatetime', 'TZTime',
                                                            'Time'])
+        req = self.session
+        req.create_entity('Personne', nom=u'louis', test=True)
+        self.assertEqual(len(req.execute('Any X WHERE X test %(val)s', {'val': True})), 1)
+        self.assertEqual(len(req.execute('Any X WHERE X test TRUE')), 1)
+        self.assertEqual(len(req.execute('Any X WHERE X test %(val)s', {'val': False})), 0)
+        self.assertEqual(len(req.execute('Any X WHERE X test FALSE')), 0)
 
     def test_select_constant(self):
         rset = self.execute('Any X, "toto" ORDERBY X WHERE X is CWGroup')
@@ -1099,7 +1122,7 @@
         #'INSERT Email X: X messageid "<1234>", X subject "test", X sender Y, X recipients Y'
         eeid, = self.o.execute(s, 'INSERT Email X: X messageid "<1234>", X subject "test", X sender Y, X recipients Y WHERE Y is EmailAddress')[0]
         self.o.execute(s, "DELETE Email X")
-        sqlc = s.pool['system']
+        sqlc = s.cnxset['system']
         sqlc.execute('SELECT * FROM recipients_relation')
         self.assertEqual(len(sqlc.fetchall()), 0)
         sqlc.execute('SELECT * FROM owned_by_relation WHERE eid_from=%s'%eeid)
@@ -1212,7 +1235,7 @@
         self.assertEqual(rset.description, [('CWUser',)])
         self.assertRaises(Unauthorized,
                           self.execute, "Any P WHERE X is CWUser, X login 'bob', X upassword P")
-        cursor = self.pool['system']
+        cursor = self.cnxset['system']
         cursor.execute("SELECT %supassword from %sCWUser WHERE %slogin='bob'"
                        % (SQL_PREFIX, SQL_PREFIX, SQL_PREFIX))
         passwd = str(cursor.fetchone()[0])
@@ -1227,7 +1250,7 @@
         self.assertEqual(rset.description[0][0], 'CWUser')
         rset = self.execute("SET X upassword %(pwd)s WHERE X is CWUser, X login 'bob'",
                             {'pwd': 'tutu'})
-        cursor = self.pool['system']
+        cursor = self.cnxset['system']
         cursor.execute("SELECT %supassword from %sCWUser WHERE %slogin='bob'"
                        % (SQL_PREFIX, SQL_PREFIX, SQL_PREFIX))
         passwd = str(cursor.fetchone()[0])
--- a/server/test/unittest_repository.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/server/test/unittest_repository.py	Fri Dec 09 12:08:27 2011 +0100
@@ -24,6 +24,7 @@
 import sys
 import threading
 import time
+import logging
 from copy import deepcopy
 from datetime import datetime
 
@@ -62,13 +63,13 @@
             table = SQL_PREFIX + 'CWEType'
             namecol = SQL_PREFIX + 'name'
             finalcol = SQL_PREFIX + 'final'
-            self.session.set_pool()
+            self.session.set_cnxset()
             cu = self.session.system_sql('SELECT %s FROM %s WHERE %s is NULL' % (
                 namecol, table, finalcol))
             self.assertEqual(cu.fetchall(), [])
             cu = self.session.system_sql('SELECT %s FROM %s WHERE %s=%%(final)s ORDER BY %s'
-                                         % (namecol, table, finalcol, namecol), {'final': 'TRUE'})
-            self.assertEqual(cu.fetchall(), [(u'Boolean',), (u'Bytes',),
+                                         % (namecol, table, finalcol, namecol), {'final': True})
+            self.assertEqual(cu.fetchall(), [(u'BigInt',), (u'Boolean',), (u'Bytes',),
                                              (u'Date',), (u'Datetime',),
                                              (u'Decimal',),(u'Float',),
                                              (u'Int',),
@@ -259,7 +260,7 @@
         cnxid = repo.connect(self.admlogin, password=self.admpassword)
         # rollback state change which trigger TrInfo insertion
         session = repo._get_session(cnxid)
-        session.set_pool()
+        session.set_cnxset()
         user = session.user
         user.cw_adapt_to('IWorkflowable').fire_transition('deactivate')
         rset = repo.execute(cnxid, 'TrInfo T WHERE T wf_info_for X, X eid %(x)s', {'x': user.eid})
@@ -292,7 +293,7 @@
         try:
             with self.assertRaises(Exception) as cm:
                 run_transaction()
-            self.assertEqual(str(cm.exception), 'try to access pool on a closed session')
+            self.assertEqual(str(cm.exception), 'try to access connections set on a closed session %s' % cnxid)
         finally:
             t.join()
 
@@ -365,7 +366,6 @@
             schema = cnx.get_schema()
             self.failUnless(cnx.vreg)
             self.failUnless('etypes'in cnx.vreg)
-            self.assertEqual(schema.__hashmode__, None)
             cu = cnx.cursor()
             rset = cu.execute('Any U,G WHERE U in_group G')
             user = iter(rset.entities()).next()
@@ -382,9 +382,9 @@
     def test_internal_api(self):
         repo = self.repo
         cnxid = repo.connect(self.admlogin, password=self.admpassword)
-        session = repo._get_session(cnxid, setpool=True)
+        session = repo._get_session(cnxid, setcnxset=True)
         self.assertEqual(repo.type_and_source_from_eid(2, session),
-                         ('CWGroup', 'system', None))
+                         ('CWGroup', 'system', None, 'system'))
         self.assertEqual(repo.type_from_eid(2, session), 'CWGroup')
         self.assertEqual(repo.source_from_eid(2, session).uri, 'system')
         self.assertEqual(repo.eid2extid(repo.system_source, 2, session), None)
@@ -394,7 +394,10 @@
 
     def test_public_api(self):
         self.assertEqual(self.repo.get_schema(), self.repo.schema)
-        self.assertEqual(self.repo.source_defs(), {'system': {'type': 'native', 'uri': 'system'}})
+        self.assertEqual(self.repo.source_defs(), {'system': {'type': 'native',
+                                                              'uri': 'system',
+                                                              'use-cwuri-as-url': False}
+                                                  })
         # .properties() return a result set
         self.assertEqual(self.repo.properties().rql, 'Any K,V WHERE P is CWProperty,P pkey K, P value V, NOT P for_user U')
 
@@ -402,7 +405,7 @@
         repo = self.repo
         cnxid = repo.connect(self.admlogin, password=self.admpassword)
         self.assertEqual(repo.user_info(cnxid), (6, 'admin', set([u'managers']), {}))
-        self.assertEqual(repo.describe(cnxid, 2), (u'CWGroup', u'system', None))
+        self.assertEqual(repo.describe(cnxid, 2), (u'CWGroup', u'system', None, 'system'))
         repo.close(cnxid)
         self.assertRaises(BadConnectionId, repo.user_info, cnxid)
         self.assertRaises(BadConnectionId, repo.describe, cnxid, 1)
@@ -519,38 +522,39 @@
 class DataHelpersTC(CubicWebTC):
 
     def test_create_eid(self):
-        self.session.set_pool()
+        self.session.set_cnxset()
         self.assert_(self.repo.system_source.create_eid(self.session))
 
     def test_source_from_eid(self):
-        self.session.set_pool()
+        self.session.set_cnxset()
         self.assertEqual(self.repo.source_from_eid(1, self.session),
                           self.repo.sources_by_uri['system'])
 
     def test_source_from_eid_raise(self):
-        self.session.set_pool()
+        self.session.set_cnxset()
         self.assertRaises(UnknownEid, self.repo.source_from_eid, -2, self.session)
 
     def test_type_from_eid(self):
-        self.session.set_pool()
+        self.session.set_cnxset()
         self.assertEqual(self.repo.type_from_eid(2, self.session), 'CWGroup')
 
     def test_type_from_eid_raise(self):
-        self.session.set_pool()
+        self.session.set_cnxset()
         self.assertRaises(UnknownEid, self.repo.type_from_eid, -2, self.session)
 
     def test_add_delete_info(self):
         entity = self.repo.vreg['etypes'].etype_class('Personne')(self.session)
         entity.eid = -1
         entity.complete = lambda x: None
-        self.session.set_pool()
+        self.session.set_cnxset()
         self.repo.add_info(self.session, entity, self.repo.system_source)
         cu = self.session.system_sql('SELECT * FROM entities WHERE eid = -1')
         data = cu.fetchall()
-        self.assertIsInstance(data[0][3], datetime)
+        self.assertIsInstance(data[0][4], datetime)
         data[0] = list(data[0])
-        data[0][3] = None
-        self.assertEqual(tuplify(data), [(-1, 'Personne', 'system', None, None)])
+        data[0][4] = None
+        self.assertEqual(tuplify(data), [(-1, 'Personne', 'system', 'system',
+                                          None, None)])
         self.repo.delete_info(self.session, entity, 'system', None)
         #self.repo.commit()
         cu = self.session.system_sql('SELECT * FROM entities WHERE eid = -1')
@@ -566,7 +570,7 @@
         self.commit()
         ts = datetime.now()
         self.assertEqual(len(self.execute('Personne X WHERE X has_text "tutu"')), 1)
-        self.session.set_pool()
+        self.session.set_cnxset()
         cu = self.session.system_sql('SELECT mtime, eid FROM entities WHERE eid = %s' % eidp)
         omtime = cu.fetchone()[0]
         # our sqlite datetime adapter is ignore seconds fraction, so we have to
@@ -575,7 +579,7 @@
         self.execute('SET X nom "tata" WHERE X eid %(x)s', {'x': eidp})
         self.commit()
         self.assertEqual(len(self.execute('Personne X WHERE X has_text "tutu"')), 1)
-        self.session.set_pool()
+        self.session.set_cnxset()
         cu = self.session.system_sql('SELECT mtime FROM entities WHERE eid = %s' % eidp)
         mtime = cu.fetchone()[0]
         self.failUnless(omtime < mtime)
@@ -646,7 +650,7 @@
         CubicWebTC.setUp(self)
         CALLED[:] = ()
 
-    def _after_relation_hook(self, pool, fromeid, rtype, toeid):
+    def _after_relation_hook(self, cnxset, fromeid, rtype, toeid):
         self.called.append((fromeid, rtype, toeid))
 
     def test_inline_relation(self):
@@ -704,13 +708,18 @@
 
 
 class PerformanceTest(CubicWebTC):
-    def setup_database(self):
-        import logging
+    def setUp(self):
+        super(PerformanceTest, self).setUp()
         logger = logging.getLogger('cubicweb.session')
         #logger.handlers = [logging.StreamHandler(sys.stdout)]
         logger.setLevel(logging.INFO)
         self.info = logger.info
 
+    def tearDown(self):
+        super(PerformanceTest, self).tearDown()
+        logger = logging.getLogger('cubicweb.session')
+        logger.setLevel(logging.CRITICAL)
+
     def test_composite_deletion(self):
         req = self.request()
         personnes = []
@@ -807,6 +816,7 @@
         req.cnx.commit()
         t1 = time.time()
         self.info('add relations: %.2gs', t1-t0)
+
     def test_session_add_relation_inlined(self):
         """ to be compared with test_session_add_relations"""
         req = self.request()
@@ -847,7 +857,7 @@
         p2 = req.create_entity('Personne', nom=u'Florent')
         w = req.create_entity('Affaire', ref=u'wc')
         w.set_relations(todo_by=[p1,p2])
-        w.clear_all_caches()
+        w.cw_clear_all_caches()
         self.commit()
         self.assertEqual(len(w.todo_by), 1)
         self.assertEqual(w.todo_by[0].eid, p2.eid)
@@ -860,7 +870,7 @@
         w.set_relations(todo_by=p1)
         self.commit()
         w.set_relations(todo_by=p2)
-        w.clear_all_caches()
+        w.cw_clear_all_caches()
         self.commit()
         self.assertEqual(len(w.todo_by), 1)
         self.assertEqual(w.todo_by[0].eid, p2.eid)
--- a/server/test/unittest_rql2sql.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/server/test/unittest_rql2sql.py	Fri Dec 09 12:08:27 2011 +0100
@@ -88,7 +88,6 @@
     ]
 
 BASIC = [
-
     ("Any AS WHERE AS is Affaire",
      '''SELECT _AS.cw_eid
 FROM cw_Affaire AS _AS'''),
@@ -201,7 +200,12 @@
     ('Any X WHERE  X title V, NOT X wikiid V, NOT X title "parent", X is Card',
      '''SELECT _X.cw_eid
 FROM cw_Card AS _X
-WHERE NOT (_X.cw_wikiid=_X.cw_title) AND NOT (_X.cw_title=parent)''')
+WHERE NOT (_X.cw_wikiid=_X.cw_title) AND NOT (_X.cw_title=parent)'''),
+
+    ("Any -AS WHERE AS is Affaire",
+     '''SELECT -_AS.cw_eid
+FROM cw_Affaire AS _AS'''),
+
 ]
 
 BASIC_WITH_LIMIT = [
@@ -550,6 +554,15 @@
 GROUP BY rel_todo_by0.eid_to
 ORDER BY 2 DESC'''),
 
+    ('Any R2 WHERE R2 concerne R, R eid RE, R2 eid > RE',
+     '''SELECT _R2.eid
+FROM concerne_relation AS rel_concerne0, entities AS _R2
+WHERE _R2.eid=rel_concerne0.eid_from AND _R2.eid>rel_concerne0.eid_to'''),
+
+    ('Note X WHERE X eid IN (999998, 999999), NOT X cw_source Y',
+     '''SELECT _X.cw_eid
+FROM cw_Note AS _X
+WHERE _X.cw_eid IN(999998, 999999) AND NOT (EXISTS(SELECT 1 FROM cw_source_relation AS rel_cw_source0 WHERE rel_cw_source0.eid_from=_X.cw_eid))'''),
     ]
 
 ADVANCED_WITH_GROUP_CONCAT = [
@@ -798,6 +811,11 @@
 
 
 OUTER_JOIN = [
+
+    ('Any U,G WHERE U login L, G name L?, G is CWGroup',
+     '''SELECT _U.cw_eid, _G.cw_eid
+FROM cw_CWUser AS _U LEFT OUTER JOIN cw_CWGroup AS _G ON (_G.cw_name=_U.cw_login)'''),
+
     ('Any X,S WHERE X travaille S?',
      '''SELECT _X.cw_eid, rel_travaille0.eid_to
 FROM cw_Personne AS _X LEFT OUTER JOIN travaille_relation AS rel_travaille0 ON (rel_travaille0.eid_from=_X.cw_eid)'''
@@ -958,8 +976,31 @@
     ('Any CASE, CALIBCFG, CFG '
      'WHERE CASE eid 1, CFG ecrit_par CASE, CALIBCFG? ecrit_par CASE',
      '''SELECT _CFG.cw_ecrit_par, _CALIBCFG.cw_eid, _CFG.cw_eid
-FROM cw_Note AS _CFG LEFT OUTER JOIN cw_Note AS _CALIBCFG ON (_CALIBCFG.cw_ecrit_par=_CFG.cw_ecrit_par)
+FROM cw_Note AS _CFG LEFT OUTER JOIN cw_Note AS _CALIBCFG ON (_CALIBCFG.cw_ecrit_par=1)
 WHERE _CFG.cw_ecrit_par=1'''),
+
+    ('Any U,G WHERE U login UL, G name GL, G is CWGroup HAVING UPPER(UL)=UPPER(GL)?',
+     '''SELECT _U.cw_eid, _G.cw_eid
+FROM cw_CWUser AS _U LEFT OUTER JOIN cw_CWGroup AS _G ON (UPPER(_U.cw_login)=UPPER(_G.cw_name))'''),
+
+    ('Any U,G WHERE U login UL, G name GL, G is CWGroup HAVING UPPER(UL)?=UPPER(GL)',
+     '''SELECT _U.cw_eid, _G.cw_eid
+FROM cw_CWGroup AS _G LEFT OUTER JOIN cw_CWUser AS _U ON (UPPER(_U.cw_login)=UPPER(_G.cw_name))'''),
+
+    ('Any U,G WHERE U login UL, G name GL, G is CWGroup HAVING UPPER(UL)?=UPPER(GL)?',
+     '''SELECT _U.cw_eid, _G.cw_eid
+FROM cw_CWUser AS _U FULL OUTER JOIN cw_CWGroup AS _G ON (UPPER(_U.cw_login)=UPPER(_G.cw_name))'''),
+
+    ('Any H, COUNT(X), SUM(XCE)/1000 '
+     'WHERE X type "0", X date XSCT, X para XCE, X? ecrit_par F, F eid 999999, F is Personne, '
+     'DH is Affaire, DH ref H '
+     'HAVING XSCT?=H',
+     '''SELECT _DH.cw_ref, COUNT(_X.cw_eid), (SUM(_X.cw_para) / 1000)
+FROM cw_Affaire AS _DH LEFT OUTER JOIN cw_Note AS _X ON (_X.cw_date=_DH.cw_ref AND _X.cw_type=0 AND _X.cw_ecrit_par=999999)'''),
+
+    ('Any C WHERE X ecrit_par C?, X? inline1 F, F eid 1, X type XT, Z is Personne, Z nom ZN HAVING ZN=XT?',
+     '''SELECT _X.cw_ecrit_par
+FROM cw_Personne AS _Z LEFT OUTER JOIN cw_Note AS _X ON (_Z.cw_nom=_X.cw_type AND _X.cw_inline1=1)'''),
     ]
 
 VIRTUAL_VARS = [
@@ -1355,11 +1396,28 @@
                     '''SELECT CAST(EXTRACT(MONTH from _P.cw_creation_date) AS INTEGER)
 FROM cw_Personne AS _P''')
 
+    def test_weekday_extraction(self):
+        self._check("Any WEEKDAY(D) WHERE P is Personne, P creation_date D",
+                    '''SELECT (CAST(EXTRACT(DOW from _P.cw_creation_date) AS INTEGER) + 1)
+FROM cw_Personne AS _P''')
+
     def test_substring(self):
         self._check("Any SUBSTRING(N, 1, 1) WHERE P nom N, P is Personne",
                     '''SELECT SUBSTR(_P.cw_nom, 1, 1)
 FROM cw_Personne AS _P''')
 
+    def test_cast(self):
+        self._check("Any CAST(String, P) WHERE P is Personne",
+                    '''SELECT CAST(_P.cw_eid AS text)
+FROM cw_Personne AS _P''')
+
+    def test_regexp(self):
+        self._check("Any X WHERE X login REGEXP '[0-9].*'",
+                    '''SELECT _X.cw_eid
+FROM cw_CWUser AS _X
+WHERE _X.cw_login ~ [0-9].*
+''')
+
     def test_parser_parse(self):
         for t in self._parse(PARSER):
             yield t
@@ -1471,6 +1529,12 @@
 FROM (SELECT MAX(_A.cw_ordernum) AS C0
 FROM cw_CWAttribute AS _A) AS _T0, cw_CWAttribute AS _A
 WHERE _A.cw_ordernum=_T0.C0'''),
+
+            ('Any O1 HAVING O1=O2? WITH O1 BEING (Any MAX(O) WHERE A ordernum O, A is CWAttribute), O2 BEING (Any MAX(O) WHERE A ordernum O, A is CWRelation)',
+             '''SELECT _T0.C0
+FROM (SELECT MAX(_A.cw_ordernum) AS C0
+FROM cw_CWAttribute AS _A) AS _T0 LEFT OUTER JOIN (SELECT MAX(_A.cw_ordernum) AS C0
+FROM cw_CWRelation AS _A) AS _T1 ON (_T0.C0=_T1.C0)'''),
             )):
             yield t
 
@@ -1622,12 +1686,26 @@
                     '''SELECT (A || _X.cw_ref)
 FROM cw_Affaire AS _X''')
 
-    def test_or_having_fake_terms(self):
+    def test_or_having_fake_terms_base(self):
         self._check('Any X WHERE X is CWUser, X creation_date D HAVING YEAR(D) = "2010" OR D = NULL',
                     '''SELECT _X.cw_eid
 FROM cw_CWUser AS _X
 WHERE ((CAST(EXTRACT(YEAR from _X.cw_creation_date) AS INTEGER)=2010) OR (_X.cw_creation_date IS NULL))''')
 
+    def test_or_having_fake_terms_exists(self):
+        # crash with rql <= 0.29.0
+        self._check('Any X WHERE X is CWUser, EXISTS(B bookmarked_by X, B creation_date D) HAVING D=2010 OR D=NULL, D=1 OR D=NULL',
+                    '''SELECT _X.cw_eid
+FROM cw_CWUser AS _X
+WHERE EXISTS(SELECT 1 FROM bookmarked_by_relation AS rel_bookmarked_by0, cw_Bookmark AS _B WHERE rel_bookmarked_by0.eid_from=_B.cw_eid AND rel_bookmarked_by0.eid_to=_X.cw_eid AND ((_B.cw_creation_date=1) OR (_B.cw_creation_date IS NULL)) AND ((_B.cw_creation_date=2010) OR (_B.cw_creation_date IS NULL)))''')
+
+    def test_or_having_fake_terms_nocrash(self):
+        # crash with rql <= 0.29.0
+        self._check('Any X WHERE X is CWUser, X creation_date D HAVING D=2010 OR D=NULL, D=1 OR D=NULL',
+                    '''SELECT _X.cw_eid
+FROM cw_CWUser AS _X
+WHERE ((_X.cw_creation_date=1) OR (_X.cw_creation_date IS NULL)) AND ((_X.cw_creation_date=2010) OR (_X.cw_creation_date IS NULL))''')
+
     def test_not_no_where(self):
         # XXX will check if some in_group relation exists, that's it.
         # We  can't actually know if we want to check if there are some
@@ -1669,21 +1747,29 @@
 class SqlServer2005SQLGeneratorTC(PostgresSQLGeneratorTC):
     backend = 'sqlserver2005'
     def _norm_sql(self, sql):
-        return sql.strip().replace(' SUBSTR', ' SUBSTRING').replace(' || ', ' + ').replace(' ILIKE ', ' LIKE ')
+        return sql.strip().replace(' SUBSTR', ' SUBSTRING').replace(' || ', ' + ').replace(' ILIKE ', ' LIKE ').replace('TRUE', '1').replace('FALSE', '0')
 
     def test_has_text(self):
         for t in self._parse(HAS_TEXT_LG_INDEXER):
             yield t
 
-    def test_or_having_fake_terms(self):
+    def test_regexp(self):
+        self.skipTest('regexp-based pattern matching not implemented in sqlserver')
+
+    def test_or_having_fake_terms_base(self):
         self._check('Any X WHERE X is CWUser, X creation_date D HAVING YEAR(D) = "2010" OR D = NULL',
                     '''SELECT _X.cw_eid
 FROM cw_CWUser AS _X
-WHERE ((YEAR(_X.cw_creation_date)=2010) OR (_X.cw_creation_date IS NULL))''')
+WHERE ((DATEPART(YEAR, _X.cw_creation_date)=2010) OR (_X.cw_creation_date IS NULL))''')
 
     def test_date_extraction(self):
         self._check("Any MONTH(D) WHERE P is Personne, P creation_date D",
-                    '''SELECT MONTH(_P.cw_creation_date)
+                    '''SELECT DATEPART(MONTH, _P.cw_creation_date)
+FROM cw_Personne AS _P''')
+
+    def test_weekday_extraction(self):
+        self._check("Any WEEKDAY(D) WHERE P is Personne, P creation_date D",
+                    '''SELECT DATEPART(WEEKDAY, _P.cw_creation_date)
 FROM cw_Personne AS _P''')
 
     def test_symmetric(self):
@@ -1813,14 +1899,19 @@
         for t in self._parse(WITH_LIMIT):# + ADVANCED_WITH_LIMIT_OR_ORDERBY):
             yield t
 
+    def test_cast(self):
+        self._check("Any CAST(String, P) WHERE P is Personne",
+                    '''SELECT CAST(_P.cw_eid AS nvarchar(max))
+FROM cw_Personne AS _P''')
+
     def test_groupby_orderby_insertion_dont_modify_intention(self):
         self._check('Any YEAR(XECT)*100+MONTH(XECT), COUNT(X),SUM(XCE),AVG(XSCT-XECT) '
                     'GROUPBY YEAR(XECT),MONTH(XECT) ORDERBY 1 '
                     'WHERE X creation_date XSCT, X modification_date XECT, '
                     'X ordernum XCE, X is CWAttribute',
-                    '''SELECT ((YEAR(_X.cw_modification_date) * 100) + MONTH(_X.cw_modification_date)), COUNT(_X.cw_eid), SUM(_X.cw_ordernum), AVG((_X.cw_creation_date - _X.cw_modification_date))
+                    '''SELECT ((DATEPART(YEAR, _X.cw_modification_date) * 100) + DATEPART(MONTH, _X.cw_modification_date)), COUNT(_X.cw_eid), SUM(_X.cw_ordernum), AVG((_X.cw_creation_date - _X.cw_modification_date))
 FROM cw_CWAttribute AS _X
-GROUP BY YEAR(_X.cw_modification_date),MONTH(_X.cw_modification_date)
+GROUP BY DATEPART(YEAR, _X.cw_modification_date),DATEPART(MONTH, _X.cw_modification_date)
 ORDER BY 1'''),
 
 
@@ -1828,13 +1919,27 @@
     backend = 'sqlite'
 
     def _norm_sql(self, sql):
-        return sql.strip().replace(' ILIKE ', ' LIKE ')
+        return sql.strip().replace(' ILIKE ', ' LIKE ').replace('TRUE', '1').replace('FALSE', '0')
 
     def test_date_extraction(self):
         self._check("Any MONTH(D) WHERE P is Personne, P creation_date D",
                     '''SELECT MONTH(_P.cw_creation_date)
 FROM cw_Personne AS _P''')
 
+    def test_weekday_extraction(self):
+        # custom impl. in cw.server.sqlutils
+        self._check("Any WEEKDAY(D) WHERE P is Personne, P creation_date D",
+                    '''SELECT WEEKDAY(_P.cw_creation_date)
+FROM cw_Personne AS _P''')
+
+    def test_regexp(self):
+        self._check("Any X WHERE X login REGEXP '[0-9].*'",
+                    '''SELECT _X.cw_eid
+FROM cw_CWUser AS _X
+WHERE _X.cw_login REGEXP [0-9].*
+''')
+
+
     def test_union(self):
         for t in self._parse((
             ('(Any N ORDERBY 1 WHERE X name N, X is State)'
@@ -1947,7 +2052,7 @@
             yield t
 
 
-    def test_or_having_fake_terms(self):
+    def test_or_having_fake_terms_base(self):
         self._check('Any X WHERE X is CWUser, X creation_date D HAVING YEAR(D) = "2010" OR D = NULL',
                     '''SELECT _X.cw_eid
 FROM cw_CWUser AS _X
@@ -1985,6 +2090,23 @@
                     '''SELECT EXTRACT(MONTH from _P.cw_creation_date)
 FROM cw_Personne AS _P''')
 
+    def test_weekday_extraction(self):
+        self._check("Any WEEKDAY(D) WHERE P is Personne, P creation_date D",
+                    '''SELECT DAYOFWEEK(_P.cw_creation_date)
+FROM cw_Personne AS _P''')
+
+    def test_cast(self):
+        self._check("Any CAST(String, P) WHERE P is Personne",
+                    '''SELECT CAST(_P.cw_eid AS mediumtext)
+FROM cw_Personne AS _P''')
+
+    def test_regexp(self):
+        self._check("Any X WHERE X login REGEXP '[0-9].*'",
+                    '''SELECT _X.cw_eid
+FROM cw_CWUser AS _X
+WHERE _X.cw_login REGEXP [0-9].*
+''')
+
     def test_from_clause_needed(self):
         queries = [("Any 1 WHERE EXISTS(T is CWGroup, T name 'managers')",
                     '''SELECT 1
@@ -2046,7 +2168,7 @@
 FROM cw_Personne AS _P''')
 
 
-    def test_or_having_fake_terms(self):
+    def test_or_having_fake_terms_base(self):
         self._check('Any X WHERE X is CWUser, X creation_date D HAVING YEAR(D) = "2010" OR D = NULL',
                     '''SELECT _X.cw_eid
 FROM cw_CWUser AS _X
--- a/server/test/unittest_rqlannotation.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/server/test/unittest_rqlannotation.py	Fri Dec 09 12:08:27 2011 +0100
@@ -340,6 +340,16 @@
         self.assertEqual(rqlst.defined_vars['X']._q_invariant, False)
         self.assertEqual(rqlst.defined_vars['S']._q_invariant, False)
 
+    def test_remove_from_deleted_source_1(self):
+        rqlst = self._prepare('Note X WHERE X eid 999998, NOT X cw_source Y')
+        self.failIf('X' in rqlst.defined_vars) # simplified
+        self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True)
+
+    def test_remove_from_deleted_source_2(self):
+        rqlst = self._prepare('Note X WHERE X eid IN (999998, 999999), NOT X cw_source Y')
+        self.assertEqual(rqlst.defined_vars['X']._q_invariant, False)
+        self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True)
+
 if __name__ == '__main__':
     from logilab.common.testlib import unittest_main
     unittest_main()
--- a/server/test/unittest_security.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/server/test/unittest_security.py	Fri Dec 09 12:08:27 2011 +0100
@@ -221,7 +221,7 @@
         rset = cu.execute('Personne P')
         self.assertEqual(len(rset), 1)
         ent = rset.get_entity(0, 0)
-        session.set_pool() # necessary
+        session.set_cnxset() # necessary
         self.assertRaises(Unauthorized, ent.cw_check_perm, 'update')
         self.assertRaises(Unauthorized,
                           cu.execute, "SET P travaille S WHERE P is Personne, S is Societe")
@@ -579,7 +579,7 @@
         cnx = self.login('iaminusersgrouponly')
         session = self.session
         # needed to avoid check_perm error
-        session.set_pool()
+        session.set_cnxset()
         # needed to remove rql expr granting update perm to the user
         affaire_perms = self.schema['Affaire'].permissions.copy()
         self.schema['Affaire'].set_action_permissions('update', self.schema['Affaire'].get_groups('update'))
--- a/server/test/unittest_session.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/server/test/unittest_session.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -15,13 +15,12 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""
+from __future__ import with_statement
 
-"""
 from logilab.common.testlib import TestCase, unittest_main, mock_object
 
 from cubicweb.devtools.testlib import CubicWebTC
-from cubicweb.server.session import _make_description
+from cubicweb.server.session import _make_description, hooks_control
 
 class Variable:
     def __init__(self, name):
@@ -46,11 +45,70 @@
         self.assertEqual(_make_description((Function('max', 'A'), Variable('B')), {}, solution),
                           ['Int','CWUser'])
 
+
 class InternalSessionTC(CubicWebTC):
     def test_dbapi_query(self):
         session = self.repo.internal_session()
         self.assertFalse(session.running_dbapi_query)
         session.close()
 
+
+class SessionTC(CubicWebTC):
+
+    def test_hooks_control(self):
+        session = self.session
+        self.assertEqual(session.hooks_mode, session.HOOKS_ALLOW_ALL)
+        self.assertEqual(session.disabled_hook_categories, set())
+        self.assertEqual(session.enabled_hook_categories, set())
+        self.assertEqual(len(session._tx_data), 1)
+        with hooks_control(session, session.HOOKS_DENY_ALL, 'metadata'):
+            self.assertEqual(session.hooks_mode, session.HOOKS_DENY_ALL)
+            self.assertEqual(session.disabled_hook_categories, set())
+            self.assertEqual(session.enabled_hook_categories, set(('metadata',)))
+            session.commit()
+            self.assertEqual(session.hooks_mode, session.HOOKS_DENY_ALL)
+            self.assertEqual(session.disabled_hook_categories, set())
+            self.assertEqual(session.enabled_hook_categories, set(('metadata',)))
+            session.rollback()
+            self.assertEqual(session.hooks_mode, session.HOOKS_DENY_ALL)
+            self.assertEqual(session.disabled_hook_categories, set())
+            self.assertEqual(session.enabled_hook_categories, set(('metadata',)))
+            with hooks_control(session, session.HOOKS_ALLOW_ALL, 'integrity'):
+                self.assertEqual(session.hooks_mode, session.HOOKS_ALLOW_ALL)
+                self.assertEqual(session.disabled_hook_categories, set(('integrity',)))
+                self.assertEqual(session.enabled_hook_categories, set(('metadata',))) # not changed in such case
+            self.assertEqual(session.hooks_mode, session.HOOKS_DENY_ALL)
+            self.assertEqual(session.disabled_hook_categories, set())
+            self.assertEqual(session.enabled_hook_categories, set(('metadata',)))
+        # leaving context manager with no transaction running should reset the
+        # transaction local storage (and associated cnxset)
+        self.assertEqual(session._tx_data, {})
+        self.assertEqual(session.cnxset, None)
+        self.assertEqual(session.hooks_mode, session.HOOKS_ALLOW_ALL)
+        self.assertEqual(session.disabled_hook_categories, set())
+        self.assertEqual(session.enabled_hook_categories, set())
+
+    def test_build_descr1(self):
+        rset = self.execute('(Any U,L WHERE U login L) UNION (Any G,N WHERE G name N, G is CWGroup)')
+        orig_length = len(rset)
+        rset.rows[0][0] = 9999999
+        description = self.session.build_description(rset.syntax_tree(), None, rset.rows)
+        self.assertEqual(len(description), orig_length - 1)
+        self.assertEqual(len(rset.rows), orig_length - 1)
+        self.failIf(rset.rows[0][0] == 9999999)
+
+    def test_build_descr2(self):
+        rset = self.execute('Any X,Y WITH X,Y BEING ((Any G,NULL WHERE G is CWGroup) UNION (Any U,G WHERE U in_group G))')
+        for x, y in rset.description:
+            if y is not None:
+                self.assertEqual(y, 'CWGroup')
+
+    def test_build_descr3(self):
+        rset = self.execute('(Any G,NULL WHERE G is CWGroup) UNION (Any U,G WHERE U in_group G)')
+        for x, y in rset.description:
+            if y is not None:
+                self.assertEqual(y, 'CWGroup')
+
+
 if __name__ == '__main__':
     unittest_main()
--- a/server/test/unittest_ssplanner.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/server/test/unittest_ssplanner.py	Fri Dec 09 12:08:27 2011 +0100
@@ -16,14 +16,18 @@
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 
-from cubicweb.devtools import init_test_database
+from cubicweb.devtools import TestServerConfiguration, get_test_db_handler
 from cubicweb.devtools.repotest import BasePlannerTC, test_plan
 from cubicweb.server.ssplanner import SSPlanner
 
 # keep cnx so it's not garbage collected and the associated session closed
 def setUpModule(*args):
     global repo, cnx
-    repo, cnx = init_test_database(apphome=SSPlannerTC.datadir)
+    handler = get_test_db_handler(TestServerConfiguration(
+            'data', apphome=SSPlannerTC.datadir))
+    handler.build_db_cache()
+    global repo, cnx
+    repo, cnx = handler.get_repo_and_cnx()
 
 def tearDownModule(*args):
     global repo, cnx
--- a/server/test/unittest_storage.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/server/test/unittest_storage.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -261,7 +261,7 @@
     def test_bfss_update_to_None(self):
         f = self.session.create_entity('Affaire', opt_attr=Binary('toto'))
         self.session.commit()
-        self.session.set_pool()
+        self.session.set_cnxset()
         f.set_attributes(opt_attr=None)
         self.session.commit()
 
--- a/server/test/unittest_undo.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/server/test/unittest_undo.py	Fri Dec 09 12:08:27 2011 +0100
@@ -153,8 +153,8 @@
         txuuid = self.commit()
         actions = self.cnx.transaction_info(txuuid).actions_list()
         self.assertEqual(len(actions), 1)
-        toto.clear_all_caches()
-        e.clear_all_caches()
+        toto.cw_clear_all_caches()
+        e.cw_clear_all_caches()
         errors = self.cnx.undo_transaction(txuuid)
         undotxuuid = self.commit()
         self.assertEqual(undotxuuid, None) # undo not undoable
@@ -195,7 +195,7 @@
         self.commit()
         errors = self.cnx.undo_transaction(txuuid)
         self.commit()
-        p.clear_all_caches()
+        p.cw_clear_all_caches()
         self.assertEqual(p.fiche[0].eid, c2.eid)
         self.assertEqual(len(errors), 1)
         self.assertEqual(errors[0],
@@ -235,7 +235,7 @@
         self.failIf(self.execute('Any X WHERE X eid %(x)s', {'x': c.eid}))
         self.failIf(self.execute('Any X WHERE X eid %(x)s', {'x': p.eid}))
         self.failIf(self.execute('Any X,Y WHERE X fiche Y'))
-        self.session.set_pool()
+        self.session.set_cnxset()
         for eid in (p.eid, c.eid):
             self.failIf(session.system_sql(
                 'SELECT * FROM entities WHERE eid=%s' % eid).fetchall())
--- a/server/utils.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/server/utils.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -16,6 +16,7 @@
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """Some utilities for the CubicWeb server."""
+
 __docformat__ = "restructuredtext en"
 
 import sys
@@ -121,11 +122,12 @@
 
 class LoopTask(object):
     """threaded task restarting itself once executed"""
-    def __init__(self, interval, func, args):
+    def __init__(self, repo, interval, func, args):
         if interval <= 0:
             raise ValueError('Loop task interval must be > 0 '
                              '(current value: %f for %s)' % \
                              (interval, func_name(func)))
+        self.repo = repo
         self.interval = interval
         def auto_restart_func(self=self, func=func, args=args):
             restart = True
@@ -138,7 +140,7 @@
             except BaseException:
                 restart = False
             finally:
-                if restart:
+                if restart and not self.repo.shutting_down:
                     self.start()
         self.func = auto_restart_func
         self.name = func_name(func)
@@ -167,7 +169,7 @@
         def auto_remove_func(self=self, func=target):
             try:
                 func()
-            except:
+            except Exception:
                 logger = logging.getLogger('cubicweb.repository')
                 logger.exception('Unhandled exception in RepoThread %s', self._name)
                 raise
--- a/setup.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/setup.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # pylint: disable=W0142,W0403,W0404,W0613,W0622,W0622,W0704,R0904,C0103,E0611
 #
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -179,7 +179,7 @@
 if USE_SETUPTOOLS:
     # overwrite MyInstallData to use sys.prefix instead of the egg directory
     MyInstallMoreData = MyInstallData
-    class MyInstallData(MyInstallMoreData):
+    class MyInstallData(MyInstallMoreData): # pylint: disable=E0102
         """A class that manages data files installation"""
         def run(self):
             _old_install_dir = self.install_dir
--- a/skeleton/__pkginfo__.py.tmpl	Mon Sep 26 18:37:23 2011 +0200
+++ b/skeleton/__pkginfo__.py.tmpl	Fri Dec 09 12:08:27 2011 +0100
@@ -18,7 +18,7 @@
 
 
 from os import listdir as _listdir
-from os.path import join, isdir, exists
+from os.path import join, isdir
 from glob import glob
 
 THIS_CUBE_DIR = join('share', 'cubicweb', 'cubes', modname)
--- a/skeleton/debian/control.tmpl	Mon Sep 26 18:37:23 2011 +0200
+++ b/skeleton/debian/control.tmpl	Fri Dec 09 12:08:27 2011 +0100
@@ -2,7 +2,7 @@
 Section: web
 Priority: optional
 Maintainer: %(author)s <%(author-email)s>
-Build-Depends: debhelper (>= 5.0.37.1), python (>=2.4), python-dev (>=2.4)
+Build-Depends: debhelper (>= 5.0.37.1), python (>=2.4), python-support
 Standards-Version: 3.8.0
 
 
--- a/skeleton/debian/rules.tmpl	Mon Sep 26 18:37:23 2011 +0200
+++ b/skeleton/debian/rules.tmpl	Fri Dec 09 12:08:27 2011 +0100
@@ -37,6 +37,7 @@
 	dh_installexamples -i
 	dh_installdocs -i
 	dh_installman -i
+	dh_pysupport -i /usr/share/cubicweb
 	dh_link -i
 	dh_compress -i -X.py -X.ini -X.xml -Xtest
 	dh_fixperms -i
--- a/skeleton/test/realdb_test_CUBENAME.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/skeleton/test/realdb_test_CUBENAME.py	Fri Dec 09 12:08:27 2011 +0100
@@ -18,8 +18,8 @@
 """
 
 """
-from cubicweb.devtools import buildconfig, loadconfig
-from cubicweb.devtools.testlib import RealDBTest
+from cubicweb.devtools.testlib import CubicWebTC
+from cubicweb.devtools.realdbtest import buildconfig, loadconfig
 
 def setUpModule(options):
     if options.source:
@@ -32,7 +32,8 @@
                                                options.epassword)
     RealDatabaseTC.configcls = configcls
 
-class RealDatabaseTC(RealDBTest):
+
+class RealDatabaseTC(CubicWebTC):
     configcls = None # set by setUpModule()
 
     def test_all_primaries(self):
--- a/sobjects/notification.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/sobjects/notification.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -109,6 +109,8 @@
 
 url: %(url)s
 """
+    # to be defined on concrete sub-classes
+    content_attr = None
 
     def context(self, **kwargs):
         entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0)
--- a/sobjects/parsers.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/sobjects/parsers.py	Fri Dec 09 12:08:27 2011 +0100
@@ -31,26 +31,22 @@
 
 """
 
-import urllib2
-import StringIO
 import os.path as osp
-from cookielib import CookieJar
-from datetime import datetime, timedelta
-
-from lxml import etree
+from datetime import datetime, timedelta, time
+from urllib import urlencode
+from cgi import parse_qs # in urlparse with python >= 2.6
 
 from logilab.common.date import todate, totime
 from logilab.common.textutils import splitstrip, text_to_dict
+from logilab.common.decorators import classproperty
 
 from yams.constraints import BASE_CONVERTERS
 from yams.schema import role_name as rn
 
-from cubicweb import ValidationError, typed_eid
+from cubicweb import ValidationError, RegistryException, typed_eid
+from cubicweb.view import Component
 from cubicweb.server.sources import datafeed
-
-def ensure_str_keys(dic):
-    for key in dic:
-        dic[str(key)] = dic.pop(key)
+from cubicweb.server.hook import match_rtype
 
 # XXX see cubicweb.cwvreg.YAMS_TO_PY
 # XXX see cubicweb.web.views.xmlrss.SERIALIZERS
@@ -65,61 +61,31 @@
         ustr = ustr.split('.',1)[0]
     return datetime.strptime(ustr, '%Y-%m-%d %H:%M:%S')
 DEFAULT_CONVERTERS['Datetime'] = convert_datetime
+# XXX handle timezone, though this will be enough as TZDatetime are
+# serialized without time zone by default (UTC time). See
+# cw.web.views.xmlrss.SERIALIZERS.
+DEFAULT_CONVERTERS['TZDatetime'] = convert_datetime
 def convert_time(ustr):
     return totime(datetime.strptime(ustr, '%H:%M:%S'))
 DEFAULT_CONVERTERS['Time'] = convert_time
+DEFAULT_CONVERTERS['TZTime'] = convert_time
 def convert_interval(ustr):
     return time(seconds=int(ustr))
 DEFAULT_CONVERTERS['Interval'] = convert_interval
 
-# use a cookie enabled opener to use session cookie if any
-_OPENER = urllib2.build_opener()
-try:
-    from logilab.common import urllib2ext
-    _OPENER.add_handler(urllib2ext.HTTPGssapiAuthHandler())
-except ImportError: # python-kerberos not available
-    pass
-_OPENER.add_handler(urllib2.HTTPCookieProcessor(CookieJar()))
-
 def extract_typed_attrs(eschema, stringdict, converters=DEFAULT_CONVERTERS):
     typeddict = {}
     for rschema in eschema.subject_relations():
         if rschema.final and rschema in stringdict:
-            if rschema == 'eid':
+            if rschema in ('eid', 'cwuri', 'cwtype', 'cwsource'):
                 continue
             attrtype = eschema.destination(rschema)
-            typeddict[rschema.type] = converters[attrtype](stringdict[rschema])
+            value = stringdict[rschema]
+            if value is not None:
+                value = converters[attrtype](value)
+            typeddict[rschema.type] = value
     return typeddict
 
-def _parse_entity_etree(parent):
-    for node in list(parent):
-        try:
-            item = {'cwtype': unicode(node.tag),
-                    'cwuri': node.attrib['cwuri'],
-                    'eid': typed_eid(node.attrib['eid']),
-                    }
-        except KeyError:
-            # cw < 3.11 compat mode XXX
-            item = {'cwtype': unicode(node.tag),
-                    'cwuri': node.find('cwuri').text,
-                    'eid': typed_eid(node.find('eid').text),
-                    }
-        rels = {}
-        for child in node:
-            role = child.get('role')
-            if role:
-                # relation
-                related = rels.setdefault(role, {}).setdefault(child.tag, [])
-                related += [ritem for ritem, _ in _parse_entity_etree(child)]
-            else:
-                # attribute
-                item[child.tag] = unicode(child.text)
-        yield item, rels
-
-def build_search_rql(etype, attrs):
-    restrictions = ['X %(attr)s %%(%(attr)s)s'%{'attr': attr} for attr in attrs]
-    return 'Any X WHERE X is %s, %s' % (etype, ', '.join(restrictions))
-
 def rtype_role_rql(rtype, role):
     if role == 'object':
         return 'Y %s X WHERE X eid %%(x)s' % rtype
@@ -127,34 +93,40 @@
         return 'X %s Y WHERE X eid %%(x)s' % rtype
 
 
-def _check_no_option(action, options, eid, _):
-    if options:
-        msg = _("'%s' action doesn't take any options") % action
-        raise ValidationError(eid, {rn('options', 'subject'): msg})
+class CWEntityXMLParser(datafeed.DataFeedXMLParser):
+    """datafeed parser for the 'xml' entity view
 
-def _check_linkattr_option(action, options, eid, _):
-    if not 'linkattr' in options:
-        msg = _("'%s' action requires 'linkattr' option") % action
-        raise ValidationError(eid, {rn('options', 'subject'): msg})
+    Most of the logic is delegated to the following components:
+
+    * an "item builder" component, turning an etree xml node into a specific
+      python dictionary representing an entity
 
-
-class CWEntityXMLParser(datafeed.DataFeedParser):
-    """datafeed parser for the 'xml' entity view"""
-    __regid__ = 'cw.entityxml'
+    * "action" components, selected given an entity, a relation and its role in
+      the relation, and responsible for linking the entity to given related
+      items (e.g. dictionary)
 
-    action_options = {
-        'copy': _check_no_option,
-        'link-or-create': _check_linkattr_option,
-        'link': _check_linkattr_option,
-        }
+    So the parser is only doing the gluing service and the connection to the
+    source.
+    """
+    __regid__ = 'cw.entityxml'
 
     def __init__(self, *args, **kwargs):
         super(CWEntityXMLParser, self).__init__(*args, **kwargs)
-        self.action_methods = {
-            'copy': self.related_copy,
-            'link-or-create': self.related_link_or_create,
-            'link': self.related_link,
-            }
+        self._parsed_urls = {}
+        self._processed_entities = set()
+
+    def select_linker(self, action, rtype, role, entity=None):
+        try:
+            return self._cw.vreg['components'].select(
+                'cw.entityxml.action.%s' % action, self._cw, entity=entity,
+                rtype=rtype, role=role, parser=self)
+        except RegistryException:
+            raise RegistryException('Unknown action %s' % action)
+
+    def list_actions(self):
+        reg = self._cw.vreg['components']
+        return sorted(clss[0].action for rid, clss in reg.iteritems()
+                      if rid.startswith('cw.entityxml.action.'))
 
     # mapping handling #########################################################
 
@@ -180,10 +152,14 @@
             raise ValidationError(schemacfg.eid, {rn('options', 'subject'): msg})
         try:
             action = options.pop('action')
-            self.action_options[action](action, options, schemacfg.eid, _)
+            linker = self.select_linker(action, rtype, role)
+            linker.check_options(options, schemacfg.eid)
         except KeyError:
             msg = _('"action" must be specified in options; allowed values are '
-                    '%s') % ', '.join(self.action_methods)
+                    '%s') % ', '.join(self.list_actions())
+            raise ValidationError(schemacfg.eid, {rn('options', 'subject'): msg})
+        except RegistryException:
+            msg = _('allowed values for "action" are %s') % ', '.join(self.list_actions())
             raise ValidationError(schemacfg.eid, {rn('options', 'subject'): msg})
         if not checkonly:
             if role == 'subject':
@@ -208,184 +184,325 @@
 
     # import handling ##########################################################
 
-    def process(self, url, partialcommit=True):
+    def process(self, url, raise_on_error=False, partialcommit=True):
         """IDataFeedParser main entry point"""
-        # XXX suppression support according to source configuration. If set, get
-        # all cwuri of entities from this source, and compare with newly
-        # imported ones
-        error = False
-        for item, rels in self.parse(url):
-            cwuri = item['cwuri']
-            try:
-                self.process_item(item, rels)
-                if partialcommit:
-                    # commit+set_pool instead of commit(reset_pool=False) to let
-                    # other a chance to get our pool
-                    self._cw.commit()
-                    self._cw.set_pool()
-            except ValidationError, exc:
-                if partialcommit:
-                    self.source.error('Skipping %s because of validation error %s' % (cwuri, exc))
-                    self._cw.rollback()
-                    self._cw.set_pool()
-                    error = True
-                else:
-                    raise
-        return error
+        if url.startswith('http'): # XXX similar loose test as in parse of sources.datafeed
+            url = self.complete_url(url)
+        super(CWEntityXMLParser, self).process(url, raise_on_error, partialcommit)
 
-    def parse(self, url):
-        if not url.startswith('http'):
-            stream = StringIO.StringIO(url)
-        else:
-            for mappedurl in HOST_MAPPING:
-                if url.startswith(mappedurl):
-                    url = url.replace(mappedurl, HOST_MAPPING[mappedurl], 1)
-                    break
-            self.source.info('GET %s', url)
-            stream = _OPENER.open(url)
-        return _parse_entity_etree(etree.parse(stream).getroot())
+    def parse_etree(self, parent):
+        for node in list(parent):
+            builder = self._cw.vreg['components'].select(
+                'cw.entityxml.item-builder', self._cw, node=node,
+                parser=self)
+            yield builder.build_item()
 
     def process_item(self, item, rels):
-        entity = self.extid2entity(str(item.pop('cwuri')),  item.pop('cwtype'),
-                                   item=item)
+        """
+        item and rels are what's returned by the item builder `build_item` method:
+
+        * `item` is an {attribute: value} dictionary
+        * `rels` is for relations and structured as
+           {role: {relation: [(related item, related rels)...]}
+        """
+        entity = self.extid2entity(str(item['cwuri']),  item['cwtype'],
+                                   cwsource=item['cwsource'], item=item)
+        if entity is None:
+            return None
+        if entity.eid in self._processed_entities:
+            return entity
+        self._processed_entities.add(entity.eid)
         if not (self.created_during_pull(entity) or self.updated_during_pull(entity)):
             self.notify_updated(entity)
-            item.pop('eid')
-            # XXX check modification date
             attrs = extract_typed_attrs(entity.e_schema, item)
-            entity.set_attributes(**attrs)
-        for (rtype, role, action), rules in self.source.mapping.get(entity.__regid__, {}).iteritems():
+            # check modification date and compare attribute values to only
+            # update what's actually needed
+            entity.complete(tuple(attrs))
+            mdate = attrs.get('modification_date')
+            if not mdate or mdate > entity.modification_date:
+                attrs = dict( (k, v) for k, v in attrs.iteritems()
+                              if v != getattr(entity, k))
+                if attrs:
+                    entity.set_attributes(**attrs)
+        self.process_relations(entity, rels)
+        return entity
+
+    def process_relations(self, entity, rels):
+        etype = entity.__regid__
+        for (rtype, role, action), rules in self.source.mapping.get(etype, {}).iteritems():
             try:
                 related_items = rels[role][rtype]
             except KeyError:
                 self.source.error('relation %s-%s not found in xml export of %s',
-                                  rtype, role, entity.__regid__)
+                                  rtype, role, etype)
                 continue
             try:
-                actionmethod = self.action_methods[action]
-            except KeyError:
-                raise Exception('Unknown action %s' % action)
-            actionmethod(entity, rtype, role, related_items, rules)
-        return entity
+                linker = self.select_linker(action, rtype, role, entity)
+            except RegistryException:
+                self.source.error('no linker for action %s', action)
+            else:
+                linker.link_items(related_items, rules)
 
     def before_entity_copy(self, entity, sourceparams):
         """IDataFeedParser callback"""
         attrs = extract_typed_attrs(entity.e_schema, sourceparams['item'])
         entity.cw_edited.update(attrs)
 
-    def related_copy(self, entity, rtype, role, others, rules):
-        """implementation of 'copy' action
+    def complete_url(self, url, etype=None, known_relations=None):
+        """append to the url's query string information about relations that
+        should be included in the resulting xml, according to source mapping.
 
-        Takes no option.
-        """
-        assert not any(x[1] for x in rules), "'copy' action takes no option"
-        ttypes = set([x[0] for x in rules])
-        others = [item for item in others if item['cwtype'] in ttypes]
-        eids = [] # local eids
-        if not others:
-            self._clear_relation(entity, rtype, role, ttypes)
-            return
-        for item in others:
-            item, _rels = self._complete_item(item)
-            other_entity = self.process_item(item, [])
-            eids.append(other_entity.eid)
-        self._set_relation(entity, rtype, role, eids)
-
-    def related_link(self, entity, rtype, role, others, rules):
-        """implementation of 'link' action
+        If etype is not specified, try to guess it using the last path part of
+        the url, i.e. the format used by default in cubicweb to map all entities
+        of a given type as in 'http://mysite.org/EntityType'.
 
-        requires an options to control search of the linked entity.
+        If `known_relations` is given, it should be a dictionary of already
+        known relations, so they don't get queried again.
         """
-        for ttype, options in rules:
-            assert 'linkattr' in options, (
-                "'link' action requires a list of attributes used to "
-                "search if the entity already exists")
-            self._related_link(entity, rtype, role, ttype, others, [options['linkattr']],
-                               create_when_not_found=False)
+        try:
+            url, qs = url.split('?', 1)
+        except ValueError:
+            qs = ''
+        params = parse_qs(qs)
+        if not 'vid' in params:
+            params['vid'] = ['xml']
+        if etype is None:
+            try:
+                etype = url.rsplit('/', 1)[1]
+            except ValueError:
+                return url + '?' + self._cw.build_url_params(**params)
+            try:
+                etype = self._cw.vreg.case_insensitive_etypes[etype.lower()]
+            except KeyError:
+                return url + '?' + self._cw.build_url_params(**params)
+        relations = params.setdefault('relation', [])
+        for rtype, role, _ in self.source.mapping.get(etype, ()):
+            if known_relations and rtype in known_relations.get('role', ()):
+                continue
+            reldef = '%s-%s' % (rtype, role)
+            if not reldef in relations:
+                relations.append(reldef)
+        return url + '?' + self._cw.build_url_params(**params)
 
-    def related_link_or_create(self, entity, rtype, role, others, rules):
-        """implementation of 'link-or-create' action
+    def complete_item(self, item, rels):
+        try:
+            return self._parsed_urls[item['cwuri']]
+        except KeyError:
+            itemurl = self.complete_url(item['cwuri'], item['cwtype'], rels)
+            item_rels = list(self.parse(itemurl))
+            assert len(item_rels) == 1, 'url %s expected to bring back one '\
+                   'and only one entity, got %s' % (itemurl, len(item_rels))
+            self._parsed_urls[item['cwuri']] = item_rels[0]
+            if rels:
+                # XXX (do it better) merge relations
+                new_rels = item_rels[0][1]
+                new_rels.get('subject', {}).update(rels.get('subject', {}))
+                new_rels.get('object', {}).update(rels.get('object', {}))
+            return item_rels[0]
 
-        requires an options to control search of the linked entity.
-        """
-        for ttype, options in rules:
-            assert 'linkattr' in options, (
-                "'link-or-create' action requires a list of attributes used to "
-                "search if the entity already exists")
-            self._related_link(entity, rtype, role, ttype, others, [options['linkattr']],
-                               create_when_not_found=True)
+
+class CWEntityXMLItemBuilder(Component):
+    __regid__ = 'cw.entityxml.item-builder'
+
+    def __init__(self, _cw, parser, node, **kwargs):
+        super(CWEntityXMLItemBuilder, self).__init__(_cw, **kwargs)
+        self.parser = parser
+        self.node = node
+
+    def build_item(self):
+        """parse a XML document node and return two dictionaries defining (part
+        of) an entity:
 
-    def _related_link(self, entity, rtype, role, ttype, others, searchattrs,
-                      create_when_not_found):
-        def issubset(x,y):
-            return all(z in y for z in x)
+        - {attribute: value}
+        - {role: {relation: [(related item, related rels)...]}
+        """
+        node = self.node
+        item = dict(node.attrib.items())
+        item['cwtype'] = unicode(node.tag)
+        item.setdefault('cwsource', None)
+        try:
+            item['eid'] = typed_eid(item['eid'])
+        except KeyError:
+            # cw < 3.11 compat mode XXX
+            item['eid'] = typed_eid(node.find('eid').text)
+            item['cwuri'] = node.find('cwuri').text
+        rels = {}
+        for child in node:
+            role = child.get('role')
+            if role:
+                # relation
+                related = rels.setdefault(role, {}).setdefault(child.tag, [])
+                related += self.parser.parse_etree(child)
+            elif child.text:
+                # attribute
+                item[child.tag] = unicode(child.text)
+            else:
+                # None attribute (empty tag)
+                item[child.tag] = None
+        return item, rels
+
+
+class CWEntityXMLActionCopy(Component):
+    """implementation of cubicweb entity xml parser's'copy' action
+
+    Takes no option.
+    """
+    __regid__ = 'cw.entityxml.action.copy'
+
+    def __init__(self, _cw, parser, rtype, role, entity=None, **kwargs):
+        super(CWEntityXMLActionCopy, self).__init__(_cw, **kwargs)
+        self.parser = parser
+        self.rtype = rtype
+        self.role = role
+        self.entity = entity
+
+    @classproperty
+    def action(cls):
+        return cls.__regid__.rsplit('.', 1)[-1]
+
+    def check_options(self, options, eid):
+        self._check_no_options(options, eid)
+
+    def _check_no_options(self, options, eid, msg=None):
+        if options:
+            if msg is None:
+                msg = self._cw._("'%s' action doesn't take any options") % self.action
+            raise ValidationError(eid, {rn('options', 'subject'): msg})
+
+    def link_items(self, others, rules):
+        assert not any(x[1] for x in rules), "'copy' action takes no option"
+        ttypes = frozenset([x[0] for x in rules])
         eids = [] # local eids
-        for item in others:
-            if item['cwtype'] != ttype:
-                continue
-            if not issubset(searchattrs, item):
-                item, _rels = self._complete_item(item, False)
-                if not issubset(searchattrs, item):
-                    self.source.error('missing attribute, got %s expected keys %s'
-                                      % item, searchattrs)
-                    continue
-            kwargs = dict((attr, item[attr]) for attr in searchattrs)
-            rql = build_search_rql(item['cwtype'], kwargs)
-            rset = self._cw.execute(rql, kwargs)
-            if len(rset) > 1:
-                self.source.error('ambiguous link: found %s entity %s with attributes %s',
-                                  len(rset), item['cwtype'], kwargs)
-            elif len(rset) == 1:
-                eids.append(rset[0][0])
-            elif create_when_not_found:
-                ensure_str_keys(kwargs) # XXX necessary with python < 2.6
-                eids.append(self._cw.create_entity(item['cwtype'], **kwargs).eid)
-            else:
-                self.source.error('can not find %s entity with attributes %s',
-                                  item['cwtype'], kwargs)
-        if not eids:
-            self._clear_relation(entity, rtype, role, (ttype,))
+        for item, rels in others:
+            if item['cwtype'] in ttypes:
+                item, rels = self.parser.complete_item(item, rels)
+                other_entity = self.parser.process_item(item, rels)
+                if other_entity is not None:
+                    eids.append(other_entity.eid)
+        if eids:
+            self._set_relation(eids)
         else:
-            self._set_relation(entity, rtype, role, eids)
+            self._clear_relation(ttypes)
 
-    def _complete_item(self, item, add_relations=True):
-        itemurl = item['cwuri'] + '?vid=xml'
-        if add_relations:
-            for rtype, role, _ in self.source.mapping.get(item['cwtype'], ()):
-                itemurl += '&relation=%s-%s' % (rtype, role)
-        item_rels = list(self.parse(itemurl))
-        assert len(item_rels) == 1
-        return item_rels[0]
-
-    def _clear_relation(self, entity, rtype, role, ttypes):
-        if entity.eid not in self.stats['created']:
+    def _clear_relation(self, ttypes):
+        if not self.parser.created_during_pull(self.entity):
             if len(ttypes) > 1:
                 typerestr = ', Y is IN(%s)' % ','.join(ttypes)
             else:
                 typerestr = ', Y is %s' % ','.join(ttypes)
-            self._cw.execute('DELETE ' + rtype_role_rql(rtype, role) + typerestr,
-                             {'x': entity.eid})
+            self._cw.execute('DELETE ' + rtype_role_rql(self.rtype, self.role) + typerestr,
+                             {'x': self.entity.eid})
+
+    def _set_relation(self, eids):
+        assert eids
+        rtype = self.rtype
+        rqlbase = rtype_role_rql(rtype, self.role)
+        eidstr = ','.join(str(eid) for eid in eids)
+        self._cw.execute('DELETE %s, NOT Y eid IN (%s)' % (rqlbase, eidstr),
+                         {'x': self.entity.eid})
+        if self.role == 'object':
+            rql = 'SET %s, Y eid IN (%s), NOT Y %s X' % (rqlbase, eidstr, rtype)
+        else:
+            rql = 'SET %s, Y eid IN (%s), NOT X %s Y' % (rqlbase, eidstr, rtype)
+        self._cw.execute(rql, {'x': self.entity.eid})
+
+
+class CWEntityXMLActionLink(CWEntityXMLActionCopy):
+    """implementation of cubicweb entity xml parser's'link' action
+
+    requires a 'linkattr' option to control search of the linked entity.
+    """
+    __regid__ = 'cw.entityxml.action.link'
+
+    def check_options(self, options, eid):
+        if not 'linkattr' in options:
+            msg = self._cw._("'%s' action requires 'linkattr' option") % self.action
+            raise ValidationError(eid, {rn('options', 'subject'): msg})
+
+    create_when_not_found = False
 
-    def _set_relation(self, entity, rtype, role, eids):
-        rqlbase = rtype_role_rql(rtype, role)
-        rql = 'DELETE %s' % rqlbase
+    def link_items(self, others, rules):
+        for ttype, options in rules:
+            searchattrs = splitstrip(options.get('linkattr', ''))
+            self._related_link(ttype, others, searchattrs)
+
+    def _related_link(self, ttype, others, searchattrs):
+        def issubset(x,y):
+            return all(z in y for z in x)
+        eids = [] # local eids
+        source = self.parser.source
+        for item, rels in others:
+            if item['cwtype'] != ttype:
+                continue
+            if not issubset(searchattrs, item):
+                item, rels = self.parser.complete_item(item, rels)
+                if not issubset(searchattrs, item):
+                    source.error('missing attribute, got %s expected keys %s',
+                                 item, searchattrs)
+                    continue
+            # XXX str() needed with python < 2.6
+            kwargs = dict((str(attr), item[attr]) for attr in searchattrs)
+            targets = self._find_entities(item, kwargs)
+            if len(targets) == 1:
+                entity = targets[0]
+            elif not targets and self.create_when_not_found:
+                entity = self._cw.create_entity(item['cwtype'], **kwargs)
+            else:
+                if len(targets) > 1:
+                    source.error('ambiguous link: found %s entity %s with attributes %s',
+                                 len(targets), item['cwtype'], kwargs)
+                else:
+                    source.error('can not find %s entity with attributes %s',
+                                 item['cwtype'], kwargs)
+                continue
+            eids.append(entity.eid)
+            self.parser.process_relations(entity, rels)
         if eids:
-            eidstr = ','.join(str(eid) for eid in eids)
-            rql += ', NOT Y eid IN (%s)' % eidstr
-        self._cw.execute(rql, {'x': entity.eid})
-        if eids:
-            if role == 'object':
-                rql = 'SET %s, Y eid IN (%s), NOT Y %s X' % (rqlbase, eidstr, rtype)
-            else:
-                rql = 'SET %s, Y eid IN (%s), NOT X %s Y' % (rqlbase, eidstr, rtype)
-            self._cw.execute(rql, {'x': entity.eid})
+            self._set_relation(eids)
+        else:
+            self._clear_relation((ttype,))
+
+    def _find_entities(self, item, kwargs):
+        return tuple(self._cw.find_entities(item['cwtype'], **kwargs))
+
+
+class CWEntityXMLActionLinkInState(CWEntityXMLActionLink):
+    """custom implementation of cubicweb entity xml parser's'link' action for
+    in_state relation
+    """
+    __select__ = match_rtype('in_state')
+
+    def check_options(self, options, eid):
+        super(CWEntityXMLActionLinkInState, self).check_options(options, eid)
+        if not 'name' in options['linkattr']:
+            msg = self._cw._("'%s' action for in_state relation should at least have 'linkattr=name' option") % self.action
+            raise ValidationError(eid, {rn('options', 'subject'): msg})
+
+    def _find_entities(self, item, kwargs):
+        assert 'name' in item # XXX else, complete_item
+        state_name = item['name']
+        wf = self.entity.cw_adapt_to('IWorkflowable').current_workflow
+        state = wf.state_by_name(state_name)
+        if state is None:
+            return ()
+        return (state,)
+
+
+class CWEntityXMLActionLinkOrCreate(CWEntityXMLActionLink):
+    """implementation of cubicweb entity xml parser's'link-or-create' action
+
+    requires a 'linkattr' option to control search of the linked entity.
+    """
+    __regid__ = 'cw.entityxml.action.link-or-create'
+    create_when_not_found = True
+
 
 def registration_callback(vreg):
     vreg.register_all(globals().values(), __name__)
-    global HOST_MAPPING
-    HOST_MAPPING = {}
+    global URL_MAPPING
+    URL_MAPPING = {}
     if vreg.config.apphome:
-        host_mapping_file = osp.join(vreg.config.apphome, 'hostmapping.py')
-        if osp.exists(host_mapping_file):
-            HOST_MAPPING = eval(file(host_mapping_file).read())
-            vreg.info('using host mapping %s from %s', HOST_MAPPING, host_mapping_file)
+        url_mapping_file = osp.join(vreg.config.apphome, 'urlmapping.py')
+        if osp.exists(url_mapping_file):
+            URL_MAPPING = eval(file(url_mapping_file).read())
+            vreg.info('using url mapping %s from %s', URL_MAPPING, url_mapping_file)
--- a/sobjects/test/data/schema.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/sobjects/test/data/schema.py	Fri Dec 09 12:08:27 2011 +0100
@@ -25,4 +25,4 @@
 
 class Tag(EntityType):
     name = String(unique=True)
-    tags = SubjectRelation('CWUser')
+    tags = SubjectRelation(('CWUser', 'CWGroup', 'EmailAddress'))
--- a/sobjects/test/unittest_parsers.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/sobjects/test/unittest_parsers.py	Fri Dec 09 12:08:27 2011 +0100
@@ -16,6 +16,8 @@
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 
+from __future__ import with_statement
+
 from datetime import datetime
 
 from cubicweb.devtools.testlib import CubicWebTC
@@ -40,7 +42,7 @@
 
 BASEXML = ''.join(u'''
 <rset size="1">
- <CWUser eid="5" cwuri="http://pouet.org/5">
+ <CWUser eid="5" cwuri="http://pouet.org/5" cwsource="system">
   <login>sthenault</login>
   <upassword>toto</upassword>
   <last_login_time>2011-01-25 14:14:06</last_login_time>
@@ -57,17 +59,23 @@
     <Tag cwuri="http://pouet.org/9" eid="9"/>
     <Tag cwuri="http://pouet.org/10" eid="10"/>
   </tags>
+  <in_state role="subject">
+    <State cwuri="http://pouet.org/11" eid="11" name="activated"/>
+  </in_state>
  </CWUser>
 </rset>
 '''.splitlines())
 
-RELATEDXML ={
+RELATEDXML = {
     'http://pouet.org/6': u'''
 <rset size="1">
  <EmailAddress eid="6" cwuri="http://pouet.org/6">
   <address>syt@logilab.fr</address>
   <modification_date>2010-04-13 14:35:56</modification_date>
   <creation_date>2010-04-13 14:35:56</creation_date>
+  <tags role="object">
+    <Tag cwuri="http://pouet.org/9" eid="9"/>
+  </tags>
  </EmailAddress>
 </rset>
 ''',
@@ -75,6 +83,9 @@
 <rset size="1">
  <CWGroup eid="7" cwuri="http://pouet.org/7">
   <name>users</name>
+  <tags role="object">
+    <Tag cwuri="http://pouet.org/9" eid="9"/>
+  </tags>
  </CWGroup>
 </rset>
 ''',
@@ -101,20 +112,66 @@
 ''',
     }
 
+
+OTHERXML = ''.join(u'''
+<rset size="1">
+ <CWUser eid="5" cwuri="http://pouet.org/5" cwsource="myfeed">
+  <login>sthenault</login>
+  <upassword>toto</upassword>
+  <last_login_time>2011-01-25 14:14:06</last_login_time>
+  <creation_date>2010-01-22 10:27:59</creation_date>
+  <modification_date>2011-01-25 14:14:06</modification_date>
+  <in_group role="subject">
+    <CWGroup cwuri="http://pouet.org/7" eid="7"/>
+  </in_group>
+ </CWUser>
+</rset>
+'''.splitlines()
+)
+
+
 class CWEntityXMLParserTC(CubicWebTC):
-    def setup_database(self):
-        req = self.request()
-        source = req.create_entity('CWSource', name=u'myfeed', type=u'datafeed',
+    """/!\ this test use a pre-setup database /!\, if you modify above xml,
+    REMOVE THE DATABASE TEMPLATE else it won't be considered
+    """
+    test_db_id = 'xmlparser'
+    @classmethod
+    def pre_setup_database(cls, session, config):
+        myfeed = session.create_entity('CWSource', name=u'myfeed', type=u'datafeed',
                                    parser=u'cw.entityxml', url=BASEXML)
-        self.commit()
-        source.init_mapping([(('CWUser', 'use_email', '*'),
+        myotherfeed = session.create_entity('CWSource', name=u'myotherfeed', type=u'datafeed',
+                                            parser=u'cw.entityxml', url=OTHERXML)
+        session.commit()
+        myfeed.init_mapping([(('CWUser', 'use_email', '*'),
                               u'role=subject\naction=copy'),
                              (('CWUser', 'in_group', '*'),
                               u'role=subject\naction=link\nlinkattr=name'),
-                             (('*', 'tags', 'CWUser'),
+                             (('CWUser', 'in_state', '*'),
+                              u'role=subject\naction=link\nlinkattr=name'),
+                             (('*', 'tags', '*'),
                               u'role=object\naction=link-or-create\nlinkattr=name'),
                             ])
-        req.create_entity('Tag', name=u'hop')
+        myotherfeed.init_mapping([(('CWUser', 'in_group', '*'),
+                                   u'role=subject\naction=link\nlinkattr=name'),
+                                  (('CWUser', 'in_state', '*'),
+                                   u'role=subject\naction=link\nlinkattr=name'),
+                                  ])
+        session.create_entity('Tag', name=u'hop')
+
+    def test_complete_url(self):
+        dfsource = self.repo.sources_by_uri['myfeed']
+        parser = dfsource._get_parser(self.session)
+        self.assertEqual(parser.complete_url('http://www.cubicweb.org/CWUser'),
+                         'http://www.cubicweb.org/CWUser?relation=tags-object&relation=in_group-subject&relation=in_state-subject&relation=use_email-subject&vid=xml')
+        self.assertEqual(parser.complete_url('http://www.cubicweb.org/cwuser'),
+                         'http://www.cubicweb.org/cwuser?relation=tags-object&relation=in_group-subject&relation=in_state-subject&relation=use_email-subject&vid=xml')
+        self.assertEqual(parser.complete_url('http://www.cubicweb.org/cwuser?vid=rdf&relation=hop'),
+                         'http://www.cubicweb.org/cwuser?relation=hop&relation=tags-object&relation=in_group-subject&relation=in_state-subject&relation=use_email-subject&vid=rdf')
+        self.assertEqual(parser.complete_url('http://www.cubicweb.org/?rql=cwuser&vid=rdf&relation=hop'),
+                         'http://www.cubicweb.org/?rql=cwuser&relation=hop&vid=rdf')
+        self.assertEqual(parser.complete_url('http://www.cubicweb.org/?rql=cwuser&relation=hop'),
+                         'http://www.cubicweb.org/?rql=cwuser&relation=hop&vid=xml')
+
 
     def test_actions(self):
         dfsource = self.repo.sources_by_uri['myfeed']
@@ -122,13 +179,23 @@
                          {u'CWUser': {
                              (u'in_group', u'subject', u'link'): [
                                  (u'CWGroup', {u'linkattr': u'name'})],
+                             (u'in_state', u'subject', u'link'): [
+                                 (u'State', {u'linkattr': u'name'})],
                              (u'tags', u'object', u'link-or-create'): [
                                  (u'Tag', {u'linkattr': u'name'})],
                              (u'use_email', u'subject', u'copy'): [
                                  (u'EmailAddress', {})]
-                             }
+                             },
+                          u'CWGroup': {
+                             (u'tags', u'object', u'link-or-create'): [
+                                 (u'Tag', {u'linkattr': u'name'})],
+                             },
+                          u'EmailAddress': {
+                             (u'tags', u'object', u'link-or-create'): [
+                                 (u'Tag', {u'linkattr': u'name'})],
+                             },
                           })
-        session = self.repo.internal_session()
+        session = self.repo.internal_session(safe=True)
         stats = dfsource.pull_data(session, force=True, raise_on_error=True)
         self.assertEqual(sorted(stats.keys()), ['created', 'updated'])
         self.assertEqual(len(stats['created']), 2)
@@ -139,31 +206,121 @@
         self.assertEqual(user.modification_date, datetime(2011, 01, 25, 14, 14, 06))
         self.assertEqual(user.cwuri, 'http://pouet.org/5')
         self.assertEqual(user.cw_source[0].name, 'myfeed')
+        self.assertEqual(user.absolute_url(), 'http://pouet.org/5')
         self.assertEqual(len(user.use_email), 1)
         # copy action
         email = user.use_email[0]
         self.assertEqual(email.address, 'syt@logilab.fr')
         self.assertEqual(email.cwuri, 'http://pouet.org/6')
+        self.assertEqual(email.absolute_url(), 'http://pouet.org/6')
         self.assertEqual(email.cw_source[0].name, 'myfeed')
+        self.assertEqual(len(email.reverse_tags), 1)
+        self.assertEqual(email.reverse_tags[0].name, 'hop')
         # link action
         self.assertFalse(self.execute('CWGroup X WHERE X name "unknown"'))
         groups = sorted([g.name for g in user.in_group])
         self.assertEqual(groups, ['users'])
+        group = user.in_group[0]
+        self.assertEqual(len(group.reverse_tags), 1)
+        self.assertEqual(group.reverse_tags[0].name, 'hop')
         # link or create action
-        tags = sorted([t.name for t in user.reverse_tags])
-        self.assertEqual(tags, ['hop', 'unknown'])
-        tag = self.execute('Tag X WHERE X name "unknown"').get_entity(0, 0)
-        self.assertEqual(tag.cwuri, 'http://testing.fr/cubicweb/%s' % tag.eid)
-        self.assertEqual(tag.cw_source[0].name, 'system')
-
-        stats = dfsource.pull_data(session, force=True, raise_on_error=True)
+        tags = set([(t.name, t.cwuri.replace(str(t.eid), ''), t.cw_source[0].name)
+                    for t in user.reverse_tags])
+        self.assertEqual(tags, set((('hop', 'http://testing.fr/cubicweb/', 'system'),
+                                    ('unknown', 'http://testing.fr/cubicweb/', 'system')))
+                         )
+        session.set_cnxset()
+        with session.security_enabled(read=False): # avoid Unauthorized due to password selection
+            stats = dfsource.pull_data(session, force=True, raise_on_error=True)
         self.assertEqual(stats['created'], set())
         self.assertEqual(len(stats['updated']), 2)
         self.repo._type_source_cache.clear()
         self.repo._extid_cache.clear()
-        stats = dfsource.pull_data(session, force=True, raise_on_error=True)
+        session.set_cnxset()
+        with session.security_enabled(read=False): # avoid Unauthorized due to password selection
+            stats = dfsource.pull_data(session, force=True, raise_on_error=True)
         self.assertEqual(stats['created'], set())
         self.assertEqual(len(stats['updated']), 2)
+        session.commit()
+
+        # test move to system source
+        self.sexecute('SET X cw_source S WHERE X eid %(x)s, S name "system"', {'x': email.eid})
+        self.commit()
+        rset = self.sexecute('EmailAddress X WHERE X address "syt@logilab.fr"')
+        self.assertEqual(len(rset), 1)
+        e = rset.get_entity(0, 0)
+        self.assertEqual(e.eid, email.eid)
+        self.assertEqual(e.cw_metainformation(), {'source': {'type': u'native', 'uri': u'system',
+                                                             'use-cwuri-as-url': False},
+                                                  'type': 'EmailAddress',
+                                                  'extid': None})
+        self.assertEqual(e.cw_source[0].name, 'system')
+        self.assertEqual(e.reverse_use_email[0].login, 'sthenault')
+        self.commit()
+        # test everything is still fine after source synchronization
+        session.set_cnxset()
+        with session.security_enabled(read=False): # avoid Unauthorized due to password selection
+            stats = dfsource.pull_data(session, force=True, raise_on_error=True)
+        rset = self.sexecute('EmailAddress X WHERE X address "syt@logilab.fr"')
+        self.assertEqual(len(rset), 1)
+        e = rset.get_entity(0, 0)
+        self.assertEqual(e.eid, email.eid)
+        self.assertEqual(e.cw_metainformation(), {'source': {'type': u'native', 'uri': u'system',
+                                                             'use-cwuri-as-url': False},
+                                                  'type': 'EmailAddress',
+                                                  'extid': None})
+        self.assertEqual(e.cw_source[0].name, 'system')
+        self.assertEqual(e.reverse_use_email[0].login, 'sthenault')
+        session.commit()
+
+        # test delete entity
+        e.cw_delete()
+        self.commit()
+        # test everything is still fine after source synchronization
+        session.set_cnxset()
+        with session.security_enabled(read=False): # avoid Unauthorized due to password selection
+            stats = dfsource.pull_data(session, force=True, raise_on_error=True)
+        rset = self.sexecute('EmailAddress X WHERE X address "syt@logilab.fr"')
+        self.assertEqual(len(rset), 0)
+        rset = self.sexecute('Any X WHERE X use_email E, X login "sthenault"')
+        self.assertEqual(len(rset), 0)
+
+    def test_external_entity(self):
+        dfsource = self.repo.sources_by_uri['myotherfeed']
+        session = self.repo.internal_session(safe=True)
+        stats = dfsource.pull_data(session, force=True, raise_on_error=True)
+        user = self.execute('CWUser X WHERE X login "sthenault"').get_entity(0, 0)
+        self.assertEqual(user.creation_date, datetime(2010, 01, 22, 10, 27, 59))
+        self.assertEqual(user.modification_date, datetime(2011, 01, 25, 14, 14, 06))
+        self.assertEqual(user.cwuri, 'http://pouet.org/5')
+        self.assertEqual(user.cw_source[0].name, 'myfeed')
+
+    def test_noerror_missing_fti_attribute(self):
+        dfsource = self.repo.sources_by_uri['myfeed']
+        session = self.repo.internal_session(safe=True)
+        parser = dfsource._get_parser(session)
+        dfsource.process_urls(parser, ['''
+<rset size="1">
+ <Card eid="50" cwuri="http://pouet.org/50" cwsource="system">
+  <title>how-to</title>
+ </Card>
+</rset>
+'''], raise_on_error=True)
+
+    def test_noerror_unspecified_date(self):
+        dfsource = self.repo.sources_by_uri['myfeed']
+        session = self.repo.internal_session(safe=True)
+        parser = dfsource._get_parser(session)
+        dfsource.process_urls(parser, ['''
+<rset size="1">
+ <Card eid="50" cwuri="http://pouet.org/50" cwsource="system">
+  <title>how-to</title>
+  <content>how-to</content>
+  <synopsis>how-to</synopsis>
+  <creation_date/>
+ </Card>
+</rset>
+'''], raise_on_error=True)
 
 if __name__ == '__main__':
     from logilab.common.testlib import unittest_main
--- a/sobjects/textparsers.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/sobjects/textparsers.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -83,7 +83,7 @@
                     trinfo = iworkflowable.fire_transition(tr)
                     caller.fire_event('state-changed', {'trinfo': trinfo,
                                                         'entity': entity})
-                except:
+                except Exception:
                     self.exception('while changing state of %s', entity)
             else:
                 self.error("can't pass transition %s on entity %s",
--- a/test/data/rewrite/schema.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/test/data/rewrite/schema.py	Fri Dec 09 12:08:27 2011 +0100
@@ -63,3 +63,15 @@
     object = 'Card'
     inlined = True
     cardinality = '?*'
+
+class inlined_note(RelationDefinition):
+    subject = 'Card'
+    object = 'Note'
+    inlined = True
+    cardinality = '?*'
+
+class inlined_affaire(RelationDefinition):
+    subject = 'Note'
+    object = 'Affaire'
+    inlined = True
+    cardinality = '?*'
--- a/test/unittest_dbapi.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/test/unittest_dbapi.py	Fri Dec 09 12:08:27 2011 +0100
@@ -32,7 +32,8 @@
     def test_public_repo_api(self):
         cnx = self.login('anon')
         self.assertEqual(cnx.get_schema(), self.repo.schema)
-        self.assertEqual(cnx.source_defs(), {'system': {'type': 'native', 'uri': 'system'}})
+        self.assertEqual(cnx.source_defs(), {'system': {'type': 'native', 'uri': 'system',
+                                                        'use-cwuri-as-url': False}})
         self.restore_connection() # proper way to close cnx
         self.assertRaises(ProgrammingError, cnx.get_schema)
         self.assertRaises(ProgrammingError, cnx.source_defs)
--- a/test/unittest_entity.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/test/unittest_entity.py	Fri Dec 09 12:08:27 2011 +0100
@@ -572,7 +572,7 @@
         self.assertEqual(person.rest_path(), 'personne/doe')
         # ambiguity test
         person2 = req.create_entity('Personne', prenom=u'remi', nom=u'doe')
-        person.clear_all_caches()
+        person.cw_clear_all_caches()
         self.assertEqual(person.rest_path(), 'personne/eid/%s' % person.eid)
         self.assertEqual(person2.rest_path(), 'personne/eid/%s' % person2.eid)
         # unique attr with None value (wikiid in this case)
@@ -610,7 +610,9 @@
         req = self.request()
         note = req.create_entity('Note', type=u'z')
         metainf = note.cw_metainformation()
-        self.assertEqual(metainf, {'source': {'type': 'native', 'uri': 'system'}, 'type': u'Note', 'extid': None})
+        self.assertEqual(metainf, {'source': {'type': 'native', 'uri': 'system',
+                                              'use-cwuri-as-url': False},
+                                   'type': u'Note', 'extid': None})
         self.assertEqual(note.absolute_url(), 'http://testing.fr/cubicweb/note/%s' % note.eid)
         metainf['source'] = metainf['source'].copy()
         metainf['source']['base-url']  = 'http://cubicweb2.com/'
--- a/test/unittest_req.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/test/unittest_req.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -15,12 +15,14 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+
 from logilab.common.testlib import TestCase, unittest_main
+from cubicweb import ObjectNotFound
 from cubicweb.req import RequestSessionBase
 from cubicweb.devtools.testlib import CubicWebTC
 from cubicweb import Unauthorized
 
-class RebuildURLTC(TestCase):
+class RequestTC(TestCase):
     def test_rebuild_url(self):
         rebuild_url = RequestSessionBase(None).rebuild_url
         self.assertEqual(rebuild_url('http://logilab.fr?__message=pouet', __message='hop'),
@@ -49,5 +51,13 @@
         self.assertRaises(Unauthorized, req.ensure_ro_rql, 'SET X login "toto" WHERE X is CWUser')
         self.assertRaises(Unauthorized, req.ensure_ro_rql, '   SET X login "toto" WHERE X is CWUser   ')
 
+
+class RequestCWTC(CubicWebTC):
+    def test_view_catch_ex(self):
+        req = self.request()
+        rset = self.execute('CWUser X WHERE X login "hop"')
+        self.assertEqual(req.view('oneline', rset, 'null'), '')
+        self.assertRaises(ObjectNotFound, req.view, 'onelinee', rset, 'null')
+
 if __name__ == '__main__':
     unittest_main()
--- a/test/unittest_rqlrewrite.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/test/unittest_rqlrewrite.py	Fri Dec 09 12:08:27 2011 +0100
@@ -21,9 +21,8 @@
 from yams import BadSchemaDefinition
 from rql import parse, nodes, RQLHelper
 
-from cubicweb import Unauthorized
+from cubicweb import Unauthorized, rqlrewrite
 from cubicweb.schema import RRQLExpression, ERQLExpression
-from cubicweb.rqlrewrite import RQLRewriter
 from cubicweb.devtools import repotest, TestServerConfiguration
 
 
@@ -62,9 +61,10 @@
             @staticmethod
             def simplify(mainrqlst, needcopy=False):
                 rqlhelper.simplify(rqlst, needcopy)
-    rewriter = RQLRewriter(mock_object(vreg=FakeVReg, user=(mock_object(eid=1))))
+    rewriter = rqlrewrite.RQLRewriter(
+        mock_object(vreg=FakeVReg, user=(mock_object(eid=1))))
     snippets = []
-    for v, exprs in snippets_map.items():
+    for v, exprs in sorted(snippets_map.items()):
         rqlexprs = [isinstance(snippet, basestring)
                     and mock_object(snippet_rqlst=parse('Any X WHERE '+snippet).children[0],
                                     expression='Any X WHERE '+snippet)
@@ -210,8 +210,8 @@
                         }, {})
         # XXX suboptimal
         self.failUnlessEqual(rqlst.as_string(),
-                             "Any C,A,R WITH A,R,C BEING "
-                             "(Any A,R,C WHERE A ref R, A? inlined_card C, "
+                             "Any C,A,R WITH A,C,R BEING "
+                             "(Any A,C,R WHERE A? inlined_card C, A ref R, "
                              "(A is NULL) OR (EXISTS(A inlined_card B, B require_permission D, "
                              "B is Card, D is CWPermission)), "
                              "A is Affaire, C is Card, EXISTS(C require_permission E, E is CWPermission))")
@@ -236,6 +236,18 @@
                                            ('A2', 'X'): (c2,),
                                            }, {})
 
+    def test_optional_var_inlined_linked(self):
+        c1 = ('X require_permission P')
+        c2 = ('X inlined_card O, O require_permission P')
+        rqlst = parse('Any A,W WHERE A inlined_card C?, C inlined_note N, '
+                      'N inlined_affaire W')
+        rewrite(rqlst, {('C', 'X'): (c1,)}, {})
+        self.failUnlessEqual(rqlst.as_string(),
+                             'Any A,W WHERE A inlined_card C?, A is Affaire '
+                             'WITH C,N,W BEING (Any C,N,W WHERE C inlined_note N, '
+                             'N inlined_affaire W, EXISTS(C require_permission B), '
+                             'C is Card, N is Note, W is Affaire)')
+
     def test_relation_optimization_1_lhs(self):
         # since Card in_state State as monovalued cardinality, the in_state
         # relation used in the rql expression can be ignored and S replaced by
@@ -246,6 +258,7 @@
         self.failUnlessEqual(rqlst.as_string(),
                              "Any C WHERE C in_state STATE, C is Card, "
                              "EXISTS(STATE name 'hop'), STATE is State")
+
     def test_relation_optimization_1_rhs(self):
         snippet = ('TW subworkflow_exit X, TW name "hop"')
         rqlst = parse('WorkflowTransition C WHERE C subworkflow_exit EXIT')
--- a/test/unittest_rset.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/test/unittest_rset.py	Fri Dec 09 12:08:27 2011 +0100
@@ -71,6 +71,13 @@
         result = list(attr_desc_iterator(select, col, 2))
         self.assertEqual(result, [])
 
+    def test_subquery_callfunc_2(self):
+        rql = ('Any X,S,L WHERE X in_state S WITH X, L BEING (Any X,MAX(L) GROUPBY X WHERE X is CWUser, T wf_info_for X, T creation_date L)')
+        rqlst = parse(rql)
+        select, col = rqlst.locate_subquery(0, 'CWUser', None)
+        result = list(attr_desc_iterator(select, col, 0))
+        self.assertEqual(result, [(1, 'in_state', 'subject')])
+
 
 class ResultSetTC(CubicWebTC):
 
@@ -107,7 +114,7 @@
         self.compare_urls(req.build_url('view', _restpath=''), baseurl)
 
 
-    def test_resultset_build(self):
+    def test_build(self):
         """test basic build of a ResultSet"""
         rs = ResultSet([1,2,3], 'CWGroup X', description=['CWGroup', 'CWGroup', 'CWGroup'])
         self.assertEqual(rs.rowcount, 3)
@@ -115,7 +122,7 @@
         self.assertEqual(rs.description, ['CWGroup', 'CWGroup', 'CWGroup'])
 
 
-    def test_resultset_limit(self):
+    def test_limit(self):
         rs = ResultSet([[12000, 'adim'], [13000, 'syt'], [14000, 'nico']],
                        'Any U,L where U is CWUser, U login L',
                        description=[['CWUser', 'String']] * 3)
@@ -128,8 +135,30 @@
         self.assertEqual(rs.limit(2, offset=2).rows, [[14000, 'nico']])
         self.assertEqual(rs.limit(2, offset=3).rows, [])
 
+    def test_limit_2(self):
+        req = self.request()
+        # drop user from cache for the sake of this test
+        req.drop_entity_cache(req.user.eid)
+        rs = req.execute('Any E,U WHERE E is CWEType, E created_by U')
+        # get entity on row 9. This will fill its created_by relation cache,
+        # with cwuser on row 9 as well
+        e1 = rs.get_entity(9, 0)
+        # get entity on row 10. This will fill its created_by relation cache,
+        # with cwuser built on row 9
+        e2 = rs.get_entity(10, 0)
+        # limit result set from row 10
+        rs.limit(1, 10, inplace=True)
+        # get back eid
+        e = rs.get_entity(0, 0)
+        self.assertTrue(e2 is e)
+        # rs.limit has properly removed cwuser for request cache, but it's
+        # still referenced by e/e2 relation cache
+        u = e.created_by[0]
+        # now ensure this doesn't trigger IndexError because cwuser.cw_row is 9
+        # while now rset has only one row
+        u.cw_rset[u.cw_row]
 
-    def test_resultset_filter(self):
+    def test_filter(self):
         rs = ResultSet([[12000, 'adim'], [13000, 'syt'], [14000, 'nico']],
                        'Any U,L where U is CWUser, U login L',
                        description=[['CWUser', 'String']] * 3)
@@ -142,7 +171,7 @@
         self.assertEqual(len(rs2), 2)
         self.assertEqual([login for _, login in rs2], ['adim', 'syt'])
 
-    def test_resultset_transform(self):
+    def test_transform(self):
         rs = ResultSet([[12, 'adim'], [13, 'syt'], [14, 'nico']],
                        'Any U,L where U is CWUser, U login L',
                        description=[['CWUser', 'String']] * 3)
@@ -154,7 +183,7 @@
         self.assertEqual(len(rs2), 3)
         self.assertEqual(list(rs2), [['adim'],['syt'],['nico']])
 
-    def test_resultset_sort(self):
+    def test_sort(self):
         rs = ResultSet([[12000, 'adim'], [13000, 'syt'], [14000, 'nico']],
                        'Any U,L where U is CWUser, U login L',
                        description=[['CWUser', 'String']] * 3)
@@ -179,7 +208,7 @@
         # make sure rs is unchanged
         self.assertEqual([login for _, login in rs], ['adim', 'syt', 'nico'])
 
-    def test_resultset_split(self):
+    def test_split(self):
         rs = ResultSet([[12000, 'adim', u'Adim chez les pinguins'],
                         [12000, 'adim', u'Jardiner facile'],
                         [13000, 'syt',  u'Le carrelage en 42 leçons'],
@@ -457,5 +486,6 @@
         self.assertIsInstance(str(rset), basestring)
         self.assertEqual(len(str(rset).splitlines()), 1)
 
+
 if __name__ == '__main__':
     unittest_main()
--- a/test/unittest_schema.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/test/unittest_schema.py	Fri Dec 09 12:08:27 2011 +0100
@@ -29,7 +29,7 @@
 from yams import ValidationError, BadSchemaDefinition
 from yams.constraints import SizeConstraint, StaticVocabularyConstraint
 from yams.buildobjs import RelationDefinition, EntityType, RelationType
-from yams.reader import PyFileReader
+from yams.reader import fill_schema
 
 from cubicweb.schema import (
     CubicWebSchema, CubicWebEntitySchema, CubicWebSchemaLoader,
@@ -159,7 +159,7 @@
         self.assert_(isinstance(schema, CubicWebSchema))
         self.assertEqual(schema.name, 'data')
         entities = sorted([str(e) for e in schema.entities()])
-        expected_entities = ['BaseTransition', 'Bookmark', 'Boolean', 'Bytes', 'Card',
+        expected_entities = ['BaseTransition', 'BigInt', 'Bookmark', 'Boolean', 'Bytes', 'Card',
                              'Date', 'Datetime', 'Decimal',
                              'CWCache', 'CWConstraint', 'CWConstraintType', 'CWEType',
                              'CWAttribute', 'CWGroup', 'EmailAddress', 'CWRelation',
@@ -194,7 +194,7 @@
                               'from_entity', 'from_state', 'fulltext_container', 'fulltextindexed',
 
                               'has_text',
-                              'identity', 'in_group', 'in_state', 'indexed',
+                              'identity', 'in_group', 'in_state', 'in_synchronization', 'indexed',
                               'initial_state', 'inlined', 'internationalizable', 'is', 'is_instance_of',
 
                               'label', 'last_login_time', 'latest_retrieval', 'lieu', 'login',
@@ -260,18 +260,23 @@
         self.assertEqual([x.expression for x in aschema.get_rqlexprs('update')],
                           ['U has_update_permission X'])
 
+    def test_nonregr_allowed_type_names(self):
+        schema = CubicWebSchema('Test Schema')
+        schema.add_entity_type(EntityType('NaN'))
+
+
 class BadSchemaTC(TestCase):
     def setUp(self):
         self.loader = CubicWebSchemaLoader()
         self.loader.defined = {}
         self.loader.loaded_files = []
         self.loader.post_build_callbacks = []
-        self.loader._pyreader = PyFileReader(self.loader)
 
     def _test(self, schemafile, msg):
         self.loader.handle_file(join(DATADIR, schemafile))
+        sch = self.loader.schemacls('toto')
         with self.assertRaises(BadSchemaDefinition) as cm:
-            self.loader._build_schema('toto', False)
+            fill_schema(sch, self.loader.defined, False)
         self.assertEqual(str(cm.exception), msg)
 
     def test_lowered_etype(self):
--- a/test/unittest_selectors.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/test/unittest_selectors.py	Fri Dec 09 12:08:27 2011 +0100
@@ -26,7 +26,7 @@
 from cubicweb.appobject import Selector, AndSelector, OrSelector
 from cubicweb.selectors import (is_instance, adaptable, match_user_groups,
                                 multi_lines_rset, score_entity, is_in_state,
-                                on_transition, rql_condition)
+                                on_transition, rql_condition, relation_possible)
 from cubicweb.web import action
 
 
@@ -102,6 +102,10 @@
         self.assertIs(csel.search_selector(is_instance), sel)
         csel = AndSelector(Selector(), sel)
         self.assertIs(csel.search_selector(is_instance), sel)
+        self.assertIs(csel.search_selector((AndSelector, OrSelector)), csel)
+        self.assertIs(csel.search_selector((OrSelector, AndSelector)), csel)
+        self.assertIs(csel.search_selector((is_instance, score_entity)),  sel)
+        self.assertIs(csel.search_selector((score_entity, is_instance)), sel)
 
     def test_inplace_and(self):
         selector = _1_()
@@ -140,35 +144,6 @@
         self.assertEqual(selector(None), 0)
 
 
-class IsInStateSelectorTC(CubicWebTC):
-    def setup_database(self):
-        wf = self.shell().add_workflow("testwf", 'StateFull', default=True)
-        initial = wf.add_state(u'initial', initial=True)
-        final = wf.add_state(u'final')
-        wf.add_transition(u'forward', (initial,), final)
-
-    def test_initial_state(self):
-        req = self.request()
-        entity = req.create_entity('StateFull')
-        selector = is_in_state(u'initial')
-        self.commit()
-        score = selector(entity.__class__, None, entity=entity)
-        self.assertEqual(score, 1)
-
-    def test_final_state(self):
-        req = self.request()
-        entity = req.create_entity('StateFull')
-        selector = is_in_state(u'initial')
-        self.commit()
-        entity.cw_adapt_to('IWorkflowable').fire_transition(u'forward')
-        self.commit()
-        score = selector(entity.__class__, None, entity=entity)
-        self.assertEqual(score, 0)
-        selector = is_in_state(u'final')
-        score = selector(entity.__class__, None, entity=entity)
-        self.assertEqual(score, 1)
-
-
 class ImplementsSelectorTC(CubicWebTC):
     def test_etype_priority(self):
         req = self.request()
@@ -189,11 +164,17 @@
         self.assertEqual(is_instance('BaseTransition').score_class(cls, self.request()),
                           3)
 
+    def test_outer_join(self):
+        req = self.request()
+        rset = req.execute('Any U,B WHERE B? bookmarked_by U, U login "anon"')
+        self.assertEqual(is_instance('Bookmark')(None, req, rset=rset, row=0, col=1),
+                         0)
+
 
 class WorkflowSelectorTC(CubicWebTC):
     def _commit(self):
         self.commit()
-        self.wf_entity.clear_all_caches()
+        self.wf_entity.cw_clear_all_caches()
 
     def setup_database(self):
         wf = self.shell().add_workflow("wf_test", 'StateFull', default=True)
@@ -315,6 +296,27 @@
         self.assertEqual(selector(None, self.req, rset=self.rset), 0)
 
 
+class RelationPossibleTC(CubicWebTC):
+
+    def test_rqlst_1(self):
+        req = self.request()
+        selector = relation_possible('in_group')
+        select = self.vreg.parse(req, 'Any X WHERE X is CWUser').children[0]
+        score = selector(None, req, rset=1,
+                         select=select, filtered_variable=select.defined_vars['X'])
+        self.assertEqual(score, 1)
+
+    def test_rqlst_2(self):
+        req = self.request()
+        selector = relation_possible('in_group')
+        select = self.vreg.parse(req, 'Any 1, COUNT(X) WHERE X is CWUser, X creation_date XD, '
+                                 'Y creation_date YD, Y is CWGroup '
+                                 'HAVING DAY(XD)=DAY(YD)').children[0]
+        score = selector(None, req, rset=1,
+                         select=select, filtered_variable=select.defined_vars['X'])
+        self.assertEqual(score, 1)
+
+
 class MatchUserGroupsTC(CubicWebTC):
     def test_owners_group(self):
         """tests usage of 'owners' group with match_user_group"""
--- a/test/unittest_utils.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/test/unittest_utils.py	Fri Dec 09 12:08:27 2011 +0100
@@ -21,9 +21,12 @@
 import decimal
 import datetime
 
+
 from logilab.common.testlib import TestCase, DocTest, unittest_main
 
-from cubicweb.utils import make_uid, UStringIO, SizeConstrainedList, RepeatList
+from cubicweb.devtools.testlib import CubicWebTC
+from cubicweb.utils import (make_uid, UStringIO, SizeConstrainedList,
+                            RepeatList, HTMLHead)
 from cubicweb.entity import Entity
 
 try:
@@ -155,6 +158,102 @@
     def test_encoding_unknown_stuff(self):
         self.assertEqual(self.encode(TestCase), 'null')
 
+class HTMLHeadTC(CubicWebTC):
+    def test_concat_urls(self):
+        base_url = u'http://test.fr/data/'
+        head = HTMLHead(base_url)
+        urls = [base_url + u'bob1.js',
+                base_url + u'bob2.js',
+                base_url + u'bob3.js']
+        result = head.concat_urls(urls)
+        expected = u'http://test.fr/data/??bob1.js,bob2.js,bob3.js'
+        self.assertEqual(result, expected)
+
+    def test_group_urls(self):
+        base_url = u'http://test.fr/data/'
+        head = HTMLHead(base_url)
+        urls_spec = [(base_url + u'bob0.js', None),
+                     (base_url + u'bob1.js', None),
+                     (u'http://ext.com/bob2.js', None),
+                     (u'http://ext.com/bob3.js', None),
+                     (base_url + u'bob4.css', 'all'),
+                     (base_url + u'bob5.css', 'all'),
+                     (base_url + u'bob6.css', 'print'),
+                     (base_url + u'bob7.css', 'print'),
+                     (base_url + u'bob8.css', ('all', u'[if IE 8]')),
+                     (base_url + u'bob9.css', ('print', u'[if IE 8]'))
+                     ]
+        result = head.group_urls(urls_spec)
+        expected = [(base_url + u'??bob0.js,bob1.js', None),
+                    (u'http://ext.com/bob2.js', None),
+                    (u'http://ext.com/bob3.js', None),
+                    (base_url + u'??bob4.css,bob5.css', 'all'),
+                    (base_url + u'??bob6.css,bob7.css', 'print'),
+                    (base_url + u'bob8.css', ('all', u'[if IE 8]')),
+                    (base_url + u'bob9.css', ('print', u'[if IE 8]'))
+                    ]
+        self.assertEqual(list(result), expected)
+
+    def test_getvalue_with_concat(self):
+        base_url = u'http://test.fr/data/'
+        head = HTMLHead(base_url)
+        head.add_js(base_url + u'bob0.js')
+        head.add_js(base_url + u'bob1.js')
+        head.add_js(u'http://ext.com/bob2.js')
+        head.add_js(u'http://ext.com/bob3.js')
+        head.add_css(base_url + u'bob4.css')
+        head.add_css(base_url + u'bob5.css')
+        head.add_css(base_url + u'bob6.css', 'print')
+        head.add_css(base_url + u'bob7.css', 'print')
+        head.add_ie_css(base_url + u'bob8.css')
+        head.add_ie_css(base_url + u'bob9.css', 'print', u'[if lt IE 7]')
+        result = head.getvalue()
+        expected = u"""<head>
+<link rel="stylesheet" type="text/css" media="all" href="http://test.fr/data/??bob4.css,bob5.css"/>
+<link rel="stylesheet" type="text/css" media="print" href="http://test.fr/data/??bob6.css,bob7.css"/>
+<!--[if lt IE 8]>
+<link rel="stylesheet" type="text/css" media="all" href="http://test.fr/data/bob8.css"/>
+<!--[if lt IE 7]>
+<link rel="stylesheet" type="text/css" media="print" href="http://test.fr/data/bob9.css"/>
+<![endif]--> 
+<script type="text/javascript" src="http://test.fr/data/??bob0.js,bob1.js"></script>
+<script type="text/javascript" src="http://ext.com/bob2.js"></script>
+<script type="text/javascript" src="http://ext.com/bob3.js"></script>
+</head>
+"""
+        self.assertEqual(result, expected)
+
+    def test_getvalue_without_concat(self):
+        base_url = u'http://test.fr/data/'
+        head = HTMLHead()
+        head.add_js(base_url + u'bob0.js')
+        head.add_js(base_url + u'bob1.js')
+        head.add_js(u'http://ext.com/bob2.js')
+        head.add_js(u'http://ext.com/bob3.js')
+        head.add_css(base_url + u'bob4.css')
+        head.add_css(base_url + u'bob5.css')
+        head.add_css(base_url + u'bob6.css', 'print')
+        head.add_css(base_url + u'bob7.css', 'print')
+        head.add_ie_css(base_url + u'bob8.css')
+        head.add_ie_css(base_url + u'bob9.css', 'print', u'[if lt IE 7]')
+        result = head.getvalue()
+        expected = u"""<head>
+<link rel="stylesheet" type="text/css" media="all" href="http://test.fr/data/bob4.css"/>
+<link rel="stylesheet" type="text/css" media="all" href="http://test.fr/data/bob5.css"/>
+<link rel="stylesheet" type="text/css" media="print" href="http://test.fr/data/bob6.css"/>
+<link rel="stylesheet" type="text/css" media="print" href="http://test.fr/data/bob7.css"/>
+<!--[if lt IE 8]>
+<link rel="stylesheet" type="text/css" media="all" href="http://test.fr/data/bob8.css"/>
+<!--[if lt IE 7]>
+<link rel="stylesheet" type="text/css" media="print" href="http://test.fr/data/bob9.css"/>
+<![endif]--> 
+<script type="text/javascript" src="http://test.fr/data/bob0.js"></script>
+<script type="text/javascript" src="http://test.fr/data/bob1.js"></script>
+<script type="text/javascript" src="http://ext.com/bob2.js"></script>
+<script type="text/javascript" src="http://ext.com/bob3.js"></script>
+</head>
+"""
+        self.assertEqual(result, expected)
 
 class DocTest(DocTest):
     from cubicweb import utils as module
--- a/toolsutils.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/toolsutils.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -36,7 +36,7 @@
 from logilab.common.compat import any
 from logilab.common.shellutils import ASK
 
-from cubicweb import warning
+from cubicweb import warning # pylint: disable=E0611
 from cubicweb import ConfigurationError, ExecutionError
 
 def underline_title(title, car='-'):
@@ -159,15 +159,11 @@
         print '-> set permissions to 0600 for %s' % filepath
     chmod(filepath, 0600)
 
-def read_config(config_file):
-    """read the instance configuration from a file and return it as a
-    dictionnary
-
-    :type config_file: str
-    :param config_file: path to the configuration file
-
-    :rtype: dict
-    :return: a dictionary with specified values associated to option names
+def read_config(config_file, raise_if_unreadable=False):
+    """read some simple configuration from `config_file` and return it as a
+    dictionary. If `raise_if_unreadable` is false (the default), an empty
+    dictionary will be returned if the file is inexistant or unreadable, else
+    :exc:`ExecutionError` will be raised.
     """
     from logilab.common.fileutils import lines
     config = current = {}
@@ -190,8 +186,12 @@
             value = value.strip()
             current[option] = value or None
     except IOError, ex:
-        warning('missing or non readable configuration file %s (%s)',
-                config_file, ex)
+        if raise_if_unreadable:
+            raise ExecutionError('%s. Are you logged with the correct user '
+                                 'to use this instance?' % ex)
+        else:
+            warning('missing or non readable configuration file %s (%s)',
+                    config_file, ex)
     return config
 
 
--- a/uilib.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/uilib.py	Fri Dec 09 12:08:27 2011 +0100
@@ -31,7 +31,7 @@
 from logilab.mtconverter import xml_escape, html_unescape
 from logilab.common.date import ustrftime
 
-from cubicweb.utils import json_dumps
+from cubicweb.utils import JSString, json_dumps
 
 
 def rql_for_eid(eid):
@@ -51,31 +51,65 @@
     assert eid is not None
     return '%s:%s' % (name, eid)
 
+def print_bytes(value, req, props, displaytime=True):
+    return u''
+
+def print_string(value, req, props, displaytime=True):
+    # don't translate empty value if you don't want strange results
+    if props is not None and value and props.get('internationalizable'):
+        return req._(value)
+    return value
+
+def print_date(value, req, props, displaytime=True):
+    return ustrftime(value, req.property_value('ui.date-format'))
+
+def print_time(value, req, props, displaytime=True):
+    return ustrftime(value, req.property_value('ui.time-format'))
+
+def print_tztime(value, req, props, displaytime=True):
+    return ustrftime(value, req.property_value('ui.time-format')) + u' UTC'
+
+def print_datetime(value, req, props, displaytime=True):
+    if displaytime:
+        return ustrftime(value, req.property_value('ui.datetime-format'))
+    return ustrftime(value, req.property_value('ui.date-format'))
+
+def print_tzdatetime(value, req, props, displaytime=True):
+    if displaytime:
+        return ustrftime(value, req.property_value('ui.datetime-format')) + u' UTC'
+    return ustrftime(value, req.property_value('ui.date-format'))
+
+def print_boolean(value, req, props, displaytime=True):
+    if value:
+        return req._('yes')
+    return req._('no')
+
+def print_float(value, req, props, displaytime=True):
+    return unicode(req.property_value('ui.float-format') % value)
+
+PRINTERS = {
+    'Bytes': print_bytes,
+    'String': print_string,
+    'Date': print_date,
+    'Time': print_time,
+    'TZTime': print_tztime,
+    'Datetime': print_datetime,
+    'TZDatetime': print_tzdatetime,
+    'Boolean': print_boolean,
+    'Float': print_float,
+    'Decimal': print_float,
+    # XXX Interval
+    }
+
 def printable_value(req, attrtype, value, props=None, displaytime=True):
     """return a displayable value (i.e. unicode string)"""
-    if value is None or attrtype == 'Bytes':
+    if value is None:
         return u''
-    if attrtype == 'String':
-        # don't translate empty value if you don't want strange results
-        if props is not None and value and props.get('internationalizable'):
-            return req._(value)
-        return value
-    if attrtype == 'Date':
-        return ustrftime(value, req.property_value('ui.date-format'))
-    if attrtype in ('Time', 'TZTime'):
-        return ustrftime(value, req.property_value('ui.time-format'))
-    if attrtype in ('Datetime', 'TZDatetime'):
-        if displaytime:
-            return ustrftime(value, req.property_value('ui.datetime-format'))
-        return ustrftime(value, req.property_value('ui.date-format'))
-    if attrtype == 'Boolean':
-        if value:
-            return req._('yes')
-        return req._('no')
-    if attrtype in ('Float', 'Decimal'):
-        value = req.property_value('ui.float-format') % value
-    # XXX Interval
-    return unicode(value)
+    try:
+        printer = PRINTERS[attrtype]
+    except KeyError:
+        return unicode(value)
+    return printer(value, req, props, displaytime)
 
 
 # text publishing #############################################################
@@ -127,94 +161,84 @@
 
 REM_ROOT_HTML_TAGS = re.compile('</(body|html)>', re.U)
 
-try:
-    from lxml import etree, html
-    from lxml.html import clean, defs
+from lxml import etree, html
+from lxml.html import clean, defs
 
-    ALLOWED_TAGS = (defs.general_block_tags | defs.list_tags | defs.table_tags |
-                    defs.phrase_tags | defs.font_style_tags |
-                    set(('span', 'a', 'br', 'img', 'map', 'area', 'sub', 'sup'))
-                    )
+ALLOWED_TAGS = (defs.general_block_tags | defs.list_tags | defs.table_tags |
+                defs.phrase_tags | defs.font_style_tags |
+                set(('span', 'a', 'br', 'img', 'map', 'area', 'sub', 'sup'))
+                )
 
-    CLEANER = clean.Cleaner(allow_tags=ALLOWED_TAGS, remove_unknown_tags=False,
-                            style=True, safe_attrs_only=True,
-                            add_nofollow=False,
-                            )
+CLEANER = clean.Cleaner(allow_tags=ALLOWED_TAGS, remove_unknown_tags=False,
+                        style=True, safe_attrs_only=True,
+                        add_nofollow=False,
+                        )
 
-    def soup2xhtml(data, encoding):
-        """tidy html soup by allowing some element tags and return the result
-        """
-        # remove spurious </body> and </html> tags, then normalize line break
-        # (see http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.7.1)
-        data = REM_ROOT_HTML_TAGS.sub('', u'\n'.join(data.splitlines()))
-        xmltree = etree.HTML(CLEANER.clean_html('<div>%s</div>' % data))
-        # NOTE: lxml 2.0 does support encoding='unicode', but last time I (syt)
-        # tried I got weird results (lxml 2.2.8)
-        body = etree.tostring(xmltree[0], encoding=encoding)
-        # remove <body> and </body> and decode to unicode
-        snippet = body[6:-7].decode(encoding)
-        # take care to bad xhtml (for instance starting with </div>) which
-        # may mess with the <div> we added below. Only remove it if it's
-        # still there...
-        if snippet.startswith('<div>') and snippet.endswith('</div>'):
-            snippet = snippet[5:-6]
-        return snippet
+def soup2xhtml(data, encoding):
+    """tidy html soup by allowing some element tags and return the result
+    """
+    # remove spurious </body> and </html> tags, then normalize line break
+    # (see http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.7.1)
+    data = REM_ROOT_HTML_TAGS.sub('', u'\n'.join(data.splitlines()))
+    xmltree = etree.HTML(CLEANER.clean_html('<div>%s</div>' % data))
+    # NOTE: lxml 2.0 does support encoding='unicode', but last time I (syt)
+    # tried I got weird results (lxml 2.2.8)
+    body = etree.tostring(xmltree[0], encoding=encoding)
+    # remove <body> and </body> and decode to unicode
+    snippet = body[6:-7].decode(encoding)
+    # take care to bad xhtml (for instance starting with </div>) which
+    # may mess with the <div> we added below. Only remove it if it's
+    # still there...
+    if snippet.startswith('<div>') and snippet.endswith('</div>'):
+        snippet = snippet[5:-6]
+    return snippet
 
-        # lxml.Cleaner envelops text elements by internal logic (not accessible)
-        # see http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.7.1
-        # TODO drop attributes in elements
-        # TODO add policy configuration (content only, embedded content, ...)
-        # XXX this is buggy for "<p>text1</p><p>text2</p>"...
-        # XXX drop these two snippets action and follow the lxml behaviour
-        # XXX (tests need to be updated)
-        # if snippet.startswith('<div>') and snippet.endswith('</div>'):
-        #     snippet = snippet[5:-6]
-        # if snippet.startswith('<p>') and snippet.endswith('</p>'):
-        #     snippet = snippet[3:-4]
-        return snippet.decode(encoding)
-
-except (ImportError, AttributeError):
-    # gae environment: lxml not available
-    # fallback implementation
-    def soup2xhtml(data, encoding):
-        # normalize line break
-        # see http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.7.1
-        return u'\n'.join(data.splitlines())
-else:
-
-    if hasattr(etree.HTML('<div>test</div>'), 'iter'): # XXX still necessary?
+    # lxml.Cleaner envelops text elements by internal logic (not accessible)
+    # see http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.7.1
+    # TODO drop attributes in elements
+    # TODO add policy configuration (content only, embedded content, ...)
+    # XXX this is buggy for "<p>text1</p><p>text2</p>"...
+    # XXX drop these two snippets action and follow the lxml behaviour
+    # XXX (tests need to be updated)
+    # if snippet.startswith('<div>') and snippet.endswith('</div>'):
+    #     snippet = snippet[5:-6]
+    # if snippet.startswith('<p>') and snippet.endswith('</p>'):
+    #     snippet = snippet[3:-4]
+    return snippet.decode(encoding)
 
-        def safe_cut(text, length):
-            """returns an html document of length <length> based on <text>,
-            and cut is necessary.
-            """
-            if text is None:
-                return u''
-            dom = etree.HTML(text)
-            curlength = 0
-            add_ellipsis = False
-            for element in dom.iter():
-                if curlength >= length:
-                    parent = element.getparent()
-                    parent.remove(element)
-                    if curlength == length and (element.text or element.tail):
-                        add_ellipsis = True
-                else:
-                    if element.text is not None:
-                        element.text = cut(element.text, length - curlength)
-                        curlength += len(element.text)
-                    if element.tail is not None:
-                        if curlength < length:
-                            element.tail = cut(element.tail, length - curlength)
-                            curlength += len(element.tail)
-                        elif curlength == length:
-                            element.tail = '...'
-                        else:
-                            element.tail = ''
-            text = etree.tounicode(dom[0])[6:-7] # remove wrapping <body></body>
-            if add_ellipsis:
-                return text + u'...'
-            return text
+if hasattr(etree.HTML('<div>test</div>'), 'iter'): # XXX still necessary?
+    # pylint: disable=E0102
+    def safe_cut(text, length):
+        """returns an html document of length <length> based on <text>,
+        and cut is necessary.
+        """
+        if text is None:
+            return u''
+        dom = etree.HTML(text)
+        curlength = 0
+        add_ellipsis = False
+        for element in dom.iter():
+            if curlength >= length:
+                parent = element.getparent()
+                parent.remove(element)
+                if curlength == length and (element.text or element.tail):
+                    add_ellipsis = True
+            else:
+                if element.text is not None:
+                    element.text = cut(element.text, length - curlength)
+                    curlength += len(element.text)
+                if element.tail is not None:
+                    if curlength < length:
+                        element.tail = cut(element.tail, length - curlength)
+                        curlength += len(element.tail)
+                    elif curlength == length:
+                        element.tail = '...'
+                    else:
+                        element.tail = ''
+        text = etree.tounicode(dom[0])[6:-7] # remove wrapping <body></body>
+        if add_ellipsis:
+            return text + u'...'
+        return text
 
 def text_cut(text, nbwords=30, gotoperiod=True):
     """from the given plain text, return a text with at least <nbwords> words,
@@ -275,16 +299,23 @@
         self.args = args
         self.parent = parent
     def __unicode__(self):
-        args = u','.join(json_dumps(arg) for arg in self.args)
+        args = []
+        for arg in self.args:
+            if isinstance(arg, JSString):
+                args.append(arg)
+            else:
+                args.append(json_dumps(arg))
         if self.parent:
-            return u'%s(%s)' % (self.parent, args)
-        return args
+            return u'%s(%s)' % (self.parent, ','.join(args))
+        return ','.join(args)
 
 class _JS(object):
     def __getattr__(self, attr):
         return _JSId(attr)
 
-"""magic object to return strings suitable to call some javascript function with
+js = _JS()
+js.__doc__ = """\
+magic object to return strings suitable to call some javascript function with
 the given arguments (which should be correctly typed).
 
 >>> str(js.pouet(1, "2"))
@@ -292,9 +323,10 @@
 >>> str(js.cw.pouet(1, "2"))
 'cw.pouet(1,"2")'
 >>> str(js.cw.pouet(1, "2").pouet(None))
-'cw.pouet(1,"2").pouet(null)')
+'cw.pouet(1,"2").pouet(null)'
+>>> str(js.cw.pouet(1, JSString("$")).pouet(None))
+'cw.pouet(1,$).pouet(null)'
 """
-js = _JS()
 
 def domid(string):
     """return a valid DOM id from a string (should also be usable in jQuery
@@ -364,10 +396,10 @@
 def exc_message(ex, encoding):
     try:
         return unicode(ex)
-    except:
+    except Exception:
         try:
             return unicode(str(ex), encoding, 'replace')
-        except:
+        except Exception:
             return unicode(repr(ex), encoding, 'replace')
 
 
@@ -381,7 +413,7 @@
     res.append(u'\n')
     try:
         res.append(u'\t Error: %s\n' % exception)
-    except:
+    except Exception:
         pass
     return u'\n'.join(res)
 
--- a/utils.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/utils.py	Fri Dec 09 12:08:27 2011 +0100
@@ -51,20 +51,6 @@
     return str(key) + uuid4().hex
 
 
-def dump_class(cls, clsname):
-    """create copy of a class by creating an empty class inheriting
-    from the given cls.
-
-    Those class will be used as place holder for attribute and relation
-    description
-    """
-    # type doesn't accept unicode name
-    # return type.__new__(type, str(clsname), (cls,), {})
-    # __autogenerated__ attribute is just a marker
-    return type(str(clsname), (cls,), {'__autogenerated__': True,
-                                       '__doc__': cls.__doc__,
-                                       '__module__': cls.__module__})
-
 def support_args(callable, *argnames):
     """return true if the callable support given argument names"""
     if isinstance(callable, type):
@@ -241,7 +227,7 @@
     xhtml_safe_script_opening = u'<script type="text/javascript"><!--//--><![CDATA[//><!--\n'
     xhtml_safe_script_closing = u'\n//--><!]]></script>'
 
-    def __init__(self):
+    def __init__(self, datadir_url=None):
         super(HTMLHead, self).__init__()
         self.jsvars = []
         self.jsfiles = []
@@ -249,6 +235,7 @@
         self.ie_cssfiles = []
         self.post_inlined_scripts = []
         self.pagedata_unload = False
+        self.datadir_url = datadir_url
 
 
     def add_raw(self, rawheader):
@@ -285,7 +272,7 @@
         if jsfile not in self.jsfiles:
             self.jsfiles.append(jsfile)
 
-    def add_css(self, cssfile, media):
+    def add_css(self, cssfile, media='all'):
         """adds `cssfile` to the list of javascripts used in the webpage
 
         This function checks if the file has already been added
@@ -305,6 +292,45 @@
             self.post_inlined_scripts.append(self.js_unload_code)
             self.pagedata_unload = True
 
+    def concat_urls(self, urls):
+        """concatenates urls into one url usable by Apache mod_concat
+
+        This method returns the url without modifying it if there is only
+        one element in the list
+        :param urls: list of local urls/filenames to concatenate
+        """
+        if len(urls) == 1:
+            return urls[0]
+        len_prefix = len(self.datadir_url)
+        concated = u','.join(url[len_prefix:] for url in urls)
+        return (u'%s??%s' % (self.datadir_url, concated))
+
+    def group_urls(self, urls_spec):
+        """parses urls_spec in order to generate concatenated urls
+        for js and css includes
+
+        This method checks if the file is local and if it shares options
+        with direct neighbors
+        :param urls_spec: entire list of urls/filenames to inspect
+        """
+        concatable = []
+        prev_islocal = False
+        prev_key = None
+        for url, key in urls_spec:
+            islocal = url.startswith(self.datadir_url)
+            if concatable and (islocal != prev_islocal or key != prev_key):
+                yield (self.concat_urls(concatable), prev_key)
+                del concatable[:]
+            if not islocal:
+                yield (url, key)
+            else:
+                concatable.append(url)
+            prev_islocal = islocal
+            prev_key = key
+        if concatable:
+            yield (self.concat_urls(concatable), prev_key)
+
+
     def getvalue(self, skiphead=False):
         """reimplement getvalue to provide a consistent (and somewhat browser
         optimzed cf. http://stevesouders.com/cuzillion) order in external
@@ -322,25 +348,47 @@
                 w(vardecl + u'\n')
             w(self.xhtml_safe_script_closing)
         # 2/ css files
-        for cssfile, media in self.cssfiles:
+        for cssfile, media in (self.group_urls(self.cssfiles) if self.datadir_url else self.cssfiles):
             w(u'<link rel="stylesheet" type="text/css" media="%s" href="%s"/>\n' %
               (media, xml_escape(cssfile)))
         # 3/ ie css if necessary
         if self.ie_cssfiles:
-            for cssfile, media, iespec in self.ie_cssfiles:
+            ie_cssfiles = ((x, (y, z)) for x, y, z in self.ie_cssfiles)
+            for cssfile, (media, iespec) in (self.group_urls(ie_cssfiles) if self.datadir_url else ie_cssfiles):
                 w(u'<!--%s>\n' % iespec)
                 w(u'<link rel="stylesheet" type="text/css" media="%s" href="%s"/>\n' %
                   (media, xml_escape(cssfile)))
             w(u'<![endif]--> \n')
         # 4/ js files
-        for jsfile in self.jsfiles:
-            w(u'<script type="text/javascript" src="%s"></script>\n' %
-              xml_escape(jsfile))
+        jsfiles = ((x, None) for x in self.jsfiles)
+        for jsfile, media in self.group_urls(jsfiles) if self.datadir_url else jsfiles:
+            if skiphead:
+                # Don't insert <script> tags directly as they would be
+                # interpreted directly by some browsers (e.g. IE).
+                # Use <pre class="script"> tags instead and let
+                # `loadAjaxHtmlHead` handle the script insertion / execution.
+                w(u'<pre class="script" src="%s"></pre>\n' %
+                  xml_escape(jsfile))
+                # FIXME: a probably better implementation might be to add
+                #        JS or CSS urls in a JS list that loadAjaxHtmlHead
+                #        would iterate on and postprocess:
+                #            cw._ajax_js_scripts.push('myscript.js')
+                #        Then, in loadAjaxHtmlHead, do something like:
+                #            jQuery.each(cw._ajax_js_script, jQuery.getScript)
+            else:
+                w(u'<script type="text/javascript" src="%s"></script>\n' %
+                  xml_escape(jsfile))
         # 5/ post inlined scripts (i.e. scripts depending on other JS files)
         if self.post_inlined_scripts:
-            w(self.xhtml_safe_script_opening)
-            w(u'\n\n'.join(self.post_inlined_scripts))
-            w(self.xhtml_safe_script_closing)
+            if skiphead:
+                for script in self.post_inlined_scripts:
+                    w(u'<pre class="script">')
+                    w(xml_escape(script))
+                    w(u'</pre>')
+            else:
+                w(self.xhtml_safe_script_opening)
+                w(u'\n\n'.join(self.post_inlined_scripts))
+                w(self.xhtml_safe_script_closing)
         header = super(HTMLHead, self).getvalue()
         if skiphead:
             return header
@@ -416,7 +464,7 @@
     else:
         import json
 except ImportError:
-    json_dumps = None
+    json_dumps = JSString = None
 
 else:
     from logilab.common.date import ustrftime
@@ -450,6 +498,40 @@
         return json.dumps(value, cls=CubicWebJsonEncoder)
 
 
+    class JSString(str):
+        """use this string sub class in values given to :func:`js_dumps` to
+        insert raw javascript chain in some JSON string
+        """
+
+    def _dict2js(d, predictable=False):
+        res = [key + ': ' + js_dumps(val, predictable)
+               for key, val in d.iteritems()]
+        return '{%s}' % ', '.join(res)
+
+    def _list2js(l, predictable=False):
+        return '[%s]' % ', '.join([js_dumps(val, predictable) for val in l])
+
+    def js_dumps(something, predictable=False):
+        """similar as :func:`json_dumps`, except values which are instances of
+        :class:`JSString` are expected to be valid javascript and will be output
+        as is
+
+        >>> js_dumps({'hop': JSString('$.hop'), 'bar': None}, predictable=True)
+        '{bar: null, hop: $.hop}'
+        >>> js_dumps({'hop': '$.hop'})
+        '{hop: "$.hop"}'
+        >>> js_dumps({'hip': {'hop': JSString('momo')}})
+        '{hip: {hop: momo}}'
+        """
+        if isinstance(something, dict):
+            return _dict2js(something, predictable)
+        if isinstance(something, list):
+            return _list2js(something, predictable)
+        if isinstance(something, JSString):
+            return something
+        return json_dumps(something)
+
+
 @deprecated('[3.7] merge_dicts is deprecated')
 def merge_dicts(dict1, dict2):
     """update a copy of `dict1` with `dict2`"""
--- a/view.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/view.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -23,6 +23,7 @@
 import types, new
 from cStringIO import StringIO
 from warnings import warn
+from functools import partial
 
 from logilab.common.deprecation import deprecated
 from logilab.mtconverter import xml_escape
@@ -174,7 +175,7 @@
         stream = self.set_stream(w)
         try:
             view_func(**context)
-        except:
+        except Exception:
             self.debug('view call %s failed (context=%s)', view_func, context)
             raise
         # return stream content if we have created it
@@ -375,7 +376,19 @@
 
     def call(self, **kwargs):
         if self.cw_rset is None:
-            self.entity_call(self.cw_extra_kwargs.pop('entity'))
+            # * cw_extra_kwargs is the place where extra selection arguments are
+            #   stored
+            # * when calling req.view('somevid', entity=entity), 'entity' ends
+            #   up in cw_extra_kwargs and kwargs
+            #
+            # handle that to avoid a TypeError with a sanity check
+            #
+            # Notice that could probably be avoided by handling entity_call in
+            # .render
+            entity = self.cw_extra_kwargs.pop('entity')
+            if 'entity' in kwargs:
+                assert kwargs.pop('entity') is entity
+            self.entity_call(entity, **kwargs)
         else:
             super(EntityView, self).call(**kwargs)
 
@@ -439,25 +452,54 @@
     category = _('anyrsetview')
 
     def columns_labels(self, mainindex=0, tr=True):
+        """compute the label of the rset columns
+
+        The logic is based on :meth:`~rql.stmts.Union.get_description`.
+
+        :param mainindex: The index of the main variable. This is a hint to get
+                          a more accurate label in various situations
+        :type mainindex:  int
+
+        :param tr: Should the label be translated ?
+        :type tr: boolean
+        """
         if tr:
-            translate = lambda val, req=self._cw: display_name(req, val)
+            translate = partial(display_name, self._cw)
         else:
-            translate = lambda val: val
+            translate = lambda val, *args,**kwargs: val
         # XXX [0] because of missing Union support
-        rqlstdescr = self.cw_rset.syntax_tree().get_description(mainindex,
-                                                                translate)[0]
+        rql_syntax_tree = self.cw_rset.syntax_tree()
+        rqlstdescr = rql_syntax_tree.get_description(mainindex, translate)[0]
         labels = []
         for colidx, label in enumerate(rqlstdescr):
-            try:
-                label = getattr(self, 'label_column_%s' % colidx)()
-            except AttributeError:
-                # compute column header
-                if label == 'Any': # find a better label
-                    label = ','.join(translate(et)
-                                     for et in self.cw_rset.column_types(colidx))
-            labels.append(label)
+            labels.append(self.column_label(colidx, label, translate))
         return labels
 
+    def column_label(self, colidx, default, translate_func=None):
+        """return the label of a specified columns index
+
+        Overwrite me if you need to compute specific label.
+
+        :param colidx: The index of the column the call computes a label for.
+        :type colidx:  int
+
+        :param default: Default value. If ``"Any"``, the default value will be
+                        recomputed as a comma-separated list of all possible
+                        etype names.
+        :type default:  string
+
+        :param translate_func: A function used to translate entity type names.
+        :type translate_func:  function
+        """
+        label = default
+        if label == 'Any':
+            etypes = self.cw_rset.column_types(colidx)
+            if translate_func is not None:
+                etypes = map(translate_func, etypes)
+            label = u','.join(etypes)
+        return label
+
+
 
 # concrete template base classes ##############################################
 
--- a/vregistry.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/vregistry.py	Fri Dec 09 12:08:27 2011 +0100
@@ -184,7 +184,10 @@
 
         raise :exc:`NoSelectableObject` if not object apply
         """
-        return self._select_best(self[__oid], *args, **kwargs)
+        obj =  self._select_best(self[__oid], *args, **kwargs)
+        if obj is None:
+            raise NoSelectableObject(args, kwargs, self[__oid] )
+        return obj
 
     def select_or_none(self, __oid, *args, **kwargs):
         """return the most specific object among those with the given oid
@@ -202,16 +205,18 @@
         context
         """
         for appobjects in self.itervalues():
-            try:
-                yield self._select_best(appobjects, *args, **kwargs)
-            except NoSelectableObject:
+            obj = self._select_best(appobjects,  *args, **kwargs)
+            if obj is None:
                 continue
+            yield obj
 
     def _select_best(self, appobjects, *args, **kwargs):
         """return an instance of the most specific object according
         to parameters
 
-        raise `NoSelectableObject` if not object apply
+        return None if no object applies (don't raise `NoSelectableObject`
+        since it's costly when searching appobjects using `possible_objects`,
+        e.g. searching for hooks).
         """
         if len(args) > 1:
             warn('[3.5] only the request param can not be named when calling select*',
@@ -224,7 +229,7 @@
             elif appobjectscore > 0 and appobjectscore == score:
                 winners.append(appobject)
         if winners is None:
-            raise NoSelectableObject(args, kwargs, appobjects)
+            return None
         if len(winners) > 1:
             # log in production environement / test, error while debugging
             msg = 'select ambiguity: %s\n(args: %s, kwargs: %s)'
--- a/web/action.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/action.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -15,7 +15,54 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""abstract action classes for CubicWeb web client"""
+"""abstract action classes for CubicWeb web client
+
+Actions are typically displayed in an action box, but can also be used
+in other parts of the interface (the user menu, the footer, etc.). The
+'order', 'category' and 'title' class attributes control how the action will
+be displayed. The 'submenu' attribute is only used for actions in the
+action box.
+
+The most important method from a developer point of view is the
+:meth:`Action.url` method, which returns a URL to which the navigation
+should be directed to perform the action.  There are two common ways of
+writing that method:
+
+* do nothing special and simply return a URL to the current rset with
+  a special view (with `self._cw.build_url(...)` for instance)
+
+* define an inner function `callback_func(req, *args)` which will do
+  the work and call it through `self._cw.user_callback(callback_func,
+  args, msg)`: this method will return a URL which calls the inner
+  function, and displays the message in the web interface when the
+  callback has completed (and reports any exception occurring in the
+  callback too)
+
+Many examples of the first approach are available in :mod:`cubicweb.web.views.actions`.
+
+Here is an example of the second approach:
+
+.. sourcecode:: python
+
+ from cubicweb.web import action
+ class SomeAction(action.Action):
+     __regid__ = 'mycube_some_action'
+     title = _('some action')
+     __select__ = action.Action.__select__ & is_instance('TargetEntity')
+ 
+     def url(self):
+         if self.cw_row is None:
+             eids = [row[0] for row in self.cw_rset]
+         else:
+             eids = (self.cw_rset[self.cw_row][self.cw_col or 0],)
+         def do_action(req, eids):
+             for eid in eids:
+                 entity = req.entity_from_eid(eid, 'TargetEntity')
+                 entity.perform_action()
+         msg = self._cw._('some_action performed')
+         return self._cw.user_callback(do_action, (eids,), msg)
+
+"""
 
 __docformat__ = "restructuredtext en"
 _ = unicode
@@ -86,15 +133,18 @@
                   & partial_relation_possible(action='add', strict=True))
 
     submenu = 'addrelated'
+    # to be defined in concrete classes
+    target_etype = rtype = None
 
     def url(self):
         try:
-            ttype = self.etype # deprecated in 3.6, already warned by the selector
+            # deprecated in 3.6, already warned by the selector
+            ttype = self.etype # pylint: disable=E1101
         except AttributeError:
             ttype = self.target_etype
         entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0)
         linkto = '%s:%s:%s' % (self.rtype, entity.eid, target(self))
-        return self._cw.build_url('add/%s' % ttype, __linkto=linkto,
-                                  __redirectpath=entity.rest_path(),
+        return self._cw.vreg["etypes"].etype_class(ttype).cw_create_url(self._cw,
+                                  __redirectpath=entity.rest_path(), __linkto=linkto,
                                   __redirectvid=self._cw.form.get('__redirectvid', ''))
 
--- a/web/application.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/application.py	Fri Dec 09 12:08:27 2011 +0100
@@ -236,12 +236,10 @@
 
     def open_session(self, req, allow_no_cnx=True):
         session = self.session_manager.open_session(req, allow_no_cnx=allow_no_cnx)
-        cookie = req.get_cookie()
         sessioncookie = self.session_cookie(req)
-        cookie[sessioncookie] = session.sessionid
-        if req.https and req.base_url().startswith('https://'):
-            cookie[sessioncookie]['secure'] = True
-        req.set_cookie(cookie, sessioncookie, maxage=None)
+        secure = req.https and req.base_url().startswith('https://')
+        req.set_cookie(sessioncookie, session.sessionid,
+                       maxage=None, secure=secure)
         if not session.anonymous_session:
             self.session_manager.postlogin(req)
         return session
@@ -251,8 +249,7 @@
         `AuthenticationError`
         """
         self.session_manager.close_session(req.session)
-        sessioncookie = self.session_cookie(req)
-        req.remove_cookie(req.get_cookie(), sessioncookie)
+        req.remove_cookie(self.session_cookie(req))
         raise LogOut(url=goto_url)
 
     # these are overridden by set_log_methods below
@@ -423,7 +420,7 @@
             if req.cnx and not commited:
                 try:
                     req.cnx.rollback()
-                except:
+                except Exception:
                     pass # ignore rollback error at this point
         self.info('query %s executed in %s sec', req.relative_path(), clock() - tstart)
         return result
@@ -460,7 +457,7 @@
             errview = self.vreg['views'].select('error', req)
             template = self.main_template_id(req)
             content = self.vreg['views'].main_template(req, template, view=errview)
-        except:
+        except Exception:
             content = self.vreg['views'].main_template(req, 'error-template')
         raise StatusResponse(code, content)
 
--- a/web/box.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/box.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -118,7 +118,8 @@
     related to the current result set.
     """
 
-    rql  = None
+    # to be defined in concrete classes
+    rql = title = None
 
     def to_display_rql(self):
         assert self.rql is not None, self.__regid__
@@ -168,7 +169,7 @@
     subclasses should define at least id, rtype and target
     class attributes.
     """
-
+    rtype = None
     def cell_call(self, row, col, view=None, **kwargs):
         self._cw.add_js('cubicweb.ajax.js')
         entity = self.cw_rset.get_entity(row, col)
--- a/web/captcha.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/captcha.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -17,8 +17,8 @@
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """Simple captcha library, based on PIL. Monkey patch functions in this module
 if you want something better...
+"""
 
-"""
 __docformat__ = "restructuredtext en"
 
 from random import randint, choice
--- a/web/component.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/component.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -57,8 +57,6 @@
     page_link_templ = u'<span class="slice"><a href="%s" title="%s">%s</a></span>'
     selected_page_link_templ = u'<span class="selectedSlice"><a href="%s" title="%s">%s</a></span>'
     previous_page_link_templ = next_page_link_templ = page_link_templ
-    no_previous_page_link = u'&lt;&lt;'
-    no_next_page_link = u'&gt;&gt;'
 
     def __init__(self, req, rset, **kwargs):
         super(NavigationComponent, self).__init__(req, rset=rset, **kwargs)
@@ -131,22 +129,54 @@
             return self.selected_page_link_templ % (url, content, content)
         return self.page_link_templ % (url, content, content)
 
-    def previous_link(self, path, params, content='&lt;&lt;', title=_('previous_results')):
+    @property
+    def prev_icon_url(self):
+        return xml_escape(self._cw.data_url('go_prev.png'))
+
+    @property
+    def next_icon_url(self):
+        return xml_escape(self._cw.data_url('go_next.png'))
+
+    @property
+    def no_previous_page_link(self):
+        return (u'<img src="%s" alt="%s" class="prevnext_nogo"/>' %
+                (self.prev_icon_url, self._cw._('there is no previous page')))
+
+    @property
+    def no_next_page_link(self):
+        return (u'<img src="%s" alt="%s" class="prevnext_nogo"/>' %
+                (self.next_icon_url, self._cw._('there is no next page')))
+
+    @property
+    def no_content_prev_link(self):
+        return (u'<img src="%s" alt="%s" class="prevnext"/>' % (
+                (self.prev_icon_url, self._cw._('no content prev link'))))
+
+    @property
+    def no_content_next_link(self):
+        return (u'<img src="%s" alt="%s" class="prevnext"/>' %
+                (self.next_icon_url, self._cw._('no content next link')))
+
+    def previous_link(self, path, params, content=None, title=_('previous_results')):
+        if not content:
+            content = self.no_content_prev_link
         start = self.starting_from
         if not start :
             return self.no_previous_page_link
         start = max(0, start - self.page_size)
         stop = start + self.page_size - 1
         url = xml_escape(self.page_url(path, params, start, stop))
-        return self.previous_page_link_templ % (url, title, content)
+        return self.previous_page_link_templ % (url, self._cw._(title), content)
 
-    def next_link(self, path, params, content='&gt;&gt;', title=_('next_results')):
+    def next_link(self, path, params, content=None, title=_('next_results')):
+        if not content:
+            content = self.no_content_next_link
         start = self.starting_from + self.page_size
         if start >= self.total:
             return self.no_next_page_link
         stop = start + self.page_size - 1
         url = xml_escape(self.page_url(path, params, start, stop))
-        return self.next_page_link_templ % (url, title, content)
+        return self.next_page_link_templ % (url, self._cw._(title), content)
 
 
 # new contextual components system #############################################
@@ -291,7 +321,7 @@
             def wview(__vid, rset=None, __fallback_vid=None, **kwargs):
                 self._cw.view(__vid, rset, __fallback_vid, w=self.w, **kwargs)
             self.wview = wview
-            self.call(**kwargs)
+            self.call(**kwargs) # pylint: disable=E1101
             return
         getlayout = self._cw.vreg['components'].select
         layout = getlayout('layout', self._cw, **self.layout_select_args())
@@ -509,6 +539,9 @@
 
     subclasses should define at least id, rtype and target class attributes.
     """
+    # to be defined in concrete classes
+    rtype = None
+
     def render_title(self, w):
         w(display_name(self._cw, self.rtype, role(self),
                        context=self.entity.__regid__))
@@ -536,7 +569,9 @@
     added_msg = None
     removed_msg = None
 
-    # class attributes below *must* be set in concret classes (additionaly to
+    # to be defined in concrete classes
+    rtype = role = target_etype = None
+    # class attributes below *must* be set in concrete classes (additionally to
     # rtype / role [/ target_etype]. They should correspond to js_* methods on
     # the json controller
 
@@ -676,6 +711,8 @@
     __select__ = EntityVComponent.__select__ & partial_has_related_entities()
 
     vid = 'list'
+    # to be defined in concrete classes
+    rtype = title = None
 
     def rql(self):
         """override this method if you want to use a custom rql query"""
--- a/web/controller.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/controller.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -123,8 +123,10 @@
         """
         newparams = {}
         # sets message if needed
-        if self._cw.message:
-            newparams['_cwmsgid'] = self._cw.set_redirect_message(self._cw.message)
+        # XXX - don't call .message twice since it pops the id
+        msg = self._cw.message
+        if msg:
+            newparams['_cwmsgid'] = self._cw.set_redirect_message(msg)
         if self._cw.form.has_key('__action_apply'):
             self._return_to_edition_view(newparams)
         if self._cw.form.has_key('__action_cancel'):
@@ -165,7 +167,7 @@
         elif self._edited_entity:
             # clear caches in case some attribute participating to the rest path
             # has been modified
-            self._edited_entity.clear_all_caches()
+            self._edited_entity.cw_clear_all_caches()
             path = self._edited_entity.rest_path()
         else:
             path = 'view'
Binary file web/data/accessories-text-editor.png has changed
Binary file web/data/add_button.png has changed
Binary file web/data/banner.png has changed
Binary file web/data/bg_trame_grise.png has changed
Binary file web/data/black-check.png has changed
Binary file web/data/bullet.png has changed
Binary file web/data/bullet_orange.png has changed
Binary file web/data/critical.png has changed
--- a/web/data/cubicweb.ajax.js	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/data/cubicweb.ajax.js	Fri Dec 09 12:08:27 2011 +0100
@@ -22,6 +22,9 @@
  *
  * dummy ultra minimalist implementation of deferred for jQuery
  */
+
+cw.ajax = new Namespace('cw.ajax');
+
 function Deferred() {
     this.__init__(this);
 }
@@ -86,40 +89,133 @@
 
 var JSON_BASE_URL = baseuri() + 'json?';
 
-//============= utility function handling remote calls responses. ==============//
-function _loadAjaxHtmlHead($node, $head, tag, srcattr) {
-    var jqtagfilter = tag + '[' + srcattr + ']';
-    if (cw['loaded_'+srcattr] === undefined) {
-        cw['loaded_'+srcattr] = [];
-        var loaded = cw['loaded_'+srcattr];
-        jQuery('head ' + jqtagfilter).each(function(i) {
-                   loaded.push(this.getAttribute(srcattr));
-               });
-    } else {
-        var loaded = cw['loaded_'+srcattr];
+
+jQuery.extend(cw.ajax, {
+    /* variant of jquery evalScript with cache: true in ajax call */
+    _evalscript: function ( i, elem ) {
+       var src = elem.getAttribute('src');
+       if (src) {
+           jQuery.ajax({
+               url: src,
+               async: false,
+               cache: true,
+               dataType: "script"
+           });
+       } else {
+           jQuery.globalEval( elem.text || elem.textContent || elem.innerHTML || "" );
+       }
+       if ( elem.parentNode ) {
+           elem.parentNode.removeChild( elem );
+       }
+    },
+
+    evalscripts: function ( scripts ) {
+        if ( scripts.length ) {
+            jQuery.each(scripts, cw.ajax._evalscript);
+        }
+    },
+
+    /**
+     * returns true if `url` is a mod_concat-like url
+     * (e.g. http://..../data??resource1.js,resource2.js)
+     */
+    _modconcatLikeUrl: function(url) {
+        var base = baseuri();
+        if (!base.endswith('/')) { base += '/'; }
+        var modconcat_rgx = new RegExp('(' + base + 'data/([a-z0-9]+/)?)\\?\\?(.+)');
+        return modconcat_rgx.exec(url);
+    },
+
+    /**
+     * decomposes a mod_concat-like url into its corresponding list of
+     * resources' urls
+     * >>> _listResources('http://foo.com/data/??a.js,b.js,c.js')
+     * ['http://foo.com/data/a.js', 'http://foo.com/data/b.js', 'http://foo.com/data/c.js']
+     */
+    _listResources: function(src) {
+        var resources = [];
+        var groups = cw.ajax._modconcatLikeUrl(src);
+        if (groups == null) {
+            resources.push(src);
+        } else {
+            var dataurl = groups[1];
+            $.each(cw.utils.lastOf(groups).split(','),
+                 function() {
+                     resources.push(dataurl + this);
+                 }
+            );
+        }
+        return resources;
+    },
+
+    _buildMissingResourcesUrl: function(url, loadedResources) {
+        var resources = cw.ajax._listResources(url);
+        var missingResources = $.grep(resources, function(resource) {
+            return $.inArray(resource, loadedResources) == -1;
+        });
+        cw.utils.extend(loadedResources, missingResources);
+        var missingResourceUrl = null;
+        if (missingResources.length == 1) {
+            // only one resource missing: build a node with a single resource url
+            // (maybe the browser has it in cache already)
+            missingResourceUrl = missingResources[0];
+        } else if (missingResources.length > 1) {
+            // several resources missing: build a node with a concatenated
+            // resources url
+            var dataurl = cw.ajax._modconcatLikeUrl(url)[1];
+            var missing_path = $.map(missingResources, function(resource) {
+                return resource.substring(dataurl.length);
+            });
+            missingResourceUrl = dataurl + '??' + missing_path.join(',');
+        }
+        return missingResourceUrl;
+    },
+
+    _loadAjaxStylesheets: function($responseHead, $head) {
+        $responseHead.find('link[href]').each(function(i) {
+            var $srcnode = $(this);
+            var url = $srcnode.attr('href');
+            if (url) {
+                var missingStylesheetsUrl = cw.ajax._buildMissingResourcesUrl(url, cw.loaded_links);
+                // compute concat-like url for missing resources and append <link>
+                // element to $head
+                if (missingStylesheetsUrl) {
+                    $srcnode.attr('href', missingStylesheetsUrl);
+                    $srcnode.appendTo($head);
+                }
+            }
+        });
+        $responseHead.find('link[href]').remove();
+    },
+
+    _loadAjaxScripts: function($responseHead, $head) {
+        $responseHead.find('pre.script').each(function(i) {
+            var $srcnode = $(this);
+            var url = $srcnode.attr('src');
+            if (url) {
+                var missingScriptsUrl = cw.ajax._buildMissingResourcesUrl(url, cw.loaded_scripts);
+                if (missingScriptsUrl) {
+                    $srcnode.attr('src', missingScriptsUrl);
+                    /* special handling of <script> tags: script nodes appended by jquery
+                     * use uncached ajax calls and do not appear in the DOM
+                     * (see comments in response to Syt on http://api.jquery.com/append/),
+                     * which causes undesired duplicate loads in our case. We now handle
+                     * a list of already loaded resources, since the bare DOM api causes bugs with
+                     * the server-response event, and we lose control over when the
+                     * script is loaded (jQuery loads it immediately). */
+                    cw.ajax.evalscripts($srcnode);
+                }
+            } else {
+                // <script> contains inlined javascript code, node content
+                // must be evaluated
+    	        jQuery.globalEval($srcnode.text());
+    	    }
+        });
+        $responseHead.find('pre.script').remove();
     }
-    $node.find(tag).each(function(i) {
-        var url = this.getAttribute(srcattr);
-        if (url) {
-            if (jQuery.inArray(url, loaded) == -1) {
-                // take care to <script> tags: jQuery append method script nodes
-                // don't appears in the DOM (See comments on
-                // http://api.jquery.com/append/), which cause undesired
-                // duplicated load in our case. After trying to use bare DOM api
-                // to avoid this, we switched to handle a list of already loaded
-                // stuff ourselves, since bare DOM api gives bug with the
-                // server-response event, since we loose control on when the
-                // script is loaded (jQuery load it immediatly).
-                loaded.push(url);
-                jQuery(this).appendTo($head);
-            }
-        } else {
-            jQuery(this).appendTo($head);
-        }
-    });
-    $node.find(jqtagfilter).remove();
-}
+});
 
+//============= utility function handling remote calls responses. ==============//
 /**
  * .. function:: function loadAjaxHtmlHead(response)
  *
@@ -135,8 +231,8 @@
     if (!$responseHead.length) {
         return response;
     }
-    _loadAjaxHtmlHead($responseHead, $head, 'script', 'src');
-    _loadAjaxHtmlHead($responseHead, $head, 'link', 'href');
+    cw.ajax._loadAjaxStylesheets($responseHead, $head);
+    cw.ajax._loadAjaxScripts($responseHead, $head);
     // add any remaining children (e.g. meta)
     $responseHead.children().appendTo($head);
     // remove original container, which is now empty
@@ -161,10 +257,6 @@
 }
 
 function _postAjaxLoad(node) {
-    // find sortable tables if there are some
-    if (typeof(Sortable) != 'undefined') {
-        Sortable.sortTables(node);
-    }
     // find textareas and wrap them if there are some
     if (typeof(FCKeditor) != 'undefined') {
         buildWysiwygEditors();
@@ -406,11 +498,6 @@
         $fragment.loadxhtml('json', ajaxFuncArgs('view', extraparams));
     }
 }
-
-jQuery(document).ready(function() {
-    _loadDynamicFragments();
-});
-
 function unloadPageData() {
     // NOTE: do not make async calls on unload if you want to avoid
     //       strange bugs
@@ -614,7 +701,7 @@
 
 function setTab(tabname, cookiename) {
     // set appropriate cookie
-    loadRemote('json', ajaxFuncArgs('set_cookie', null, cookiename, tabname));
+    jQuery.cookie(cookiename, tabname, {path: '/'});
     // trigger show + tabname event
     triggerLoad(tabname);
 }
@@ -736,3 +823,16 @@
     deferred = deferred.addCallback(resetCursor);
     return deferred;
 }
+
+jQuery(document).ready(function() {
+    _loadDynamicFragments();
+    // build loaded_scripts / loaded_links lists
+    cw.loaded_scripts = [];
+    jQuery('head script[src]').each(function(i) {
+        cw.utils.extend(cw.loaded_scripts, cw.ajax._listResources(this.getAttribute('src')));
+    });
+    cw.loaded_links = [];
+    jQuery('head link[href]').each(function(i) {
+        cw.utils.extend(cw.loaded_links, cw.ajax._listResources(this.getAttribute('href')));
+    });
+});
--- a/web/data/cubicweb.css	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/data/cubicweb.css	Fri Dec 09 12:08:27 2011 +0100
@@ -20,38 +20,30 @@
 /* got rhythm ? beat of 12*1.25 = 15 px */
 .rhythm_bg { background: url("%(baseRhythmBg)s") repeat ! important; }
 
-/* scale 3:5 stranded */
-/* h1 { font-size:2em; } */
-/* h2 { font-size:1.61538em; } */
-/* h3 { font-size:1.23077em; } */
-
-/* scale le corbusier */
-/* h1 { font-size:2.11538em; } */
-/* h2 { font-size:1.61538em; } */
-/* h3 { font-size:1.30769em; } */
-
-/* scale traditional */
-h1,
-.vtitle { font-size: %(h1FontSize)s; }
-h2 { font-size: %(h2FontSize)s; }
-h3 { font-size: %(h3FontSize)s; }
-
-/* paddings */
 h1,
 .vtitle {
+  font-size: %(h1FontSize)s;
   border-bottom: %(h1BorderBottomStyle)s;
   padding: %(h1Padding)s;
   margin: %(h1Margin)s;
   color: %(h1Color)s;
 }
 
+h2 {
+  font-size: %(h2FontSize)s;
+  padding: %(h2Padding)s;
+}
+
+h3 {
+  font-size: %(h3FontSize)s;
+  padding: %(h3Padding)s;
+}
+
 div.tabbedprimary + h1,
 h1.plain {
  border-bottom: none;
 }
 
-h2 { padding: %(h2Padding)s; }
-h3 { padding: %(h3Padding)s; }
 
 html, body {
   background: %(pageBgColor)s;
@@ -120,6 +112,19 @@
   border: none;
 }
 
+
+img.prevnext {
+  width: 22px;
+  height: 22px;
+}
+
+img.prevnext_nogo {
+  width: 22px;
+  height: 22px;
+  filter:alpha(opacity=25); /* IE */
+  opacity:.25;
+}
+
 fieldset {
   border: none;
 }
@@ -806,21 +811,29 @@
  background: %(listingHihligthedBgColor)s;
 }
 
-table.htableForm {
+table.htableForm label, table.oneRowTableForm label {
   vertical-align: middle;
 }
-
-table.htableForm td{
+table.htableForm td {
   padding-left: 1em;
   padding-top: 0.5em;
 }
-table.htableForm th{
+table.htableForm th {
   padding-left: 1em;
 }
 table.htableForm .validateButton {
   margin-right: 0.2em;
-  vertical-align: top;
-  margin-bottom: 0.2em; /* because vertical-align doesn't seems to have any effect */
+  margin-bottom: 0.2em;
+}
+
+table.oneRowTableForm td {
+  padding-left: 0.5em;
+}
+table.oneRowTableForm th {
+  padding-left: 1em;
+}
+table.oneRowTableForm .validateButton {
+  margin: 0 0 0 1em ;
 }
 
 
--- a/web/data/cubicweb.facets.css	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/data/cubicweb.facets.css	Fri Dec 09 12:08:27 2011 +0100
@@ -15,7 +15,7 @@
  color: #000;
  margin-bottom: 2px;
  cursor: pointer;
- font: bold 100% Georgia;
+ font: %(facet_titleFont)s;
 }
 
 div.facetTitle a {
@@ -30,8 +30,8 @@
  color: #000 !important;
 }
 
-div.overflowed{
-  height: 12em;
+div.overflowed {
+  height: %(facet_overflowedHeight)s;
   overflow-y: auto;
 }
 
@@ -109,11 +109,25 @@
 div#facetLoading {
   display: none;
   position: fixed;
-  padding-left: 20px;
+  background: #f2f2f2;
   top: 400px;
   width: 200px;
-  height: 100px;
+  padding: 1em;
   font-size:      120%;
   font-weight:    bold;
   text-align:     center;
 }
+
+div.facetTitleSelected {
+  background: url("required.png") no-repeat right top;
+}
+
+table.filter {
+  background-color: #EBE8D9;
+  border: dotted grey 1px;
+}
+
+div.facet {
+ padding: none;
+ margin: .3em!important;
+}
--- a/web/data/cubicweb.facets.js	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/data/cubicweb.facets.js	Fri Dec 09 12:08:27 2011 +0100
@@ -1,7 +1,7 @@
 /** filter form, aka facets, javascript functions
  *
  *  :organization: Logilab
- *  :copyright: 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+ *  :copyright: 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
  *  :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
  */
 
@@ -23,15 +23,24 @@
     var values = [];
     $form.find('.facet').each(function() {
         var facetName = jQuery(this).find('.facetTitle').attr('cubicweb:facetName');
-        var facetValues = jQuery(this).find('.facetValueSelected').each(function(x) {
+        // FacetVocabularyWidget
+        jQuery(this).find('.facetValueSelected').each(function(x) {
             names.push(facetName);
             values.push(this.getAttribute('cubicweb:value'));
         });
+        // FacetStringWidget (e.g. has-text)
+        jQuery(this).find('input:text').each(function(){
+            names.push(facetName);
+            values.push(this.value);
+        });
     });
-    $form.find('input').each(function() {
+    // pick up hidden inputs (required metadata inputs such as 'facets'
+    // but also RangeWidgets)
+    $form.find('input:hidden').each(function() {
         names.push(this.name);
         values.push(this.value);
     });
+    // And / Or operators
     $form.find('select option[selected]').each(function() {
         names.push(this.parentNode.name);
         values.push(this.value);
@@ -51,11 +60,11 @@
         var rql = result[0];
         var $bkLink = jQuery('#facetBkLink');
         if ($bkLink.length) {
-            var bkPath = 'view?rql=' + escape(rql);
+            var bkPath = 'view?rql=' + encodeURIComponent(rql);
             if (vid) {
-                bkPath += '&vid=' + escape(vid);
+                bkPath += '&vid=' + encodeURIComponent(vid);
             }
-            var bkUrl = $bkLink.attr('cubicweb:target') + '&path=' + escape(bkPath);
+            var bkUrl = $bkLink.attr('cubicweb:target') + '&path=' + encodeURIComponent(bkPath);
             $bkLink.attr('href', bkUrl);
         }
         var toupdate = result[1];
@@ -94,7 +103,7 @@
                 },
                 'ctxcomponents', 'edit_box'));
             }
-            $node = jQuery('#breadcrumbs')
+            $node = jQuery('#breadcrumbs');
             if ($node.length) {
                 $node.loadxhtml('json', ajaxFuncArgs('render', {
                     'rql': rql
@@ -102,11 +111,19 @@
                 'ctxcomponents', 'breadcrumbs'));
             }
         }
-        var d = loadRemote('json', ajaxFuncArgs('filter_select_content', null, toupdate, rql));
+        var mainvar = null;
+        var index = jQuery.inArray('mainvar', zipped[0]);
+        if (index > - 1) {
+            mainvar = zipped[1][index];
+        }
+
+        var d = loadRemote('json', ajaxFuncArgs('filter_select_content', null, toupdate, rql, mainvar));
         d.addCallback(function(updateMap) {
-            for (facetId in updateMap) {
-                var values = updateMap[facetId];
-                cw.jqNode(facetId).find('.facetCheckBox').each(function() {
+            for (facetName in updateMap) {
+                var values = updateMap[facetName];
+                // XXX fine with jquery 1.6
+                //$form.find('div[cubicweb\\:facetName="' + facetName + '"] ~ div .facetCheckBox').each(function() {
+                $form.find('div').filter(function () {return $(this).attr('cubicweb:facetName') == facetName}).parent().find('.facetCheckBox').each(function() {
                     var value = this.getAttribute('cubicweb:value');
                     if (jQuery.inArray(value, values) == -1) {
                         if (!jQuery(this).hasClass('facetValueDisabled')) {
@@ -134,19 +151,19 @@
         //       called, not when the page is initialized
         var facetargs = form.attr('cubicweb:facetargs');
         if (facetargs != undefined && !form.attr('cubicweb:initialized')) {
-	    form.attr('cubicweb:initialized', '1');
-	    var jsfacetargs = cw.evalJSON(form.attr('cubicweb:facetargs'));
+            form.attr('cubicweb:initialized', '1');
+            var jsfacetargs = cw.evalJSON(form.attr('cubicweb:facetargs'));
             form.submit(function() {
                 buildRQL.apply(null, jsfacetargs);
                 return false;
             });
-	    var divid = jsfacetargs[0];
-	    if (jQuery('#'+divid).length) {
-		var $loadingDiv = $(DIV({id:'facetLoading'},
-					facetLoadingMsg));
-		$loadingDiv.corner();
-		$(jQuery('#'+divid).get(0).parentNode).append($loadingDiv);
-	    }
+            var divid = jsfacetargs[0];
+            if (jQuery('#'+divid).length) {
+                var $loadingDiv = $(DIV({id:'facetLoading'},
+                                        facetLoadingMsg));
+                $loadingDiv.corner();
+                $(jQuery('#'+divid).get(0).parentNode).append($loadingDiv);
+           }
             form.find('div.facet').each(function() {
                 var facet = jQuery(this);
                 facet.find('div.facetCheckBox').each(function(i) {
@@ -247,6 +264,18 @@
     });
 }
 
+// change css class of facets that have a value selected
+function updateFacetTitles() {
+    $('.facet').each(function() {
+        var $divTitle = $(this).find('.facetTitle');
+        var facetSelected = $(this).find('.facetValueSelected');
+        if (facetSelected.length) {
+            $divTitle.addClass('facetTitleSelected');
+        } else {
+            $divTitle.removeClass('facetTitleSelected');
+        }
+    });
+}
 
 // we need to differenciate cases where initFacetBoxEvents is called with one
 // argument or without any argument. If we use `initFacetBoxEvents` as the
@@ -254,4 +283,34 @@
 // his, so we use this small anonymous function instead.
 jQuery(document).ready(function() {
     initFacetBoxEvents();
+    jQuery(cw).bind('facets-content-loaded', onFacetContentLoaded);
+    jQuery(cw).bind('facets-content-loading', onFacetFiltering);
+    jQuery(cw).bind('facets-content-loading', updateFacetTitles);
 });
+
+function showFacetLoading(parentid) {
+    var loadingWidth = 200; // px
+    var loadingHeight = 100; // px
+    var $msg = jQuery('#facetLoading');
+    var $parent = jQuery('#' + parentid);
+    var leftPos = $parent.offset().left + ($parent.width() - loadingWidth) / 2;
+    $parent.fadeTo('normal', 0.2);
+    $msg.css('left', leftPos).show();
+}
+
+function onFacetFiltering(event, divid /* ... */) {
+    showFacetLoading(divid);
+}
+
+function onFacetContentLoaded(event, divid, rql, vid, extraparams) {
+    jQuery('#facetLoading').hide();
+}
+
+jQuery(document).ready(function () {
+    if (jQuery('div.facetBody').length) {
+        var $loadingDiv = $(DIV({id:'facetLoading'},
+                                facetLoadingMsg));
+        $loadingDiv.corner();
+        $('body').append($loadingDiv);
+    }
+});
--- a/web/data/cubicweb.fckcwconfig-full.js	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/data/cubicweb.fckcwconfig-full.js	Fri Dec 09 12:08:27 2011 +0100
@@ -29,6 +29,9 @@
 FCKConfig.ContextMenu = ['Generic','Link','Anchor','Image','BulletedList','NumberedList','Table'] ;
 
 FCKConfig.LinkUpload = false ;
+FCKConfig.LinkBrowser = false ;
 FCKConfig.ImageUpload = false ;
+FCKConfig.ImageBrowser = false ;
 FCKConfig.FlashUpload = false ;
+FCKConfig.FlashBrowser = false ;
 
--- a/web/data/cubicweb.fckcwconfig.js	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/data/cubicweb.fckcwconfig.js	Fri Dec 09 12:08:27 2011 +0100
@@ -11,5 +11,8 @@
 FCKConfig.ContextMenu = ['Generic','Link','Anchor','Image','BulletedList','NumberedList','Table'] ;
 
 FCKConfig.LinkUpload = false ;
+FCKConfig.LinkBrowser = false ;
 FCKConfig.ImageUpload = false ;
+FCKConfig.ImageBrowser = false ;
 FCKConfig.FlashUpload = false ;
+FCKConfig.FlashBrowser = false ;
--- a/web/data/cubicweb.js	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/data/cubicweb.js	Fri Dec 09 12:08:27 2011 +0100
@@ -53,7 +53,13 @@
     },
 
     evalJSON: function (json) { // trust source
-        return eval("(" + json + ")");
+        try {
+            return eval("(" + json + ")");
+        } catch(e) {
+          cw.log(e);
+          cw.log('The faulty json source was', json);
+          throw (e);
+       }
     },
 
     urlEncode: function (str) {
@@ -308,6 +314,28 @@
     },
 
     /**
+     * returns the last element of an array-like object or undefined if empty
+     */
+    lastOf: function(array) {
+        if (array.length) {
+            return array[array.length-1];
+        } else {
+            return undefined;
+        }
+    },
+
+
+    /**
+     * .. function:: extend(array1, array2)
+     *
+     * equivalent of python ``+=`` statement on lists (array1 += array2)
+     */
+    extend: function(array1, array2) {
+        array1.push.apply(array1, array2);
+        return array1; // return array1 for convenience
+    },
+
+    /**
      * .. function:: difference(lst1, lst2)
      *
      * returns a list containing all elements in `lst1` that are not
--- a/web/data/cubicweb.old.css	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/data/cubicweb.old.css	Fri Dec 09 12:08:27 2011 +0100
@@ -24,7 +24,7 @@
 
 h1,
 .vtitle {
-  font-size: 188%;
+  font-size: %(h1FontSize)s;
   margin: 0.2em 0px 0.3em;
   border-bottom: 1px solid #000;
 }
@@ -35,11 +35,11 @@
 }
 
 h2 {
-  font-size: 135%;
+  font-size: %(h2FontSize)s;
 }
 
 h3 {
-  font-size: 130%;
+  font-size: %(h3FontSize)s;
 }
 
 h4 {
@@ -69,6 +69,18 @@
   text-align: center;
 }
 
+img.prevnext {
+  width: 22px;
+  height: 22px;
+}
+
+img.prevnext_nogo {
+  width: 22px;
+  height: 22px;
+  filter:alpha(opacity=25); /* IE */
+  opacity:.25;
+}
+
 p {
   margin: 0em 0px 0.2em;
   padding-top: 2px;
@@ -613,7 +625,7 @@
 
 span.selectedSlice a:visited,
 span.selectedSlice a {
-  color: #000;
+  background-color: #EBE8D9;
 }
 
 /* FIXME should be moved to cubes/folder */
@@ -824,20 +836,29 @@
   top: -1px;
 }
 
-table.htableForm {
+table.htableForm label, table.oneRowTableForm label {
   vertical-align: middle;
 }
-table.htableForm td{
+table.htableForm td {
   padding-left: 1em;
   padding-top: 0.5em;
 }
-table.htableForm th{
+table.htableForm th {
   padding-left: 1em;
 }
 table.htableForm .validateButton {
   margin-right: 0.2em;
-  vertical-align: top;
-  margin-bottom: 0.2em; /* because vertical-align doesn't seems to have any effect */
+  margin-bottom: 0.2em;
+}
+
+table.oneRowTableForm td {
+  padding-left: 0.5em;
+}
+table.oneRowTableForm th {
+  padding-left: 1em;
+}
+table.oneRowTableForm .validateButton {
+  margin: 0 0 0 1em ;
 }
 
 table.ajaxEditRelationTable{
--- a/web/data/cubicweb.preferences.js	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/data/cubicweb.preferences.js	Fri Dec 09 12:08:27 2011 +0100
@@ -104,7 +104,7 @@
         unfreezeButtons = true;
     } else {
         input.removeClass('changed');
-        jQuery("span[id=err-" + input.attr('id') + "]").remove();
+        jQuery('span[id="err-' + input.attr('id') + '"]').remove();
     }
     input.removeClass('error');
     return unfreezeButtons;
@@ -124,7 +124,7 @@
         } else {
             prefsValues[name] = input.val();
         }
-        jQuery(form).find('input[name=edits-' + name + ']').val(prefsValues[name]);
+        jQuery(form).find('input[name="edits-' + name + '"]').val(prefsValues[name]);
     });
 }
 
--- a/web/data/cubicweb.reset.css	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/data/cubicweb.reset.css	Fri Dec 09 12:08:27 2011 +0100
@@ -1,53 +1,48 @@
-/* http://meyerweb.com/eric/tools/css/reset/ */
-/* v1.0 | 20080212 */
+/* http://meyerweb.com/eric/tools/css/reset/ 
+   v2.0 | 20110126
+   License: none (public domain)
+*/
 
 html, body, div, span, applet, object, iframe,
 h1, h2, h3, h4, h5, h6, p, blockquote, pre,
 a, abbr, acronym, address, big, cite, code,
-del, dfn, em, font, img, ins, kbd, q, s, samp,
+del, dfn, em, img, ins, kbd, q, s, samp,
 small, strike, strong, sub, sup, tt, var,
 b, u, i, center,
 dl, dt, dd, ol, ul, li,
 fieldset, form, label, legend,
-table, caption, tbody, tfoot, thead, tr, th, td {
-  margin: 0;
-  padding: 0;
-  border: 0;
-  outline: 0;
-  font-size: 100%;
-  vertical-align: baseline;
-  background: transparent;
+table, caption, tbody, tfoot, thead, tr, th, td,
+article, aside, canvas, details, embed, 
+figure, figcaption, footer, header, hgroup, 
+menu, nav, output, ruby, section, summary,
+time, mark, audio, video {
+	margin: 0;
+	padding: 0;
+	border: 0;
+	font-size: 100%;
+	font: inherit;
+	vertical-align: baseline;
+}
+/* HTML5 display-role reset for older browsers */
+article, aside, details, figcaption, figure, 
+footer, header, hgroup, menu, nav, section {
+	display: block;
 }
 body {
-  line-height: 1;
+	line-height: 1;
 }
 ol, ul {
-  list-style: none;
+	list-style: none;
 }
 blockquote, q {
-  quotes: none;
+	quotes: none;
 }
 blockquote:before, blockquote:after,
 q:before, q:after {
-  content: '';
-  content: none;
-}
-
-/* remember to define focus styles! */
-:focus {
-  outline: 0;
+	content: '';
+	content: none;
 }
-
-/* remember to highlight inserts somehow! */
-ins {
-  text-decoration: none;
-}
-del {
-  text-decoration: line-through;
-}
-
-/* tables still need 'cellspacing="0"' in the markup */
 table {
-  border-collapse: collapse;
-  border-spacing: 0;
+	border-collapse: collapse;
+	border-spacing: 0;
 }
\ No newline at end of file
Binary file web/data/dublincore-button.png has changed
Binary file web/data/dublincore-icon.png has changed
--- a/web/data/excanvas.js	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/data/excanvas.js	Fri Dec 09 12:08:27 2011 +0100
@@ -1,30 +1,1438 @@
-/**
- * jqPlot
- * Pure JavaScript plotting plugin using jQuery
- *
- * Version: @VERSION
- *
- * Copyright (c) 2009-2011 Chris Leonello
- * jqPlot is currently available for use in all personal or commercial projects 
- * under both the MIT (http://www.opensource.org/licenses/mit-license.php) and GPL 
- * version 2.0 (http://www.gnu.org/licenses/gpl-2.0.html) licenses. This means that you can 
- * choose the license that best suits your project and use it accordingly. 
- *
- * Although not required, the author would appreciate an email letting him 
- * know of any substantial use of jqPlot.  You can reach the author at: 
- * chris at jqplot dot com or see http://www.jqplot.com/info.php .
- *
- * If you are feeling kind and generous, consider supporting the project by
- * making a donation at: http://www.jqplot.com/donate.php .
- *
- * sprintf functions contained in jqplot.sprintf.js by Ash Searle:
- *
- *     version 2007.04.27
- *     author Ash Searle
- *     http://hexmen.com/blog/2007/03/printf-sprintf/
- *     http://hexmen.com/js/sprintf.js
- *     The author (Ash Searle) has placed this code in the public domain:
- *     "This code is unrestricted: you are free to use it however you like."
- * 
- */
-if(!document.createElement("canvas").getContext){(function(){var ab=Math;var n=ab.round;var l=ab.sin;var A=ab.cos;var H=ab.abs;var N=ab.sqrt;var d=10;var f=d/2;var z=+navigator.userAgent.match(/MSIE ([\d.]+)?/)[1];function y(){return this.context_||(this.context_=new D(this))}var t=Array.prototype.slice;function g(j,m,p){var i=t.call(arguments,2);return function(){return j.apply(m,i.concat(t.call(arguments)))}}function af(i){return String(i).replace(/&/g,"&amp;").replace(/"/g,"&quot;")}function Y(m,j,i){if(!m.namespaces[j]){m.namespaces.add(j,i,"#default#VML")}}function R(j){Y(j,"g_vml_","urn:schemas-microsoft-com:vml");Y(j,"g_o_","urn:schemas-microsoft-com:office:office");if(!j.styleSheets.ex_canvas_){var i=j.createStyleSheet();i.owningElement.id="ex_canvas_";i.cssText="canvas{display:inline-block;overflow:hidden;text-align:left;width:300px;height:150px}"}}R(document);var e={init:function(i){var j=i||document;j.createElement("canvas");j.attachEvent("onreadystatechange",g(this.init_,this,j))},init_:function(p){var m=p.getElementsByTagName("canvas");for(var j=0;j<m.length;j++){this.initElement(m[j])}},initElement:function(j){if(!j.getContext){j.getContext=y;R(j.ownerDocument);j.innerHTML="";j.attachEvent("onpropertychange",x);j.attachEvent("onresize",W);var i=j.attributes;if(i.width&&i.width.specified){j.style.width=i.width.nodeValue+"px"}else{j.width=j.clientWidth}if(i.height&&i.height.specified){j.style.height=i.height.nodeValue+"px"}else{j.height=j.clientHeight}}return j},uninitElement:function(j){if(j.getContext){var i=j.getContext();delete i.element_;delete i.canvas;j.innerHTML="";j.context_=null;j.getContext=null;j.detachEvent("onpropertychange",x);j.detachEvent("onresize",W)}}};function x(j){var 
i=j.srcElement;switch(j.propertyName){case"width":i.getContext().clearRect();i.style.width=i.attributes.width.nodeValue+"px";i.firstChild.style.width=i.clientWidth+"px";break;case"height":i.getContext().clearRect();i.style.height=i.attributes.height.nodeValue+"px";i.firstChild.style.height=i.clientHeight+"px";break}}function W(j){var i=j.srcElement;if(i.firstChild){i.firstChild.style.width=i.clientWidth+"px";i.firstChild.style.height=i.clientHeight+"px"}}e.init();var k=[];for(var ae=0;ae<16;ae++){for(var ad=0;ad<16;ad++){k[ae*16+ad]=ae.toString(16)+ad.toString(16)}}function B(){return[[1,0,0],[0,1,0],[0,0,1]]}function J(p,m){var j=B();for(var i=0;i<3;i++){for(var ah=0;ah<3;ah++){var Z=0;for(var ag=0;ag<3;ag++){Z+=p[i][ag]*m[ag][ah]}j[i][ah]=Z}}return j}function v(j,i){i.fillStyle=j.fillStyle;i.lineCap=j.lineCap;i.lineJoin=j.lineJoin;i.lineWidth=j.lineWidth;i.miterLimit=j.miterLimit;i.shadowBlur=j.shadowBlur;i.shadowColor=j.shadowColor;i.shadowOffsetX=j.shadowOffsetX;i.shadowOffsetY=j.shadowOffsetY;i.strokeStyle=j.strokeStyle;i.globalAlpha=j.globalAlpha;i.font=j.font;i.textAlign=j.textAlign;i.textBaseline=j.textBaseline;i.arcScaleX_=j.arcScaleX_;i.arcScaleY_=j.arcScaleY_;i.lineScale_=j.lineScale_}var 
b={aliceblue:"#F0F8FF",antiquewhite:"#FAEBD7",aquamarine:"#7FFFD4",azure:"#F0FFFF",beige:"#F5F5DC",bisque:"#FFE4C4",black:"#000000",blanchedalmond:"#FFEBCD",blueviolet:"#8A2BE2",brown:"#A52A2A",burlywood:"#DEB887",cadetblue:"#5F9EA0",chartreuse:"#7FFF00",chocolate:"#D2691E",coral:"#FF7F50",cornflowerblue:"#6495ED",cornsilk:"#FFF8DC",crimson:"#DC143C",cyan:"#00FFFF",darkblue:"#00008B",darkcyan:"#008B8B",darkgoldenrod:"#B8860B",darkgray:"#A9A9A9",darkgreen:"#006400",darkgrey:"#A9A9A9",darkkhaki:"#BDB76B",darkmagenta:"#8B008B",darkolivegreen:"#556B2F",darkorange:"#FF8C00",darkorchid:"#9932CC",darkred:"#8B0000",darksalmon:"#E9967A",darkseagreen:"#8FBC8F",darkslateblue:"#483D8B",darkslategray:"#2F4F4F",darkslategrey:"#2F4F4F",darkturquoise:"#00CED1",darkviolet:"#9400D3",deeppink:"#FF1493",deepskyblue:"#00BFFF",dimgray:"#696969",dimgrey:"#696969",dodgerblue:"#1E90FF",firebrick:"#B22222",floralwhite:"#FFFAF0",forestgreen:"#228B22",gainsboro:"#DCDCDC",ghostwhite:"#F8F8FF",gold:"#FFD700",goldenrod:"#DAA520",grey:"#808080",greenyellow:"#ADFF2F",honeydew:"#F0FFF0",hotpink:"#FF69B4",indianred:"#CD5C5C",indigo:"#4B0082",ivory:"#FFFFF0",khaki:"#F0E68C",lavender:"#E6E6FA",lavenderblush:"#FFF0F5",lawngreen:"#7CFC00",lemonchiffon:"#FFFACD",lightblue:"#ADD8E6",lightcoral:"#F08080",lightcyan:"#E0FFFF",lightgoldenrodyellow:"#FAFAD2",lightgreen:"#90EE90",lightgrey:"#D3D3D3",lightpink:"#FFB6C1",lightsalmon:"#FFA07A",lightseagreen:"#20B2AA",lightskyblue:"#87CEFA",lightslategray:"#778899",lightslategrey:"#778899",lightsteelblue:"#B0C4DE",lightyellow:"#FFFFE0",limegreen:"#32CD32",linen:"#FAF0E6",magenta:"#FF00FF",mediumaquamarine:"#66CDAA",mediumblue:"#0000CD",mediumorchid:"#BA55D3",mediumpurple:"#9370DB",mediumseagreen:"#3CB371",mediumslateblue:"#7B68EE",mediumspringgreen:"#00FA9A",mediumturquoise:"#48D1CC",mediumvioletred:"#C71585",midnightblue:"#191970",mintcream:"#F5FFFA",mistyrose:"#FFE4E1",moccasin:"#FFE4B5",navajowhite:"#FFDEAD",oldlace:"#FDF5E6",olivedrab:"#6B8E23",orange:"#FFA500",
orangered:"#FF4500",orchid:"#DA70D6",palegoldenrod:"#EEE8AA",palegreen:"#98FB98",paleturquoise:"#AFEEEE",palevioletred:"#DB7093",papayawhip:"#FFEFD5",peachpuff:"#FFDAB9",peru:"#CD853F",pink:"#FFC0CB",plum:"#DDA0DD",powderblue:"#B0E0E6",rosybrown:"#BC8F8F",royalblue:"#4169E1",saddlebrown:"#8B4513",salmon:"#FA8072",sandybrown:"#F4A460",seagreen:"#2E8B57",seashell:"#FFF5EE",sienna:"#A0522D",skyblue:"#87CEEB",slateblue:"#6A5ACD",slategray:"#708090",slategrey:"#708090",snow:"#FFFAFA",springgreen:"#00FF7F",steelblue:"#4682B4",tan:"#D2B48C",thistle:"#D8BFD8",tomato:"#FF6347",turquoise:"#40E0D0",violet:"#EE82EE",wheat:"#F5DEB3",whitesmoke:"#F5F5F5",yellowgreen:"#9ACD32"};function M(j){var p=j.indexOf("(",3);var i=j.indexOf(")",p+1);var m=j.substring(p+1,i).split(",");if(m.length!=4||j.charAt(3)!="a"){m[3]=1}return m}function c(i){return parseFloat(i)/100}function r(j,m,i){return Math.min(i,Math.max(m,j))}function I(ag){var i,ai,aj,ah,ak,Z;ah=parseFloat(ag[0])/360%360;if(ah<0){ah++}ak=r(c(ag[1]),0,1);Z=r(c(ag[2]),0,1);if(ak==0){i=ai=aj=Z}else{var j=Z<0.5?Z*(1+ak):Z+ak-Z*ak;var m=2*Z-j;i=a(m,j,ah+1/3);ai=a(m,j,ah);aj=a(m,j,ah-1/3)}return"#"+k[Math.floor(i*255)]+k[Math.floor(ai*255)]+k[Math.floor(aj*255)]}function a(j,i,m){if(m<0){m++}if(m>1){m--}if(6*m<1){return j+(i-j)*6*m}else{if(2*m<1){return i}else{if(3*m<2){return j+(i-j)*(2/3-m)*6}else{return j}}}}var C={};function F(j){if(j in C){return C[j]}var ag,Z=1;j=String(j);if(j.charAt(0)=="#"){ag=j}else{if(/^rgb/.test(j)){var p=M(j);var ag="#",ah;for(var m=0;m<3;m++){if(p[m].indexOf("%")!=-1){ah=Math.floor(c(p[m])*255)}else{ah=+p[m]}ag+=k[r(ah,0,255)]}Z=+p[3]}else{if(/^hsl/.test(j)){var p=M(j);ag=I(p);Z=p[3]}else{ag=b[j]||j}}}return C[j]={color:ag,alpha:Z}}var o={style:"normal",variant:"normal",weight:"normal",size:10,family:"sans-serif"};var L={};function E(i){if(L[i]){return L[i]}var p=document.createElement("div");var m=p.style;try{m.font=i}catch(j){}return 
L[i]={style:m.fontStyle||o.style,variant:m.fontVariant||o.variant,weight:m.fontWeight||o.weight,size:m.fontSize||o.size,family:m.fontFamily||o.family}}function u(m,j){var i={};for(var ah in m){i[ah]=m[ah]}var ag=parseFloat(j.currentStyle.fontSize),Z=parseFloat(m.size);if(typeof m.size=="number"){i.size=m.size}else{if(m.size.indexOf("px")!=-1){i.size=Z}else{if(m.size.indexOf("em")!=-1){i.size=ag*Z}else{if(m.size.indexOf("%")!=-1){i.size=(ag/100)*Z}else{if(m.size.indexOf("pt")!=-1){i.size=Z/0.75}else{i.size=ag}}}}}i.size*=0.981;return i}function ac(i){return i.style+" "+i.variant+" "+i.weight+" "+i.size+"px "+i.family}var s={butt:"flat",round:"round"};function S(i){return s[i]||"square"}function D(i){this.m_=B();this.mStack_=[];this.aStack_=[];this.currentPath_=[];this.strokeStyle="#000";this.fillStyle="#000";this.lineWidth=1;this.lineJoin="miter";this.lineCap="butt";this.miterLimit=d*1;this.globalAlpha=1;this.font="10px sans-serif";this.textAlign="left";this.textBaseline="alphabetic";this.canvas=i;var m="width:"+i.clientWidth+"px;height:"+i.clientHeight+"px;overflow:hidden;position:absolute";var j=i.ownerDocument.createElement("div");j.style.cssText=m;i.appendChild(j);var p=j.cloneNode(false);p.style.backgroundColor="red";p.style.filter="alpha(opacity=0)";i.appendChild(p);this.element_=j;this.arcScaleX_=1;this.arcScaleY_=1;this.lineScale_=1}var q=D.prototype;q.clearRect=function(){if(this.textMeasureEl_){this.textMeasureEl_.removeNode(true);this.textMeasureEl_=null}this.element_.innerHTML=""};q.beginPath=function(){this.currentPath_=[]};q.moveTo=function(j,i){var m=V(this,j,i);this.currentPath_.push({type:"moveTo",x:m.x,y:m.y});this.currentX_=m.x;this.currentY_=m.y};q.lineTo=function(j,i){var m=V(this,j,i);this.currentPath_.push({type:"lineTo",x:m.x,y:m.y});this.currentX_=m.x;this.currentY_=m.y};q.bezierCurveTo=function(m,j,ak,aj,ai,ag){var i=V(this,ai,ag);var ah=V(this,m,j);var Z=V(this,ak,aj);K(this,ah,Z,i)};function 
K(i,Z,m,j){i.currentPath_.push({type:"bezierCurveTo",cp1x:Z.x,cp1y:Z.y,cp2x:m.x,cp2y:m.y,x:j.x,y:j.y});i.currentX_=j.x;i.currentY_=j.y}q.quadraticCurveTo=function(ai,m,j,i){var ah=V(this,ai,m);var ag=V(this,j,i);var aj={x:this.currentX_+2/3*(ah.x-this.currentX_),y:this.currentY_+2/3*(ah.y-this.currentY_)};var Z={x:aj.x+(ag.x-this.currentX_)/3,y:aj.y+(ag.y-this.currentY_)/3};K(this,aj,Z,ag)};q.arc=function(al,aj,ak,ag,j,m){ak*=d;var ap=m?"at":"wa";var am=al+A(ag)*ak-f;var ao=aj+l(ag)*ak-f;var i=al+A(j)*ak-f;var an=aj+l(j)*ak-f;if(am==i&&!m){am+=0.125}var Z=V(this,al,aj);var ai=V(this,am,ao);var ah=V(this,i,an);this.currentPath_.push({type:ap,x:Z.x,y:Z.y,radius:ak,xStart:ai.x,yStart:ai.y,xEnd:ah.x,yEnd:ah.y})};q.rect=function(m,j,i,p){this.moveTo(m,j);this.lineTo(m+i,j);this.lineTo(m+i,j+p);this.lineTo(m,j+p);this.closePath()};q.strokeRect=function(m,j,i,p){var Z=this.currentPath_;this.beginPath();this.moveTo(m,j);this.lineTo(m+i,j);this.lineTo(m+i,j+p);this.lineTo(m,j+p);this.closePath();this.stroke();this.currentPath_=Z};q.fillRect=function(m,j,i,p){var Z=this.currentPath_;this.beginPath();this.moveTo(m,j);this.lineTo(m+i,j);this.lineTo(m+i,j+p);this.lineTo(m,j+p);this.closePath();this.fill();this.currentPath_=Z};q.createLinearGradient=function(j,p,i,m){var Z=new U("gradient");Z.x0_=j;Z.y0_=p;Z.x1_=i;Z.y1_=m;return Z};q.createRadialGradient=function(p,ag,m,j,Z,i){var ah=new U("gradientradial");ah.x0_=p;ah.y0_=ag;ah.r0_=m;ah.x1_=j;ah.y1_=Z;ah.r1_=i;return ah};q.drawImage=function(aq,m){var aj,ah,al,ay,ao,am,at,aA;var ak=aq.runtimeStyle.width;var ap=aq.runtimeStyle.height;aq.runtimeStyle.width="auto";aq.runtimeStyle.height="auto";var ai=aq.width;var 
aw=aq.height;aq.runtimeStyle.width=ak;aq.runtimeStyle.height=ap;if(arguments.length==3){aj=arguments[1];ah=arguments[2];ao=am=0;at=al=ai;aA=ay=aw}else{if(arguments.length==5){aj=arguments[1];ah=arguments[2];al=arguments[3];ay=arguments[4];ao=am=0;at=ai;aA=aw}else{if(arguments.length==9){ao=arguments[1];am=arguments[2];at=arguments[3];aA=arguments[4];aj=arguments[5];ah=arguments[6];al=arguments[7];ay=arguments[8]}else{throw Error("Invalid number of arguments")}}}var az=V(this,aj,ah);var p=at/2;var j=aA/2;var ax=[];var i=10;var ag=10;ax.push(" <g_vml_:group",' coordsize="',d*i,",",d*ag,'"',' coordorigin="0,0"',' style="width:',i,"px;height:",ag,"px;position:absolute;");if(this.m_[0][0]!=1||this.m_[0][1]||this.m_[1][1]!=1||this.m_[1][0]){var Z=[];Z.push("M11=",this.m_[0][0],",","M12=",this.m_[1][0],",","M21=",this.m_[0][1],",","M22=",this.m_[1][1],",","Dx=",n(az.x/d),",","Dy=",n(az.y/d),"");var av=az;var au=V(this,aj+al,ah);var ar=V(this,aj,ah+ay);var an=V(this,aj+al,ah+ay);av.x=ab.max(av.x,au.x,ar.x,an.x);av.y=ab.max(av.y,au.y,ar.y,an.y);ax.push("padding:0 ",n(av.x/d),"px ",n(av.y/d),"px 0;filter:progid:DXImageTransform.Microsoft.Matrix(",Z.join(""),", sizingmethod='clip');")}else{ax.push("top:",n(az.y/d),"px;left:",n(az.x/d),"px;")}ax.push(' ">','<g_vml_:image src="',aq.src,'"',' style="width:',d*al,"px;"," height:",d*ay,'px"',' cropleft="',ao/ai,'"',' croptop="',am/aw,'"',' cropright="',(ai-ao-at)/ai,'"',' cropbottom="',(aw-am-aA)/aw,'"'," />","</g_vml_:group>");this.element_.insertAdjacentHTML("BeforeEnd",ax.join(""))};q.stroke=function(al){var aj=[];var Z=false;var m=10;var am=10;aj.push("<g_vml_:shape",' filled="',!!al,'"',' style="position:absolute;width:',m,"px;height:",am,'px;"',' coordorigin="0,0"',' coordsize="',d*m,",",d*am,'"',' stroked="',!al,'"',' path="');var an=false;var ag={x:null,y:null};var ak={x:null,y:null};for(var ah=0;ah<this.currentPath_.length;ah++){var j=this.currentPath_[ah];var ai;switch(j.type){case"moveTo":ai=j;aj.push(" m 
",n(j.x),",",n(j.y));break;case"lineTo":aj.push(" l ",n(j.x),",",n(j.y));break;case"close":aj.push(" x ");j=null;break;case"bezierCurveTo":aj.push(" c ",n(j.cp1x),",",n(j.cp1y),",",n(j.cp2x),",",n(j.cp2y),",",n(j.x),",",n(j.y));break;case"at":case"wa":aj.push(" ",j.type," ",n(j.x-this.arcScaleX_*j.radius),",",n(j.y-this.arcScaleY_*j.radius)," ",n(j.x+this.arcScaleX_*j.radius),",",n(j.y+this.arcScaleY_*j.radius)," ",n(j.xStart),",",n(j.yStart)," ",n(j.xEnd),",",n(j.yEnd));break}if(j){if(ag.x==null||j.x<ag.x){ag.x=j.x}if(ak.x==null||j.x>ak.x){ak.x=j.x}if(ag.y==null||j.y<ag.y){ag.y=j.y}if(ak.y==null||j.y>ak.y){ak.y=j.y}}}aj.push(' ">');if(!al){w(this,aj)}else{G(this,aj,ag,ak)}aj.push("</g_vml_:shape>");this.element_.insertAdjacentHTML("beforeEnd",aj.join(""))};function w(m,ag){var j=F(m.strokeStyle);var p=j.color;var Z=j.alpha*m.globalAlpha;var i=m.lineScale_*m.lineWidth;if(i<1){Z*=i}ag.push("<g_vml_:stroke",' opacity="',Z,'"',' joinstyle="',m.lineJoin,'"',' miterlimit="',m.miterLimit,'"',' endcap="',S(m.lineCap),'"',' weight="',i,'px"',' color="',p,'" />')}function G(aq,ai,aK,ar){var aj=aq.fillStyle;var aB=aq.arcScaleX_;var aA=aq.arcScaleY_;var j=ar.x-aK.x;var p=ar.y-aK.y;if(aj instanceof U){var an=0;var aF={x:0,y:0};var ax=0;var am=1;if(aj.type_=="gradient"){var al=aj.x0_/aB;var m=aj.y0_/aA;var ak=aj.x1_/aB;var aM=aj.y1_/aA;var aJ=V(aq,al,m);var aI=V(aq,ak,aM);var ag=aI.x-aJ.x;var Z=aI.y-aJ.y;an=Math.atan2(ag,Z)*180/Math.PI;if(an<0){an+=360}if(an<0.000001){an=0}}else{var aJ=V(aq,aj.x0_,aj.y0_);aF={x:(aJ.x-aK.x)/j,y:(aJ.y-aK.y)/p};j/=aB*d;p/=aA*d;var aD=ab.max(j,p);ax=2*aj.r0_/aD;am=2*aj.r1_/aD-ax}var av=aj.colors_;av.sort(function(aN,i){return aN.offset-i.offset});var ap=av.length;var au=av[0].color;var at=av[ap-1].color;var az=av[0].alpha*aq.globalAlpha;var ay=av[ap-1].alpha*aq.globalAlpha;var aE=[];for(var aH=0;aH<ap;aH++){var ao=av[aH];aE.push(ao.offset*am+ax+" "+ao.color)}ai.push('<g_vml_:fill type="',aj.type_,'"',' method="none" focus="100%"',' 
color="',au,'"',' color2="',at,'"',' colors="',aE.join(","),'"',' opacity="',ay,'"',' g_o_:opacity2="',az,'"',' angle="',an,'"',' focusposition="',aF.x,",",aF.y,'" />')}else{if(aj instanceof T){if(j&&p){var ah=-aK.x;var aC=-aK.y;ai.push("<g_vml_:fill",' position="',ah/j*aB*aB,",",aC/p*aA*aA,'"',' type="tile"',' src="',aj.src_,'" />')}}else{var aL=F(aq.fillStyle);var aw=aL.color;var aG=aL.alpha*aq.globalAlpha;ai.push('<g_vml_:fill color="',aw,'" opacity="',aG,'" />')}}}q.fill=function(){this.stroke(true)};q.closePath=function(){this.currentPath_.push({type:"close"})};function V(j,Z,p){var i=j.m_;return{x:d*(Z*i[0][0]+p*i[1][0]+i[2][0])-f,y:d*(Z*i[0][1]+p*i[1][1]+i[2][1])-f}}q.save=function(){var i={};v(this,i);this.aStack_.push(i);this.mStack_.push(this.m_);this.m_=J(B(),this.m_)};q.restore=function(){if(this.aStack_.length){v(this.aStack_.pop(),this);this.m_=this.mStack_.pop()}};function h(i){return isFinite(i[0][0])&&isFinite(i[0][1])&&isFinite(i[1][0])&&isFinite(i[1][1])&&isFinite(i[2][0])&&isFinite(i[2][1])}function aa(j,i,p){if(!h(i)){return}j.m_=i;if(p){var Z=i[0][0]*i[1][1]-i[0][1]*i[1][0];j.lineScale_=N(H(Z))}}q.translate=function(m,j){var i=[[1,0,0],[0,1,0],[m,j,1]];aa(this,J(i,this.m_),false)};q.rotate=function(j){var p=A(j);var m=l(j);var i=[[p,m,0],[-m,p,0],[0,0,1]];aa(this,J(i,this.m_),false)};q.scale=function(m,j){this.arcScaleX_*=m;this.arcScaleY_*=j;var i=[[m,0,0],[0,j,0],[0,0,1]];aa(this,J(i,this.m_),true)};q.transform=function(Z,p,ah,ag,j,i){var m=[[Z,p,0],[ah,ag,0],[j,i,1]];aa(this,J(m,this.m_),true)};q.setTransform=function(ag,Z,ai,ah,p,j){var i=[[ag,Z,0],[ai,ah,0],[p,j,1]];aa(this,i,true)};q.drawText_=function(am,ak,aj,ap,ai){var ao=this.m_,at=1000,j=0,ar=at,ah={x:0,y:0},ag=[];var i=u(E(this.font),this.element_);var p=ac(i);var au=this.element_.currentStyle;var 
Z=this.textAlign.toLowerCase();switch(Z){case"left":case"center":case"right":break;case"end":Z=au.direction=="ltr"?"right":"left";break;case"start":Z=au.direction=="rtl"?"right":"left";break;default:Z="left"}switch(this.textBaseline){case"hanging":case"top":ah.y=i.size/1.75;break;case"middle":break;default:case null:case"alphabetic":case"ideographic":case"bottom":ah.y=-i.size/2.25;break}switch(Z){case"right":j=at;ar=0.05;break;case"center":j=ar=at/2;break}var aq=V(this,ak+ah.x,aj+ah.y);ag.push('<g_vml_:line from="',-j,' 0" to="',ar,' 0.05" ',' coordsize="100 100" coordorigin="0 0"',' filled="',!ai,'" stroked="',!!ai,'" style="position:absolute;width:1px;height:1px;">');if(ai){w(this,ag)}else{G(this,ag,{x:-j,y:0},{x:ar,y:i.size})}var an=ao[0][0].toFixed(3)+","+ao[1][0].toFixed(3)+","+ao[0][1].toFixed(3)+","+ao[1][1].toFixed(3)+",0,0";var al=n(aq.x/d)+","+n(aq.y/d);ag.push('<g_vml_:skew on="t" matrix="',an,'" ',' offset="',al,'" origin="',j,' 0" />','<g_vml_:path textpathok="true" />','<g_vml_:textpath on="true" string="',af(am),'" style="v-text-align:',Z,";font:",af(p),'" /></g_vml_:line>');this.element_.insertAdjacentHTML("beforeEnd",ag.join(""))};q.fillText=function(m,i,p,j){this.drawText_(m,i,p,j,false)};q.strokeText=function(m,i,p,j){this.drawText_(m,i,p,j,true)};q.measureText=function(m){if(!this.textMeasureEl_){var i='<span style="position:absolute;top:-20000px;left:0;padding:0;margin:0;border:none;white-space:pre;"></span>';this.element_.insertAdjacentHTML("beforeEnd",i);this.textMeasureEl_=this.element_.lastChild}var j=this.element_.ownerDocument;this.textMeasureEl_.innerHTML="";this.textMeasureEl_.style.font=this.font;this.textMeasureEl_.appendChild(j.createTextNode(m));return{width:this.textMeasureEl_.offsetWidth}};q.clip=function(){};q.arcTo=function(){};q.createPattern=function(j,i){return new T(j,i)};function 
U(i){this.type_=i;this.x0_=0;this.y0_=0;this.r0_=0;this.x1_=0;this.y1_=0;this.r1_=0;this.colors_=[]}U.prototype.addColorStop=function(j,i){i=F(i);this.colors_.push({offset:j,color:i.color,alpha:i.alpha})};function T(j,i){Q(j);switch(i){case"repeat":case null:case"":this.repetition_="repeat";break;case"repeat-x":case"repeat-y":case"no-repeat":this.repetition_=i;break;default:O("SYNTAX_ERR")}this.src_=j.src;this.width_=j.width;this.height_=j.height}function O(i){throw new P(i)}function Q(i){if(!i||i.nodeType!=1||i.tagName!="IMG"){O("TYPE_MISMATCH_ERR")}if(i.readyState!="complete"){O("INVALID_STATE_ERR")}}function P(i){this.code=this[i];this.message=i+": DOM Exception "+this.code}var X=P.prototype=new Error;X.INDEX_SIZE_ERR=1;X.DOMSTRING_SIZE_ERR=2;X.HIERARCHY_REQUEST_ERR=3;X.WRONG_DOCUMENT_ERR=4;X.INVALID_CHARACTER_ERR=5;X.NO_DATA_ALLOWED_ERR=6;X.NO_MODIFICATION_ALLOWED_ERR=7;X.NOT_FOUND_ERR=8;X.NOT_SUPPORTED_ERR=9;X.INUSE_ATTRIBUTE_ERR=10;X.INVALID_STATE_ERR=11;X.SYNTAX_ERR=12;X.INVALID_MODIFICATION_ERR=13;X.NAMESPACE_ERR=14;X.INVALID_ACCESS_ERR=15;X.VALIDATION_ERR=16;X.TYPE_MISMATCH_ERR=17;G_vmlCanvasManager=e;CanvasRenderingContext2D=D;CanvasGradient=U;CanvasPattern=T;DOMException=P})()};
\ No newline at end of file
+// Memory Leaks patch from http://explorercanvas.googlecode.com/svn/trunk/ 
+//  svn : r73
+// ------------------------------------------------------------------
+// Copyright 2006 Google Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+
+// Known Issues:
+//
+// * Patterns only support repeat.
+// * Radial gradients are not implemented. The VML version of these looks
+//   very different from the canvas one.
+// * Clipping paths are not implemented.
+// * Coordsize. The width and height attributes have higher priority than
+//   the width and height style values, which isn't correct.
+// * Painting mode isn't implemented.
+// * Canvas width/height should use content-box by default. IE in
+//   Quirks mode will draw the canvas using border-box. Either change your
+//   doctype to HTML5
+//   (http://www.whatwg.org/specs/web-apps/current-work/#the-doctype)
+//   or use Box Sizing Behavior from WebFX
+//   (http://webfx.eae.net/dhtml/boxsizing/boxsizing.html)
+// * Non uniform scaling does not correctly scale strokes.
+// * Optimize. There is always room for speed improvements.
+
+// Only add this code if we do not already have a canvas implementation
+if (!document.createElement('canvas').getContext) {
+
+(function() {
+
+  // alias some functions to make (compiled) code shorter
+  var m = Math;
+  var mr = m.round;
+  var ms = m.sin;
+  var mc = m.cos;
+  var abs = m.abs;
+  var sqrt = m.sqrt;
+
+  // this is used for sub pixel precision
+  var Z = 10;
+  var Z2 = Z / 2;
+
+  var IE_VERSION = +navigator.userAgent.match(/MSIE ([\d.]+)?/)[1];
+
+  /**
+   * This function is assigned to the <canvas> elements as element.getContext().
+   * @this {HTMLElement}
+   * @return {CanvasRenderingContext2D_}
+   */
+  function getContext() {
+    return this.context_ ||
+        (this.context_ = new CanvasRenderingContext2D_(this));
+  }
+
+  var slice = Array.prototype.slice;
+
+  /**
+   * Binds a function to an object. The returned function will always use the
+   * passed in {@code obj} as {@code this}.
+   *
+   * Example:
+   *
+   *   g = bind(f, obj, a, b)
+   *   g(c, d) // will do f.call(obj, a, b, c, d)
+   *
+   * @param {Function} f The function to bind the object to
+   * @param {Object} obj The object that should act as this when the function
+   *     is called
+   * @param {*} var_args Rest arguments that will be used as the initial
+   *     arguments when the function is called
+   * @return {Function} A new function that has bound this
+   */
+  function bind(f, obj, var_args) {
+    var a = slice.call(arguments, 2);
+    return function() {
+      return f.apply(obj, a.concat(slice.call(arguments)));
+    };
+  }
+
+  function encodeHtmlAttribute(s) {
+    return String(s).replace(/&/g, '&amp;').replace(/"/g, '&quot;');
+  }
+
+  function addNamespace(doc, prefix, urn) {
+    if (!doc.namespaces[prefix]) {
+      doc.namespaces.add(prefix, urn, '#default#VML');
+    }
+  }
+
+  function addNamespacesAndStylesheet(doc) {
+    addNamespace(doc, 'g_vml_', 'urn:schemas-microsoft-com:vml');
+    addNamespace(doc, 'g_o_', 'urn:schemas-microsoft-com:office:office');
+
+    // Setup default CSS.  Only add one style sheet per document
+    if (!doc.styleSheets['ex_canvas_']) {
+      var ss = doc.createStyleSheet();
+      ss.owningElement.id = 'ex_canvas_';
+      ss.cssText = 'canvas{display:inline-block;overflow:hidden;' +
+          // default size is 300x150 in Gecko and Opera
+          'text-align:left;width:300px;height:150px}';
+    }
+  }
+
+  // Add namespaces and stylesheet at startup.
+  addNamespacesAndStylesheet(document);
+
+  var G_vmlCanvasManager_ = {
+    init: function(opt_doc) {
+      var doc = opt_doc || document;
+      // Create a dummy element so that IE will allow canvas elements to be
+      // recognized.
+      doc.createElement('canvas');
+      doc.attachEvent('onreadystatechange', bind(this.init_, this, doc));
+    },
+
+    init_: function(doc) {
+      // find all canvas elements
+      var els = doc.getElementsByTagName('canvas');
+      for (var i = 0; i < els.length; i++) {
+        this.initElement(els[i]);
+      }
+    },
+
+    /**
+     * Public initializes a canvas element so that it can be used as canvas
+     * element from now on. This is called automatically before the page is
+     * loaded but if you are creating elements using createElement you need to
+     * make sure this is called on the element.
+     * @param {HTMLElement} el The canvas element to initialize.
+     * @return {HTMLElement} the element that was created.
+     */
+    initElement: function(el) {
+      if (!el.getContext) {
+        el.getContext = getContext;
+
+        // Add namespaces and stylesheet to document of the element.
+        addNamespacesAndStylesheet(el.ownerDocument);
+
+        // Remove fallback content. There is no way to hide text nodes so we
+        // just remove all childNodes. We could hide all elements and remove
+        // text nodes but who really cares about the fallback content.
+        el.innerHTML = '';
+
+        // do not use inline function because that will leak memory
+        el.attachEvent('onpropertychange', onPropertyChange);
+        el.attachEvent('onresize', onResize);
+
+        var attrs = el.attributes;
+        if (attrs.width && attrs.width.specified) {
+          // TODO: use runtimeStyle and coordsize
+          // el.getContext().setWidth_(attrs.width.nodeValue);
+          el.style.width = attrs.width.nodeValue + 'px';
+        } else {
+          el.width = el.clientWidth;
+        }
+        if (attrs.height && attrs.height.specified) {
+          // TODO: use runtimeStyle and coordsize
+          // el.getContext().setHeight_(attrs.height.nodeValue);
+          el.style.height = attrs.height.nodeValue + 'px';
+        } else {
+          el.height = el.clientHeight;
+        }
+        //el.getContext().setCoordsize_()
+      }
+      return el;
+    },
+
+    // Memory Leaks patch : see http://code.google.com/p/explorercanvas/issues/detail?id=82
+    uninitElement: function(el){
+      if (el.getContext) {
+        var ctx = el.getContext();
+        delete ctx.element_;
+        delete ctx.canvas;
+        el.innerHTML = "";
+        //el.outerHTML = "";
+        el.context_ = null;
+        el.getContext = null;
+        el.detachEvent("onpropertychange", onPropertyChange);
+        el.detachEvent("onresize", onResize);
+      }
+    }
+  };
+
+  function onPropertyChange(e) {
+    var el = e.srcElement;
+
+    switch (e.propertyName) {
+      case 'width':
+        el.getContext().clearRect();
+        el.style.width = el.attributes.width.nodeValue + 'px';
+        // In IE8 this does not trigger onresize.
+        el.firstChild.style.width =  el.clientWidth + 'px';
+        break;
+      case 'height':
+        el.getContext().clearRect();
+        el.style.height = el.attributes.height.nodeValue + 'px';
+        el.firstChild.style.height = el.clientHeight + 'px';
+        break;
+    }
+  }
+
+  function onResize(e) {
+    var el = e.srcElement;
+    if (el.firstChild) {
+      el.firstChild.style.width =  el.clientWidth + 'px';
+      el.firstChild.style.height = el.clientHeight + 'px';
+    }
+  }
+
+  G_vmlCanvasManager_.init();
+
+  // precompute "00" to "FF"
+  var decToHex = [];
+  for (var i = 0; i < 16; i++) {
+    for (var j = 0; j < 16; j++) {
+      decToHex[i * 16 + j] = i.toString(16) + j.toString(16);
+    }
+  }
+
+  function createMatrixIdentity() {
+    return [
+      [1, 0, 0],
+      [0, 1, 0],
+      [0, 0, 1]
+    ];
+  }
+
+  function matrixMultiply(m1, m2) {
+    var result = createMatrixIdentity();
+
+    for (var x = 0; x < 3; x++) {
+      for (var y = 0; y < 3; y++) {
+        var sum = 0;
+
+        for (var z = 0; z < 3; z++) {
+          sum += m1[x][z] * m2[z][y];
+        }
+
+        result[x][y] = sum;
+      }
+    }
+    return result;
+  }
+
+  function copyState(o1, o2) {
+    o2.fillStyle     = o1.fillStyle;
+    o2.lineCap       = o1.lineCap;
+    o2.lineJoin      = o1.lineJoin;
+    o2.lineWidth     = o1.lineWidth;
+    o2.miterLimit    = o1.miterLimit;
+    o2.shadowBlur    = o1.shadowBlur;
+    o2.shadowColor   = o1.shadowColor;
+    o2.shadowOffsetX = o1.shadowOffsetX;
+    o2.shadowOffsetY = o1.shadowOffsetY;
+    o2.strokeStyle   = o1.strokeStyle;
+    o2.globalAlpha   = o1.globalAlpha;
+    o2.font          = o1.font;
+    o2.textAlign     = o1.textAlign;
+    o2.textBaseline  = o1.textBaseline;
+    o2.arcScaleX_    = o1.arcScaleX_;
+    o2.arcScaleY_    = o1.arcScaleY_;
+    o2.lineScale_    = o1.lineScale_;
+  }
+
+  var colorData = {
+    aliceblue: '#F0F8FF',
+    antiquewhite: '#FAEBD7',
+    aquamarine: '#7FFFD4',
+    azure: '#F0FFFF',
+    beige: '#F5F5DC',
+    bisque: '#FFE4C4',
+    black: '#000000',
+    blanchedalmond: '#FFEBCD',
+    blueviolet: '#8A2BE2',
+    brown: '#A52A2A',
+    burlywood: '#DEB887',
+    cadetblue: '#5F9EA0',
+    chartreuse: '#7FFF00',
+    chocolate: '#D2691E',
+    coral: '#FF7F50',
+    cornflowerblue: '#6495ED',
+    cornsilk: '#FFF8DC',
+    crimson: '#DC143C',
+    cyan: '#00FFFF',
+    darkblue: '#00008B',
+    darkcyan: '#008B8B',
+    darkgoldenrod: '#B8860B',
+    darkgray: '#A9A9A9',
+    darkgreen: '#006400',
+    darkgrey: '#A9A9A9',
+    darkkhaki: '#BDB76B',
+    darkmagenta: '#8B008B',
+    darkolivegreen: '#556B2F',
+    darkorange: '#FF8C00',
+    darkorchid: '#9932CC',
+    darkred: '#8B0000',
+    darksalmon: '#E9967A',
+    darkseagreen: '#8FBC8F',
+    darkslateblue: '#483D8B',
+    darkslategray: '#2F4F4F',
+    darkslategrey: '#2F4F4F',
+    darkturquoise: '#00CED1',
+    darkviolet: '#9400D3',
+    deeppink: '#FF1493',
+    deepskyblue: '#00BFFF',
+    dimgray: '#696969',
+    dimgrey: '#696969',
+    dodgerblue: '#1E90FF',
+    firebrick: '#B22222',
+    floralwhite: '#FFFAF0',
+    forestgreen: '#228B22',
+    gainsboro: '#DCDCDC',
+    ghostwhite: '#F8F8FF',
+    gold: '#FFD700',
+    goldenrod: '#DAA520',
+    grey: '#808080',
+    greenyellow: '#ADFF2F',
+    honeydew: '#F0FFF0',
+    hotpink: '#FF69B4',
+    indianred: '#CD5C5C',
+    indigo: '#4B0082',
+    ivory: '#FFFFF0',
+    khaki: '#F0E68C',
+    lavender: '#E6E6FA',
+    lavenderblush: '#FFF0F5',
+    lawngreen: '#7CFC00',
+    lemonchiffon: '#FFFACD',
+    lightblue: '#ADD8E6',
+    lightcoral: '#F08080',
+    lightcyan: '#E0FFFF',
+    lightgoldenrodyellow: '#FAFAD2',
+    lightgreen: '#90EE90',
+    lightgrey: '#D3D3D3',
+    lightpink: '#FFB6C1',
+    lightsalmon: '#FFA07A',
+    lightseagreen: '#20B2AA',
+    lightskyblue: '#87CEFA',
+    lightslategray: '#778899',
+    lightslategrey: '#778899',
+    lightsteelblue: '#B0C4DE',
+    lightyellow: '#FFFFE0',
+    limegreen: '#32CD32',
+    linen: '#FAF0E6',
+    magenta: '#FF00FF',
+    mediumaquamarine: '#66CDAA',
+    mediumblue: '#0000CD',
+    mediumorchid: '#BA55D3',
+    mediumpurple: '#9370DB',
+    mediumseagreen: '#3CB371',
+    mediumslateblue: '#7B68EE',
+    mediumspringgreen: '#00FA9A',
+    mediumturquoise: '#48D1CC',
+    mediumvioletred: '#C71585',
+    midnightblue: '#191970',
+    mintcream: '#F5FFFA',
+    mistyrose: '#FFE4E1',
+    moccasin: '#FFE4B5',
+    navajowhite: '#FFDEAD',
+    oldlace: '#FDF5E6',
+    olivedrab: '#6B8E23',
+    orange: '#FFA500',
+    orangered: '#FF4500',
+    orchid: '#DA70D6',
+    palegoldenrod: '#EEE8AA',
+    palegreen: '#98FB98',
+    paleturquoise: '#AFEEEE',
+    palevioletred: '#DB7093',
+    papayawhip: '#FFEFD5',
+    peachpuff: '#FFDAB9',
+    peru: '#CD853F',
+    pink: '#FFC0CB',
+    plum: '#DDA0DD',
+    powderblue: '#B0E0E6',
+    rosybrown: '#BC8F8F',
+    royalblue: '#4169E1',
+    saddlebrown: '#8B4513',
+    salmon: '#FA8072',
+    sandybrown: '#F4A460',
+    seagreen: '#2E8B57',
+    seashell: '#FFF5EE',
+    sienna: '#A0522D',
+    skyblue: '#87CEEB',
+    slateblue: '#6A5ACD',
+    slategray: '#708090',
+    slategrey: '#708090',
+    snow: '#FFFAFA',
+    springgreen: '#00FF7F',
+    steelblue: '#4682B4',
+    tan: '#D2B48C',
+    thistle: '#D8BFD8',
+    tomato: '#FF6347',
+    turquoise: '#40E0D0',
+    violet: '#EE82EE',
+    wheat: '#F5DEB3',
+    whitesmoke: '#F5F5F5',
+    yellowgreen: '#9ACD32'
+  };
+
+
+  function getRgbHslContent(styleString) {
+    var start = styleString.indexOf('(', 3);
+    var end = styleString.indexOf(')', start + 1);
+    var parts = styleString.substring(start + 1, end).split(',');
+    // add alpha if needed
+    if (parts.length != 4 || styleString.charAt(3) != 'a') {
+      parts[3] = 1;
+    }
+    return parts;
+  }
+
+  function percent(s) {
+    return parseFloat(s) / 100;
+  }
+
+  function clamp(v, min, max) {
+    return Math.min(max, Math.max(min, v));
+  }
+
+  function hslToRgb(parts){
+    var r, g, b, h, s, l;
+    h = parseFloat(parts[0]) / 360 % 360;
+    if (h < 0)
+      h++;
+    s = clamp(percent(parts[1]), 0, 1);
+    l = clamp(percent(parts[2]), 0, 1);
+    if (s == 0) {
+      r = g = b = l; // achromatic
+    } else {
+      var q = l < 0.5 ? l * (1 + s) : l + s - l * s;
+      var p = 2 * l - q;
+      r = hueToRgb(p, q, h + 1 / 3);
+      g = hueToRgb(p, q, h);
+      b = hueToRgb(p, q, h - 1 / 3);
+    }
+
+    return '#' + decToHex[Math.floor(r * 255)] +
+        decToHex[Math.floor(g * 255)] +
+        decToHex[Math.floor(b * 255)];
+  }
+
+  function hueToRgb(m1, m2, h) {
+    if (h < 0)
+      h++;
+    if (h > 1)
+      h--;
+
+    if (6 * h < 1)
+      return m1 + (m2 - m1) * 6 * h;
+    else if (2 * h < 1)
+      return m2;
+    else if (3 * h < 2)
+      return m1 + (m2 - m1) * (2 / 3 - h) * 6;
+    else
+      return m1;
+  }
+
+  var processStyleCache = {};
+
+  function processStyle(styleString) {
+    if (styleString in processStyleCache) {
+      return processStyleCache[styleString];
+    }
+
+    var str, alpha = 1;
+
+    styleString = String(styleString);
+    if (styleString.charAt(0) == '#') {
+      str = styleString;
+    } else if (/^rgb/.test(styleString)) {
+      var parts = getRgbHslContent(styleString);
+      var str = '#', n;
+      for (var i = 0; i < 3; i++) {
+        if (parts[i].indexOf('%') != -1) {
+          n = Math.floor(percent(parts[i]) * 255);
+        } else {
+          n = +parts[i];
+        }
+        str += decToHex[clamp(n, 0, 255)];
+      }
+      alpha = +parts[3];
+    } else if (/^hsl/.test(styleString)) {
+      var parts = getRgbHslContent(styleString);
+      str = hslToRgb(parts);
+      alpha = parts[3];
+    } else {
+      str = colorData[styleString] || styleString;
+    }
+    return processStyleCache[styleString] = {color: str, alpha: alpha};
+  }
+
+  var DEFAULT_STYLE = {
+    style: 'normal',
+    variant: 'normal',
+    weight: 'normal',
+    size: 10,
+    family: 'sans-serif'
+  };
+
+  // Internal text style cache
+  var fontStyleCache = {};
+
+  function processFontStyle(styleString) {
+    if (fontStyleCache[styleString]) {
+      return fontStyleCache[styleString];
+    }
+
+    var el = document.createElement('div');
+    var style = el.style;
+    try {
+      style.font = styleString;
+    } catch (ex) {
+      // Ignore failures to set to invalid font.
+    }
+
+    return fontStyleCache[styleString] = {
+      style: style.fontStyle || DEFAULT_STYLE.style,
+      variant: style.fontVariant || DEFAULT_STYLE.variant,
+      weight: style.fontWeight || DEFAULT_STYLE.weight,
+      size: style.fontSize || DEFAULT_STYLE.size,
+      family: style.fontFamily || DEFAULT_STYLE.family
+    };
+  }
+
+  function getComputedStyle(style, element) {
+    var computedStyle = {};
+
+    for (var p in style) {
+      computedStyle[p] = style[p];
+    }
+
+    // Compute the size
+    var canvasFontSize = parseFloat(element.currentStyle.fontSize),
+        fontSize = parseFloat(style.size);
+
+    if (typeof style.size == 'number') {
+      computedStyle.size = style.size;
+    } else if (style.size.indexOf('px') != -1) {
+      computedStyle.size = fontSize;
+    } else if (style.size.indexOf('em') != -1) {
+      computedStyle.size = canvasFontSize * fontSize;
+    } else if(style.size.indexOf('%') != -1) {
+      computedStyle.size = (canvasFontSize / 100) * fontSize;
+    } else if (style.size.indexOf('pt') != -1) {
+      computedStyle.size = fontSize / .75;
+    } else {
+      computedStyle.size = canvasFontSize;
+    }
+
+    // Different scaling between normal text and VML text. This was found using
+    // trial and error to get the same size as non VML text.
+    computedStyle.size *= 0.981;
+
+    // Fix for VML handling of bare font family names.  Add a '' around font family names.
+    computedStyle.family =  "'" + computedStyle.family.replace(/(\'|\")/g,'').replace(/\s*,\s*/g, "', '") + "'";
+
+    return computedStyle;
+  }
+
+  function buildStyle(style) {
+    return style.style + ' ' + style.variant + ' ' + style.weight + ' ' +
+        style.size + 'px ' + style.family;
+  }
+
+  var lineCapMap = {
+    'butt': 'flat',
+    'round': 'round'
+  };
+
+  function processLineCap(lineCap) {
+    return lineCapMap[lineCap] || 'square';
+  }
+
+  /**
+   * This class implements CanvasRenderingContext2D interface as described by
+   * the WHATWG.
+   * @param {HTMLElement} canvasElement The element that the 2D context should
+   * be associated with
+   */
+  function CanvasRenderingContext2D_(canvasElement) {
+    this.m_ = createMatrixIdentity();
+
+    this.mStack_ = [];
+    this.aStack_ = [];
+    this.currentPath_ = [];
+
+    // Canvas context properties
+    this.strokeStyle = '#000';
+    this.fillStyle = '#000';
+
+    this.lineWidth = 1;
+    this.lineJoin = 'miter';
+    this.lineCap = 'butt';
+    this.miterLimit = Z * 1;
+    this.globalAlpha = 1;
+    this.font = '10px sans-serif';
+    this.textAlign = 'left';
+    this.textBaseline = 'alphabetic';
+    this.canvas = canvasElement;
+
+    var cssText = 'width:' + canvasElement.clientWidth + 'px;height:' +
+        canvasElement.clientHeight + 'px;overflow:hidden;position:absolute';
+    var el = canvasElement.ownerDocument.createElement('div');
+    el.style.cssText = cssText;
+    canvasElement.appendChild(el);
+
+    var overlayEl = el.cloneNode(false);
+    // Use a non transparent background.
+    overlayEl.style.backgroundColor = 'red';
+    overlayEl.style.filter = 'alpha(opacity=0)';
+    canvasElement.appendChild(overlayEl);
+
+    this.element_ = el;
+    this.arcScaleX_ = 1;
+    this.arcScaleY_ = 1;
+    this.lineScale_ = 1;
+  }
+
+  var contextPrototype = CanvasRenderingContext2D_.prototype;
+  contextPrototype.clearRect = function() {
+    if (this.textMeasureEl_) {
+      this.textMeasureEl_.removeNode(true);
+      this.textMeasureEl_ = null;
+    }
+    this.element_.innerHTML = '';
+  };
+
+  contextPrototype.beginPath = function() {
+    // TODO: Branch current matrix so that save/restore has no effect
+    //       as per safari docs.
+    this.currentPath_ = [];
+  };
+
+  contextPrototype.moveTo = function(aX, aY) {
+    var p = getCoords(this, aX, aY);
+    this.currentPath_.push({type: 'moveTo', x: p.x, y: p.y});
+    this.currentX_ = p.x;
+    this.currentY_ = p.y;
+  };
+
+  contextPrototype.lineTo = function(aX, aY) {
+    var p = getCoords(this, aX, aY);
+    this.currentPath_.push({type: 'lineTo', x: p.x, y: p.y});
+
+    this.currentX_ = p.x;
+    this.currentY_ = p.y;
+  };
+
+  contextPrototype.bezierCurveTo = function(aCP1x, aCP1y,
+                                            aCP2x, aCP2y,
+                                            aX, aY) {
+    var p = getCoords(this, aX, aY);
+    var cp1 = getCoords(this, aCP1x, aCP1y);
+    var cp2 = getCoords(this, aCP2x, aCP2y);
+    bezierCurveTo(this, cp1, cp2, p);
+  };
+
+  // Helper function that takes the already fixed cordinates.
+  function bezierCurveTo(self, cp1, cp2, p) {
+    self.currentPath_.push({
+      type: 'bezierCurveTo',
+      cp1x: cp1.x,
+      cp1y: cp1.y,
+      cp2x: cp2.x,
+      cp2y: cp2.y,
+      x: p.x,
+      y: p.y
+    });
+    self.currentX_ = p.x;
+    self.currentY_ = p.y;
+  }
+
+  contextPrototype.quadraticCurveTo = function(aCPx, aCPy, aX, aY) {
+    // the following is lifted almost directly from
+    // http://developer.mozilla.org/en/docs/Canvas_tutorial:Drawing_shapes
+
+    var cp = getCoords(this, aCPx, aCPy);
+    var p = getCoords(this, aX, aY);
+
+    var cp1 = {
+      x: this.currentX_ + 2.0 / 3.0 * (cp.x - this.currentX_),
+      y: this.currentY_ + 2.0 / 3.0 * (cp.y - this.currentY_)
+    };
+    var cp2 = {
+      x: cp1.x + (p.x - this.currentX_) / 3.0,
+      y: cp1.y + (p.y - this.currentY_) / 3.0
+    };
+
+    bezierCurveTo(this, cp1, cp2, p);
+  };
+
+  contextPrototype.arc = function(aX, aY, aRadius,
+                                  aStartAngle, aEndAngle, aClockwise) {
+    aRadius *= Z;
+    var arcType = aClockwise ? 'at' : 'wa';
+
+    var xStart = aX + mc(aStartAngle) * aRadius - Z2;
+    var yStart = aY + ms(aStartAngle) * aRadius - Z2;
+
+    var xEnd = aX + mc(aEndAngle) * aRadius - Z2;
+    var yEnd = aY + ms(aEndAngle) * aRadius - Z2;
+
+    // IE won't render arches drawn counter clockwise if xStart == xEnd.
+    if (xStart == xEnd && !aClockwise) {
+      xStart += 0.125; // Offset xStart by 1/80 of a pixel. Use something
+                       // that can be represented in binary
+    }
+
+    var p = getCoords(this, aX, aY);
+    var pStart = getCoords(this, xStart, yStart);
+    var pEnd = getCoords(this, xEnd, yEnd);
+
+    this.currentPath_.push({type: arcType,
+                           x: p.x,
+                           y: p.y,
+                           radius: aRadius,
+                           xStart: pStart.x,
+                           yStart: pStart.y,
+                           xEnd: pEnd.x,
+                           yEnd: pEnd.y});
+
+  };
+
+  contextPrototype.rect = function(aX, aY, aWidth, aHeight) {
+    this.moveTo(aX, aY);
+    this.lineTo(aX + aWidth, aY);
+    this.lineTo(aX + aWidth, aY + aHeight);
+    this.lineTo(aX, aY + aHeight);
+    this.closePath();
+  };
+
+  contextPrototype.strokeRect = function(aX, aY, aWidth, aHeight) {
+    var oldPath = this.currentPath_;
+    this.beginPath();
+
+    this.moveTo(aX, aY);
+    this.lineTo(aX + aWidth, aY);
+    this.lineTo(aX + aWidth, aY + aHeight);
+    this.lineTo(aX, aY + aHeight);
+    this.closePath();
+    this.stroke();
+
+    this.currentPath_ = oldPath;
+  };
+
+  contextPrototype.fillRect = function(aX, aY, aWidth, aHeight) {
+    var oldPath = this.currentPath_;
+    this.beginPath();
+
+    this.moveTo(aX, aY);
+    this.lineTo(aX + aWidth, aY);
+    this.lineTo(aX + aWidth, aY + aHeight);
+    this.lineTo(aX, aY + aHeight);
+    this.closePath();
+    this.fill();
+
+    this.currentPath_ = oldPath;
+  };
+
+  contextPrototype.createLinearGradient = function(aX0, aY0, aX1, aY1) {
+    var gradient = new CanvasGradient_('gradient');
+    gradient.x0_ = aX0;
+    gradient.y0_ = aY0;
+    gradient.x1_ = aX1;
+    gradient.y1_ = aY1;
+    return gradient;
+  };
+
+  contextPrototype.createRadialGradient = function(aX0, aY0, aR0,
+                                                   aX1, aY1, aR1) {
+    var gradient = new CanvasGradient_('gradientradial');
+    gradient.x0_ = aX0;
+    gradient.y0_ = aY0;
+    gradient.r0_ = aR0;
+    gradient.x1_ = aX1;
+    gradient.y1_ = aY1;
+    gradient.r1_ = aR1;
+    return gradient;
+  };
+
+  contextPrototype.drawImage = function(image, var_args) {
+    var dx, dy, dw, dh, sx, sy, sw, sh;
+
+    // to find the original width we overide the width and height
+    var oldRuntimeWidth = image.runtimeStyle.width;
+    var oldRuntimeHeight = image.runtimeStyle.height;
+    image.runtimeStyle.width = 'auto';
+    image.runtimeStyle.height = 'auto';
+
+    // get the original size
+    var w = image.width;
+    var h = image.height;
+
+    // and remove overides
+    image.runtimeStyle.width = oldRuntimeWidth;
+    image.runtimeStyle.height = oldRuntimeHeight;
+
+    if (arguments.length == 3) {
+      dx = arguments[1];
+      dy = arguments[2];
+      sx = sy = 0;
+      sw = dw = w;
+      sh = dh = h;
+    } else if (arguments.length == 5) {
+      dx = arguments[1];
+      dy = arguments[2];
+      dw = arguments[3];
+      dh = arguments[4];
+      sx = sy = 0;
+      sw = w;
+      sh = h;
+    } else if (arguments.length == 9) {
+      sx = arguments[1];
+      sy = arguments[2];
+      sw = arguments[3];
+      sh = arguments[4];
+      dx = arguments[5];
+      dy = arguments[6];
+      dw = arguments[7];
+      dh = arguments[8];
+    } else {
+      throw Error('Invalid number of arguments');
+    }
+
+    var d = getCoords(this, dx, dy);
+
+    var w2 = sw / 2;
+    var h2 = sh / 2;
+
+    var vmlStr = [];
+
+    var W = 10;
+    var H = 10;
+
+    // For some reason that I've now forgotten, using divs didn't work
+    vmlStr.push(' <g_vml_:group',
+                ' coordsize="', Z * W, ',', Z * H, '"',
+                ' coordorigin="0,0"' ,
+                ' style="width:', W, 'px;height:', H, 'px;position:absolute;');
+
+    // If filters are necessary (rotation exists), create them
+    // filters are bog-slow, so only create them if abbsolutely necessary
+    // The following check doesn't account for skews (which don't exist
+    // in the canvas spec (yet) anyway.
+
+    if (this.m_[0][0] != 1 || this.m_[0][1] ||
+        this.m_[1][1] != 1 || this.m_[1][0]) {
+      var filter = [];
+
+      // Note the 12/21 reversal
+      filter.push('M11=', this.m_[0][0], ',',
+                  'M12=', this.m_[1][0], ',',
+                  'M21=', this.m_[0][1], ',',
+                  'M22=', this.m_[1][1], ',',
+                  'Dx=', mr(d.x / Z), ',',
+                  'Dy=', mr(d.y / Z), '');
+
+      // Bounding box calculation (need to minimize displayed area so that
+      // filters don't waste time on unused pixels.
+      var max = d;
+      var c2 = getCoords(this, dx + dw, dy);
+      var c3 = getCoords(this, dx, dy + dh);
+      var c4 = getCoords(this, dx + dw, dy + dh);
+
+      max.x = m.max(max.x, c2.x, c3.x, c4.x);
+      max.y = m.max(max.y, c2.y, c3.y, c4.y);
+
+      vmlStr.push('padding:0 ', mr(max.x / Z), 'px ', mr(max.y / Z),
+                  'px 0;filter:progid:DXImageTransform.Microsoft.Matrix(',
+                  filter.join(''), ", sizingmethod='clip');");
+
+    } else {
+      vmlStr.push('top:', mr(d.y / Z), 'px;left:', mr(d.x / Z), 'px;');
+    }
+
+    vmlStr.push(' ">' ,
+                '<g_vml_:image src="', image.src, '"',
+                ' style="width:', Z * dw, 'px;',
+                ' height:', Z * dh, 'px"',
+                ' cropleft="', sx / w, '"',
+                ' croptop="', sy / h, '"',
+                ' cropright="', (w - sx - sw) / w, '"',
+                ' cropbottom="', (h - sy - sh) / h, '"',
+                ' />',
+                '</g_vml_:group>');
+
+    this.element_.insertAdjacentHTML('BeforeEnd', vmlStr.join(''));
+  };
+
+  contextPrototype.stroke = function(aFill) {
+    var lineStr = [];
+    var lineOpen = false;
+
+    var W = 10;
+    var H = 10;
+
+    lineStr.push('<g_vml_:shape',
+                 ' filled="', !!aFill, '"',
+                 ' style="position:absolute;width:', W, 'px;height:', H, 'px;"',
+                 ' coordorigin="0,0"',
+                 ' coordsize="', Z * W, ',', Z * H, '"',
+                 ' stroked="', !aFill, '"',
+                 ' path="');
+
+    var newSeq = false;
+    var min = {x: null, y: null};
+    var max = {x: null, y: null};
+
+    for (var i = 0; i < this.currentPath_.length; i++) {
+      var p = this.currentPath_[i];
+      var c;
+
+      switch (p.type) {
+        case 'moveTo':
+          c = p;
+          lineStr.push(' m ', mr(p.x), ',', mr(p.y));
+          break;
+        case 'lineTo':
+          lineStr.push(' l ', mr(p.x), ',', mr(p.y));
+          break;
+        case 'close':
+          lineStr.push(' x ');
+          p = null;
+          break;
+        case 'bezierCurveTo':
+          lineStr.push(' c ',
+                       mr(p.cp1x), ',', mr(p.cp1y), ',',
+                       mr(p.cp2x), ',', mr(p.cp2y), ',',
+                       mr(p.x), ',', mr(p.y));
+          break;
+        case 'at':
+        case 'wa':
+          lineStr.push(' ', p.type, ' ',
+                       mr(p.x - this.arcScaleX_ * p.radius), ',',
+                       mr(p.y - this.arcScaleY_ * p.radius), ' ',
+                       mr(p.x + this.arcScaleX_ * p.radius), ',',
+                       mr(p.y + this.arcScaleY_ * p.radius), ' ',
+                       mr(p.xStart), ',', mr(p.yStart), ' ',
+                       mr(p.xEnd), ',', mr(p.yEnd));
+          break;
+      }
+
+
+      // TODO: Following is broken for curves due to
+      //       move to proper paths.
+
+      // Figure out dimensions so we can do gradient fills
+      // properly
+      if (p) {
+        if (min.x == null || p.x < min.x) {
+          min.x = p.x;
+        }
+        if (max.x == null || p.x > max.x) {
+          max.x = p.x;
+        }
+        if (min.y == null || p.y < min.y) {
+          min.y = p.y;
+        }
+        if (max.y == null || p.y > max.y) {
+          max.y = p.y;
+        }
+      }
+    }
+    lineStr.push(' ">');
+
+    if (!aFill) {
+      appendStroke(this, lineStr);
+    } else {
+      appendFill(this, lineStr, min, max);
+    }
+
+    lineStr.push('</g_vml_:shape>');
+
+    this.element_.insertAdjacentHTML('beforeEnd', lineStr.join(''));
+  };
+
+  function appendStroke(ctx, lineStr) {
+    var a = processStyle(ctx.strokeStyle);
+    var color = a.color;
+    var opacity = a.alpha * ctx.globalAlpha;
+    var lineWidth = ctx.lineScale_ * ctx.lineWidth;
+
+    // VML cannot correctly render a line if the width is less than 1px.
+    // In that case, we dilute the color to make the line look thinner.
+    if (lineWidth < 1) {
+      opacity *= lineWidth;
+    }
+
+    lineStr.push(
+      '<g_vml_:stroke',
+      ' opacity="', opacity, '"',
+      ' joinstyle="', ctx.lineJoin, '"',
+      ' miterlimit="', ctx.miterLimit, '"',
+      ' endcap="', processLineCap(ctx.lineCap), '"',
+      ' weight="', lineWidth, 'px"',
+      ' color="', color, '" />'
+    );
+  }
+
+  function appendFill(ctx, lineStr, min, max) {
+    var fillStyle = ctx.fillStyle;
+    var arcScaleX = ctx.arcScaleX_;
+    var arcScaleY = ctx.arcScaleY_;
+    var width = max.x - min.x;
+    var height = max.y - min.y;
+    if (fillStyle instanceof CanvasGradient_) {
+      // TODO: Gradients transformed with the transformation matrix.
+      var angle = 0;
+      var focus = {x: 0, y: 0};
+
+      // additional offset
+      var shift = 0;
+      // scale factor for offset
+      var expansion = 1;
+
+      if (fillStyle.type_ == 'gradient') {
+        var x0 = fillStyle.x0_ / arcScaleX;
+        var y0 = fillStyle.y0_ / arcScaleY;
+        var x1 = fillStyle.x1_ / arcScaleX;
+        var y1 = fillStyle.y1_ / arcScaleY;
+        var p0 = getCoords(ctx, x0, y0);
+        var p1 = getCoords(ctx, x1, y1);
+        var dx = p1.x - p0.x;
+        var dy = p1.y - p0.y;
+        angle = Math.atan2(dx, dy) * 180 / Math.PI;
+
+        // The angle should be a non-negative number.
+        if (angle < 0) {
+          angle += 360;
+        }
+
+        // Very small angles produce an unexpected result because they are
+        // converted to a scientific notation string.
+        if (angle < 1e-6) {
+          angle = 0;
+        }
+      } else {
+        var p0 = getCoords(ctx, fillStyle.x0_, fillStyle.y0_);
+        focus = {
+          x: (p0.x - min.x) / width,
+          y: (p0.y - min.y) / height
+        };
+
+        width  /= arcScaleX * Z;
+        height /= arcScaleY * Z;
+        var dimension = m.max(width, height);
+        shift = 2 * fillStyle.r0_ / dimension;
+        expansion = 2 * fillStyle.r1_ / dimension - shift;
+      }
+
+      // We need to sort the color stops in ascending order by offset,
+      // otherwise IE won't interpret it correctly.
+      var stops = fillStyle.colors_;
+      stops.sort(function(cs1, cs2) {
+        return cs1.offset - cs2.offset;
+      });
+
+      var length = stops.length;
+      var color1 = stops[0].color;
+      var color2 = stops[length - 1].color;
+      var opacity1 = stops[0].alpha * ctx.globalAlpha;
+      var opacity2 = stops[length - 1].alpha * ctx.globalAlpha;
+
+      var colors = [];
+      for (var i = 0; i < length; i++) {
+        var stop = stops[i];
+        colors.push(stop.offset * expansion + shift + ' ' + stop.color);
+      }
+
+      // When colors attribute is used, the meanings of opacity and o:opacity2
+      // are reversed.
+      lineStr.push('<g_vml_:fill type="', fillStyle.type_, '"',
+                   ' method="none" focus="100%"',
+                   ' color="', color1, '"',
+                   ' color2="', color2, '"',
+                   ' colors="', colors.join(','), '"',
+                   ' opacity="', opacity2, '"',
+                   ' g_o_:opacity2="', opacity1, '"',
+                   ' angle="', angle, '"',
+                   ' focusposition="', focus.x, ',', focus.y, '" />');
+    } else if (fillStyle instanceof CanvasPattern_) {
+      if (width && height) {
+        var deltaLeft = -min.x;
+        var deltaTop = -min.y;
+        lineStr.push('<g_vml_:fill',
+                     ' position="',
+                     deltaLeft / width * arcScaleX * arcScaleX, ',',
+                     deltaTop / height * arcScaleY * arcScaleY, '"',
+                     ' type="tile"',
+                     // TODO: Figure out the correct size to fit the scale.
+                     //' size="', w, 'px ', h, 'px"',
+                     ' src="', fillStyle.src_, '" />');
+       }
+    } else {
+      var a = processStyle(ctx.fillStyle);
+      var color = a.color;
+      var opacity = a.alpha * ctx.globalAlpha;
+      lineStr.push('<g_vml_:fill color="', color, '" opacity="', opacity,
+                   '" />');
+    }
+  }
+
+  contextPrototype.fill = function() {
+    this.stroke(true);
+  };
+
+  contextPrototype.closePath = function() {
+    this.currentPath_.push({type: 'close'});
+  };
+
+  function getCoords(ctx, aX, aY) {
+    var m = ctx.m_;
+    return {
+      x: Z * (aX * m[0][0] + aY * m[1][0] + m[2][0]) - Z2,
+      y: Z * (aX * m[0][1] + aY * m[1][1] + m[2][1]) - Z2
+    };
+  };
+
+  contextPrototype.save = function() {
+    var o = {};
+    copyState(this, o);
+    this.aStack_.push(o);
+    this.mStack_.push(this.m_);
+    this.m_ = matrixMultiply(createMatrixIdentity(), this.m_);
+  };
+
+  contextPrototype.restore = function() {
+    if (this.aStack_.length) {
+      copyState(this.aStack_.pop(), this);
+      this.m_ = this.mStack_.pop();
+    }
+  };
+
+  function matrixIsFinite(m) {
+    return isFinite(m[0][0]) && isFinite(m[0][1]) &&
+        isFinite(m[1][0]) && isFinite(m[1][1]) &&
+        isFinite(m[2][0]) && isFinite(m[2][1]);
+  }
+
+  function setM(ctx, m, updateLineScale) {
+    if (!matrixIsFinite(m)) {
+      return;
+    }
+    ctx.m_ = m;
+
+    if (updateLineScale) {
+      // Get the line scale.
+      // Determinant of this.m_ means how much the area is enlarged by the
+      // transformation. So its square root can be used as a scale factor
+      // for width.
+      var det = m[0][0] * m[1][1] - m[0][1] * m[1][0];
+      ctx.lineScale_ = sqrt(abs(det));
+    }
+  }
+
+  contextPrototype.translate = function(aX, aY) {
+    var m1 = [
+      [1,  0,  0],
+      [0,  1,  0],
+      [aX, aY, 1]
+    ];
+
+    setM(this, matrixMultiply(m1, this.m_), false);
+  };
+
+  contextPrototype.rotate = function(aRot) {
+    var c = mc(aRot);
+    var s = ms(aRot);
+
+    var m1 = [
+      [c,  s, 0],
+      [-s, c, 0],
+      [0,  0, 1]
+    ];
+
+    setM(this, matrixMultiply(m1, this.m_), false);
+  };
+
+  contextPrototype.scale = function(aX, aY) {
+    this.arcScaleX_ *= aX;
+    this.arcScaleY_ *= aY;
+    var m1 = [
+      [aX, 0,  0],
+      [0,  aY, 0],
+      [0,  0,  1]
+    ];
+
+    setM(this, matrixMultiply(m1, this.m_), true);
+  };
+
+  contextPrototype.transform = function(m11, m12, m21, m22, dx, dy) {
+    var m1 = [
+      [m11, m12, 0],
+      [m21, m22, 0],
+      [dx,  dy,  1]
+    ];
+
+    setM(this, matrixMultiply(m1, this.m_), true);
+  };
+
+  contextPrototype.setTransform = function(m11, m12, m21, m22, dx, dy) {
+    var m = [
+      [m11, m12, 0],
+      [m21, m22, 0],
+      [dx,  dy,  1]
+    ];
+
+    setM(this, m, true);
+  };
+
+  /**
+   * The text drawing function.
+   * The maxWidth argument isn't taken in account, since no browser supports
+   * it yet.
+   */
+  contextPrototype.drawText_ = function(text, x, y, maxWidth, stroke) {
+    var m = this.m_,
+        delta = 1000,
+        left = 0,
+        right = delta,
+        offset = {x: 0, y: 0},
+        lineStr = [];
+
+    var fontStyle = getComputedStyle(processFontStyle(this.font), this.element_);
+
+    var fontStyleString = buildStyle(fontStyle);
+
+    var elementStyle = this.element_.currentStyle;
+    var textAlign = this.textAlign.toLowerCase();
+    switch (textAlign) {
+      case 'left':
+      case 'center':
+      case 'right':
+        break;
+      case 'end':
+        textAlign = elementStyle.direction == 'ltr' ? 'right' : 'left';
+        break;
+      case 'start':
+        textAlign = elementStyle.direction == 'rtl' ? 'right' : 'left';
+        break;
+      default:
+        textAlign = 'left';
+    }
+
+    // 1.75 is an arbitrary number, as there is no info about the text baseline
+    switch (this.textBaseline) {
+      case 'hanging':
+      case 'top':
+        offset.y = fontStyle.size / 1.75;
+        break;
+      case 'middle':
+        break;
+      default:
+      case null:
+      case 'alphabetic':
+      case 'ideographic':
+      case 'bottom':
+        offset.y = -fontStyle.size / 2.25;
+        break;
+    }
+
+    switch(textAlign) {
+      case 'right':
+        left = delta;
+        right = 0.05;
+        break;
+      case 'center':
+        left = right = delta / 2;
+        break;
+    }
+
+    var d = getCoords(this, x + offset.x, y + offset.y);
+
+    lineStr.push('<g_vml_:line from="', -left ,' 0" to="', right ,' 0.05" ',
+                 ' coordsize="100 100" coordorigin="0 0"',
+                 ' filled="', !stroke, '" stroked="', !!stroke,
+                 '" style="position:absolute;width:1px;height:1px;">');
+
+    if (stroke) {
+      appendStroke(this, lineStr);
+    } else {
+      // TODO: Fix the min and max params.
+      appendFill(this, lineStr, {x: -left, y: 0},
+                 {x: right, y: fontStyle.size});
+    }
+
+    var skewM = m[0][0].toFixed(3) + ',' + m[1][0].toFixed(3) + ',' +
+                m[0][1].toFixed(3) + ',' + m[1][1].toFixed(3) + ',0,0';
+
+    var skewOffset = mr(d.x / Z + 1 - m[0][0]) + ',' + mr(d.y / Z - 2 * m[1][0]);
+
+
+    lineStr.push('<g_vml_:skew on="t" matrix="', skewM ,'" ',
+                 ' offset="', skewOffset, '" origin="', left ,' 0" />',
+                 '<g_vml_:path textpathok="true" />',
+                 '<g_vml_:textpath on="true" string="',
+                 encodeHtmlAttribute(text),
+                 '" style="v-text-align:', textAlign,
+                 ';font:', encodeHtmlAttribute(fontStyleString),
+                 '" /></g_vml_:line>');
+
+    this.element_.insertAdjacentHTML('beforeEnd', lineStr.join(''));
+  };
+
+  contextPrototype.fillText = function(text, x, y, maxWidth) {
+    this.drawText_(text, x, y, maxWidth, false);
+  };
+
+  contextPrototype.strokeText = function(text, x, y, maxWidth) {
+    this.drawText_(text, x, y, maxWidth, true);
+  };
+
+  contextPrototype.measureText = function(text) {
+    if (!this.textMeasureEl_) {
+      var s = '<span style="position:absolute;' +
+          'top:-20000px;left:0;padding:0;margin:0;border:none;' +
+          'white-space:pre;"></span>';
+      this.element_.insertAdjacentHTML('beforeEnd', s);
+      this.textMeasureEl_ = this.element_.lastChild;
+    }
+    var doc = this.element_.ownerDocument;
+    this.textMeasureEl_.innerHTML = '';
+    this.textMeasureEl_.style.font = this.font;
+    // Don't use innerHTML or innerText because they allow markup/whitespace.
+    this.textMeasureEl_.appendChild(doc.createTextNode(text));
+    return {width: this.textMeasureEl_.offsetWidth};
+  };
+
+  /******** STUBS ********/
+  contextPrototype.clip = function() {
+    // TODO: Implement
+  };
+
+  contextPrototype.arcTo = function() {
+    // TODO: Implement
+  };
+
+  contextPrototype.createPattern = function(image, repetition) {
+    return new CanvasPattern_(image, repetition);
+  };
+
+  // Gradient / Pattern Stubs
+  function CanvasGradient_(aType) {
+    this.type_ = aType;
+    this.x0_ = 0;
+    this.y0_ = 0;
+    this.r0_ = 0;
+    this.x1_ = 0;
+    this.y1_ = 0;
+    this.r1_ = 0;
+    this.colors_ = [];
+  }
+
+  CanvasGradient_.prototype.addColorStop = function(aOffset, aColor) {
+    aColor = processStyle(aColor);
+    this.colors_.push({offset: aOffset,
+                       color: aColor.color,
+                       alpha: aColor.alpha});
+  };
+
+  function CanvasPattern_(image, repetition) {
+    assertImageIsValid(image);
+    switch (repetition) {
+      case 'repeat':
+      case null:
+      case '':
+        this.repetition_ = 'repeat';
+        break
+      case 'repeat-x':
+      case 'repeat-y':
+      case 'no-repeat':
+        this.repetition_ = repetition;
+        break;
+      default:
+        throwException('SYNTAX_ERR');
+    }
+
+    this.src_ = image.src;
+    this.width_ = image.width;
+    this.height_ = image.height;
+  }
+
+  function throwException(s) {
+    throw new DOMException_(s);
+  }
+
+  function assertImageIsValid(img) {
+    if (!img || img.nodeType != 1 || img.tagName != 'IMG') {
+      throwException('TYPE_MISMATCH_ERR');
+    }
+    if (img.readyState != 'complete') {
+      throwException('INVALID_STATE_ERR');
+    }
+  }
+
+  function DOMException_(s) {
+    this.code = this[s];
+    this.message = s +': DOM Exception ' + this.code;
+  }
+  var p = DOMException_.prototype = new Error;
+  p.INDEX_SIZE_ERR = 1;
+  p.DOMSTRING_SIZE_ERR = 2;
+  p.HIERARCHY_REQUEST_ERR = 3;
+  p.WRONG_DOCUMENT_ERR = 4;
+  p.INVALID_CHARACTER_ERR = 5;
+  p.NO_DATA_ALLOWED_ERR = 6;
+  p.NO_MODIFICATION_ALLOWED_ERR = 7;
+  p.NOT_FOUND_ERR = 8;
+  p.NOT_SUPPORTED_ERR = 9;
+  p.INUSE_ATTRIBUTE_ERR = 10;
+  p.INVALID_STATE_ERR = 11;
+  p.SYNTAX_ERR = 12;
+  p.INVALID_MODIFICATION_ERR = 13;
+  p.NAMESPACE_ERR = 14;
+  p.INVALID_ACCESS_ERR = 15;
+  p.VALIDATION_ERR = 16;
+  p.TYPE_MISMATCH_ERR = 17;
+
+  // set up externs
+  G_vmlCanvasManager = G_vmlCanvasManager_;
+  CanvasRenderingContext2D = CanvasRenderingContext2D_;
+  CanvasGradient = CanvasGradient_;
+  CanvasPattern = CanvasPattern_;
+  DOMException = DOMException_;
+  G_vmlCanvasManager._version = 888;
+})();
+
+} // if
Binary file web/data/feed-icon.png has changed
Binary file web/data/feed-icon16x16.png has changed
Binary file web/data/feed-icon32x32.png has changed
Binary file web/data/gmap_blue_marker.png has changed
Binary file web/data/go.png has changed
Binary file web/data/go_next.png has changed
Binary file web/data/go_prev.png has changed
Binary file web/data/gradient-grey-up.png has changed
Binary file web/data/help.png has changed
Binary file web/data/help_ie.png has changed
Binary file web/data/icon_blank.png has changed
Binary file web/data/icon_map.png has changed
Binary file web/data/images/ui-bg_diagonals-thick_18_b81900_40x40.png has changed
Binary file web/data/images/ui-bg_diagonals-thick_20_666666_40x40.png has changed
Binary file web/data/images/ui-bg_flat_10_000000_40x100.png has changed
Binary file web/data/images/ui-bg_glass_65_ffffff_1x400.png has changed
Binary file web/data/images/ui-bg_gloss-wave_35_f6a828_500x100.png has changed
Binary file web/data/images/ui-icons_222222_256x240.png has changed
Binary file web/data/images/ui-icons_228ef1_256x240.png has changed
Binary file web/data/images/ui-icons_ef8c08_256x240.png has changed
Binary file web/data/images/ui-icons_ffd27a_256x240.png has changed
Binary file web/data/images/ui-icons_ffffff_256x240.png has changed
Binary file web/data/information.png has changed
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/jquery.cookie.js	Fri Dec 09 12:08:27 2011 +0100
@@ -0,0 +1,41 @@
+/**
+ * jQuery Cookie plugin
+ *
+ * Copyright (c) 2010 Klaus Hartl (stilbuero.de)
+ * Dual licensed under the MIT and GPL licenses:
+ * http://www.opensource.org/licenses/mit-license.php
+ * http://www.gnu.org/licenses/gpl.html
+ *
+ */
+jQuery.cookie = function (key, value, options) {
+
+    // key and at least value given, set cookie...
+    if (arguments.length > 1 && String(value) !== "[object Object]") {
+        options = jQuery.extend({}, options);
+
+        if (value === null || value === undefined) {
+            options.expires = -1;
+        }
+
+        if (typeof options.expires === 'number') {
+            var days = options.expires, t = options.expires = new Date();
+            t.setDate(t.getDate() + days);
+        }
+
+        value = String(value);
+
+        return (document.cookie = [
+            encodeURIComponent(key), '=',
+            options.raw ? value : encodeURIComponent(value),
+            options.expires ? '; expires=' + options.expires.toUTCString() : '', // use expires attribute, max-age is not supported by IE
+            options.path ? '; path=' + options.path : '',
+            options.domain ? '; domain=' + options.domain : '',
+            options.secure ? '; secure' : ''
+        ].join(''));
+    }
+
+    // key and possibly options given, get cookie...
+    options = value || {};
+    var result, decode = options.raw ? function (s) { return s; } : decodeURIComponent;
+    return (result = new RegExp('(?:^|; )' + encodeURIComponent(key) + '=([^;]*)').exec(document.cookie)) ? decode(result[1]) : null;
+};
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/jquery.qtip.min.js	Fri Dec 09 12:08:27 2011 +0100
@@ -0,0 +1,15 @@
+/*
+ * jquery.qtip. The jQuery tooltip plugin
+ *
+ * Copyright (c) 2009 Craig Thompson
+ * http://craigsworks.com
+ *
+ * Licensed under MIT
+ * http://www.opensource.org/licenses/mit-license.php
+ *
+ * Launch  : February 2009
+ * Version : 1.0.0-rc3
+ * Released: Tuesday 12th May, 2009 - 00:00
+ * Debug: jquery.qtip.debug.js
+ */
+(function(f){f.fn.qtip=function(B,u){var y,t,A,s,x,w,v,z;if(typeof B=="string"){if(typeof f(this).data("qtip")!=="object"){f.fn.qtip.log.error.call(self,1,f.fn.qtip.constants.NO_TOOLTIP_PRESENT,false)}if(B=="api"){return f(this).data("qtip").interfaces[f(this).data("qtip").current]}else{if(B=="interfaces"){return f(this).data("qtip").interfaces}}}else{if(!B){B={}}if(typeof B.content!=="object"||(B.content.jquery&&B.content.length>0)){B.content={text:B.content}}if(typeof B.content.title!=="object"){B.content.title={text:B.content.title}}if(typeof B.position!=="object"){B.position={corner:B.position}}if(typeof B.position.corner!=="object"){B.position.corner={target:B.position.corner,tooltip:B.position.corner}}if(typeof B.show!=="object"){B.show={when:B.show}}if(typeof B.show.when!=="object"){B.show.when={event:B.show.when}}if(typeof B.show.effect!=="object"){B.show.effect={type:B.show.effect}}if(typeof B.hide!=="object"){B.hide={when:B.hide}}if(typeof B.hide.when!=="object"){B.hide.when={event:B.hide.when}}if(typeof B.hide.effect!=="object"){B.hide.effect={type:B.hide.effect}}if(typeof B.style!=="object"){B.style={name:B.style}}B.style=c(B.style);s=f.extend(true,{},f.fn.qtip.defaults,B);s.style=a.call({options:s},s.style);s.user=f.extend(true,{},B)}return f(this).each(function(){if(typeof B=="string"){w=B.toLowerCase();A=f(this).qtip("interfaces");if(typeof 
A=="object"){if(u===true&&w=="destroy"){while(A.length>0){A[A.length-1].destroy()}}else{if(u!==true){A=[f(this).qtip("api")]}for(y=0;y<A.length;y++){if(w=="destroy"){A[y].destroy()}else{if(A[y].status.rendered===true){if(w=="show"){A[y].show()}else{if(w=="hide"){A[y].hide()}else{if(w=="focus"){A[y].focus()}else{if(w=="disable"){A[y].disable(true)}else{if(w=="enable"){A[y].disable(false)}}}}}}}}}}}else{v=f.extend(true,{},s);v.hide.effect.length=s.hide.effect.length;v.show.effect.length=s.show.effect.length;if(v.position.container===false){v.position.container=f(document.body)}if(v.position.target===false){v.position.target=f(this)}if(v.show.when.target===false){v.show.when.target=f(this)}if(v.hide.when.target===false){v.hide.when.target=f(this)}t=f.fn.qtip.interfaces.length;for(y=0;y<t;y++){if(typeof f.fn.qtip.interfaces[y]=="undefined"){t=y;break}}x=new d(f(this),v,t);f.fn.qtip.interfaces[t]=x;if(typeof f(this).data("qtip")=="object"){if(typeof f(this).attr("qtip")==="undefined"){f(this).data("qtip").current=f(this).data("qtip").interfaces.length}f(this).data("qtip").interfaces.push(x)}else{f(this).data("qtip",{current:0,interfaces:[x]})}if(v.content.prerender===false&&v.show.when.event!==false&&v.show.ready!==true){v.show.when.target.bind(v.show.when.event+".qtip-"+t+"-create",{qtip:t},function(C){z=f.fn.qtip.interfaces[C.data.qtip];z.options.show.when.target.unbind(z.options.show.when.event+".qtip-"+C.data.qtip+"-create");z.cache.mouse={x:C.pageX,y:C.pageY};p.call(z);z.options.show.when.target.trigger(z.options.show.when.event)})}else{x.cache.mouse={x:v.show.when.target.offset().left,y:v.show.when.target.offset().top};p.call(x)}}})};function d(u,t,v){var 
s=this;s.id=v;s.options=t;s.status={animated:false,rendered:false,disabled:false,focused:false};s.elements={target:u.addClass(s.options.style.classes.target),tooltip:null,wrapper:null,content:null,contentWrapper:null,title:null,button:null,tip:null,bgiframe:null};s.cache={mouse:{},position:{},toggle:0};s.timers={};f.extend(s,s.options.api,{show:function(y){var x,z;if(!s.status.rendered){return f.fn.qtip.log.error.call(s,2,f.fn.qtip.constants.TOOLTIP_NOT_RENDERED,"show")}if(s.elements.tooltip.css("display")!=="none"){return s}s.elements.tooltip.stop(true,false);x=s.beforeShow.call(s,y);if(x===false){return s}function w(){if(s.options.position.type!=="static"){s.focus()}s.onShow.call(s,y);if(f.browser.msie){s.elements.tooltip.get(0).style.removeAttribute("filter")}}s.cache.toggle=1;if(s.options.position.type!=="static"){s.updatePosition(y,(s.options.show.effect.length>0))}if(typeof s.options.show.solo=="object"){z=f(s.options.show.solo)}else{if(s.options.show.solo===true){z=f("div.qtip").not(s.elements.tooltip)}}if(z){z.each(function(){if(f(this).qtip("api").status.rendered===true){f(this).qtip("api").hide()}})}if(typeof s.options.show.effect.type=="function"){s.options.show.effect.type.call(s.elements.tooltip,s.options.show.effect.length);s.elements.tooltip.queue(function(){w();f(this).dequeue()})}else{switch(s.options.show.effect.type.toLowerCase()){case"fade":s.elements.tooltip.fadeIn(s.options.show.effect.length,w);break;case"slide":s.elements.tooltip.slideDown(s.options.show.effect.length,function(){w();if(s.options.position.type!=="static"){s.updatePosition(y,true)}});break;case"grow":s.elements.tooltip.show(s.options.show.effect.length,w);break;default:s.elements.tooltip.show(null,w);break}s.elements.tooltip.addClass(s.options.style.classes.active)}return f.fn.qtip.log.error.call(s,1,f.fn.qtip.constants.EVENT_SHOWN,"show")},hide:function(y){var x;if(!s.status.rendered){return 
f.fn.qtip.log.error.call(s,2,f.fn.qtip.constants.TOOLTIP_NOT_RENDERED,"hide")}else{if(s.elements.tooltip.css("display")==="none"){return s}}clearTimeout(s.timers.show);s.elements.tooltip.stop(true,false);x=s.beforeHide.call(s,y);if(x===false){return s}function w(){s.onHide.call(s,y)}s.cache.toggle=0;if(typeof s.options.hide.effect.type=="function"){s.options.hide.effect.type.call(s.elements.tooltip,s.options.hide.effect.length);s.elements.tooltip.queue(function(){w();f(this).dequeue()})}else{switch(s.options.hide.effect.type.toLowerCase()){case"fade":s.elements.tooltip.fadeOut(s.options.hide.effect.length,w);break;case"slide":s.elements.tooltip.slideUp(s.options.hide.effect.length,w);break;case"grow":s.elements.tooltip.hide(s.options.hide.effect.length,w);break;default:s.elements.tooltip.hide(null,w);break}s.elements.tooltip.removeClass(s.options.style.classes.active)}return f.fn.qtip.log.error.call(s,1,f.fn.qtip.constants.EVENT_HIDDEN,"hide")},updatePosition:function(w,x){var C,G,L,J,H,E,y,I,B,D,K,A,F,z;if(!s.status.rendered){return f.fn.qtip.log.error.call(s,2,f.fn.qtip.constants.TOOLTIP_NOT_RENDERED,"updatePosition")}else{if(s.options.position.type=="static"){return 
f.fn.qtip.log.error.call(s,1,f.fn.qtip.constants.CANNOT_POSITION_STATIC,"updatePosition")}}G={position:{left:0,top:0},dimensions:{height:0,width:0},corner:s.options.position.corner.target};L={position:s.getPosition(),dimensions:s.getDimensions(),corner:s.options.position.corner.tooltip};if(s.options.position.target!=="mouse"){if(s.options.position.target.get(0).nodeName.toLowerCase()=="area"){J=s.options.position.target.attr("coords").split(",");for(C=0;C<J.length;C++){J[C]=parseInt(J[C])}H=s.options.position.target.parent("map").attr("name");E=f('img[usemap="#'+H+'"]:first').offset();G.position={left:Math.floor(E.left+J[0]),top:Math.floor(E.top+J[1])};switch(s.options.position.target.attr("shape").toLowerCase()){case"rect":G.dimensions={width:Math.ceil(Math.abs(J[2]-J[0])),height:Math.ceil(Math.abs(J[3]-J[1]))};break;case"circle":G.dimensions={width:J[2]+1,height:J[2]+1};break;case"poly":G.dimensions={width:J[0],height:J[1]};for(C=0;C<J.length;C++){if(C%2==0){if(J[C]>G.dimensions.width){G.dimensions.width=J[C]}if(J[C]<J[0]){G.position.left=Math.floor(E.left+J[C])}}else{if(J[C]>G.dimensions.height){G.dimensions.height=J[C]}if(J[C]<J[1]){G.position.top=Math.floor(E.top+J[C])}}}G.dimensions.width=G.dimensions.width-(G.position.left-E.left);G.dimensions.height=G.dimensions.height-(G.position.top-E.top);break;default:return f.fn.qtip.log.error.call(s,4,f.fn.qtip.constants.INVALID_AREA_SHAPE,"updatePosition");break}G.dimensions.width-=2;G.dimensions.height-=2}else{if(s.options.position.target.add(document.body).length===1){G.position={left:f(document).scrollLeft(),top:f(document).scrollTop()};G.dimensions={height:f(window).height(),width:f(window).width()}}else{if(typeof 
s.options.position.target.attr("qtip")!=="undefined"){G.position=s.options.position.target.qtip("api").cache.position}else{G.position=s.options.position.target.offset()}G.dimensions={height:s.options.position.target.outerHeight(),width:s.options.position.target.outerWidth()}}}y=f.extend({},G.position);if(G.corner.search(/right/i)!==-1){y.left+=G.dimensions.width}if(G.corner.search(/bottom/i)!==-1){y.top+=G.dimensions.height}if(G.corner.search(/((top|bottom)Middle)|center/)!==-1){y.left+=(G.dimensions.width/2)}if(G.corner.search(/((left|right)Middle)|center/)!==-1){y.top+=(G.dimensions.height/2)}}else{G.position=y={left:s.cache.mouse.x,top:s.cache.mouse.y};G.dimensions={height:1,width:1}}if(L.corner.search(/right/i)!==-1){y.left-=L.dimensions.width}if(L.corner.search(/bottom/i)!==-1){y.top-=L.dimensions.height}if(L.corner.search(/((top|bottom)Middle)|center/)!==-1){y.left-=(L.dimensions.width/2)}if(L.corner.search(/((left|right)Middle)|center/)!==-1){y.top-=(L.dimensions.height/2)}I=(f.browser.msie)?1:0;B=(f.browser.msie&&parseInt(f.browser.version.charAt(0))===6)?1:0;if(s.options.style.border.radius>0){if(L.corner.search(/Left/)!==-1){y.left-=s.options.style.border.radius}else{if(L.corner.search(/Right/)!==-1){y.left+=s.options.style.border.radius}}if(L.corner.search(/Top/)!==-1){y.top-=s.options.style.border.radius}else{if(L.corner.search(/Bottom/)!==-1){y.top+=s.options.style.border.radius}}}if(I){if(L.corner.search(/top/)!==-1){y.top-=I}else{if(L.corner.search(/bottom/)!==-1){y.top+=I}}if(L.corner.search(/left/)!==-1){y.left-=I}else{if(L.corner.search(/right/)!==-1){y.left+=I}}if(L.corner.search(/leftMiddle|rightMiddle/)!==-1){y.top-=1}}if(s.options.position.adjust.screen===true){y=o.call(s,y,G,L)}if(s.options.position.target==="mouse"&&s.options.position.adjust.mouse===true){if(s.options.position.adjust.screen===true&&s.elements.tip){K=s.elements.tip.attr("rel")}else{K=s.options.position.corner.tooltip}y.left+=(K.search(/right/i)!==-1)?-6:6;y.top+=(K.search(/bot
tom/i)!==-1)?-6:6}if(!s.elements.bgiframe&&f.browser.msie&&parseInt(f.browser.version.charAt(0))==6){f("select, object").each(function(){A=f(this).offset();A.bottom=A.top+f(this).height();A.right=A.left+f(this).width();if(y.top+L.dimensions.height>=A.top&&y.left+L.dimensions.width>=A.left){k.call(s)}})}y.left+=s.options.position.adjust.x;y.top+=s.options.position.adjust.y;F=s.getPosition();if(y.left!=F.left||y.top!=F.top){z=s.beforePositionUpdate.call(s,w);if(z===false){return s}s.cache.position=y;if(x===true){s.status.animated=true;s.elements.tooltip.animate(y,200,"swing",function(){s.status.animated=false})}else{s.elements.tooltip.css(y)}s.onPositionUpdate.call(s,w);if(typeof w!=="undefined"&&w.type&&w.type!=="mousemove"){f.fn.qtip.log.error.call(s,1,f.fn.qtip.constants.EVENT_POSITION_UPDATED,"updatePosition")}}return s},updateWidth:function(w){var x;if(!s.status.rendered){return f.fn.qtip.log.error.call(s,2,f.fn.qtip.constants.TOOLTIP_NOT_RENDERED,"updateWidth")}else{if(w&&typeof w!=="number"){return f.fn.qtip.log.error.call(s,2,"newWidth must be of type number","updateWidth")}}x=s.elements.contentWrapper.siblings().add(s.elements.tip).add(s.elements.button);if(!w){if(typeof 
s.options.style.width.value=="number"){w=s.options.style.width.value}else{s.elements.tooltip.css({width:"auto"});x.hide();if(f.browser.msie){s.elements.wrapper.add(s.elements.contentWrapper.children()).css({zoom:"normal"})}w=s.getDimensions().width+1;if(!s.options.style.width.value){if(w>s.options.style.width.max){w=s.options.style.width.max}if(w<s.options.style.width.min){w=s.options.style.width.min}}}}if(w%2!==0){w-=1}s.elements.tooltip.width(w);x.show();if(s.options.style.border.radius){s.elements.tooltip.find(".qtip-betweenCorners").each(function(y){f(this).width(w-(s.options.style.border.radius*2))})}if(f.browser.msie){s.elements.wrapper.add(s.elements.contentWrapper.children()).css({zoom:"1"});s.elements.wrapper.width(w);if(s.elements.bgiframe){s.elements.bgiframe.width(w).height(s.getDimensions.height)}}return f.fn.qtip.log.error.call(s,1,f.fn.qtip.constants.EVENT_WIDTH_UPDATED,"updateWidth")},updateStyle:function(w){var z,A,x,y,B;if(!s.status.rendered){return f.fn.qtip.log.error.call(s,2,f.fn.qtip.constants.TOOLTIP_NOT_RENDERED,"updateStyle")}else{if(typeof w!=="string"||!f.fn.qtip.styles[w]){return f.fn.qtip.log.error.call(s,2,f.fn.qtip.constants.STYLE_NOT_DEFINED,"updateStyle")}}s.options.style=a.call(s,f.fn.qtip.styles[w],s.options.user.style);s.elements.content.css(q(s.options.style));if(s.options.content.title.text!==false){s.elements.title.css(q(s.options.style.title,true))}s.elements.contentWrapper.css({borderColor:s.options.style.border.color});if(s.options.style.tip.corner!==false){if(f("<canvas>").get(0).getContext){z=s.elements.tooltip.find(".qtip-tip canvas:first");x=z.get(0).getContext("2d");x.clearRect(0,0,300,300);y=z.parent("div[rel]:first").attr("rel");B=b(y,s.options.style.tip.size.width,s.options.style.tip.size.height);h.call(s,z,B,s.options.style.tip.color||s.options.style.border.color)}else{if(f.browser.msie){z=s.elements.tooltip.find('.qtip-tip 
[nodeName="shape"]');z.attr("fillcolor",s.options.style.tip.color||s.options.style.border.color)}}}if(s.options.style.border.radius>0){s.elements.tooltip.find(".qtip-betweenCorners").css({backgroundColor:s.options.style.border.color});if(f("<canvas>").get(0).getContext){A=g(s.options.style.border.radius);s.elements.tooltip.find(".qtip-wrapper canvas").each(function(){x=f(this).get(0).getContext("2d");x.clearRect(0,0,300,300);y=f(this).parent("div[rel]:first").attr("rel");r.call(s,f(this),A[y],s.options.style.border.radius,s.options.style.border.color)})}else{if(f.browser.msie){s.elements.tooltip.find('.qtip-wrapper [nodeName="arc"]').each(function(){f(this).attr("fillcolor",s.options.style.border.color)})}}}return f.fn.qtip.log.error.call(s,1,f.fn.qtip.constants.EVENT_STYLE_UPDATED,"updateStyle")},updateContent:function(A,y){var z,x,w;if(!s.status.rendered){return f.fn.qtip.log.error.call(s,2,f.fn.qtip.constants.TOOLTIP_NOT_RENDERED,"updateContent")}else{if(!A){return f.fn.qtip.log.error.call(s,2,f.fn.qtip.constants.NO_CONTENT_PROVIDED,"updateContent")}}z=s.beforeContentUpdate.call(s,A);if(typeof z=="string"){A=z}else{if(z===false){return}}if(f.browser.msie){s.elements.contentWrapper.children().css({zoom:"normal"})}if(A.jquery&&A.length>0){A.clone(true).appendTo(s.elements.content).show()}else{s.elements.content.html(A)}x=s.elements.content.find("img[complete=false]");if(x.length>0){w=0;x.each(function(C){f('<img src="'+f(this).attr("src")+'" />').load(function(){if(++w==x.length){B()}})})}else{B()}function B(){s.updateWidth();if(y!==false){if(s.options.position.type!=="static"){s.updatePosition(s.elements.tooltip.is(":visible"),true)}if(s.options.style.tip.corner!==false){n.call(s)}}}s.onContentUpdate.call(s);return f.fn.qtip.log.error.call(s,1,f.fn.qtip.constants.EVENT_CONTENT_UPDATED,"loadContent")},loadContent:function(w,z,A){var y;if(!s.status.rendered){return 
f.fn.qtip.log.error.call(s,2,f.fn.qtip.constants.TOOLTIP_NOT_RENDERED,"loadContent")}y=s.beforeContentLoad.call(s);if(y===false){return s}if(A=="post"){f.post(w,z,x)}else{f.get(w,z,x)}function x(B){s.onContentLoad.call(s);f.fn.qtip.log.error.call(s,1,f.fn.qtip.constants.EVENT_CONTENT_LOADED,"loadContent");s.updateContent(B)}return s},updateTitle:function(w){if(!s.status.rendered){return f.fn.qtip.log.error.call(s,2,f.fn.qtip.constants.TOOLTIP_NOT_RENDERED,"updateTitle")}else{if(!w){return f.fn.qtip.log.error.call(s,2,f.fn.qtip.constants.NO_CONTENT_PROVIDED,"updateTitle")}}returned=s.beforeTitleUpdate.call(s);if(returned===false){return s}if(s.elements.button){s.elements.button=s.elements.button.clone(true)}s.elements.title.html(w);if(s.elements.button){s.elements.title.prepend(s.elements.button)}s.onTitleUpdate.call(s);return f.fn.qtip.log.error.call(s,1,f.fn.qtip.constants.EVENT_TITLE_UPDATED,"updateTitle")},focus:function(A){var y,x,w,z;if(!s.status.rendered){return f.fn.qtip.log.error.call(s,2,f.fn.qtip.constants.TOOLTIP_NOT_RENDERED,"focus")}else{if(s.options.position.type=="static"){return f.fn.qtip.log.error.call(s,1,f.fn.qtip.constants.CANNOT_FOCUS_STATIC,"focus")}}y=parseInt(s.elements.tooltip.css("z-index"));x=6000+f("div.qtip[qtip]").length-1;if(!s.status.focused&&y!==x){z=s.beforeFocus.call(s,A);if(z===false){return s}f("div.qtip[qtip]").not(s.elements.tooltip).each(function(){if(f(this).qtip("api").status.rendered===true){w=parseInt(f(this).css("z-index"));if(typeof w=="number"&&w>-1){f(this).css({zIndex:parseInt(f(this).css("z-index"))-1})}f(this).qtip("api").status.focused=false}});s.elements.tooltip.css({zIndex:x});s.status.focused=true;s.onFocus.call(s,A);f.fn.qtip.log.error.call(s,1,f.fn.qtip.constants.EVENT_FOCUSED,"focus")}return s},disable:function(w){if(!s.status.rendered){return 
f.fn.qtip.log.error.call(s,2,f.fn.qtip.constants.TOOLTIP_NOT_RENDERED,"disable")}if(w){if(!s.status.disabled){s.status.disabled=true;f.fn.qtip.log.error.call(s,1,f.fn.qtip.constants.EVENT_DISABLED,"disable")}else{f.fn.qtip.log.error.call(s,1,f.fn.qtip.constants.TOOLTIP_ALREADY_DISABLED,"disable")}}else{if(s.status.disabled){s.status.disabled=false;f.fn.qtip.log.error.call(s,1,f.fn.qtip.constants.EVENT_ENABLED,"disable")}else{f.fn.qtip.log.error.call(s,1,f.fn.qtip.constants.TOOLTIP_ALREADY_ENABLED,"disable")}}return s},destroy:function(){var w,x,y;x=s.beforeDestroy.call(s);if(x===false){return s}if(s.status.rendered){s.options.show.when.target.unbind("mousemove.qtip",s.updatePosition);s.options.show.when.target.unbind("mouseout.qtip",s.hide);s.options.show.when.target.unbind(s.options.show.when.event+".qtip");s.options.hide.when.target.unbind(s.options.hide.when.event+".qtip");s.elements.tooltip.unbind(s.options.hide.when.event+".qtip");s.elements.tooltip.unbind("mouseover.qtip",s.focus);s.elements.tooltip.remove()}else{s.options.show.when.target.unbind(s.options.show.when.event+".qtip-create")}if(typeof s.elements.target.data("qtip")=="object"){y=s.elements.target.data("qtip").interfaces;if(typeof y=="object"&&y.length>0){for(w=0;w<y.length-1;w++){if(y[w].id==s.id){y.splice(w,1)}}}}delete f.fn.qtip.interfaces[s.id];if(typeof y=="object"&&y.length>0){s.elements.target.data("qtip").current=y.length-1}else{s.elements.target.removeData("qtip")}s.onDestroy.call(s);f.fn.qtip.log.error.call(s,1,f.fn.qtip.constants.EVENT_DESTROYED,"destroy");return s.elements.target},getPosition:function(){var w,x;if(!s.status.rendered){return f.fn.qtip.log.error.call(s,2,f.fn.qtip.constants.TOOLTIP_NOT_RENDERED,"getPosition")}w=(s.elements.tooltip.css("display")!=="none")?false:true;if(w){s.elements.tooltip.css({visiblity:"hidden"}).show()}x=s.elements.tooltip.offset();if(w){s.elements.tooltip.css({visiblity:"visible"}).hide()}return x},getDimensions:function(){var 
w,x;if(!s.status.rendered){return f.fn.qtip.log.error.call(s,2,f.fn.qtip.constants.TOOLTIP_NOT_RENDERED,"getDimensions")}w=(!s.elements.tooltip.is(":visible"))?true:false;if(w){s.elements.tooltip.css({visiblity:"hidden"}).show()}x={height:s.elements.tooltip.outerHeight(),width:s.elements.tooltip.outerWidth()};if(w){s.elements.tooltip.css({visiblity:"visible"}).hide()}return x}})}function p(){var s,w,u,t,v,y,x;s=this;s.beforeRender.call(s);s.status.rendered=true;s.elements.tooltip='<div qtip="'+s.id+'" class="qtip '+(s.options.style.classes.tooltip||s.options.style)+'"style="display:none; -moz-border-radius:0; -webkit-border-radius:0; border-radius:0;position:'+s.options.position.type+';">  <div class="qtip-wrapper" style="position:relative; overflow:hidden; text-align:left;">    <div class="qtip-contentWrapper" style="overflow:hidden;">       <div class="qtip-content '+s.options.style.classes.content+'"></div></div></div></div>';s.elements.tooltip=f(s.elements.tooltip);s.elements.tooltip.appendTo(s.options.position.container);s.elements.tooltip.data("qtip",{current:0,interfaces:[s]});s.elements.wrapper=s.elements.tooltip.children("div:first");s.elements.contentWrapper=s.elements.wrapper.children("div:first").css({background:s.options.style.background});s.elements.content=s.elements.contentWrapper.children("div:first").css(q(s.options.style));if(f.browser.msie){s.elements.wrapper.add(s.elements.content).css({zoom:1})}if(s.options.hide.when.event=="unfocus"){s.elements.tooltip.attr("unfocus",true)}if(typeof s.options.style.width.value=="number"){s.updateWidth()}if(f("<canvas>").get(0).getContext||f.browser.msie){if(s.options.style.border.radius>0){m.call(s)}else{s.elements.contentWrapper.css({border:s.options.style.border.width+"px solid "+s.options.style.border.color})}if(s.options.style.tip.corner!==false){e.call(s)}}else{s.elements.contentWrapper.css({border:s.options.style.border.width+"px solid 
"+s.options.style.border.color});s.options.style.border.radius=0;s.options.style.tip.corner=false;f.fn.qtip.log.error.call(s,2,f.fn.qtip.constants.CANVAS_VML_NOT_SUPPORTED,"render")}if((typeof s.options.content.text=="string"&&s.options.content.text.length>0)||(s.options.content.text.jquery&&s.options.content.text.length>0)){u=s.options.content.text}else{if(typeof s.elements.target.attr("title")=="string"&&s.elements.target.attr("title").length>0){u=s.elements.target.attr("title").replace("\\n","<br />");s.elements.target.attr("title","")}else{if(typeof s.elements.target.attr("alt")=="string"&&s.elements.target.attr("alt").length>0){u=s.elements.target.attr("alt").replace("\\n","<br />");s.elements.target.attr("alt","")}else{u=" ";f.fn.qtip.log.error.call(s,1,f.fn.qtip.constants.NO_VALID_CONTENT,"render")}}}if(s.options.content.title.text!==false){j.call(s)}s.updateContent(u);l.call(s);if(s.options.show.ready===true){s.show()}if(s.options.content.url!==false){t=s.options.content.url;v=s.options.content.data;y=s.options.content.method||"get";s.loadContent(t,v,y)}s.onRender.call(s);f.fn.qtip.log.error.call(s,1,f.fn.qtip.constants.EVENT_RENDERED,"render")}function m(){var F,z,t,B,x,E,u,G,D,y,w,C,A,s,v;F=this;F.elements.wrapper.find(".qtip-borderBottom, .qtip-borderTop").remove();t=F.options.style.border.width;B=F.options.style.border.radius;x=F.options.style.border.color||F.options.style.tip.color;E=g(B);u={};for(z in E){u[z]='<div rel="'+z+'" style="'+((z.search(/Left/)!==-1)?"left":"right")+":0; position:absolute; height:"+B+"px; width:"+B+'px; overflow:hidden; line-height:0.1px; font-size:1px">';if(f("<canvas>").get(0).getContext){u[z]+='<canvas height="'+B+'" width="'+B+'" style="vertical-align: top"></canvas>'}else{if(f.browser.msie){G=B*2+3;u[z]+='<v:arc stroked="false" fillcolor="'+x+'" startangle="'+E[z][0]+'" endangle="'+E[z][1]+'" style="width:'+G+"px; height:"+G+"px; margin-top:"+((z.search(/bottom/)!==-1)?-2:-1)+"px; 
margin-left:"+((z.search(/Right/)!==-1)?E[z][2]-3.5:-1)+'px; vertical-align:top; display:inline-block; behavior:url(#default#VML)"></v:arc>'}}u[z]+="</div>"}D=F.getDimensions().width-(Math.max(t,B)*2);y='<div class="qtip-betweenCorners" style="height:'+B+"px; width:"+D+"px; overflow:hidden; background-color:"+x+'; line-height:0.1px; font-size:1px;">';w='<div class="qtip-borderTop" dir="ltr" style="height:'+B+"px; margin-left:"+B+'px; line-height:0.1px; font-size:1px; padding:0;">'+u.topLeft+u.topRight+y;F.elements.wrapper.prepend(w);C='<div class="qtip-borderBottom" dir="ltr" style="height:'+B+"px; margin-left:"+B+'px; line-height:0.1px; font-size:1px; padding:0;">'+u.bottomLeft+u.bottomRight+y;F.elements.wrapper.append(C);if(f("<canvas>").get(0).getContext){F.elements.wrapper.find("canvas").each(function(){A=E[f(this).parent("[rel]:first").attr("rel")];r.call(F,f(this),A,B,x)})}else{if(f.browser.msie){F.elements.tooltip.append('<v:image style="behavior:url(#default#VML);"></v:image>')}}s=Math.max(B,(B+(t-B)));v=Math.max(t-B,0);F.elements.contentWrapper.css({border:"0px solid "+x,borderWidth:v+"px "+s+"px"})}function r(u,w,s,t){var v=u.get(0).getContext("2d");v.fillStyle=t;v.beginPath();v.arc(w[0],w[1],s,0,Math.PI*2,false);v.fill()}function e(v){var t,s,x,u,w;t=this;if(t.elements.tip!==null){t.elements.tip.remove()}s=t.options.style.tip.color||t.options.style.border.color;if(t.options.style.tip.corner===false){return}else{if(!v){v=t.options.style.tip.corner}}x=b(v,t.options.style.tip.size.width,t.options.style.tip.size.height);t.elements.tip='<div class="'+t.options.style.classes.tip+'" dir="ltr" rel="'+v+'" style="position:absolute; height:'+t.options.style.tip.size.height+"px; width:"+t.options.style.tip.size.width+'px; margin:0 auto; line-height:0.1px; font-size:1px;">';if(f("<canvas>").get(0).getContext){t.elements.tip+='<canvas height="'+t.options.style.tip.size.height+'" 
width="'+t.options.style.tip.size.width+'"></canvas>'}else{if(f.browser.msie){u=t.options.style.tip.size.width+","+t.options.style.tip.size.height;w="m"+x[0][0]+","+x[0][1];w+=" l"+x[1][0]+","+x[1][1];w+=" "+x[2][0]+","+x[2][1];w+=" xe";t.elements.tip+='<v:shape fillcolor="'+s+'" stroked="false" filled="true" path="'+w+'" coordsize="'+u+'" style="width:'+t.options.style.tip.size.width+"px; height:"+t.options.style.tip.size.height+"px; line-height:0.1px; display:inline-block; behavior:url(#default#VML); vertical-align:"+((v.search(/top/)!==-1)?"bottom":"top")+'"></v:shape>';t.elements.tip+='<v:image style="behavior:url(#default#VML);"></v:image>';t.elements.contentWrapper.css("position","relative")}}t.elements.tooltip.prepend(t.elements.tip+"</div>");t.elements.tip=t.elements.tooltip.find("."+t.options.style.classes.tip).eq(0);if(f("<canvas>").get(0).getContext){h.call(t,t.elements.tip.find("canvas:first"),x,s)}if(v.search(/top/)!==-1&&f.browser.msie&&parseInt(f.browser.version.charAt(0))===6){t.elements.tip.css({marginTop:-4})}n.call(t,v)}function h(t,v,s){var u=t.get(0).getContext("2d");u.fillStyle=s;u.beginPath();u.moveTo(v[0][0],v[0][1]);u.lineTo(v[1][0],v[1][1]);u.lineTo(v[2][0],v[2][1]);u.fill()}function n(u){var 
t,w,s,x,v;t=this;if(t.options.style.tip.corner===false||!t.elements.tip){return}if(!u){u=t.elements.tip.attr("rel")}w=positionAdjust=(f.browser.msie)?1:0;t.elements.tip.css(u.match(/left|right|top|bottom/)[0],0);if(u.search(/top|bottom/)!==-1){if(f.browser.msie){if(parseInt(f.browser.version.charAt(0))===6){positionAdjust=(u.search(/top/)!==-1)?-3:1}else{positionAdjust=(u.search(/top/)!==-1)?1:2}}if(u.search(/Middle/)!==-1){t.elements.tip.css({left:"50%",marginLeft:-(t.options.style.tip.size.width/2)})}else{if(u.search(/Left/)!==-1){t.elements.tip.css({left:t.options.style.border.radius-w})}else{if(u.search(/Right/)!==-1){t.elements.tip.css({right:t.options.style.border.radius+w})}}}if(u.search(/top/)!==-1){t.elements.tip.css({top:-positionAdjust})}else{t.elements.tip.css({bottom:positionAdjust})}}else{if(u.search(/left|right/)!==-1){if(f.browser.msie){positionAdjust=(parseInt(f.browser.version.charAt(0))===6)?1:((u.search(/left/)!==-1)?1:2)}if(u.search(/Middle/)!==-1){t.elements.tip.css({top:"50%",marginTop:-(t.options.style.tip.size.height/2)})}else{if(u.search(/Top/)!==-1){t.elements.tip.css({top:t.options.style.border.radius-w})}else{if(u.search(/Bottom/)!==-1){t.elements.tip.css({bottom:t.options.style.border.radius+w})}}}if(u.search(/left/)!==-1){t.elements.tip.css({left:-positionAdjust})}else{t.elements.tip.css({right:positionAdjust})}}}s="padding-"+u.match(/left|right|top|bottom/)[0];x=t.options.style.tip.size[(s.search(/left|right/)!==-1)?"width":"height"];t.elements.tooltip.css("padding",0);t.elements.tooltip.css(s,x);if(f.browser.msie&&parseInt(f.browser.version.charAt(0))==6){v=parseInt(t.elements.tip.css("margin-top"))||0;v+=parseInt(t.elements.content.css("margin-top"))||0;t.elements.tip.css({marginTop:v})}}function j(){var s=this;if(s.elements.title!==null){s.elements.title.remove()}s.elements.title=f('<div 
class="'+s.options.style.classes.title+'">').css(q(s.options.style.title,true)).css({zoom:(f.browser.msie)?1:0}).prependTo(s.elements.contentWrapper);if(s.options.content.title.text){s.updateTitle.call(s,s.options.content.title.text)}if(s.options.content.title.button!==false&&typeof s.options.content.title.button=="string"){s.elements.button=f('<a class="'+s.options.style.classes.button+'" style="float:right; position: relative"></a>').css(q(s.options.style.button,true)).html(s.options.content.title.button).prependTo(s.elements.title).click(function(t){if(!s.status.disabled){s.hide(t)}})}}function l(){var t,v,u,s;t=this;v=t.options.show.when.target;u=t.options.hide.when.target;if(t.options.hide.fixed){u=u.add(t.elements.tooltip)}if(t.options.hide.when.event=="inactive"){s=["click","dblclick","mousedown","mouseup","mousemove","mouseout","mouseenter","mouseleave","mouseover"];function y(z){if(t.status.disabled===true){return}clearTimeout(t.timers.inactive);t.timers.inactive=setTimeout(function(){f(s).each(function(){u.unbind(this+".qtip-inactive");t.elements.content.unbind(this+".qtip-inactive")});t.hide(z)},t.options.hide.delay)}}else{if(t.options.hide.fixed===true){t.elements.tooltip.bind("mouseover.qtip",function(){if(t.status.disabled===true){return}clearTimeout(t.timers.hide)})}}function x(z){if(t.status.disabled===true){return}if(t.options.hide.when.event=="inactive"){f(s).each(function(){u.bind(this+".qtip-inactive",y);t.elements.content.bind(this+".qtip-inactive",y)});y()}clearTimeout(t.timers.show);clearTimeout(t.timers.hide);t.timers.show=setTimeout(function(){t.show(z)},t.options.show.delay)}function w(z){if(t.status.disabled===true){return}if(t.options.hide.fixed===true&&t.options.hide.when.event.search(/mouse(out|leave)/i)!==-1&&f(z.relatedTarget).parents("div.qtip[qtip]").length>0){z.stopPropagation();z.preventDefault();clearTimeout(t.timers.hide);return 
false}clearTimeout(t.timers.show);clearTimeout(t.timers.hide);t.elements.tooltip.stop(true,true);t.timers.hide=setTimeout(function(){t.hide(z)},t.options.hide.delay)}if((t.options.show.when.target.add(t.options.hide.when.target).length===1&&t.options.show.when.event==t.options.hide.when.event&&t.options.hide.when.event!=="inactive")||t.options.hide.when.event=="unfocus"){t.cache.toggle=0;v.bind(t.options.show.when.event+".qtip",function(z){if(t.cache.toggle==0){x(z)}else{w(z)}})}else{v.bind(t.options.show.when.event+".qtip",x);if(t.options.hide.when.event!=="inactive"){u.bind(t.options.hide.when.event+".qtip",w)}}if(t.options.position.type.search(/(fixed|absolute)/)!==-1){t.elements.tooltip.bind("mouseover.qtip",t.focus)}if(t.options.position.target==="mouse"&&t.options.position.type!=="static"){v.bind("mousemove.qtip",function(z){t.cache.mouse={x:z.pageX,y:z.pageY};if(t.status.disabled===false&&t.options.position.adjust.mouse===true&&t.options.position.type!=="static"&&t.elements.tooltip.css("display")!=="none"){t.updatePosition(z)}})}}function o(u,v,A){var z,s,x,y,t,w;z=this;if(A.corner=="center"){return 
v.position}s=f.extend({},u);y={x:false,y:false};t={left:(s.left<f.fn.qtip.cache.screen.scroll.left),right:(s.left+A.dimensions.width+2>=f.fn.qtip.cache.screen.width+f.fn.qtip.cache.screen.scroll.left),top:(s.top<f.fn.qtip.cache.screen.scroll.top),bottom:(s.top+A.dimensions.height+2>=f.fn.qtip.cache.screen.height+f.fn.qtip.cache.screen.scroll.top)};x={left:(t.left&&(A.corner.search(/right/i)!=-1||(A.corner.search(/right/i)==-1&&!t.right))),right:(t.right&&(A.corner.search(/left/i)!=-1||(A.corner.search(/left/i)==-1&&!t.left))),top:(t.top&&A.corner.search(/top/i)==-1),bottom:(t.bottom&&A.corner.search(/bottom/i)==-1)};if(x.left){if(z.options.position.target!=="mouse"){s.left=v.position.left+v.dimensions.width}else{s.left=z.cache.mouse.x}y.x="Left"}else{if(x.right){if(z.options.position.target!=="mouse"){s.left=v.position.left-A.dimensions.width}else{s.left=z.cache.mouse.x-A.dimensions.width}y.x="Right"}}if(x.top){if(z.options.position.target!=="mouse"){s.top=v.position.top+v.dimensions.height}else{s.top=z.cache.mouse.y}y.y="top"}else{if(x.bottom){if(z.options.position.target!=="mouse"){s.top=v.position.top-A.dimensions.height}else{s.top=z.cache.mouse.y-A.dimensions.height}y.y="bottom"}}if(s.left<0){s.left=u.left;y.x=false}if(s.top<0){s.top=u.top;y.y=false}if(z.options.style.tip.corner!==false){s.corner=new String(A.corner);if(y.x!==false){s.corner=s.corner.replace(/Left|Right|Middle/,y.x)}if(y.y!==false){s.corner=s.corner.replace(/top|bottom/,y.y)}if(s.corner!==z.elements.tip.attr("rel")){e.call(z,s.corner)}}return s}function q(u,t){var v,s;v=f.extend(true,{},u);for(s in v){if(t===true&&s.search(/(tip|classes)/i)!==-1){delete v[s]}else{if(!t&&s.search(/(width|border|tip|title|classes|user)/i)!==-1){delete v[s]}}}return v}function c(s){if(typeof s.tip!=="object"){s.tip={corner:s.tip}}if(typeof s.tip.size!=="object"){s.tip.size={width:s.tip.size,height:s.tip.size}}if(typeof s.border!=="object"){s.border={width:s.border}}if(typeof 
s.width!=="object"){s.width={value:s.width}}if(typeof s.width.max=="string"){s.width.max=parseInt(s.width.max.replace(/([0-9]+)/i,"$1"))}if(typeof s.width.min=="string"){s.width.min=parseInt(s.width.min.replace(/([0-9]+)/i,"$1"))}if(typeof s.tip.size.x=="number"){s.tip.size.width=s.tip.size.x;delete s.tip.size.x}if(typeof s.tip.size.y=="number"){s.tip.size.height=s.tip.size.y;delete s.tip.size.y}return s}function a(){var s,t,u,x,v,w;s=this;u=[true,{}];for(t=0;t<arguments.length;t++){u.push(arguments[t])}x=[f.extend.apply(f,u)];while(typeof x[0].name=="string"){x.unshift(c(f.fn.qtip.styles[x[0].name]))}x.unshift(true,{classes:{tooltip:"qtip-"+(arguments[0].name||"defaults")}},f.fn.qtip.styles.defaults);v=f.extend.apply(f,x);w=(f.browser.msie)?1:0;v.tip.size.width+=w;v.tip.size.height+=w;if(v.tip.size.width%2>0){v.tip.size.width+=1}if(v.tip.size.height%2>0){v.tip.size.height+=1}if(v.tip.corner===true){v.tip.corner=(s.options.position.corner.tooltip==="center")?false:s.options.position.corner.tooltip}return v}function b(v,u,t){var s={bottomRight:[[0,0],[u,t],[u,0]],bottomLeft:[[0,0],[u,0],[0,t]],topRight:[[0,t],[u,0],[u,t]],topLeft:[[0,0],[0,t],[u,t]],topMiddle:[[0,t],[u/2,0],[u,t]],bottomMiddle:[[0,0],[u,0],[u/2,t]],rightMiddle:[[0,0],[u,t/2],[0,t]],leftMiddle:[[u,0],[u,t],[0,t/2]]};s.leftTop=s.bottomRight;s.rightTop=s.bottomLeft;s.leftBottom=s.topRight;s.rightBottom=s.topLeft;return s[v]}function g(s){var t;if(f("<canvas>").get(0).getContext){t={topLeft:[s,s],topRight:[0,s],bottomLeft:[s,0],bottomRight:[0,0]}}else{if(f.browser.msie){t={topLeft:[-90,90,0],topRight:[-90,90,-s],bottomLeft:[90,270,0],bottomRight:[90,270,-s]}}}return t}function k(){var s,t,u;s=this;u=s.getDimensions();t='<iframe class="qtip-bgiframe" frameborder="0" tabindex="-1" src="javascript:false" style="display:block; position:absolute; z-index:-1; filter:alpha(opacity=\'0\'); border: 1px solid red; height:'+u.height+"px; width:"+u.width+'px" 
/>';s.elements.bgiframe=s.elements.wrapper.prepend(t).children(".qtip-bgiframe:first")}f(document).ready(function(){f.fn.qtip.cache={screen:{scroll:{left:f(window).scrollLeft(),top:f(window).scrollTop()},width:f(window).width(),height:f(window).height()}};var s;f(window).bind("resize scroll",function(t){clearTimeout(s);s=setTimeout(function(){if(t.type==="scroll"){f.fn.qtip.cache.screen.scroll={left:f(window).scrollLeft(),top:f(window).scrollTop()}}else{f.fn.qtip.cache.screen.width=f(window).width();f.fn.qtip.cache.screen.height=f(window).height()}for(i=0;i<f.fn.qtip.interfaces.length;i++){var u=f.fn.qtip.interfaces[i];if(u.status.rendered===true&&(u.options.position.type!=="static"||u.options.position.adjust.scroll&&t.type==="scroll"||u.options.position.adjust.resize&&t.type==="resize")){u.updatePosition(t,true)}}},100)});f(document).bind("mousedown.qtip",function(t){if(f(t.target).parents("div.qtip").length===0){f(".qtip[unfocus]").each(function(){var u=f(this).qtip("api");if(f(this).is(":visible")&&!u.status.disabled&&f(t.target).add(u.elements.target).length>1){u.hide(t)}})}})});f.fn.qtip.interfaces=[];f.fn.qtip.log={error:function(){return 
this}};f.fn.qtip.constants={};f.fn.qtip.defaults={content:{prerender:false,text:false,url:false,data:null,title:{text:false,button:false}},position:{target:false,corner:{target:"bottomRight",tooltip:"topLeft"},adjust:{x:0,y:0,mouse:true,screen:false,scroll:true,resize:true},type:"absolute",container:false},show:{when:{target:false,event:"mouseover"},effect:{type:"fade",length:100},delay:140,solo:false,ready:false},hide:{when:{target:false,event:"mouseout"},effect:{type:"fade",length:100},delay:0,fixed:false},api:{beforeRender:function(){},onRender:function(){},beforePositionUpdate:function(){},onPositionUpdate:function(){},beforeShow:function(){},onShow:function(){},beforeHide:function(){},onHide:function(){},beforeContentUpdate:function(){},onContentUpdate:function(){},beforeContentLoad:function(){},onContentLoad:function(){},beforeTitleUpdate:function(){},onTitleUpdate:function(){},beforeDestroy:function(){},onDestroy:function(){},beforeFocus:function(){},onFocus:function(){}}};f.fn.qtip.styles={defaults:{background:"white",color:"#111",overflow:"hidden",textAlign:"left",width:{min:0,max:250},padding:"5px 9px",border:{width:1,radius:0,color:"#d3d3d3"},tip:{corner:false,color:false,size:{width:13,height:13},opacity:1},title:{background:"#e1e1e1",fontWeight:"bold",padding:"7px 
12px"},button:{cursor:"pointer"},classes:{target:"",tip:"qtip-tip",title:"qtip-title",button:"qtip-button",content:"qtip-content",active:"qtip-active"}},cream:{border:{width:3,radius:0,color:"#F9E98E"},title:{background:"#F0DE7D",color:"#A27D35"},background:"#FBF7AA",color:"#A27D35",classes:{tooltip:"qtip-cream"}},light:{border:{width:3,radius:0,color:"#E2E2E2"},title:{background:"#f1f1f1",color:"#454545"},background:"white",color:"#454545",classes:{tooltip:"qtip-light"}},dark:{border:{width:3,radius:0,color:"#303030"},title:{background:"#404040",color:"#f3f3f3"},background:"#505050",color:"#f3f3f3",classes:{tooltip:"qtip-dark"}},red:{border:{width:3,radius:0,color:"#CE6F6F"},title:{background:"#f28279",color:"#9C2F2F"},background:"#F79992",color:"#9C2F2F",classes:{tooltip:"qtip-red"}},green:{border:{width:3,radius:0,color:"#A9DB66"},title:{background:"#b9db8c",color:"#58792E"},background:"#CDE6AC",color:"#58792E",classes:{tooltip:"qtip-green"}},blue:{border:{width:3,radius:0,color:"#ADD9ED"},title:{background:"#D0E9F5",color:"#5E99BD"},background:"#E5F6FE",color:"#4D9FBF",classes:{tooltip:"qtip-blue"}}}})(jQuery);
\ No newline at end of file
--- a/web/data/jquery.tablesorter.js	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/data/jquery.tablesorter.js	Fri Dec 09 12:08:27 2011 +0100
@@ -874,5 +874,3 @@
 Sortable.sortTables = function() {
    jQuery("table.listing").tablesorter({textExtraction: cubicwebSortValueExtraction});
 };
-
-jQuery(document).ready(Sortable.sortTables);
Binary file web/data/liveclipboard-icon.png has changed
Binary file web/data/logo-logilab.png has changed
Binary file web/data/logo.png has changed
Binary file web/data/microformats-button.png has changed
Binary file web/data/microformats-icon.png has changed
Binary file web/data/no-check-no-border.png has changed
Binary file web/data/pen_icon.png has changed
Binary file web/data/plus.png has changed
Binary file web/data/puce.png has changed
Binary file web/data/puce_down.png has changed
Binary file web/data/puce_down_black.png has changed
Binary file web/data/puce_up.png has changed
Binary file web/data/required.png has changed
Binary file web/data/rhythm15.png has changed
Binary file web/data/rhythm18.png has changed
Binary file web/data/rhythm20.png has changed
Binary file web/data/rhythm22.png has changed
Binary file web/data/rhythm24.png has changed
Binary file web/data/rhythm26.png has changed
Binary file web/data/rss-button.png has changed
Binary file web/data/rss.png has changed
Binary file web/data/search.png has changed
Binary file web/data/sendcancel.png has changed
Binary file web/data/sendok.png has changed
Binary file web/data/timeline/blue-circle.png has changed
Binary file web/data/timeline/bubble-arrows.png has changed
Binary file web/data/timeline/bubble-body-and-arrows.png has changed
Binary file web/data/timeline/bubble-body.png has changed
Binary file web/data/timeline/bubble-bottom-arrow.png has changed
Binary file web/data/timeline/bubble-bottom-left.png has changed
Binary file web/data/timeline/bubble-bottom-right.png has changed
Binary file web/data/timeline/bubble-bottom.png has changed
Binary file web/data/timeline/bubble-left-arrow.png has changed
Binary file web/data/timeline/bubble-left.png has changed
Binary file web/data/timeline/bubble-right-arrow.png has changed
Binary file web/data/timeline/bubble-right.png has changed
Binary file web/data/timeline/bubble-top-arrow.png has changed
Binary file web/data/timeline/bubble-top-left.png has changed
Binary file web/data/timeline/bubble-top-right.png has changed
Binary file web/data/timeline/bubble-top.png has changed
Binary file web/data/timeline/close-button.png has changed
Binary file web/data/timeline/copyright-vertical.png has changed
Binary file web/data/timeline/copyright.png has changed
Binary file web/data/timeline/dark-blue-circle.png has changed
Binary file web/data/timeline/dark-green-circle.png has changed
Binary file web/data/timeline/dark-red-circle.png has changed
Binary file web/data/timeline/dull-blue-circle.png has changed
Binary file web/data/timeline/dull-green-circle.png has changed
Binary file web/data/timeline/dull-red-circle.png has changed
Binary file web/data/timeline/gray-circle.png has changed
Binary file web/data/timeline/green-circle.png has changed
Binary file web/data/timeline/message-bottom-left.png has changed
Binary file web/data/timeline/message-bottom-right.png has changed
Binary file web/data/timeline/message-left.png has changed
Binary file web/data/timeline/message-right.png has changed
Binary file web/data/timeline/message-top-left.png has changed
Binary file web/data/timeline/message-top-right.png has changed
Binary file web/data/timeline/message.png has changed
Binary file web/data/timeline/red-circle.png has changed
Binary file web/data/timeline/sundial.png has changed
Binary file web/data/timeline/top-bubble.png has changed
Binary file web/data/trash_can.png has changed
Binary file web/data/trash_can_small.png has changed
--- a/web/data/uiprops.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/data/uiprops.py	Fri Dec 09 12:08:27 2011 +0100
@@ -77,6 +77,17 @@
 headerBgColor = '#ff7700'
 headerBg = lazystr('%(headerBgColor)s url("banner.png") repeat-x top left')
 
+
+# scale 3:5 stranded
+# h1 { font-size:2em; }
+# h2 { font-size:1.61538em; }
+# h3 { font-size:1.23077em; }
+#
+# scale le corbusier */
+# h1 { font-size:2.11538em; }
+# h2 { font-size:1.61538em; }
+# h3 { font-size:1.30769em; }
+
 # h
 h1FontSize = '1.5em' # 18px
 h1Padding = '0 0 0.14em 0 '
@@ -154,3 +165,9 @@
 infoMsgBgImg = 'url("information.png") 5px center no-repeat'
 errorMsgBgImg = 'url("error.png") 100% 50% no-repeat'
 errorMsgColor = '#ed0d0d'
+
+# facets
+facet_titleFont = 'bold 100% Georgia'
+facet_overflowedHeight = '12em'
+
+
--- a/web/facet.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/facet.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -30,6 +30,7 @@
 .. autoclass:: cubicweb.web.facet.RelationAttributeFacet
 .. autoclass:: cubicweb.web.facet.HasRelationFacet
 .. autoclass:: cubicweb.web.facet.AttributeFacet
+.. autoclass:: cubicweb.web.facet.RQLPathFacet
 .. autoclass:: cubicweb.web.facet.RangeFacet
 .. autoclass:: cubicweb.web.facet.DateRangeFacet
 
@@ -47,6 +48,7 @@
 __docformat__ = "restructuredtext en"
 _ = unicode
 
+from warnings import warn
 from copy import deepcopy
 from datetime import date, datetime, timedelta
 
@@ -55,15 +57,16 @@
 from logilab.common.decorators import cached
 from logilab.common.date import datetime2ticks, ustrftime, ticks2datetime
 from logilab.common.compat import all
+from logilab.common.deprecation import deprecated
 
 from rql import parse, nodes, utils
 
 from cubicweb import Unauthorized, typed_eid
 from cubicweb.schema import display_name
 from cubicweb.utils import make_uid
-from cubicweb.selectors import match_context_prop, partial_relation_possible
+from cubicweb.selectors import match_context_prop, partial_relation_possible, yes
 from cubicweb.appobject import AppObject
-from cubicweb.web.htmlwidgets import HTMLWidget
+from cubicweb.web import RequestError, htmlwidgets
 
 
 def rtype_facet_title(facet):
@@ -73,24 +76,73 @@
                             context=iter(ptypes).next())
     return display_name(facet._cw, facet.rtype, form=facet.role)
 
+def get_facet(req, facetid, select, filtered_variable):
+    return req.vreg['facets'].object_by_id(facetid, req, select=select,
+                                           filtered_variable=filtered_variable)
+
+@deprecated('[3.13] filter_hiddens moved to cubicweb.web.views.facets with '
+            'slightly modified prototype')
+def filter_hiddens(w, baserql, **kwargs):
+    from cubicweb.web.views.facets import filter_hiddens
+    return filter_hiddens(w, baserql, wdgs=kwargs.pop('facets'), **kwargs)
+
+
 ## rqlst manipulation functions used by facets ################################
 
-def prepare_facets_rqlst(rqlst, args=None):
+def init_facets(rset, select, mainvar=None):
+    """Alters in place the <select> for filtering and returns related data.
+
+    Calls :func:`prepare_select` to prepare the syntaxtree for selection and
+    :func:`get_filtered_variable` that selects the variable to be filtered and
+    drops several parts of the select tree. See each function docstring for
+    details.
+
+    :param rset: ResultSet we init facet for.
+    :type rset: :class:`~cubicweb.rset.ResultSet`
+
+    :param select: Select statement to be *altered* to support filtering.
+    :type select:   :class:`~rql.stmts.Select` from the ``rset`` parameters.
+
+    :param mainvar: Name of the variable we want to filter with facets.
+    :type mainvar:  string
+
+    :rtype: (filtered_variable, baserql) tuple.
+    :return filtered_variable:  A rql class:`~rql.node.VariableRef`
+                                instance as returned by
+                                :func:`get_filtered_variable`.
+
+    :return baserql: A string containing the rql before
+                     :func:`prepare_select` but after
+                     :func:`get_filtered_variable`.
+    """
+    rset.req.vreg.rqlhelper.annotate(select)
+    filtered_variable = get_filtered_variable(select, mainvar)
+    baserql = select.as_string(kwargs=rset.args) # before call to prepare_select
+    prepare_select(select, filtered_variable)
+    return filtered_variable, baserql
+
+def get_filtered_variable(select, mainvar=None):
+    """drop any limit/offset from select (in-place modification) and return the
+    variable whose name is `mainvar` or the first variable selected in column 0
+    """
+    select.set_limit(None)
+    select.set_offset(None)
+    if mainvar is None:
+        vref = select.selection[0].iget_nodes(nodes.VariableRef).next()
+        return vref.variable
+    return select.defined_vars[mainvar]
+
+def prepare_select(select, filtered_variable):
     """prepare a syntax tree to generate facet filters
 
     * remove ORDERBY/GROUPBY clauses
     * cleanup selection (remove everything)
     * undefine unnecessary variables
     * set DISTINCT
-    * unset LIMIT/OFFSET
+
+    Notice unset of LIMIT/OFFSET us expected to be done by a previous call to
+    :func:`get_filtered_variable`.
     """
-    if len(rqlst.children) > 1:
-        raise NotImplementedError('FIXME: union not yet supported')
-    select = rqlst.children[0]
-    mainvar = filtered_variable(select)
-    select.set_limit(None)
-    select.set_offset(None)
-    baserql = select.as_string(kwargs=args)
     # cleanup sort terms / group by
     select.remove_sort_terms()
     select.remove_groups()
@@ -100,31 +152,120 @@
         select.remove_selected(term)
     # remove unbound variables which only have some type restriction
     for dvar in select.defined_vars.values():
-        if not (dvar is mainvar or dvar.stinfo['relations']):
+        if not (dvar is filtered_variable or dvar.stinfo['relations']):
             select.undefine_variable(dvar)
     # global tree config: DISTINCT, LIMIT, OFFSET
     select.set_distinct(True)
-    return mainvar, baserql
+
+@deprecated('[3.13] use init_facets instead')
+def prepare_facets_rqlst(rqlst, args=None):
+    assert len(rqlst.children) == 1, 'FIXME: union not yet supported'
+    select = rqlst.children[0]
+    filtered_variable = get_filtered_variable(select)
+    baserql = select.as_string(args)
+    prepare_select(select, filtered_variable)
+    return filtered_variable, baserql
 
-def filtered_variable(rqlst):
-    vref = rqlst.selection[0].iget_nodes(nodes.VariableRef).next()
-    return vref.variable
+def prepare_vocabulary_select(select, filtered_variable, rtype, role,
+                              select_target_entity=True):
+    """prepare a syntax tree to generate a filter vocabulary rql using the given
+    relation:
+    * create a variable to filter on this relation
+    * add the relation
+    * add the new variable to GROUPBY clause if necessary
+    * add the new variable to the selection
+    """
+    newvar = _add_rtype_relation(select, filtered_variable, rtype, role)[0]
+    if select_target_entity:
+        # if select.groupby: XXX we remove groupby now
+        #     select.add_group_var(newvar)
+        select.add_selected(newvar)
+    # add is restriction if necessary
+    if filtered_variable.stinfo['typerel'] is None:
+        etypes = frozenset(sol[filtered_variable.name] for sol in select.solutions)
+        select.add_type_restriction(filtered_variable, etypes)
+    return newvar
 
 
-def get_facet(req, facetid, rqlst, mainvar):
-    return req.vreg['facets'].object_by_id(facetid, req, rqlst=rqlst,
-                                           filtered_variable=mainvar)
+def insert_attr_select_relation(select, filtered_variable, rtype, role, attrname,
+                                sortfuncname=None, sortasc=True,
+                                select_target_entity=True):
+    """modify a syntax tree to :
+    * link a new variable to `filtered_variable` through `rtype` (where filtered_variable has `role`)
+    * retrieve only the newly inserted variable and its `attrname`
+
+    Sorting:
+    * on `attrname` ascendant (`sortasc`=True) or descendant (`sortasc`=False)
+    * on `sortfuncname`(`attrname`) if `sortfuncname` is specified
+    * no sort if `sortasc` is None
+    """
+    cleanup_select(select, filtered_variable)
+    var = prepare_vocabulary_select(select, filtered_variable, rtype, role,
+                                   select_target_entity)
+    attrvar = select.make_variable()
+    select.add_relation(var, attrname, attrvar)
+    # if query is grouped, we have to add the attribute variable
+    #if select.groupby: XXX may not occur anymore
+    #    if not attrvar in select.groupby:
+    #        select.add_group_var(attrvar)
+    if sortasc is not None:
+        _set_orderby(select, attrvar, sortasc, sortfuncname)
+    # add attribute variable to selection
+    select.add_selected(attrvar)
+    return var
 
 
-def filter_hiddens(w, **kwargs):
-    for key, val in kwargs.items():
-        w(u'<input type="hidden" name="%s" value="%s" />' % (
-            key, xml_escape(val)))
+def cleanup_select(select, filtered_variable):
+    """cleanup tree from unnecessary restrictions:
+    * attribute selection
+    * optional relations linked to the main variable
+    * mandatory relations linked to the main variable
+    """
+    if select.where is None:
+        return
+    schema = select.root.schema
+    toremove = set()
+    vargraph = deepcopy(select.vargraph) # graph representing links between variable
+    for rel in select.where.get_nodes(nodes.Relation):
+        ovar = _may_be_removed(rel, schema, filtered_variable)
+        if ovar is not None:
+            toremove.add(ovar)
+    removed = set()
+    while toremove:
+        trvar = toremove.pop()
+        trvarname = trvar.name
+        # remove paths using this variable from the graph
+        linkedvars = vargraph.pop(trvarname)
+        for ovarname in linkedvars:
+            vargraph[ovarname].remove(trvarname)
+        # remove relation using this variable
+        for rel in trvar.stinfo['relations']:
+            if rel in removed:
+                # already removed
+                continue
+            select.remove_node(rel)
+            removed.add(rel)
+        rel = trvar.stinfo['typerel']
+        if rel is not None and not rel in removed:
+            select.remove_node(rel)
+            removed.add(rel)
+        # cleanup groupby clause
+        if select.groupby:
+            for vref in select.groupby[:]:
+                if vref.name == trvarname:
+                    select.remove_group_var(vref)
+        # we can also remove all variables which are linked to this variable
+        # and have no path to the main variable
+        for ovarname in linkedvars:
+            if ovarname == filtered_variable.name:
+                continue
+            if not has_path(vargraph, ovarname, filtered_variable.name):
+                toremove.add(select.defined_vars[ovarname])
 
 
-def _may_be_removed(rel, schema, mainvar):
+def _may_be_removed(rel, schema, variable):
     """if the given relation may be removed from the tree, return the variable
-    on the other side of `mainvar`, else return None
+    on the other side of `variable`, else return None
     Conditions:
     * the relation is an attribute selection of the main variable
     * the relation is optional relation linked to the main variable
@@ -133,7 +274,7 @@
     """
     lhs, rhs = rel.get_variable_parts()
     rschema = schema.rschema(rel.r_type)
-    if lhs.variable is mainvar:
+    if lhs.variable is variable:
         try:
             ovar = rhs.variable
         except AttributeError:
@@ -141,13 +282,14 @@
             # XXX: X title LOWER(T) if it makes sense?
             return None
         if rschema.final:
-            if len(ovar.stinfo['relations']) == 1:
+            if len(ovar.stinfo['relations']) == 1 \
+                   and not ovar.stinfo.get('having'):
                 # attribute selection
                 return ovar
             return None
         opt = 'right'
         cardidx = 0
-    elif getattr(rhs, 'variable', None) is mainvar:
+    elif getattr(rhs, 'variable', None) is variable:
         ovar = lhs.variable
         opt = 'left'
         cardidx = 1
@@ -168,52 +310,28 @@
         return ovar
     return None
 
-def _make_relation(rqlst, mainvar, rtype, role):
-    newvar = rqlst.make_variable()
+def _make_relation(select, variable, rtype, role):
+    newvar = select.make_variable()
     if role == 'object':
-        rel = nodes.make_relation(newvar, rtype, (mainvar,), nodes.VariableRef)
+        rel = nodes.make_relation(newvar, rtype, (variable,), nodes.VariableRef)
     else:
-        rel = nodes.make_relation(mainvar, rtype, (newvar,), nodes.VariableRef)
+        rel = nodes.make_relation(variable, rtype, (newvar,), nodes.VariableRef)
     return newvar, rel
 
-def _add_rtype_relation(rqlst, mainvar, rtype, role):
-    """add a relation relying `mainvar` to entities linked by the `rtype`
-    relation (where `mainvar` has `role`)
+def _add_rtype_relation(select, variable, rtype, role):
+    """add a relation relying `variable` to entities linked by the `rtype`
+    relation (where `variable` has `role`)
 
     return the inserted variable for linked entities.
     """
-    newvar, newrel = _make_relation(rqlst, mainvar, rtype, role)
-    rqlst.add_restriction(newrel)
+    newvar, newrel = _make_relation(select, variable, rtype, role)
+    select.add_restriction(newrel)
     return newvar, newrel
 
-def _add_eid_restr(rel, restrvar, value):
-    rrel = nodes.make_constant_restriction(restrvar, 'eid', value, 'Int')
-    rel.parent.replace(rel, nodes.And(rel, rrel))
-
-def _prepare_vocabulary_rqlst(rqlst, mainvar, rtype, role,
-                              select_target_entity=True):
-    """prepare a syntax tree to generate a filter vocabulary rql using the given
-    relation:
-    * create a variable to filter on this relation
-    * add the relation
-    * add the new variable to GROUPBY clause if necessary
-    * add the new variable to the selection
-    """
-    newvar = _add_rtype_relation(rqlst, mainvar, rtype, role)[0]
-    if select_target_entity:
-        if rqlst.groupby:
-            rqlst.add_group_var(newvar)
-        rqlst.add_selected(newvar)
-    # add is restriction if necessary
-    if mainvar.stinfo['typerel'] is None:
-        etypes = frozenset(sol[mainvar.name] for sol in rqlst.solutions)
-        rqlst.add_type_restriction(mainvar, etypes)
-    return newvar
-
-def _remove_relation(rqlst, rel, var):
+def _remove_relation(select, rel, var):
     """remove a constraint relation from the syntax tree"""
     # remove the relation
-    rqlst.remove_node(rel)
+    select.remove_node(rel)
     # remove relations where the filtered variable appears on the
     # lhs and rhs is a constant restriction
     extra = []
@@ -223,93 +341,31 @@
         if vrel.children[0].variable is var:
             if not vrel.children[1].get_nodes(nodes.Constant):
                 extra.append(vrel)
-            rqlst.remove_node(vrel)
+            select.remove_node(vrel)
     return extra
 
-def _set_orderby(rqlst, newvar, sortasc, sortfuncname):
+def _set_orderby(select, newvar, sortasc, sortfuncname):
     if sortfuncname is None:
-        rqlst.add_sort_var(newvar, sortasc)
+        select.add_sort_var(newvar, sortasc)
     else:
         vref = nodes.variable_ref(newvar)
         vref.register_reference()
         sortfunc = nodes.Function(sortfuncname)
         sortfunc.append(vref)
         term = nodes.SortTerm(sortfunc, sortasc)
-        rqlst.add_sort_term(term)
-
-def insert_attr_select_relation(rqlst, mainvar, rtype, role, attrname,
-                                sortfuncname=None, sortasc=True,
-                                select_target_entity=True):
-    """modify a syntax tree to :
-    * link a new variable to `mainvar` through `rtype` (where mainvar has `role`)
-    * retrieve only the newly inserted variable and its `attrname`
-
-    Sorting:
-    * on `attrname` ascendant (`sortasc`=True) or descendant (`sortasc`=False)
-    * on `sortfuncname`(`attrname`) if `sortfuncname` is specified
-    * no sort if `sortasc` is None
-    """
-    _cleanup_rqlst(rqlst, mainvar)
-    var = _prepare_vocabulary_rqlst(rqlst, mainvar, rtype, role,
-                                    select_target_entity)
-    attrvar = rqlst.make_variable()
-    rqlst.add_relation(var, attrname, attrvar)
-    # if query is grouped, we have to add the attribute variable
-    if rqlst.groupby:
-        if not attrvar in rqlst.groupby:
-            rqlst.add_group_var(attrvar)
-    if sortasc is not None:
-        _set_orderby(rqlst, attrvar, sortasc, sortfuncname)
-    # add attribute variable to selection
-    rqlst.add_selected(attrvar)
-    return var
+        select.add_sort_term(term)
 
-def _cleanup_rqlst(rqlst, mainvar):
-    """cleanup tree from unnecessary restriction:
-    * attribute selection
-    * optional relations linked to the main variable
-    * mandatory relations linked to the main variable
-    """
-    if rqlst.where is None:
-        return
-    schema = rqlst.root.schema
-    toremove = set()
-    vargraph = deepcopy(rqlst.vargraph) # graph representing links between variable
-    for rel in rqlst.where.get_nodes(nodes.Relation):
-        ovar = _may_be_removed(rel, schema, mainvar)
-        if ovar is not None:
-            toremove.add(ovar)
-    removed = set()
-    while toremove:
-        trvar = toremove.pop()
-        trvarname = trvar.name
-        # remove paths using this variable from the graph
-        linkedvars = vargraph.pop(trvarname)
-        for ovarname in linkedvars:
-            vargraph[ovarname].remove(trvarname)
-        # remove relation using this variable
-        for rel in trvar.stinfo['relations']:
-            if rel in removed:
-                # already removed
-                continue
-            rqlst.remove_node(rel)
-            removed.add(rel)
-        rel = trvar.stinfo['typerel']
-        if rel is not None and not rel in removed:
-            rqlst.remove_node(rel)
-            removed.add(rel)
-        # cleanup groupby clause
-        if rqlst.groupby:
-            for vref in rqlst.groupby[:]:
-                if vref.name == trvarname:
-                    rqlst.remove_group_var(vref)
-        # we can also remove all variables which are linked to this variable
-        # and have no path to the main variable
-        for ovarname in linkedvars:
-            if ovarname == mainvar.name:
-                continue
-            if not has_path(vargraph, ovarname, mainvar.name):
-                toremove.add(rqlst.defined_vars[ovarname])
+def _get_var(select, varname, varmap):
+    try:
+        return varmap[varname]
+    except KeyError:
+        varmap[varname] = var = select.make_variable()
+        return var
+
+
+_prepare_vocabulary_rqlst = deprecated('[3.13] renamed prepare_vocabulary_select')(
+    prepare_vocabulary_select)
+_cleanup_rqlst = deprecated('[3.13] renamed to cleanup_select')(cleanup_select)
 
 
 ## base facet classes ##########################################################
@@ -335,7 +391,8 @@
     Facets will have the following attributes set (beside the standard
     :class:`~cubicweb.appobject.AppObject` ones):
 
-    * `rqlst`, the rql syntax tree being facetted
+    * `select`, the :class:`rql.stmts.Select` node of the rql syntax tree being
+      filtered
 
     * `filtered_variable`, the variable node in this rql syntax tree that we're
       interested in filtering
@@ -343,7 +400,7 @@
     Facets implementors may also be interested in the following properties /
     methods:
 
-    .. automethod:: cubicweb.web.facet.AbstractFacet.operator
+    .. autoattribute:: cubicweb.web.facet.AbstractFacet.operator
     .. automethod:: cubicweb.web.facet.AbstractFacet.rqlexec
     """
     __abstract__ = True
@@ -365,32 +422,18 @@
     start_unfolded = True
     cw_rset = None # ensure facets have a cw_rset attribute
 
-    def __init__(self, req, rqlst=None, filtered_variable=None,
+    def __init__(self, req, select=None, filtered_variable=None,
                  **kwargs):
         super(AbstractFacet, self).__init__(req, **kwargs)
-        assert rqlst is not None
+        assert select is not None
         assert filtered_variable
         # take care: facet may be retreived using `object_by_id` from an ajax call
         # or from `select` using the result set to filter
-        self.rqlst = rqlst
+        self.select = select
         self.filtered_variable = filtered_variable
 
-    @property
-    def operator(self):
-        """Return the operator (AND or OR) to use for this facet when multiple
-        values are selected.
-        """
-        # OR between selected values by default
-        return self._cw.form.get(self.__regid__ + '_andor', 'OR')
-
-    def rqlexec(self, rql, args=None):
-        """Utility method to execute some rql queries, and simply returning an
-        empty list if :exc:`Unauthorized` is raised.
-        """
-        try:
-            return self._cw.execute(rql, args)
-        except Unauthorized:
-            return []
+    def __repr__(self):
+        return '<%s>' % self.__class__.__name__
 
     def get_widget(self):
         """Return the widget instance to use to display this facet, or None if
@@ -403,10 +446,36 @@
         """When some facet criteria has been updated, this method is called to
         add restriction for this facet into the rql syntax tree. It should get
         back its value in form parameters, and modify the syntax tree
-        (`self.rqlst`) accordingly.
+        (`self.select`) accordingly.
         """
         raise NotImplementedError
 
+    @property
+    def operator(self):
+        """Return the operator (AND or OR) to use for this facet when multiple
+        values are selected.
+        """
+        # OR between selected values by default
+        return self._cw.form.get(xml_escape(self.__regid__) + '_andor', 'OR')
+
+    def rqlexec(self, rql, args=None):
+        """Utility method to execute some rql queries, and simply returning an
+        empty list if :exc:`Unauthorized` is raised.
+        """
+        try:
+            return self._cw.execute(rql, args)
+        except Unauthorized:
+            return []
+
+    @property
+    def wdgclass(self):
+        raise NotImplementedError
+
+    @property
+    @deprecated('[3.13] renamed .select')
+    def rqlst(self):
+        return self.select
+
 
 class VocabularyFacet(AbstractFacet):
     """This abstract class extend :class:`AbstractFacet` to use the
@@ -419,6 +488,11 @@
     .. automethod:: cubicweb.web.facet.VocabularyFacet.possible_values
     """
     needs_update = True
+    support_and = False
+
+    @property
+    def wdgclass(self):
+        return FacetVocabularyWidget
 
     def get_widget(self):
         """Return the widget instance to use to display this facet.
@@ -429,7 +503,7 @@
         vocab = self.vocabulary()
         if len(vocab) <= 1:
             return None
-        wdg = FacetVocabularyWidget(self)
+        wdg = self.wdgclass(self)
         selected = frozenset(typed_eid(eid) for eid in self._cw.list_form_param(self.__regid__))
         for label, value in vocab:
             if value is None:
@@ -449,13 +523,11 @@
         """
         raise NotImplementedError
 
-    def support_and(self):
-        return False
 
 
 class RelationFacet(VocabularyFacet):
     """Base facet to filter some entities according to other entities to which
-    they are related. Create concret facet by inheriting from this class an then
+    they are related. Create a concrete facet by inheriting from this class and then
     configuring it by setting class attribute described below.
 
     The relation is defined by the `rtype` and `role` attributes.
@@ -474,7 +546,7 @@
     set. By default, `i18nable` will be set according to the schema, but you can
     force its value by setting it has a class attribute.
 
-    You can filter out target entity types by specifying `target_type`
+    You can filter out target entity types by specifying `target_type`.
 
     By default, vocabulary will be displayed sorted on `target_attr` value in an
     ascending way. You can control sorting with:
@@ -520,8 +592,14 @@
     # class attributes to configure the relation facet
     rtype = None
     role = 'subject'
+    target_type = None
     target_attr = 'eid'
-    target_type = None
+    # for subclasses parametrization, should not change if you want a
+    # RelationFacet
+    target_attr_type = 'Int'
+    restr_attr = 'eid'
+    restr_attr_type = 'Int'
+    comparator = '=' # could be '<', '<=', '>', '>='
     # set this to a stored procedure name if you want to sort on the result of
     # this function's result instead of direct value
     sortfunc = None
@@ -534,7 +612,80 @@
     _select_target_entity = True
 
     title = property(rtype_facet_title)
-    no_relation_label = '<no relation>'
+    no_relation_label = _('<no relation>')
+
+    def __repr__(self):
+        return '<%s on (%s-%s)>' % (self.__class__.__name__, self.rtype, self.role)
+
+    # facet public API #########################################################
+
+    def vocabulary(self):
+        """return vocabulary for this facet, eg a list of 2-uple (label, value)
+        """
+        select = self.select
+        select.save_state()
+        if self.rql_sort:
+            sort = self.sortasc
+        else:
+            sort = None # will be sorted on label
+        try:
+            var = insert_attr_select_relation(
+                select, self.filtered_variable, self.rtype, self.role,
+                self.target_attr, self.sortfunc, sort,
+                self._select_target_entity)
+            if self.target_type is not None:
+                select.add_type_restriction(var, self.target_type)
+            try:
+                rset = self.rqlexec(select.as_string(), self.cw_rset.args)
+            except Exception:
+                self.exception('error while getting vocabulary for %s, rql: %s',
+                               self, select.as_string())
+                return ()
+        finally:
+            select.recover()
+        # don't call rset_vocabulary on empty result set, it may be an empty
+        # *list* (see rqlexec implementation)
+        values = rset and self.rset_vocabulary(rset) or []
+        if self._include_no_relation():
+            values.insert(0, (self._cw._(self.no_relation_label), ''))
+        return values
+
+    def possible_values(self):
+        """return a list of possible values (as string since it's used to
+        compare to a form value in javascript) for this facet
+        """
+        select = self.select
+        select.save_state()
+        try:
+            cleanup_select(select, self.filtered_variable)
+            if self._select_target_entity:
+                prepare_vocabulary_select(select, self.filtered_variable, self.rtype,
+                                         self.role, select_target_entity=True)
+            else:
+                insert_attr_select_relation(
+                    select, self.filtered_variable, self.rtype, self.role,
+                    self.target_attr, select_target_entity=False)
+            values = [unicode(x) for x, in self.rqlexec(select.as_string())]
+        except Exception:
+            self.exception('while computing values for %s', self)
+            return []
+        finally:
+            select.recover()
+        if self._include_no_relation():
+            values.append('')
+        return values
+
+    def add_rql_restrictions(self):
+        """add restriction for this facet into the rql syntax tree"""
+        value = self._cw.form.get(self.__regid__)
+        if value is None:
+            return
+        filtered_variable = self.filtered_variable
+        restrvar, rel = _add_rtype_relation(self.select, filtered_variable,
+                                            self.rtype, self.role)
+        self.value_restriction(restrvar, rel, value)
+
+    # internal control API #####################################################
 
     @property
     def i18nable(self):
@@ -561,62 +712,6 @@
         return self.sortfunc is not None or (self.label_vid is None
                                              and not self.i18nable)
 
-    def vocabulary(self):
-        """return vocabulary for this facet, eg a list of 2-uple (label, value)
-        """
-        rqlst = self.rqlst
-        rqlst.save_state()
-        if self.rql_sort:
-            sort = self.sortasc
-        else:
-            sort = None # will be sorted on label
-        try:
-            mainvar = self.filtered_variable
-            var = insert_attr_select_relation(
-                rqlst, mainvar, self.rtype, self.role, self.target_attr,
-                self.sortfunc, sort, self._select_target_entity)
-            if self.target_type is not None:
-                rqlst.add_type_restriction(var, self.target_type)
-            try:
-                rset = self.rqlexec(rqlst.as_string(), self.cw_rset.args)
-            except:
-                self.exception('error while getting vocabulary for %s, rql: %s',
-                               self, rqlst.as_string())
-                return ()
-        finally:
-            rqlst.recover()
-        # don't call rset_vocabulary on empty result set, it may be an empty
-        # *list* (see rqlexec implementation)
-        values = rset and self.rset_vocabulary(rset) or []
-        if self._include_no_relation():
-            values.insert(0, (self._cw._(self.no_relation_label), ''))
-        return values
-
-    def possible_values(self):
-        """return a list of possible values (as string since it's used to
-        compare to a form value in javascript) for this facet
-        """
-        rqlst = self.rqlst
-        rqlst.save_state()
-        try:
-            _cleanup_rqlst(rqlst, self.filtered_variable)
-            if self._select_target_entity:
-                _prepare_vocabulary_rqlst(rqlst, self.filtered_variable, self.rtype,
-                                          self.role, select_target_entity=True)
-            else:
-                insert_attr_select_relation(
-                    rqlst, self.filtered_variable, self.rtype, self.role, self.target_attr,
-                    select_target_entity=False)
-            values = [unicode(x) for x, in self.rqlexec(rqlst.as_string())]
-        except:
-            self.exception('while computing values for %s', self)
-            return []
-        finally:
-            rqlst.recover()
-        if self._include_no_relation():
-            values.append('')
-        return values
-
     def rset_vocabulary(self, rset):
         if self.i18nable:
             _ = self._cw._
@@ -635,42 +730,66 @@
                 values = list(reversed(values))
         return values
 
+    @property
     def support_and(self):
         return self._search_card('+*')
 
-    def add_rql_restrictions(self):
-        """add restriction for this facet into the rql syntax tree"""
-        value = self._cw.form.get(self.__regid__)
-        if value is None:
-            return
-        mainvar = self.filtered_variable
-        restrvar, rel = _add_rtype_relation(self.rqlst, mainvar, self.rtype,
-                                            self.role)
+    # internal utilities #######################################################
+
+    def _support_and_compat(self):
+        support = self.support_and
+        if callable(support):
+            warn('[3.13] %s.support_and is now a property' % self.__class__,
+                 DeprecationWarning)
+            support = support()
+        return support
+
+    def value_restriction(self, restrvar, rel, value):
+        # XXX handle rel is None case in RQLPathFacet?
+        if self.restr_attr != 'eid':
+            self.select.set_distinct(True)
         if isinstance(value, basestring):
             # only one value selected
             if value:
-                self.rqlst.add_eid_restriction(restrvar, value)
+                self.select.add_constant_restriction(
+                    restrvar, self.restr_attr, value,
+                    self.restr_attr_type)
             else:
                 rel.parent.replace(rel, nodes.Not(rel))
         elif self.operator == 'OR':
             # set_distinct only if rtype cardinality is > 1
-            if self.support_and():
-                self.rqlst.set_distinct(True)
+            if self._support_and_compat():
+                self.select.set_distinct(True)
             # multiple ORed values: using IN is fine
             if '' in value:
                 value.remove('')
                 self._add_not_rel_restr(rel)
-            _add_eid_restr(rel, restrvar, value)
+            self._and_restriction(rel, restrvar, value)
         else:
-            # multiple values with AND operator
+            # multiple values with AND operator. We have to generate a query like
+            # "X relation A, A eid 1, X relation B, B eid 1", hence the new
+            # relations at each iteration in the while loop below
             if '' in value:
-                value.remove('')
-                self._add_not_rel_restr(rel)
-            _add_eid_restr(rel, restrvar, value.pop())
+                raise RequestError("this doesn't make sense")
+            self._and_restriction(rel, restrvar, value.pop())
             while value:
-                restrvar, rtrel = _make_relation(self.rqlst, mainvar,
+                restrvar, rtrel = _make_relation(self.select, self.filtered_variable,
                                                  self.rtype, self.role)
-                _add_eid_restr(rel, restrvar, value.pop())
+                if rel is None:
+                    self.select.add_restriction(rtrel)
+                else:
+                    rel.parent.replace(rel, nodes.And(rel, rtrel))
+                self._and_restriction(rel, restrvar, value.pop())
+
+    def _and_restriction(self, rel, restrvar, value):
+        if rel is None:
+            self.select.add_constant_restriction(restrvar, self.restr_attr,
+                                                 value, self.restr_attr_type)
+        else:
+            rrel = nodes.make_constant_restriction(restrvar, self.restr_attr,
+                                                   value, self.restr_attr_type)
+            rel.parent.replace(rel, nodes.And(rel, rrel))
+
 
     @cached
     def _search_card(self, cards):
@@ -704,19 +823,19 @@
         if self._cw.vreg.schema.rschema(self.rtype).final:
             return False
         if self.role == 'object':
-            subj = utils.rqlvar_maker(defined=self.rqlst.defined_vars,
-                                      aliases=self.rqlst.aliases).next()
+            subj = utils.rqlvar_maker(defined=self.select.defined_vars,
+                                      aliases=self.select.aliases).next()
             obj = self.filtered_variable.name
         else:
             subj = self.filtered_variable.name
-            obj = utils.rqlvar_maker(defined=self.rqlst.defined_vars,
-                                     aliases=self.rqlst.aliases).next()
+            obj = utils.rqlvar_maker(defined=self.select.defined_vars,
+                                     aliases=self.select.aliases).next()
         restrictions = []
-        if self.rqlst.where:
-            restrictions.append(self.rqlst.where.as_string())
-        if self.rqlst.with_:
+        if self.select.where:
+            restrictions.append(self.select.where.as_string())
+        if self.select.with_:
             restrictions.append('WITH ' + ','.join(
-                term.as_string() for term in self.rqlst.with_))
+                term.as_string() for term in self.select.with_))
         if restrictions:
             restrictions = ',' + ','.join(restrictions)
         else:
@@ -725,14 +844,14 @@
             self.filtered_variable.name, subj, self.rtype, obj, restrictions)
         try:
             return bool(self.rqlexec(rql, self.cw_rset and self.cw_rset.args))
-        except:
+        except Exception:
             # catch exception on executing rql, work-around #1356884 until a
             # proper fix
             self.exception('cant handle rql generated by %s', self)
             return False
 
     def _add_not_rel_restr(self, rel):
-        nrrel = nodes.Not(_make_relation(self.rqlst, self.filtered_variable,
+        nrrel = nodes.Not(_make_relation(self.select, self.filtered_variable,
                                          self.rtype, self.role)[1])
         rel.parent.replace(rel, nodes.Or(nrrel, rel))
 
@@ -744,7 +863,7 @@
 
     * `label_vid` doesn't make sense here
 
-    * you should specify the attribute type using `attrtype` if it's not a
+    * you should specify the attribute type using `target_attr_type` if it's not a
       String
 
     * you can specify a comparison operator using `comparator`
@@ -777,10 +896,18 @@
     """
     _select_target_entity = False
     # attribute type
-    attrtype = 'String'
+    target_attr_type = 'String'
     # type of comparison: default is an exact match on the attribute value
     comparator = '=' # could be '<', '<=', '>', '>='
 
+    @property
+    def restr_attr(self):
+        return self.target_attr
+
+    @property
+    def restr_attr_type(self):
+        return self.target_attr_type
+
     def rset_vocabulary(self, rset):
         if self.i18nable:
             _ = self._cw._
@@ -793,32 +920,6 @@
             return sorted(values)
         return reversed(sorted(values))
 
-    def add_rql_restrictions(self):
-        """add restriction for this facet into the rql syntax tree"""
-        value = self._cw.form.get(self.__regid__)
-        if not value:
-            return
-        mainvar = self.filtered_variable
-        restrvar = _add_rtype_relation(self.rqlst, mainvar, self.rtype,
-                                       self.role)[0]
-        self.rqlst.set_distinct(True)
-        if isinstance(value, basestring) or self.operator == 'OR':
-            # only one value selected or multiple ORed values: using IN is fine
-            self.rqlst.add_constant_restriction(
-                restrvar, self.target_attr, value,
-                self.attrtype, self.comparator)
-        else:
-            # multiple values with AND operator
-            self.rqlst.add_constant_restriction(
-                restrvar, self.target_attr, value.pop(),
-                self.attrtype, self.comparator)
-            while value:
-                restrvar = _add_rtype_relation(self.rqlst, mainvar, self.rtype,
-                                               self.role)[0]
-                self.rqlst.add_constant_restriction(
-                    restrvar, self.target_attr, value.pop(),
-                    self.attrtype, self.comparator)
-
 
 class AttributeFacet(RelationAttributeFacet):
     """Base facet to filter some entities according one of their attribute.
@@ -852,6 +953,7 @@
                       ('> 275', '275'), ('> 300', '300')]
     """
 
+    support_and = False
     _select_target_entity = True
 
     @property
@@ -867,36 +969,210 @@
     def vocabulary(self):
         """return vocabulary for this facet, eg a list of 2-uple (label, value)
         """
-        rqlst = self.rqlst
-        rqlst.save_state()
+        select = self.select
+        select.save_state()
         try:
-            mainvar = self.filtered_variable
-            _cleanup_rqlst(rqlst, mainvar)
-            newvar = _prepare_vocabulary_rqlst(rqlst, mainvar, self.rtype, self.role)
-            _set_orderby(rqlst, newvar, self.sortasc, self.sortfunc)
+            filtered_variable = self.filtered_variable
+            cleanup_select(select, filtered_variable)
+            newvar = prepare_vocabulary_select(select, filtered_variable, self.rtype, self.role)
+            _set_orderby(select, newvar, self.sortasc, self.sortfunc)
             try:
-                rset = self.rqlexec(rqlst.as_string(), self.cw_rset.args)
-            except:
+                rset = self.rqlexec(select.as_string(), self.cw_rset.args)
+            except Exception:
                 self.exception('error while getting vocabulary for %s, rql: %s',
-                               self, rqlst.as_string())
+                               self, select.as_string())
                 return ()
         finally:
-            rqlst.recover()
+            select.recover()
         # don't call rset_vocabulary on empty result set, it may be an empty
         # *list* (see rqlexec implementation)
         return rset and self.rset_vocabulary(rset)
 
-    def support_and(self):
-        return False
-
     def add_rql_restrictions(self):
         """add restriction for this facet into the rql syntax tree"""
         value = self._cw.form.get(self.__regid__)
         if not value:
             return
-        mainvar = self.filtered_variable
-        self.rqlst.add_constant_restriction(mainvar, self.rtype, value,
-                                            self.attrtype, self.comparator)
+        filtered_variable = self.filtered_variable
+        self.select.add_constant_restriction(filtered_variable, self.rtype, value,
+                                            self.target_attr_type, self.comparator)
+
+
+class RQLPathFacet(RelationFacet):
+    """Base facet to filter some entities according to an arbitrary rql
+    path. Path should be specified as a list of 3-uples or triplet string, where
+    'X' represents the filtered variable. You should specify using
+    `filter_variable` the snippet variable that will be used to filter out
+    results. You may also specify a `label_variable`. If you want to filter on
+    an attribute value, you usually don't want to specify the latter since it's
+    the same as the filter variable, though you may have to specify the attribute
+    type using `restr_attr_type` if there is some type ambiguity in the schema
+    for the attribute.
+
+    Using this facet, we can rewrite facets we defined previously:
+
+    .. sourcecode:: python
+
+      class AgencyFacet(RQLPathFacet):
+          __regid__ = 'agency'
+          # this facet should only be selected when visualizing offices
+          __select__ = is_instance('Office')
+          # this facet is a filter on the 'Agency' entities linked to the office
+          # through the 'proposed_by' relation, where the office is the subject
+          # of the relation
+          path = ['X has_address O', 'O name N']
+          filter_variable = 'O'
+          label_variable = 'N'
+
+      class PostalCodeFacet(RQLPathFacet):
+          __regid__ = 'postalcode'
+          # this facet should only be selected when visualizing offices
+          __select__ = is_instance('Office')
+          # this facet is a filter on the PostalAddress entities linked to the
+          # office through the 'has_address' relation, where the office is the
+          # subject of the relation
+          path = ['X has_address O', 'O postal_code PC']
+          filter_variable = 'PC'
+
+    Though some features, such as 'no value' or automatic internationalization,
+    won't work. This facet class is designed to be used for cases where
+    :class:`RelationFacet` or :class:`RelationAttributeFacet` can't do the trick
+    (e.g. when you want to filter on entities that are not directly linked to
+    the filtered entities).
+    """
+    __select__ = yes() # we don't want RelationFacet's selector
+    # must be specified
+    path = None
+    filter_variable = None
+    # may be specified
+    label_variable = None
+    # usually guessed, but may be explicitly specified
+    restr_attr = None
+    restr_attr_type = None
+
+    # XXX disabled features
+    i18nable = False
+    no_relation = False
+    support_and = False
+
+    def __init__(self, *args, **kwargs):
+        super(RQLPathFacet, self).__init__(*args, **kwargs)
+        assert self.filter_variable != self.label_variable, \
+            ('filter_variable and label_variable should be different. '
+             'You may want to let label_variable undefined (ie None).')
+        assert self.path and isinstance(self.path, (list, tuple)), \
+            'path should be a list of 3-uples, not %s' % self.path
+        for part in self.path:
+            if isinstance(part, basestring):
+                part = part.split()
+            assert len(part) == 3, \
+                   'path should be a list of 3-uples, not %s' % part
+
+    def __repr__(self):
+        return '<%s %s>' % (self.__class__.__name__,
+                            ','.join(str(p) for p in self.path))
+
+    def vocabulary(self):
+        """return vocabulary for this facet, eg a list of (label, value)"""
+        select = self.select
+        select.save_state()
+        if self.rql_sort:
+            sort = self.sortasc
+        else:
+            sort = None # will be sorted on label
+        try:
+            cleanup_select(select, self.filtered_variable)
+            varmap, restrvar = self.add_path_to_select()
+            select.append_selected(nodes.VariableRef(restrvar))
+            if self.label_variable:
+                attrvar = varmap[self.label_variable]
+            else:
+                attrvar = restrvar
+            select.append_selected(nodes.VariableRef(attrvar))
+            if sort is not None:
+                _set_orderby(select, attrvar, sort, self.sortfunc)
+            try:
+                rset = self.rqlexec(select.as_string(), self.cw_rset.args)
+            except Exception:
+                self.exception('error while getting vocabulary for %s, rql: %s',
+                               self, select.as_string())
+                return ()
+        finally:
+            select.recover()
+        # don't call rset_vocabulary on empty result set, it may be an empty
+        # *list* (see rqlexec implementation)
+        values = rset and self.rset_vocabulary(rset) or []
+        if self._include_no_relation():
+            values.insert(0, (self._cw._(self.no_relation_label), ''))
+        return values
+
+    def possible_values(self):
+        """return a list of possible values (as string since it's used to
+        compare to a form value in javascript) for this facet
+        """
+        select = self.select
+        select.save_state()
+        try:
+            cleanup_select(select, self.filtered_variable)
+            varmap, restrvar = self.add_path_to_select(skiplabel=True)
+            select.append_selected(nodes.VariableRef(restrvar))
+            values = [unicode(x) for x, in self.rqlexec(select.as_string())]
+        except Exception:
+            self.exception('while computing values for %s', self)
+            return []
+        finally:
+            select.recover()
+        if self._include_no_relation():
+            values.append('')
+        return values
+
+    def add_rql_restrictions(self):
+        """add restriction for this facet into the rql syntax tree"""
+        value = self._cw.form.get(self.__regid__)
+        if value is None:
+            return
+        varmap, restrvar = self.add_path_to_select(
+            skiplabel=True, skipattrfilter=True)
+        self.value_restriction(restrvar, None, value)
+
+    def add_path_to_select(self, skiplabel=False, skipattrfilter=False):
+        varmap = {'X': self.filtered_variable}
+        actual_filter_variable = None
+        for part in self.path:
+            if isinstance(part, basestring):
+                part = part.split()
+            subject, rtype, object = part
+            if skiplabel and object == self.label_variable:
+                continue
+            if object == self.filter_variable:
+                rschema = self._cw.vreg.schema.rschema(rtype)
+                if rschema.final:
+                    # filter variable is an attribute variable
+                    if self.restr_attr is None:
+                        self.restr_attr = rtype
+                    if self.restr_attr_type is None:
+                        attrtypes = set(obj for subj,obj in rschema.rdefs)
+                        if len(attrtypes) > 1:
+                            raise Exception('ambigous attribute %s, specify attrtype on %s'
+                                            % (rtype, self.__class__))
+                        self.restr_attr_type = iter(attrtypes).next()
+                    if skipattrfilter:
+                        actual_filter_variable = subject
+                        continue
+            subjvar = _get_var(self.select, subject, varmap)
+            objvar = _get_var(self.select, object, varmap)
+            rel = nodes.make_relation(subjvar, rtype, (objvar,),
+                                      nodes.VariableRef)
+            self.select.add_restriction(rel)
+        if self.restr_attr is None:
+            self.restr_attr = 'eid'
+        if self.restr_attr_type is None:
+            self.restr_attr_type = 'Int'
+        if actual_filter_variable:
+            restrvar = varmap[actual_filter_variable]
+        else:
+            restrvar = varmap[self.filter_variable]
+        return varmap, restrvar
 
 
 class RangeFacet(AttributeFacet):
@@ -928,12 +1204,54 @@
 
     .. _jquery: http://www.jqueryui.com/
     """
-    attrtype = 'Float' # only numerical types are supported
+    target_attr_type = 'Float' # only numerical types are supported
+    needs_update = False # not supported actually
 
     @property
     def wdgclass(self):
         return FacetRangeWidget
 
+    def _range_rset(self):
+        select = self.select
+        select.save_state()
+        try:
+            filtered_variable = self.filtered_variable
+            cleanup_select(select, filtered_variable)
+            newvar = _add_rtype_relation(select, filtered_variable, self.rtype, self.role)[0]
+            minf = nodes.Function('MIN')
+            minf.append(nodes.VariableRef(newvar))
+            select.add_selected(minf)
+            maxf = nodes.Function('MAX')
+            maxf.append(nodes.VariableRef(newvar))
+            select.add_selected(maxf)
+            # add is restriction if necessary
+            if filtered_variable.stinfo['typerel'] is None:
+                etypes = frozenset(sol[filtered_variable.name] for sol in select.solutions)
+                select.add_type_restriction(filtered_variable, etypes)
+            try:
+                return self.rqlexec(select.as_string(), self.cw_rset.args)
+            except Exception:
+                self.exception('error while getting vocabulary for %s, rql: %s',
+                               self, select.as_string())
+                return ()
+        finally:
+            select.recover()
+
+    def vocabulary(self):
+        """return vocabulary for this facet, eg a list of 2-uple (label, value)
+        """
+        rset = self._range_rset()
+        if rset:
+            minv, maxv = rset[0]
+            return [(unicode(minv), minv), (unicode(maxv), maxv)]
+        return []
+
+    def possible_values(self):
+        """Return a list of possible values (as string since it's used to
+        compare to a form value in javascript) for this facet.
+        """
+        return [strval for strval, val in self.vocabulary()]
+
     def get_widget(self):
         """return the widget instance to use to display this facet"""
         values = set(value for _, value in self.vocabulary() if value is not None)
@@ -964,15 +1282,16 @@
         # when a value is equal to one of the limit, don't add the restriction,
         # else we filter out NULL values implicitly
         if infvalue != self.infvalue(min=True):
-            self.rqlst.add_constant_restriction(self.filtered_variable,
-                                                self.rtype,
-                                                self.formatvalue(infvalue),
-                                                self.attrtype, '>=')
+            self._add_restriction(infvalue, '>=')
         if supvalue != self.supvalue(max=True):
-            self.rqlst.add_constant_restriction(self.filtered_variable,
-                                                self.rtype,
-                                                self.formatvalue(supvalue),
-                                                self.attrtype, '<=')
+            self._add_restriction(supvalue, '<=')
+
+    def _add_restriction(self, value, operator):
+        self.select.add_constant_restriction(self.filtered_variable,
+                                             self.rtype,
+                                             self.formatvalue(value),
+                                             self.target_attr_type, operator)
+
 
 
 class DateRangeFacet(RangeFacet):
@@ -983,7 +1302,7 @@
 
     .. image:: ../images/facet_date_range.png
     """
-    attrtype = 'Date' # only date types are supported
+    target_attr_type = 'Date' # only date types are supported
 
     @property
     def wdgclass(self):
@@ -1023,9 +1342,8 @@
     role = 'subject' # role of filtered entity in the relation
 
     title = property(rtype_facet_title)
-
-    def support_and(self):
-        return False
+    needs_update = False # not supported actually
+    support_and = False
 
     def get_widget(self):
         return CheckBoxFacetWidget(self._cw, self,
@@ -1034,69 +1352,97 @@
 
     def add_rql_restrictions(self):
         """add restriction for this facet into the rql syntax tree"""
-        self.rqlst.set_distinct(True) # XXX
+        self.select.set_distinct(True) # XXX
         value = self._cw.form.get(self.__regid__)
         if not value: # no value sent for this facet
             return
-        var = self.rqlst.make_variable()
+        var = self.select.make_variable()
         if self.role == 'subject':
-            self.rqlst.add_relation(self.filtered_variable, self.rtype, var)
+            self.select.add_relation(self.filtered_variable, self.rtype, var)
         else:
-            self.rqlst.add_relation(var, self.rtype, self.filtered_variable)
+            self.select.add_relation(var, self.rtype, self.filtered_variable)
 
 
 ## html widets ################################################################
+_DEFAULT_CONSTANT_VOCAB_WIDGET_HEIGHT = 9
 
-class FacetVocabularyWidget(HTMLWidget):
+@cached
+def _css_height_to_line_count(vreg):
+    cssprop = vreg.config.uiprops['facet_overflowedHeight'].lower().strip()
+    # let's talk a bit ...
+    # we try to deduce a number of displayed lines from a css property
+    # there is a linear (rough empiric coefficient == 0.73) relation between
+    # css _em_ value and line qty
+    # if we get another unit we're out of luck and resort to one constant
+    # hence, it is strongly advised to specify this css prop in em units only
+    if cssprop.endswith('em'):
+        try:
+            return int(cssprop[:-2]) * .73
+        except Exception:
+            vreg.warning('css property facet_overflowedHeight looks malformed (%r)',
+                         cssprop)
+    return _DEFAULT_CONSTANT_VOCAB_WIDGET_HEIGHT
+
+class FacetVocabularyWidget(htmlwidgets.HTMLWidget):
 
     def __init__(self, facet):
         self.facet = facet
         self.items = []
 
+    @cached
+    def height(self):
+        maxheight = _css_height_to_line_count(self.facet._cw.vreg)
+        return 1 + min(len(self.items), maxheight) + int(self.facet._support_and_compat())
+
     def append(self, item):
         self.items.append(item)
 
     def _render(self):
+        w = self.w
         title = xml_escape(self.facet.title)
-        facetid = xml_escape(self.facet.__regid__)
-        self.w(u'<div id="%s" class="facet">\n' % facetid)
-        self.w(u'<div class="facetTitle" cubicweb:facetName="%s">%s</div>\n' %
-               (xml_escape(facetid), title))
-        if self.facet.support_and():
+        facetid = make_uid(self.facet.__regid__)
+        w(u'<div id="%s" class="facet">\n' % facetid)
+        w(u'<div class="facetTitle" cubicweb:facetName="%s">%s</div>\n' %
+          (xml_escape(self.facet.__regid__), title))
+        if self.facet._support_and_compat():
             _ = self.facet._cw._
-            self.w(u'''<select name="%s" class="radio facetOperator" title="%s">
+            w(u'''<select name="%s" class="radio facetOperator" title="%s">
   <option value="OR">%s</option>
   <option value="AND">%s</option>
-</select>''' % (facetid + '_andor', _('and/or between different values'),
+</select>''' % (xml_escape(self.facet.__regid__) + '_andor', _('and/or between different values'),
                 _('OR'), _('AND')))
         cssclass = 'facetBody'
         if not self.facet.start_unfolded:
             cssclass += ' hidden'
         if len(self.items) > 6:
             cssclass += ' overflowed'
-        self.w(u'<div class="%s">\n' % cssclass)
+        w(u'<div class="%s">\n' % cssclass)
         for item in self.items:
-            item.render(w=self.w)
-        self.w(u'</div>\n')
-        self.w(u'</div>\n')
+            item.render(w=w)
+        w(u'</div>\n')
+        w(u'</div>\n')
 
 
-class FacetStringWidget(HTMLWidget):
+class FacetStringWidget(htmlwidgets.HTMLWidget):
     def __init__(self, facet):
         self.facet = facet
         self.value = None
 
+    def height(self):
+        return 3
+
     def _render(self):
+        w = self.w
         title = xml_escape(self.facet.title)
-        facetid = xml_escape(self.facet.__regid__)
-        self.w(u'<div id="%s" class="facet">\n' % facetid)
-        self.w(u'<div class="facetTitle" cubicweb:facetName="%s">%s</div>\n' %
-               (facetid, title))
-        self.w(u'<input name="%s" type="text" value="%s" />\n' % (facetid, self.value or u''))
-        self.w(u'</div>\n')
+        facetid = make_uid(self.facet.__regid__)
+        w(u'<div id="%s" class="facet">\n' % facetid)
+        w(u'<div class="facetTitle" cubicweb:facetName="%s">%s</div>\n' %
+               (xml_escape(self.facet.__regid__), title))
+        w(u'<input name="%s" type="text" value="%s" />\n' % (facetid, self.value or u''))
+        w(u'</div>\n')
 
 
-class FacetRangeWidget(HTMLWidget):
+class FacetRangeWidget(htmlwidgets.HTMLWidget):
     formatter = 'function (value) {return value;}'
     onload = u'''
     var _formatter = %(formatter)s;
@@ -1107,18 +1453,18 @@
         values: [%(minvalue)s, %(maxvalue)s],
         stop: function(event, ui) { // submit when the user stops sliding
            var form = $('#%(sliderid)s').closest('form');
-           buildRQL.apply(null, evalJSON(form.attr('cubicweb:facetargs')));
+           buildRQL.apply(null, cw.evalJSON(form.attr('cubicweb:facetargs')));
         },
         slide: function(event, ui) {
             jQuery('#%(sliderid)s_inf').html(_formatter(ui.values[0]));
             jQuery('#%(sliderid)s_sup').html(_formatter(ui.values[1]));
-            jQuery('input[name=%(facetid)s_inf]').val(ui.values[0]);
-            jQuery('input[name=%(facetid)s_sup]').val(ui.values[1]);
+            jQuery('input[name="%(facetname)s_inf"]').val(ui.values[0]);
+            jQuery('input[name="%(facetname)s_sup"]').val(ui.values[1]);
         }
    });
    // use JS formatter to format value on page load
-   jQuery('#%(sliderid)s_inf').html(_formatter(jQuery('input[name=%(facetid)s_inf]').val()));
-   jQuery('#%(sliderid)s_sup').html(_formatter(jQuery('input[name=%(facetid)s_sup]').val()));
+   jQuery('#%(sliderid)s_inf').html(_formatter(jQuery('input[name="%(facetname)s_inf"]').val()));
+   jQuery('#%(sliderid)s_sup').html(_formatter(jQuery('input[name="%(facetname)s_sup"]').val()));
 '''
     #'# make emacs happier
     def __init__(self, facet, minvalue, maxvalue):
@@ -1126,40 +1472,47 @@
         self.minvalue = minvalue
         self.maxvalue = maxvalue
 
+    def height(self):
+        return 3
+
     def _render(self):
+        w = self.w
         facet = self.facet
         facet._cw.add_js('jquery.ui.js')
         facet._cw.add_css('jquery.ui.css')
         sliderid = make_uid('theslider')
-        facetid = xml_escape(self.facet.__regid__)
+        facetname = self.facet.__regid__
+        facetid = make_uid(facetname)
         facet._cw.html_headers.add_onload(self.onload % {
             'sliderid': sliderid,
             'facetid': facetid,
+            'facetname': facetname,
             'minvalue': self.minvalue,
             'maxvalue': self.maxvalue,
             'formatter': self.formatter,
             })
         title = xml_escape(self.facet.title)
-        self.w(u'<div id="%s" class="facet">\n' % facetid)
-        self.w(u'<div class="facetTitle" cubicweb:facetName="%s">%s</div>\n' %
-               (facetid, title))
+        facetname = xml_escape(facetname)
+        w(u'<div id="%s" class="facet">\n' % facetid)
+        w(u'<div class="facetTitle" cubicweb:facetName="%s">%s</div>\n' %
+          (facetname, title))
         cssclass = 'facetBody'
         if not self.facet.start_unfolded:
             cssclass += ' hidden'
-        self.w(u'<div class="%s">\n' % cssclass)
-        self.w(u'<span id="%s_inf"></span> - <span id="%s_sup"></span>'
-               % (sliderid, sliderid))
-        self.w(u'<input type="hidden" name="%s_inf" value="%s" />'
-               % (facetid, self.minvalue))
-        self.w(u'<input type="hidden" name="%s_sup" value="%s" />'
-               % (facetid, self.maxvalue))
-        self.w(u'<input type="hidden" name="min_%s_inf" value="%s" />'
-               % (facetid, self.minvalue))
-        self.w(u'<input type="hidden" name="max_%s_sup" value="%s" />'
-               % (facetid, self.maxvalue))
-        self.w(u'<div id="%s"></div>' % sliderid)
-        self.w(u'</div>\n')
-        self.w(u'</div>\n')
+        w(u'<div class="%s">\n' % cssclass)
+        w(u'<span id="%s_inf"></span> - <span id="%s_sup"></span>'
+          % (sliderid, sliderid))
+        w(u'<input type="hidden" name="%s_inf" value="%s" />'
+          % (facetname, self.minvalue))
+        w(u'<input type="hidden" name="%s_sup" value="%s" />'
+          % (facetname, self.maxvalue))
+        w(u'<input type="hidden" name="min_%s_inf" value="%s" />'
+          % (facetname, self.minvalue))
+        w(u'<input type="hidden" name="max_%s_sup" value="%s" />'
+          % (facetname, self.maxvalue))
+        w(u'<div id="%s"></div>' % sliderid)
+        w(u'</div>\n')
+        w(u'</div>\n')
 
 
 class DateFacetRangeWidget(FacetRangeWidget):
@@ -1179,7 +1532,7 @@
         facet._cw.html_headers.define_var('DATE_FMT', fmt)
 
 
-class FacetItem(HTMLWidget):
+class FacetItem(htmlwidgets.HTMLWidget):
 
     selected_img = "black-check.png"
     unselected_img = "no-check-no-border.png"
@@ -1191,6 +1544,7 @@
         self.selected = selected
 
     def _render(self):
+        w = self.w
         cssclass = 'facetValue facetCheckBox'
         if self.selected:
             cssclass += ' facetValueSelected'
@@ -1199,14 +1553,14 @@
         else:
             imgsrc = self._cw.data_url(self.unselected_img)
             imgalt = self._cw._('not selected')
-        self.w(u'<div class="%s" cubicweb:value="%s">\n'
-               % (cssclass, xml_escape(unicode(self.value))))
-        self.w(u'<img src="%s" alt="%s"/>&#160;' % (imgsrc, imgalt))
-        self.w(u'<a href="javascript: {}">%s</a>' % xml_escape(self.label))
-        self.w(u'</div>')
+        w(u'<div class="%s" cubicweb:value="%s">\n'
+          % (cssclass, xml_escape(unicode(self.value))))
+        w(u'<img src="%s" alt="%s"/>&#160;' % (imgsrc, imgalt))
+        w(u'<a href="javascript: {}">%s</a>' % xml_escape(self.label))
+        w(u'</div>')
 
 
-class CheckBoxFacetWidget(HTMLWidget):
+class CheckBoxFacetWidget(htmlwidgets.HTMLWidget):
     selected_img = "black-check.png"
     unselected_img = "black-uncheck.png"
 
@@ -1216,10 +1570,14 @@
         self.value = value
         self.selected = selected
 
+    def height(self):
+        return 2
+
     def _render(self):
+        w = self.w
         title = xml_escape(self.facet.title)
-        facetid = xml_escape(self.facet.__regid__)
-        self.w(u'<div id="%s" class="facet">\n' % facetid)
+        facetid = make_uid(self.facet.__regid__)
+        w(u'<div id="%s" class="facet">\n' % facetid)
         cssclass = 'facetValue facetCheckBox'
         if self.selected:
             cssclass += ' facetValueSelected'
@@ -1228,17 +1586,18 @@
         else:
             imgsrc = self._cw.data_url(self.unselected_img)
             imgalt = self._cw._('not selected')
-        self.w(u'<div class="%s" cubicweb:value="%s">\n'
-               % (cssclass, xml_escape(unicode(self.value))))
-        self.w(u'<div class="facetCheckBoxWidget">')
-        self.w(u'<img src="%s" alt="%s" cubicweb:unselimg="true" />&#160;' % (imgsrc, imgalt))
-        self.w(u'<label class="facetTitle" cubicweb:facetName="%s"><a href="javascript: {}">%s</a></label>' % (facetid, title))
-        self.w(u'</div>\n')
-        self.w(u'</div>\n')
-        self.w(u'</div>\n')
+        w(u'<div class="%s" cubicweb:value="%s">\n'
+          % (cssclass, xml_escape(unicode(self.value))))
+        w(u'<div class="facetCheckBoxWidget">')
+        w(u'<img src="%s" alt="%s" cubicweb:unselimg="true" />&#160;' % (imgsrc, imgalt))
+        w(u'<label class="facetTitle" cubicweb:facetName="%s"><a href="javascript: {}">%s</a></label>'
+          % (xml_escape(self.facet.__regid__), title))
+        w(u'</div>\n')
+        w(u'</div>\n')
+        w(u'</div>\n')
 
 
-class FacetSeparator(HTMLWidget):
+class FacetSeparator(htmlwidgets.HTMLWidget):
     def __init__(self, label=None):
         self.label = label or u'&#160;'
 
@@ -1253,15 +1612,15 @@
     def __init__(self, req):
         self._cw = req
 
-    def build_rql(self):#, tablefilter=False):
+    def build_rql(self):
         form = self._cw.form
         facetids = form['facets'].split(',')
         # XXX Union unsupported yet
         select = self._cw.vreg.parse(self._cw, form['baserql']).children[0]
-        mainvar = filtered_variable(select)
+        filtered_variable = get_filtered_variable(select, form.get('mainvar'))
         toupdate = []
         for facetid in facetids:
-            facet = get_facet(self._cw, facetid, select, mainvar)
+            facet = get_facet(self._cw, facetid, select, filtered_variable)
             facet.add_rql_restrictions()
             if facet.needs_update:
                 toupdate.append(facetid)
--- a/web/form.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/form.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -82,6 +82,9 @@
     force_session_key = None
     domid = 'form'
     copy_nav_params = False
+    control_fields = set( ('__form_id', '__errorurl', '__domid',
+                           '__redirectpath', '_cwmsgid', '__message',
+                           ) )
 
     def __init__(self, req, rset=None, row=None, col=None,
                  submitmsg=None, mainform=True, **kwargs):
@@ -112,7 +115,12 @@
                     if value:
                         self.add_hidden(param, value)
         if submitmsg is not None:
-            self.add_hidden(u'__message', submitmsg)
+            self.set_message(submitmsg)
+
+    def set_message(self, submitmsg):
+        """set the submit message, using the _cwmsgid mechanism"""
+        cwmsgid = self._cw.set_redirect_message(submitmsg)
+        self.add_hidden(u'_cwmsgid', cwmsgid)
 
     @property
     def root_form(self):
--- a/web/formfields.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/formfields.py	Fri Dec 09 12:08:27 2011 +0100
@@ -28,7 +28,7 @@
 
 .. autoclass:: cubicweb.web.formfields.Field
 
-Now, you usually don't use that class but one of the concret field classes
+Now, you usually don't use that class but one of the concrete field classes
 described below, according to what you want to edit.
 
 Basic fields
@@ -37,6 +37,7 @@
 .. autoclass:: cubicweb.web.formfields.StringField()
 .. autoclass:: cubicweb.web.formfields.PasswordField()
 .. autoclass:: cubicweb.web.formfields.IntField()
+.. autoclass:: cubicweb.web.formfields.BigIntField()
 .. autoclass:: cubicweb.web.formfields.FloatField()
 .. autoclass:: cubicweb.web.formfields.BooleanField()
 .. autoclass:: cubicweb.web.formfields.DateField()
@@ -106,7 +107,7 @@
 class Field(object):
     """This class is the abstract base class for all fields. It hold a bunch
     of attributes which may be used for fine control of the behaviour of a
-    concret field.
+    concrete field.
 
     **Attributes**
 
@@ -147,7 +148,9 @@
        bool flag telling if this field is linked to a specific entity
     :attr:`role`
        when the field is linked to an entity attribute or relation, tells the
-       role of the entity in the relation (eg 'subject' or 'object')
+       role of the entity in the relation (eg 'subject' or 'object'). If this is
+       not an attribute or relation of the edited entity, `role` should be
+       `None`.
     :attr:`fieldset`
        optional fieldset to which this field belongs to
     :attr:`order`
@@ -346,6 +349,7 @@
     def initial_typed_value(self, form, load_bytes):
         if self.value is not _MARKER:
             if callable(self.value):
+                # pylint: disable=E1102
                 if support_args(self.value, 'form', 'field'):
                     return self.value(form, self)
                 else:
@@ -386,6 +390,7 @@
         """
         assert self.choices is not None
         if callable(self.choices):
+            # pylint: disable=E1102
             if getattr(self.choices, 'im_self', None) is self:
                 vocab = self.choices(form=form, **kwargs)
             elif support_args(self.choices, 'form', 'field'):
@@ -393,11 +398,11 @@
             else:
                 try:
                     vocab = self.choices(form=form, **kwargs)
-                    warn('[3.6]  %s: choices should now take '
+                    warn('[3.6] %s: choices should now take '
                          'the form and field as named arguments' % self,
                          DeprecationWarning)
                 except TypeError:
-                    warn('[3.3]  %s: choices should now take '
+                    warn('[3.3] %s: choices should now take '
                          'the form and field as named arguments' % self,
                          DeprecationWarning)
                     vocab = self.choices(req=form._cw, **kwargs)
@@ -829,21 +834,25 @@
         return super(EditableFileField, self)._process_form_value(form)
 
 
-class IntField(Field):
-    """Use this field to edit integers (`Int` yams type). This field additionaly
-    support `min` and `max` attributes that specify a minimum and/or maximum
-    value for the integer (`None` meaning no boundary).
+class BigIntField(Field):
+    """Use this field to edit big integers (`BigInt` yams type). This field
+    additionaly support `min` and `max` attributes that specify a minimum and/or
+    maximum value for the integer (`None` meaning no boundary).
 
     Unless explicitly specified, the widget for this field will be a
     :class:`~cubicweb.web.formwidgets.TextInput`.
     """
+    default_text_input_size = 10
+
     def __init__(self, min=None, max=None, **kwargs):
-        super(IntField, self).__init__(**kwargs)
+        super(BigIntField, self).__init__(**kwargs)
         self.min = min
         self.max = max
+
+    def init_widget(self, widget):
+        super(BigIntField, self).init_widget(widget)
         if isinstance(self.widget, fw.TextInput):
-            self.widget.attrs.setdefault('size', 5)
-            self.widget.attrs.setdefault('maxlength', 15)
+            self.widget.attrs.setdefault('size', self.default_text_input_size)
 
     def _ensure_correctly_typed(self, form, value):
         if isinstance(value, basestring):
@@ -857,6 +866,19 @@
         return value
 
 
+class IntField(BigIntField):
+    """Use this field to edit integers (`Int` yams type). Similar to
+    :class:`~cubicweb.web.formfields.BigIntField` but set max length when text
+    input widget is used (the default).
+    """
+    default_text_input_size = 5
+
+    def init_widget(self, widget):
+        super(IntField, self).init_widget(widget)
+        if isinstance(self.widget, fw.TextInput):
+            self.widget.attrs.setdefault('maxlength', 15)
+
+
 class BooleanField(Field):
     """Use this field to edit booleans (`Boolean` yams type).
 
@@ -1207,6 +1229,7 @@
 
     'Boolean':  BooleanField,
     'Int':      IntField,
+    'BigInt':   BigIntField,
     'Float':    FloatField,
     'Decimal':  StringField,
 
--- a/web/formwidgets.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/formwidgets.py	Fri Dec 09 12:08:27 2011 +0100
@@ -110,8 +110,8 @@
 
     **Attributes**
 
-    Here are standard attributes of a widget, that may be set on concret
-    class to override default behaviours:
+    Here are standard attributes of a widget, that may be set on concrete class
+    to override default behaviours:
 
     :attr:`needs_js`
        list of javascript files needed by the widget.
@@ -134,7 +134,7 @@
 
     Also, widget instances takes as first argument a `attrs` dictionary which
     will be stored in the attribute of the same name. It contains HTML
-    attributes that should be set in the widget's input tag (though concret
+    attributes that should be set in the widget's input tag (though concrete
     classes may ignore it).
 
     .. currentmodule:: cubicweb.web.formwidgets
@@ -190,7 +190,7 @@
         return self._render(form, field, renderer)
 
     def _render(self, form, field, renderer):
-        """This is the method you have to implement in concret widget classes.
+        """This is the method you have to implement in concrete widget classes.
         """
         raise NotImplementedError()
 
@@ -232,7 +232,7 @@
         correctly typed value.
 
         3 and 4 are handle by the :meth:`typed_value` method to ease reuse in
-        concret classes.
+        concrete classes.
         """
         values = None
         if not field.ignore_req_params:
--- a/web/http_headers.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/http_headers.py	Fri Dec 09 12:08:27 2011 +0100
@@ -744,7 +744,8 @@
 
 def generateDateTime(secSinceEpoch):
     """Convert seconds since epoch to HTTP datetime string."""
-    year, month, day, hh, mm, ss, wd, y, z = time.gmtime(secSinceEpoch)
+    # take care gmtime doesn't handle time before epoch (crash on windows at least)
+    year, month, day, hh, mm, ss, wd, y, z = time.gmtime(max(0, secSinceEpoch))
     s = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % (
         weekdayname[wd],
         day, monthname[month], year,
@@ -1354,9 +1355,25 @@
         raw_header.append(value)
         self._headers[name] = _RecalcNeeded
 
+    def addHeader(self, name, value):
+        """
+        Add a parsed representation to a header that may or may not already exist.
+        If it exists, add it as a separate header to output; do not
+        replace anything.
+        """
+        name=name.lower()
+        header = self._headers.get(name)
+        if header is None:
+            # No header yet
+            header = []
+            self._headers[name] = header
+        elif header is _RecalcNeeded:
+            header = self._toParsed(name)
+        header.append(value)
+        self._raw_headers[name] = _RecalcNeeded
+
     def removeHeader(self, name):
         """Removes the header named."""
-
         name=name.lower()
         if self._raw_headers.has_key(name):
             del self._raw_headers[name]
--- a/web/request.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/request.py	Fri Dec 09 12:08:27 2011 +0100
@@ -19,11 +19,12 @@
 
 __docformat__ = "restructuredtext en"
 
-import Cookie
-import hashlib
 import time
 import random
 import base64
+from hashlib import sha1 # pylint: disable=E0611
+from Cookie import SimpleCookie
+from calendar import timegm
 from datetime import date
 from urlparse import urlsplit
 from itertools import count
@@ -42,12 +43,13 @@
 from cubicweb.view import STRICT_DOCTYPE, TRANSITIONAL_DOCTYPE_NOEXT
 from cubicweb.web import (INTERNAL_FIELD_VALUE, LOGGER, NothingToEdit,
                           RequestError, StatusResponse)
-from cubicweb.web.http_headers import Headers
+from cubicweb.web.httpcache import GMTOFFSET
+from cubicweb.web.http_headers import Headers, Cookie
 
 _MARKER = object()
 
 def build_cb_uid(seed):
-    sha = hashlib.sha1('%s%s%s' % (time.time(), seed, random.random()))
+    sha = sha1('%s%s%s' % (time.time(), seed, random.random()))
     return 'cb_%s' % (sha.hexdigest())
 
 
@@ -92,7 +94,7 @@
             self.uiprops = vreg.config.uiprops
             self.datadir_url = vreg.config.datadir_url
         # raw html headers that can be added from any view
-        self.html_headers = HTMLHead()
+        self.html_headers = HTMLHead(self.datadir_url)
         # form parameters
         self.setup_params(form)
         # dictionnary that may be used to store request data that has to be
@@ -214,6 +216,12 @@
             if param == '_cwmsgid':
                 self.set_message_id(val)
             elif param == '__message':
+                warn('[3.13] __message in request parameter is deprecated (may '
+                     'only be given to .build_url). Seeing this message usually '
+                     'means your application holds some <form> where you should '
+                     'replace use of __message hidden input by form.set_message, '
+                     'so new _cwmsgid mechanism is properly used',
+                     DeprecationWarning)
                 self.set_message(val)
             else:
                 self.form[param] = val
@@ -256,7 +264,7 @@
         """used by AutomaticWebTest to clear html headers between tests on
         the same resultset
         """
-        self.html_headers = HTMLHead()
+        self.html_headers = HTMLHead(self.datadir_url)
         return self
 
     # web state helpers #######################################################
@@ -264,7 +272,7 @@
     @property
     def message(self):
         try:
-            return self.session.data.pop(self._msgid, '')
+            return self.session.data.pop(self._msgid, u'')
         except AttributeError:
             try:
                 return self._msg
@@ -283,6 +291,7 @@
         return make_uid()
 
     def set_redirect_message(self, msg):
+        # TODO - this should probably be merged with append_to_redirect_message
         assert isinstance(msg, unicode)
         msgid = self.redirect_message_id()
         self.session.data[msgid] = msg
@@ -292,7 +301,7 @@
         msgid = self.redirect_message_id()
         currentmsg = self.session.data.get(msgid)
         if currentmsg is not None:
-            currentmsg = '%s %s' % (currentmsg, msg)
+            currentmsg = u'%s %s' % (currentmsg, msg)
         else:
             currentmsg = msg
         self.session.data[msgid] = currentmsg
@@ -355,10 +364,13 @@
         return self.base_url()
 
     def user_rql_callback(self, rqlargs, *args, **kwargs):
-        """register a user callback to execute some rql query and return an url
-        to call it ready to be inserted in html.
+        """register a user callback to execute some rql query, and return a URL
+        to call that callback which can be inserted in an HTML view.
 
-        rqlargs should be a tuple containing argument to give to the execute function.
+        `rqlargs` should be a tuple containing argument to give to the execute function.
+
+        The first argument following rqlargs must be the message to be
+        displayed after the callback is called.
 
         For other allowed arguments, see :meth:`user_callback` method
         """
@@ -367,8 +379,11 @@
         return self.user_callback(rqlexec, rqlargs, *args, **kwargs)
 
     def user_callback(self, cb, cbargs, *args, **kwargs):
-        """register the given user callback and return an url to call it ready
-        to be inserted in html.
+        """register the given user callback and return a URL which can
+        be inserted in an HTML view. When the URL is accessed, the
+        callback function will be called (as 'cb(req, *cbargs)', and a
+        message will be displayed in the web interface. The third
+        positional argument must be 'msg', containing the message.
 
         You can specify the underlying js function to call using a 'jsfunc'
         named args, to one of :func:`userCallback`,
@@ -415,7 +430,8 @@
 
     @cached # so it's writed only once
     def fckeditor_config(self):
-        self.add_js('fckeditor/fckeditor.js')
+        fckeditor_url = self.build_url('fckeditor/fckeditor.js')
+        self.add_js(fckeditor_url, localfile=False)
         self.html_headers.define_var('fcklang', self.lang)
         self.html_headers.define_var('fckconfigpath',
                                      self.data_url('cubicweb.fckcwconfig.js'))
@@ -454,7 +470,7 @@
             try:
                 name, peid = param.split(':', 1)
             except ValueError:
-                if not param.startswith('__') and param != "eid":
+                if not param.startswith('__') and param not in ('eid', '_cw_fields'):
                     self.warning('param %s mis-formatted', param)
                 continue
             if peid == eid:
@@ -504,30 +520,48 @@
 
     def get_cookie(self):
         """retrieve request cookies, returns an empty cookie if not found"""
+        # XXX use http_headers implementation
         try:
-            return Cookie.SimpleCookie(self.get_header('Cookie'))
+            return SimpleCookie(self.get_header('Cookie'))
         except KeyError:
-            return Cookie.SimpleCookie()
+            return SimpleCookie()
 
-    def set_cookie(self, cookie, key, maxage=300, expires=None):
-        """set / update a cookie key
+    def set_cookie(self, name, value, maxage=300, expires=None, secure=False):
+        """set / update a cookie
 
         by default, cookie will be available for the next 5 minutes.
         Give maxage = None to have a "session" cookie expiring when the
         client close its browser
         """
-        morsel = cookie[key]
-        if maxage is not None:
-            morsel['Max-Age'] = maxage
-        if expires:
-            morsel['expires'] = expires.strftime('%a, %d %b %Y %H:%M:%S %z')
+        if isinstance(name, SimpleCookie):
+            warn('[3.13] set_cookie now takes name and value as two first '
+                 'argument, not anymore cookie object and name',
+                 DeprecationWarning, stacklevel=2)
+            secure = name[value]['secure']
+            name, value = value, name[value].value
+        if maxage: # don't check is None, 0 may be specified
+            assert expires is None, "both max age and expires can't be specified"
+            expires = maxage + time.time()
+        elif expires:
+            expires = timegm((expires + GMTOFFSET).timetuple())
+        else:
+            expires = None
         # make sure cookie is set on the correct path
-        morsel['path'] = self.base_url_path()
-        self.add_header('Set-Cookie', morsel.OutputString())
+        cookie = Cookie(str(name), str(value), self.base_url_path(),
+                        expires=expires, secure=secure)
+        self.headers_out.addHeader('Set-cookie', cookie)
 
-    def remove_cookie(self, cookie, key):
+    def remove_cookie(self, name, bwcompat=None):
         """remove a cookie by expiring it"""
-        self.set_cookie(cookie, key, maxage=0, expires=date(1970, 1, 1))
+        if bwcompat is not None:
+            warn('[3.13] remove_cookie now take only a name as argument',
+                 DeprecationWarning, stacklevel=2)
+            name = bwcompat
+        self.set_cookie(name, '', maxage=0,
+                        # subtracting GMTOFFSET because set_cookie
+                        # expects a localtime and we don't want to
+                        # handle times before the EPOCH
+                        expires=date(1970, 1, 1) - GMTOFFSET) 
 
     def set_content_type(self, content_type, filename=None, encoding=None):
         """set output content type for this request. An optional filename
@@ -617,7 +651,7 @@
         url = self.build_url('json', **extraparams)
         cbname = build_cb_uid(url[:50])
         # think to propagate pageid. XXX see https://www.cubicweb.org/ticket/1753121
-        jscode = 'function %s() { $("#%s").%s; }' % (
+        jscode = u'function %s() { $("#%s").%s; }' % (
             cbname, nodeid, js.loadxhtml(url, {'pageid': self.pageid},
                                          'get', replacemode))
         self.html_headers.add_post_inline_script(jscode)
@@ -625,6 +659,16 @@
 
     # urls/path management ####################################################
 
+    def build_url(self, *args, **kwargs):
+        """return an absolute URL using params dictionary key/values as URL
+        parameters. Values are automatically URL quoted, and the
+        publishing method to use may be specified or will be guessed.
+        """
+        if '__message' in kwargs:
+            msg = kwargs.pop('__message')
+            kwargs['_cwmsgid'] = self.set_redirect_message(msg)
+        return super(CubicWebRequestBase, self).build_url(*args, **kwargs)
+
     def url(self, includeparams=True):
         """return currently accessed url"""
         return self.base_url() + self.relative_path(includeparams)
@@ -755,8 +799,13 @@
         The global doctype and xmldec must also be changed otherwise the browser
         will display '<[' at the beginning of the page
         """
-        self.set_content_type('text/html')
-        self.main_stream.set_doctype(TRANSITIONAL_DOCTYPE_NOEXT)
+        if not self.vreg.config['force-html-content-type']:
+            if not hasattr(self, 'main_stream'):
+                raise Exception("Can't demote to html from an ajax context. You "
+                                "should change force-html-content-type to yes "
+                                "in the instance configuration file.")
+            self.set_content_type('text/html')
+            self.main_stream.set_doctype(TRANSITIONAL_DOCTYPE_NOEXT)
 
     def set_doctype(self, doctype, reset_xmldecl=True):
         """helper method to dynamically change page doctype
@@ -890,10 +939,20 @@
 def _parse_accept_header(raw_header, value_parser=None, value_sort_key=None):
     """returns an ordered list accepted types
 
-    returned value is a list of 2-tuple (value, score), ordered
-    by score. Exact type of `value` will depend on what `value_parser`
-    will reutrn. if `value_parser` is None, then the raw value, as found
-    in the http header, is used.
+    :param value_parser: a function to parse a raw accept chunk. If None
+    is provided, the function defaults to identity. If a function is provided,
+    it must accept 2 parameters ``value`` and ``other_params``. ``value`` is
+    the value found before the first ';', `other_params` is a dictionary
+    built from all other chunks after this first ';'
+
+    :param value_sort_key: a key function to sort values found in the accept
+    header. This function will be passed a 3-tuple
+    (raw_value, parsed_value, score). If None is provided, the default
+    sort_key is 1./score
+
+    :return: a list of 3-tuple (raw_value, parsed_value, score),
+    ordered by score. ``parsed_value`` will be the return value of
+    ``value_parser(raw_value)``
     """
     if value_sort_key is None:
         value_sort_key = lambda infos: 1./infos[-1]
@@ -928,7 +987,7 @@
     'text/html;level=1', `mimetypeinfo` will be ('text', '*', {'level': '1'})
     """
     try:
-        media_type, media_subtype = value.strip().split('/')
+        media_type, media_subtype = value.strip().split('/', 1)
     except ValueError: # safety belt : '/' should always be present
         media_type = value.strip()
         media_subtype = '*'
--- a/web/test/unittest_application.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/test/unittest_application.py	Fri Dec 09 12:08:27 2011 +0100
@@ -196,7 +196,7 @@
         eid = unicode(user.eid)
         req.form = {
             'eid':       eid,
-            '__type:'+eid:    'CWUser', '_cw_edited_fields:'+eid: 'login-subject',
+            '__type:'+eid:    'CWUser', '_cw_entity_fields:'+eid: 'login-subject',
             'login-subject:'+eid:     '', # ERROR: no login specified
              # just a sample, missing some necessary information for real life
             '__errorurl': 'view?vid=edition...'
@@ -221,11 +221,11 @@
         req = self.request()
         # set Y before X to ensure both entities are edited, not only X
         req.form = {'eid': ['Y', 'X'], '__maineid': 'X',
-                    '__type:X': 'CWUser', '_cw_edited_fields:X': 'login-subject',
+                    '__type:X': 'CWUser', '_cw_entity_fields:X': 'login-subject',
                     # missing required field
                     'login-subject:X': u'',
                     # but email address is set
-                    '__type:Y': 'EmailAddress', '_cw_edited_fields:Y': 'address-subject',
+                    '__type:Y': 'EmailAddress', '_cw_entity_fields:Y': 'address-subject',
                     'address-subject:Y': u'bougloup@logilab.fr',
                     'use_email-object:Y': 'X',
                     # necessary to get validation error handling
@@ -250,11 +250,11 @@
         req = self.request()
         # set Y before X to ensure both entities are edited, not only X
         req.form = {'eid': ['Y', 'X'], '__maineid': 'X',
-                    '__type:X': 'CWUser', '_cw_edited_fields:X': 'login-subject,upassword-subject',
+                    '__type:X': 'CWUser', '_cw_entity_fields:X': 'login-subject,upassword-subject',
                     # already existent user
                     'login-subject:X': u'admin',
                     'upassword-subject:X': u'admin', 'upassword-subject-confirm:X': u'admin',
-                    '__type:Y': 'EmailAddress', '_cw_edited_fields:Y': 'address-subject',
+                    '__type:Y': 'EmailAddress', '_cw_entity_fields:Y': 'address-subject',
                     'address-subject:Y': u'bougloup@logilab.fr',
                     'use_email-object:Y': 'X',
                     # necessary to get validation error handling
--- a/web/test/unittest_facet.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/test/unittest_facet.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,24 +1,27 @@
+from logilab.common.date import datetime2ticks
 from cubicweb.devtools.testlib import CubicWebTC
 from cubicweb.web import facet
 
 class BaseFacetTC(CubicWebTC):
 
-    def prepare_rqlst(self):
+    def prepare_rqlst(self, rql='CWUser X', mainvar='X',
+                      expected_baserql='Any X WHERE X is CWUser',
+                      expected_preparedrql='DISTINCT Any  WHERE X is CWUser'):
         req = self.request()
-        rset = self.execute('CWUser X')
+        rset = self.execute(rql)
         rqlst = rset.syntax_tree().copy()
-        req.vreg.rqlhelper.annotate(rqlst)
-        mainvar, baserql = facet.prepare_facets_rqlst(rqlst, rset.args)
-        self.assertEqual(mainvar.name, 'X')
-        self.assertEqual(baserql, 'Any X WHERE X is CWUser')
-        self.assertEqual(rqlst.as_string(), 'DISTINCT Any  WHERE X is CWUser')
-        return req, rset, rqlst, mainvar
+        filtered_variable, baserql = facet.init_facets(rset, rqlst.children[0],
+                                                       mainvar=mainvar)
+        self.assertEqual(filtered_variable.name, mainvar)
+        self.assertEqual(baserql, expected_baserql)
+        self.assertEqual(rqlst.as_string(), expected_preparedrql)
+        return req, rset, rqlst, filtered_variable
 
     def _in_group_facet(self, cls=facet.RelationFacet, no_relation=False):
-        req, rset, rqlst, mainvar = self.prepare_rqlst()
+        req, rset, rqlst, filtered_variable = self.prepare_rqlst()
         cls.no_relation = no_relation
-        f = cls(req, rset=rset, rqlst=rqlst.children[0],
-                filtered_variable=mainvar)
+        f = cls(req, rset=rset, select=rqlst.children[0],
+                filtered_variable=filtered_variable)
         f.__regid__ = 'in_group'
         f.rtype = 'in_group'
         f.role = 'subject'
@@ -34,30 +37,46 @@
         self.assertEqual(f.vocabulary(),
                       [(u'guests', guests), (u'managers', managers)])
         # ensure rqlst is left unmodified
-        self.assertEqual(f.rqlst.as_string(), 'DISTINCT Any  WHERE X is CWUser')
+        self.assertEqual(f.select.as_string(), 'DISTINCT Any  WHERE X is CWUser')
         #rqlst = rset.syntax_tree()
         self.assertEqual(f.possible_values(),
                           [str(guests), str(managers)])
         # ensure rqlst is left unmodified
-        self.assertEqual(f.rqlst.as_string(), 'DISTINCT Any  WHERE X is CWUser')
+        self.assertEqual(f.select.as_string(), 'DISTINCT Any  WHERE X is CWUser')
         f._cw.form[f.__regid__] = str(guests)
         f.add_rql_restrictions()
         # selection is cluttered because rqlst has been prepared for facet (it
         # is not in real life)
-        self.assertEqual(f.rqlst.as_string(),
-                          'DISTINCT Any  WHERE X is CWUser, X in_group D, D eid %s' % guests)
+        self.assertEqual(f.select.as_string(),
+                         'DISTINCT Any  WHERE X is CWUser, X in_group D, D eid %s' % guests)
+
+    def test_relation_multiple_and(self):
+        f, (guests, managers) = self._in_group_facet()
+        f._cw.form[f.__regid__] = [str(guests), str(managers)]
+        f._cw.form[f.__regid__ + '_andor'] = 'AND'
+        f.add_rql_restrictions()
+        self.assertEqual(f.select.as_string(),
+                         'DISTINCT Any  WHERE X is CWUser, X in_group A, B eid %s, X in_group B, A eid %s' % (guests, managers))
+
+    def test_relation_multiple_or(self):
+        f, (guests, managers) = self._in_group_facet()
+        f._cw.form[f.__regid__] = [str(guests), str(managers)]
+        f._cw.form[f.__regid__ + '_andor'] = 'OR'
+        f.add_rql_restrictions()
+        self.assertEqual(f.select.as_string(),
+                         'DISTINCT Any  WHERE X is CWUser, X in_group A, A eid IN(%s, %s)' % (guests, managers))
 
     def test_relation_optional_rel(self):
         req = self.request()
         rset = self.execute('Any X,GROUP_CONCAT(GN) GROUPBY X '
                             'WHERE X in_group G?, G name GN, NOT G name "users"')
         rqlst = rset.syntax_tree().copy()
-        req.vreg.rqlhelper.annotate(rqlst)
-        mainvar, baserql = facet.prepare_facets_rqlst(rqlst, rset.args)
+        select = rqlst.children[0]
+        filtered_variable, baserql = facet.init_facets(rset, select)
 
         f = facet.RelationFacet(req, rset=rset,
-                                rqlst=rqlst.children[0],
-                                filtered_variable=mainvar)
+                                select=select,
+                                filtered_variable=filtered_variable)
         f.rtype = 'in_group'
         f.role = 'subject'
         f.target_attr = 'name'
@@ -76,7 +95,7 @@
         f.add_rql_restrictions()
         # selection is cluttered because rqlst has been prepared for facet (it
         # is not in real life)
-        self.assertEqual(f.rqlst.as_string(),
+        self.assertEqual(f.select.as_string(),
                           'DISTINCT Any  WHERE X in_group G?, G name GN, NOT G name "users", X in_group D, D eid %s' % guests)
 
     def test_relation_no_relation_1(self):
@@ -92,18 +111,18 @@
                           [str(guests), str(managers), ''])
         f._cw.form[f.__regid__] = ''
         f.add_rql_restrictions()
-        self.assertEqual(f.rqlst.as_string(),
+        self.assertEqual(f.select.as_string(),
                           'DISTINCT Any  WHERE X is CWUser, NOT X in_group G')
 
     def test_relation_no_relation_2(self):
         f, (guests, managers) = self._in_group_facet(no_relation=True)
         f._cw.form[f.__regid__] = ['', guests]
-        f.rqlst.save_state()
+        f.select.save_state()
         f.add_rql_restrictions()
-        self.assertEqual(f.rqlst.as_string(),
+        self.assertEqual(f.select.as_string(),
                           'DISTINCT Any  WHERE X is CWUser, (NOT X in_group B) OR (X in_group A, A eid %s)' % guests)
-        f.rqlst.recover()
-        self.assertEqual(f.rqlst.as_string(),
+        f.select.recover()
+        self.assertEqual(f.select.as_string(),
                           'DISTINCT Any  WHERE X is CWUser')
 
 
@@ -113,25 +132,52 @@
         self.assertEqual(f.vocabulary(),
                           [(u'guests', u'guests'), (u'managers', u'managers')])
         # ensure rqlst is left unmodified
-        self.assertEqual(f.rqlst.as_string(), 'DISTINCT Any  WHERE X is CWUser')
+        self.assertEqual(f.select.as_string(), 'DISTINCT Any  WHERE X is CWUser')
         #rqlst = rset.syntax_tree()
         self.assertEqual(f.possible_values(),
                           ['guests', 'managers'])
         # ensure rqlst is left unmodified
-        self.assertEqual(f.rqlst.as_string(), 'DISTINCT Any  WHERE X is CWUser')
+        self.assertEqual(f.select.as_string(), 'DISTINCT Any  WHERE X is CWUser')
         f._cw.form[f.__regid__] = 'guests'
         f.add_rql_restrictions()
         # selection is cluttered because rqlst has been prepared for facet (it
         # is not in real life)
-        self.assertEqual(f.rqlst.as_string(),
+        self.assertEqual(f.select.as_string(),
                           "DISTINCT Any  WHERE X is CWUser, X in_group E, E name 'guests'")
 
+    def test_daterange(self):
+        req, rset, rqlst, filtered_variable = self.prepare_rqlst()
+        f = facet.DateRangeFacet(req, rset=rset,
+                                 select=rqlst.children[0],
+                                 filtered_variable=filtered_variable)
+        f.rtype = 'creation_date'
+        mind, maxd = self.execute('Any MIN(CD), MAX(CD) WHERE X is CWUser, X creation_date CD')[0]
+        self.assertEqual(f.vocabulary(),
+                          [(str(mind), mind),
+                           (str(maxd), maxd)])
+        # ensure rqlst is left unmodified
+        self.assertEqual(rqlst.as_string(), 'DISTINCT Any  WHERE X is CWUser')
+        #rqlst = rset.syntax_tree()
+        self.assertEqual(f.possible_values(),
+                         [str(mind), str(maxd)])
+        # ensure rqlst is left unmodified
+        self.assertEqual(rqlst.as_string(), 'DISTINCT Any  WHERE X is CWUser')
+        req.form['%s_inf' % f.__regid__] = str(datetime2ticks(mind))
+        req.form['%s_sup' % f.__regid__] = str(datetime2ticks(mind))
+        f.add_rql_restrictions()
+        # selection is cluttered because rqlst has been prepared for facet (it
+        # is not in real life)
+        self.assertEqual(f.select.as_string(),
+                          'DISTINCT Any  WHERE X is CWUser, X creation_date >= "%s", '
+                         'X creation_date <= "%s"'
+                         % (mind.strftime('%Y/%m/%d'),
+                            mind.strftime('%Y/%m/%d')))
 
     def test_attribute(self):
-        req, rset, rqlst, mainvar = self.prepare_rqlst()
+        req, rset, rqlst, filtered_variable = self.prepare_rqlst()
         f = facet.AttributeFacet(req, rset=rset,
-                                 rqlst=rqlst.children[0],
-                                 filtered_variable=mainvar)
+                                 select=rqlst.children[0],
+                                 filtered_variable=filtered_variable)
         f.rtype = 'login'
         self.assertEqual(f.vocabulary(),
                           [(u'admin', u'admin'), (u'anon', u'anon')])
@@ -146,9 +192,127 @@
         f.add_rql_restrictions()
         # selection is cluttered because rqlst has been prepared for facet (it
         # is not in real life)
-        self.assertEqual(f.rqlst.as_string(),
+        self.assertEqual(f.select.as_string(),
                           "DISTINCT Any  WHERE X is CWUser, X login 'admin'")
 
+    def test_rql_path_eid(self):
+        req, rset, rqlst, filtered_variable = self.prepare_rqlst()
+        class RPF(facet.RQLPathFacet):
+            path = [('X created_by U'), ('U owned_by O'), ('O login OL')]
+            filter_variable = 'O'
+            label_variable = 'OL'
+        f = RPF(req, rset=rset, select=rqlst.children[0],
+                filtered_variable=filtered_variable)
+        self.assertEqual(f.vocabulary(), [(u'admin', self.user().eid),])
+        # ensure rqlst is left unmodified
+        self.assertEqual(rqlst.as_string(), 'DISTINCT Any  WHERE X is CWUser')
+        #rqlst = rset.syntax_tree()
+        self.assertEqual(f.possible_values(),
+                          [str(self.user().eid),])
+        # ensure rqlst is left unmodified
+        self.assertEqual(rqlst.as_string(), 'DISTINCT Any  WHERE X is CWUser')
+        req.form[f.__regid__] = '1'
+        f.add_rql_restrictions()
+        # selection is cluttered because rqlst has been prepared for facet (it
+        # is not in real life)
+        self.assertEqual(f.select.as_string(),
+                         "DISTINCT Any  WHERE X is CWUser, X created_by F, F owned_by G, G eid 1")
+
+    def test_rql_path_eid_no_label(self):
+        req, rset, rqlst, filtered_variable = self.prepare_rqlst()
+        class RPF(facet.RQLPathFacet):
+            path = [('X created_by U'), ('U owned_by O'), ('O login OL')]
+            filter_variable = 'O'
+        f = RPF(req, rset=rset, select=rqlst.children[0],
+                filtered_variable=filtered_variable)
+        self.assertEqual(f.vocabulary(), [(str(self.user().eid), self.user().eid),])
+
+    def test_rql_path_attr(self):
+        req, rset, rqlst, filtered_variable = self.prepare_rqlst()
+        class RPF(facet.RQLPathFacet):
+            path = [('X created_by U'), ('U owned_by O'), ('O login OL')]
+            filter_variable = 'OL'
+        f = RPF(req, rset=rset, select=rqlst.children[0],
+                filtered_variable=filtered_variable)
+
+        self.assertEqual(f.vocabulary(), [(u'admin', 'admin'),])
+        # ensure rqlst is left unmodified
+        self.assertEqual(rqlst.as_string(), 'DISTINCT Any  WHERE X is CWUser')
+        self.assertEqual(f.possible_values(), ['admin',])
+        # ensure rqlst is left unmodified
+        self.assertEqual(rqlst.as_string(), 'DISTINCT Any  WHERE X is CWUser')
+        req.form[f.__regid__] = 'admin'
+        f.add_rql_restrictions()
+        # selection is cluttered because rqlst has been prepared for facet (it
+        # is not in real life)
+        self.assertEqual(f.select.as_string(),
+                         "DISTINCT Any  WHERE X is CWUser, X created_by G, G owned_by H, H login 'admin'")
+
+    def test_rql_path_check_filter_label_variable(self):
+        req, rset, rqlst, filtered_variable = self.prepareg_aggregat_rqlst()
+        class RPF(facet.RQLPathFacet):
+            path = [('X created_by U'), ('U owned_by O'), ('O login OL')]
+            filter_variable = 'OL'
+            label_variable = 'OL'
+        self.assertRaises(AssertionError, RPF, req, rset=rset,
+                          select=rqlst.children[0],
+                          filtered_variable=filtered_variable)
+
+    def prepareg_aggregat_rqlst(self):
+        return self.prepare_rqlst(
+            'Any 1, COUNT(X) WHERE X is CWUser, X creation_date XD, '
+            'X modification_date XM, Y creation_date YD, Y is CWGroup '
+            'HAVING DAY(XD)>=DAY(YD) AND DAY(XM)<=DAY(YD)', 'X',
+            expected_baserql='Any 1,COUNT(X) WHERE X is CWUser, X creation_date XD, '
+            'X modification_date XM, Y creation_date YD, Y is CWGroup '
+            'HAVING DAY(XD) >= DAY(YD), DAY(XM) <= DAY(YD)',
+            expected_preparedrql='DISTINCT Any  WHERE X is CWUser, X creation_date XD, '
+            'X modification_date XM, Y creation_date YD, Y is CWGroup '
+            'HAVING DAY(XD) >= DAY(YD), DAY(XM) <= DAY(YD)')
+
+
+    def test_aggregat_query_cleanup_select(self):
+        req, rset, rqlst, filtered_variable = self.prepareg_aggregat_rqlst()
+        select = rqlst.children[0]
+        facet.cleanup_select(select, filtered_variable=filtered_variable)
+        self.assertEqual(select.as_string(),
+                         'DISTINCT Any  WHERE X is CWUser, X creation_date XD, '
+                         'X modification_date XM, Y creation_date YD, Y is CWGroup '
+                         'HAVING DAY(XD) >= DAY(YD), DAY(XM) <= DAY(YD)')
+
+    def test_aggregat_query_rql_path(self):
+        req, rset, rqlst, filtered_variable = self.prepareg_aggregat_rqlst()
+        class RPF(facet.RQLPathFacet):
+            path = [('X created_by U'), ('U owned_by O'), ('O login OL')]
+            filter_variable = 'OL'
+        f = RPF(req, rset=rset, select=rqlst.children[0],
+                filtered_variable=filtered_variable)
+        self.assertEqual(f.vocabulary(), [(u'admin', u'admin')])
+        self.assertEqual(f.possible_values(), ['admin'])
+        req.form[f.__regid__] = 'admin'
+        f.add_rql_restrictions()
+        self.assertEqual(f.select.as_string(),
+                         "DISTINCT Any  WHERE X is CWUser, X creation_date XD, "
+                         "X modification_date XM, Y creation_date YD, Y is CWGroup, "
+                         "X created_by G, G owned_by H, H login 'admin' "
+                         "HAVING DAY(XD) >= DAY(YD), DAY(XM) <= DAY(YD)")
+
+    def test_aggregat_query_attribute(self):
+        req, rset, rqlst, filtered_variable = self.prepareg_aggregat_rqlst()
+        f = facet.AttributeFacet(req, rset=rset,
+                                 select=rqlst.children[0],
+                                 filtered_variable=filtered_variable)
+        f.rtype = 'login'
+        self.assertEqual(f.vocabulary(),
+                          [(u'admin', u'admin'), (u'anon', u'anon')])
+        self.assertEqual(f.possible_values(),
+                          ['admin', 'anon'])
+        req.form[f.__regid__] = 'admin'
+        f.add_rql_restrictions()
+        self.assertEqual(f.select.as_string(),
+                          "DISTINCT Any  WHERE X is CWUser, X creation_date XD, "
+                          "X modification_date XM, Y creation_date YD, Y is CWGroup, X login 'admin' "
+                          "HAVING DAY(XD) >= DAY(YD), DAY(XM) <= DAY(YD)")
 
 if __name__ == '__main__':
     from logilab.common.testlib import unittest_main
--- a/web/test/unittest_form.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/test/unittest_form.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -21,7 +21,7 @@
 from logilab.common.testlib import unittest_main, mock_object
 from logilab.common.compat import any
 
-from cubicweb import Binary
+from cubicweb import Binary, ValidationError
 from cubicweb.devtools.testlib import CubicWebTC
 from cubicweb.web.formfields import (IntField, StringField, RichTextField,
                                      PasswordField, DateTimeField,
@@ -42,6 +42,16 @@
         self.assertEqual(StringField().format(form), 'text/rest')
 
 
+    def test_process_posted(self):
+        class AForm(FieldsForm):
+            anint = IntField()
+            astring = StringField()
+        form = AForm(self.request(anint='1', astring='2', _cw_fields='anint,astring'))
+        self.assertEqual(form.process_posted(), {'anint': 1, 'astring': '2'})
+        form = AForm(self.request(anint='1a', astring='2b', _cw_fields='anint,astring'))
+        self.assertRaises(ValidationError, form.process_posted)
+
+
 class EntityFieldsFormTC(CubicWebTC):
 
     def setUp(self):
--- a/web/test/unittest_formfields.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/test/unittest_formfields.py	Fri Dec 09 12:08:27 2011 +0100
@@ -144,6 +144,17 @@
         self.assertEqual(description_format_field.value(form), 'text/rest')
 
 
+    def test_property_key_field(self):
+        from cubicweb.web.views.cwproperties import PropertyKeyField
+        req = self.request()
+        field = PropertyKeyField()
+        e = self.vreg['etypes'].etype_class('CWProperty')(req)
+        renderer = self.vreg['formrenderers'].select('base', req)
+        form = EntityFieldsForm(req, entity=e)
+        form.formvalues = {}
+        field.render(form, renderer)
+
+
 class UtilsTC(TestCase):
     def test_vocab_sort(self):
         self.assertEqual(vocab_sort([('Z', 1), ('A', 2),
--- a/web/test/unittest_reledit.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/test/unittest_reledit.py	Fri Dec 09 12:08:27 2011 +0100
@@ -64,7 +64,7 @@
 <input name="__reledit|reload" type="hidden" value="false" />
 <input name="__reledit|role" type="hidden" value="subject" />
 <input name="__reledit|eid" type="hidden" value="%(eid)s" />
-<input name="_cw_edited_fields:%(eid)s" type="hidden" value="title-subject,__type" />
+<input name="_cw_entity_fields:%(eid)s" type="hidden" value="title-subject,__type" />
 <fieldset class="default">
 <table class="">
 <tr class="title_subject_row">
@@ -97,7 +97,7 @@
 <input name="__reledit|reload" type="hidden" value="false" />
 <input name="__reledit|role" type="hidden" value="subject" />
 <input name="__reledit|eid" type="hidden" value="%(eid)s" />
-<input name="_cw_edited_fields:A" type="hidden" value="title-subject,rss_url-subject,__type,description-subject" />
+<input name="_cw_entity_fields:A" type="hidden" value="title-subject,rss_url-subject,__type,description-subject" />
 <fieldset class="default">
 <table class="attributeForm">
 <tr class="title_subject_row">
@@ -141,7 +141,7 @@
 <input name="__reledit|reload" type="hidden" value="false" />
 <input name="__reledit|role" type="hidden" value="subject" />
 <input name="__reledit|eid" type="hidden" value="%(eid)s" />
-<input name="_cw_edited_fields:%(eid)s" type="hidden" value="manager-subject,__type" />
+<input name="_cw_entity_fields:%(eid)s" type="hidden" value="manager-subject,__type" />
 <fieldset class="default">
 <table class="">
 <tr class="manager_subject_row">
--- a/web/test/unittest_session.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/test/unittest_session.py	Fri Dec 09 12:08:27 2011 +0100
@@ -2,7 +2,7 @@
 """unit tests for cubicweb.web.application
 
 :organization: Logilab
-:copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
+:copyright: 2001-2011 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
 :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
 :license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
 """
--- a/web/test/unittest_views_basecontrollers.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/test/unittest_views_basecontrollers.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -33,7 +33,7 @@
 
 def req_form(user):
     return {'eid': [str(user.eid)],
-            '_cw_edited_fields:%s' % user.eid: '_cw_generic_field',
+            '_cw_entity_fields:%s' % user.eid: '_cw_generic_field',
             '__type:%s' % user.eid: user.__regid__
             }
 
@@ -59,7 +59,7 @@
         user = self.user()
         req = self.request()
         req.form = {'eid': 'X', '__type:X': 'CWUser',
-                    '_cw_edited_fields:X': 'login-subject,upassword-subject',
+                    '_cw_entity_fields:X': 'login-subject,upassword-subject',
                     'login-subject:X': u'admin',
                     'upassword-subject:X': u'toto',
                     'upassword-subject-confirm:X': u'toto',
@@ -79,7 +79,7 @@
         eid = u(user.eid)
         req.form = {
             'eid': eid, '__type:'+eid: 'CWUser',
-            '_cw_edited_fields:'+eid: 'login-subject,firstname-subject,surname-subject,in_group-subject',
+            '_cw_entity_fields:'+eid: 'login-subject,firstname-subject,surname-subject,in_group-subject',
             'login-subject:'+eid:     u(user.login),
             'surname-subject:'+eid: u'Th\xe9nault',
             'firstname-subject:'+eid:   u'Sylvain',
@@ -100,7 +100,7 @@
         req.form = {
             'eid': eid, '__maineid' : eid,
             '__type:'+eid: 'CWUser',
-            '_cw_edited_fields:'+eid: 'upassword-subject',
+            '_cw_entity_fields:'+eid: 'upassword-subject',
             'upassword-subject:'+eid: 'tournicoton',
             'upassword-subject-confirm:'+eid: 'tournicoton',
             }
@@ -120,7 +120,7 @@
         req.form = {
             'eid':       eid,
             '__type:'+eid:    'CWUser',
-            '_cw_edited_fields:'+eid: 'login-subject,firstname-subject,surname-subject',
+            '_cw_entity_fields:'+eid: 'login-subject,firstname-subject,surname-subject',
             'login-subject:'+eid:     u(user.login),
             'firstname-subject:'+eid: u'Th\xe9nault',
             'surname-subject:'+eid:   u'Sylvain',
@@ -140,14 +140,14 @@
         req.form = {'eid': ['X', 'Y'], '__maineid' : 'X',
 
                     '__type:X': 'CWUser',
-                    '_cw_edited_fields:X': 'login-subject,upassword-subject,surname-subject,in_group-subject',
+                    '_cw_entity_fields:X': 'login-subject,upassword-subject,surname-subject,in_group-subject',
                     'login-subject:X': u'adim',
                     'upassword-subject:X': u'toto', 'upassword-subject-confirm:X': u'toto',
                     'surname-subject:X': u'Di Mascio',
                     'in_group-subject:X': u(gueid),
 
                     '__type:Y': 'EmailAddress',
-                    '_cw_edited_fields:Y': 'address-subject,use_email-object',
+                    '_cw_entity_fields:Y': 'address-subject,use_email-object',
                     'address-subject:Y': u'dima@logilab.fr',
                     'use_email-object:Y': 'X',
                     }
@@ -165,11 +165,11 @@
         req.form = {'eid': [peid, 'Y'], '__maineid': peid,
 
                     '__type:'+peid: u'CWUser',
-                    '_cw_edited_fields:'+peid: u'surname-subject',
+                    '_cw_entity_fields:'+peid: u'surname-subject',
                     'surname-subject:'+peid: u'Di Masci',
 
                     '__type:Y': u'EmailAddress',
-                    '_cw_edited_fields:Y': u'address-subject,use_email-object',
+                    '_cw_entity_fields:Y': u'address-subject,use_email-object',
                     'address-subject:Y': u'dima@logilab.fr',
                     'use_email-object:Y': peid,
                     }
@@ -185,16 +185,16 @@
         req.form = {'eid': [peid, emaileid],
 
                     '__type:'+peid: u'CWUser',
-                    '_cw_edited_fields:'+peid: u'surname-subject',
+                    '_cw_entity_fields:'+peid: u'surname-subject',
                     'surname-subject:'+peid: u'Di Masci',
 
                     '__type:'+emaileid: u'EmailAddress',
-                    '_cw_edited_fields:'+emaileid: u'address-subject,use_email-object',
+                    '_cw_entity_fields:'+emaileid: u'address-subject,use_email-object',
                     'address-subject:'+emaileid: u'adim@logilab.fr',
                     'use_email-object:'+emaileid: peid,
                     }
         path, params = self.expect_redirect_publish(req, 'edit')
-        email.clear_all_caches()
+        email.cw_clear_all_caches()
         self.assertEqual(email.address, 'adim@logilab.fr')
 
 
@@ -205,7 +205,7 @@
         req = self.request()
         req.form = {'eid': 'X',
                     '__cloned_eid:X': u(user.eid), '__type:X': 'CWUser',
-                    '_cw_edited_fields:X': 'login-subject,upassword-subject',
+                    '_cw_entity_fields:X': 'login-subject,upassword-subject',
                     'login-subject:X': u'toto',
                     'upassword-subject:X': u'toto',
                     }
@@ -215,7 +215,7 @@
         req = self.request()
         req.form = {'__cloned_eid:X': u(user.eid),
                     'eid': 'X', '__type:X': 'CWUser',
-                    '_cw_edited_fields:X': 'login-subject,upassword-subject',
+                    '_cw_entity_fields:X': 'login-subject,upassword-subject',
                     'login-subject:X': u'toto',
                     'upassword-subject:X': u'toto',
                     'upassword-subject-confirm:X': u'tutu',
@@ -232,27 +232,27 @@
         req = self.request(rollbackfirst=True)
         req.form = {'eid': ['X'],
                     '__type:X': 'Salesterm',
-                    '_cw_edited_fields:X': 'amount-subject,described_by_test-subject',
+                    '_cw_entity_fields:X': 'amount-subject,described_by_test-subject',
                     'amount-subject:X': u'-10',
                     'described_by_test-subject:X': u(feid),
                 }
         with self.assertRaises(ValidationError) as cm:
             self.ctrl_publish(req)
-        self.assertEqual(cm.exception.errors, {'amount-subject': 'value must be >= 0'})
+        self.assertEqual(cm.exception.errors, {'amount-subject': 'value -10 must be >= 0'})
         req = self.request(rollbackfirst=True)
         req.form = {'eid': ['X'],
                     '__type:X': 'Salesterm',
-                    '_cw_edited_fields:X': 'amount-subject,described_by_test-subject',
+                    '_cw_entity_fields:X': 'amount-subject,described_by_test-subject',
                     'amount-subject:X': u'110',
                     'described_by_test-subject:X': u(feid),
                     }
         with self.assertRaises(ValidationError) as cm:
             self.ctrl_publish(req)
-        self.assertEqual(cm.exception.errors, {'amount-subject': 'value must be <= 100'})
+        self.assertEqual(cm.exception.errors, {'amount-subject': 'value 110 must be <= 100'})
         req = self.request(rollbackfirst=True)
         req.form = {'eid': ['X'],
                     '__type:X': 'Salesterm',
-                    '_cw_edited_fields:X': 'amount-subject,described_by_test-subject',
+                    '_cw_entity_fields:X': 'amount-subject,described_by_test-subject',
                     'amount-subject:X': u'10',
                     'described_by_test-subject:X': u(feid),
                     }
@@ -298,7 +298,7 @@
         req = self.request()
         req.form = {
             'eid': 'A', '__maineid' : 'A',
-            '__type:A': 'BlogEntry', '_cw_edited_fields:A': 'content-subject,title-subject',
+            '__type:A': 'BlogEntry', '_cw_entity_fields:A': 'content-subject,title-subject',
             'content-subject:A': u'"13:03:43"',
             'title-subject:A': u'huuu',
             '__redirectrql': redirectrql,
@@ -321,7 +321,7 @@
         req = self.request()
         req.form = {
             'eid': 'A', '__maineid' : 'A',
-            '__type:A': 'BlogEntry', '_cw_edited_fields:A': 'content-subject,title-subject',
+            '__type:A': 'BlogEntry', '_cw_entity_fields:A': 'content-subject,title-subject',
             'content-subject:A': u'"13:03:43"',
             'title-subject:A': u'huuu',
             '__redirectrql': redirectrql,
@@ -377,7 +377,7 @@
         req.form = {
             'eid':      cwetypeeid,
             '__type:'+cwetypeeid:  'CWEType',
-            '_cw_edited_fields:'+cwetypeeid: 'name-subject,final-subject,description-subject,read_permission-subject',
+            '_cw_entity_fields:'+cwetypeeid: 'name-subject,final-subject,description-subject,read_permission-subject',
             'name-subject:'+cwetypeeid:     u'CWEType',
             'final-subject:'+cwetypeeid:    '',
             'description-subject:'+cwetypeeid:     u'users group',
@@ -401,7 +401,7 @@
         req = self.request()
         req.form = {
             'eid': 'A', '__maineid' : 'A',
-            '__type:A': 'BlogEntry', '_cw_edited_fields:A': 'title-subject,content-subject',
+            '__type:A': 'BlogEntry', '_cw_entity_fields:A': 'title-subject,content-subject',
             'title-subject:A': u'"13:03:40"',
             'content-subject:A': u'"13:03:43"',}
         path, params = self.expect_redirect_publish(req, 'edit')
@@ -418,13 +418,13 @@
         req.form = {'eid': ['X', 'Y'],
 
                     '__type:X': 'CWUser',
-                    '_cw_edited_fields:X': 'login-subject,upassword-subject,in_group-subject',
+                    '_cw_entity_fields:X': 'login-subject,upassword-subject,in_group-subject',
                     'login-subject:X': u'adim',
                     'upassword-subject:X': u'toto', 'upassword-subject-confirm:X': u'toto',
                     'in_group-subject:X': `gueid`,
 
                     '__type:Y': 'EmailAddress',
-                    '_cw_edited_fields:Y': 'address-subject,alias-subject,use_email-object',
+                    '_cw_entity_fields:Y': 'address-subject,alias-subject,use_email-object',
                     'address-subject:Y': u'',
                     'alias-subject:Y': u'',
                     'use_email-object:Y': 'X',
@@ -438,7 +438,7 @@
         req = self.request()
         req.form = {'__maineid' : 'X', 'eid': 'X',
                     '__cloned_eid:X': user.eid, '__type:X': 'CWUser',
-                    '_cw_edited_fields:X': 'login-subject,upassword-subject',
+                    '_cw_entity_fields:X': 'login-subject,upassword-subject',
                     'login-subject:X': u'toto',
                     'upassword-subject:X': u'toto', 'upassword-subject-confirm:X': u'toto',
                     }
@@ -462,7 +462,7 @@
             req = self.request()
             req.form = {'eid': 'X',
                         '__cloned_eid:X': p.eid, '__type:X': 'CWUser',
-                        '_cw_edited_fields:X': 'login-subject,surname-subject',
+                        '_cw_entity_fields:X': 'login-subject,surname-subject',
                         'login-subject': u'dodo',
                         'surname-subject:X': u'Boom',
                         '__errorurl' : "whatever but required",
--- a/web/test/unittest_views_editforms.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/test/unittest_views_editforms.py	Fri Dec 09 12:08:27 2011 +0100
@@ -64,6 +64,7 @@
                                ])
         self.assertListEqual(rbc(e, 'main', 'metadata'),
                               [('last_login_time', 'subject'),
+                               ('cw_source', 'subject'),
                                ('creation_date', 'subject'),
                                ('cwuri', 'subject'),
                                ('modification_date', 'subject'),
@@ -76,9 +77,9 @@
         # (appears here while expected in hidden
         self.assertListEqual([x for x in rbc(e, 'main', 'relations')
                                if x != ('tags', 'object')],
-                              [('primary_email', 'subject'),
-                               ('connait', 'subject'),
+                              [('connait', 'subject'),
                                ('custom_workflow', 'subject'),
+                               ('primary_email', 'subject'),
                                ('checked_by', 'object'),
                                ])
         self.assertListEqual(rbc(e, 'main', 'inlined'),
@@ -119,7 +120,8 @@
                               [('nom', 'subject'),
                                ])
         self.assertListEqual(rbc(e, 'main', 'metadata'),
-                              [('creation_date', 'subject'),
+                              [('cw_source', 'subject'),
+                               ('creation_date', 'subject'),
                                ('cwuri', 'subject'),
                                ('modification_date', 'subject'),
                                ('created_by', 'subject'),
--- a/web/test/unittest_views_searchrestriction.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/test/unittest_views_searchrestriction.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -15,11 +15,9 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""
 
-"""
 from cubicweb.devtools.testlib import CubicWebTC
-from cubicweb.web.facet import insert_attr_select_relation, prepare_facets_rqlst
+from cubicweb.web import facet
 
 
 class InsertAttrRelationTC(CubicWebTC):
@@ -27,13 +25,14 @@
     def parse(self, query):
         rqlst = self.vreg.parse(self.session, query)
         select = rqlst.children[0]
-        # XXX done in real life?
-        select.remove_groups()
         return rqlst
 
     def _generate(self, rqlst, rel, role, attr):
-        mainvar = prepare_facets_rqlst(rqlst)[0]
-        insert_attr_select_relation(rqlst.children[0], mainvar, rel, role, attr)
+        select = rqlst.children[0]
+        filtered_variable = facet.get_filtered_variable(select)
+        facet.prepare_select(select, filtered_variable)
+        facet.insert_attr_select_relation(select, filtered_variable,
+                                          rel, role, attr)
         return rqlst.as_string()
 
     @property
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/test/unittest_views_xmlrss.py	Fri Dec 09 12:08:27 2011 +0100
@@ -0,0 +1,38 @@
+from cubicweb.devtools.testlib import CubicWebTC
+from cubicweb.web.views.xmlrss import SERIALIZERS
+class EntityXMLViewTC(CubicWebTC):
+    """see also cw.sobjects.test.unittest_parsers"""
+    def test(self):
+        req = self.request(relation=['tags-object', 'in_group-subject',
+                                     'in_state-subject', 'use_email-subject'])
+        self.assertMultiLineEqual(
+            req.user.view('xml'),
+            '''\
+<CWUser eid="6" cwuri="None6" cwsource="system">
+  <login>admin</login>
+  <upassword/>
+  <firstname/>
+  <surname/>
+  <last_login_time/>
+  <creation_date>%(cdate)s</creation_date>
+  <modification_date>%(mdate)s</modification_date>
+  <tags role="object">
+  </tags>
+  <in_group role="subject">
+    <CWGroup eid="%(group_eid)s" cwuri="None%(group_eid)s"/>
+  </in_group>
+  <in_state role="subject">
+    <State eid="%(state_eid)s" cwuri="None%(state_eid)s" name="activated"/>
+  </in_state>
+  <use_email role="subject">
+  </use_email>
+</CWUser>
+''' % {'cdate': SERIALIZERS['Datetime'](req.user.creation_date),
+       'mdate': SERIALIZERS['Datetime'](req.user.modification_date),
+       'state_eid': req.user.in_state[0].eid,
+       'group_eid': req.user.in_group[0].eid})
+
+
+if __name__ == '__main__':
+    from logilab.common.testlib import unittest_main
+    unittest_main()
--- a/web/test/unittest_viewselector.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/test/unittest_viewselector.py	Fri Dec 09 12:08:27 2011 +0100
@@ -76,7 +76,7 @@
             return
         try:
             self.assertSetEqual(content.keys(), expected)
-        except:
+        except Exception:
             print registry, sorted(expected), sorted(content.keys())
             print 'no more', [v for v in expected if not v in content.keys()]
             print 'missing', [v for v in content.keys() if not v in expected]
@@ -126,7 +126,6 @@
                               ('rsetxml', xmlrss.XMLRsetView),
                               ('rss', xmlrss.RSSView),
                               ('sameetypelist', baseviews.SameETypeListView),
-                              ('secondary', baseviews.SecondaryView),
                               ('security', management.SecurityManagementView),
                               ('table', tableview.TableView),
                               ('text', baseviews.TextView),
@@ -150,7 +149,6 @@
                               ('rsetxml', xmlrss.XMLRsetView),
                               ('rss', xmlrss.RSSView),
                               ('sameetypelist', baseviews.SameETypeListView),
-                              ('secondary', baseviews.SecondaryView),
                               ('security', management.SecurityManagementView),
                               ('table', tableview.TableView),
                               ('text', baseviews.TextView),
@@ -204,7 +202,6 @@
                               ('primary', primary.PrimaryView),] + RDFVIEWS + [
                               ('rsetxml', xmlrss.XMLRsetView),
                               ('rss', xmlrss.RSSView),
-                              ('secondary', baseviews.SecondaryView),
                               ('security', management.SecurityManagementView),
                               ('table', tableview.TableView),
                               ('text', baseviews.TextView),
@@ -240,7 +237,6 @@
                               ('rsetxml', xmlrss.XMLRsetView),
                               ('rss', xmlrss.RSSView),
                               ('sameetypelist', baseviews.SameETypeListView),
-                              ('secondary', baseviews.SecondaryView),
                               ('security', management.SecurityManagementView),
                               ('table', tableview.TableView),
                               ('text', baseviews.TextView),
@@ -468,7 +464,7 @@
         try:
             obj = self.vreg['views'].select(vid, req, rset=rset, **args)
             return obj.render(**args)
-        except:
+        except Exception:
             print vid, rset, args
             raise
 
--- a/web/uicfg.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/uicfg.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -30,6 +30,16 @@
       * ``schema``
       * ``subobject`` (not displayed by default)
 
+   By default only entities on the ``application`` category are shown.
+
+.. sourcecode:: python
+
+    from cubicweb.web import uicfg
+    # force hiding
+    uicfg.indexview_etype_section['HideMe'] = 'subobject'
+    # force display
+    uicfg.indexview_etype_section['ShowMe'] = 'application'
+
 
 Actions box configuration
 `````````````````````````
--- a/web/views/__init__.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/views/__init__.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -22,7 +22,9 @@
 import os
 import sys
 import tempfile
+
 from rql import nodes
+from logilab.mtconverter import xml_escape
 
 
 def need_table_view(rset, schema):
@@ -113,6 +115,16 @@
     return "javascript: selectForAssociation('%s', '%s');" % (triplets, eid)
 
 
+def add_etype_button(req, etype, csscls='addButton right', **urlkwargs):
+    vreg = req.vreg
+    eschema = vreg.schema.eschema(etype)
+    if eschema.has_perm(req, 'add'):
+        url = vreg['etypes'].etype_class(etype).cw_create_url(req, **urlkwargs)
+        return u'<a href="%s" class="%s">%s</a>' % (
+            xml_escape(url), csscls, req.__('New %s' % etype))
+    return u''
+
+
 class TmpFileViewMixin(object):
     binary = True
     content_type = 'application/octet-stream'
--- a/web/views/actions.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/views/actions.py	Fri Dec 09 12:08:27 2011 +0100
@@ -135,14 +135,19 @@
         params = self._cw.form.copy()
         for param in ('vid', '__message') + controller.NAV_FORM_PARAMETERS:
             params.pop(param, None)
-        return self._cw.build_url(self._cw.relative_path(includeparams=False),
-                                  **params)
+        if self._cw.json_request:
+            path = 'view'
+            if self.cw_rset is not None:
+                params = {'rql': self.cw_rset.printable_rql()}
+        else:
+            path = self._cw.relative_path(includeparams=False)
+        return self._cw.build_url(path, **params)
 
 
 class ModifyAction(action.Action):
     __regid__ = 'edit'
     __select__ = (action.Action.__select__
-                  & one_line_rset() & has_editable_relation('add'))
+                  & one_line_rset() & has_editable_relation())
 
     title = _('modify')
     category = 'mainactions'
@@ -163,7 +168,7 @@
     order = 10
 
     def url(self):
-        return self._cw.build_url('view', rql=self.cw_rset.rql, vid='muledit')
+        return self._cw.build_url('view', rql=self.cw_rset.printable_rql(), vid='muledit')
 
 
 # generic "more" actions #######################################################
@@ -243,7 +248,7 @@
         return self._cw.__('add a %s' % self.rsettype) # generated msgid
 
     def url(self):
-        return self._cw.build_url('add/%s' % self.rsettype)
+        return self._cw.vreg["etypes"].etype_class(self.rsettype).cw_create_url(self._cw)
 
 
 class AddRelatedActions(action.Action):
@@ -314,8 +319,9 @@
                         yield rschema, teschema, role
 
     def linkto_url(self, entity, rtype, etype, target, **kwargs):
-        return self._cw.build_url('add/%s' % etype,
-                                  __linkto='%s:%s:%s' % (rtype, entity.eid, target), **kwargs)
+        return self._cw.vreg["etypes"].etype_class(etype).cw_create_url(
+                self._cw, __linkto='%s:%s:%s' % (rtype, entity.eid, target),
+                **kwargs)
 
 
 class ViewSameCWEType(action.Action):
--- a/web/views/ajaxedit.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/views/ajaxedit.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -70,7 +70,7 @@
         if getattr(self, 'etype', None):
             rset = entity.unrelated(self.rtype, self.etype, role(self),
                                     ordermethod='fetch_order')
-            self.pagination(self._cw, rset, w=self.w)
+            self.paginate(self._cw, rset=rset, w=self.w)
             return rset.entities()
         super(AddRelationView, self).unrelated_entities(self)
 
--- a/web/views/authentication.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/views/authentication.py	Fri Dec 09 12:08:27 2011 +0100
@@ -65,6 +65,14 @@
         """
         raise NotImplementedError()
 
+    def cleanup_authentication_information(self, req):
+        """called when the retriever has returned some authentication
+        information but we get an authentication error when using them, so it
+        get a chance to cleanup things (e.g. remove cookie)
+        """
+        pass
+
+
 class LoginPasswordRetreiver(WebAuthInfoRetreiver):
     __regid__ = 'loginpwdauth'
     order = 10
@@ -144,6 +152,7 @@
             try:
                 cnx = self._authenticate(login, authinfo)
             except AuthenticationError:
+                retriever.cleanup_authentication_information(req)
                 continue # the next one may succeed
             for retriever_ in self.authinforetrievers:
                 retriever_.authenticated(retriever, req, cnx, login, authinfo)
--- a/web/views/autoform.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/views/autoform.py	Fri Dec 09 12:08:27 2011 +0100
@@ -198,6 +198,9 @@
     _select_attrs = ('peid', 'rtype', 'role', 'pform', 'etype')
     removejs = "removeInlinedEntity('%s', '%s', '%s')"
 
+    # make pylint happy
+    peid = rtype = role = pform = etype = None
+
     def __init__(self, *args, **kwargs):
         for attr in self._select_attrs:
             # don't pop attributes from kwargs, so the end-up in
@@ -304,6 +307,9 @@
                   & specified_etype_implements('Any'))
     _select_attrs = InlineEntityEditionFormView._select_attrs + ('petype',)
 
+    # make pylint happy
+    petype = None
+
     @property
     def removejs(self):
         entity = self._entity()
@@ -325,7 +331,7 @@
     def _entity(self):
         try:
             cls = self._cw.vreg['etypes'].etype_class(self.etype)
-        except:
+        except Exception:
             self.w(self._cw._('no such entity type %s') % self.etype)
             return
         entity = cls(self._cw)
@@ -345,6 +351,7 @@
                   & specified_etype_implements('Any'))
 
     _select_attrs = InlineEntityCreationFormView._select_attrs + ('card',)
+    card = None # make pylint happy
     form = None # no actual form wrapped
 
     def call(self, i18nctx, **kwargs):
@@ -752,6 +759,7 @@
 
     def _generic_relations_field(self):
         try:
+            # pylint: disable=E1101
             srels_by_cat = self.srelations_by_category('generic', 'add', strict=True)
             warn('[3.6] %s: srelations_by_category is deprecated, use uicfg or '
                  'override editable_relations instead' % classid(self),
--- a/web/views/basecomponents.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/views/basecomponents.py	Fri Dec 09 12:08:27 2011 +0100
@@ -50,25 +50,22 @@
     visible = False
 
     def call(self, view=None):
+        req = self._cw
         if hasattr(view, 'filter_box_context_info'):
             rset = view.filter_box_context_info()[0]
         else:
             rset = self.cw_rset
         # display multilines query as one line
-        rql = rset is not None and rset.printable_rql(encoded=False) or self._cw.form.get('rql', '')
+        rql = rset is not None and rset.printable_rql(encoded=False) or req.form.get('rql', '')
         rql = rql.replace(u"\n", u" ")
-        req = self._cw
-        self.w(u'''<div id="rqlinput" class="%s">
-          <form action="%s">
-<fieldset>
+        self.w(u'''<div id="rqlinput" class="%s"><form action="%s"><fieldset>
 <input type="text" id="rql" name="rql" value="%s"  title="%s" tabindex="%s" accesskey="q" class="searchField" />
-</fieldset>
 ''' % (not self.cw_propval('visible') and 'hidden' or '',
-       self._cw.build_url('view'), xml_escape(rql), req._('full text or RQL query'), req.next_tabindex()))
-        if self._cw.search_state[0] != 'normal':
+       req.build_url('view'), xml_escape(rql), req._('full text or RQL query'), req.next_tabindex()))
+        if req.search_state[0] != 'normal':
             self.w(u'<input type="hidden" name="__mode" value="%s"/>'
                    % ':'.join(req.search_state[1]))
-        self.w(u'</form></div>')
+        self.w(u'</fieldset></form></div>')
 
 
 
--- a/web/views/basecontrollers.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/views/basecontrollers.py	Fri Dec 09 12:08:27 2011 +0100
@@ -23,6 +23,7 @@
 _ = unicode
 
 from logilab.common.date import strptime
+from logilab.common.deprecation import deprecated
 
 from cubicweb import (NoSelectableObject, ObjectNotFound, ValidationError,
                       AuthenticationError, typed_eid)
@@ -35,11 +36,9 @@
 from cubicweb.web.views import vid_from_rset, formrenderers
 
 try:
-    from cubicweb.web.facet import (FilterRQLBuilder, get_facet,
-                                    prepare_facets_rqlst)
-    HAS_SEARCH_RESTRICTION = True
+    from cubicweb.web import facet as facetbase
 except ImportError: # gae
-    HAS_SEARCH_RESTRICTION = False
+    facetbase = None
 
 def jsonize(func):
     """decorator to sets correct content_type and calls `json_dumps` on
@@ -102,7 +101,7 @@
         msg = self._cw._('you have been logged out')
         # force base_url so on dual http/https configuration, we generate an url
         # on the http version of the site
-        return self._cw.build_url('view', vid='index', __message=msg,
+        return self._cw.build_url('view', vid='loggedout',
                                   base_url=self._cw.vreg.config['base-url'])
 
 
@@ -490,21 +489,23 @@
             return None
         return cb(self._cw)
 
-    if HAS_SEARCH_RESTRICTION:
+    if facetbase is not None:
         @jsonize
         def js_filter_build_rql(self, names, values):
             form = self._rebuild_posted_form(names, values)
             self._cw.form = form
-            builder = FilterRQLBuilder(self._cw)
+            builder = facetbase.FilterRQLBuilder(self._cw)
             return builder.build_rql()
 
         @jsonize
-        def js_filter_select_content(self, facetids, rql):
-            rqlst = self._cw.vreg.parse(self._cw, rql) # XXX Union unsupported yet
-            mainvar = prepare_facets_rqlst(rqlst)[0]
+        def js_filter_select_content(self, facetids, rql, mainvar):
+            # Union unsupported yet
+            select = self._cw.vreg.parse(self._cw, rql).children[0]
+            filtered_variable = facetbase.get_filtered_variable(select, mainvar)
+            facetbase.prepare_select(select, filtered_variable)
             update_map = {}
             for facetid in facetids:
-                facet = get_facet(self._cw, facetid, rqlst.children[0], mainvar)
+                facet = facetbase.get_facet(self._cw, facetid, select, filtered_variable)
                 update_map[facetid] = facet.possible_values()
             return update_map
 
@@ -534,24 +535,20 @@
         statename = treecookiename(treeid)
         treestate = cookies.get(statename)
         if treestate is None:
-            cookies[statename] = nodeeid
-            self._cw.set_cookie(cookies, statename)
+            self._cw.set_cookie(statename, nodeeid)
         else:
             marked = set(filter(None, treestate.value.split(':')))
             if nodeeid in marked:
                 marked.remove(nodeeid)
             else:
                 marked.add(nodeeid)
-            cookies[statename] = ':'.join(marked)
-            self._cw.set_cookie(cookies, statename)
+            self._cw.set_cookie(statename, ':'.join(marked))
 
     @jsonize
+    @deprecated("[3.13] use jQuery.cookie(cookiename, cookievalue, {path: '/'}) in js land instead")
     def js_set_cookie(self, cookiename, cookievalue):
-        # XXX we should consider jQuery.Cookie
         cookiename, cookievalue = str(cookiename), str(cookievalue)
-        cookies = self._cw.get_cookie()
-        cookies[cookiename] = cookievalue
-        self._cw.set_cookie(cookies, cookiename)
+        self._cw.set_cookie(cookiename, cookievalue)
 
     # relations edition stuff ##################################################
 
--- a/web/views/basetemplates.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/views/basetemplates.py	Fri Dec 09 12:08:27 2011 +0100
@@ -25,7 +25,7 @@
 
 from cubicweb.appobject import objectify_selector
 from cubicweb.selectors import match_kwargs, no_cnx, anonymous_user
-from cubicweb.view import View, MainTemplate, NOINDEX, NOFOLLOW
+from cubicweb.view import View, MainTemplate, NOINDEX, NOFOLLOW, StartupView
 from cubicweb.utils import UStringIO
 from cubicweb.schema import display_name
 from cubicweb.web import component, formfields as ff, formwidgets as fw
@@ -56,6 +56,9 @@
         self.wview('htmlheader', rset=self.cw_rset)
         w(u'<title>%s</title>\n' % xml_escape(page_title))
 
+    def content(self):
+        raise NotImplementedError()
+
 
 class LogInTemplate(LogInOutTemplate):
     __regid__ = 'login'
@@ -66,19 +69,19 @@
         self.wview('logform', rset=self.cw_rset, id='loginBox', klass='')
 
 
-class LoggedOutTemplate(LogInOutTemplate):
+class LoggedOutTemplate(StartupView):
     __regid__ = 'loggedout'
+    __select__ = anonymous_user()
     title = 'logged out'
 
-    def content(self, w):
-        # FIXME Deprecated code ?
+    def call(self):
         msg = self._cw._('you have been logged out')
-        w(u'<h2>%s</h2>\n' % msg)
-        if self._cw.vreg.config.anonymous_user()[0]:
-            indexurl = self._cw.build_url('view', vid='index', __message=msg)
-            w(u'<p><a href="%s">%s</a><p>' % (
-                xml_escape(indexurl),
-                self._cw._('go back to the index page')))
+        if self._cw.cnx:
+            comp = self._cw.vreg['components'].select('applmessages', self._cw)
+            comp.render(w=self.w, msg=msg)
+            self.wview('index')
+        else:
+            self.w(u'<h2>%s</h2>' % msg)
 
 
 @objectify_selector
--- a/web/views/baseviews.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/views/baseviews.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -15,12 +15,64 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""Set of HTML generic base views:
+"""
+HTML views
+~~~~~~~~~~
+
+Special views
+`````````````
+
+.. autoclass:: NullView
+.. autoclass:: NoResultView
+.. autoclass:: FinalView
+
+
+Base entity views
+`````````````````
+
+.. autoclass:: InContextView
+.. autoclass:: OutOfContextView
+.. autoclass:: OneLineView
 
-* noresult, final
-* primary, sidebox
-* oneline, incontext, outofcontext, text
-* list
+Those are used to display a link to an entity, whose label depends on the entity
+having to be displayed in or out of context (of another entity): some entities
+make sense in the context of another entity. For instance, the `Version` of a
+`Project` in forge. So one may expect that 'incontext' will be called when
+displaying a version from within the context of a project, while 'outofcontext'
+will be called in other cases. In our example, the 'incontext' view of the
+version would be something like '0.1.2', while the 'outofcontext' view would
+include the project name, e.g. 'baz 0.1.2' (since only a version number without
+the associated project doesn't make sense if you don't know yet that you're
+talking about the famous 'baz' project). |cubicweb| tries to guess and call
+'incontext'/'outofcontext' nicely. When it can't know, the 'oneline' view should
+be used.
+
+
+List entity views
+`````````````````
+
+.. autoclass:: ListView
+.. autoclass:: SimpleListView
+.. autoclass:: SameETypeListView
+.. autoclass:: CSVView
+
+Those list views can be given a 'subvid' argument, telling the view to use for
+each item in the list. When not specified, the value of the 'redirect_vid'
+attribute of :class:`ListItemView` (for 'listview') or of
+:class:`SimpleListView` will be used. This defaults to 'outofcontext' for 'list'
+/ 'incontext' for 'simplelist'
+
+
+Text entity views
+~~~~~~~~~~~~~~~~~
+
+Basic HTML view have some variants to be used when generating raw text, not HTML
+(for notifications for instance). Also, as explained above, some of the HTML
+views use those text views as a basis.
+
+.. autoclass:: TextView
+.. autoclass:: InContextTextView
+.. autoclass:: OutOfContextTextView
 """
 
 __docformat__ = "restructuredtext en"
@@ -42,7 +94,12 @@
 
 
 class NullView(AnyRsetView):
-    """default view when no result has been found"""
+    """:__regid__: *null*
+
+    This view is the default view used when nothing needs to be rendered. It is
+    always applicable and is usually used as fallback view when calling
+    :meth:`_cw.view` to display nothing if the result set is empty.
+    """
     __regid__ = 'null'
     __select__ = yes()
     def call(self, **kwargs):
@@ -51,9 +108,16 @@
 
 
 class NoResultView(View):
-    """default view when no result has been found"""
+    """:__regid__: *noresult*
+
+    This view is the default view to be used when no result has been found
+    (i.e. empty result set).
+
+    It's usually used as fallback view when calling :meth:`_cw.view` to display
+    "no results" if the result set is empty.
+    """
+    __regid__ = 'noresult'
     __select__ = empty_rset()
-    __regid__ = 'noresult'
 
     def call(self, **kwargs):
         self.w(u'<div class="searchMessage"><strong>%s</strong></div>\n'
@@ -61,8 +125,11 @@
 
 
 class FinalView(AnyRsetView):
-    """display values without any transformation (i.e. get a number for
-    entities)
+    """:__regid__: *final*
+
+    Display the value of a result set cell with minimal transformations
+    (i.e. you'll get a number for entities). It is applicable on any result set,
+    though usually dedicated for cells containing an attribute's value.
     """
     __regid__ = 'final'
     # record generated i18n catalog messages
@@ -126,21 +193,51 @@
         self.wdata(printable_value(self._cw, etype, value, props))
 
 
-# XXX deprecated
-class SecondaryView(EntityView):
-    __regid__ = 'secondary'
-    title = _('secondary')
+class InContextView(EntityView):
+    """:__regid__: *incontext*
+
+    This view is used when the entity should be considered as displayed in its
+    context. By default it produces the result of `textincontext` wrapped in a
+    link leading to the primary view of the entity.
+    """
+    __regid__ = 'incontext'
+
+    def cell_call(self, row, col):
+        entity = self.cw_rset.get_entity(row, col)
+        desc = cut(entity.dc_description(), 50)
+        self.w(u'<a href="%s" title="%s">' % (
+            xml_escape(entity.absolute_url()), xml_escape(desc)))
+        self.w(xml_escape(self._cw.view('textincontext', self.cw_rset,
+                                        row=row, col=col)))
+        self.w(u'</a>')
 
-    def cell_call(self, row, col, **kwargs):
-        """the secondary view for an entity
-        secondary = icon + view(oneline)
-        """
+
+class OutOfContextView(EntityView):
+    """:__regid__: *outofcontext*
+
+    This view is used when the entity should be considered as displayed out of
+    its context. By default it produces the result of `textoutofcontext` wrapped
+    in a link leading to the primary view of the entity.
+    """
+    __regid__ = 'outofcontext'
+
+    def cell_call(self, row, col):
         entity = self.cw_rset.get_entity(row, col)
-        self.w(u'&#160;')
-        self.wview('oneline', self.cw_rset, row=row, col=col)
+        desc = cut(entity.dc_description(), 50)
+        self.w(u'<a href="%s" title="%s">' % (
+            xml_escape(entity.absolute_url()), xml_escape(desc)))
+        self.w(xml_escape(self._cw.view('textoutofcontext', self.cw_rset,
+                                        row=row, col=col)))
+        self.w(u'</a>')
 
 
 class OneLineView(EntityView):
+    """:__regid__: *oneline*
+
+    This view is used when we can't tell if the entity should be considered as
+    displayed in or out of context. By default it produces the result of the
+    `text` view in a link leading to the primary view of the entity.
+    """
     __regid__ = 'oneline'
     title = _('oneline')
 
@@ -153,18 +250,25 @@
         self.w(u'</a>')
 
 
+# text views ###################################################################
+
 class TextView(EntityView):
-    """the simplest text view for an entity"""
+    """:__regid__: *text*
+
+    This is the simplest text view for an entity. By default it returns the
+    result of the entity's `dc_title()` method, which is cut to fit the
+    `navigation.short-line-size` property if necessary.
+    """
     __regid__ = 'text'
     title = _('text')
     content_type = 'text/plain'
 
     def call(self, **kwargs):
-        """the view is called for an entire result set, by default loop
-        other rows of the result set and call the same view on the
-        particular row
+        """The view is called for an entire result set, by default loop over the
+        rows of the result set and call the same view on the particular row.
 
-        Views applicable on None result sets have to override this method
+        Subclass views that are applicable on None result sets will have to
+        override this method.
         """
         rset = self.cw_rset
         if rset is None:
@@ -180,40 +284,14 @@
                    self._cw.property_value('navigation.short-line-size')))
 
 
-class MetaDataView(EntityView):
-    """paragraph view of some metadata"""
-    __regid__ = 'metadata'
-    show_eid = True
+class InContextTextView(TextView):
+    """:__regid__: *textincontext*
 
-    def cell_call(self, row, col):
-        _ = self._cw._
-        entity = self.cw_rset.get_entity(row, col)
-        self.w(u'<div>')
-        if self.show_eid:
-            self.w(u'%s #%s - ' % (entity.dc_type(), entity.eid))
-        if entity.modification_date != entity.creation_date:
-            self.w(u'<span>%s</span> ' % _('latest update on'))
-            self.w(u'<span class="value">%s</span>, '
-                   % self._cw.format_date(entity.modification_date))
-        # entities from external source may not have a creation date (eg ldap)
-        if entity.creation_date:
-            self.w(u'<span>%s</span> ' % _('created on'))
-            self.w(u'<span class="value">%s</span>'
-                   % self._cw.format_date(entity.creation_date))
-        if entity.creator:
-            if entity.creation_date:
-                self.w(u' <span>%s</span> ' % _('by'))
-            else:
-                self.w(u' <span>%s</span> ' % _('created_by'))
-            self.w(u'<span class="value">%s</span>' % entity.creator.name())
-        meta = entity.cw_metainformation()
-        if meta['source']['uri'] != 'system':
-            self.w(u' (<span>%s</span>' % _('cw_source'))
-            self.w(u' <span class="value">%s</span>)' % meta['source']['uri'])
-        self.w(u'</div>')
-
-
-class InContextTextView(TextView):
+    Similar to the `text` view, but called when an entity is considered in
+    context (see description of incontext HTML view for more information on
+    this). By default it displays what's returned by the `dc_title()` method of
+    the entity.
+    """
     __regid__ = 'textincontext'
     title = None # not listed as a possible view
     def cell_call(self, row, col):
@@ -222,6 +300,13 @@
 
 
 class OutOfContextTextView(InContextTextView):
+    """:__regid__: *textoutofcontext*
+
+    Similar to the `text` view, but called when an entity is considered out of
+    context (see description of outofcontext HTML view for more information on
+    this). By default it displays what's returned by the `dc_long_title()`
+    method of the entity.
+    """
     __regid__ = 'textoutofcontext'
 
     def cell_call(self, row, col):
@@ -229,35 +314,26 @@
         self.w(entity.dc_long_title())
 
 
-class InContextView(EntityView):
-    __regid__ = 'incontext'
-
-    def cell_call(self, row, col):
-        entity = self.cw_rset.get_entity(row, col)
-        desc = cut(entity.dc_description(), 50)
-        self.w(u'<a href="%s" title="%s">' % (
-            xml_escape(entity.absolute_url()), xml_escape(desc)))
-        self.w(xml_escape(self._cw.view('textincontext', self.cw_rset,
-                                        row=row, col=col)))
-        self.w(u'</a>')
-
-
-class OutOfContextView(EntityView):
-    __regid__ = 'outofcontext'
-
-    def cell_call(self, row, col):
-        entity = self.cw_rset.get_entity(row, col)
-        desc = cut(entity.dc_description(), 50)
-        self.w(u'<a href="%s" title="%s">' % (
-            xml_escape(entity.absolute_url()), xml_escape(desc)))
-        self.w(xml_escape(self._cw.view('textoutofcontext', self.cw_rset,
-                                        row=row, col=col)))
-        self.w(u'</a>')
-
-
 # list views ##################################################################
 
 class ListView(EntityView):
+    """:__regid__: *list*
+
+    This view displays a list of entities by creating a HTML list (`<ul>`) and
+    call the view `listitem` for each entity of the result set. The 'list' view
+    will generate HTML like:
+
+    .. sourcecode:: html
+
+      <ul class="section">
+        <li>result of 'subvid' view for a row</li>
+        ...
+      </ul>
+
+    If you wish to use a different view for each entity, either subclass and
+    change the :attr:`item_vid` class attribute or specify a `subvid` argument
+    when calling this view.
+    """
     __regid__ = 'list'
     title = _('list')
     item_vid = 'listitem'
@@ -312,7 +388,21 @@
 
 
 class SimpleListView(ListItemView):
-    """list without bullets"""
+    """:__regid__: *simplelist*
+
+    Similar to :class:`~cubicweb.web.views.baseviews.ListView` but using '<div>'
+    instead of '<ul>'. It relies on '<div>' behaviour to separate items. HTML will
+    look like
+
+    .. sourcecode:: html
+
+      <div class="section">result of 'subvid' view for a row</div>
+      ...
+
+
+    It relies on base :class:`~cubicweb.view.View` class implementation of the
+    :meth:`call` method to insert those <div>.
+    """
     __regid__ = 'simplelist'
     redirect_vid = 'incontext'
 
@@ -330,8 +420,13 @@
 
 
 class SameETypeListView(EntityView):
-    """list of entities of the same type, when asked explicitly for same etype list
-    view (for instance, display gallery if only images)
+    """:__regid__: *sameetypelist*
+
+    This view displays a list of entities of the same type, in HTML section
+    ('<div>') and call the view `sameetypelistitem` for each entity of the
+    result set. It's designed to get a more adapted global list when displayed
+    entities are all of the same type (for instance, display gallery if there
+    are only images entities).
     """
     __regid__ = 'sameetypelist'
     __select__ = EntityView.__select__ & one_etype_rset()
@@ -361,6 +456,11 @@
 
 
 class CSVView(SimpleListView):
+    """:__regid__: *csv*
+
+    This view displays each entity in a comma separated list. It is NOT related
+    to the well-known text file format.
+    """
     __regid__ = 'csv'
     redirect_vid = 'incontext'
 
@@ -377,12 +477,48 @@
                 self.w(u", ")
 
 
+# XXX to be documented views ###################################################
+
+class MetaDataView(EntityView):
+    """paragraph view of some metadata"""
+    __regid__ = 'metadata'
+    show_eid = True
+
+    def cell_call(self, row, col):
+        _ = self._cw._
+        entity = self.cw_rset.get_entity(row, col)
+        self.w(u'<div>')
+        if self.show_eid:
+            self.w(u'%s #%s - ' % (entity.dc_type(), entity.eid))
+        if entity.modification_date != entity.creation_date:
+            self.w(u'<span>%s</span> ' % _('latest update on'))
+            self.w(u'<span class="value">%s</span>, '
+                   % self._cw.format_date(entity.modification_date))
+        # entities from external source may not have a creation date (eg ldap)
+        if entity.creation_date:
+            self.w(u'<span>%s</span> ' % _('created on'))
+            self.w(u'<span class="value">%s</span>'
+                   % self._cw.format_date(entity.creation_date))
+        if entity.creator:
+            if entity.creation_date:
+                self.w(u' <span>%s</span> ' % _('by'))
+            else:
+                self.w(u' <span>%s</span> ' % _('created_by'))
+            self.w(u'<span class="value">%s</span>' % entity.creator.name())
+        meta = entity.cw_metainformation()
+        if meta['source']['uri'] != 'system':
+            self.w(u' (<span>%s</span>' % _('cw_source'))
+            self.w(u' <span class="value">%s</span>)' % meta['source']['uri'])
+        self.w(u'</div>')
+
+
 class TreeItemView(ListItemView):
     __regid__ = 'treeitem'
 
     def cell_call(self, row, col):
         self.wview('incontext', self.cw_rset, row=row, col=col)
 
+
 class TextSearchResultView(EntityView):
     """this view is used to display full-text search
 
@@ -405,7 +541,7 @@
                 value = xml_escape(entity.printable_value(attr, format='text/plain').lower())
             except TransformError, ex:
                 continue
-            except:
+            except Exception:
                 continue
             if searched in value:
                 contexts = []
@@ -425,26 +561,6 @@
         self.wview('oneline', self.cw_rset, row=row, col=col)
 
 
-# XXX bw compat
-
-from logilab.common.deprecation import class_moved
-
-try:
-    from cubicweb.web.views.tableview import TableView
-    TableView = class_moved(TableView)
-except ImportError:
-    pass # gae has no tableview module (yet)
-
-from cubicweb.web.views import boxes, xmlrss, primary
-PrimaryView = class_moved(primary.PrimaryView)
-SideBoxView = class_moved(boxes.SideBoxView)
-XmlView = class_moved(xmlrss.XMLView)
-XmlItemView = class_moved(xmlrss.XMLItemView)
-XmlRsetView = class_moved(xmlrss.XMLRsetView)
-RssView = class_moved(xmlrss.RSSView)
-RssItemView = class_moved(xmlrss.RSSItemView)
-
-
 class GroupByView(EntityView):
     """grouped view of a result set. The `group_key` method return the group
     key of an entities (a string or tuple of string).
@@ -452,7 +568,7 @@
     For each group, display a link to entities of this group by generating url
     like <basepath>/<key> or <basepath>/<key item 1>/<key item 2>.
     """
-    __abstrack__ = True
+    __abstract__ = True
     __select__ = EntityView.__select__ & match_kwargs('basepath')
     entity_attribute = None
     reversed = False
@@ -550,3 +666,29 @@
         url = self.index_url(basepath, key, vtitle=vtitle)
         title = self._cw._('archive for %(author)s') % {'author': key}
         return tags.a(label, href=url, title=title)
+
+
+# bw compat ####################################################################
+
+from logilab.common.deprecation import class_moved, class_deprecated
+
+from cubicweb.web.views import boxes, xmlrss, primary, tableview
+PrimaryView = class_moved(primary.PrimaryView)
+SideBoxView = class_moved(boxes.SideBoxView)
+XmlView = class_moved(xmlrss.XMLView)
+XmlItemView = class_moved(xmlrss.XMLItemView)
+XmlRsetView = class_moved(xmlrss.XMLRsetView)
+RssView = class_moved(xmlrss.RSSView)
+RssItemView = class_moved(xmlrss.RSSItemView)
+TableView = class_moved(tableview.TableView)
+
+
+class SecondaryView(EntityView):
+    __metaclass__ = class_deprecated
+    __deprecation_warning__ = '[3.9] the secondary view is deprecated, use one of oneline/incontext/outofcontext'
+    __regid__ = 'secondary'
+
+    def cell_call(self, row, col, **kwargs):
+        entity = self.cw_rset.get_entity(row, col)
+        self.w(u'&#160;')
+        self.wview('oneline', self.cw_rset, row=row, col=col)
--- a/web/views/boxes.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/views/boxes.py	Fri Dec 09 12:08:27 2011 +0100
@@ -185,7 +185,7 @@
 
     def render_body(self, w):
         for category, views in box.sort_by_category(self.views):
-            menu = htmlwidgets.BoxMenu(self._cw._(category))
+            menu = htmlwidgets.BoxMenu(self._cw._(category), ident=category)
             for view in views:
                 menu.append(self.action_link(view))
             self.append(menu)
--- a/web/views/cwproperties.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/views/cwproperties.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -102,8 +102,7 @@
         cookiename = self._cookie_name(group)
         cookie = cookies.get(cookiename)
         if cookie is None:
-            cookies[cookiename] = default
-            self._cw.set_cookie(cookies, cookiename, maxage=None)
+            self._cw.set_cookie(cookiename, default, maxage=None)
             status = default
         else:
             status = cookie.value
@@ -248,7 +247,7 @@
         | (one_line_rset() & match_user_groups('managers') & is_instance('CWUser'))
         )
 
-    title = _('preferences')
+    title = _('user preferences')
 
     @property
     def user(self):
@@ -303,6 +302,7 @@
 
     def render(self, form, renderer):
         wdg = self.get_widget(form)
+        # pylint: disable=E1101
         wdg.attrs['tabindex'] = form._cw.next_tabindex()
         wdg.attrs['onchange'] = "javascript:setPropValueWidget('%s', %s)" % (
             form.edited_entity.eid, form._cw.next_tabindex())
@@ -338,7 +338,7 @@
         try:
             pdef = form._cw.vreg.property_info(entity.pkey)
         except UnknownProperty, ex:
-            self.warning('%s (you should probably delete that property '
+            form.warning('%s (you should probably delete that property '
                          'from the database)', ex)
             msg = form._cw._('you should probably delete that property')
             self.widget = NotEditableWidget(entity.printable_value('value'),
--- a/web/views/cwsources.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/views/cwsources.py	Fri Dec 09 12:08:27 2011 +0100
@@ -24,27 +24,40 @@
 
 from itertools import repeat, chain
 
+from cubicweb import Unauthorized
 from cubicweb.selectors import is_instance, score_entity, match_user_groups
 from cubicweb.view import EntityView, StartupView
 from cubicweb.schema import META_RTYPES, VIRTUAL_RTYPES, display_name
 from cubicweb.web import uicfg, formwidgets as wdgs
-from cubicweb.web.views import tabs, actions
+from cubicweb.web.views import tabs, actions, ibreadcrumbs, add_etype_button
 
 
 _abaa = uicfg.actionbox_appearsin_addmenu
+# there are explicit 'add' buttons for those
 _abaa.tag_object_of(('CWSourceSchemaConfig', 'cw_schema', '*'), False)
 _abaa.tag_object_of(('CWSourceSchemaConfig', 'cw_for_source', '*'), False)
+_abaa.tag_object_of(('CWSourceSchemaConfig', 'cw_host_config_of', '*'), False)
 
 _afs = uicfg.autoform_section
-_afs.tag_attribute(('CWSource', 'synchronizing'), 'main', 'hidden')
 _afs.tag_object_of(('*', 'cw_for_source', 'CWSource'), 'main', 'hidden')
+
 _affk = uicfg.autoform_field_kwargs
 _affk.tag_attribute(('CWSource', 'parser'), {'widget': wdgs.TextInput})
 
 # source primary views #########################################################
 
 _pvs = uicfg.primaryview_section
+_pvs.tag_attribute(('CWSource', 'name'), 'hidden')
 _pvs.tag_object_of(('*', 'cw_for_source', 'CWSource'), 'hidden')
+_pvs.tag_object_of(('*', 'cw_host_config_of', 'CWSource'), 'hidden')
+
+_pvdc = uicfg.primaryview_display_ctrl
+_pvdc.tag_attribute(('CWSource', 'type'), {'vid': 'attribute'})# disable reledit
+
+_rc = uicfg.reledit_ctrl
+_rc.tag_attribute(('CWSource', 'config'), {'rvid': 'verbatimattr'})
+_rc.tag_attribute(('CWSourceHostConfig', 'config'), {'rvid': 'verbatimattr'})
+_rc.tag_attribute(('CWSourceSchemaConfig', 'options'), {'rvid': 'verbatimattr'})
 
 
 class CWSourcePrimaryView(tabs.TabbedPrimaryView):
@@ -57,6 +70,23 @@
     __regid__ = 'cwsource-main'
     __select__ = tabs.PrimaryTab.__select__ & is_instance('CWSource')
 
+    def render_entity_attributes(self, entity):
+        super(CWSourceMainTab, self).render_entity_attributes(entity)
+        self.w(add_etype_button(self._cw, 'CWSourceHostConfig',
+                                __linkto='cw_host_config_of:%s:subject' % entity.eid,
+                                __redirectpath=entity.rest_path()))
+        try:
+            hostconfig = self._cw.execute(
+                'Any X, XC, XH WHERE X cw_host_config_of S, S eid %(s)s, '
+                'X config XC, X match_host XH', {'s': entity.eid})
+        except Unauthorized:
+            pass
+        else:
+            if hostconfig:
+                self.w(u'<h3>%s</h3>' % self._cw._('CWSourceHostConfig_plural'))
+                self._cw.view('editable-table', hostconfig,
+                              displaycols=range(2), w=self.w)
+
 
 MAPPED_SOURCE_TYPES = set( ('pyrorql', 'datafeed') )
 
@@ -69,12 +99,9 @@
     def entity_call(self, entity):
         _ = self._cw._
         self.w('<h3>%s</h3>' % _('Entity and relation supported by this source'))
-        eschema = self._cw.vreg.schema.eschema('CWSourceSchemaConfig')
-        if eschema.has_perm(self._cw, 'add'):
-            self.w(u'<a href="%s" class="addButton right">%s</a>' % (
-                self._cw.build_url('add/%s' % eschema),
-                self._cw._('add a CWSourceSchemaConfig')))
-            self.w(u'<div class="clear"></div>')
+        self.w(add_etype_button(self._cw, 'CWSourceSchemaConfig',
+                                __linkto='cw_for_source:%s:subject' % entity.eid))
+        self.w(u'<div class="clear"></div>')
         rset = self._cw.execute(
             'Any X, SCH, XO ORDERBY ET WHERE X options XO, X cw_for_source S, S eid %(s)s, '
             'X cw_schema SCH, SCH is ET', {'s': entity.eid})
@@ -96,11 +123,11 @@
         checker = MAPPING_CHECKERS.get(entity.type, MappingChecker)(entity)
         checker.check()
         if (checker.errors or checker.warnings or checker.infos):
-                self.w('<h2>%s</h2>' % _('Detected problems'))
-                errors = zip(repeat(_('error')), checker.errors)
-                warnings = zip(repeat(_('warning')), checker.warnings)
-                infos = zip(repeat(_('warning')), checker.infos)
-                self.wview('pyvaltable', pyvalue=chain(errors, warnings, infos))
+            self.w('<h2>%s</h2>' % _('Detected problems'))
+            errors = zip(repeat(_('error')), checker.errors)
+            warnings = zip(repeat(_('warning')), checker.warnings)
+            infos = zip(repeat(_('warning')), checker.infos)
+            self.wview('pyvaltable', pyvalue=chain(errors, warnings, infos))
 
 
 class MappingChecker(object):
@@ -229,15 +256,20 @@
 
 class CWSourceManagementView(StartupView):
     __regid__ = 'cw.source-management'
-    rql = ('Any S, ST, SN ORDERBY SN WHERE S is CWSource, S name SN, S type ST')
+    rql = ('Any S, ST, SP, SD, SN ORDERBY SN WHERE S is CWSource, S name SN, S type ST, '
+           'S latest_retrieval SD, S parser SP')
     title = _('data sources management')
 
     def call(self, **kwargs):
         self.w('<h1>%s</h1>' % self._cw._(self.title))
-        eschema = self._cw.vreg.schema.eschema('CWSource')
-        if eschema.has_perm(self._cw, 'add'):
-            self.w(u'<a href="%s" class="addButton right">%s</a>' % (
-                self._cw.build_url('add/%s' % eschema),
-                self._cw._('add a CWSource')))
-            self.w(u'<div class="clear"></div>')
-        self.wview('table', self._cw.execute(self.rql), displaycols=range(2))
+        self.w(add_etype_button(self._cw, 'CWSource'))
+        self.w(u'<div class="clear"></div>')
+        self.wview('table', self._cw.execute(self.rql), displaycols=range(4))
+
+
+# breadcrumbs configuration ####################################################
+
+class CWsourceConfigIBreadCrumbsAdapter(ibreadcrumbs.IBreadCrumbsAdapter):
+    __select__ = is_instance('CWSourceHostConfig', 'CWSourceSchemaConfig')
+    def parent_entity(self):
+        return self.entity.cwsource
--- a/web/views/cwuser.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/views/cwuser.py	Fri Dec 09 12:08:27 2011 +0100
@@ -20,7 +20,7 @@
 __docformat__ = "restructuredtext en"
 _ = unicode
 
-import hashlib
+from hashlib import sha1 # pylint: disable=E0611
 
 from logilab.mtconverter import xml_escape
 
@@ -29,7 +29,7 @@
 from cubicweb.selectors import one_line_rset, is_instance, match_user_groups
 from cubicweb.view import EntityView, StartupView
 from cubicweb.web import action, uicfg, formwidgets
-from cubicweb.web.views import tabs, tableview, actions
+from cubicweb.web.views import tabs, tableview, actions, add_etype_button
 
 _pvs = uicfg.primaryview_section
 _pvs.tag_attribute(('CWUser', 'login'), 'hidden')
@@ -86,7 +86,7 @@
         emailaddr = entity.cw_adapt_to('IEmailable').get_email()
         if emailaddr:
             self.w(u'<foaf:mbox_sha1sum>%s</foaf:mbox_sha1sum>\n'
-                   % hashlib.sha1(emailaddr.encode('utf-8')).hexdigest())
+                   % sha1(emailaddr.encode('utf-8')).hexdigest())
         self.w(u'</foaf:Person>\n')
 
 
@@ -168,21 +168,22 @@
 
 class CWUserManagementView(StartupView):
     __regid__ = 'cw.user-management'
+    # XXX one could wish to display for instance only user's firstname/surname
+    # for non managers but filtering out NULL cause crash with an ldapuser
+    # source.
+    __select__ = StartupView.__select__ & match_user_groups('managers')
     rql = ('Any U,USN,F,S,U,UAA,UDS, L,UAA,UDSN ORDERBY L WHERE U is CWUser, '
            'U login L, U firstname F, U surname S, '
            'U in_state US, US name USN, '
            'U primary_email UA?, UA address UAA, '
            'U cw_source UDS, US name UDSN')
     title = _('users and groups management')
+    cache_max_age = 0 # disable caching
 
     def call(self, **kwargs):
         self.w('<h1>%s</h1>' % self._cw._(self.title))
-        for etype in ('CWUser', 'CWGroup'):
-            eschema = self._cw.vreg.schema.eschema(etype)
-            if eschema.has_perm(self._cw, 'add'):
-                self.w(u'<a href="%s" class="addButton right">%s</a>' % (
-                    self._cw.build_url('add/%s' % eschema),
-                    self._cw.__('New %s' % etype).capitalize()))
+        self.w(add_etype_button(self._cw, 'CWUser'))
+        self.w(add_etype_button(self._cw, 'CWGroup'))
         self.w(u'<div class="clear"></div>')
         self.wview('cw.user-table', self._cw.execute(self.rql))
 
--- a/web/views/editcontroller.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/views/editcontroller.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -23,8 +23,6 @@
 
 from rql.utils import rqlvar_maker
 
-from logilab.common.textutils import splitstrip
-
 from cubicweb import Binary, ValidationError, typed_eid
 from cubicweb.view import EntityAdapter, implements_adapter_compat
 from cubicweb.selectors import is_instance
@@ -190,23 +188,18 @@
             formid = 'edition'
         form = self._cw.vreg['forms'].select(formid, self._cw, entity=entity)
         eid = form.actual_eid(entity.eid)
-        form.formvalues = {} # init fields value cache
         try:
-            editedfields = formparams['_cw_edited_fields']
+            editedfields = formparams['_cw_entity_fields']
         except KeyError:
-            raise RequestError(self._cw._('no edited fields specified for entity %s' % entity.eid))
-        for editedfield in splitstrip(editedfields):
             try:
-                name, role = editedfield.split('-')
-            except:
-                name = editedfield
-                role = None
-            if form.field_by_name.im_func.func_code.co_argcount == 4: # XXX
-                field = form.field_by_name(name, role, eschema=entity.e_schema)
-            else:
-                field = form.field_by_name(name, role)
-            if field.has_been_modified(form):
-                self.handle_formfield(form, field, rqlquery)
+                editedfields = formparams['_cw_edited_fields']
+                warn('[3.13] _cw_edited_fields has been renamed _cw_entity_fields',
+                     DeprecationWarning)
+            except KeyError:
+                raise RequestError(self._cw._('no edited fields specified for entity %s' % entity.eid))
+        form.formvalues = {} # init fields value cache
+        for field in form.iter_modified_fields(editedfields, entity):
+            self.handle_formfield(form, field, rqlquery)
         if self.errors:
             errors = dict((f.role_name(), unicode(ex)) for f, ex in self.errors)
             raise ValidationError(valerror_eid(entity.eid), errors)
--- a/web/views/emailaddress.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/views/emailaddress.py	Fri Dec 09 12:08:27 2011 +0100
@@ -15,9 +15,8 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""Specific views for email addresses entities
+"""Specific views for email addresses entities"""
 
-"""
 __docformat__ = "restructuredtext en"
 
 from logilab.mtconverter import xml_escape
@@ -85,8 +84,7 @@
 class EmailAddressOneLineView(baseviews.OneLineView):
     __select__ = is_instance('EmailAddress')
 
-    def cell_call(self, row, col, **kwargs):
-        entity = self.cw_rset.get_entity(row, col)
+    def entity_call(self, entity, **kwargs):
         if entity.reverse_primary_email:
             self.w(u'<b>')
         if entity.alias:
@@ -106,8 +104,7 @@
     __regid__ = 'mailto'
     __select__ = is_instance('EmailAddress')
 
-    def cell_call(self, row, col, **kwargs):
-        entity = self.cw_rset.get_entity(row, col)
+    def entity_call(self, entity, **kwargs):
         if entity.reverse_primary_email:
             self.w(u'<b>')
         if entity.alias:
@@ -130,7 +127,10 @@
     __select__ = is_instance('EmailAddress')
 
     def cell_call(self, row, col, **kwargs):
-        self.wview('mailto', self.cw_rset, row=row, col=col, **kwargs)
+        if self._cw.vreg.config['mangle-emails']:
+            self.wview('oneline', self.cw_rset, row=row, col=col, **kwargs)
+        else:
+            self.wview('mailto', self.cw_rset, row=row, col=col, **kwargs)
 
 
 class EmailAddressTextView(baseviews.TextView):
--- a/web/views/facets.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/views/facets.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -26,37 +26,159 @@
 from cubicweb.selectors import (non_final_entity, multi_lines_rset,
                                 match_context_prop, yes, relation_possible)
 from cubicweb.utils import json_dumps
-from cubicweb.web import component, facet
+from cubicweb.web import component, facet as facetbase
+
+def facets(req, rset, context, mainvar=None):
+    """return the base rql and a list of widgets for facets applying to the
+    given rset/context (cached version)
+    """
+    try:
+        cache = req.__rset_facets
+    except AttributeError:
+        cache = req.__rset_facets = {}
+    try:
+        return cache[(rset, context, mainvar)]
+    except KeyError:
+        facets = _facets(req, rset, context, mainvar)
+        cache[(rset, context, mainvar)] = facets
+        return facets
+
+def _facets(req, rset, context, mainvar):
+    """return the base rql and a list of widgets for facets applying to the
+    given rset/context
+    """
+    # XXX done by selectors, though maybe necessary when rset has been hijacked
+    # (e.g. contextview_selector matched)
+    origqlst = rset.syntax_tree()
+    # union not yet supported
+    if len(origqlst.children) != 1:
+        return None, ()
+    rqlst = origqlst.copy()
+    select = rqlst.children[0]
+    filtered_variable, baserql = facetbase.init_facets(rset, select, mainvar)
+    wdgs = [(facet, facet.get_widget()) for facet in req.vreg['facets'].poss_visible_objects(
+        req, rset=rset, rqlst=origqlst, select=select, context=context,
+        filtered_variable=filtered_variable)]
+    return baserql, [wdg for facet, wdg in wdgs if wdg is not None]
+
 
 @objectify_selector
 def contextview_selector(cls, req, rset=None, row=None, col=None, view=None,
                          **kwargs):
-    if view and getattr(view, 'filter_box_context_info', lambda: None)():
-        return 1
+    if view:
+        try:
+            getcontext = getattr(view, 'filter_box_context_info')
+        except AttributeError:
+            return 0
+        rset = getcontext()[0]
+        if rset is None or rset.rowcount < 2:
+            return 0
+        wdgs = facets(req, rset, cls.__regid__)[1]
+        return len(wdgs)
     return 0
 
+@objectify_selector
+def has_facets(cls, req, rset=None, mainvar=None, **kwargs):
+    if rset is None or rset.rowcount < 2:
+        return 0
+    wdgs = facets(req, rset, cls.__regid__, mainvar)[1]
+    return len(wdgs)
+
+
+def filter_hiddens(w, baserql, wdgs, **kwargs):
+    kwargs['facets'] = ','.join(wdg.facet.__regid__ for wdg in wdgs)
+    kwargs['baserql'] = baserql
+    for key, val in kwargs.items():
+        w(u'<input type="hidden" name="%s" value="%s" />' % (
+            key, xml_escape(val)))
+
 
-class FilterBox(component.CtxComponent):
+class FacetFilterMixIn(object):
+    needs_js = ['cubicweb.ajax.js', 'cubicweb.facets.js']
+    needs_css = ['cubicweb.facets.css']
+    roundcorners = True
+
+    def generate_form(self, w, rset, divid, vid, vidargs,
+                      paginate=False, cssclass='', **hiddens):
+        """display a form to filter some view's content"""
+        mainvar = self.cw_extra_kwargs.get('mainvar')
+        baserql, wdgs = facets(self._cw, rset, self.__regid__, mainvar)
+        assert wdgs
+        self._cw.add_js(self.needs_js)
+        self._cw.add_css(self.needs_css)
+        self._cw.html_headers.define_var('facetLoadingMsg',
+                                         self._cw._('facet-loading-msg'))
+        if self.roundcorners:
+            self._cw.html_headers.add_onload(
+                'jQuery(".facet").corner("tl br 10px");')
+        # drop False / None values from vidargs
+        vidargs = dict((k, v) for k, v in vidargs.iteritems() if v)
+        facetargs = xml_escape(json_dumps([divid, vid, paginate, vidargs]))
+        w(u'<form id="%sForm" class="%s" method="post" action="" '
+          'cubicweb:facetargs="%s" >' % (divid, cssclass, facetargs))
+        w(u'<fieldset>')
+        if mainvar:
+            hiddens['mainvar'] = mainvar
+        filter_hiddens(w, baserql, wdgs, **hiddens)
+        self.layout_widgets(w, self.sorted_widgets(wdgs))
+
+        # <Enter> is supposed to submit the form only if there is a single
+        # input:text field. However most browsers will submit the form
+        # on <Enter> anyway if there is an input:submit field.
+        #
+        # see: http://www.w3.org/MarkUp/html-spec/html-spec_8.html#SEC8.2
+        #
+        # Firefox 7.0.1 does not submit form on <Enter> if there is more than a
+        # input:text field and not input:submit but does it if there is an
+        # input:submit.
+        #
+        # IE 6 or Firefox 2 behave the same way.
+        w(u'<input type="submit" class="hidden" />')
+        #
+        w(u'</fieldset>\n')
+        w(u'</form>\n')
+
+    def sorted_widgets(self, wdgs):
+        """sort widgets: by default sort by widget height, then according to
+        widget.order (the original widgets order)
+        """
+        return sorted(wdgs, key=lambda x: x.height())
+
+    def layout_widgets(self, w, wdgs):
+        """layout widgets: by default simply render each of them
+        (i.e. succession of <div>)
+        """
+        for wdg in wdgs:
+            wdg.render(w=w)
+
+
+class FilterBox(FacetFilterMixIn, component.CtxComponent):
     """filter results of a query"""
-    __regid__ = 'facet.filters'
-    __select__ = ((non_final_entity() & multi_lines_rset())
-                  | contextview_selector())
+    __regid__ = 'facet.filterbox'
+    __select__ = ((non_final_entity() & has_facets())
+                  | contextview_selector()) # can't use has_facets because of
+                                            # contextview mechanism
     context = 'left' # XXX doesn't support 'incontext', only 'left' or 'right'
     title = _('facet.filters')
     visible = True # functionality provided by the search box by default
     order = 1
-    roundcorners = True
-
-    needs_css = 'cubicweb.facets.css'
-    needs_js = ('cubicweb.ajax.js', 'cubicweb.facets.js')
 
     bk_linkbox_template = u'<div class="facetTitle">%s</div>'
 
-    def facetargs(self):
-        """this method returns the list of extra arguments that should
-        be used by the facet
-        """
-        return {}
+    def render_body(self, w, **kwargs):
+        req = self._cw
+        rset, vid, divid, paginate = self._get_context()
+        assert len(rset) > 1
+        if vid is None:
+            vid = req.form.get('vid')
+        if self.bk_linkbox_template and req.vreg.schema['Bookmark'].has_perm(req, 'add'):
+            w(self.bookmark_link(rset))
+        hiddens = {}
+        for param in ('subvid', 'vtitle'):
+            if param in req.form:
+                hiddens[param] = req.form[param]
+        self.generate_form(w, rset, divid, vid, self.vidargs(),
+                           paginate=paginate, **hiddens)
 
     def _get_context(self):
         view = self.cw_extra_kwargs.get('view')
@@ -69,47 +191,6 @@
             paginate = view and view.paginable
         return rset, vid, divid, paginate
 
-    def render(self, w, **kwargs):
-        req = self._cw
-        req.add_js( self.needs_js )
-        req.add_css( self.needs_css)
-        if self.roundcorners:
-            req.html_headers.add_onload('jQuery(".facet").corner("tl br 10px");')
-        rset, vid, divid, paginate = self._get_context()
-        # XXX done by selectors, though maybe necessary when rset has been hijacked
-        if rset.rowcount < 2:
-            return
-        rqlst = rset.syntax_tree()
-        # union not yet supported
-        if len(rqlst.children) != 1:
-            return ()
-        rqlst = rqlst.copy()
-        req.vreg.rqlhelper.annotate(rqlst)
-        mainvar, baserql = facet.prepare_facets_rqlst(rqlst, rset.args)
-        widgets = []
-        for facetobj in self.get_facets(rset, rqlst.children[0], mainvar):
-            wdg = facetobj.get_widget()
-            if wdg is not None:
-                widgets.append(wdg)
-        if not widgets:
-            return
-        if vid is None:
-            vid = req.form.get('vid')
-        if self.bk_linkbox_template and req.vreg.schema['Bookmark'].has_perm(req, 'add'):
-            w(self.bookmark_link(rset))
-        w(u'<form method="post" id="%sForm" cubicweb:facetargs="%s" action="">'  % (
-            divid, xml_escape(json_dumps([divid, vid, paginate, self.facetargs()]))))
-        w(u'<fieldset>')
-        hiddens = {'facets': ','.join(wdg.facet.__regid__ for wdg in widgets),
-                   'baserql': baserql}
-        for param in ('subvid', 'vtitle'):
-            if param in req.form:
-                hiddens[param] = req.form[param]
-        facet.filter_hiddens(w, **hiddens)
-        for wdg in widgets:
-            wdg.render(w=w)
-        w(u'</fieldset>\n</form>\n')
-
     def bookmark_link(self, rset):
         req = self._cw
         bk_path = u'rql=%s' % req.url_quote(rset.printable_rql())
@@ -127,35 +208,94 @@
                 req._('bookmark this search'))
         return self.bk_linkbox_template % bk_link
 
-    def get_facets(self, rset, rqlst, mainvar):
-        return self._cw.vreg['facets'].poss_visible_objects(
-            self._cw, rset=rset, rqlst=rqlst,
-            context='facetbox', filtered_variable=mainvar)
+    def vidargs(self):
+        """this method returns the list of extra arguments that should be used
+        by the filter or the view using it
+        """
+        return {}
+
+
+from cubicweb.view import AnyRsetView
+
+class FilterTable(FacetFilterMixIn, AnyRsetView):
+    __regid__ = 'facet.filtertable'
+    __select__ = has_facets()
+    compact_layout_threshold = 5
+
+    def call(self, vid, divid, vidargs, cssclass=''):
+        self.generate_form(self.w, self.cw_rset, divid, vid, vidargs,
+                           cssclass=cssclass, fromformfilter='1',
+                           # divid=divid XXX
+                           )
+
+    def _simple_horizontal_layout(self, w, wdgs):
+        w(u'<table class="filter">\n')
+        w(u'<tr>\n')
+        for wdg in wdgs:
+            w(u'<td>')
+            wdg.render(w=w)
+            w(u'</td>')
+        w(u'</tr>\n')
+        w(u'</table>\n')
+
+    def layout_widgets(self, w, wdgs):
+        """layout widgets: put them in a table where each column should have
+        sum(wdg.height()) < wdg_stack_size.
+        """
+        if len(wdgs) < self.compact_layout_threshold:
+            self._simple_horizontal_layout(w, wdgs)
+            return
+        w(u'<table class="filter">\n')
+        widget_queue = []
+        queue_height = 0
+        wdg_stack_size = max(wdgs, key=lambda wdg:wdg.height()).height()
+        w(u'<tr>\n')
+        for wdg in wdgs:
+            height = wdg.height()
+            if queue_height + height <= wdg_stack_size:
+                widget_queue.append(wdg)
+                queue_height += height
+                continue
+            w(u'<td>')
+            for queued in widget_queue:
+                queued.render(w=w)
+            w(u'</td>')
+            widget_queue = [wdg]
+            queue_height = height
+        if widget_queue:
+            w(u'<td>')
+            for queued in widget_queue:
+                queued.render(w=w)
+            w(u'</td>')
+        w(u'</tr>\n')
+        w(u'</table>\n')
+
 
 # facets ######################################################################
 
-class CWSourceFacet(facet.RelationFacet):
+class CWSourceFacet(facetbase.RelationFacet):
     __regid__ = 'cw_source-facet'
     rtype = 'cw_source'
     target_attr = 'name'
 
-class CreatedByFacet(facet.RelationFacet):
+class CreatedByFacet(facetbase.RelationFacet):
     __regid__ = 'created_by-facet'
     rtype = 'created_by'
     target_attr = 'login'
 
-class InGroupFacet(facet.RelationFacet):
+class InGroupFacet(facetbase.RelationFacet):
     __regid__ = 'in_group-facet'
     rtype = 'in_group'
     target_attr = 'name'
 
-class InStateFacet(facet.RelationAttributeFacet):
+class InStateFacet(facetbase.RelationAttributeFacet):
     __regid__ = 'in_state-facet'
     rtype = 'in_state'
     target_attr = 'name'
 
+
 # inherit from RelationFacet to benefit from its possible_values implementation
-class ETypeFacet(facet.RelationFacet):
+class ETypeFacet(facetbase.RelationFacet):
     __regid__ = 'etype-facet'
     __select__ = yes()
     order = 1
@@ -177,31 +317,37 @@
         value = self._cw.form.get(self.__regid__)
         if not value:
             return
-        self.rqlst.add_type_restriction(self.filtered_variable, value)
+        self.select.add_type_restriction(self.filtered_variable, value)
 
     def possible_values(self):
         """return a list of possible values (as string since it's used to
         compare to a form value in javascript) for this facet
         """
-        rqlst = self.rqlst
-        rqlst.save_state()
+        select = self.select
+        select.save_state()
         try:
-            facet._cleanup_rqlst(rqlst, self.filtered_variable)
-            etype_var = facet._prepare_vocabulary_rqlst(
-                rqlst, self.filtered_variable, self.rtype, self.role)
-            attrvar = rqlst.make_variable()
-            rqlst.add_selected(attrvar)
-            rqlst.add_relation(etype_var, 'name', attrvar)
-            return [etype for _, etype in self.rqlexec(rqlst.as_string())]
+            facetbase.cleanup_select(select, self.filtered_variable)
+            etype_var = facetbase.prepare_vocabulary_select(
+                select, self.filtered_variable, self.rtype, self.role)
+            attrvar = select.make_variable()
+            select.add_selected(attrvar)
+            select.add_relation(etype_var, 'name', attrvar)
+            return [etype for _, etype in self.rqlexec(select.as_string())]
         finally:
-            rqlst.recover()
+            select.recover()
 
-class HasTextFacet(facet.AbstractFacet):
+
+class HasTextFacet(facetbase.AbstractFacet):
     __select__ = relation_possible('has_text', 'subject') & match_context_prop()
     __regid__ = 'has_text-facet'
     rtype = 'has_text'
     role = 'subject'
     order = 0
+
+    @property
+    def wdgclass(self):
+        return facetbase.FacetStringWidget
+
     @property
     def title(self):
         return self._cw._('has_text')
@@ -212,11 +358,11 @@
         default implentation expects a .vocabulary method on the facet and
         return a combobox displaying this vocabulary
         """
-        return facet.FacetStringWidget(self)
+        return self.wdgclass(self)
 
     def add_rql_restrictions(self):
         """add restriction for this facet into the rql syntax tree"""
         value = self._cw.form.get(self.__regid__)
         if not value:
             return
-        self.rqlst.add_constant_restriction(self.filtered_variable, 'has_text', value, 'String')
+        self.select.add_constant_restriction(self.filtered_variable, 'has_text', value, 'String')
--- a/web/views/formrenderers.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/views/formrenderers.py	Fri Dec 09 12:08:27 2011 +0100
@@ -37,7 +37,6 @@
 
 from warnings import warn
 
-from logilab.common import dictattr
 from logilab.mtconverter import xml_escape
 
 from cubicweb import tags, uilib
@@ -193,7 +192,7 @@
         if form.domid:
             attrs.setdefault('id', form.domid)
         if form.onsubmit:
-            attrs.setdefault('onsubmit',  form.onsubmit % dictattr(form))
+            attrs.setdefault('onsubmit',  form.onsubmit)
         if form.cssstyle:
             attrs.setdefault('style', form.cssstyle)
         if form.cssclass:
@@ -335,6 +334,43 @@
         pass
 
 
+class OneRowTableFormRenderer(FormRenderer):
+    """The 'htable' form renderer display fields horizontally in a table:
+
+    +--------------+--------------+--------------+--------------+---------+
+    | field1 label | field1 input | field2 label | field2 input | buttons |
+    +--------------+--------------+--------------+--------------+---------+
+    """
+    __regid__ = 'onerowtable'
+
+    display_help = False
+    def _render_fields(self, fields, w, form):
+        w(u'<table border="0" class="oneRowTableForm">')
+        w(u'<tr>')
+        for field in fields:
+            if self.display_label:
+                w(u'<th class="labelCol">%s</th>' % self.render_label(form, field))
+            if self.display_help:
+                w(self.render_help(form, field))
+            error = form.field_error(field)
+            if error:
+                w(u'<td class="error">')
+                self.render_error(w, error)
+            else:
+                w(u'<td>')
+            w(field.render(form, self))
+            w(u'</td>')
+        w(u'<td>')
+        for button in form.form_buttons:
+            w(button.render(form))
+        w(u'</td>')
+        w(u'</tr>')
+        w(u'</table>')
+
+    def render_buttons(self, w, form):
+        pass
+
+
 class EntityCompositeFormRenderer(FormRenderer):
     """This is a specific renderer for the multiple entities edition form
     ('muledit').
--- a/web/views/forms.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/views/forms.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -41,17 +41,21 @@
 
 but you'll use this one rarely.
 """
+from __future__ import with_statement
 __docformat__ = "restructuredtext en"
 
 from warnings import warn
 
+from logilab.common import dictattr, tempattr
 from logilab.common.decorators import iclassmethod
 from logilab.common.compat import any
+from logilab.common.textutils import splitstrip
 from logilab.common.deprecation import deprecated
 
-from cubicweb import typed_eid
+from cubicweb import ValidationError, typed_eid
 from cubicweb.utils import support_args
 from cubicweb.selectors import non_final_entity, match_kwargs, one_line_rset
+from cubicweb.web import RequestError, ProcessFormError
 from cubicweb.web import uicfg, form, formwidgets as fwdgs
 from cubicweb.web.formfields import relvoc_unrelated, guess_field
 
@@ -124,6 +128,23 @@
 
     .. automethod:: cubicweb.web.views.forms.FieldsForm.render
 
+    **Form posting methods**
+
+    Once a form is posted, you can retrieve the form on the controller side and
+    use the following methods to ease processing. For "simple" forms, this
+    should looks like :
+
+    .. sourcecode :: python
+
+        form = self._cw.vreg['forms'].select('myformid', self._cw)
+        posted = form.process_posted()
+        # do something with the returned dictionary
+
+    Notice that form related to entity edition should usually use the
+    `edit` controller which will handle all the logic for you.
+
+    .. automethod:: cubicweb.web.views.forms.FieldsForm.process_posted
+    .. automethod:: cubicweb.web.views.forms.FieldsForm.iter_modified_fields
     """
     __regid__ = 'base'
 
@@ -132,7 +153,6 @@
     needs_js = ('cubicweb.ajax.js', 'cubicweb.edition.js',)
     needs_css = ('cubicweb.form.css',)
     action = None
-    onsubmit = "return freezeFormButtons('%(domid)s');"
     cssclass = None
     cssstyle = None
     cwtarget = None
@@ -146,6 +166,15 @@
         """true if the form needs enctype=multipart/form-data"""
         return any(field.needs_multipart for field in self.fields)
 
+    def _get_onsubmit(self):
+        try:
+            return self._onsubmit
+        except AttributeError:
+            return "return freezeFormButtons('%(domid)s');" % dictattr(self)
+    def _set_onsubmit(self, value):
+        self._onsubmit = value
+    onsubmit = property(_get_onsubmit, _set_onsubmit)
+
     def add_media(self):
         """adds media (CSS & JS) required by this widget"""
         if self.needs_js:
@@ -209,6 +238,19 @@
         for field in self.fields[:]:
             for field in field.actual_fields(self):
                 field.form_init(self)
+        # store used field in an hidden input for later usage by a controller
+        fields = set()
+        eidfields = set()
+        for field in self.fields:
+            if field.eidparam:
+                eidfields.add(field.role_name())
+            elif field.name not in self.control_fields:
+                fields.add(field.role_name())
+        if fields:
+            self.add_hidden('_cw_fields', u','.join(fields))
+        if eidfields:
+            self.add_hidden('_cw_entity_fields', u','.join(eidfields),
+                            eidparam=True)
 
     _default_form_action_path = 'edit'
     def form_action(self):
@@ -220,6 +262,50 @@
             return self._cw.build_url(self._default_form_action_path)
         return action
 
+    # controller form processing methods #######################################
+
+    def iter_modified_fields(self, editedfields=None, entity=None):
+        """return a generator on field that has been modified by the posted
+        form.
+        """
+        if editedfields is None:
+            try:
+                editedfields = self._cw.form['_cw_fields']
+            except KeyError:
+                raise RequestError(self._cw._('no edited fields specified'))
+        entityform = entity and self.field_by_name.im_func.func_code.co_argcount == 4 # XXX
+        for editedfield in splitstrip(editedfields):
+            try:
+                name, role = editedfield.split('-')
+            except Exception:
+                name = editedfield
+                role = None
+            if entityform:
+                field = self.field_by_name(name, role, eschema=entity.e_schema)
+            else:
+                field = self.field_by_name(name, role)
+            if field.has_been_modified(self):
+                yield field
+
+    def process_posted(self):
+        """use this method to process the content posted by a simple form.  it
+        will return a dictionary with field names as key and typed value as
+        associated value.
+        """
+        with tempattr(self, 'formvalues', {}): # init fields value cache
+            errors = []
+            processed = {}
+            for field in self.iter_modified_fields():
+                try:
+                    for field, value in field.process_posted(self):
+                        processed[field.role_name()] = value
+                except ProcessFormError, exc:
+                    errors.append((field, exc))
+            if errors:
+                errors = dict((f.role_name(), unicode(ex)) for f, ex in errors)
+                raise ValidationError(None, errors)
+            return processed
+
     @deprecated('[3.6] use .add_hidden(name, value, **kwargs)')
     def form_add_hidden(self, name, value=None, **kwargs):
         return self.add_hidden(name, value, **kwargs)
@@ -298,7 +384,8 @@
                 else:
                     msg = self._cw._('entity linked')
         if msg:
-            self.add_hidden('__message', msg)
+            msgid = self._cw.set_redirect_message(msg)
+            self.add_hidden('_cwmsgid', msgid)
 
     def session_key(self):
         """return the key that may be used to store / retreive data about a
@@ -314,16 +401,6 @@
         # different url after a validation error
         return '%s#%s' % (self._cw.url(), self.domid)
 
-    def build_context(self, formvalues=None):
-        if self.formvalues is not None:
-            return # already built
-        super(EntityFieldsForm, self).build_context(formvalues)
-        edited = set()
-        for field in self.fields:
-            if field.eidparam:
-                edited.add(field.role_name())
-        self.add_hidden('_cw_edited_fields', u','.join(edited), eidparam=True)
-
     def default_renderer(self):
         return self._cw.vreg['formrenderers'].select(
             self.form_renderer_id, self._cw, rset=self.cw_rset, row=self.cw_row,
--- a/web/views/ibreadcrumbs.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/views/ibreadcrumbs.py	Fri Dec 09 12:08:27 2011 +0100
@@ -159,7 +159,7 @@
                 xml_escape(url), xml_escape(uilib.cut(title, textsize))))
         else:
             textsize = self._cw.property_value('navigation.short-line-size')
-            w(uilib.cut(unicode(part), textsize))
+            w(xml_escape(uilib.cut(unicode(part), textsize)))
 
 
 class BreadCrumbETypeVComponent(BreadCrumbEntityVComponent):
--- a/web/views/idownloadable.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/views/idownloadable.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -75,7 +75,7 @@
 
 class DownloadView(EntityView):
     """download view
-    
+
     this view is replacing the deprecated 'download' controller and allow
     downloading of entities providing the necessary interface
     """
@@ -194,7 +194,7 @@
     def cell_call(self, row, col, link=False, **kwargs):
         entity = self.cw_rset.get_entity(row, col)
         adapter = entity.cw_adapt_to('IDownloadable')
-        tag = self._embedding_tag(src=adapter.download_url(),
+        tag = self._embedding_tag(src=adapter.download_url(), # pylint: disable=E1102
                                   alt=(self._cw._('download %s') % adapter.download_file_name()),
                                   **kwargs)
         if link:
--- a/web/views/magicsearch.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/views/magicsearch.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -387,7 +387,7 @@
             procname, query = uquery.split(':', 1)
             proc = self.by_name[procname.strip().lower()]
             uquery = query.strip()
-        except:
+        except Exception:
             # use processor chain
             unauthorized = None
             for proc in self.processors:
--- a/web/views/navigation.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/views/navigation.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -40,10 +40,10 @@
         self.clean_params(params)
         basepath = self._cw.relative_path(includeparams=False)
         self.w(u'<div class="pagination">')
-        self.w(u'%s&#160;' % self.previous_link(basepath, params))
+        self.w(self.previous_link(basepath, params))
         self.w(u'[&#160;%s&#160;]' %
                u'&#160;| '.join(self.iter_page_links(basepath, params)))
-        self.w(u'&#160;%s' % self.next_link(basepath, params))
+        self.w(u'&#160;&#160;%s' % self.next_link(basepath, params))
         self.w(u'</div>')
 
     def index_display(self, start, stop):
@@ -74,12 +74,12 @@
         basepath = self._cw.relative_path(includeparams=False)
         w = self.w
         w(u'<div class="pagination">')
-        w(u'%s&#160;' % self.previous_link(basepath, params))
+        w(self.previous_link(basepath, params))
         w(u'<select onchange="javascript: document.location=this.options[this.selectedIndex].value">')
         for option in self.iter_page_links(basepath, params):
             w(option)
         w(u'</select>')
-        w(u'&#160;%s' % self.next_link(basepath, params))
+        w(u'&#160;&#160;%s' % self.next_link(basepath, params))
         w(u'</div>')
 
 
@@ -211,6 +211,14 @@
     context = 'navbottom'
     order = 10
 
+    @property
+    def prev_icon(self):
+        return '<img src="%s"/>' % xml_escape(self._cw.data_url('go_prev.png'))
+
+    @property
+    def next_icon(self):
+        return '<img src="%s"/>' % xml_escape(self._cw.data_url('go_next.png'))
+
     def init_rendering(self):
         adapter = self.entity.cw_adapt_to('IPrevNext')
         self.previous = adapter.previous_entity()
@@ -232,16 +240,19 @@
 
     def prevnext_entity(self, w, entity, type):
         textsize = self._cw.property_value('navigation.short-line-size')
+        content = xml_escape(cut(entity.dc_title(), textsize))
         if type == 'prev':
             title = self._cw._('i18nprevnext_previous')
-            icon = u'&lt;&lt; '
+            icon = self.prev_icon
             cssclass = u'previousEntity left'
+            content = icon + content
         else:
             title = self._cw._('i18nprevnext_next')
-            icon = u'&gt;&gt; '
+            icon = self.next_icon
             cssclass = u'nextEntity right'
+            content = content + '&#160;&#160;' + icon
         self.prevnext_div(w, type, cssclass, entity.absolute_url(),
-                          title, icon + xml_escape(cut(entity.dc_title(), textsize)))
+                          title, content)
 
     def prevnext_div(self, w, type, cssclass, url, title, content):
         w(u'<div class="%s">' % cssclass)
@@ -277,7 +288,7 @@
             basepath = req.relative_path(includeparams=False)
             params['__force_display'] = 1
             url = nav.page_url(basepath, params)
-            w(u'<div><a href="%s">%s</a></div>\n'
+            w(u'<div class="displayAllLink"><a href="%s">%s</a></div>\n'
               % (xml_escape(url), req._('show %s results') % len(rset)))
         rset.limit(offset=start, limit=stop-start, inplace=True)
 
--- a/web/views/owl.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/views/owl.py	Fri Dec 09 12:08:27 2011 +0100
@@ -40,6 +40,7 @@
 
                 'Boolean': 'xsd:boolean',
                 'Int': 'xsd:int',
+                'BigInt': 'xsd:int',
                 'Float': 'xsd:float',
                 'Decimal' : 'xsd:decimal',
 
--- a/web/views/plots.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/views/plots.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -33,14 +33,14 @@
     """accept result set with at least one line and two columns of result
     all columns after second must be of numerical types"""
     for etype in rset.description[0]:
-        if etype not in ('Int', 'Float'):
+        if etype not in ('Int', 'BigInt', 'Float'):
             return 0
     return 1
 
 @objectify_selector
 def second_column_is_number(cls, req, rset=None, *args, **kwargs):
     etype = rset.description[0][1]
-    if etype not  in ('Int', 'Float'):
+    if etype not  in ('Int', 'BigInt', 'Float'):
         return 0
     return 1
 
@@ -50,7 +50,7 @@
     if etypes[0] not in ('Date', 'Datetime', 'TZDatetime'):
         return 0
     for etype in etypes[1:]:
-        if etype not in ('Int', 'Float'):
+        if etype not in ('Int', 'BigInt', 'Float'):
             return 0
     return 1
 
@@ -79,6 +79,9 @@
         if w is None:
             return self._stream.getvalue()
 
+    def _render(self, *args, **kwargs):
+        raise NotImplementedError
+
 class FlotPlotWidget(PlotWidget):
     """PlotRenderer widget using Flot"""
     onload = u"""
--- a/web/views/primary.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/views/primary.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -15,7 +15,27 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""The default primary view"""
+"""
+Public API of the PrimaryView class
+````````````````````````````````````
+.. autoclass:: cubicweb.web.views.primary.PrimaryView
+
+Views that may be used to display an entity's attribute or relation
+```````````````````````````````````````````````````````````````````
+
+You may easily customize the display of an attribute or relation by simply
+configuring the view using one of `primaryview_display_ctrl` or `reledit_ctrl`
+to use one of the views described below. For instance:
+
+.. sourcecode:: python
+
+    primaryview_display_ctrl.tag_attribute(('Foo', 'bar'), {'vid': 'attribute'})
+
+
+.. autoclass:: AttributeView
+.. autoclass:: URLAttributeView
+.. autoclass:: VerbatimAttributeView
+"""
 
 __docformat__ = "restructuredtext en"
 _ = unicode
@@ -34,7 +54,47 @@
 
 
 class PrimaryView(EntityView):
-    """the full view of an non final entity"""
+    """
+    The basic layout of a primary view is as in the :ref:`primary_view_layout`
+    section. This layout is actually drawn by the `render_entity` method.
+
+    The methods you may want to modify while customizing a ``PrimaryView``
+    are:
+
+    .. automethod:: cubicweb.web.views.primary.PrimaryView.render_entity_title
+    .. automethod:: cubicweb.web.views.primary.PrimaryView.render_entity_attributes
+    .. automethod:: cubicweb.web.views.primary.PrimaryView.render_entity_relations
+    .. automethod:: cubicweb.web.views.primary.PrimaryView.render_side_boxes
+
+    The placement of relations in the relations section or in side boxes
+    can be controlled through the :ref:`primary_view_configuration` mechanism.
+
+    .. automethod:: cubicweb.web.views.primary.PrimaryView.content_navigation_components
+
+    Also, please note that by setting the following attributes in your
+    subclass, you can already customize some of the rendering:
+
+    :attr:`show_attr_label`
+        Renders the attribute label next to the attribute value if set to `True`.
+        Otherwise, does only display the attribute value.
+
+    :attr:`show_rel_label`
+        Renders the relation label next to the relation value if set to `True`.
+        Otherwise, does only display the relation value.
+
+    :attr:`skip_none`
+        Does not render an attribute value that is None if set to `True`.
+
+    :attr:`main_related_section`
+        Renders the relations of the entity if set to `True`.
+
+    A good practice is for you to identify the content of your entity type for
+    which the default rendering does not answer your need so that you can focus
+    on the specific method (from the list above) that needs to be modified. We
+    do not advise you to overwrite ``render_entity`` unless you want a
+    completely different layout.
+    """
+
     __regid__ = 'primary'
     title = _('primary')
     show_attr_label = True
@@ -70,7 +130,8 @@
         if hasattr(self, 'render_entity_summary'):
             warn('[3.10] render_entity_summary method is deprecated (%s)' % self,
                  DeprecationWarning)
-            self.render_entity_summary(entity)
+            self.render_entity_summary(entity) # pylint: disable=E1101
+
         summary = self.summary(entity)
         if summary:
             warn('[3.10] summary method is deprecated (%s)' % self,
@@ -89,12 +150,18 @@
             self.w(u'<div class="primaryRight">')
             if hasattr(self, 'render_side_related'):
                 warn('[3.2] render_side_related is deprecated')
-                self.render_side_related(entity, [])
+                self.render_side_related(entity, []) # pylint: disable=E1101
             self.render_side_boxes(boxes)
             self.w(u'</div>')
             self.w(u'</td></tr></table>')
 
     def content_navigation_components(self, context):
+        """This method is applicable only for entity type implementing the
+        interface `IPrevNext`. This interface is for entities which can be
+        linked to a previous and/or next entity. This method will render the
+        navigation links between entities of this type, either at the top or at
+        the bottom of the page given the context (navcontent{top|bottom}).
+        """
         self.w(u'<div class="%s">' % context)
         for comp in self._cw.vreg['ctxcomponents'].poss_visible_objects(
             self._cw, rset=self.cw_rset, view=self, context=context):
@@ -106,7 +173,9 @@
         self.w(u'</div>')
 
     def render_entity_title(self, entity):
-        """default implementation return dc_title"""
+        """Renders the entity title, by default using entity's
+        :meth:`dc_title()` method.
+        """
         title = xml_escape(entity.dc_title())
         if title:
             if self.is_primary():
@@ -128,6 +197,10 @@
         return u''
 
     def render_entity_attributes(self, entity):
+        """Renders all attributes and relations in the 'attributes' section. The
+        :attr:`skip_none` attribute controls the display of `None` valued
+        attributes.
+        """
         display_attributes = []
         for rschema, _, role, dispctrl in self._section_def(entity, 'attributes'):
             vid = dispctrl.get('vid', 'reledit')
@@ -145,26 +218,28 @@
         if display_attributes:
             self.w(u'<table>')
             for rschema, role, dispctrl, value in display_attributes:
+                # pylint: disable=E1101
                 if not hasattr(self, '_render_attribute'):
                     label = self._rel_label(entity, rschema, role, dispctrl)
                     self.render_attribute(label, value, table=True)
                 elif support_args(self._render_attribute, 'dispctrl'):
                     warn('[3.9] _render_attribute prototype has changed and '
                          'renamed to render_attribute, please update %s'
-                         % self.__class___, DeprecationWarning)
+                         % self.__class__, DeprecationWarning)
                     self._render_attribute(dispctrl, rschema, value, role=role,
                                            table=True)
                 else:
                     self._render_attribute(rschema, value, role=role, table=True)
                     warn('[3.6] _render_attribute prototype has changed and '
                          'renamed to render_attribute, please update %s'
-                         % self.__class___, DeprecationWarning)
+                         % self.__class__, DeprecationWarning)
             self.w(u'</table>')
 
     def render_attribute(self, label, value, table=False):
         self.field(label, value, tr=False, table=table)
 
     def render_entity_relations(self, entity):
+        """Renders all relations in the 'relations' section."""
         for rschema, tschemas, role, dispctrl in self._section_def(entity, 'relations'):
             if rschema.final or dispctrl.get('rtypevid'):
                 vid = dispctrl.get('vid', 'reledit')
@@ -182,6 +257,7 @@
                 if not rset:
                     continue
                 if hasattr(self, '_render_relation'):
+                    # pylint: disable=E1101
                     if not support_args(self._render_relation, 'showlabel'):
                         self._render_relation(dispctrl, rset, 'autolimited')
                         warn('[3.9] _render_relation prototype has changed and has '
@@ -212,8 +288,9 @@
         self.w(u'</div>')
 
     def render_side_boxes(self, boxes):
-        """display side related relations:
-        non-meta in a first step, meta in a second step
+        """Renders side boxes on the right side of the content. This will
+        generate a box for each relation in the 'sidebox' section, as well as
+        explicit box appobjects selectable in this context.
         """
         for box in boxes:
             if isinstance(box, tuple):
@@ -305,6 +382,8 @@
 
     It will try to display nicely according to the number of items in the result
     set.
+
+    XXX include me in the doc
     """
     __regid__ = 'autolimited'
 
@@ -346,32 +425,16 @@
                 self.w(u'</div>')
 
 
-class URLAttributeView(EntityView):
-    """use this view for attributes whose value is an url and that you want
-    to display as clickable link
-    """
-    __regid__ = 'urlattr'
-    __select__ = EntityView.__select__ & match_kwargs('rtype')
+class AttributeView(EntityView):
+    """:__regid__: *attribute*
 
-    def cell_call(self, row, col, rtype, **kwargs):
-        entity = self.cw_rset.get_entity(row, col)
-        url = entity.printable_value(rtype)
-        if url:
-            self.w(u'<a href="%s">%s</a>' % (url, url))
-
-class AttributeView(EntityView):
-    """use this view on an entity as an alternative to more sophisticated
-    views such as reledit.
-
-    Ex. usage:
-
-    uicfg.primaryview_display_ctrl.tag_attribute(('Foo', 'bar'), {'vid': 'attribute'})
+    This view is generally used to disable the *reledit* feature. It works on
+    both relations and attributes.
     """
     __regid__ = 'attribute'
     __select__ = EntityView.__select__ & match_kwargs('rtype')
 
-    def cell_call(self, row, col, rtype, role, **kwargs):
-        entity = self.cw_rset.get_entity(row, col)
+    def entity_call(self, entity, rtype, role, **kwargs):
         if self._cw.vreg.schema.rschema(rtype).final:
             self.w(entity.printable_value(rtype))
         else:
@@ -382,14 +445,49 @@
                 self.wview('autolimited', rset, initargs={'dispctrl': dispctrl})
 
 
+class URLAttributeView(EntityView):
+    """:__regid__: *urlattr*
+
+    This view will wrap an attribute value (hence expect a string) into an '<a>'
+    HTML tag to display a clickable link.
+    """
+    __regid__ = 'urlattr'
+    __select__ = EntityView.__select__ & match_kwargs('rtype')
+
+    def entity_call(self, entity, rtype, **kwargs):
+        url = entity.printable_value(rtype)
+        if url:
+            self.w(u'<a href="%s">%s</a>' % (url, url))
+
+
+class VerbatimAttributeView(EntityView):
+    """:__regid__: *verbatimattr*
+
+    This view will wrap an attribute value into a '<pre>' HTML tag to display
+    arbitrary text where EOL will be respected. It usually makes sense for
+    attributes whose value is a multi-line string where new lines matter.
+    """
+    __regid__ = 'verbatimattr'
+    __select__ = EntityView.__select__ & match_kwargs('rtype')
+
+    def entity_call(self, entity, rtype, **kwargs):
+        value = entity.printable_value(rtype)
+        if value:
+            self.w(u'<pre>%s</pre>' % value)
+
+
+
+
 
 class ToolbarLayout(component.Layout):
+    # XXX include me in the doc
     __select__ = match_context('ctxtoolbar')
 
     def render(self, w):
         if self.init_rendering():
             self.cw_extra_kwargs['view'].render_body(w)
 
+
 ## default primary ui configuration ###########################################
 
 _pvs = uicfg.primaryview_section
--- a/web/views/pyviews.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/views/pyviews.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -24,18 +24,29 @@
 
 
 class PyValTableView(View):
-    """display a list of list of values into an html table.
+    """display a list of list of values into an HTML table.
 
     Take care, content is NOT xml-escaped.
+
+    If `headers` is specified, it is expected to be a list of headers to be
+    inserted as first row (in <thead>).
+
+    If `colheaders` is True, the first column will be considered as a headers
+    column and its values will be inserted inside <th> instead of <td>.
+
+    `cssclass` is the CSS class used on the <table> tag, and defaults to
+    'listing' (so that the table will look similar to those generated by the
+    table view).
     """
     __regid__ = 'pyvaltable'
     __select__ = match_kwargs('pyvalue')
 
-    def call(self, pyvalue, headers=None):
+    def call(self, pyvalue, headers=None, colheaders=False,
+             cssclass='listing'):
         if headers is None:
             headers = self._cw.form.get('headers')
         w = self.w
-        w(u'<table class="listing">\n')
+        w(u'<table class="%s">\n' % cssclass)
         if headers:
             w(u'<thead>')
             w(u'<tr>')
@@ -46,6 +57,9 @@
         w(u'<tbody>')
         for row in pyvalue:
             w(u'<tr>')
+            if colheaders:
+                w(u'<th>%s</th>' % row[0])
+                row = row[1:]
             for cell in row:
                 w(u'<td>%s</td>' % cell)
             w(u'</tr>\n')
--- a/web/views/rdf.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/views/rdf.py	Fri Dec 09 12:08:27 2011 +0100
@@ -89,7 +89,7 @@
                             except xy.UnsupportedVocabulary:
                                 pass
                     else:
-                        for related in entity.related(rtype, role, entities=True):
+                        for related in entity.related(rtype, role, entities=True, safe=True):
                             if role == 'subject':
                                 add( (cwuri, CW[rtype], URIRef(related.cwuri)) )
                                 try:
@@ -100,4 +100,3 @@
                             else:
                                 add( (URIRef(related.cwuri), CW[rtype], cwuri) )
 
-
--- a/web/views/reledit.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/views/reledit.py	Fri Dec 09 12:08:27 2011 +0100
@@ -104,11 +104,17 @@
                 self._handle_relation(rschema, role, divid, reload, formid, action)
 
     def _handle_attribute(self, rschema, role, divid, reload, action):
-        value = self.entity.printable_value(rschema.type)
+        rvid = self._rules.get('rvid', None)
+        if rvid is not None:
+            value = self._cw.view(rvid, entity=self.entity,
+                                  rtype=rschema.type, role=role)
+        else:
+            value = self.entity.printable_value(rschema.type)
         if not self._should_edit_attribute(rschema):
             self.w(value)
             return
-        form, renderer = self._build_form(self.entity, rschema, role, divid, 'base', reload, action)
+        form, renderer = self._build_form(self.entity, rschema, role, divid,
+                                          'base', reload, action)
         value = value or self._compute_default_value(rschema, role)
         self.view_form(divid, value, form, renderer)
 
@@ -170,6 +176,8 @@
                     self._cw, rschema.type, role)
             else:
                 default = self._cw._('<not specified>')
+        else:
+            default = self._cw._(default)
         return xml_escape(default)
 
     def _is_composite(self):
--- a/web/views/schema.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/views/schema.py	Fri Dec 09 12:08:27 2011 +0100
@@ -102,7 +102,7 @@
             # XXX get group entity and call it's incontext view
             groups = [u'<a class="%s" href="%s">%s</a>' % (
                 group, self._cw.build_url('cwgroup/%s' % group), label)
-                      for group, label in sorted((_(g), g) for g in groups)]
+                      for label, group in sorted((_(g), g) for g in groups)]
             w(u'<br/>'.join(groups))
             w(u'</td><td>')
             w(u'<br/>'.join(rqlexprs))
@@ -697,7 +697,7 @@
     __regid__ = 'schema'
     __select__ = yes()
 
-    title = _("site schema")
+    title = _("data model schema")
     order = 30
     category = 'manage'
 
--- a/web/views/sessions.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/views/sessions.py	Fri Dec 09 12:08:27 2011 +0100
@@ -21,10 +21,11 @@
 
 __docformat__ = "restructuredtext en"
 
-from cubicweb import RepositoryError, Unauthorized, AuthenticationError
+from cubicweb import (RepositoryError, Unauthorized, AuthenticationError,
+                      BadConnectionId)
 from cubicweb.web import InvalidSession, Redirect
 from cubicweb.web.application import AbstractSessionManager
-from cubicweb.dbapi import DBAPISession
+from cubicweb.dbapi import ProgrammingError, DBAPISession
 
 
 class InMemoryRepositorySessionManager(AbstractSessionManager):
@@ -98,10 +99,10 @@
         for forminternal_key in ('__form_id', '__domid', '__errorurl'):
             args.pop(forminternal_key, None)
         path = req.relative_path(False)
-        if path == 'login':
+        if path in ('login', 'logout') or req.form.get('vid') == 'loggedout':
             path = 'view'
             args['__message'] = req._('welcome %s !') % req.user.login
-            if 'vid' in req.form:
+            if 'vid' in req.form and req.form['vid'] != 'loggedout':
                 args['vid'] = req.form['vid']
             if 'rql' in req.form:
                 args['rql'] = req.form['rql']
@@ -119,7 +120,7 @@
             req.cnx.commit()
         except (RepositoryError, Unauthorized):
             req.cnx.rollback()
-        except:
+        except Exception:
             req.cnx.rollback()
             raise
 
@@ -132,8 +133,6 @@
         if session.cnx:
             try:
                 session.cnx.close()
-            except:
-                # already closed, may occur if the repository session expired
-                # but not the web session
+            except (ProgrammingError, BadConnectionId): # expired on the repository side
                 pass
             session.cnx = None
--- a/web/views/sparql.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/views/sparql.py	Fri Dec 09 12:08:27 2011 +0100
@@ -80,6 +80,7 @@
 
     'Boolean': 'boolean',
     'Int': 'integer',
+    'BigInt': 'integer',
     'Float': 'float',
 
     'Datetime': 'dateTime',
--- a/web/views/startup.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/views/startup.py	Fri Dec 09 12:08:27 2011 +0100
@@ -15,8 +15,10 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""Set of HTML startup views. A startup view is global, e.g. doesn't apply to a
-result set.
+"""This module contains the default index page and management view.
+
+.. autoclass:: IndexView
+.. autoclass:: ManageView
 """
 
 __docformat__ = "restructuredtext en"
@@ -32,6 +34,19 @@
 from cubicweb.web import ajax_replace_url, uicfg, httpcache
 
 class ManageView(StartupView):
+    """:__regid__: *manage*
+
+    The manage view displays some information about what's contained by your
+    site and provides access to administration stuff such as user and groups
+    management.
+
+    Regarding the section displaying link to entity type, notice by default it
+    won't display entity types which are related to another one using a
+    mandatory (cardinality == 1) composite relation.
+
+    You can still configure that behaviour manually using the
+    `indexview_etype_section` as explained in :mod:`cubicweb.web.uicfg`.
+    """
     __regid__ = 'manage'
     title = _('manage')
     http_cache_manager = httpcache.EtagHTTPCacheManager
@@ -127,16 +142,16 @@
         for etype in self.add_etype_links:
             eschema = self._cw.vreg.schema.eschema(etype)
             if eschema.has_perm(self._cw, 'add'):
+                url = self._cw.vreg["etypes"].etype_class(etype).cw_create_url(self._cw)
                 self.w(u'<li><a href="%s">%s</a></li>' % (
-                        self._cw.build_url('add/%s' % eschema),
-                        self._cw.__('add a %s' % eschema).capitalize()))
+                        url, self._cw.__('New %s' % eschema).capitalize()))
         self.w(u'</ul>')
 
     def add_entity_link(self, etype):
         """creates a [+] link for adding an entity"""
         url = self._cw.vreg["etypes"].etype_class(etype).cw_create_url(self._cw)
         return u'[<a href="%s" title="%s">+</a>]' % (
-            xml_escape(url), self._cw.__('add a %s' % etype))
+            xml_escape(url), self._cw.__('New %s' % etype))
 
     @deprecated('[3.11] display_folders method is deprecated, backport it if needed')
     def display_folders(self):
@@ -149,6 +164,13 @@
 
 
 class IndexView(ManageView):
+    """:__regid__: *index*
+
+    The default index view, that you'll get when accessing your site's root url.
+    It's by default identical to the
+    :class:`~cubicweb.web.views.startup.ManageView`, but you'll usually want to
+    customize this one.
+    """
     __regid__ = 'index'
     title = _('view_index')
 
--- a/web/views/tableview.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/views/tableview.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -15,82 +15,55 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""generic table view, including filtering abilities"""
+"""generic table view, including filtering abilities using facets"""
 
 __docformat__ = "restructuredtext en"
 _ = unicode
 
 from logilab.mtconverter import xml_escape
 
-from cubicweb.selectors import nonempty_rset, match_form_params
-from cubicweb.utils import make_uid, json_dumps
+from cubicweb import NoSelectableObject, tags
+from cubicweb.selectors import nonempty_rset
+from cubicweb.utils import make_uid, js_dumps, JSString
 from cubicweb.view import EntityView, AnyRsetView
-from cubicweb import tags
 from cubicweb.uilib import toggle_action, limitsize, htmlescape
-from cubicweb.web import jsonize
-from cubicweb.web.component import Link
+from cubicweb.web import jsonize, component, facet
 from cubicweb.web.htmlwidgets import (TableWidget, TableColumn, MenuWidget,
                                       PopupBoxMenu)
-from cubicweb.web.facet import prepare_facets_rqlst, filter_hiddens
+
 
 class TableView(AnyRsetView):
-    """The table view accepts any non-empty rset. It uses
-    introspection on the result set to compute column names and the
-    proper way to display the cells.
+    """The table view accepts any non-empty rset. It uses introspection on the
+    result set to compute column names and the proper way to display the cells.
+
     It is however highly configurable and accepts a wealth of options.
     """
     __regid__ = 'table'
     title = _('table')
     finalview = 'final'
 
+    table_widget_class = TableWidget
+    table_column_class = TableColumn
+
+    tablesorter_settings = {
+        'textExtraction': JSString('cubicwebSortValueExtraction'),
+        }
+
     def form_filter(self, divid, displaycols, displayactions, displayfilter,
                     paginate, hidden=True):
-        rqlst = self.cw_rset.syntax_tree()
-        # union not yet supported
-        if len(rqlst.children) != 1:
+        try:
+            filterform = self._cw.vreg['views'].select(
+                'facet.filtertable', self._cw, rset=self.cw_rset)
+        except NoSelectableObject:
             return ()
-        rqlst = rqlst.copy()
-        self._cw.vreg.rqlhelper.annotate(rqlst)
-        mainvar, baserql = prepare_facets_rqlst(rqlst, self.cw_rset.args)
-        wdgs = [facet.get_widget() for facet in self._cw.vreg['facets'].poss_visible_objects(
-            self._cw, rset=self.cw_rset, rqlst=rqlst.children[0], context='tablefilter',
-            filtered_variable=mainvar)]
-        wdgs = [wdg for wdg in wdgs if wdg is not None]
-        if wdgs:
-            self._generate_form(divid, baserql, wdgs, hidden,
-                               vidargs={'paginate': paginate,
-                                        'displaycols': displaycols,
-                                        'displayactions': displayactions,
-                                        'displayfilter': displayfilter})
-            return self.show_hide_actions(divid, not hidden)
-        return ()
-
-    def _generate_form(self, divid, baserql, fwidgets, hidden=True, vidargs={}):
-        """display a form to filter table's content. This should only
-        occur when a context eid is given
-        """
-        w = self.w
-        self._cw.add_css('cubicweb.facets.css')
-        self._cw.add_js( ('cubicweb.ajax.js', 'cubicweb.facets.js'))
-        # drop False / None values from vidargs
-        vidargs = dict((k, v) for k, v in vidargs.iteritems() if v)
-        w(u'<form method="post" cubicweb:facetargs="%s" action="">' %
-          xml_escape(json_dumps([divid, self.__regid__, False, vidargs])))
-        w(u'<fieldset id="%sForm" class="%s">' % (divid, hidden and 'hidden' or ''))
-        w(u'<input type="hidden" name="divid" value="%s" />' % divid)
-        w(u'<input type="hidden" name="fromformfilter" value="1" />')
-        filter_hiddens(w, facets=','.join(wdg.facet.__regid__ for wdg in fwidgets),
-                       baserql=baserql)
-        w(u'<table class="filter">\n')
-        w(u'<tr>\n')
-        for wdg in fwidgets:
-            w(u'<td>')
-            wdg.render(w=w)
-            w(u'</td>\n')
-        w(u'</tr>\n')
-        w(u'</table>\n')
-        w(u'</fieldset>\n')
-        w(u'</form>\n')
+        vidargs = {'paginate': paginate,
+                   'displaycols': displaycols,
+                   'displayactions': displayactions,
+                   'displayfilter': displayfilter}
+        cssclass = hidden and 'hidden' or ''
+        filterform.render(self.w, vid=self.__regid__, divid=divid,
+                          vidargs=vidargs, cssclass=cssclass)
+        return self.show_hide_actions(divid, not hidden)
 
     def main_var_index(self):
         """returns the index of the first non-attribute variable among the RQL
@@ -115,6 +88,15 @@
                 displaycols = range(len(self.cw_rset.syntax_tree().children[0].selection))
         return displaycols
 
+    def _setup_tablesorter(self, divid):
+        req = self._cw
+        req.add_js('jquery.tablesorter.js')
+        req.add_onload('''$(document).ready(function() {
+    $("#%s table.listing").tablesorter(%s);
+});''' % (divid, js_dumps(self.tablesorter_settings)))
+        req.add_css(('cubicweb.tablesorter.css', 'cubicweb.tableview.css'))
+
+
     def call(self, title=None, subvid=None, displayfilter=None, headers=None,
              displaycols=None, displayactions=None, actions=(), divid=None,
              cellvids=None, cellattrs=None, mainindex=None,
@@ -125,10 +107,12 @@
         :param subvid: cell view
         :param displayfilter: filter that selects rows to display
         :param headers: columns' titles
+        :param displaycols: indexes of columns to display (first column is 0)
+        :param displayactions: if True, display action menu
         """
         req = self._cw
-        req.add_js('jquery.tablesorter.js')
-        req.add_css(('cubicweb.tablesorter.css', 'cubicweb.tableview.css'))
+        divid = divid or req.form.get('divid') or 'rs%s' % make_uid(id(self.cw_rset))
+        self._setup_tablesorter(divid)
         # compute label first  since the filter form may remove some necessary
         # information from the rql syntax tree
         if mainindex is None:
@@ -137,7 +121,6 @@
         hidden = True
         if not subvid and 'subvid' in req.form:
             subvid = req.form.pop('subvid')
-        divid = divid or req.form.get('divid') or 'rs%s' % make_uid(id(self.cw_rset))
         actions = list(actions)
         if mainindex is None:
             displayfilter, displayactions = False, False
@@ -178,7 +161,7 @@
         if paginate:
             self.divid = divid # XXX iirk (see usage in page_navigation_url)
             self.paginate(page_size=page_size, show_all_option=False)
-        table = TableWidget(self)
+        table = self.table_widget_class(self)
         for column in self.get_columns(computed_labels, displaycols, headers,
                                        subvid, cellvids, cellattrs, mainindex):
             table.append_column(column)
@@ -213,7 +196,7 @@
                             ident='%sActions' % divid)
         box.append(menu)
         for url, label, klass, ident in actions:
-            menu.append(Link(url, label, klass=klass, id=ident))
+            menu.append(component.Link(url, label, klass=klass, id=ident))
         box.render(w=self.w)
         self.w(u'<div class="clear"/>')
 
@@ -229,7 +212,7 @@
                 label = headers[displaycols.index(colindex)]
             if colindex == mainindex and label is not None:
                 label += ' (%s)' % self.cw_rset.rowcount
-            column = TableColumn(label, colindex)
+            column = self.table_column_class(label, colindex)
             coltype = self.cw_rset.description[0][colindex]
             # compute column cell view (if coltype is None, it's a left outer
             # join, use the default non final subvid)
@@ -291,14 +274,17 @@
         :param cellvid: cell view (defaults to 'outofcontext')
         """
         etype, val = self.cw_rset.description[row][col], self.cw_rset[row][col]
-        if val is not None and etype is not None and not self._cw.vreg.schema.eschema(etype).final:
-            self.wview(cellvid or 'outofcontext', self.cw_rset, row=row, col=col)
-        elif val is None:
-            # This is usually caused by a left outer join and in that case,
-            # regular views will most certainly fail if they don't have
-            # a real eid
-            self.wview('final', self.cw_rset, row=row, col=col)
+        if etype is None or not self._cw.vreg.schema.eschema(etype).final:
+            if val is None:
+                # This is usually caused by a left outer join and in that case,
+                # regular views will most certainly fail if they don't have
+                # a real eid
+                # XXX if cellvid is e.g. reledit, we may wanna call it anyway
+                self.w(u'&#160;')
+            else:
+                self.wview(cellvid or 'outofcontext', self.cw_rset, row=row, col=col)
         else:
+            # XXX why do we need a fallback view here?
             self.wview(cellvid or 'final', self.cw_rset, 'null', row=row, col=col)
 
 
@@ -396,6 +382,7 @@
     def cell_call(self, row, col):
         _ = self._cw._
         entity = self.cw_rset.get_entity(row, col)
+        entity.complete()
         infos = {}
         for col in self.columns:
             meth = getattr(self, 'build_%s_cell' % col, None)
--- a/web/views/tabs.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/views/tabs.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -93,8 +93,7 @@
         activetab = cookies.get(cookiename)
         if activetab is None:
             domid = uilib.domid(default)
-            cookies[cookiename] = domid
-            self._cw.set_cookie(cookies, cookiename)
+            self._cw.set_cookie(cookiename, domid)
             return domid
         return activetab.value
 
@@ -128,7 +127,7 @@
             entity.view(default, w=self.w)
             return
         self._cw.add_css('jquery.ui.css')
-        self._cw.add_js(('jquery.ui.js', 'cubicweb.ajax.js'))
+        self._cw.add_js(('jquery.ui.js', 'cubicweb.ajax.js', 'jquery.cookie.js'))
         # prune tabs : not all are to be shown
         tabs, active_tab = self.prune_tabs(tabs, default)
         # build the html structure
@@ -140,9 +139,7 @@
         for i, (tabid, domid, tabkwargs) in enumerate(tabs):
             w(u'<li>')
             w(u'<a href="#%s">' % domid)
-            w(u'<span onclick="%s">' % xml_escape(unicode(uilib.js.setTab(domid, self.cookie_name))))
             w(tabkwargs.pop('label', self._cw._(tabid)))
-            w(u'</span>')
             w(u'</a>')
             w(u'</li>')
             if domid == active_tab:
@@ -160,7 +157,12 @@
         # because the callback binding needs to be done before
         # XXX make work history: true
         self._cw.add_onload(u"""
-  jQuery('#entity-tabs-%(eeid)s').tabs( { selected: %(tabindex)s });
+  jQuery('#entity-tabs-%(eeid)s').tabs(
+    { selected: %(tabindex)s,
+      select: function(event, ui) {
+        setTab(ui.panel.id, '%(cookiename)s');
+      }
+    });
   setTab('%(domid)s', '%(cookiename)s');
 """ % {'tabindex'   : active_tab_idx,
        'domid'        : active_tab,
@@ -188,6 +190,8 @@
     """
     __select__ = EntityView.__select__ & partial_has_related_entities()
     vid = 'list'
+    # to be defined in concrete classes
+    rtype = title = None
 
     def cell_call(self, row, col):
         rset = self.cw_rset.get_entity(row, col).related(self.rtype, role(self))
--- a/web/views/treeview.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/views/treeview.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
--- a/web/views/urlpublishing.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/views/urlpublishing.py	Fri Dec 09 12:08:27 2011 +0100
@@ -260,9 +260,8 @@
             else:
                 try:
                     action = actionsreg._select_best(actions, req, rset=rset)
+                    if action is not None:
+                        raise Redirect(action.url())
                 except RegistryException:
-                    continue
-                else:
-                    # XXX avoid redirect
-                    raise Redirect(action.url())
+                    pass # continue searching
         raise PathDontMatch()
--- a/web/views/workflow.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/views/workflow.py	Fri Dec 09 12:08:27 2011 +0100
@@ -174,6 +174,7 @@
             warn('[3.10] %s should now implement render_body instead of cell_call'
                  % self.__class__, DeprecationWarning)
             self.w = w
+            # pylint: disable=E1101
             self.cell_call(self.entity.cw_row, self.entity.cw_col)
         else:
             self.entity.view('wfhistory', w=w, title=None)
--- a/web/views/xmlrss.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/views/xmlrss.py	Fri Dec 09 12:08:27 2011 +0100
@@ -73,11 +73,13 @@
 class XMLItemView(EntityView):
     __regid__ = 'xmlitem'
 
-    def cell_call(self, row, col):
-        """ element as an item for an xml feed """
-        entity = self.cw_rset.complete_entity(row, col)
-        self.w(u'<%s eid="%s" cwuri="%s">\n'
-               % (entity.e_schema, entity.eid, xml_escape(entity.cwuri)))
+    def entity_call(self, entity):
+        """element as an item for an xml feed"""
+        entity.complete()
+        source = entity.cw_metainformation()['source']['uri']
+        self.w(u'<%s eid="%s" cwuri="%s" cwsource="%s">\n'
+               % (entity.__regid__, entity.eid, xml_escape(entity.cwuri),
+                  xml_escape(source)))
         for rschema, attrschema in entity.e_schema.attribute_definitions():
             attr = rschema.type
             if attr in ('eid', 'cwuri'):
@@ -113,18 +115,32 @@
                 self.error('unexisting relation %r', relstr)
                 continue
             self.w(u'  <%s role="%s">\n' % (rtype, role))
-            for related in entity.related(rtype, role, entities=True):
-                # XXX put unique attributes as xml attribute, they are much
-                # probably used to search existing entities in client data feed,
-                # and putting it here may avoid an extra request to get those
-                # attributes values
-                self.w(u'    <%s eid="%s" cwuri="%s"/>\n'
-                       % (related.e_schema, related.eid,
-                          xml_escape(related.cwuri)))
+            self.wview('xmlrelateditem', entity.related(rtype, role, safe=True), 'null')
             self.w(u'  </%s>\n' % rtype)
         self.w(u'</%s>\n' % (entity.e_schema))
 
 
+class XMLRelatedItemView(EntityView):
+    __regid__ = 'xmlrelateditem'
+    add_div_section = False
+
+    def entity_call(self, entity):
+    # XXX put unique attributes as xml attribute, they are most probably
+    # used to search existing entities in client data feed, and putting them
+    # here may avoid an extra request to get those attributes values
+        self.w(u'    <%s eid="%s" cwuri="%s"/>\n'
+               % (entity.e_schema, entity.eid, xml_escape(entity.cwuri)))
+
+
+class XMLRelatedItemStateView(XMLRelatedItemView):
+    __select__ = is_instance('State')
+
+    def entity_call(self, entity):
+        self.w(u'    <%s eid="%s" cwuri="%s" name="%s"/>\n'
+               % (entity.e_schema, entity.eid, xml_escape(entity.cwuri),
+                  xml_escape(entity.name)))
+
+
 class XMLRsetView(AnyRsetView):
     """dumps raw rset as xml"""
     __regid__ = 'rsetxml'
--- a/web/wdoc/tut_rql_en.rst	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/wdoc/tut_rql_en.rst	Fri Dec 09 12:08:27 2011 +0100
@@ -23,7 +23,7 @@
 .. _here: schema
 
 
-Some bits of théory
+Some bits of theory
 -------------------
 
 Variables et types
--- a/web/webconfig.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/webconfig.py	Fri Dec 09 12:08:27 2011 +0100
@@ -27,6 +27,7 @@
 from logilab.common.decorators import cached
 from logilab.common.deprecation import deprecated
 
+from cubicweb import ConfigurationError
 from cubicweb.toolsutils import read_config
 from cubicweb.cwconfig import CubicWebConfiguration, register_persistent_options, merge_options
 
@@ -233,16 +234,20 @@
         return self.repository().get_versions()
 
     def anonymous_user(self):
-        """return a login and password to use for anonymous users. None
-        may be returned for both if anonymous connections are not allowed
+        """return a login and password to use for anonymous users.
+        
+        None may be returned for both if anonymous connection is not
+        allowed or if an empty login is used in configuration
         """
         try:
-            user = self['anonymous-user']
+            user   = self['anonymous-user'] or None
             passwd = self['anonymous-password']
+            if user:
+                user = unicode(user)
         except KeyError:
             user, passwd = None, None
-        if user is not None:
-            user = unicode(user)
+        except UnicodeDecodeError:
+            raise ConfigurationError("anonymous information should only contains ascii")
         return user, passwd
 
     def locate_resource(self, rid):
@@ -300,19 +305,17 @@
         if not (self.repairing or self.creating):
             self.global_set_option('base-url', baseurl)
         httpsurl = self['https-url']
+        if (self.debugmode or self.mode == 'test'):
+            datadir_path = 'data/'
+        else:
+            datadir_path = 'data/%s/' % self.instance_md5_version()
         if httpsurl:
             if httpsurl[-1] != '/':
                 httpsurl += '/'
                 if not self.repairing:
                     self.global_set_option('https-url', httpsurl)
-            if self.debugmode:
-                self.https_datadir_url = httpsurl + 'data/'
-            else:
-                self.https_datadir_url = httpsurl + 'data%s/' % self.instance_md5_version()
-        if self.debugmode:
-            self.datadir_url = baseurl + 'data/'
-        else:
-            self.datadir_url = baseurl + 'data%s/' % self.instance_md5_version()
+            self.https_datadir_url = httpsurl + datadir_path
+        self.datadir_url = baseurl + datadir_path
 
     def _build_ui_properties(self):
         # self.datadir_url[:-1] to remove trailing /
--- a/web/webctl.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/web/webctl.py	Fri Dec 09 12:08:27 2011 +0100
@@ -21,9 +21,22 @@
 
 __docformat__ = "restructuredtext en"
 
+import os, os.path as osp
+from shutil import copy
+
 from logilab.common.shellutils import ASK
 
-from cubicweb.toolsutils import CommandHandler, underline_title
+from cubicweb import ExecutionError
+from cubicweb.cwctl import CWCTL
+from cubicweb.cwconfig import CubicWebConfiguration as cwcfg
+from cubicweb.toolsutils import Command, CommandHandler, underline_title
+
+
+try:
+    from os import symlink as linkdir
+except ImportError:
+    from shutil import copytree as linkdir
+
 
 class WebCreateHandler(CommandHandler):
     cmdname = 'create'
@@ -43,3 +56,57 @@
 
     def postcreate(self, *args, **kwargs):
         """hooks called once instance's initialization has been completed"""
+
+
+class GenStaticDataDir(Command):
+    """Create a directory merging all data directory content from cubes and CW.
+    """
+    name = 'gen-static-datadir'
+    arguments = '<instance> [dirpath]'
+    min_args = 1
+    max_args = 2
+
+    options = ()
+
+    def run(self, args):
+        appid = args.pop(0)
+        config = cwcfg.config_for(appid)
+        if args:
+            dest = args[0]
+        else:
+            dest = osp.join(config.appdatahome, 'data')
+        if osp.exists(dest):
+            raise ExecutionError('Directory %s already exists. '
+                                 'Remove it first.' % dest)
+        config.quick_start = True # notify this is not a regular start
+        # list all resources (no matter their order)
+        resources = set()
+        for datadir in self._datadirs(config):
+            for dirpath, dirnames, filenames in os.walk(datadir):
+                rel_dirpath = dirpath[len(datadir)+1:]
+                resources.update(osp.join(rel_dirpath, f) for f in filenames)
+        # locate resources and copy them to destination
+        for resource in resources:
+            dirname = osp.dirname(resource)
+            dest_resource = osp.join(dest, dirname)
+            if not osp.isdir(dest_resource):
+                os.makedirs(dest_resource)
+            resource_dir, resource_path = config.locate_resource(resource)
+            copy(osp.join(resource_dir, resource_path), dest_resource)
+        # handle md5 version subdirectory
+        linkdir(dest, osp.join(dest, config.instance_md5_version()))
+        print ('You can use apache rewrite rule below :\n'
+               'RewriteRule ^/data/(.*) %s/$1 [L]' % dest)
+
+    def _datadirs(self, config):
+        repo = config.repository()
+        if config._cubes is None:
+            # web only config
+            config.init_cubes(repo.get_cubes())
+        for cube in repo.get_cubes():
+            cube_datadir = osp.join(cwcfg.cube_dir(cube), 'data')
+            if osp.isdir(cube_datadir):
+                yield cube_datadir
+        yield osp.join(config.shared_dir(), 'data')
+
+CWCTL.register(GenStaticDataDir)
--- a/wsgi/__init__.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/wsgi/__init__.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -37,7 +37,7 @@
     """pretty prints `obj` if possible"""
     try:
         return _pformat(obj)
-    except:
+    except Exception:
         return u'<could not parse>'
 
 def qs2dict(qs):
--- a/wsgi/request.py	Mon Sep 26 18:37:23 2011 +0200
+++ b/wsgi/request.py	Fri Dec 09 12:08:27 2011 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.