backport stable

author      Sylvain Thénault <sylvain.thenault@logilab.fr>
date        Tue, 28 Jun 2011 16:33:53 +0200
changeset   7569 02c338197322
parent      7565 b355d9dd43df (diff)
parent      7568 c5ee33fb6a3b (current diff)
child       7570 648bf83945a5

files:
  server/serverctl.py
  test/unittest_utils.py
  utils.py
  web/views/plots.py
--- a/.hgtags	Tue Jun 28 16:32:38 2011 +0200
+++ b/.hgtags	Tue Jun 28 16:33:53 2011 +0200
@@ -204,6 +204,8 @@
 6dfe78a0797ccc34962510f8c2a57f63d65ce41e cubicweb-debian-version-3.12.5-1
 a18dac758150fe9c1f9e4958d898717c32a8f679 cubicweb-version-3.12.6
 105767487c7075dbcce36474f1af0485985cbf2c cubicweb-debian-version-3.12.6-1
+b661ef475260ca7d9ea5c36ba2cc86e95e5b17d3 cubicweb-version-3.13.0
+a96137858f571711678954477da6f7f435870cea cubicweb-debian-version-3.13.0-1
 628fe57ce746c1dac87fb1b078b2026057df894e cubicweb-version-3.12.7
 a07517985136bbbfa6610c428a1b42cd04cd530b cubicweb-debian-version-3.12.7-1
 50122a47ce4fb2ecbf3cf20ed2777f4276c93609 cubicweb-version-3.12.8
--- a/__pkginfo__.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/__pkginfo__.py	Tue Jun 28 16:33:53 2011 +0200
@@ -22,7 +22,7 @@
 
 modname = distname = "cubicweb"
 
-numversion = (3, 12, 8)
+numversion = (3, 13, 1)
 version = '.'.join(str(num) for num in numversion)
 
 description = "a repository of entities / relations for knowledge management"
--- a/appobject.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/appobject.py	Tue Jun 28 16:33:53 2011 +0200
@@ -180,12 +180,13 @@
         return self.__class__.__name__
 
     def search_selector(self, selector):
-        """search for the given selector or selector instance in the selectors
-        tree. Return it of None if not found
+        """search for the given selector, selector instance or tuple of
+        selectors in the selectors tree. Return None if not found.
         """
         if self is selector:
             return self
-        if isinstance(selector, type) and isinstance(self, selector):
+        if (isinstance(selector, type) or isinstance(selector, tuple)) and \
+               isinstance(self, selector):
             return self
         return None
 
@@ -250,8 +251,8 @@
         return merged_selectors
 
     def search_selector(self, selector):
-        """search for the given selector or selector instance in the selectors
-        tree. Return it of None if not found
+        """search for the given selector or selector instance (or tuple of
+        selectors) in the selectors tree. Return None if not found.
         """
         for childselector in self.selectors:
             if childselector is selector:
@@ -259,7 +260,8 @@
             found = childselector.search_selector(selector)
             if found is not None:
                 return found
-        return None
+        # if not found in children, maybe we are looking for self?
+        return super(MultiSelector, self).search_selector(selector)
 
 
 class AndSelector(MultiSelector):
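
The hunks above extend ``search_selector`` to accept a tuple of selector
classes (mirroring ``isinstance`` semantics) and make
``MultiSelector.search_selector`` fall back to matching itself when nothing
matches in its children. A minimal sketch of the new call form, using common
cubicweb selectors::

    from cubicweb.selectors import authenticated_user, match_user_groups

    select = authenticated_user() & match_user_groups('managers')
    # a single class worked before; a tuple now works too, as with isinstance()
    found = select.search_selector((authenticated_user, match_user_groups))
    assert found is not None
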
--- a/cwvreg.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/cwvreg.py	Tue Jun 28 16:33:53 2011 +0200
@@ -194,17 +194,18 @@
 _ = unicode
 
 from warnings import warn
+from datetime import datetime, date, time, timedelta
 
 from logilab.common.decorators import cached, clear_cache
 from logilab.common.deprecation import deprecated, class_deprecated
 from logilab.common.modutils import cleanup_sys_modules
 
 from rql import RQLHelper
+from yams.constraints import BASE_CONVERTERS
 
 from cubicweb import (ETYPE_NAME_MAP, Binary, UnknownProperty, UnknownEid,
                       ObjectNotFound, NoSelectableObject, RegistryNotFound,
                       CW_EVENT_MANAGER)
-from cubicweb.utils import dump_class
 from cubicweb.vregistry import VRegistry, Registry, class_regid, classid
 from cubicweb.rtags import RTAGS
 
@@ -368,7 +369,10 @@
         # make a copy even if cls.__regid__ == etype, else we may have
         # problems for client applications using multiple connections to
         # different repositories (eg shingouz)
-        cls = dump_class(cls, etype)
+        # __autogenerated__ attribute is just a marker
+        cls = type(str(etype), (cls,), {'__autogenerated__': True,
+                                        '__doc__': cls.__doc__,
+                                        '__module__': cls.__module__})
         cls.__regid__ = etype
         cls.__initialize__(self.schema)
         return cls
@@ -412,10 +416,8 @@
                      if not isinstance(view, class_deprecated)]
             try:
                 view = self._select_best(views, req, rset=rset, **kwargs)
-                if view.linkable():
+                if view is not None and view.linkable():
                     yield view
-            except NoSelectableObject:
-                continue
             except Exception:
                 self.exception('error while trying to select %s view for %s',
                                vid, rset)
@@ -849,24 +851,15 @@
         return self['views'].select(__vid, req, rset=rset, **kwargs)
 
 
-import decimal
-from datetime import datetime, date, time, timedelta
-
-YAMS_TO_PY = { # XXX unify with yams.constraints.BASE_CONVERTERS?
-    'String' :  unicode,
-    'Bytes':    Binary,
-    'Password': str,
-
-    'Boolean':  bool,
-    'Int':      int,
-    'Float':    float,
-    'Decimal':  decimal.Decimal,
-
+# XXX unify with yams.constraints.BASE_CONVERTERS?
+YAMS_TO_PY = BASE_CONVERTERS.copy()
+YAMS_TO_PY.update({
+    'Bytes':      Binary,
     'Date':       date,
     'Datetime':   datetime,
     'TZDatetime': datetime,
     'Time':       time,
     'TZTime':     time,
     'Interval':   timedelta,
-    }
+    })
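
``YAMS_TO_PY`` is now seeded from yams' ``BASE_CONVERTERS`` (which, per the
block removed above, is expected to cover at least String, Password, Boolean,
Int, Float and Decimal) and only completed with the cubicweb-specific types.
The mapping goes from a yams attribute type name to a Python type, e.g.::

    from cubicweb.cwvreg import YAMS_TO_PY

    assert YAMS_TO_PY['Int'] is int
    pytype = YAMS_TO_PY['Datetime']     # datetime.datetime
    value = pytype(2011, 6, 28, 16, 33)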
 
--- a/dataimport.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/dataimport.py	Tue Jun 28 16:33:53 2011 +0200
@@ -445,14 +445,14 @@
         ObjectStore.__init__(self)
         if session is None:
             sys.exit('please provide a session or run this script with cubicweb-ctl shell and pass cnx as session')
-        if not hasattr(session, 'set_pool'):
+        if not hasattr(session, 'set_cnxset'):
             # connection
             cnx = session
             session = session.request()
-            session.set_pool = lambda : None
+            session.set_cnxset = lambda : None
             commit = commit or cnx.commit
         else:
-            session.set_pool()
+            session.set_cnxset()
         self.session = session
         self._commit = commit or session.commit
 
@@ -462,7 +462,7 @@
 
     def commit(self):
         txuuid = self._commit()
-        self.session.set_pool()
+        self.session.set_cnxset()
         return txuuid
 
     def rql(self, *args):
@@ -642,7 +642,9 @@
         for k, v in kwargs.iteritems():
             kwargs[k] = getattr(v, 'eid', v)
         entity, rels = self.metagen.base_etype_dicts(etype)
+        # make a copy to keep cached entity pristine
         entity = copy(entity)
+        entity.cw_edited = copy(entity.cw_edited)
         entity.cw_clear_relation_cache()
         self.metagen.init_entity(entity)
         entity.cw_edited.update(kwargs, skipsec=False)
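
``copy(entity)`` is a shallow copy, so without the new line the cached
prototype's ``cw_edited`` mapping would be shared with, and mutated through,
every imported entity. A generic sketch of the pitfall being fixed::

    from copy import copy

    class Proto(object):
        def __init__(self):
            self.cw_edited = {'created': None}

    cached = Proto()
    clone = copy(cached)                     # shallow: the dict is shared
    clone.cw_edited = copy(clone.cw_edited)  # without this, the update below
    clone.cw_edited['created'] = 'now'       # would pollute `cached` too
    assert cached.cw_edited['created'] is None
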
--- a/dbapi.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/dbapi.py	Tue Jun 28 16:33:53 2011 +0200
@@ -346,9 +346,9 @@
 
     # server session compat layer #############################################
 
-    def describe(self, eid):
+    def describe(self, eid, asdict=False):
         """return a tuple (type, sourceuri, extid) for the entity with id <eid>"""
-        return self.cnx.describe(eid)
+        return self.cnx.describe(eid, asdict)
 
     def source_defs(self):
         """return the definition of sources used by the repository."""
@@ -674,8 +674,16 @@
         return self._repo.get_option_value(option, foreid)
 
     @check_not_closed
-    def describe(self, eid):
-        return self._repo.describe(self.sessionid, eid, **self._txid())
+    def describe(self, eid, asdict=False):
+        metas = self._repo.describe(self.sessionid, eid, **self._txid())
+        if asdict:
+            if len(metas) == 3:
+                d = dict(zip(('type', 'source', 'extid'), metas))
+                d['asource'] = d['source']
+                return d
+            return dict(zip(('type', 'source', 'extid', 'asource'), metas))
+        # XXX :-1 for cw compat, use asdict=True for full information
+        return metas[:-1]
 
     # db-api like interface ####################################################
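
With this change ``describe`` can expose the actual source of an entity:
``asdict=True`` returns a dictionary including an ``asource`` key, while the
default keeps the historical 3-uple by dropping the trailing element. A
sketch with illustrative values for an entity coming from an external
source::

    metas = ('CWUser', 'system', None, 'ldap')  # type, source, extid, asource
    legacy = metas[:-1]                         # ('CWUser', 'system', None)
    full = dict(zip(('type', 'source', 'extid', 'asource'), metas))
    # {'type': 'CWUser', 'source': 'system', 'extid': None, 'asource': 'ldap'}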
 
--- a/debian/changelog	Tue Jun 28 16:32:38 2011 +0200
+++ b/debian/changelog	Tue Jun 28 16:33:53 2011 +0200
@@ -1,3 +1,9 @@
+cubicweb (3.13.0-1) unstable; urgency=low
+
+  * new upstream release
+
+ -- Sylvain Thénault <sylvain.thenault@logilab.fr>  Thu, 09 Jun 2011 20:18:41 +0200
+
 cubicweb (3.12.8-1) unstable; urgency=low
 
   * new upstream release
--- a/devtools/__init__.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/devtools/__init__.py	Tue Jun 28 16:33:53 2011 +0200
@@ -28,15 +28,17 @@
 import pickle
 import glob
 import warnings
+import hashlib
 from datetime import timedelta
 from os.path import (abspath, join, exists, basename, dirname, normpath, split,
                      isfile, isabs, splitext, isdir, expanduser)
 from functools import partial
-import hashlib
 
 from logilab.common.date import strptime
 from logilab.common.decorators import cached, clear_cache
-from cubicweb import CW_SOFTWARE_ROOT, ConfigurationError, schema, cwconfig, BadConnectionId
+
+from cubicweb import ConfigurationError, ExecutionError, BadConnectionId
+from cubicweb import CW_SOFTWARE_ROOT, schema, cwconfig
 from cubicweb.server.serverconfig import ServerConfiguration
 from cubicweb.etwist.twconfig import TwistedConfiguration
 
@@ -91,7 +93,7 @@
     """ Idea: this is less costly than a full re-creation of the repo object.
     off:
     * sessions are closed,
-    * pools are closed
+    * cnxsets are closed
     * system source is shutdown
     """
     if not repo._needs_refresh:
@@ -102,8 +104,8 @@
                 repo.close(sessionid)
             except BadConnectionId: # this is strange? thread issue?
                 print 'XXX unknown session', sessionid
-        for pool in repo.pools:
-            pool.close(True)
+        for cnxset in repo.cnxsets:
+            cnxset.close(True)
         repo.system_source.shutdown()
         repo._needs_refresh = True
         repo._has_started = False
@@ -111,12 +113,12 @@
 def turn_repo_on(repo):
     """Idea: this is less costly than a full re-creation of the repo object.
     on:
-    * pools are connected
+    * cnxsets are connected
     * cache are cleared
     """
     if repo._needs_refresh:
-        for pool in repo.pools:
-            pool.reconnect()
+        for cnxset in repo.cnxsets:
+            cnxset.reconnect()
         repo._type_source_cache = {}
         repo._extid_cache = {}
         repo.querier._rql_cache = {}
@@ -197,7 +199,10 @@
         directory from which tests are launched or by specifying an alternative
         sources file using self.sourcefile.
         """
-        sources = super(TestServerConfiguration, self).sources()
+        try:
+            sources = super(TestServerConfiguration, self).sources()
+        except ExecutionError:
+            sources = {}
         if not sources:
             sources = DEFAULT_SOURCES
         if 'admin' not in sources:
@@ -207,9 +212,6 @@
     # web config methods needed here for cases when we use this config as a web
     # config
 
-    def instance_md5_version(self):
-        return ''
-
     def default_base_url(self):
         return BASE_URL
 
@@ -475,12 +477,11 @@
             repo = self.get_repo(startup=True)
             cnx = self.get_cnx()
             session = repo._sessions[cnx.sessionid]
-            session.set_pool()
+            session.set_cnxset()
             _commit = session.commit
-            def always_pooled_commit():
-                _commit()
-                session.set_pool()
-            session.commit = always_pooled_commit
+            def keep_cnxset_commit():
+                _commit(free_cnxset=False)
+            session.commit = keep_cnxset_commit
             pre_setup_func(session, self.config)
             session.commit()
             cnx.close()
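
This hunk is part of the pool -> connections set renaming visible throughout
the changeset: ``session.set_pool()`` becomes ``session.set_cnxset()`` and
commit grows a ``free_cnxset`` argument, so the pre-setup session keeps its
connections set across commits instead of re-acquiring one each time. A
sketch of the resulting server-side idiom, assuming the 3.13 session API::

    session.set_cnxset()                   # bind a connections set
    try:
        session.execute('Any X WHERE X is CWUser')
        session.commit(free_cnxset=False)  # keep the set after the commit
    finally:
        session.close()
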
--- a/devtools/fake.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/devtools/fake.py	Tue Jun 28 16:33:53 2011 +0200
@@ -138,13 +138,15 @@
 
 
 class FakeSession(RequestSessionBase):
-    read_security = write_security = True
-    set_read_security = set_write_security = lambda *args, **kwargs: None
 
-    def __init__(self, repo=None, user=None):
+    def __init__(self, repo=None, user=None, vreg=None):
         self.repo = repo
-        self.vreg = getattr(self.repo, 'vreg', CubicWebVRegistry(FakeConfig(), initlog=False))
-        self.pool = FakePool()
+        if vreg is None:
+            vreg = getattr(self.repo, 'vreg', None)
+        if vreg is None:
+            vreg = CubicWebVRegistry(FakeConfig(), initlog=False)
+        self.vreg = vreg
+        self.cnxset = FakeConnectionsSet()
         self.user = user or FakeUser()
         self.is_internal_session = False
         self.transaction_data = {}
@@ -162,6 +164,13 @@
     def set_entity_cache(self, entity):
         pass
 
+    # for use with enabled_security context manager
+    read_security = write_security = True
+    def init_security(self, *args):
+        return None, None
+    def reset_security(self, *args):
+        return
+
 class FakeRepo(object):
     querier = None
     def __init__(self, schema, vreg=None, config=None):
@@ -201,6 +210,6 @@
         self.uri = uri
 
 
-class FakePool(object):
+class FakeConnectionsSet(object):
     def source(self, uri):
         return FakeSource(uri)
--- a/devtools/fill.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/devtools/fill.py	Tue Jun 28 16:33:53 2011 +0200
@@ -275,9 +275,6 @@
     :param choice_func: a function that takes an entity type, an attrname and
                         returns acceptable values for this attribute
     """
-    # XXX HACK, remove or fix asap
-    if etype in set(('String', 'Int', 'Float', 'Boolean', 'Date', 'CWGroup', 'CWUser')):
-        return []
     queries = []
     for index in xrange(entity_num):
         restrictions = []
--- a/devtools/repotest.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/devtools/repotest.py	Tue Jun 28 16:33:53 2011 +0200
@@ -205,7 +205,7 @@
         self.ueid = self.session.user.eid
         assert self.ueid != -1
         self.repo._type_source_cache = {} # clear cache
-        self.pool = self.session.set_pool()
+        self.cnxset = self.session.set_cnxset()
         self.maxeid = self.get_max_eid()
         do_monkey_patch()
         self._dumb_sessions = []
@@ -213,7 +213,7 @@
     def get_max_eid(self):
         return self.session.execute('Any MAX(X)')[0][0]
     def cleanup(self):
-        self.session.set_pool()
+        self.session.set_cnxset()
         self.session.execute('DELETE Any X WHERE X eid > %s' % self.maxeid)
 
     def tearDown(self):
@@ -225,7 +225,7 @@
         for session in self._dumb_sessions:
             session.rollback()
             session.close()
-        self.repo._free_pool(self.pool)
+        self.repo._free_cnxset(self.cnxset)
         assert self.session.user.eid != -1
 
     def set_debug(self, debug):
@@ -263,7 +263,8 @@
         u = self.repo._build_user(self.session, self.session.user.eid)
         u._groups = set(groups)
         s = Session(u, self.repo)
-        s._threaddata.pool = self.pool
+        s._threaddata.cnxset = self.cnxset
+        s._threaddata.ctx_count = 1
         # register session to ensure it gets closed
         self._dumb_sessions.append(s)
         return s
@@ -273,7 +274,7 @@
 
     def commit(self):
         self.session.commit()
-        self.session.set_pool()
+        self.session.set_cnxset()
 
 
 class BasePlannerTC(BaseQuerierTC):
@@ -287,7 +288,7 @@
         # XXX source_defs
         self.o = self.repo.querier
         self.session = self.repo._sessions.values()[0]
-        self.pool = self.session.set_pool()
+        self.cnxset = self.session.set_cnxset()
         self.schema = self.o.schema
         self.sources = self.o._repo.sources
         self.system = self.sources[-1]
@@ -311,7 +312,7 @@
             del self.repo.sources_by_uri[source.uri]
         undo_monkey_patch()
         for session in self._dumb_sessions:
-            session._threaddata.pool = None
+            session._threaddata.cnxset = None
             session.close()
 
     def _prepare_plan(self, rql, kwargs=None):
--- a/devtools/testlib.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/devtools/testlib.py	Tue Jun 28 16:33:53 2011 +0200
@@ -274,7 +274,7 @@
     def session(self):
         """return current server side session (using default manager account)"""
         session = self.repo._sessions[self.cnx.sessionid]
-        session.set_pool()
+        session.set_cnxset()
         return session
 
     @property
@@ -458,7 +458,7 @@
         try:
             return self.cnx.commit()
         finally:
-            self.session.set_pool() # ensure pool still set after commit
+            self.session.set_cnxset() # ensure cnxset still set after commit
 
     @nocoverage
     def rollback(self):
@@ -467,7 +467,7 @@
         except dbapi.ProgrammingError:
             pass # connection closed
         finally:
-            self.session.set_pool() # ensure pool still set after commit
+            self.session.set_cnxset() # ensure cnxset still set after commit
 
     # # server side db api #######################################################
 
@@ -475,7 +475,7 @@
         if eid_key is not None:
             warn('[3.8] eid_key is deprecated, you can safely remove this argument',
                  DeprecationWarning, stacklevel=2)
-        self.session.set_pool()
+        self.session.set_cnxset()
         return self.session.execute(rql, args)
 
     # other utilities #########################################################
@@ -500,6 +500,10 @@
         it2 = set(getattr(x, 'eid', x) for x in it2)
         super(CubicWebTC, self).assertItemsEqual(it1, it2, *args, **kwargs)
 
+    def assertMessageEqual(self, req, params, expected_msg):
+        msg = req.session.data[params['_cwmsgid']]
+        self.assertEqual(msg, expected_msg)
+
     # workflow utilities #######################################################
 
     def assertPossibleTransitions(self, entity, expected):
@@ -568,6 +572,8 @@
             if views:
                 try:
                     view = viewsvreg._select_best(views, req, rset=rset)
+                    if view is None:
+                        raise NoSelectableObject((req,), {'rset':rset}, views)
                     if view.linkable():
                         yield view
                     else:
@@ -722,7 +728,7 @@
         self.assertEqual(session.login, origsession.login)
         self.assertEqual(session.anonymous_session, False)
         self.assertEqual(path, 'view')
-        self.assertEqual(params, {'__message': 'welcome %s !' % req.user.login})
+        self.assertMessageEqual(req, params, 'welcome %s !' % req.user.login)
 
     def assertAuthFailure(self, req, nbsessions=0):
         self.app.connect(req)
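
Messages are now stored server-side and referenced through a ``_cwmsgid``
request parameter instead of being inlined in a ``__message`` URL argument;
``assertMessageEqual`` follows that scheme. A minimal sketch of the lookup it
performs, with a hypothetical message id (``req`` being a request bound to a
session, e.g. from ``self.request()`` in a test)::

    params = {'_cwmsgid': 'abcd1234'}                  # from the redirect URL
    req.session.data['abcd1234'] = u'welcome admin !'  # stored by the server
    msg = req.session.data[params['_cwmsgid']]
    assert msg == u'welcome admin !'
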
--- a/doc/book/en/admin/instance-config.rst	Tue Jun 28 16:32:38 2011 +0200
+++ b/doc/book/en/admin/instance-config.rst	Tue Jun 28 16:33:53 2011 +0200
@@ -43,18 +43,33 @@
 use apache (for example) for redirection and the variable `main.https-url`
 of the configuration file.
 
+For this to work you have to activate the following apache modules:
+
+* rewrite
+* proxy
+* proxy_http
+
+On Debian based systems this can be done with::
+
+  a2enmod rewrite proxy proxy_http
+  /etc/init.d/apache2 restart
+
 :Example:
 
    For an apache redirection of a site accessible via `http://localhost/demo`
    and `https://localhost/demo` and actually running on port 8080, the
    http configuration requires::
 
+     ProxyPreserveHost On
+     RewriteEngine On
      RewriteCond %{REQUEST_URI} ^/demo
      RewriteRule ^/demo$ /demo/
      RewriteRule ^/demo/(.*) http://127.0.0.1:8080/$1 [L,P]
 
    and for the https configuration::
 
+     ProxyPreserveHost On
+     RewriteEngine On
      RewriteCond %{REQUEST_URI} ^/demo
      RewriteRule ^/demo$ /demo/
      RewriteRule ^/demo/(.*) http://127.0.0.1:8080/https/$1 [L,P]
@@ -65,6 +80,11 @@
      base-url = http://localhost/demo
      https-url = https://localhost/demo
 
+Notice that if you want a site accessible through https only, not *both* http
+and https, simply set `base-url` to the https url and use the first rewrite
+section in your apache configuration (as you would for an http configuration
+with an apache front-end).
+
 Setting up the web client
 -------------------------
 :`web.embed-allowed`:
--- a/entities/test/unittest_wfobjs.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/entities/test/unittest_wfobjs.py	Tue Jun 28 16:33:53 2011 +0200
@@ -165,7 +165,7 @@
         user = self.user()
         iworkflowable = user.cw_adapt_to('IWorkflowable')
         iworkflowable.fire_transition('deactivate', comment=u'deactivate user')
-        user.clear_all_caches()
+        user.cw_clear_all_caches()
         self.assertEqual(iworkflowable.state, 'deactivated')
         self._test_manager_deactivate(user)
         trinfo = self._test_manager_deactivate(user)
@@ -192,7 +192,7 @@
         self.commit()
         iworkflowable.fire_transition('wake up')
         self.commit()
-        user.clear_all_caches()
+        user.cw_clear_all_caches()
         self.assertEqual(iworkflowable.state, 'deactivated')
 
     # XXX test managers can change state without matching transition
@@ -274,14 +274,14 @@
         self.assertEqual(iworkflowable.subworkflow_input_transition(), None)
         iworkflowable.fire_transition('swftr1', u'go')
         self.commit()
-        group.clear_all_caches()
+        group.cw_clear_all_caches()
         self.assertEqual(iworkflowable.current_state.eid, swfstate1.eid)
         self.assertEqual(iworkflowable.current_workflow.eid, swf.eid)
         self.assertEqual(iworkflowable.main_workflow.eid, mwf.eid)
         self.assertEqual(iworkflowable.subworkflow_input_transition().eid, swftr1.eid)
         iworkflowable.fire_transition('tr1', u'go')
         self.commit()
-        group.clear_all_caches()
+        group.cw_clear_all_caches()
         self.assertEqual(iworkflowable.current_state.eid, state2.eid)
         self.assertEqual(iworkflowable.current_workflow.eid, mwf.eid)
         self.assertEqual(iworkflowable.main_workflow.eid, mwf.eid)
@@ -295,10 +295,10 @@
         # force back to state1
         iworkflowable.change_state('state1', u'gadget')
         iworkflowable.fire_transition('swftr1', u'au')
-        group.clear_all_caches()
+        group.cw_clear_all_caches()
         iworkflowable.fire_transition('tr2', u'chapeau')
         self.commit()
-        group.clear_all_caches()
+        group.cw_clear_all_caches()
         self.assertEqual(iworkflowable.current_state.eid, state3.eid)
         self.assertEqual(iworkflowable.current_workflow.eid, mwf.eid)
         self.assertEqual(iworkflowable.main_workflow.eid, mwf.eid)
@@ -390,7 +390,7 @@
                                  ):
             iworkflowable.fire_transition(trans)
             self.commit()
-            group.clear_all_caches()
+            group.cw_clear_all_caches()
             self.assertEqual(iworkflowable.state, nextstate)
 
 
@@ -408,11 +408,11 @@
         wf.add_state('asleep', initial=True)
         self.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s',
                      {'wf': wf.eid, 'x': self.member.eid})
-        self.member.clear_all_caches()
+        self.member.cw_clear_all_caches()
         iworkflowable = self.member.cw_adapt_to('IWorkflowable')
         self.assertEqual(iworkflowable.state, 'activated')# no change before commit
         self.commit()
-        self.member.clear_all_caches()
+        self.member.cw_clear_all_caches()
         self.assertEqual(iworkflowable.current_workflow.eid, wf.eid)
         self.assertEqual(iworkflowable.state, 'asleep')
         self.assertEqual(iworkflowable.workflow_history, ())
@@ -429,7 +429,7 @@
         self.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s',
                      {'wf': wf.eid, 'x': self.member.eid})
         self.commit()
-        self.member.clear_all_caches()
+        self.member.cw_clear_all_caches()
         self.assertEqual(iworkflowable.current_workflow.eid, wf.eid)
         self.assertEqual(iworkflowable.state, 'asleep')
         self.assertEqual(parse_hist(iworkflowable.workflow_history),
@@ -472,10 +472,10 @@
         self.commit()
         self.execute('DELETE X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s',
                      {'wf': wf.eid, 'x': self.member.eid})
-        self.member.clear_all_caches()
+        self.member.cw_clear_all_caches()
         self.assertEqual(iworkflowable.state, 'asleep')# no change before commit
         self.commit()
-        self.member.clear_all_caches()
+        self.member.cw_clear_all_caches()
         self.assertEqual(iworkflowable.current_workflow.name, "default user workflow")
         self.assertEqual(iworkflowable.state, 'activated')
         self.assertEqual(parse_hist(iworkflowable.workflow_history),
@@ -504,13 +504,13 @@
         self.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s',
                      {'wf': wf.eid, 'x': user.eid})
         self.commit()
-        user.clear_all_caches()
+        user.cw_clear_all_caches()
         self.assertEqual(iworkflowable.state, 'asleep')
         self.assertEqual([t.name for t in iworkflowable.possible_transitions()],
                           ['rest'])
         iworkflowable.fire_transition('rest')
         self.commit()
-        user.clear_all_caches()
+        user.cw_clear_all_caches()
         self.assertEqual(iworkflowable.state, 'asleep')
         self.assertEqual([t.name for t in iworkflowable.possible_transitions()],
                           ['rest'])
@@ -520,7 +520,7 @@
         self.commit()
         iworkflowable.fire_transition('rest')
         self.commit()
-        user.clear_all_caches()
+        user.cw_clear_all_caches()
         self.assertEqual(iworkflowable.state, 'dead')
         self.assertEqual(parse_hist(iworkflowable.workflow_history),
                           [('asleep', 'asleep', 'rest', None),
@@ -557,7 +557,7 @@
     def setUp(self):
         CubicWebTC.setUp(self)
         self.wf = self.session.user.cw_adapt_to('IWorkflowable').current_workflow
-        self.session.set_pool()
+        self.session.set_cnxset()
         self.s_activated = self.wf.state_by_name('activated').eid
         self.s_deactivated = self.wf.state_by_name('deactivated').eid
         self.s_dummy = self.wf.add_state(u'dummy').eid
@@ -629,13 +629,13 @@
         iworkflowable = user.cw_adapt_to('IWorkflowable')
         iworkflowable.fire_transition('deactivate')
         cnx.commit()
-        session.set_pool()
+        session.set_cnxset()
         with self.assertRaises(ValidationError) as cm:
             iworkflowable.fire_transition('deactivate')
         self.assertEqual(self._cleanup_msg(cm.exception.errors['by_transition-subject']),
                                             u"transition isn't allowed from")
         cnx.rollback()
-        session.set_pool()
+        session.set_cnxset()
         # get back now
         iworkflowable.fire_transition('activate')
         cnx.commit()
--- a/entities/wfobjs.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/entities/wfobjs.py	Tue Jun 28 16:33:53 2011 +0200
@@ -326,8 +326,8 @@
             result[ep.subwf_state.eid] = ep.destination and ep.destination.eid
         return result
 
-    def clear_all_caches(self):
-        super(WorkflowTransition, self).clear_all_caches()
+    def cw_clear_all_caches(self):
+        super(WorkflowTransition, self).cw_clear_all_caches()
         clear_cache(self, 'exit_points')
 
 
--- a/entity.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/entity.py	Tue Jun 28 16:33:53 2011 +0200
@@ -395,8 +395,10 @@
 
     @cached
     def cw_metainformation(self):
-        res = dict(zip(('type', 'source', 'extid'), self._cw.describe(self.eid)))
-        res['source'] = self._cw.source_defs()[res['source']]
+        res = self._cw.describe(self.eid, asdict=True)
+        # use 'asource' and not 'source' since this is the actual source,
+        # while 'source' is the physical source (where it's stored)
+        res['source'] = self._cw.source_defs()[res.pop('asource')]
         return res
 
     def cw_check_perm(self, action):
@@ -431,9 +433,11 @@
         use_ext_id = False
         if 'base_url' not in kwargs and \
                getattr(self._cw, 'search_state', ('normal',))[0] == 'normal':
-            baseurl = self.cw_metainformation()['source'].get('base-url')
-            if baseurl:
-                kwargs['base_url'] = baseurl
+            sourcemeta = self.cw_metainformation()['source']
+            if sourcemeta.get('use-cwuri-as-url'):
+                return self.cwuri # XXX consider kwargs?
+            if sourcemeta.get('base-url'):
+                kwargs['base_url'] = sourcemeta['base-url']
                 use_ext_id = True
         if method in (None, 'view'):
             try:
@@ -942,7 +946,7 @@
             assert role
             self._cw_related_cache.pop('%s_%s' % (rtype, role), None)
 
-    def clear_all_caches(self): # XXX cw_clear_all_caches
+    def cw_clear_all_caches(self):
         """flush all caches on this entity. Further attributes/relations access
         will triggers new database queries to get back values.
 
@@ -1024,6 +1028,10 @@
 
     # deprecated stuff #########################################################
 
+    @deprecated('[3.13] use entity.cw_clear_all_caches()')
+    def clear_all_caches(self):
+        return self.cw_clear_all_caches()
+
     @deprecated('[3.9] use entity.cw_attr_value(attr)')
     def get_value(self, name):
         return self.cw_attr_value(name)
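
Entity cache clearing moves under the ``cw_`` prefix reserved to the
framework, with a deprecation shim for old callers; this is the rename that
all the ``clear_all_caches()`` test updates earlier in this changeset track::

    user.cw_clear_all_caches()   # new spelling
    user.clear_all_caches()      # still works, but emits
    # DeprecationWarning: [3.13] use entity.cw_clear_all_caches()
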
--- a/etwist/server.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/etwist/server.py	Tue Jun 28 16:33:53 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -17,14 +17,19 @@
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """twisted server for CubicWeb web instances"""
 
+from __future__ import with_statement
+
 __docformat__ = "restructuredtext en"
 
 import sys
 import os
+import os.path as osp
 import select
 import errno
 import traceback
 import threading
+import re
+import hashlib
 from os.path import join
 from time import mktime
 from datetime import date, timedelta
@@ -41,7 +46,8 @@
 
 from logilab.common.decorators import monkeypatch
 
-from cubicweb import AuthenticationError, ConfigurationError, CW_EVENT_MANAGER
+from cubicweb import (AuthenticationError, ConfigurationError,
+                      CW_EVENT_MANAGER, CubicWebException)
 from cubicweb.utils import json_dumps
 from cubicweb.web import Redirect, DirectResponse, StatusResponse, LogOut
 from cubicweb.web.application import CubicWebPublisher
@@ -70,13 +76,73 @@
                             code=http.FORBIDDEN,
                             stream='Access forbidden')
 
-class File(static.File):
-    """Prevent from listing directories"""
+
+class NoListingFile(static.File):
     def directoryListing(self):
         return ForbiddenDirectoryLister()
 
 
-class LongTimeExpiringFile(File):
+class DataLookupDirectory(NoListingFile):
+    def __init__(self, config, path):
+        self.md5_version = config.instance_md5_version()
+        NoListingFile.__init__(self, path)
+        self.config = config
+        self.here = path
+        self._defineChildResources()
+        if self.config.debugmode:
+            self.data_modconcat_basepath = '/data/??'
+        else:
+            self.data_modconcat_basepath = '/data/%s/??' % self.md5_version
+
+    def _defineChildResources(self):
+        self.putChild(self.md5_version, self)
+
+    def getChild(self, path, request):
+        if not path:
+            uri = request.uri
+            if uri.startswith('/https/'):
+                uri = uri[6:]
+            if uri.startswith(self.data_modconcat_basepath):
+                resource_relpath = uri[len(self.data_modconcat_basepath):]
+                if resource_relpath:
+                    paths = resource_relpath.split(',')
+                    try:
+                        return ConcatFiles(self.config, paths)
+                    except ConcatFileNotFoundError:
+                        return self.childNotFound
+            return self.directoryListing()
+        childpath = join(self.here, path)
+        dirpath, rid = self.config.locate_resource(childpath)
+        if dirpath is None:
+            # resource not found
+            return self.childNotFound
+        filepath = os.path.join(dirpath, rid)
+        if os.path.isdir(filepath):
+            resource = DataLookupDirectory(self.config, childpath)
+            # cache resource for this segment path to avoid recomputing
+            # directory lookup
+            self.putChild(path, resource)
+            return resource
+        else:
+            return NoListingFile(filepath)
+
+
+class FCKEditorResource(NoListingFile):
+    def __init__(self, config, path):
+        NoListingFile.__init__(self, path)
+        self.config = config
+
+    def getChild(self, path, request):
+        pre_path = request.path.split('/')[1:]
+        if pre_path[0] == 'https':
+            pre_path.pop(0)
+            uiprops = self.config.https_uiprops
+        else:
+            uiprops = self.config.uiprops
+        return static.File(osp.join(uiprops['FCKEDITOR_PATH'], path))
+
+
+class LongTimeExpiringFile(DataLookupDirectory):
     """overrides static.File and sets a far future ``Expires`` date
     on the resource.
 
@@ -88,28 +154,90 @@
       etc.
 
     """
+    def _defineChildResources(self):
+        pass
+
     def render(self, request):
         # XXX: Don't provide additional resource information to error responses
         #
         # the HTTP RFC recommends not going further than 1 year ahead
         expires = date.today() + timedelta(days=6*30)
         request.setHeader('Expires', generateDateTime(mktime(expires.timetuple())))
-        return File.render(self, request)
+        return DataLookupDirectory.render(self, request)
+
+
+class ConcatFileNotFoundError(CubicWebException):
+    pass
+
+
+class ConcatFiles(LongTimeExpiringFile):
+    def __init__(self, config, paths):
+        _, ext = osp.splitext(paths[0])
+        self._resources = {}
+        # create a unique / predictable filename
+        fname = 'cache_concat_' + hashlib.md5(';'.join(paths)).hexdigest() + ext
+        filepath = osp.join(config.appdatahome, 'uicache', fname)
+        LongTimeExpiringFile.__init__(self, config, filepath)
+        self._concat_cached_filepath(filepath, paths)
+
+    def _resource(self, path):
+        try:
+            return self._resources[path]
+        except KeyError:
+            self._resources[path] = self.config.locate_resource(path)
+            return self._resources[path]
 
+    def _concat_cached_filepath(self, filepath, paths):
+        if not self._up_to_date(filepath, paths):
+            concat_data = []
+            for path in paths:
+                dirpath, rid = self._resource(path)
+                if rid is None:
+                    if self.config.debugmode:
+                        raise ConcatFileNotFoundError(path)
+                    else:
+                        # In production mode log an error, do not return a 404
+                        # XXX the erroneous content is cached anyway
+                        LOGGER.error('concatenated data url error: %r file '
+                                     'does not exist', path)
+                else:
+                    concat_data.append(open(osp.join(dirpath, rid)).read())
+            with open(filepath, 'wb') as f:
+                f.write('\n'.join(concat_data))
+
+    def _up_to_date(self, filepath, paths):
+        """
+        The concat-file is considered up-to-date if it exists.
+        In debug mode, an additional check is performed to make sure that
+        the concat-file is more recent than all concatenated files.
+        """
+        if not osp.isfile(filepath):
+            return False
+        if self.config.debugmode:
+            concat_lastmod = os.stat(filepath).st_mtime
+            for path in paths:
+                dirpath, rid = self._resource(path)
+                if rid is None:
+                    raise ConcatFileNotFoundError(path)
+                path = osp.join(dirpath, rid)
+                if os.stat(path).st_mtime > concat_lastmod:
+                    return False
+        return True
 
 class CubicWebRootResource(resource.Resource):
     def __init__(self, config, vreg=None):
+        resource.Resource.__init__(self)
         self.config = config
         # instantiate publisher here and not in init_publisher to get some
         # checks done before daemonization (eg versions consistency)
         self.appli = CubicWebPublisher(config, vreg=vreg)
         self.base_url = config['base-url']
         self.https_url = config['https-url']
-        self.children = {}
-        self.static_directories = set(('data%s' % config.instance_md5_version(),
-                                       'data', 'static', 'fckeditor'))
         global MAX_POST_LENGTH
         MAX_POST_LENGTH = config['max-post-length']
+        self.putChild('static', NoListingFile(config.static_directory))
+        self.putChild('fckeditor', FCKEditorResource(self.config, ''))
+        self.putChild('data', DataLookupDirectory(self.config, ''))
 
     def init_publisher(self):
         config = self.config
@@ -152,38 +280,6 @@
 
     def getChild(self, path, request):
         """Indicate which resource to use to process down the URL's path"""
-        pre_path = request.path.split('/')[1:]
-        if pre_path[0] == 'https':
-            pre_path.pop(0)
-            uiprops = self.config.https_uiprops
-        else:
-            uiprops = self.config.uiprops
-        directory = pre_path[0]
-        # Anything in data/, static/, fckeditor/ and the generated versioned
-        # data directory is treated as static files
-        if directory in self.static_directories:
-            # take care fckeditor may appears as root directory or as a data
-            # subdirectory
-            if directory == 'static':
-                return File(self.config.static_directory)
-            if directory == 'fckeditor':
-                return File(uiprops['FCKEDITOR_PATH'])
-            if directory != 'data':
-                # versioned directory, use specific file with http cache
-                # headers so their are cached for a very long time
-                cls = LongTimeExpiringFile
-            else:
-                cls = File
-            if path == 'fckeditor':
-                return cls(uiprops['FCKEDITOR_PATH'])
-            if path == directory: # recurse
-                return self
-            datadir, path = self.config.locate_resource(path)
-            if datadir is None:
-                return self # recurse
-            self.debug('static file %s from %s', path, datadir)
-            return cls(join(datadir, path))
-        # Otherwise we use this single resource
         return self
 
     def render(self, request):
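
The static resource tree is rebuilt around ``DataLookupDirectory``: in
production mode concatenated resources are served from
``/data/<md5>/??file1.js,file2.js`` and cached on disk under
``<appdatahome>/uicache``. The cache file name derives from the requested
path list, mirroring ``ConcatFiles.__init__`` above::

    import hashlib
    import os.path as osp

    paths = ['cubicweb.ajax.js', 'jquery.js']
    _, ext = osp.splitext(paths[0])
    fname = 'cache_concat_' + hashlib.md5(';'.join(paths)).hexdigest() + ext
    # cache_concat_<32 hex digits>.js -- unique and predictable per path list
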
--- a/etwist/test/unittest_server.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/etwist/test/unittest_server.py	Tue Jun 28 16:33:53 2011 +0200
@@ -15,8 +15,12 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+
+import os, os.path as osp, glob
+
 from cubicweb.devtools.testlib import CubicWebTC
-from cubicweb.etwist.server import host_prefixed_baseurl
+from cubicweb.etwist.server import (host_prefixed_baseurl, ConcatFiles,
+                                    ConcatFileNotFoundError)
 
 
 class HostPrefixedBaseURLTC(CubicWebTC):
@@ -50,3 +54,30 @@
         self._check('http://localhost:8080/hg/', 'code.cubicweb.org',
                     'http://localhost:8080/hg/')
 
+
+class ConcatFilesTC(CubicWebTC):
+
+    def tearDown(self):
+        super(ConcatFilesTC, self).tearDown()
+        self._cleanup_concat_cache()
+        self.config.debugmode = False
+
+    def _cleanup_concat_cache(self):
+        uicachedir = osp.join(self.config.apphome, 'uicache')
+        for fname in glob.glob(osp.join(uicachedir, 'cache_concat_*')):
+            os.unlink(osp.join(uicachedir, fname))
+
+    def test_cache(self):
+        concat = ConcatFiles(self.config, ('cubicweb.ajax.js', 'jquery.js'))
+        self.failUnless(osp.isfile(concat.path))
+
+    def test_404(self):
+        # when not in debug mode, should not crash
+        ConcatFiles(self.config, ('cubicweb.ajax.js', 'dummy.js'))
+        # in debug mode, raise error
+        self.config.debugmode = True
+        try:
+            self.assertRaises(ConcatFileNotFoundError, ConcatFiles, self.config,
+                              ('cubicweb.ajax.js', 'dummy.js'))
+        finally:
+            self.config.debugmode = False
--- a/hooks/__init__.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/hooks/__init__.py	Tue Jun 28 16:33:53 2011 +0200
@@ -63,11 +63,9 @@
                             source.info('added %s entities', len(stats['created']))
                         if stats.get('updated'):
                             source.info('updated %s entities', len(stats['updated']))
-                        session.commit()
                     except Exception, exc:
                         session.exception('while trying to update feed %s', source)
-                        session.rollback()
-                    session.set_pool()
+                    session.set_cnxset()
             finally:
                 session.close()
         self.repo.looping_task(60, update_feeds, self.repo)
--- a/hooks/metadata.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/hooks/metadata.py	Tue Jun 28 16:33:53 2011 +0200
@@ -23,6 +23,7 @@
 
 from cubicweb.selectors import is_instance
 from cubicweb.server import hook
+from cubicweb.server.edition import EditedEntity
 
 
 class MetaDataHook(hook.Hook):
@@ -68,8 +69,9 @@
     def precommit_event(self):
         session = self.session
         relations = [(eid, session.user.eid) for eid in self.get_data()
-                # don't consider entities that have been created and
-                # deleted in the same transaction
+                # don't consider entities that have been created and deleted in
+                # the same transaction, nor ones where created_by has been
+                # explicitly set
                 if not session.deleted_in_transaction(eid) and \
                    not session.entity_from_eid(eid).created_by]
         session.add_relations([('created_by', relations)])
@@ -141,3 +143,76 @@
             session.repo.system_source.index_entity(
                 session, session.entity_from_eid(self.eidto))
 
+
+
+# entity source handling #######################################################
+
+class ChangeEntityUpdateCaches(hook.Operation):
+    def postcommit_event(self):
+        self.oldsource.reset_caches()
+        repo = self.session.repo
+        entity = self.entity
+        extid = entity.cw_metainformation()['extid']
+        repo._type_source_cache[entity.eid] = (
+            entity.__regid__, self.newsource.uri, None, self.newsource.uri)
+        if self.oldsource.copy_based_source:
+            uri = 'system'
+        else:
+            uri = self.oldsource.uri
+        repo._extid_cache[(extid, uri)] = -entity.eid
+
+class ChangeEntitySourceDeleteHook(MetaDataHook):
+    """support for moving an entity from an external source by watching 'Any
+    cw_source CWSource' relation
+    """
+
+    __regid__ = 'cw.metadata.source-change'
+    __select__ = MetaDataHook.__select__ & hook.match_rtype('cw_source')
+    events = ('before_delete_relation',)
+
+    def __call__(self):
+        if (self._cw.deleted_in_transaction(self.eidfrom)
+            or self._cw.deleted_in_transaction(self.eidto)):
+            return
+        schange = self._cw.transaction_data.setdefault('cw_source_change', {})
+        schange[self.eidfrom] = self.eidto
+
+
+class ChangeEntitySourceAddHook(MetaDataHook):
+    __regid__ = 'cw.metadata.source-change'
+    __select__ = MetaDataHook.__select__ & hook.match_rtype('cw_source')
+    events = ('before_add_relation',)
+
+    def __call__(self):
+        schange = self._cw.transaction_data.get('cw_source_change')
+        if schange is not None and self.eidfrom in schange:
+            newsource = self._cw.entity_from_eid(self.eidto)
+            if newsource.name != 'system':
+                raise Exception('changing source to anything other than the '
+                                'system source is unsupported')
+            syssource = newsource.repo_source
+            oldsource = self._cw.entity_from_eid(schange[self.eidfrom])
+            entity = self._cw.entity_from_eid(self.eidfrom)
+            # copy entity if necessary
+            if not oldsource.repo_source.copy_based_source:
+                entity.complete(skip_bytes=False)
+                entity.cw_edited = EditedEntity(entity, **entity.cw_attr_cache)
+                syssource.add_entity(self._cw, entity)
+            # we don't want the moved entity to be reimported later.  To
+            # distinguish this state, the trick is to change the associated
+            # record in the 'entities' system table with eid=-eid while leaving
+            # other fields unchanged, and to add a new record with eid=eid,
+            # source='system'. External sources will then consider the case
+            # where `extid2eid` returns a negative eid as 'this entity was
+            # known but has been moved, ignore it'.
+            self._cw.system_sql('UPDATE entities SET eid=-eid,source=%(source)s '
+                                'WHERE eid=%(eid)s',
+                                {'eid': self.eidfrom, 'source': newsource.name})
+            attrs = {'type': entity.__regid__, 'eid': entity.eid, 'extid': None,
+                     'source': 'system', 'asource': 'system',
+                     'mtime': datetime.now()}
+            self._cw.system_sql(syssource.sqlgen.insert('entities', attrs), attrs)
+            # register an operation to update repository/sources caches
+            ChangeEntityUpdateCaches(self._cw, entity=entity,
+                                     oldsource=oldsource.repo_source,
+                                     newsource=syssource)
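
The negated eid acts as a tombstone: the old row keeps its source information
but can no longer collide with a live eid, while a fresh 'system' row takes
over. A sketch of the consumer-side convention described in the comment above
(loop and helper names are hypothetical)::

    for extid in feed_extids:
        eid = extid2eid(extid)        # may now return a negative eid
        if eid is not None and eid < 0:
            continue                  # moved to system source: skip reimport
        process_entity(extid, eid)
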
--- a/hooks/syncschema.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/hooks/syncschema.py	Tue Jun 28 16:33:53 2011 +0200
@@ -42,12 +42,15 @@
 TYPE_CONVERTER = { # XXX
     'Boolean': bool,
     'Int': int,
+    'BigInt': int,
     'Float': float,
     'Password': str,
     'String': unicode,
     'Date' : unicode,
     'Datetime' : unicode,
     'Time' : unicode,
+    'TZDatetime' : unicode,
+    'TZTime' : unicode,
     }
 
 # core entity and relation types which can't be removed
@@ -92,7 +95,7 @@
     # create index before alter table which may expectingly fail during test
     # (sqlite) while index creation should never fail (test for index existence
     # is done by the dbhelper)
-    session.pool.source('system').create_index(session, table, column)
+    session.cnxset.source('system').create_index(session, table, column)
     session.info('added index on %s(%s)', table, column)
 
 
@@ -252,7 +255,7 @@
                                description=entity.description)
         eschema = schema.add_entity_type(etype)
         # create the necessary table
-        tablesql = y2sql.eschema2sql(session.pool.source('system').dbhelper,
+        tablesql = y2sql.eschema2sql(session.cnxset.source('system').dbhelper,
                                      eschema, prefix=SQL_PREFIX)
         for sql in tablesql.split(';'):
             if sql.strip():
@@ -289,7 +292,7 @@
         self.session.vreg.schema.rename_entity_type(oldname, newname)
         # we need sql to operate physical changes on the system database
         sqlexec = self.session.system_sql
-        dbhelper= self.session.pool.source('system').dbhelper
+        dbhelper= self.session.cnxset.source('system').dbhelper
         sql = dbhelper.sql_rename_table(SQL_PREFIX+oldname,
                                         SQL_PREFIX+newname)
         sqlexec(sql)
@@ -433,7 +436,7 @@
         # update the in-memory schema first
         rdefdef = self.init_rdef(**props)
         # then make necessary changes to the system source database
-        syssource = session.pool.source('system')
+        syssource = session.cnxset.source('system')
         attrtype = y2sql.type_from_constraints(
             syssource.dbhelper, rdefdef.object, rdefdef.constraints)
         # XXX should be moved somehow into lgdb: sqlite doesn't support to
@@ -606,7 +609,7 @@
         self.oldvalues = dict( (attr, getattr(rdef, attr)) for attr in self.values)
         rdef.update(self.values)
         # then make necessary changes to the system source database
-        syssource = session.pool.source('system')
+        syssource = session.cnxset.source('system')
         if 'indexed' in self.values:
             syssource.update_rdef_indexed(session, rdef)
             self.indexed_changed = True
@@ -624,7 +627,7 @@
         # revert changes on in memory schema
         self.rdef.update(self.oldvalues)
         # revert changes on database
-        syssource = self.session.pool.source('system')
+        syssource = self.session.cnxset.source('system')
         if self.indexed_changed:
             syssource.update_rdef_indexed(self.session, self.rdef)
         if self.null_allowed_changed:
@@ -652,7 +655,7 @@
         rdef.constraints.remove(self.oldcstr)
         # then update database: alter the physical schema on size/unique
         # constraint changes
-        syssource = session.pool.source('system')
+        syssource = session.cnxset.source('system')
         cstrtype = self.oldcstr.type()
         if cstrtype == 'SizeConstraint':
             syssource.update_rdef_column(session, rdef)
@@ -668,7 +671,7 @@
         if self.oldcstr is not None:
             self.rdef.constraints.append(self.oldcstr)
         # revert changes on database
-        syssource = self.session.pool.source('system')
+        syssource = self.session.cnxset.source('system')
         if self.size_cstr_changed:
             syssource.update_rdef_column(self.session, self.rdef)
         if self.unique_changed:
@@ -699,7 +702,7 @@
         rdef.constraints.append(newcstr)
         # then update database: alter the physical schema on size/unique
         # constraint changes
-        syssource = session.pool.source('system')
+        syssource = session.cnxset.source('system')
         if cstrtype == 'SizeConstraint' and (oldcstr is None or
                                              oldcstr.max != newcstr.max):
             syssource.update_rdef_column(session, rdef)
@@ -716,7 +719,7 @@
         prefix = SQL_PREFIX
         table = '%s%s' % (prefix, self.entity.constraint_of[0].name)
         cols = ['%s%s' % (prefix, r.name) for r in self.entity.relations]
-        dbhelper= session.pool.source('system').dbhelper
+        dbhelper= session.cnxset.source('system').dbhelper
         sqls = dbhelper.sqls_create_multicol_unique_index(table, cols)
         for sql in sqls:
             session.system_sql(sql)
@@ -736,7 +739,7 @@
         session = self.session
         prefix = SQL_PREFIX
         table = '%s%s' % (prefix, self.entity.type)
-        dbhelper= session.pool.source('system').dbhelper
+        dbhelper= session.cnxset.source('system').dbhelper
         cols = ['%s%s' % (prefix, c) for c in self.cols]
         sqls = dbhelper.sqls_drop_multicol_unique_index(table, cols)
         for sql in sqls:
@@ -785,7 +788,7 @@
     """
 
     def precommit_event(self):
-        """the observed connections pool has been commited"""
+        """the observed connections set has been committed"""
         try:
             erschema = self.session.vreg.schema.schema_by_eid(self.eid)
         except KeyError:
@@ -814,7 +817,7 @@
     """
 
     def precommit_event(self):
-        """the observed connections pool has been commited"""
+        """the observed connections set has been committed"""
         try:
             erschema = self.session.vreg.schema.schema_by_eid(self.eid)
         except KeyError:
@@ -1228,7 +1231,7 @@
                         source.fti_index_entities(session, [container])
         if to_reindex:
             # Transaction has already been committed
-            session.pool.commit()
+            session.cnxset.commit()
 
 
 
--- a/hooks/syncsession.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/hooks/syncsession.py	Tue Jun 28 16:33:53 2011 +0200
@@ -56,7 +56,7 @@
 class _DeleteGroupOp(_GroupOperation):
     """synchronize user when a in_group relation has been deleted"""
     def postcommit_event(self):
-        """the observed connections pool has been commited"""
+        """the observed connections set has been committed"""
         groups = self.cnxuser.groups
         try:
             groups.remove(self.group)
@@ -67,7 +67,7 @@
 class _AddGroupOp(_GroupOperation):
     """synchronize user when a in_group relation has been added"""
     def postcommit_event(self):
-        """the observed connections pool has been commited"""
+        """the observed connections set has been committed"""
         groups = self.cnxuser.groups
         if self.group in groups:
             self.warning('user %s already in group %s', self.cnxuser,
@@ -97,7 +97,7 @@
         hook.Operation.__init__(self, session)
 
     def postcommit_event(self):
-        """the observed connections pool has been commited"""
+        """the observed connections set has been committed"""
         try:
             self.session.repo.close(self.cnxid)
         except BadConnectionId:
@@ -122,7 +122,7 @@
     """a user's custom properties has been deleted"""
 
     def postcommit_event(self):
-        """the observed connections pool has been commited"""
+        """the observed connections set has been committed"""
         try:
             del self.cwpropdict[self.key]
         except KeyError:
@@ -133,7 +133,7 @@
     """a user's custom properties has been added/changed"""
 
     def postcommit_event(self):
-        """the observed connections pool has been commited"""
+        """the observed connections set has been committed"""
         self.cwpropdict[self.key] = self.value
 
 
@@ -141,7 +141,7 @@
     """a user's custom properties has been added/changed"""
 
     def postcommit_event(self):
-        """the observed connections pool has been commited"""
+        """the observed connections set has been committed"""
         cwprop = self.cwprop
         if not cwprop.for_user:
             self.session.vreg['propertyvalues'][cwprop.pkey] = cwprop.value
--- a/hooks/syncsources.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/hooks/syncsources.py	Tue Jun 28 16:33:53 2011 +0200
@@ -30,6 +30,8 @@
     category = 'cw.sources'
 
 
+# repo sources synchronization #################################################
+
 class SourceAddedOp(hook.Operation):
     def postcommit_event(self):
         self.session.repo.add_source(self.entity)
@@ -100,8 +102,10 @@
                 pass
 
 
-# source mapping synchronization. Expect cw_for_source/cw_schema are immutable
-# relations (i.e. can't change from a source or schema to another).
+# source mapping synchronization ###############################################
+#
+# Expect cw_for_source/cw_schema are immutable relations (i.e. can't change from
+# a source or schema to another).
 
 class SourceMappingDeleteHook(SourceHook):
     """check cw_for_source and cw_schema are immutable relations
--- a/hooks/test/unittest_syncschema.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/hooks/test/unittest_syncschema.py	Tue Jun 28 16:33:53 2011 +0200
@@ -36,9 +36,9 @@
         self.__class__.schema_eids = schema_eids_idx(self.repo.schema)
 
     def index_exists(self, etype, attr, unique=False):
-        self.session.set_pool()
-        dbhelper = self.session.pool.source('system').dbhelper
-        sqlcursor = self.session.pool['system']
+        self.session.set_cnxset()
+        dbhelper = self.session.cnxset.source('system').dbhelper
+        sqlcursor = self.session.cnxset['system']
         return dbhelper.index_exists(sqlcursor, SQL_PREFIX + etype, SQL_PREFIX + attr, unique=unique)
 
     def _set_perms(self, eid):
@@ -57,9 +57,9 @@
 
     def test_base(self):
         schema = self.repo.schema
-        self.session.set_pool()
-        dbhelper = self.session.pool.source('system').dbhelper
-        sqlcursor = self.session.pool['system']
+        self.session.set_cnxset()
+        dbhelper = self.session.cnxset.source('system').dbhelper
+        sqlcursor = self.session.cnxset['system']
         self.failIf(schema.has_entity('Societe2'))
         self.failIf(schema.has_entity('concerne2'))
         # schema should be update on insertion (after commit)
@@ -170,9 +170,9 @@
     # schema modification hooks tests #########################################
 
     def test_uninline_relation(self):
-        self.session.set_pool()
-        dbhelper = self.session.pool.source('system').dbhelper
-        sqlcursor = self.session.pool['system']
+        self.session.set_cnxset()
+        dbhelper = self.session.cnxset.source('system').dbhelper
+        sqlcursor = self.session.cnxset['system']
         self.failUnless(self.schema['state_of'].inlined)
         try:
             self.execute('SET X inlined FALSE WHERE X name "state_of"')
@@ -195,9 +195,9 @@
             self.assertEqual(len(rset), 2)
 
     def test_indexed_change(self):
-        self.session.set_pool()
-        dbhelper = self.session.pool.source('system').dbhelper
-        sqlcursor = self.session.pool['system']
+        self.session.set_cnxset()
+        dbhelper = self.session.cnxset.source('system').dbhelper
+        sqlcursor = self.session.cnxset['system']
         try:
             self.execute('SET X indexed FALSE WHERE X relation_type R, R name "name"')
             self.failUnless(self.schema['name'].rdef('Workflow', 'String').indexed)
@@ -214,9 +214,9 @@
             self.failUnless(self.index_exists('Workflow', 'name'))
 
     def test_unique_change(self):
-        self.session.set_pool()
-        dbhelper = self.session.pool.source('system').dbhelper
-        sqlcursor = self.session.pool['system']
+        self.session.set_cnxset()
+        dbhelper = self.session.cnxset.source('system').dbhelper
+        sqlcursor = self.session.cnxset['system']
         try:
             self.execute('INSERT CWConstraint X: X cstrtype CT, DEF constrained_by X '
                          'WHERE CT name "UniqueConstraint", DEF relation_type RT, DEF from_entity E,'
--- a/hooks/workflow.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/hooks/workflow.py	Tue Jun 28 16:33:53 2011 +0200
@@ -148,7 +148,7 @@
 
 class WorkflowHook(hook.Hook):
     __abstract__ = True
-    category = 'workflow'
+    category = 'metadata'
 
 
 class SetInitialStateHook(WorkflowHook):
@@ -160,21 +160,15 @@
         _SetInitialStateOp(self._cw, entity=self.entity)
 
 
-class PrepareStateChangeHook(WorkflowHook):
-    """record previous state information"""
-    __regid__ = 'cwdelstate'
-    __select__ = WorkflowHook.__select__ & hook.match_rtype('in_state')
-    events = ('before_delete_relation',)
+class FireTransitionHook(WorkflowHook):
+    """check the transition is allowed and add missing information into the
+    TrInfo entity.
 
-    def __call__(self):
-        self._cw.transaction_data.setdefault('pendingrelations', []).append(
-            (self.eidfrom, self.rtype, self.eidto))
-
-
-class FireTransitionHook(WorkflowHook):
-    """check the transition is allowed, add missing information. Expect that:
+    Expect that:
     * wf_info_for inlined relation is set
     * by_transition or to_state (managers only) inlined relation is set
+
+    Check whether an automatic transition should be fired at the end.
     """
     __regid__ = 'wffiretransition'
     __select__ = WorkflowHook.__select__ & is_instance('TrInfo')
@@ -273,7 +267,7 @@
 
 
 class FiredTransitionHook(WorkflowHook):
-    """change related entity state"""
+    """change related entity state and handle exit of subworkflow"""
     __regid__ = 'wffiretransition'
     __select__ = WorkflowHook.__select__ & is_instance('TrInfo')
     events = ('after_add_entity',)
@@ -296,6 +290,7 @@
     __regid__ = 'wfcheckinstate'
     __select__ = WorkflowHook.__select__ & hook.match_rtype('in_state')
     events = ('before_add_relation',)
+    category = 'integrity'
 
     def __call__(self):
         session = self._cw
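
Note on the recategorization above: hook categories drive the filtering done
by hooks_control, the context manager this changeset itself uses in
bootstrapmigration_repository.py. A minimal sketch, assuming a server-side
session in scope; the RQL string and eid variables are illustrative:

    from cubicweb.server.session import hooks_control

    # inside this block only 'integrity' hooks may fire: after this patch
    # that includes CheckInStateChangeAllowed, while the other workflow
    # hooks (now in the 'metadata' category) are skipped
    with hooks_control(session, session.HOOKS_DENY_ALL, 'integrity'):
        session.execute('SET X in_state S WHERE X eid %(x)s, S eid %(s)s',
                        {'x': x_eid, 's': s_eid})
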
--- a/i18n/de.po	Tue Jun 28 16:32:38 2011 +0200
+++ b/i18n/de.po	Tue Jun 28 16:33:53 2011 +0200
@@ -256,6 +256,12 @@
 msgid "BaseTransition_plural"
 msgstr "Übergänge (abstrakt)"
 
+msgid "BigInt"
+msgstr ""
+
+msgid "BigInt_plural"
+msgstr ""
+
 msgid "Bookmark"
 msgstr "Lesezeichen"
 
@@ -1849,6 +1855,9 @@
 msgid "ctxtoolbar"
 msgstr "Werkzeugleiste"
 
+msgid "currently in synchronization"
+msgstr ""
+
 msgid "custom_workflow"
 msgstr "angepasster Workflow"
 
@@ -2382,6 +2391,9 @@
 msgid "external page"
 msgstr "externe Seite"
 
+msgid "facet-loading-msg"
+msgstr ""
+
 msgid "facet.filters"
 msgstr ""
 
@@ -2566,9 +2578,6 @@
 "generische Relation, die anzeigt, dass eine Entität mit einer anderen Web-"
 "Ressource identisch ist (siehe http://www.w3.org/TR/owl-ref/#sameAs-def)."
 
-msgid "go back to the index page"
-msgstr "Zurück zur Index-Seite"
-
 msgid "granted to groups"
 msgstr "an Gruppen gewährt"
 
@@ -3178,6 +3187,12 @@
 msgid "no associated permissions"
 msgstr "keine entsprechende Berechtigung"
 
+msgid "no content next link"
+msgstr ""
+
+msgid "no content prev link"
+msgstr ""
+
 #, python-format
 msgid "no edited fields specified for entity %s"
 msgstr "kein Eingabefeld spezifiziert Für Entität %s"
@@ -3888,6 +3903,13 @@
 msgid "synchronization-interval must be greater than 1 minute"
 msgstr ""
 
+msgid "synchronizing"
+msgstr ""
+
+msgctxt "CWSource"
+msgid "synchronizing"
+msgstr ""
+
 msgid "table"
 msgstr "Tabelle"
 
@@ -3926,6 +3948,12 @@
 msgstr ""
 "Der Wert \"%s\" wird bereits benutzt, bitte verwenden Sie einen anderen Wert"
 
+msgid "there is no next page"
+msgstr ""
+
+msgid "there is no previous page"
+msgstr ""
+
 msgid "this action is not reversible!"
 msgstr "Achtung! Diese Aktion ist unumkehrbar."
 
@@ -4025,8 +4053,8 @@
 msgid "tr_count"
 msgstr ""
 
-msgid "transaction undone"
-msgstr "Transaktion rückgängig gemacht"
+msgid "transaction undoed"
+msgstr ""
 
 #, python-format
 msgid "transition %(tr)s isn't allowed from %(st)s"
@@ -4310,30 +4338,30 @@
 msgid "value"
 msgstr "Wert"
 
+#, python-format
+msgid "value %(value)s must be %(op)s %(boundary)s"
+msgstr ""
+
+#, python-format
+msgid "value %(value)s must be <= %(boundary)s"
+msgstr ""
+
+#, python-format
+msgid "value %(value)s must be >= %(boundary)s"
+msgstr ""
+
 msgid "value associated to this key is not editable manually"
 msgstr ""
 "Der mit diesem Schlüssele verbundene Wert kann n icht manuell geändert "
 "werden."
 
 #, python-format
-msgid "value must be %(op)s %(boundary)s"
-msgstr "Der Wert muss %(op)s %(boundary)s sein."
-
-#, python-format
-msgid "value must be <= %(boundary)s"
-msgstr "Der Wert muss <= %(boundary)s sein."
+msgid "value should have maximum size of %s but found %s"
+msgstr ""
 
 #, python-format
-msgid "value must be >= %(boundary)s"
-msgstr "Der Wert muss >= %(boundary)s sein."
-
-#, python-format
-msgid "value should have maximum size of %s"
-msgstr "Der Wert darf höchstens %s betragen."
-
-#, python-format
-msgid "value should have minimum size of %s"
-msgstr "Der Wert muss mindestens %s betragen."
+msgid "value should have minimum size of %s but found %s"
+msgstr ""
 
 msgid "vcard"
 msgstr "VCard"
@@ -4479,76 +4507,3 @@
 #, python-format
 msgid "you should un-inline relation %s which is supported and may be crossed "
 msgstr ""
-
-#~ msgid "Attributes with non default permissions:"
-#~ msgstr "Attribute mit nicht-standard-Berechtigungen"
-
-#~ msgid "Entity types"
-#~ msgstr "Entitätstypen"
-
-#~ msgid "Index"
-#~ msgstr "Index"
-
-#~ msgid "Permissions for entity types"
-#~ msgstr "Berechtigungen für Entitätstypen"
-
-#~ msgid "Permissions for relations"
-#~ msgstr "Berechtigungen für Relationen"
-
-#~ msgid "Relation types"
-#~ msgstr "Relationstypen"
-
-#~ msgid "am/pm calendar (month)"
-#~ msgstr "am/pm Kalender (Monat)"
-
-#~ msgid "am/pm calendar (semester)"
-#~ msgstr "am/pm Kalender (Halbjahr)"
-
-#~ msgid "am/pm calendar (week)"
-#~ msgstr "am/pm Kalender (Woche)"
-
-#~ msgid "am/pm calendar (year)"
-#~ msgstr "am/pm Kalender (Jahr)"
-
-#~ msgid "application entities"
-#~ msgstr "Anwendungs-Entitäten"
-
-#~ msgid "calendar (month)"
-#~ msgstr "Kalender (monatlich)"
-
-#~ msgid "calendar (semester)"
-#~ msgstr "Kalender (halbjährlich)"
-
-#~ msgid "calendar (week)"
-#~ msgstr "Kalender (wöchentlich)"
-
-#~ msgid "calendar (year)"
-#~ msgstr "Kalender (jährlich)"
-
-#~ msgid ""
-#~ "can't set inlined=%(inlined)s, %(stype)s %(rtype)s %(otype)s has "
-#~ "cardinality=%(card)s"
-#~ msgstr ""
-#~ "Kann 'inlined' = %(inlined)s nicht zuweisen, %(stype)s %(rtype)s %(otype)"
-#~ "s hat die Kardinalität %(card)s"
-
-#~ msgid "create an index page"
-#~ msgstr "Eine Index-Seite anlegen"
-
-#~ msgid "edit the index page"
-#~ msgstr "Index-Seite bearbeiten"
-
-#~ msgid "schema entities"
-#~ msgstr "Entitäten, die das Schema definieren"
-
-#~ msgid "schema-security"
-#~ msgstr "Rechte"
-
-#~ msgid "system entities"
-#~ msgstr "System-Entitäten"
-
-#~ msgid "timestamp of the latest source synchronization."
-#~ msgstr "Zeitstempel der letzten Synchronisierung mit der Quelle."
-
-#~ msgid "up"
-#~ msgstr "nach oben"
--- a/i18n/en.po	Tue Jun 28 16:32:38 2011 +0200
+++ b/i18n/en.po	Tue Jun 28 16:33:53 2011 +0200
@@ -5,7 +5,7 @@
 msgstr ""
 "Project-Id-Version: 2.0\n"
 "POT-Creation-Date: 2006-01-12 17:35+CET\n"
-"PO-Revision-Date: 2010-09-15 14:55+0200\n"
+"PO-Revision-Date: 2011-04-29 12:57+0200\n"
 "Last-Translator: Sylvain Thenault <sylvain.thenault@logilab.fr>\n"
 "Language-Team: English <devel@logilab.fr.org>\n"
 "Language: en\n"
@@ -245,6 +245,12 @@
 msgid "BaseTransition_plural"
 msgstr "Transitions (abstract)"
 
+msgid "BigInt"
+msgstr "Big integer"
+
+msgid "BigInt_plural"
+msgstr "Big integers"
+
 msgid "Bookmark"
 msgstr "Bookmark"
 
@@ -503,7 +509,7 @@
 msgstr "Interval"
 
 msgid "IntervalBoundConstraint"
-msgstr "interval constraint"
+msgstr "Interval constraint"
 
 msgid "Interval_plural"
 msgstr "Intervals"
@@ -1804,6 +1810,9 @@
 msgid "ctxtoolbar"
 msgstr "toolbar"
 
+msgid "currently in synchronization"
+msgstr ""
+
 msgid "custom_workflow"
 msgstr "custom workflow"
 
@@ -2324,6 +2333,9 @@
 msgid "external page"
 msgstr ""
 
+msgid "facet-loading-msg"
+msgstr "processing, please wait"
+
 msgid "facet.filters"
 msgstr "filter"
 
@@ -2506,9 +2518,6 @@
 "object as a local one: http://www.w3.org/TR/owl-ref/#sameAs-def"
 msgstr ""
 
-msgid "go back to the index page"
-msgstr ""
-
 msgid "granted to groups"
 msgstr ""
 
@@ -3089,6 +3098,12 @@
 msgid "no associated permissions"
 msgstr ""
 
+msgid "no content next link"
+msgstr ""
+
+msgid "no content prev link"
+msgstr ""
+
 #, python-format
 msgid "no edited fields specified for entity %s"
 msgstr ""
@@ -3784,6 +3799,13 @@
 msgid "synchronization-interval must be greater than 1 minute"
 msgstr ""
 
+msgid "synchronizing"
+msgstr ""
+
+msgctxt "CWSource"
+msgid "synchronizing"
+msgstr ""
+
 msgid "table"
 msgstr ""
 
@@ -3821,6 +3843,12 @@
 msgid "the value \"%s\" is already used, use another one"
 msgstr ""
 
+msgid "there is no next page"
+msgstr ""
+
+msgid "there is no previous page"
+msgstr ""
+
 msgid "this action is not reversible!"
 msgstr ""
 
@@ -3920,7 +3948,7 @@
 msgid "tr_count"
 msgstr "transition number"
 
-msgid "transaction undone"
+msgid "transaction undoed"
 msgstr ""
 
 #, python-format
@@ -4196,27 +4224,27 @@
 msgid "value"
 msgstr ""
 
+#, python-format
+msgid "value %(value)s must be %(op)s %(boundary)s"
+msgstr ""
+
+#, python-format
+msgid "value %(value)s must be <= %(boundary)s"
+msgstr ""
+
+#, python-format
+msgid "value %(value)s must be >= %(boundary)s"
+msgstr ""
+
 msgid "value associated to this key is not editable manually"
 msgstr ""
 
 #, python-format
-msgid "value must be %(op)s %(boundary)s"
-msgstr ""
-
-#, python-format
-msgid "value must be <= %(boundary)s"
+msgid "value should have maximum size of %s but found %s"
 msgstr ""
 
 #, python-format
-msgid "value must be >= %(boundary)s"
-msgstr ""
-
-#, python-format
-msgid "value should have maximum size of %s"
-msgstr ""
-
-#, python-format
-msgid "value should have minimum size of %s"
+msgid "value should have minimum size of %s but found %s"
 msgstr ""
 
 msgid "vcard"
@@ -4361,10 +4389,3 @@
 #, python-format
 msgid "you should un-inline relation %s which is supported and may be crossed "
 msgstr ""
-
-#~ msgctxt "CWAttribute"
-#~ msgid "relations_object"
-#~ msgstr "constrained by"
-
-#~ msgid "schema-security"
-#~ msgstr "permissions"
--- a/i18n/es.po	Tue Jun 28 16:32:38 2011 +0200
+++ b/i18n/es.po	Tue Jun 28 16:33:53 2011 +0200
@@ -257,6 +257,12 @@
 msgid "BaseTransition_plural"
 msgstr "Transiciones (abstractas)"
 
+msgid "BigInt"
+msgstr ""
+
+msgid "BigInt_plural"
+msgstr ""
+
 msgid "Bookmark"
 msgstr "Favorito"
 
@@ -1878,6 +1884,9 @@
 msgid "ctxtoolbar"
 msgstr "Barra de herramientas"
 
+msgid "currently in synchronization"
+msgstr ""
+
 msgid "custom_workflow"
 msgstr "Workflow específico"
 
@@ -2425,6 +2434,9 @@
 msgid "external page"
 msgstr "Página externa"
 
+msgid "facet-loading-msg"
+msgstr ""
+
 msgid "facet.filters"
 msgstr "Filtros"
 
@@ -2609,9 +2621,6 @@
 "Relación genérica que indicar que una entidad es idéntica a otro recurso web "
 "(ver http://www.w3.org/TR/owl-ref/#sameAs-def)."
 
-msgid "go back to the index page"
-msgstr "Regresar a la página de inicio"
-
 msgid "granted to groups"
 msgstr "Otorgado a los grupos"
 
@@ -3146,11 +3155,11 @@
 
 msgctxt "CWSource"
 msgid "name"
-msgstr "nombre"
+msgstr ""
 
 msgctxt "State"
 msgid "name"
-msgstr "Nombre"
+msgstr "nombre"
 
 msgctxt "Transition"
 msgid "name"
@@ -3219,6 +3228,12 @@
 msgid "no associated permissions"
 msgstr "No existe permiso asociado"
 
+msgid "no content next link"
+msgstr ""
+
+msgid "no content prev link"
+msgstr ""
+
 #, python-format
 msgid "no edited fields specified for entity %s"
 msgstr "Ningún campo editable especificado para la entidad %s"
@@ -3938,6 +3953,13 @@
 msgid "synchronization-interval must be greater than 1 minute"
 msgstr "synchronization-interval debe ser mayor a 1 minuto"
 
+msgid "synchronizing"
+msgstr ""
+
+msgctxt "CWSource"
+msgid "synchronizing"
+msgstr ""
+
 msgid "table"
 msgstr "Tabla"
 
@@ -3976,6 +3998,12 @@
 msgid "the value \"%s\" is already used, use another one"
 msgstr "El valor \"%s\" ya esta en uso, favor de utilizar otro"
 
+msgid "there is no next page"
+msgstr ""
+
+msgid "there is no previous page"
+msgstr ""
+
 msgid "this action is not reversible!"
 msgstr "Esta acción es irreversible!."
 
@@ -4075,8 +4103,8 @@
 msgid "tr_count"
 msgstr "n° de transición"
 
-msgid "transaction undone"
-msgstr "Transacciones Anuladas"
+msgid "transaction undoed"
+msgstr ""
 
 #, python-format
 msgid "transition %(tr)s isn't allowed from %(st)s"
@@ -4360,28 +4388,28 @@
 msgid "value"
 msgstr "Vampr"
 
+#, python-format
+msgid "value %(value)s must be %(op)s %(boundary)s"
+msgstr ""
+
+#, python-format
+msgid "value %(value)s must be <= %(boundary)s"
+msgstr ""
+
+#, python-format
+msgid "value %(value)s must be >= %(boundary)s"
+msgstr ""
+
 msgid "value associated to this key is not editable manually"
 msgstr "El valor asociado a este elemento no es editable manualmente"
 
 #, python-format
-msgid "value must be %(op)s %(boundary)s"
-msgstr "El valor debe ser %(op)s %(boundary)s"
-
-#, python-format
-msgid "value must be <= %(boundary)s"
-msgstr "El valor debe ser <= %(boundary)s"
+msgid "value should have maximum size of %s but found %s"
+msgstr ""
 
 #, python-format
-msgid "value must be >= %(boundary)s"
-msgstr "El valor debe ser >= %(boundary)s"
-
-#, python-format
-msgid "value should have maximum size of %s"
-msgstr "El valor no debe exceder de %s"
-
-#, python-format
-msgid "value should have minimum size of %s"
-msgstr "El valor no puede ser menor a %s"
+msgid "value should have minimum size of %s but found %s"
+msgstr ""
 
 msgid "vcard"
 msgstr "vcard"
@@ -4530,6 +4558,3 @@
 msgstr ""
 "usted debe  quitar la puesta en línea de la relación %s que es aceptada y "
 "puede ser cruzada"
-
-#~ msgid "add a %s"
-#~ msgstr "agregar un %s"
--- a/i18n/fr.po	Tue Jun 28 16:32:38 2011 +0200
+++ b/i18n/fr.po	Tue Jun 28 16:33:53 2011 +0200
@@ -255,6 +255,12 @@
 msgid "BaseTransition_plural"
 msgstr "Transitions (abstraites)"
 
+msgid "BigInt"
+msgstr "Entier long"
+
+msgid "BigInt_plural"
+msgstr "Entiers longs"
+
 msgid "Bookmark"
 msgstr "Signet"
 
@@ -1881,6 +1887,9 @@
 msgid "ctxtoolbar"
 msgstr "barre d'outils"
 
+msgid "currently in synchronization"
+msgstr "en cours de synchronisation"
+
 msgid "custom_workflow"
 msgstr "workflow spécifique"
 
@@ -2423,6 +2432,9 @@
 msgid "external page"
 msgstr "page externe"
 
+msgid "facet-loading-msg"
+msgstr "en cours de traitement, merci de patienter"
+
 msgid "facet.filters"
 msgstr "facettes"
 
@@ -2607,9 +2619,6 @@
 "relation générique permettant d'indiquer qu'une entité est identique à une "
 "autre ressource web (voir http://www.w3.org/TR/owl-ref/#sameAs-def)."
 
-msgid "go back to the index page"
-msgstr "retourner sur la page d'accueil"
-
 msgid "granted to groups"
 msgstr "accordée aux groupes"
 
@@ -3218,6 +3227,12 @@
 msgid "no associated permissions"
 msgstr "aucune permission associée"
 
+msgid "no content next link"
+msgstr ""
+
+msgid "no content prev link"
+msgstr ""
+
 #, python-format
 msgid "no edited fields specified for entity %s"
 msgstr "aucun champ à éditer spécifié pour l'entité %s"
@@ -3939,6 +3954,13 @@
 msgid "synchronization-interval must be greater than 1 minute"
 msgstr "synchronization-interval doit être supérieur à 1 minute"
 
+msgid "synchronizing"
+msgstr "synchronisation"
+
+msgctxt "CWSource"
+msgid "synchronizing"
+msgstr "synchronisation"
+
 msgid "table"
 msgstr "table"
 
@@ -3976,6 +3998,12 @@
 msgid "the value \"%s\" is already used, use another one"
 msgstr "la valeur \"%s\" est déjà utilisée, veuillez utiliser une autre valeur"
 
+msgid "there is no next page"
+msgstr ""
+
+msgid "there is no previous page"
+msgstr ""
+
 msgid "this action is not reversible!"
 msgstr ""
 "Attention ! Cette opération va détruire les données de façon irréversible."
@@ -4076,8 +4104,13 @@
 msgid "tr_count"
 msgstr "n° de transition"
 
-msgid "transaction undone"
+msgid "transaction undoed"
 msgstr "transaction annulée"
 
 #, python-format
 msgid "transition %(tr)s isn't allowed from %(st)s"
@@ -4359,28 +4392,28 @@
 msgid "value"
 msgstr "valeur"
 
+#, python-format
+msgid "value %(value)s must be %(op)s %(boundary)s"
+msgstr "la valeur %(value)s doit être %(op)s %(boundary)s"
+
+#, python-format
+msgid "value %(value)s must be <= %(boundary)s"
+msgstr "la valeur %(value)s doit être <= %(boundary)s"
+
+#, python-format
+msgid "value %(value)s must be >= %(boundary)s"
+msgstr "la valeur %(value)s doit être >= %(boundary)s"
+
 msgid "value associated to this key is not editable manually"
 msgstr "la valeur associée à cette clé n'est pas éditable manuellement"
 
 #, python-format
-msgid "value must be %(op)s %(boundary)s"
-msgstr "la valeur doit être %(op)s %(boundary)s"
-
-#, python-format
-msgid "value must be <= %(boundary)s"
-msgstr "la valeur doit être <= %(boundary)s"
+msgid "value should have maximum size of %s but found %s"
+msgstr "la taille maximum est %s mais cette valeur est de taille %s"
 
 #, python-format
-msgid "value must be >= %(boundary)s"
-msgstr "la valeur doit être >= %(boundary)s"
-
-#, python-format
-msgid "value should have maximum size of %s"
-msgstr "la valeur doit être de taille %s au maximum"
-
-#, python-format
-msgid "value should have minimum size of %s"
-msgstr "la valeur doit être de taille %s au minimum"
+msgid "value should have minimum size of %s but found %s"
+msgstr "la taille minimum est %s mais cette valeur est de taille %s"
 
 msgid "vcard"
 msgstr "vcard"
@@ -4530,66 +4563,3 @@
 msgstr ""
 "vous devriez enlevé la mise en ligne de la relation %s qui est supportée et "
 "peut-être croisée"
-
-#~ msgid "Attributes with non default permissions:"
-#~ msgstr "Attributs ayant des permissions non-standard"
-
-#~ msgid "Entity types"
-#~ msgstr "Types d'entités"
-
-#~ msgid "Permissions for entity types"
-#~ msgstr "Permissions pour les types d'entités"
-
-#~ msgid "Permissions for relations"
-#~ msgstr "Permissions pour les relations"
-
-#~ msgid "Relation types"
-#~ msgstr "Types de relation"
-
-#~ msgid "add a %s"
-#~ msgstr "ajouter un %s"
-
-#~ msgid "am/pm calendar (month)"
-#~ msgstr "calendrier am/pm (mois)"
-
-#~ msgid "am/pm calendar (semester)"
-#~ msgstr "calendrier am/pm (semestre)"
-
-#~ msgid "am/pm calendar (week)"
-#~ msgstr "calendrier am/pm (semaine)"
-
-#~ msgid "am/pm calendar (year)"
-#~ msgstr "calendrier am/pm (année)"
-
-#~ msgid "application entities"
-#~ msgstr "entités applicatives"
-
-#~ msgid "calendar (month)"
-#~ msgstr "calendrier (mensuel)"
-
-#~ msgid "calendar (semester)"
-#~ msgstr "calendrier (semestriel)"
-
-#~ msgid "calendar (week)"
-#~ msgstr "calendrier (hebdo)"
-
-#~ msgid "calendar (year)"
-#~ msgstr "calendrier (annuel)"
-
-#~ msgid "create an index page"
-#~ msgstr "créer une page d'accueil"
-
-#~ msgid "edit the index page"
-#~ msgstr "éditer la page d'accueil"
-
-#~ msgid "schema entities"
-#~ msgstr "entités définissant le schéma"
-
-#~ msgid "schema-security"
-#~ msgstr "permissions"
-
-#~ msgid "system entities"
-#~ msgstr "entités systèmes"
-
-#~ msgid "timestamp of the latest source synchronization."
-#~ msgstr "date de la dernière synchronisation avec la source."
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/misc/migration/3.13.0_Any.py	Tue Jun 28 16:33:53 2011 +0200
@@ -0,0 +1,4 @@
+sync_schema_props_perms('cw_source', syncprops=False)
+add_attribute('CWSource', 'synchronizing')
+if schema['BigInt'].eid is None:
+    add_entity_type('BigInt')
--- a/misc/migration/bootstrapmigration_repository.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/misc/migration/bootstrapmigration_repository.py	Tue Jun 28 16:33:53 2011 +0200
@@ -35,6 +35,12 @@
     ss.execschemarql(rql, rdef, ss.rdef2rql(rdef, CSTRMAP, groupmap=None))
     commit(ask_confirm=False)
 
+if applcubicwebversion <= (3, 13, 0) and cubicwebversion >= (3, 13, 1):
+    sql('ALTER TABLE entities ADD COLUMN asource VARCHAR(64)')
+    sql('UPDATE entities SET asource=cw_name  '
+        'FROM cw_CWSource, cw_source_relation '
+        'WHERE entities.eid=cw_source_relation.eid_from AND cw_source_relation.eid_to=cw_CWSource.cw_eid')
+
 if applcubicwebversion == (3, 6, 0) and cubicwebversion >= (3, 6, 0):
     CSTRMAP = dict(rql('Any T, X WHERE X is CWConstraintType, X name T',
                        ask_confirm=False))
@@ -49,7 +55,7 @@
 elif applcubicwebversion < (3, 6, 0) and cubicwebversion >= (3, 6, 0):
     CSTRMAP = dict(rql('Any T, X WHERE X is CWConstraintType, X name T',
                        ask_confirm=False))
-    session.set_pool()
+    session.set_cnxset()
     permsdict = ss.deserialize_ertype_permissions(session)
 
     with hooks_control(session, session.HOOKS_ALLOW_ALL, 'integrity'):
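
Note: the UPDATE ... FROM join above is PostgreSQL-specific syntax. A
hypothetical portable variant (not part of this patch) would use a
correlated subquery instead:

    sql('UPDATE entities SET asource='
        '(SELECT cw_name FROM cw_CWSource, cw_source_relation'
        ' WHERE entities.eid=cw_source_relation.eid_from'
        ' AND cw_source_relation.eid_to=cw_CWSource.cw_eid)')
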
--- a/misc/scripts/drop_external_entities.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/misc/scripts/drop_external_entities.py	Tue Jun 28 16:33:53 2011 +0200
@@ -3,7 +3,7 @@
 
 sql("DELETE FROM entities WHERE type='Int'")
 
-ecnx = session.pool.connection(source)
+ecnx = session.cnxset.connection(source)
 for e in rql('Any X WHERE X cw_source S, S name %(name)s', {'name': source}).entities():
     meta = e.cw_metainformation()
     assert meta['source']['uri'] == source
--- a/req.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/req.py	Tue Jun 28 16:33:53 2011 +0200
@@ -409,7 +409,7 @@
 
     # abstract methods to override according to the web front-end #############
 
-    def describe(self, eid):
+    def describe(self, eid, asdict=False):
         """return a tuple (type, sourceuri, extid) for the entity with id <eid>"""
         raise NotImplementedError
 
--- a/rset.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/rset.py	Tue Jun 28 16:33:53 2011 +0200
@@ -475,43 +475,57 @@
         entity.eid = eid
         # cache entity
         req.set_entity_cache(entity)
-        eschema = entity.e_schema
         # try to complete the entity if there are some additional columns
         if len(rowvalues) > 1:
-            rqlst = self.syntax_tree()
-            if rqlst.TYPE == 'select':
-                # UNION query, find the subquery from which this entity has been
-                # found
-                select, col = rqlst.locate_subquery(col, etype, self.args)
+            eschema = entity.e_schema
+            eid_col, attr_cols, rel_cols = self._rset_structure(eschema, col)
+            entity.eid = rowvalues[eid_col]
+            for attr, col_idx in attr_cols.items():
+                entity.cw_attr_cache[attr] = rowvalues[col_idx]
+            for (rtype, role), col_idx in rel_cols.items():
+                value = rowvalues[col_idx]
+                if value is None:
+                    if role == 'subject':
+                        rql = 'Any Y WHERE X %s Y, X eid %s'
+                    else:
+                        rql = 'Any Y WHERE Y %s X, X eid %s'
+                    rrset = ResultSet([], rql % (rtype, entity.eid))
+                    rrset.req = req
+                else:
+                    rrset = self._build_entity(row, col_idx).as_rset()
+                entity.cw_set_relation_cache(rtype, role, rrset)
+        return entity
+
+    @cached
+    def _rset_structure(self, eschema, entity_col):
+        eid_col = col = entity_col
+        rqlst = self.syntax_tree()
+        attr_cols = {}
+        rel_cols = {}
+        if rqlst.TYPE == 'select':
+            # UNION query, find the subquery from which this entity has been
+            # found
+            select, col = rqlst.locate_subquery(entity_col, eschema.type, self.args)
+        else:
+            select = rqlst
+        # take care, due to outer join support, we may find None
+        # values for non final relation
+        for i, attr, role in attr_desc_iterator(select, col, entity_col):
+            if role == 'subject':
+                rschema = eschema.subjrels[attr]
             else:
-                select = rqlst
-            # take care, due to outer join support, we may find None
-            # values for non final relation
-            for i, attr, role in attr_desc_iterator(select, col, entity.cw_col):
-                if role == 'subject':
-                    rschema = eschema.subjrels[attr]
-                    if rschema.final:
-                        if attr == 'eid':
-                            entity.eid = rowvalues[i]
-                        else:
-                            entity.cw_attr_cache[attr] = rowvalues[i]
-                        continue
+                rschema = eschema.objrels[attr]
+            if rschema.final:
+                if attr == 'eid':
+                    eid_col = i
                 else:
-                    rschema = eschema.objrels[attr]
+                    attr_cols[attr] = i
+            else:
                 rdef = eschema.rdef(attr, role)
                 # only keep value if it can't be multivalued
                 if rdef.role_cardinality(role) in '1?':
-                    if rowvalues[i] is None:
-                        if role == 'subject':
-                            rql = 'Any Y WHERE X %s Y, X eid %s'
-                        else:
-                            rql = 'Any Y WHERE Y %s X, X eid %s'
-                        rrset = ResultSet([], rql % (attr, entity.eid))
-                        rrset.req = req
-                    else:
-                        rrset = self._build_entity(row, i).as_rset()
-                    entity.cw_set_relation_cache(attr, role, rrset)
-        return entity
+                    rel_cols[(attr, role)] = i
+        return eid_col, attr_cols, rel_cols
 
     @cached
     def syntax_tree(self):
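
The refactoring above hoists all row-independent analysis (syntax tree
parsing, subquery location, column classification) into _rset_structure,
memoized per result set, so building each entity is reduced to dictionary
lookups. A minimal sketch of what the @cached decorator from
logilab.common provides here; Demo is an illustrative class, not part of
this patch:

    from logilab.common.decorators import cached

    class Demo(object):
        calls = 0

        @cached
        def analyse(self, col):
            # expensive, row-independent work, run once per argument value
            Demo.calls += 1
            return col * 2

    d = Demo()
    assert d.analyse(3) == d.analyse(3) == 6
    assert Demo.calls == 1  # second call served from the per-instance cache
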
--- a/schema.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/schema.py	Tue Jun 28 16:33:53 2011 +0200
@@ -544,10 +544,11 @@
         rschema = self.add_relation_type(ybo.RelationType('identity'))
         rschema.final = False
 
+    etype_name_re = r'[A-Z][A-Za-z0-9]*[a-z]+[A-Za-z0-9]*$'
     def add_entity_type(self, edef):
         edef.name = edef.name.encode()
         edef.name = bw_normalize_etype(edef.name)
-        if not re.match(r'[A-Z][A-Za-z0-9]*[a-z]+[0-9]*$', edef.name):
+        if not re.match(self.etype_name_re, edef.name):
             raise BadSchemaDefinition(
                 '%r is not a valid name for an entity type. It should start '
                 'with an upper cased letter and be followed by at least a '
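
The class-level etype_name_re relaxes the old inline pattern: digits and
upper cased letters are now allowed after the mandatory lower cased run. A
quick check with illustrative names:

    import re

    new_re = r'[A-Z][A-Za-z0-9]*[a-z]+[A-Za-z0-9]*$'
    old_re = r'[A-Z][A-Za-z0-9]*[a-z]+[0-9]*$'

    for name in ('BigInt', 'ExternalURI', 'Line2D'):
        assert re.match(new_re, name)
    # the old pattern rejected upper cased letters after the last
    # lower cased run
    assert not re.match(old_re, 'ExternalURI')
    # both patterns still require at least one lower cased letter
    assert not re.match(new_re, 'HTML')
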
--- a/schemas/base.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/schemas/base.py	Tue Jun 28 16:33:53 2011 +0200
@@ -21,7 +21,8 @@
 _ = unicode
 
 from yams.buildobjs import (EntityType, RelationType, RelationDefinition,
-                            SubjectRelation, String, Datetime, Password, Interval)
+                            SubjectRelation,
+                            String, Datetime, Password, Interval, Boolean)
 from cubicweb.schema import (
     RQLConstraint, WorkflowableEntityType, ERQLExpression, RRQLExpression,
     PUB_SYSTEM_ENTITY_PERMS, PUB_SYSTEM_REL_PERMS, PUB_SYSTEM_ATTR_PERMS)
@@ -265,7 +266,8 @@
     url = String(description=_('URLs from which content will be imported. You can put one url per line'))
     parser = String(description=_('parser to use to extract entities from content retrieved at given URLs.'))
     latest_retrieval = Datetime(description=_('latest synchronization time'))
-
+    synchronizing = Boolean(description=_('currently in synchronization'),
+                            default=False)
 
 ENTITY_MANAGERS_PERMISSIONS = {
     'read':   ('managers',),
@@ -307,8 +309,8 @@
 class cw_source(RelationDefinition):
     __permissions__ = {
         'read':   ('managers', 'users', 'guests'),
-        'add':    (),
-        'delete': (),
+        'add':    ('managers',),
+        'delete': ('managers',),
         }
     subject = '*'
     object = 'CWSource'
--- a/server/__init__.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/server/__init__.py	Tue Jun 28 16:33:53 2011 +0200
@@ -239,7 +239,7 @@
         for path in reversed(paths):
             mhandler.exec_event_script('pre%s' % event, path)
         # enter instance'schema into the database
-        session.set_pool()
+        session.set_cnxset()
         serialize_schema(session, schema)
         # execute cubicweb's post<event> script
         mhandler.exec_event_script('post%s' % event)
--- a/server/checkintegrity.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/server/checkintegrity.py	Tue Jun 28 16:33:53 2011 +0200
@@ -101,7 +101,7 @@
     # deactivate modification_date hook since we don't want them
     # to be updated due to the reindexation
     repo = session.repo
-    cursor = session.pool['system']
+    cursor = session.cnxset['system']
     dbhelper = session.repo.system_source.dbhelper
     if not dbhelper.has_fti_table(cursor):
         print 'no text index table'
@@ -356,7 +356,7 @@
     using given user and password to locally connect to the repository
     (no running cubicweb server needed)
     """
-    session = repo._get_session(cnx.sessionid, setpool=True)
+    session = repo._get_session(cnx.sessionid, setcnxset=True)
     # yo, launch checks
     if checks:
         eids_cache = {}
@@ -372,6 +372,6 @@
             print 'WARNING: Diagnostic run, nothing has been corrected'
     if reindex:
         cnx.rollback()
-        session.set_pool()
+        session.set_cnxset()
         reindex_entities(repo.schema, session, withpb=withpb)
         cnx.commit()
--- a/server/edition.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/server/edition.py	Tue Jun 28 16:33:53 2011 +0200
@@ -68,6 +68,11 @@
         super(EditedEntity, self).__delitem__(attr)
         self.entity.cw_attr_cache.pop(attr, None)
 
+    def __copy__(self):
+        # default copy protocol fails in EditedEntity.__setitem__ because
+        # copied entity has no skip_security attribute at this point
+        return EditedEntity(self.entity, **self)
+
     def pop(self, attr, *args):
         # don't update skip_security by design (think to storage api)
         assert not self.saved, 'too late to modify edited attributes'
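
The __copy__ above routes copying through __init__ so instance attributes
exist before any item handling occurs. A minimal standalone sketch of the
protocol; Edited is an illustrative stand-in for EditedEntity:

    from copy import copy

    class Edited(dict):
        def __init__(self, entity, **kwargs):
            self.entity = entity      # set before any item is stored
            super(Edited, self).__init__(**kwargs)

        def __copy__(self):
            # copy.copy() calls this instead of the generic dict-subclass
            # reconstruction, which bypasses __init__
            return Edited(self.entity, **self)

    e = Edited('some entity', name=u'bob')
    e2 = copy(e)
    assert e2 == e and e2.entity is e.entity
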
--- a/server/hook.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/server/hook.py	Tue Jun 28 16:33:53 2011 +0200
@@ -248,7 +248,7 @@
 from logging import getLogger
 from itertools import chain
 
-from logilab.common.decorators import classproperty
+from logilab.common.decorators import classproperty, cached
 from logilab.common.deprecation import deprecated, class_renamed
 from logilab.common.logging_ext import set_log_methods
 
@@ -257,7 +257,7 @@
 from cubicweb.cwvreg import CWRegistry, VRegistry
 from cubicweb.selectors import (objectify_selector, lltrace, ExpectedValueSelector,
                                 is_instance)
-from cubicweb.appobject import AppObject
+from cubicweb.appobject import AppObject, NotSelector, OrSelector
 from cubicweb.server.session import security_enabled
 
 ENTITIES_HOOKS = set(('before_add_entity',    'after_add_entity',
@@ -318,15 +318,83 @@
             else:
                 entities = []
                 eids_from_to = []
+            pruned = self.get_pruned_hooks(session, event,
+                                           entities, eids_from_to, kwargs)
             # by default, hooks are executed with security turned off
             with security_enabled(session, read=False):
                 for _kwargs in _iter_kwargs(entities, eids_from_to, kwargs):
-                    hooks = sorted(self.possible_objects(session, **_kwargs),
+                    hooks = sorted(self.filtered_possible_objects(pruned, session, **_kwargs),
                                    key=lambda x: x.order)
                     with security_enabled(session, write=False):
                         for hook in hooks:
-                            #print hook.category, hook.__regid__
-                            hook()
+                            hook()
+
+    def get_pruned_hooks(self, session, event, entities, eids_from_to, kwargs):
+        """return a set of hooks that should not be considered by filtered_possible objects
+
+        the idea is to make a first pass over all the hooks in the
+        registry and to mark put some of them in a pruned list. The
+        pruned hooks are the one which:
+
+        * are disabled at the session level
+        * have a match_rtype or an is_instance selector which does not
+          match the rtype / etype of the relations / entities for
+          which we are calling the hooks. This works because the
+          repository calls the hooks grouped by rtype or by etype when
+          using the entities or eids_to_from keyword arguments
+
+        Only hooks with a simple selector or an AndSelector of simple
+        selectors are considered for disabling.
+
+        """
+        if 'entity' in kwargs:
+            entities = [kwargs['entity']]
+        if len(entities):
+            look_for_selector = is_instance
+            etype = entities[0].__regid__
+        elif 'rtype' in kwargs:
+            look_for_selector = match_rtype
+            etype = None
+        else: # nothing to prune; how did we get here?
+            return set()
+        cache_key = (event, kwargs.get('rtype'), etype)
+        pruned = session.pruned_hooks_cache.get(cache_key)
+        if pruned is not None:
+            return pruned
+        pruned = set()
+        session.pruned_hooks_cache[cache_key] = pruned
+        if look_for_selector is not None:
+            for id, hooks in self.iteritems():
+                for hook in hooks:
+                    enabled_cat, main_filter = hook.filterable_selectors()
+                    if enabled_cat is not None:
+                        if not enabled_cat(hook, session):
+                            pruned.add(hook)
+                            continue
+                    if main_filter is not None:
+                        if isinstance(main_filter, match_rtype) and \
+                           (main_filter.frometypes is not None  or \
+                            main_filter.toetypes is not None):
+                            continue
+                        first_kwargs = _iter_kwargs(entities, eids_from_to, kwargs).next()
+                        if not main_filter(hook, session, **first_kwargs):
+                            pruned.add(hook)
+        return pruned
+
+    def filtered_possible_objects(self, pruned, *args, **kwargs):
+        for appobjects in self.itervalues():
+            if pruned:
+                filtered_objects = [obj for obj in appobjects if obj not in pruned]
+                if not filtered_objects:
+                    continue
+            else:
+                filtered_objects = appobjects
+            obj = self._select_best(filtered_objects,
+                                    *args, **kwargs)
+            if obj is None:
+                continue
+            yield obj
 
 class HooksManager(object):
     def __init__(self, vreg):
@@ -464,6 +532,15 @@
     # stop pylint from complaining about missing attributes in Hooks classes
     eidfrom = eidto = entity = rtype = None
 
+    @classmethod
+    @cached
+    def filterable_selectors(cls):
+        search = cls.__select__.search_selector
+        if search((NotSelector, OrSelector)):
+            return None, None
+        enabled_cat = search(enabled_category)
+        main_filter = search((is_instance, match_rtype))
+        return enabled_cat, main_filter
 
     @classmethod
     def check_events(cls):
@@ -653,8 +730,8 @@
     operation. These keyword arguments will be accessible as attributes from the
     operation instance.
 
-    An operation is triggered on connections pool events related to
-    commit / rollback transations. Possible events are:
+    An operation is triggered on connections set events related to commit /
+    rollback transactions. Possible events are:
 
     * `precommit`:
 
@@ -728,7 +805,7 @@
         getattr(self, event)()
 
     def precommit_event(self):
-        """the observed connections pool is preparing a commit"""
+        """the observed connections set is preparing a commit"""
 
     def revertprecommit_event(self):
         """an error went when pre-commiting this operation or a later one
@@ -738,14 +815,13 @@
         """
 
     def rollback_event(self):
-        """the observed connections pool has been rollbacked
+        """the observed connections set has been rollbacked
 
-        do nothing by default, the operation will just be removed from the pool
-        operation list
+        do nothing by default
         """
 
     def postcommit_event(self):
-        """the observed connections pool has committed"""
+        """the observed connections set has committed"""
 
     @property
     @deprecated('[3.6] use self.session.user')
@@ -1028,7 +1104,7 @@
     data_key = 'neweids'
 
     def rollback_event(self):
-        """the observed connections pool has been rollbacked,
+        """the observed connections set has been rollbacked,
         remove inserted eid from repository type/source cache
         """
         try:
@@ -1042,7 +1118,7 @@
     """
     data_key = 'pendingeids'
     def postcommit_event(self):
-        """the observed connections pool has been rollbacked,
+        """the observed connections set has been rollbacked,
         remove inserted eid from repository type/source cache
         """
         try:
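
A minimal sketch of the pruning cache introduced above (names besides the
cache key shape and session.pruned_hooks_cache are illustrative): the
per-hook filtering runs once per (event, rtype, etype), and later calls
within the same transaction reuse the cached set:

    def pruned_hooks(cache, key, hooks, is_pruned):
        try:
            return cache[key]
        except KeyError:
            pruned = cache[key] = set(h for h in hooks if is_pruned(h))
            return pruned

    cache = {}  # lives on the session as session.pruned_hooks_cache
    key = ('after_add_entity', None, 'TrInfo')
    hooks = ['metadata_hook', 'integrity_hook']
    assert pruned_hooks(cache, key, hooks,
                        lambda h: h.startswith('meta')) == set(['metadata_hook'])
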
--- a/server/migractions.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/server/migractions.py	Tue Jun 28 16:33:53 2011 +0200
@@ -201,7 +201,6 @@
                 versions = repo.get_versions()
                 for cube, version in versions.iteritems():
                     version_file.write('%s %s\n' % (cube, version))
-                    
             if not failed:
                 bkup = tarfile.open(backupfile, 'w|gz')
                 for filename in os.listdir(tmpdir):
@@ -242,7 +241,7 @@
                 written_format = format_file.readline().strip()
                 if written_format in ('portable', 'native'):
                     format = written_format
-        self.config.open_connections_pools = False
+        self.config.init_cnxset_pool = False
         repo = self.repo_connect()
         for source in repo.sources:
             if systemonly and source.uri != 'system':
@@ -255,7 +254,7 @@
                     raise SystemExit(1)
         shutil.rmtree(tmpdir)
         # call hooks
-        repo.open_connections_pools()
+        repo.init_cnxset_pool()
         repo.hm.call_hooks('server_restore', repo=repo, timestamp=backupfile)
         print '-> database restored.'
 
@@ -288,7 +287,7 @@
                 except (KeyboardInterrupt, EOFError):
                     print 'aborting...'
                     sys.exit(0)
-            self.session.keep_pool_mode('transaction')
+            self.session.keep_cnxset_mode('transaction')
             self.session.data['rebuild-infered'] = False
             return self._cnx
 
@@ -296,10 +295,10 @@
     def session(self):
         if self.config is not None:
             session = self.repo._get_session(self.cnx.sessionid)
-            if session.pool is None:
+            if session.cnxset is None:
                 session.set_read_security(False)
                 session.set_write_security(False)
-            session.set_pool()
+            session.set_cnxset()
             return session
         # no access to session on remote instance
         return None
@@ -308,13 +307,13 @@
         if hasattr(self, '_cnx'):
             self._cnx.commit()
         if self.session:
-            self.session.set_pool()
+            self.session.set_cnxset()
 
     def rollback(self):
         if hasattr(self, '_cnx'):
             self._cnx.rollback()
         if self.session:
-            self.session.set_pool()
+            self.session.set_cnxset()
 
     def rqlexecall(self, rqliter, ask_confirm=False):
         for rql, kwargs in rqliter:
@@ -374,18 +373,21 @@
                     self.cmd_reactivate_verification_hooks()
 
     def install_custom_sql_scripts(self, directory, driver):
+        sql_scripts = []
         for fpath in glob(osp.join(directory, '*.sql.%s' % driver)):
             newname = osp.basename(fpath).replace('.sql.%s' % driver,
                                                   '.%s.sql' % driver)
             warn('[3.5.6] rename %s into %s' % (fpath, newname),
                  DeprecationWarning)
+            sql_scripts.append(fpath)
+        sql_scripts += glob(osp.join(directory, '*.%s.sql' % driver))
+        for fpath in sql_scripts:
             print '-> installing', fpath
-            sqlexec(open(fpath).read(), self.session.system_sql, False,
-                    delimiter=';;')
-        for fpath in glob(osp.join(directory, '*.%s.sql' % driver)):
-            print '-> installing', fpath
-            sqlexec(open(fpath).read(), self.session.system_sql, False,
-                    delimiter=';;')
+            try:
+                sqlexec(open(fpath).read(), self.session.system_sql, False,
+                        delimiter=';;')
+            except Exception, exc:
+                print '-> ERROR:', exc, ', skipping', fpath
 
     # schema synchronization internals ########################################
 
@@ -1375,7 +1377,7 @@
     def _cw(self):
         session = self.session
         if session is not None:
-            session.set_pool()
+            session.set_cnxset()
             return session
         return self.cnx.request()
 
--- a/server/pool.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/server/pool.py	Tue Jun 28 16:33:53 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -15,19 +15,18 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""CubicWeb server connections pool : the repository has a limited number of
-connections pools, each of them dealing with a set of connections on each source
-used by the repository. A connections pools (`ConnectionsPool`) is an
-abstraction for a group of connection to each source.
+"""CubicWeb server connections set : the repository has a limited number of
+:class:`ConnectionsSet` (defined in configuration, default to 4). Each of them
+hold a connection for each source used by the repository.
 """
 
 __docformat__ = "restructuredtext en"
 
 import sys
 
-class ConnectionsPool(object):
+class ConnectionsSet(object):
     """handle connections on a set of sources, at some point associated to a
-    user session
+    :class:`Session`
     """
 
     def __init__(self, sources):
@@ -81,9 +80,9 @@
                 self.reconnect(source)
 
     def close(self, i_know_what_i_do=False):
-        """close all connections in the pool"""
+        """close all connections in the set"""
         if i_know_what_i_do is not True: # unexpected closing safety belt
-            raise RuntimeError('pool shouldn\'t be closed')
+            raise RuntimeError('connections set shouldn\'t be closed')
         for cu in self._cursors.values():
             try:
                 cu.close()
@@ -97,17 +96,17 @@
 
     # internals ###############################################################
 
-    def pool_set(self):
-        """pool is being set"""
+    def cnxset_set(self):
+        """connections set is being set on a session"""
         self.check_connections()
 
-    def pool_reset(self):
-        """pool is being reseted"""
+    def cnxset_freed(self):
+        """connections set is being freed from a session"""
         for source, cnx in self.source_cnxs.values():
-            source.pool_reset(cnx)
+            source.cnxset_freed(cnx)
 
     def sources(self):
-        """return the source objects handled by this pool"""
+        """return the source objects handled by this connections set"""
         # implementation details of flying insert requires the system source
         # first
         yield self.source_cnxs['system'][0]
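
The repository-side use of these sets appears in repository.py below. As a
minimal standalone sketch of the bounded checkout / checkin pattern
(MiniPool is illustrative; Python 2, as in the surrounding code):

    import Queue

    class MiniPool(object):
        def __init__(self, cnxsets):
            self._queue = Queue.Queue()
            for cnxset in cnxsets:
                self._queue.put_nowait(cnxset)

        def get(self, timeout=5):
            # blocks until a connections set is freed, or gives up
            try:
                return self._queue.get(True, timeout=timeout)
            except Queue.Empty:
                raise Exception('no connections set available after %s secs'
                                % timeout)

        def free(self, cnxset):
            self._queue.put_nowait(cnxset)
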
--- a/server/querier.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/server/querier.py	Tue Jun 28 16:33:53 2011 +0200
@@ -169,7 +169,7 @@
         # session executing the query
         self.session = session
         # quick reference to the system source
-        self.syssource = session.pool.source('system')
+        self.syssource = session.cnxset.source('system')
         # execution steps
         self.steps = []
         # index of temporary tables created during execution
@@ -734,8 +734,8 @@
             # transaction must been rollbacked
             #
             # notes:
-            # * we should not reset the pool here, since we don't want the
-            #   session to loose its pool during processing
+            # * we should not reset the connections set here, since we don't want the
+            #   session to lose it during processing
             # * don't rollback if we're in the commit process, will be handled
             #   by the session
             if session.commit_state is None:
--- a/server/repository.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/server/repository.py	Tue Jun 28 16:33:53 2011 +0200
@@ -154,13 +154,13 @@
         self.sources_by_uri = {'system': self.system_source}
         # querier helper, need to be created after sources initialization
         self.querier = querier.QuerierHelper(self, self.schema)
-        # cache eid -> type / source
+        # cache eid -> (type, physical source, extid, actual source)
         self._type_source_cache = {}
         # cache (extid, source uri) -> eid
         self._extid_cache = {}
-        # open some connections pools
-        if config.open_connections_pools:
-            self.open_connections_pools()
+        # open some connections sets
+        if config.init_cnxset_pool:
+            self.init_cnxset_pool()
         @onevent('after-registry-reload', self)
         def fix_user_classes(self):
             usercls = self.vreg['etypes'].etype_class('CWUser')
@@ -168,10 +168,10 @@
                 if not isinstance(session.user, InternalManager):
                     session.user.__class__ = usercls
 
-    def open_connections_pools(self):
+    def init_cnxset_pool(self):
         config = self.config
-        self._available_pools = Queue.Queue()
-        self._available_pools.put_nowait(pool.ConnectionsPool(self.sources))
+        self._cnxsets_pool = Queue.Queue()
+        self._cnxsets_pool.put_nowait(pool.ConnectionsSet(self.sources))
         if config.quick_start:
             # quick start, usually only to get a minimal repository to get cubes
             # information (eg dump/restore/...)
@@ -213,14 +213,14 @@
             # configurate tsearch according to postgres version
             for source in self.sources:
                 source.init_creating()
-        # close initialization pool and reopen fresh ones for proper
+        # close the initialization connections set and reopen fresh ones for proper
         # initialization now that we know cubes
-        self._get_pool().close(True)
-        # list of available pools (we can't iterate on Queue instance)
-        self.pools = []
+        self._get_cnxset().close(True)
+        # list of available cnxsets (we can't iterate on a Queue instance)
+        self.cnxsets = []
         for i in xrange(config['connections-pool-size']):
-            self.pools.append(pool.ConnectionsPool(self.sources))
-            self._available_pools.put_nowait(self.pools[-1])
+            self.cnxsets.append(pool.ConnectionsSet(self.sources))
+            self._cnxsets_pool.put_nowait(self.cnxsets[-1])
         if config.quick_start:
             config.init_cubes(self.get_cubes())
         self.hm = hook.HooksManager(self.vreg)
@@ -244,7 +244,7 @@
                     self.sources_by_eid[sourceent.eid] = self.system_source
                     self.system_source.init(True, sourceent)
                     continue
-                self.add_source(sourceent, add_to_pools=False)
+                self.add_source(sourceent, add_to_cnxsets=False)
         finally:
             session.close()
 
@@ -253,7 +253,7 @@
                       'can_cross_relation', 'rel_type_sources'):
             clear_cache(self, cache)
 
-    def add_source(self, sourceent, add_to_pools=True):
+    def add_source(self, sourceent, add_to_cnxsets=True):
         source = self.get_source(sourceent.type, sourceent.name,
                                  sourceent.host_config, sourceent.eid)
         self.sources_by_eid[sourceent.eid] = source
@@ -261,15 +261,15 @@
         if self.config.source_enabled(source):
             # call source's init method to complete their initialisation if
             # needed (for instance looking for persistent configuration using an
-            # internal session, which is not possible until pools have been
+            # internal session, which is not possible until connections sets have been
             # initialized)
             source.init(True, sourceent)
             if not source.copy_based_source:
                 self.sources.append(source)
                 self.querier.set_planner()
-                if add_to_pools:
-                    for pool in self.pools:
-                        pool.add_source(source)
+                if add_to_cnxsets:
+                    for cnxset in self.cnxsets:
+                        cnxset.add_source(source)
         else:
             source.init(False, sourceent)
         self._clear_planning_caches()
@@ -280,8 +280,8 @@
         if self.config.source_enabled(source) and not source.copy_based_source:
             self.sources.remove(source)
             self.querier.set_planner()
-            for pool in self.pools:
-                pool.remove_source(source)
+            for cnxset in self.cnxsets:
+                cnxset.remove_source(source)
         self._clear_planning_caches()
 
     def get_source(self, type, uri, source_config, eid=None):
@@ -368,25 +368,25 @@
         t.start()
 
     #@locked
-    def _get_pool(self):
+    def _get_cnxset(self):
         try:
-            return self._available_pools.get(True, timeout=5)
+            return self._cnxsets_pool.get(True, timeout=5)
         except Queue.Empty:
-            raise Exception('no pool available after 5 secs, probably either a '
+            raise Exception('no connections set available after 5 secs, probably either a '
                             'bug in code (too many uncommited/rollbacked '
                             'connections) or too much load on the server (in '
                             'which case you can try to set a bigger '
-                            'connections pools size)')
+                            'connections pool size)')
 
-    def _free_pool(self, pool):
-        self._available_pools.put_nowait(pool)
+    def _free_cnxset(self, cnxset):
+        self._cnxsets_pool.put_nowait(cnxset)
 
     def pinfo(self):
-        # XXX: session.pool is accessed from a local storage, would be interesting
-        #      to see if there is a pool set in any thread specific data)
-        return '%s: %s (%s)' % (self._available_pools.qsize(),
+        # XXX: session.cnxset is accessed from thread-local storage; it would be
+        #      interesting to see if there is a cnxset set in any thread-specific data
+        return '%s: %s (%s)' % (self._cnxsets_pool.qsize(),
                                 ','.join(session.user.login for session in self._sessions.values()
-                                         if session.pool),
+                                         if session.cnxset),
                                 threading.currentThread())
     def shutdown(self):
         """called on server stop event to properly close opened sessions and
@@ -409,12 +409,12 @@
                 or self.config.quick_start):
             self.hm.call_hooks('server_shutdown', repo=self)
         self.close_sessions()
-        while not self._available_pools.empty():
-            pool = self._available_pools.get_nowait()
+        while not self._cnxsets_pool.empty():
+            cnxset = self._cnxsets_pool.get_nowait()
             try:
-                pool.close(True)
+                cnxset.close(True)
             except:
-                self.exception('error while closing %s' % pool)
+                self.exception('error while closing %s' % cnxset)
                 continue
         if self.pyro_registered:
             if self._use_pyrons():
@@ -496,7 +496,7 @@
         results['nb_open_sessions'] = len(self._sessions)
         results['nb_active_threads'] = threading.activeCount()
         results['looping_tasks'] = ', '.join(str(t) for t in self._looping_tasks)
-        results['available_pools'] = self._available_pools.qsize()
+        results['available_cnxsets'] = self._cnxsets_pool.qsize()
         results['threads'] = ', '.join(sorted(str(t) for t in threading.enumerate()))
         return results
 
@@ -535,12 +535,12 @@
         # XXX we may want to check we don't give sensible information
         if foreid is None:
             return self.config[option]
-        _, sourceuri, extid = self.type_and_source_from_eid(foreid)
+        _, sourceuri, extid, _ = self.type_and_source_from_eid(foreid)
         if sourceuri == 'system':
             return self.config[option]
-        pool = self._get_pool()
+        cnxset = self._get_cnxset()
         try:
-            cnx = pool.connection(sourceuri)
+            cnx = cnxset.connection(sourceuri)
             # needed to check connection is valid and usable by the current
             # thread
             newcnx = self.sources_by_uri[sourceuri].check_connection(cnx)
@@ -548,7 +548,7 @@
                 cnx = newcnx
             return cnx.get_option_value(option, extid)
         finally:
-            self._free_pool(pool)
+            self._free_cnxset(cnxset)
 
     @cached
     def get_versions(self, checkversions=False):
@@ -721,7 +721,7 @@
         * build_descr is a flag indicating if the description should be
           built on select queries
         """
-        session = self._get_session(sessionid, setpool=True, txid=txid)
+        session = self._get_session(sessionid, setcnxset=True, txid=txid)
         try:
             try:
                 rset = self.querier.execute(session, rqlstring, args,
@@ -747,21 +747,23 @@
                 self.exception('unexpected error while executing %s with %s', rqlstring, args)
                 raise
         finally:
-            session.reset_pool()
+            session.free_cnxset()
 
     def describe(self, sessionid, eid, txid=None):
-        """return a tuple (type, source, extid) for the entity with id <eid>"""
-        session = self._get_session(sessionid, setpool=True, txid=txid)
+        """return a tuple `(type, physical source uri, extid, actual source
+        uri)` for the entity with the given `eid`
+        """
+        session = self._get_session(sessionid, setcnxset=True, txid=txid)
         try:
             return self.type_and_source_from_eid(eid, session)
         finally:
-            session.reset_pool()
+            session.free_cnxset()
 
     def check_session(self, sessionid):
         """raise `BadConnectionId` if the connection is no more valid, else
         return its latest activity timestamp.
         """
-        return self._get_session(sessionid, setpool=False).timestamp
+        return self._get_session(sessionid, setcnxset=False).timestamp
 
     def get_shared_data(self, sessionid, key, default=None, pop=False, txdata=False):
         """return value associated to key in the session's data dictionary or
@@ -772,7 +774,7 @@
         If key isn't defined in the dictionary, the value specified by the
         `default` argument will be returned.
         """
-        session = self._get_session(sessionid, setpool=False)
+        session = self._get_session(sessionid, setcnxset=False)
         return session.get_shared_data(key, default, pop, txdata)
 
     def set_shared_data(self, sessionid, key, value, txdata=False):
@@ -782,7 +784,7 @@
         transaction's data which are cleared on commit/rollback of the current
         transaction.
         """
-        session = self._get_session(sessionid, setpool=False)
+        session = self._get_session(sessionid, setcnxset=False)
         session.set_shared_data(key, value, txdata)
 
     def commit(self, sessionid, txid=None):
@@ -811,10 +813,10 @@
 
     def close(self, sessionid, txid=None, checkshuttingdown=True):
         """close the session with the given id"""
-        session = self._get_session(sessionid, setpool=True, txid=txid,
+        session = self._get_session(sessionid, setcnxset=True, txid=txid,
                                     checkshuttingdown=checkshuttingdown)
         # operations uncommitted before close are rolled back before the hook is called
-        session.rollback(reset_pool=False)
+        session.rollback(free_cnxset=False)
         self.hm.call_hooks('session_close', session)
         # commit session at this point in case write operation has been done
         # during `session_close` hooks
@@ -829,7 +831,7 @@
         * update user information on each user's request (i.e. groups and
           custom properties)
         """
-        session = self._get_session(sessionid, setpool=False)
+        session = self._get_session(sessionid, setcnxset=False)
         if props is not None:
             self.set_session_props(sessionid, props)
         user = session.user
@@ -841,43 +843,43 @@
         * update user information on each user's request (i.e. groups and
           custom properties)
         """
-        session = self._get_session(sessionid, setpool=False)
+        session = self._get_session(sessionid, setcnxset=False)
         for prop, value in props.items():
             session.change_property(prop, value)
 
     def undoable_transactions(self, sessionid, ueid=None, txid=None,
                               **actionfilters):
         """See :class:`cubicweb.dbapi.Connection.undoable_transactions`"""
-        session = self._get_session(sessionid, setpool=True, txid=txid)
+        session = self._get_session(sessionid, setcnxset=True, txid=txid)
         try:
             return self.system_source.undoable_transactions(session, ueid,
                                                             **actionfilters)
         finally:
-            session.reset_pool()
+            session.free_cnxset()
 
     def transaction_info(self, sessionid, txuuid, txid=None):
         """See :class:`cubicweb.dbapi.Connection.transaction_info`"""
-        session = self._get_session(sessionid, setpool=True, txid=txid)
+        session = self._get_session(sessionid, setcnxset=True, txid=txid)
         try:
             return self.system_source.tx_info(session, txuuid)
         finally:
-            session.reset_pool()
+            session.free_cnxset()
 
     def transaction_actions(self, sessionid, txuuid, public=True, txid=None):
         """See :class:`cubicweb.dbapi.Connection.transaction_actions`"""
-        session = self._get_session(sessionid, setpool=True, txid=txid)
+        session = self._get_session(sessionid, setcnxset=True, txid=txid)
         try:
             return self.system_source.tx_actions(session, txuuid, public)
         finally:
-            session.reset_pool()
+            session.free_cnxset()
 
     def undo_transaction(self, sessionid, txuuid, txid=None):
         """See :class:`cubicweb.dbapi.Connection.undo_transaction`"""
-        session = self._get_session(sessionid, setpool=True, txid=txid)
+        session = self._get_session(sessionid, setcnxset=True, txid=txid)
         try:
             return self.system_source.undo_transaction(session, txuuid)
         finally:
-            session.reset_pool()
+            session.free_cnxset()
 
     # public (inter-repository) interface #####################################
 
@@ -929,14 +931,14 @@
         """return a dbapi like connection/cursor using internal user which
         have every rights on the repository. You'll *have to* commit/rollback
         or close (rollback implicitly) the session once the job's done, else
-        you'll leak connections pool up to the time where no more pool is
+        you'll leak connections set up to the time where no one is
         available, causing irremediable freeze...
         """
         session = InternalSession(self, cnxprops)
-        session.set_pool()
+        session.set_cnxset()
         return session
 
-    def _get_session(self, sessionid, setpool=False, txid=None,
+    def _get_session(self, sessionid, setcnxset=False, txid=None,
                      checkshuttingdown=True):
         """return the user associated to the given session identifier"""
         if checkshuttingdown and self.shutting_down:
@@ -945,9 +947,9 @@
             session = self._sessions[sessionid]
         except KeyError:
             raise BadConnectionId('No such session %s' % sessionid)
-        if setpool:
-            session.set_tx_data(txid) # must be done before set_pool
-            session.set_pool()
+        if setcnxset:
+            session.set_tx_data(txid) # must be done before set_cnxset
+            session.set_cnxset()
         return session
 
     # data sources handling ###################################################
@@ -955,7 +957,9 @@
     # * correspondance between eid and local id (i.e. specific to a given source)
 
     def type_and_source_from_eid(self, eid, session=None):
-        """return a tuple (type, source, extid) for the entity with id <eid>"""
+        """return a tuple `(type, physical source uri, extid, actual source
+        uri)` for the entity with the given `eid`
+        """
         try:
             eid = typed_eid(eid)
         except ValueError:
@@ -965,19 +969,19 @@
         except KeyError:
             if session is None:
                 session = self.internal_session()
-                reset_pool = True
+                free_cnxset = True
             else:
-                reset_pool = False
+                free_cnxset = False
             try:
-                etype, uri, extid = self.system_source.eid_type_source(session,
-                                                                       eid)
+                etype, uri, extid, auri = self.system_source.eid_type_source(
+                    session, eid)
             finally:
-                if reset_pool:
-                    session.reset_pool()
-        self._type_source_cache[eid] = (etype, uri, extid)
-        if uri != 'system':
-            self._extid_cache[(extid, uri)] = eid
-        return etype, uri, extid
+                if free_cnxset:
+                    session.free_cnxset()
+            self._type_source_cache[eid] = (etype, uri, extid, auri)
+            if uri != 'system':
+                self._extid_cache[(extid, uri)] = eid
+            return etype, uri, extid, auri
 
     def clear_caches(self, eids):
         etcache = self._type_source_cache
@@ -985,7 +989,7 @@
         rqlcache = self.querier._rql_cache
         for eid in eids:
             try:
-                etype, uri, extid = etcache.pop(typed_eid(eid)) # may be a string in some cases
+                etype, uri, extid, auri = etcache.pop(typed_eid(eid)) # may be a string in some cases
                 rqlcache.pop('%s X WHERE X eid %s' % (etype, eid), None)
                 extidcache.pop((extid, uri), None)
             except KeyError:
@@ -1019,7 +1023,7 @@
 
     def eid2extid(self, source, eid, session=None):
         """get local id from an eid"""
-        etype, uri, extid = self.type_and_source_from_eid(eid, session)
+        etype, uri, extid, _ = self.type_and_source_from_eid(eid, session)
         if source.uri != uri:
             # eid not from the given source
             raise UnknownEid(eid)
@@ -1027,23 +1031,44 @@
 
     def extid2eid(self, source, extid, etype, session=None, insert=True,
                   sourceparams=None):
-        """get eid from a local id. An eid is attributed if no record is found"""
+        """Return eid from a local id. If the eid is a negative integer, that
+        means the entity is known but has been copied back to the system source
+        hence should be ignored.
+
+        If no record is found, ie the entity is not known yet:
+
+        1. an eid is attributed
+
+        2. the source's :meth:`before_entity_insertion` method is called to
+           build the entity instance
+
+        3. unless the source's :attr:`should_call_hooks` tells otherwise,
+           'before_add_entity' hooks are called
+
+        4. record is added into the system source
+
+        5. the source's :meth:`after_entity_insertion` method is called to
+           complete building of the entity instance
+
+        6. unless the source's :attr:`should_call_hooks` tells otherwise,
+           'after_add_entity' hooks are called
+        """
         uri = 'system' if source.copy_based_source else source.uri
         cachekey = (extid, uri)
         try:
             return self._extid_cache[cachekey]
         except KeyError:
             pass
-        reset_pool = False
+        free_cnxset = False
         if session is None:
             session = self.internal_session()
-            reset_pool = True
+            free_cnxset = True
         eid = self.system_source.extid2eid(session, uri, extid)
         if eid is not None:
             self._extid_cache[cachekey] = eid
-            self._type_source_cache[eid] = (etype, uri, extid)
-            if reset_pool:
-                session.reset_pool()
+            self._type_source_cache[eid] = (etype, uri, extid, source.uri)
+            if free_cnxset:
+                session.free_cnxset()
             return eid
         if not insert:
             return
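
Callers of `extid2eid` must honour the negative-eid convention documented above; the datafeed parser later in this changeset applies exactly this rule. A minimal sketch, assuming `source`, `session` and an external id `extid` are available and `'Card'` is a hypothetical entity type:

    from cubicweb import UnknownEid

    eid = source.extid2eid(extid, 'Card', session)
    if eid is not None and eid < 0:
        # the entity was moved back to the system source: use its real
        # (positive) eid and skip any further import processing
        try:
            entity = session.entity_from_eid(-eid)
        except UnknownEid:
            entity = None  # it has been removed altogether
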
@@ -1055,11 +1080,11 @@
         # processing a commit, we have to use another one
         if not session.is_internal_session:
             session = self.internal_session()
-            reset_pool = True
+            free_cnxset = True
         try:
             eid = self.system_source.create_eid(session)
             self._extid_cache[cachekey] = eid
-            self._type_source_cache[eid] = (etype, uri, extid)
+            self._type_source_cache[eid] = (etype, uri, extid, source.uri)
             entity = source.before_entity_insertion(
                 session, extid, etype, eid, sourceparams)
             if source.should_call_hooks:
@@ -1069,10 +1094,10 @@
             source.after_entity_insertion(session, extid, entity, sourceparams)
             if source.should_call_hooks:
                 self.hm.call_hooks('after_add_entity', session, entity=entity)
-            session.commit(reset_pool)
+            session.commit(free_cnxset)
             return eid
         except:
-            session.rollback(reset_pool)
+            session.rollback(free_cnxset)
             raise
 
     def add_info(self, session, entity, source, extid=None, complete=True):
@@ -1195,7 +1220,8 @@
                 suri = 'system'
             extid = source.get_extid(entity)
             self._extid_cache[(str(extid), suri)] = entity.eid
-        self._type_source_cache[entity.eid] = (entity.__regid__, suri, extid)
+        self._type_source_cache[entity.eid] = (entity.__regid__, suri, extid,
+                                               source.uri)
         return extid
 
     def glob_add_entity(self, session, edited):
@@ -1356,7 +1382,7 @@
         # in setdefault, this should not be changed without profiling.
 
         for eid in eids:
-            etype, sourceuri, extid = self.type_and_source_from_eid(eid, session)
+            etype, sourceuri, extid, _ = self.type_and_source_from_eid(eid, session)
             # XXX should cache entity's cw_metainformation
             entity = session.entity_from_eid(eid, etype)
             try:
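
Every caller now unpacks four values from `type_and_source_from_eid`, discarding the actual source uri with `_` when it is irrelevant, as the hunks above do. Both call styles, sketched against a `repo` instance and a hypothetical eid:

    # full form: physical and actual source uris differ for copy-based
    # sources such as datafeed
    etype, uri, extid, auri = repo.type_and_source_from_eid(1234)

    # when the actual source does not matter, discard it explicitly
    etype, uri, extid, _ = repo.type_and_source_from_eid(1234)
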
--- a/server/schemaserial.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/server/schemaserial.py	Tue Jun 28 16:33:53 2011 +0200
@@ -88,7 +88,7 @@
     repo = session.repo
     dbhelper = repo.system_source.dbhelper
     # XXX bw compat (3.6 migration)
-    sqlcu = session.pool['system']
+    sqlcu = session.cnxset['system']
     sqlcu.execute("SELECT * FROM cw_CWRType WHERE cw_name='symetric'")
     if sqlcu.fetchall():
         sql = dbhelper.sql_rename_col('cw_CWRType', 'cw_symetric', 'cw_symmetric',
@@ -138,8 +138,8 @@
             except:
                 pass
             tocleanup = [eid]
-            tocleanup += (eid for eid, (eidetype, uri, extid) in repo._type_source_cache.items()
-                          if etype == eidetype)
+            tocleanup += (eid for eid, cached in repo._type_source_cache.iteritems()
+                          if etype == cached[0])
             repo.clear_caches(tocleanup)
             session.commit(False)
             if needcopy:
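
Indexing the cached value (`cached[0]`) instead of unpacking it keeps this migration code valid whether the cache still holds old 3-tuples or the new 4-tuples. The same defensive pattern, sketched for a hypothetical cleanup by entity type:

    etype = 'CWUser'  # hypothetical type whose cache entries should go
    tocleanup = [eid for eid, cached in repo._type_source_cache.iteritems()
                 if cached[0] == etype]
    repo.clear_caches(tocleanup)
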
--- a/server/serverconfig.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/server/serverconfig.py	Tue Jun 28 16:33:53 2011 +0200
@@ -130,7 +130,7 @@
         ('connections-pool-size',
          {'type' : 'int',
           'default': 4,
-          'help': 'size of the connections pools. Each source supporting multiple \
+          'help': 'size of the connections pool. Each source supporting multiple \
 connections will have this number of opened connections.',
           'group': 'main', 'level': 3,
           }),
@@ -209,9 +209,9 @@
           }),
         ) + CubicWebConfiguration.options)
 
-    # should we open connections pools (eg connect to sources). This is usually
-    # necessary...
-    open_connections_pools = True
+    # should we init the connections pool (eg connect to sources). This is
+    # usually necessary...
+    init_cnxset_pool = True
 
     # read the schema from the database
     read_instance_schema = True
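
Since `init_cnxset_pool` is a plain class attribute, a specialized configuration can skip connecting to the sources entirely. A minimal sketch; `SchemaOnlyConfiguration` is hypothetical:

    from cubicweb.server.serverconfig import ServerConfiguration

    class SchemaOnlyConfiguration(ServerConfiguration):
        """hypothetical configuration for tools that only need the schema"""
        init_cnxset_pool = False
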
@@ -255,7 +255,7 @@
     # configuration file (#16102)
     @cached
     def read_sources_file(self):
-        return read_config(self.sources_file())
+        return read_config(self.sources_file(), raise_if_unreadable=True)
 
     def sources(self):
         """return a dictionnaries containing sources definitions indexed by
--- a/server/serverctl.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/server/serverctl.py	Tue Jun 28 16:33:53 2011 +0200
@@ -24,6 +24,7 @@
 # completion). So import locally in command helpers.
 import sys
 import os
+import logging
 
 from logilab.common import nullobject
 from logilab.common.configuration import Configuration
@@ -370,14 +371,21 @@
                                 interactive=not automatic)
         cursor = cnx.cursor()
         helper.init_fti_extensions(cursor)
+        cnx.commit()
         # postgres specific stuff
         if driver == 'postgres':
-            # install plpythonu/plpgsql language if not installed by the cube
-            langs = sys.platform == 'win32' and ('plpgsql',) or ('plpythonu', 'plpgsql')
+            # install plpythonu/plpgsql languages
+            langs = ('plpythonu', 'plpgsql')
             for extlang in langs:
-                helper.create_language(cursor, extlang)
-        cursor.close()
-        cnx.commit()
+                if automatic or ASK.confirm('Create language %s ?' % extlang):
+                    try:
+                        helper.create_language(cursor, extlang)
+                    except Exception, exc:
+                        print '-> ERROR:', exc
+                        print '-> could not create language %s, some stored procedures might be unusable' % extlang
+                        cnx.rollback()
+                    else:
+                        cnx.commit()
         print '-> database for instance %s created and necessary extensions installed.' % appid
         print
         if automatic:
@@ -637,8 +645,7 @@
         appid = args[0]
         debug = self['debug']
         if sys.platform == 'win32' and not debug:
-            from logging import getLogger
-            logger = getLogger('cubicweb.ctl')
+            logger = logging.getLogger('cubicweb.ctl')
             logger.info('Forcing debug mode on win32 platform')
             debug = True
         config = ServerConfiguration.config_for(appid, debugmode=debug)
@@ -970,7 +977,7 @@
         appid = args[0]
         config = ServerConfiguration.config_for(appid)
         repo, cnx = repo_cnx(config)
-        session = repo._get_session(cnx.sessionid, setpool=True)
+        session = repo._get_session(cnx.sessionid, setcnxset=True)
         reindex_entities(repo.schema, session)
         cnx.commit()
 
@@ -995,11 +1002,43 @@
         mih.cmd_synchronize_schema()
 
 
+class SynchronizeSourceCommand(Command):
+    """Force a source synchronization.
+
+    <instance>
+      the identifier of the instance
+    <source>
+      the name of the source to synchronize.
+    """
+    name = 'source-sync'
+    arguments = '<instance> <source>'
+    min_args = max_args = 2
+
+    def run(self, args):
+        config = ServerConfiguration.config_for(args[0])
+        config.global_set_option('log-file', None)
+        config.log_format = '%(levelname)s %(name)s: %(message)s'
+        logger = logging.getLogger('cubicweb.sources')
+        logger.setLevel(logging.INFO)
+        # only retrieve cnx to trigger authentication, close it right away
+        repo, cnx = repo_cnx(config)
+        cnx.close()
+        try:
+            source = repo.sources_by_uri[args[1]]
+        except KeyError:
+            raise ExecutionError('no source named %r' % args[1])
+        session = repo.internal_session()
+        stats = source.pull_data(session, force=True, raise_on_error=True)
+        for key, val in stats.iteritems():
+            if val:
+                print key, ':', val
+
+
 for cmdclass in (CreateInstanceDBCommand, InitInstanceCommand,
                  GrantUserOnInstanceCommand, ResetAdminPasswordCommand,
                  StartRepositoryCommand,
                  DBDumpCommand, DBRestoreCommand, DBCopyCommand,
                  AddSourceCommand, CheckRepositoryCommand, RebuildFTICommand,
-                 SynchronizeInstanceSchemaCommand,
+                 SynchronizeInstanceSchemaCommand, SynchronizeSourceCommand,
                  ):
     CWCTL.register(cmdclass)
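
The command registered above is run like any other cubicweb-ctl command, e.g. for a hypothetical instance `myapp` and source `myfeed`:

    cubicweb-ctl source-sync myapp myfeed

Non-empty statistics returned by `pull_data` (such as the `created` and `updated` entity sets) are then printed one per line.
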
--- a/server/session.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/server/session.py	Tue Jun 28 16:33:53 2011 +0200
@@ -124,21 +124,13 @@
         self.categories = categories
 
     def __enter__(self):
-        self.oldmode = self.session.set_hooks_mode(self.mode)
-        if self.mode is self.session.HOOKS_DENY_ALL:
-            self.changes = self.session.enable_hook_categories(*self.categories)
-        else:
-            self.changes = self.session.disable_hook_categories(*self.categories)
+        self.oldmode, self.changes = self.session.init_hooks_mode_categories(
+            self.mode, self.categories)
 
     def __exit__(self, exctype, exc, traceback):
-        if self.changes:
-            if self.mode is self.session.HOOKS_DENY_ALL:
-                self.session.disable_hook_categories(*self.changes)
-            else:
-                self.session.enable_hook_categories(*self.changes)
-        self.session.set_hooks_mode(self.oldmode)
+        self.session.reset_hooks_mode_categories(self.oldmode, self.mode, self.changes)
 
-INDENT = ''
+
 class security_enabled(object):
     """context manager to control security w/ session.execute, since by
     default security is disabled on queries executed on the repository
@@ -150,33 +142,90 @@
         self.write = write
 
     def __enter__(self):
-#        global INDENT
-        if self.read is not None:
-            self.oldread = self.session.set_read_security(self.read)
-#            print INDENT + 'read', self.read, self.oldread
-        if self.write is not None:
-            self.oldwrite = self.session.set_write_security(self.write)
-#            print INDENT + 'write', self.write, self.oldwrite
-#        INDENT += '  '
+        self.oldread, self.oldwrite = self.session.init_security(
+            self.read, self.write)
 
     def __exit__(self, exctype, exc, traceback):
-#        global INDENT
-#        INDENT = INDENT[:-2]
-        if self.read is not None:
-            self.session.set_read_security(self.oldread)
-#            print INDENT + 'reset read to', self.oldread
-        if self.write is not None:
-            self.session.set_write_security(self.oldwrite)
-#            print INDENT + 'reset write to', self.oldwrite
+        self.session.reset_security(self.oldread, self.oldwrite)
 
 
 class TransactionData(object):
     def __init__(self, txid):
         self.transactionid = txid
+        self.ctx_count = 0
+
 
 class Session(RequestSessionBase):
-    """tie session id, user, connections pool and other session data all
-    together
+    """Repository usersession, tie a session id, user, connections set and
+    other session data all together.
+
+    About session storage / transactions
+    ------------------------------------
+
+    Here is a description of internal session attributes. Besides :attr:`data`
+    and :attr:`transaction_data`, you should not have to use attributes
+    described here but higher level APIs.
+
+      :attr:`data` is a dictionary containing shared data, used to communicate
+      extra information between the client and the repository
+
+      :attr:`_tx_data` is a dictionary of :class:`TransactionData` instances,
+      one for each running transaction, keyed by transaction id. By default the
+      transaction id is the thread name, but it may be something else (per
+      dbapi cursor for instance, or per thread name *from another process*).
+
+      :attr:`__threaddata` is a thread local storage whose `txdata` attribute
+      refers to the proper instance of :class:`TransactionData` according to the
+      transaction.
+
+      :attr:`_threads_in_transaction` is a set of (thread, connections set)
+      referencing threads that currently hold a connections set for the session.
+
+    You should not have to use either :attr:`_tx_data` or :attr:`__threaddata`:
+    simply access transaction data transparently through the :attr:`_threaddata`
+    property. You usually won't even need that, since the current transaction's
+    data may be accessed/modified through the following properties / methods:
+
+      :attr:`transaction_data`, similarly to :attr:`data`, is a dictionary
+      containing some shared data that should be cleared at the end of the
+      transaction. Hooks and operations may put arbitrary data in there, and
+      this may also be used as a communication channel between the client and
+      the repository.
+
+      :attr:`cnxset`, the connections set to use to execute queries on sources.
+      During a transaction, the connections set may be freed so that it may be
+      used by another session, as long as no writing is done. This means we can
+      serve multiple sessions with a reasonably low connections set pool size.
+
+      :attr:`mode`, string telling the connections set handling mode, may be one
+      of 'read' (connections set may be freed), 'write' (some write was done in
+      the connections set, it can't be freed before end of the transaction),
+      'transaction' (we want to keep the connections set for the whole
+      transaction, with or without writing)
+
+      :attr:`pending_operations`, ordered list of operations to be processed on
+      commit/rollback
+
+      :attr:`commit_state`, describing the transaction commit state, may be one
+      of None (not yet committing), 'precommit' (calling precommit event on
+      operations), 'postcommit' (calling postcommit event on operations),
+      'uncommitable' (some :exc:`ValidationError` or :exc:`Unauthorized` error
+      has been raised during the transaction, so it must be rolled back).
+
+      :attr:`read_security` and :attr:`write_security`, boolean flags telling if
+      read/write security is currently activated.
+
+      :attr:`hooks_mode`, may be either `HOOKS_ALLOW_ALL` or `HOOKS_DENY_ALL`.
+
+      :attr:`enabled_hook_categories`, when :attr:`hooks_mode` is
+      `HOOKS_DENY_ALL`, this set contains hooks categories that are enabled.
+
+      :attr:`disabled_hook_categories`, when :attr:`hooks_mode` is
+      `HOOKS_ALLOW_ALL`, this set contains hooks categories that are disabled.
+
+
+      :attr:`running_dbapi_query`, boolean flag telling if the executing query
+      is coming from a dbapi connection or is a query from within the repository
     """
     is_internal_session = False
 
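
As the docstring above says, `transaction_data` is scoped to the current transaction and cleared on commit/rollback, which makes it the natural place for hooks to flag work already done. A minimal sketch, with a hypothetical key name and `work` callable:

    def run_once_per_transaction(session, work):
        # the flag cannot leak into the next transaction since
        # transaction_data is cleared on commit/rollback
        if 'myapp.done' not in session.transaction_data:
            session.transaction_data['myapp.done'] = True
            work(session)
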
@@ -245,7 +294,10 @@
         """return a fake request/session using specified user"""
         session = Session(user, self.repo)
         threaddata = session._threaddata
-        threaddata.pool = self.pool
+        threaddata.cnxset = self.cnxset
+        # we attributed a connections set, so update ctx_count, else it will be
+        # freed while still in use
+        threaddata.ctx_count = 1
         # share pending_operations, else operation added in the hi-jacked
         # session such as SendMailOp won't ever be processed
         threaddata.pending_operations = self.pending_operations
@@ -387,14 +439,14 @@
         """return a sql cursor on the system database"""
         if sql.split(None, 1)[0].upper() != 'SELECT':
             self.mode = 'write'
-        source = self.pool.source('system')
+        source = self.cnxset.source('system')
         try:
             return source.doexec(self, sql, args, rollback=rollback_on_failure)
         except (source.OperationalError, source.InterfaceError):
             if not rollback_on_failure:
                 raise
             source.warning("trying to reconnect")
-            self.pool.reconnect(source)
+            self.cnxset.reconnect(source)
             return source.doexec(self, sql, args, rollback=rollback_on_failure)
 
     def set_language(self, language):
@@ -445,6 +497,29 @@
     def security_enabled(self, read=False, write=False):
         return security_enabled(self, read=read, write=write)
 
+    def init_security(self, read, write):
+        if read is None:
+            oldread = None
+        else:
+            oldread = self.set_read_security(read)
+        if write is None:
+            oldwrite = None
+        else:
+            oldwrite = self.set_write_security(write)
+        self._threaddata.ctx_count += 1
+        return oldread, oldwrite
+
+    def reset_security(self, read, write):
+        txstore = self._threaddata
+        txstore.ctx_count -= 1
+        if txstore.ctx_count == 0:
+            self._clear_thread_storage(txstore)
+        else:
+            if read is not None:
+                self.set_read_security(read)
+            if write is not None:
+                self.set_write_security(write)
+
     @property
     def read_security(self):
         """return a boolean telling if read security is activated or not"""
@@ -545,6 +620,28 @@
         self._threaddata.hooks_mode = mode
         return oldmode
 
+    def init_hooks_mode_categories(self, mode, categories):
+        oldmode = self.set_hooks_mode(mode)
+        if mode is self.HOOKS_DENY_ALL:
+            changes = self.enable_hook_categories(*categories)
+        else:
+            changes = self.disable_hook_categories(*categories)
+        self._threaddata.ctx_count += 1
+        return oldmode, changes
+
+    def reset_hooks_mode_categories(self, oldmode, mode, categories):
+        txstore = self._threaddata
+        txstore.ctx_count -= 1
+        if txstore.ctx_count == 0:
+            self._clear_thread_storage(txstore)
+        else:
+            if categories:
+                if mode is self.HOOKS_DENY_ALL:
+                    return self.disable_hook_categories(*categories)
+                else:
+                    return self.enable_hook_categories(*categories)
+            self.set_hooks_mode(oldmode)
+
     @property
     def disabled_hook_categories(self):
         try:
@@ -568,17 +665,18 @@
         - on HOOKS_ALLOW_ALL mode, ensure those categories are disabled
         """
         changes = set()
+        self.pruned_hooks_cache.clear()
         if self.hooks_mode is self.HOOKS_DENY_ALL:
-            enablecats = self.enabled_hook_categories
+            enabledcats = self.enabled_hook_categories
             for category in categories:
-                if category in enablecats:
-                    enablecats.remove(category)
+                if category in enabledcats:
+                    enabledcats.remove(category)
                     changes.add(category)
         else:
-            disablecats = self.disabled_hook_categories
+            disabledcats = self.disabled_hook_categories
             for category in categories:
-                if category not in disablecats:
-                    disablecats.add(category)
+                if category not in disabledcats:
+                    disabledcats.add(category)
                     changes.add(category)
         return tuple(changes)
 
@@ -589,17 +687,18 @@
         - on HOOKS_ALLOW_ALL mode, ensure those categories are not disabled
         """
         changes = set()
+        self.pruned_hooks_cache.clear()
         if self.hooks_mode is self.HOOKS_DENY_ALL:
-            enablecats = self.enabled_hook_categories
+            enabledcats = self.enabled_hook_categories
             for category in categories:
-                if category not in enablecats:
-                    enablecats.add(category)
+                if category not in enabledcats:
+                    enabledcats.add(category)
                     changes.add(category)
         else:
-            disablecats = self.disabled_hook_categories
+            disabledcats = self.disabled_hook_categories
             for category in categories:
-                if category in self.disabled_hook_categories:
-                    disablecats.remove(category)
+                if category in disabledcats:
+                    disabledcats.remove(category)
                     changes.add(category)
         return tuple(changes)
 
@@ -619,19 +718,19 @@
 
     # connection management ###################################################
 
-    def keep_pool_mode(self, mode):
-        """set pool_mode, e.g. how the session will keep its pool:
+    def keep_cnxset_mode(self, mode):
+        """set `mode`, e.g. how the session will keep its connections set:
 
-        * if mode == 'write', the pool is freed after each ready query, but kept
-          until the transaction's end (eg commit or rollback) when a write query
-          is detected (eg INSERT/SET/DELETE queries)
+        * if mode == 'write', the connections set is freed after each read
+          query, but kept until the transaction's end (eg commit or rollback)
+          when a write query is detected (eg INSERT/SET/DELETE queries)
 
-        * if mode == 'transaction', the pool is only freed after the
+        * if mode == 'transaction', the connections set is only freed after the
           transaction's end
 
-        notice that a repository has a limited set of pools, and a session has to
-        wait for a free pool to run any rql query (unless it already has a pool
-        set).
+        notice that a repository has a limited number of connections sets, and
+        a session has to wait for a free connections set to run any rql query
+        (unless it already has one).
         """
         assert mode in ('transaction', 'write')
         if mode == 'transaction':
@@ -654,56 +753,58 @@
     commit_state = property(get_commit_state, set_commit_state)
 
     @property
-    def pool(self):
-        """connections pool, set according to transaction mode for each query"""
+    def cnxset(self):
+        """connections set, set according to transaction mode for each query"""
         if self._closed:
-            self.reset_pool(True)
-            raise Exception('try to access pool on a closed session')
-        return getattr(self._threaddata, 'pool', None)
+            self.free_cnxset(True)
+            raise Exception('try to access connections set on a closed session')
+        return getattr(self._threaddata, 'cnxset', None)
 
-    def set_pool(self):
-        """the session need a pool to execute some queries"""
+    def set_cnxset(self):
+        """the session need a connections set to execute some queries"""
         with self._closed_lock:
             if self._closed:
-                self.reset_pool(True)
-                raise Exception('try to set pool on a closed session')
-            if self.pool is None:
-                # get pool first to avoid race-condition
-                self._threaddata.pool = pool = self.repo._get_pool()
+                self.free_cnxset(True)
+                raise Exception('try to set connections set on a closed session')
+            if self.cnxset is None:
+                # get connections set first to avoid race-condition
+                self._threaddata.cnxset = cnxset = self.repo._get_cnxset()
+                self._threaddata.ctx_count += 1
                 try:
-                    pool.pool_set()
+                    cnxset.cnxset_set()
                 except:
-                    self._threaddata.pool = None
-                    self.repo._free_pool(pool)
+                    self._threaddata.cnxset = None
+                    self.repo._free_cnxset(cnxset)
                     raise
                 self._threads_in_transaction.add(
-                    (threading.currentThread(), pool) )
-            return self._threaddata.pool
+                    (threading.currentThread(), cnxset) )
+            return self._threaddata.cnxset
 
-    def _free_thread_pool(self, thread, pool, force_close=False):
+    def _free_thread_cnxset(self, thread, cnxset, force_close=False):
         try:
-            self._threads_in_transaction.remove( (thread, pool) )
+            self._threads_in_transaction.remove( (thread, cnxset) )
         except KeyError:
-            # race condition on pool freeing (freed by commit or rollback vs
+            # race condition on cnxset freeing (freed by commit or rollback vs
             # close)
             pass
         else:
             if force_close:
-                pool.reconnect()
+                cnxset.reconnect()
             else:
-                pool.pool_reset()
-            # free pool once everything is done to avoid race-condition
-            self.repo._free_pool(pool)
+                cnxset.cnxset_freed()
+            # free cnxset once everything is done to avoid race-condition
+            self.repo._free_cnxset(cnxset)
 
-    def reset_pool(self, ignoremode=False):
-        """the session is no longer using its pool, at least for some time"""
-        # pool may be none if no operation has been done since last commit
+    def free_cnxset(self, ignoremode=False):
+        """the session is no longer using its connections set, at least for some time"""
+        # cnxset may be None if no operation has been done since last commit
         # or rollback
-        pool = getattr(self._threaddata, 'pool', None)
-        if pool is not None and (ignoremode or self.mode == 'read'):
+        cnxset = getattr(self._threaddata, 'cnxset', None)
+        if cnxset is not None and (ignoremode or self.mode == 'read'):
             # even in read mode, we must release the current transaction
-            self._free_thread_pool(threading.currentThread(), pool)
-            del self._threaddata.pool
+            self._free_thread_cnxset(threading.currentThread(), cnxset)
+            del self._threaddata.cnxset
+            self._threaddata.ctx_count -= 1
 
     def _touch(self):
         """update latest session usage timestamp and reset mode to read"""
@@ -769,9 +870,15 @@
     def source_defs(self):
         return self.repo.source_defs()
 
-    def describe(self, eid):
-        """return a tuple (type, sourceuri, extid) for the entity with id <eid>"""
-        return self.repo.type_and_source_from_eid(eid, self)
+    def describe(self, eid, asdict=False):
+        """return a tuple `(type, sourceuri, extid)` for the entity with id
+        <eid>, or a dictionary also giving the actual source uri if `asdict`
+        is true
+        """
+        metas = self.repo.type_and_source_from_eid(eid, self)
+        if asdict:
+            return dict(zip(('type', 'source', 'extid', 'asource'), metas))
+        # XXX :-1 for cw compat, use asdict=True for full information
+        return metas[:-1]
 
     # db-api like interface ###################################################
 
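
Both call styles of the extended `describe`, sketched for a hypothetical eid; `asdict=True` exposes the new actual source uri under the `asource` key:

    # backward-compatible 3-tuple
    etype, source, extid = session.describe(1234)

    # full metadata, including the actual source uri
    metas = session.describe(1234, asdict=True)
    print metas['type'], metas['source'], metas['extid'], metas['asource']
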
@@ -792,9 +897,9 @@
         rset.req = self
         return rset
 
-    def _clear_thread_data(self, reset_pool=True):
-        """remove everything from the thread local storage, except pool
-        which is explicitly removed by reset_pool, and mode which is set anyway
+    def _clear_thread_data(self, free_cnxset=True):
+        """remove everything from the thread local storage, except connections set
+        which is explicitly removed by free_cnxset, and mode which is set anyway
         by _touch
         """
         try:
@@ -802,23 +907,38 @@
         except AttributeError:
             pass
         else:
-            if reset_pool:
-                self._tx_data.pop(txstore.transactionid, None)
-                try:
-                    del self.__threaddata.txdata
-                except AttributeError:
-                    pass
+            if free_cnxset:
+                self.free_cnxset()
+                if txstore.ctx_count == 0:
+                    self._clear_thread_storage(txstore)
+                else:
+                    self._clear_tx_storage(txstore)
             else:
-                for name in ('commit_state', 'transaction_data',
-                             'pending_operations', '_rewriter'):
-                    try:
-                        delattr(txstore, name)
-                    except AttributeError:
-                        continue
+                self._clear_tx_storage(txstore)
+
+    def _clear_thread_storage(self, txstore):
+        self._tx_data.pop(txstore.transactionid, None)
+        try:
+            del self.__threaddata.txdata
+        except AttributeError:
+            pass
 
-    def commit(self, reset_pool=True):
+    def _clear_tx_storage(self, txstore):
+        for name in ('commit_state', 'transaction_data',
+                     'pending_operations', '_rewriter',
+                     'pruned_hooks_cache'):
+            try:
+                delattr(txstore, name)
+            except AttributeError:
+                continue
+
+    def commit(self, free_cnxset=True, reset_pool=None):
         """commit the current session's transaction"""
-        if self.pool is None:
+        if reset_pool is not None:
+            warn('[3.13] use free_cnxset argument instead of reset_pool',
+                 DeprecationWarning, stacklevel=2)
+            free_cnxset = reset_pool
+        if self.cnxset is None:
             assert not self.pending_operations
             self._clear_thread_data()
             self._touch()
@@ -867,9 +987,9 @@
                     # XXX use slice notation since self.pending_operations is a
                     # read-only property.
                     self.pending_operations[:] = processed + self.pending_operations
-                    self.rollback(reset_pool)
+                    self.rollback(free_cnxset)
                     raise
-                self.pool.commit()
+                self.cnxset.commit()
                 self.commit_state = 'postcommit'
                 while self.pending_operations:
                     operation = self.pending_operations.pop(0)
@@ -883,15 +1003,19 @@
                 return self.transaction_uuid(set=False)
         finally:
             self._touch()
-            if reset_pool:
-                self.reset_pool(ignoremode=True)
-            self._clear_thread_data(reset_pool)
+            if free_cnxset:
+                self.free_cnxset(ignoremode=True)
+            self._clear_thread_data(free_cnxset)
 
-    def rollback(self, reset_pool=True):
+    def rollback(self, free_cnxset=True, reset_pool=None):
         """rollback the current session's transaction"""
-        # don't use self.pool, rollback may be called with _closed == True
-        pool = getattr(self._threaddata, 'pool', None)
-        if pool is None:
+        if reset_pool is not None:
+            warn('[3.13] use free_cnxset argument instead of reset_pool',
+                 DeprecationWarning, stacklevel=2)
+            free_cnxset = reset_pool
+        # don't use self.cnxset, rollback may be called with _closed == True
+        cnxset = getattr(self._threaddata, 'cnxset', None)
+        if cnxset is None:
             self._clear_thread_data()
             self._touch()
             self.debug('rollback session %s done (no db activity)', self.id)
@@ -906,20 +1030,20 @@
                     except:
                         self.critical('rollback error', exc_info=sys.exc_info())
                         continue
-                pool.rollback()
+                cnxset.rollback()
                 self.debug('rollback for session %s done', self.id)
         finally:
             self._touch()
-            if reset_pool:
-                self.reset_pool(ignoremode=True)
-            self._clear_thread_data(reset_pool)
+            if free_cnxset:
+                self.free_cnxset(ignoremode=True)
+            self._clear_thread_data(free_cnxset)
 
     def close(self):
-        """do not close pool on session close, since they are shared now"""
+        """do not close connections set on session close, since they are shared now"""
         with self._closed_lock:
             self._closed = True
         # copy since _threads_in_transaction may be modified while waiting
-        for thread, pool in self._threads_in_transaction.copy():
+        for thread, cnxset in self._threads_in_transaction.copy():
             if thread is threading.currentThread():
                 continue
             self.info('waiting for thread %s', thread)
@@ -929,12 +1053,12 @@
             for i in xrange(10):
                 thread.join(1)
                 if not (thread.isAlive() and
-                        (thread, pool) in self._threads_in_transaction):
+                        (thread, cnxset) in self._threads_in_transaction):
                     break
             else:
                 self.error('thread %s still alive after 10 seconds, will close '
                            'session anyway', thread)
-                self._free_thread_pool(thread, pool, force_close=True)
+                self._free_thread_cnxset(thread, cnxset, force_close=True)
         self.rollback()
         del self.__threaddata
         del self._tx_data
@@ -961,9 +1085,16 @@
             self._threaddata.pending_operations = []
             return self._threaddata.pending_operations
 
+    @property
+    def pruned_hooks_cache(self):
+        try:
+            return self._threaddata.pruned_hooks_cache
+        except AttributeError:
+            self._threaddata.pruned_hooks_cache = {}
+            return self._threaddata.pruned_hooks_cache
+
     def add_operation(self, operation, index=None):
-        """add an observer"""
-        assert self.commit_state != 'commit'
+        """add an operation"""
         if index is None:
             self.pending_operations.append(operation)
         else:
@@ -1074,6 +1205,18 @@
     def schema_rproperty(self, rtype, eidfrom, eidto, rprop):
         return getattr(self.rtype_eids_rdef(rtype, eidfrom, eidto), rprop)
 
+    @property
+    @deprecated("[3.13] use .cnxset attribute instead of .pool")
+    def pool(self):
+        return self.cnxset
+
+    @deprecated("[3.13] use .set_cnxset() method instead of .set_pool()")
+    def set_pool(self):
+        return self.set_cnxset()
+
+    @deprecated("[3.13] use .free_cnxset() method instead of .reset_pool()")
+    def reset_pool(self):
+        return self.free_cnxset()
 
     @deprecated("[3.7] execute is now unsafe by default in hooks/operation. You"
                 " can also control security with the security_enabled context "
@@ -1140,12 +1283,12 @@
         self.disable_hook_categories('integrity')
 
     @property
-    def pool(self):
-        """connections pool, set according to transaction mode for each query"""
+    def cnxset(self):
+        """connections set, set according to transaction mode for each query"""
         if self.repo.shutting_down:
-            self.reset_pool(True)
+            self.free_cnxset(True)
             raise Exception('repository is shutting down')
-        return getattr(self._threaddata, 'pool', None)
+        return getattr(self._threaddata, 'cnxset', None)
 
 
 class InternalManager(object):
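
`init_security`/`reset_security` and `init_hooks_mode_categories`/`reset_hooks_mode_categories` are what the context managers at the top of this file now call, and the `ctx_count` bookkeeping is what lets those context managers nest without the transaction storage being cleared too early. Typical nested usage, sketched with a hypothetical RQL query:

    from __future__ import with_statement  # python 2.5, as in datafeed.py

    with session.security_enabled(read=False, write=False):
        # both checks disabled; init_security bumped ctx_count
        with session.security_enabled(read=False, write=True):
            # write checks re-enabled for this inner block only
            session.execute('DELETE CWGroup G WHERE G name "hypothetical"')
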
--- a/server/sources/__init__.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/server/sources/__init__.py	Tue Jun 28 16:33:53 2011 +0200
@@ -110,6 +110,10 @@
     # force deactivation (configuration error for instance)
     disabled = False
 
+    # boolean telling if cwuri of entities from this source is the url that
+    # should be used as entity's absolute url
+    use_cwuri_as_url = False
+
     # source configuration options
     options = ()
 
@@ -119,6 +123,7 @@
         self.support_relations['identity'] = False
         self.eid = eid
         self.public_config = source_config.copy()
+        self.public_config.setdefault('use-cwuri-as-url', self.use_cwuri_as_url)
         self.remove_sensitive_information(self.public_config)
         self.uri = source_config.pop('uri')
         set_log_methods(self, getLogger('cubicweb.sources.'+self.uri))
@@ -213,7 +218,7 @@
         """
         pass
 
-    PUBLIC_KEYS = ('type', 'uri')
+    PUBLIC_KEYS = ('type', 'uri', 'use-cwuri-as-url')
     def remove_sensitive_information(self, sourcedef):
         """remove sensitive information such as login / password from source
         definition
@@ -230,23 +235,23 @@
 
     def check_connection(self, cnx):
         """Check connection validity, return None if the connection is still
-        valid else a new connection (called when the pool using the given
-        connection is being attached to a session). Do nothing by default.
+        valid else a new connection (called when the connections set using the
+        given connection is being attached to a session). Do nothing by default.
         """
         pass
 
-    def close_pool_connections(self):
-        for pool in self.repo.pools:
-            pool._cursors.pop(self.uri, None)
-            pool.source_cnxs[self.uri][1].close()
+    def close_source_connections(self):
+        for cnxset in self.repo.cnxsets:
+            cnxset._cursors.pop(self.uri, None)
+            cnxset.source_cnxs[self.uri][1].close()
 
-    def open_pool_connections(self):
-        for pool in self.repo.pools:
-            pool.source_cnxs[self.uri] = (self, self.get_connection())
+    def open_source_connections(self):
+        for cnxset in self.repo.cnxsets:
+            cnxset.source_cnxs[self.uri] = (self, self.get_connection())
 
-    def pool_reset(self, cnx):
-        """the pool using the given connection is being reseted from its current
-        attached session
+    def cnxset_freed(self, cnx):
+        """the connections set holding the given connection is being reseted
+        from its current attached session.
 
         do nothing by default
         """
@@ -404,7 +409,7 @@
         .executemany().
         """
         res = self.syntax_tree_search(session, union, args, varmap=varmap)
-        session.pool.source('system').manual_insert(res, table, session)
+        session.cnxset.source('system').manual_insert(res, table, session)
 
     # write modification api ###################################################
     # read-only sources don't have to implement methods below
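
A source that mirrors external entities can now simply flip the new class attribute; it is published to clients through `public_config` under the `use-cwuri-as-url` key. A minimal sketch; `MirrorSource` is hypothetical:

    from cubicweb.server.sources import AbstractSource

    class MirrorSource(AbstractSource):
        """hypothetical source whose entities should link back to their
        original site, using cwuri as the entity's absolute url"""
        use_cwuri_as_url = True
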
--- a/server/sources/datafeed.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/server/sources/datafeed.py	Tue Jun 28 16:33:53 2011 +0200
@@ -18,15 +18,24 @@
 """datafeed sources: copy data from an external data stream into the system
 database
 """
+from __future__ import with_statement
+
+import urllib2
+import StringIO
 from datetime import datetime, timedelta
 from base64 import b64decode
+from cookielib import CookieJar
 
-from cubicweb import RegistryNotFound, ObjectNotFound, ValidationError
+from lxml import etree
+
+from cubicweb import RegistryNotFound, ObjectNotFound, ValidationError, UnknownEid
 from cubicweb.server.sources import AbstractSource
 from cubicweb.appobject import AppObject
 
+
 class DataFeedSource(AbstractSource):
     copy_based_source = True
+    use_cwuri_as_url = True
 
     options = (
         ('synchronize',
@@ -70,7 +79,7 @@
 
     def _entity_update(self, source_entity):
         source_entity.complete()
-        self.parser = source_entity.parser
+        self.parser_id = source_entity.parser
         self.latest_retrieval = source_entity.latest_retrieval
         self.urls = [url.strip() for url in source_entity.url.splitlines()
                      if url.strip()]
@@ -87,12 +96,12 @@
     def init(self, activated, source_entity):
         if activated:
             self._entity_update(source_entity)
-        self.parser = source_entity.parser
+        self.parser_id = source_entity.parser
         self.load_mapping(source_entity._cw)
 
     def _get_parser(self, session, **kwargs):
         return self.repo.vreg['parsers'].select(
-            self.parser, session, source=self, **kwargs)
+            self.parser_id, session, source=self, **kwargs)
 
     def load_mapping(self, session):
         self.mapping = {}
@@ -120,27 +129,50 @@
             return False
         return datetime.utcnow() < (self.latest_retrieval + self.synchro_interval)
 
+    def update_latest_retrieval(self, session):
+        self.latest_retrieval = datetime.utcnow()
+        session.execute('SET X latest_retrieval %(date)s WHERE X eid %(x)s',
+                        {'x': self.eid, 'date': self.latest_retrieval})
+
+    def acquire_synchronization_lock(self, session):
+        # XXX race condition until WHERE of SET queries is executed using
+        # 'SELECT FOR UPDATE'
+        if not session.execute('SET X synchronizing TRUE WHERE X eid %(x)s, X synchronizing FALSE',
+                               {'x': self.eid}):
+            self.error('concurrent synchronization detected, skipping pull')
+            session.commit(free_cnxset=False)
+            return False
+        session.commit(free_cnxset=False)
+        return True
+
+    def release_synchronization_lock(self, session):
+        session.execute('SET X synchronizing FALSE WHERE X eid %(x)s',
+                        {'x': self.eid})
+        session.commit()
+
     def pull_data(self, session, force=False, raise_on_error=False):
+        """Launch synchronization of the source if needed.
+
+        This method is responsible for handling commit/rollback on the given
+        session.
+        """
         if not force and self.fresh():
             return {}
+        if not self.acquire_synchronization_lock(session):
+            return {}
+        try:
+            with session.transaction(free_cnxset=False):
+                return self._pull_data(session, force, raise_on_error)
+        finally:
+            self.release_synchronization_lock(session)
+
+    def _pull_data(self, session, force=False, raise_on_error=False):
         if self.config['delete-entities']:
             myuris = self.source_cwuris(session)
         else:
             myuris = None
         parser = self._get_parser(session, sourceuris=myuris)
-        error = False
-        self.info('pulling data for source %s', self.uri)
-        for url in self.urls:
-            try:
-                if parser.process(url):
-                    error = True
-            except IOError, exc:
-                if raise_on_error:
-                    raise
-                self.error('could not pull data while processing %s: %s',
-                           url, exc)
-                error = True
-        if error:
+        if self.process_urls(parser, self.urls, raise_on_error):
             self.warning("some error occured, don't attempt to delete entities")
         elif self.config['delete-entities'] and myuris:
             byetype = {}
@@ -150,11 +182,24 @@
             for etype, eids in byetype.iteritems():
                 session.execute('DELETE %s X WHERE X eid IN (%s)'
                                 % (etype, ','.join(eids)))
-        self.latest_retrieval = datetime.utcnow()
-        session.execute('SET X latest_retrieval %(date)s WHERE X eid %(x)s',
-                        {'x': self.eid, 'date': self.latest_retrieval})
+        self.update_latest_retrieval(session)
         return parser.stats
 
+    def process_urls(self, parser, urls, raise_on_error=False):
+        error = False
+        for url in urls:
+            self.info('pulling data from %s', url)
+            try:
+                if parser.process(url, raise_on_error):
+                    error = True
+            except Exception, exc:
+                if raise_on_error:
+                    raise
+                self.error('could not pull data while processing %s: %s',
+                           url, exc)
+                error = True
+        return error
+
     def before_entity_insertion(self, session, lid, etype, eid, sourceparams):
         """called by the repository when an eid has been attributed for an
         entity stored here but the entity has not been inserted in the system
@@ -194,8 +239,8 @@
 class DataFeedParser(AppObject):
     __registry__ = 'parsers'
 
-    def __init__(self, session, source, sourceuris=None):
-        self._cw = session
+    def __init__(self, session, source, sourceuris=None, **kwargs):
+        super(DataFeedParser, self).__init__(session, **kwargs)
         self.source = source
         self.sourceuris = sourceuris
         self.stats = {'created': set(),
@@ -212,14 +257,33 @@
         raise ValidationError(schemacfg.eid, {None: msg})
 
     def extid2entity(self, uri, etype, **sourceparams):
+        """return an entity for the given uri. May return None if it should be
+        skipped
+        """
+        # if cwsource is specified and repository has a source with the same
+        # name, call extid2eid on that source so entity will be properly seen as
+        # coming from this source
+        source = self._cw.repo.sources_by_uri.get(
+            sourceparams.pop('cwsource', None), self.source)
         sourceparams['parser'] = self
-        eid = self.source.extid2eid(str(uri), etype, self._cw,
-                                    sourceparams=sourceparams)
+        eid = source.extid2eid(str(uri), etype, self._cw,
+                               sourceparams=sourceparams)
+        if eid < 0:
+            # entity has been moved away from its original source
+            #
+            # Don't give etype to entity_from_eid so we get UnknownEid if the
+            # entity has been removed
+            try:
+                entity = self._cw.entity_from_eid(-eid)
+            except UnknownEid:
+                return None
+            self.notify_updated(entity) # avoid later update from the source's data
+            return entity
         if self.sourceuris is not None:
             self.sourceuris.pop(str(uri), None)
         return self._cw.entity_from_eid(eid, etype)
 
-    def process(self, url):
+    def process(self, url, raise_on_error=False, partialcommit=True):
         """main callback: process the url"""
         raise NotImplementedError
 
@@ -237,3 +301,66 @@
 
     def notify_updated(self, entity):
         return self.stats['updated'].add(entity.eid)
+
+
+class DataFeedXMLParser(DataFeedParser):
+
+    def process(self, url, raise_on_error=False, partialcommit=True):
+        """IDataFeedParser main entry point"""
+        try:
+            parsed = self.parse(url)
+        except Exception, ex:
+            if raise_on_error:
+                raise
+            self.source.error(str(ex))
+            return True
+        error = False
+        for args in parsed:
+            try:
+                self.process_item(*args)
+                if partialcommit:
+                    # commit+set_cnxset instead of commit(free_cnxset=False) to
+                    # give other threads a chance to get our connections set
+                    self._cw.commit()
+                    self._cw.set_cnxset()
+            except ValidationError, exc:
+                if raise_on_error:
+                    raise
+                if partialcommit:
+                    self.source.error('Skipping %s because of validation error %s' % (args, exc))
+                    self._cw.rollback()
+                    self._cw.set_cnxset()
+                    error = True
+                else:
+                    raise
+        return error
+
+    def parse(self, url):
+        if url.startswith('http'):
+            from cubicweb.sobjects.parsers import HOST_MAPPING
+            for mappedurl in HOST_MAPPING:
+                if url.startswith(mappedurl):
+                    url = url.replace(mappedurl, HOST_MAPPING[mappedurl], 1)
+                    break
+            self.source.info('GET %s', url)
+            stream = _OPENER.open(url)
+        elif url.startswith('file://'):
+            stream = open(url[7:])
+        else:
+            stream = StringIO.StringIO(url)
+        return self.parse_etree(etree.parse(stream).getroot())
+
+    def parse_etree(self, document):
+        return [(document,)]
+
+    def process_item(self, *args):
+        raise NotImplementedError
+
+# use a cookie-enabled opener to reuse the session cookie, if any
+_OPENER = urllib2.build_opener()
+try:
+    from logilab.common import urllib2ext
+    _OPENER.add_handler(urllib2ext.HTTPGssapiAuthHandler())
+except ImportError: # python-kerberos not available
+    pass
+_OPENER.add_handler(urllib2.HTTPCookieProcessor(CookieJar()))
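
The new DataFeedXMLParser splits fetching from per-item handling: process()
calls parse() to build argument tuples, then feeds each one to process_item(),
committing between items when partialcommit is set. A minimal sketch of a
concrete parser, assuming a hypothetical 'card' XML layout and __regid__; only
parse_etree, process_item, extid2entity and notify_updated come from the code
above.

from cubicweb.server.sources import datafeed

class CardXMLParser(datafeed.DataFeedXMLParser):
    """sketch of a concrete parser; the regid and XML layout are
    hypothetical, the overridden methods are the API shown above"""
    __regid__ = 'mycube.cardparser'  # hypothetical identifier

    def parse_etree(self, document):
        # return one argument tuple per item; DataFeedXMLParser.process
        # passes each tuple to process_item
        return [(node.get('uri'), node.findtext('title'))
                for node in document.findall('card')]

    def process_item(self, uri, title):
        # extid2entity may now return None, when the entity has been
        # moved away from this source and deleted meanwhile
        entity = self.extid2entity(uri, 'Card', item={'title': title})
        if entity is not None:
            self.notify_updated(entity)
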
--- a/server/sources/extlite.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/server/sources/extlite.py	Tue Jun 28 16:33:53 2011 +0200
@@ -102,19 +102,19 @@
 
     def backup(self, backupfile, confirm):
         """method called to create a backup of the source's data"""
-        self.close_pool_connections()
+        self.close_source_connections()
         try:
             self.sqladapter.backup_to_file(backupfile, confirm)
         finally:
-            self.open_pool_connections()
+            self.open_source_connections()
 
     def restore(self, backupfile, confirm, drop):
         """method called to restore a backup of source's data"""
-        self.close_pool_connections()
+        self.close_source_connections()
         try:
             self.sqladapter.restore_from_file(backupfile, confirm, drop)
         finally:
-            self.open_pool_connections()
+            self.open_source_connections()
 
     @property
     def _sqlcnx(self):
@@ -174,15 +174,15 @@
 
     def check_connection(self, cnx):
         """check connection validity, return None if the connection is still valid
-        else a new connection (called when the pool using the given connection is
+        else a new connection (called when the connections set holding the given connection is
         being attached to a session)
 
         always return the connection to reset any cached cursor
         """
         return cnx
 
-    def pool_reset(self, cnx):
-        """the pool using the given connection is being reseted from its current
+    def cnxset_freed(self, cnx):
+        """the connections set holding the given connection is being freed from its current
         attached session: release the connection lock if the connection wrapper
         has a connection set
         """
@@ -286,7 +286,7 @@
         """
         if server.DEBUG:
             print 'exec', query, args
-        cursor = session.pool[self.uri]
+        cursor = session.cnxset[self.uri]
         try:
             # str(query) to avoid error if it's a unicode string
             cursor.execute(str(query), args)
@@ -294,7 +294,7 @@
             self.critical("sql: %r\n args: %s\ndbms message: %r",
                           query, args, ex.args[0])
             try:
-                session.pool.connection(self.uri).rollback()
+                session.cnxset.connection(self.uri).rollback()
                 self.critical('transaction has been rolled back')
             except:
                 pass
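
Throughout this changeset the connection "pool" is renamed to "connections
set": session.pool becomes session.cnxset (same mapping and .connection()
interface) and the pool_reset source callback becomes cnxset_freed. A small
helper sketching the renamed access path (the helper itself is hypothetical,
not part of the patch):

def rollback_source_cnx(session, uri):
    """roll back the DB-API connection the session's connections set
    holds for source `uri`; session.cnxset replaces session.pool in
    3.13 with the same mapping/.connection() interface"""
    try:
        session.cnxset.connection(uri).rollback()
    except Exception:
        pass  # connection may already be closed
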
--- a/server/sources/ldapuser.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/server/sources/ldapuser.py	Tue Jun 28 16:33:53 2011 +0200
@@ -310,7 +310,11 @@
         except Exception:
             self.error('while trying to authenticate %s', user, exc_info=True)
             raise AuthenticationError()
-        return self.extid2eid(user['dn'], 'CWUser', session)
+        eid = self.extid2eid(user['dn'], 'CWUser', session)
+        if eid < 0:
+            # user has been moved away from this source
+            raise AuthenticationError()
+        return eid
 
     def ldap_name(self, var):
         if var.stinfo['relations']:
@@ -392,7 +396,7 @@
                     break
         assert mainvars, rqlst
         columns, globtransforms = self.prepare_columns(mainvars, rqlst)
-        eidfilters = []
+        eidfilters = [lambda x: x > 0]
         allresults = []
         generator = RQL2LDAPFilter(self, session, args, mainvars)
         for mainvar in mainvars:
@@ -524,9 +528,9 @@
         """make an ldap query"""
         self.debug('ldap search %s %s %s %s %s', self.uri, base, scope,
                    searchstr, list(attrs))
-        # XXX for now, we do not have connection pool support for LDAP, so
+        # XXX for now, we do not have connections set support for LDAP, so
         # this is always self._conn
-        cnx = session.pool.connection(self.uri).cnx
+        cnx = session.cnxset.connection(self.uri).cnx
         try:
             res = cnx.search_s(base, scope, searchstr, attrs)
         except ldap.PARTIAL_RESULTS:
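
extid2eid may now return a negative eid when the entity has been moved away
from the source (e.g. a CWUser copied to the system source, as exercised by
test_copy_to_system_source below), hence the new guard in authenticate() and
the `lambda x: x > 0` eid filter. A sketch of the calling convention (the
helper name is hypothetical):

def source_eid_or_none(source, extid, etype, session):
    """a negative return from extid2eid means the entity was moved away
    from `source`; -eid is the eid it now has elsewhere, so treat it as
    not (or no longer) provided by this source"""
    eid = source.extid2eid(extid, etype, session)
    if eid < 0:
        return None
    return eid
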
--- a/server/sources/native.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/server/sources/native.py	Tue Jun 28 16:33:53 2011 +0200
@@ -313,9 +313,9 @@
             self.dbhelper.dbname = abspath(self.dbhelper.dbname)
             self.get_connection = lambda: ConnectionWrapper(self)
             self.check_connection = lambda cnx: cnx
-            def pool_reset(cnx):
+            def cnxset_freed(cnx):
                 cnx.close()
-            self.pool_reset = pool_reset
+            self.cnxset_freed = cnxset_freed
         if self.dbdriver == 'sqlite':
             self._create_eid = None
             self.create_eid = self._create_eid_sqlite
@@ -355,21 +355,21 @@
         """execute the query and return its result"""
         return self.process_result(self.doexec(session, sql, args))
 
-    def init_creating(self, pool=None):
+    def init_creating(self, cnxset=None):
         # check full text index availability
         if self.do_fti:
-            if pool is None:
-                _pool = self.repo._get_pool()
-                _pool.pool_set()
+            if cnxset is None:
+                _cnxset = self.repo._get_cnxset()
+                _cnxset.cnxset_set()
             else:
-                _pool = pool
-            if not self.dbhelper.has_fti_table(_pool['system']):
+                _cnxset = cnxset
+            if not self.dbhelper.has_fti_table(_cnxset['system']):
                 if not self.repo.config.creating:
                     self.critical('no text index table')
                 self.do_fti = False
-            if pool is None:
-                _pool.pool_reset()
-                self.repo._free_pool(_pool)
+            if cnxset is None:
+                _cnxset.cnxset_freed()
+                self.repo._free_cnxset(_cnxset)
 
     def backup(self, backupfile, confirm, format='native'):
         """method called to create a backup of the source's data"""
@@ -377,25 +377,25 @@
             self.repo.fill_schema()
             self.set_schema(self.repo.schema)
             helper = DatabaseIndependentBackupRestore(self)
-            self.close_pool_connections()
+            self.close_source_connections()
             try:
                 helper.backup(backupfile)
             finally:
-                self.open_pool_connections()
+                self.open_source_connections()
         elif format == 'native':
-            self.close_pool_connections()
+            self.close_source_connections()
             try:
                 self.backup_to_file(backupfile, confirm)
             finally:
-                self.open_pool_connections()
+                self.open_source_connections()
         else:
             raise ValueError('Unknown format %r' % format)
 
 
     def restore(self, backupfile, confirm, drop, format='native'):
         """method called to restore a backup of source's data"""
-        if self.repo.config.open_connections_pools:
-            self.close_pool_connections()
+        if self.repo.config.init_cnxset_pool:
+            self.close_source_connections()
         try:
             if format == 'portable':
                 helper = DatabaseIndependentBackupRestore(self)
@@ -405,12 +405,16 @@
             else:
                 raise ValueError('Unknown format %r' % format)
         finally:
-            if self.repo.config.open_connections_pools:
-                self.open_pool_connections()
+            if self.repo.config.init_cnxset_pool:
+                self.open_source_connections()
 
 
     def init(self, activated, source_entity):
-        self.init_creating(source_entity._cw.pool)
+        self.init_creating(source_entity._cw.cnxset)
+        try:
+            source_entity._cw.system_sql('SELECT COUNT(asource) FROM entities')
+        except Exception, ex:
+            self.eid_type_source = self.eid_type_source_pre_131
 
     def shutdown(self):
         if self._eid_creation_cnx:
@@ -532,13 +536,13 @@
                 raise
             # FIXME: better detection of disconnection problems
             self.warning("trying to reconnect")
-            session.pool.reconnect(self)
+            session.cnxset.reconnect(self)
             cursor = self.doexec(session, sql, args)
         except (self.DbapiError,), exc:
             # We get this one with pyodbc and SQL Server when connection was reset
             if exc.args[0] == '08S01' and session.mode != 'write':
                 self.warning("trying to reconnect")
-                session.pool.reconnect(self)
+                session.cnxset.reconnect(self)
                 cursor = self.doexec(session, sql, args)
             else:
                 raise
@@ -727,9 +731,9 @@
         """Execute a query.
         it's a function just so that it shows up in profiling
         """
-        cursor = session.pool[self.uri]
+        cursor = session.cnxset[self.uri]
         if server.DEBUG & server.DBG_SQL:
-            cnx = session.pool.connection(self.uri)
+            cnx = session.cnxset.connection(self.uri)
             # getattr to get the actual connection if cnx is a ConnectionWrapper
             # instance
             print 'exec', query, args, getattr(cnx, '_cnx', cnx)
@@ -744,7 +748,7 @@
                               query, args, ex.args[0])
             if rollback:
                 try:
-                    session.pool.connection(self.uri).rollback()
+                    session.cnxset.connection(self.uri).rollback()
                     if self.repo.config.mode != 'test':
                         self.critical('transaction has been rolled back')
                 except Exception, ex:
@@ -773,7 +777,7 @@
         """
         if server.DEBUG & server.DBG_SQL:
             print 'execmany', query, 'with', len(args), 'arguments'
-        cursor = session.pool[self.uri]
+        cursor = session.cnxset[self.uri]
         try:
             # str(query) to avoid error if it's a unicode string
             cursor.executemany(str(query), args)
@@ -784,7 +788,7 @@
                 self.critical("sql many: %r\n args: %s\ndbms message: %r",
                               query, args, ex.args[0])
             try:
-                session.pool.connection(self.uri).rollback()
+                session.cnxset.connection(self.uri).rollback()
                 if self.repo.config.mode != 'test':
                     self.critical('transaction has been rolled back')
             except:
@@ -802,7 +806,7 @@
             self.error("backend can't alter %s.%s to %s%s", table, column, coltype,
                        not allownull and 'NOT NULL' or '')
             return
-        self.dbhelper.change_col_type(LogCursor(session.pool[self.uri]),
+        self.dbhelper.change_col_type(LogCursor(session.cnxset[self.uri]),
                                       table, column, coltype, allownull)
         self.info('altered %s.%s: now %s%s', table, column, coltype,
                   not allownull and 'NOT NULL' or '')
@@ -817,7 +821,7 @@
             return
         table, column = rdef_table_column(rdef)
         coltype, allownull = rdef_physical_info(self.dbhelper, rdef)
-        self.dbhelper.set_null_allowed(LogCursor(session.pool[self.uri]),
+        self.dbhelper.set_null_allowed(LogCursor(session.cnxset[self.uri]),
                                        table, column, coltype, allownull)
 
     def update_rdef_indexed(self, session, rdef):
@@ -835,29 +839,46 @@
             self.drop_index(session, table, column, unique=True)
 
     def create_index(self, session, table, column, unique=False):
-        cursor = LogCursor(session.pool[self.uri])
+        cursor = LogCursor(session.cnxset[self.uri])
         self.dbhelper.create_index(cursor, table, column, unique)
 
     def drop_index(self, session, table, column, unique=False):
-        cursor = LogCursor(session.pool[self.uri])
+        cursor = LogCursor(session.cnxset[self.uri])
         self.dbhelper.drop_index(cursor, table, column, unique)
 
     # system source interface #################################################
 
     def eid_type_source(self, session, eid):
         """return a tuple (type, source, extid, asource) for the entity with id <eid>"""
+        sql = 'SELECT type, source, extid, asource FROM entities WHERE eid=%s' % eid
+        try:
+            res = self.doexec(session, sql).fetchone()
+        except:
+            assert session.cnxset, 'session has no connections set'
+            raise UnknownEid(eid)
+        if res is None:
+            raise UnknownEid(eid)
+        if res[-2] is not None:
+            if not isinstance(res, list):
+                res = list(res)
+            res[-2] = b64decode(res[-2])
+        return res
+
+    def eid_type_source_pre_131(self, session, eid):
+        """return a tuple (type, source, extid, asource) for the entity with
+        id <eid>, duplicating source as asource for entities tables predating
+        3.13.1"""
         sql = 'SELECT type, source, extid FROM entities WHERE eid=%s' % eid
         try:
             res = self.doexec(session, sql).fetchone()
         except:
-            assert session.pool, 'session has no pool set'
+            assert session.cnxset, 'session has no connections set'
             raise UnknownEid(eid)
         if res is None:
             raise UnknownEid(eid)
+        if not isinstance(res, list):
+            res = list(res)
         if res[-1] is not None:
-            if not isinstance(res, list):
-                res = list(res)
             res[-1] = b64decode(res[-1])
+        res.append(res[1])
         return res
 
     def extid2eid(self, session, source_uri, extid):
@@ -946,7 +967,7 @@
             extid = b64encode(extid)
         uri = 'system' if source.copy_based_source else source.uri
         attrs = {'type': entity.__regid__, 'eid': entity.eid, 'extid': extid,
-                 'source': uri, 'mtime': datetime.now()}
+                 'source': uri, 'asource': source.uri, 'mtime': datetime.now()}
         self.doexec(session, self.sqlgen.insert('entities', attrs), attrs)
         # insert core relations: is, is_instance_of and cw_source
         try:
@@ -1127,7 +1148,7 @@
         important note: while undoing of a transaction, only hooks in the
         'integrity', 'activeintegrity' and 'undo' categories are called.
         """
-        # set mode so pool isn't released subsquently until commit/rollback
+        # set mode so connections set isn't released subsequently until commit/rollback
         session.mode = 'write'
         errors = []
         session.transaction_data['undoing_uuid'] = txuuid
@@ -1372,7 +1393,7 @@
     def fti_unindex_entities(self, session, entities):
         """remove text content for entities from the full text index
         """
-        cursor = session.pool['system']
+        cursor = session.cnxset['system']
         cursor_unindex_object = self.dbhelper.cursor_unindex_object
         try:
             for entity in entities:
@@ -1385,7 +1406,7 @@
         """add text content of created/modified entities to the full text index
         """
         cursor_index_object = self.dbhelper.cursor_index_object
-        cursor = session.pool['system']
+        cursor = session.cnxset['system']
         try:
             # use cursor_index_object, not cursor_reindex_object since
             # unindexing done in the FTIndexEntityOp
@@ -1434,6 +1455,7 @@
   eid INTEGER PRIMARY KEY NOT NULL,
   type VARCHAR(64) NOT NULL,
   source VARCHAR(64) NOT NULL,
+  asource VARCHAR(64) NOT NULL,
   mtime %s NOT NULL,
   extid VARCHAR(256)
 );;
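
The entities system table gains an asource column recording the source an
entity actually comes from (source stays 'system' for copy-based sources),
and eid_type_source now returns a 4-tuple; eid_type_source_pre_131 emulates
it on older tables by duplicating source. Instances created before 3.13.1
thus need a schema upgrade along these lines (a sketch matching the CREATE
TABLE above, not the official migration script):

def add_asource_column(sqlcursor):
    """hypothetical upgrade helper for a pre-3.13.1 entities table"""
    sqlcursor.execute('ALTER TABLE entities ADD COLUMN asource VARCHAR(64)')
    # backfill from the existing source column, which recorded the
    # actual source before copy-based sources were introduced
    sqlcursor.execute('UPDATE entities SET asource=source')
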
--- a/server/sources/pyrorql.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/server/sources/pyrorql.py	Tue Jun 28 16:33:53 2011 +0200
@@ -235,10 +235,12 @@
         if dexturi == 'system' or not (
             dexturi in self.repo.sources_by_uri or self._skip_externals):
             assert etype in self.support_entities, etype
-            return self.repo.extid2eid(self, str(extid), etype, session), True
-        if dexturi in self.repo.sources_by_uri:
+            eid = self.repo.extid2eid(self, str(extid), etype, session)
+            if eid > 0:
+                return eid, True
+        elif dexturi in self.repo.sources_by_uri:
             source = self.repo.sources_by_uri[dexturi]
-            cnx = session.pool.connection(source.uri)
+            cnx = session.cnxset.connection(source.uri)
             eid = source.local_eid(cnx, dextid, session)[0]
             return eid, False
         return None, None
@@ -322,7 +324,7 @@
         else a new connection
         """
         # we have to transfer manually thread ownership. This can be done safely
-        # since the pool to which belong the connection is affected to one
+        # since the connections set holding the connection is assigned to one
         # session/thread and can't be called simultaneously
         try:
             cnx._repo._transferThread(threading.currentThread())
@@ -359,7 +361,7 @@
         if not args is None:
             args = args.copy()
         # get cached cursor anyway
-        cu = session.pool[self.uri]
+        cu = session.cnxset[self.uri]
         if cu is None:
             # this is a ConnectionWrapper instance
             msg = session._("can't connect to source %s, some data may be missing")
@@ -390,7 +392,7 @@
                     or uidtype(union, i, etype, args)):
                     needtranslation.append(i)
             if needtranslation:
-                cnx = session.pool.connection(self.uri)
+                cnx = session.cnxset.connection(self.uri)
                 for rowindex in xrange(rset.rowcount - 1, -1, -1):
                     row = rows[rowindex]
                     localrow = False
@@ -434,37 +436,37 @@
     def update_entity(self, session, entity):
         """update an entity in the source"""
         relations, kwargs = self._entity_relations_and_kwargs(session, entity)
-        cu = session.pool[self.uri]
+        cu = session.cnxset[self.uri]
         cu.execute('SET %s WHERE X eid %%(x)s' % ','.join(relations), kwargs)
         self._query_cache.clear()
-        entity.clear_all_caches()
+        entity.cw_clear_all_caches()
 
     def delete_entity(self, session, entity):
         """delete an entity from the source"""
-        cu = session.pool[self.uri]
+        cu = session.cnxset[self.uri]
         cu.execute('DELETE %s X WHERE X eid %%(x)s' % entity.__regid__,
                    {'x': self.eid2extid(entity.eid, session)})
         self._query_cache.clear()
 
     def add_relation(self, session, subject, rtype, object):
         """add a relation to the source"""
-        cu = session.pool[self.uri]
+        cu = session.cnxset[self.uri]
         cu.execute('SET X %s Y WHERE X eid %%(x)s, Y eid %%(y)s' % rtype,
                    {'x': self.eid2extid(subject, session),
                     'y': self.eid2extid(object, session)})
         self._query_cache.clear()
-        session.entity_from_eid(subject).clear_all_caches()
-        session.entity_from_eid(object).clear_all_caches()
+        session.entity_from_eid(subject).cw_clear_all_caches()
+        session.entity_from_eid(object).cw_clear_all_caches()
 
     def delete_relation(self, session, subject, rtype, object):
         """delete a relation from the source"""
-        cu = session.pool[self.uri]
+        cu = session.cnxset[self.uri]
         cu.execute('DELETE X %s Y WHERE X eid %%(x)s, Y eid %%(y)s' % rtype,
                    {'x': self.eid2extid(subject, session),
                     'y': self.eid2extid(object, session)})
         self._query_cache.clear()
-        session.entity_from_eid(subject).clear_all_caches()
-        session.entity_from_eid(object).clear_all_caches()
+        session.entity_from_eid(subject).cw_clear_all_caches()
+        session.entity_from_eid(object).cw_clear_all_caches()
 
 
 class RQL2RQL(object):
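
entity.clear_all_caches() is renamed cw_clear_all_caches(), in line with the
cw_-prefixed Entity API. Cube code that must run on both sides of the rename
can use a shim like this (a compatibility sketch, not part of the patch):

def clear_entity_caches(entity):
    """call the 3.13 cw_clear_all_caches() when available, falling back
    to the pre-3.13 name"""
    meth = getattr(entity, 'cw_clear_all_caches', None)
    if meth is None:
        meth = entity.clear_all_caches
    meth()
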
--- a/server/sources/rql2sql.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/server/sources/rql2sql.py	Tue Jun 28 16:33:53 2011 +0200
@@ -1366,6 +1366,8 @@
                 operator = ' LIKE '
             else:
                 operator = ' %s ' % operator
+        elif operator == 'REGEXP':
+            return ' %s' % self.dbhelper.sql_regexp_match_expression(rhs.accept(self))
         elif (operator == '=' and isinstance(rhs, Constant)
               and rhs.eval(self._args) is None):
             if lhs is None:
@@ -1416,6 +1418,8 @@
         if constant.type is None:
             return 'NULL'
         value = constant.value
+        if constant.type == 'etype':
+            return value
         if constant.type == 'Int' and  isinstance(constant.parent, SortTerm):
             return value
         if constant.type in ('Date', 'Datetime'):
@@ -1578,8 +1582,14 @@
             scope = self._state.scopes[var.scope]
             self._state.add_table(sql.split('.', 1)[0], scope=scope)
         except KeyError:
-            sql = '%s.%s%s' % (self._var_table(var), SQL_PREFIX, rtype)
-            #self._state.done.add(var.name)
+            # rtype may be an attribute relation when called from
+            # _visit_var_attr_relation. Take care with the 'eid' rtype: in
+            # some cases we may use the `entities` table, and we then have to
+            # use the variable's sql
+            if rtype == 'eid':
+                sql = var.accept(self)
+            else:
+                sql = '%s.%s%s' % (self._var_table(var), SQL_PREFIX, rtype)
         return sql
 
     def _linked_var_sql(self, variable):
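
The first rql2sql hunk adds a REGEXP comparison operator, delegated to the
database helper's sql_regexp_match_expression, so the pattern syntax is the
backend's (e.g. POSIX regexps on PostgreSQL). A hedged usage sketch from the
RQL side:

def cards_matching(session, pattern):
    """sketch of the new REGEXP operator in an RQL query; exact pattern
    semantics depend on the SQL backend"""
    return session.execute('Any X WHERE X is Card, X title T, '
                           'T REGEXP %(p)s', {'p': pattern})
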
--- a/server/sources/storages.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/server/sources/storages.py	Tue Jun 28 16:33:53 2011 +0200
@@ -204,7 +204,7 @@
         """return the current fs_path of the attribute.
 
         Return None if the attribute is not stored yet."""
-        sysource = entity._cw.pool.source('system')
+        sysource = entity._cw.cnxset.source('system')
         cu = sysource.doexec(entity._cw,
                              'SELECT cw_%s FROM cw_%s WHERE cw_eid=%s' % (
                              attr, entity.__regid__, entity.eid))
--- a/server/test/unittest_datafeed.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/server/test/unittest_datafeed.py	Tue Jun 28 16:33:53 2011 +0200
@@ -39,7 +39,7 @@
 
         class AParser(datafeed.DataFeedParser):
             __regid__ = 'testparser'
-            def process(self, url):
+            def process(self, url, raise_on_error=False):
                 entity = self.extid2entity('http://www.cubicweb.org/', 'Card',
                                   item={'title': u'cubicweb.org',
                                         'content': u'the cw web site'})
@@ -64,12 +64,13 @@
             self.assertEqual(entity.cw_source[0].name, 'myfeed')
             self.assertEqual(entity.cw_metainformation(),
                              {'type': 'Card',
-                              'source': {'uri': 'system', 'type': 'native'},
+                              'source': {'uri': 'myfeed', 'type': 'datafeed'},
                               'extid': 'http://www.cubicweb.org/'}
                              )
+            self.assertEqual(entity.absolute_url(), 'http://www.cubicweb.org/')
             # test repo cache keys
             self.assertEqual(self.repo._type_source_cache[entity.eid],
-                             ('Card', 'system', 'http://www.cubicweb.org/'))
+                             ('Card', 'system', 'http://www.cubicweb.org/', 'myfeed'))
             self.assertEqual(self.repo._extid_cache[('http://www.cubicweb.org/', 'system')],
                              entity.eid)
             # test repull
@@ -83,7 +84,7 @@
             self.assertEqual(stats['created'], set())
             self.assertEqual(stats['updated'], set((entity.eid,)))
             self.assertEqual(self.repo._type_source_cache[entity.eid],
-                             ('Card', 'system', 'http://www.cubicweb.org/'))
+                             ('Card', 'system', 'http://www.cubicweb.org/', 'myfeed'))
             self.assertEqual(self.repo._extid_cache[('http://www.cubicweb.org/', 'system')],
                              entity.eid)
 
--- a/server/test/unittest_hook.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/server/test/unittest_hook.py	Tue Jun 28 16:33:53 2011 +0200
@@ -23,7 +23,7 @@
 from logilab.common.testlib import TestCase, unittest_main, mock_object
 
 
-from cubicweb.devtools import TestServerConfiguration
+from cubicweb.devtools import TestServerConfiguration, fake
 from cubicweb.devtools.testlib import CubicWebTC
 from cubicweb.server import hook
 from cubicweb.hooks import integrity, syncschema
@@ -124,10 +124,8 @@
     def test_call_hook(self):
         self.o.register(AddAnyHook)
         dis = set()
-        cw = mock_object(vreg=self.vreg,
-                         set_read_security=lambda *a,**k: None,
-                         set_write_security=lambda *a,**k: None,
-                         is_hook_activated=lambda x, cls: cls.category not in dis)
+        cw = fake.FakeSession()
+        cw.is_hook_activated = lambda cls: cls.category not in dis
         self.assertRaises(HookCalled,
                           self.o.call_hooks, 'before_add_entity', cw)
         dis.add('cat1')
@@ -203,10 +201,10 @@
 #         self.assertEqual(self.called, [(1, 'concerne', 2), (3, 'concerne', 4)])
 
 
-#     def _before_relation_hook(self, pool, subject, r_type, object):
+#     def _before_relation_hook(self, cnxset, subject, r_type, object):
 #         self.called.append((subject, r_type, object))
 
-#     def _after_relation_hook(self, pool, subject, r_type, object):
+#     def _after_relation_hook(self, cnxset, subject, r_type, object):
 #         self.called.append((subject, r_type, object))
 
 
--- a/server/test/unittest_ldapuser.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/server/test/unittest_ldapuser.py	Tue Jun 28 16:33:53 2011 +0200
@@ -137,7 +137,7 @@
 
     def test_authenticate(self):
         source = self.repo.sources_by_uri['ldapuser']
-        self.session.set_pool()
+        self.session.set_cnxset()
         self.assertRaises(AuthenticationError,
                           source.authenticate, self.session, 'toto', 'toto')
 
@@ -239,7 +239,7 @@
         iworkflowable.fire_transition('deactivate')
         try:
             cnx.commit()
-            adim.clear_all_caches()
+            adim.cw_clear_all_caches()
             self.assertEqual(adim.in_state[0].name, 'deactivated')
             trinfo = iworkflowable.latest_trinfo()
             self.assertEqual(trinfo.owned_by[0].login, SYT)
@@ -265,7 +265,7 @@
         self.failUnless(self.sexecute('Any X,Y WHERE X login %(syt)s, Y login "cochon"', {'syt': SYT}))
 
     def test_exists1(self):
-        self.session.set_pool()
+        self.session.set_cnxset()
         self.session.create_entity('CWGroup', name=u'bougloup1')
         self.session.create_entity('CWGroup', name=u'bougloup2')
         self.sexecute('SET U in_group G WHERE G name ~= "bougloup%", U login "admin"')
@@ -378,6 +378,23 @@
         rset = cu.execute('Any F WHERE X has_text "iaminguestsgrouponly", X firstname F')
         self.assertEqual(rset.rows, [[None]])
 
+    def test_copy_to_system_source(self):
+        eid = self.sexecute('CWUser X WHERE X login %(login)s', {'login': SYT})[0][0]
+        self.sexecute('SET X cw_source S WHERE X eid %(x)s, S name "system"', {'x': eid})
+        self.commit()
+        rset = self.sexecute('CWUser X WHERE X login %(login)s', {'login': SYT})
+        self.assertEqual(len(rset), 1)
+        e = rset.get_entity(0, 0)
+        self.assertEqual(e.eid, eid)
+        self.assertEqual(e.cw_metainformation(), {'source': {'type': u'native', 'uri': u'system'},
+                                                  'type': 'CWUser',
+                                                  'extid': None})
+        self.assertEqual(e.cw_source[0].name, 'system')
+        source = self.repo.sources_by_uri['ldapuser']
+        source.synchronize()
+        rset = self.sexecute('CWUser X WHERE X login %(login)s', {'login': SYT})
+        self.assertEqual(len(rset), 1)
+
     def test_nonregr1(self):
         self.sexecute('Any X,AA ORDERBY AA DESC WHERE E eid %(x)s, E owned_by X, '
                      'X modification_date AA',
@@ -465,8 +482,8 @@
         self._schema = repo.schema
         super(RQL2LDAPFilterTC, self).setUp()
         ldapsource = repo.sources[-1]
-        self.pool = repo._get_pool()
-        session = mock_object(pool=self.pool)
+        self.cnxset = repo._get_cnxset()
+        session = mock_object(cnxset=self.cnxset)
         self.o = RQL2LDAPFilter(ldapsource, session)
         self.ldapclasses = ''.join(ldapsource.base_filters)
 
--- a/server/test/unittest_migractions.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/server/test/unittest_migractions.py	Tue Jun 28 16:33:53 2011 +0200
@@ -338,7 +338,7 @@
     @tag('longrun')
     def test_sync_schema_props_perms(self):
         cursor = self.mh.session
-        cursor.set_pool()
+        cursor.set_cnxset()
         nbrqlexpr_start = cursor.execute('Any COUNT(X) WHERE X is RQLExpression')[0][0]
         migrschema['titre'].rdefs[('Personne', 'String')].order = 7
         migrschema['adel'].rdefs[('Personne', 'String')].order = 6
--- a/server/test/unittest_msplanner.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/server/test/unittest_msplanner.py	Tue Jun 28 16:33:53 2011 +0200
@@ -296,7 +296,7 @@
                    True)
 
     def test_not_relation_no_split_external(self):
-        repo._type_source_cache[999999] = ('Note', 'cards', 999999)
+        repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
         # similar to the above test but with an eid coming from the external source.
         # the same plan may be used, since we won't find any record in the system source
         # linking 9999999 to a state
@@ -313,7 +313,7 @@
                    True)
 
     def test_simplified_var(self):
-        repo._type_source_cache[999999] = ('Note', 'cards', 999999)
+        repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
         self._test('Any U WHERE U in_group G, (G name IN ("managers", "logilab") OR (X require_permission P?, P name "bla", P require_group G)), X eid %(x)s, U eid %(u)s',
                    {'x': 999999, 'u': self.session.user.eid},
                    {self.system: {'P': s[0], 'G': s[0], 'X': s[0],
@@ -329,7 +329,7 @@
                    False)
 
     def test_crossed_relation_eid_1_needattr(self):
-        repo._type_source_cache[999999] = ('Note', 'system', 999999)
+        repo._type_source_cache[999999] = ('Note', 'system', 999999, 'system')
         ueid = self.session.user.eid
         self._test('Any Y,T WHERE X eid %(x)s, X multisource_crossed_rel Y, Y type T',
                    {'x': 999999,},
@@ -337,14 +337,14 @@
                    True)
 
     def test_crossed_relation_eid_1_invariant(self):
-        repo._type_source_cache[999999] = ('Note', 'system', 999999)
+        repo._type_source_cache[999999] = ('Note', 'system', 999999, 'system')
         self._test('Any Y WHERE X eid %(x)s, X multisource_crossed_rel Y',
                    {'x': 999999},
                    {self.system: {'Y': s[0], 'x': s[0]}},
                    False)
 
     def test_crossed_relation_eid_2_invariant(self):
-        repo._type_source_cache[999999] = ('Note', 'cards', 999999)
+        repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
         self._test('Any Y WHERE X eid %(x)s, X multisource_crossed_rel Y',
                    {'x': 999999,},
                    {self.cards: {'Y': s[0], 'multisource_crossed_rel': s[0], 'x': s[0]},
@@ -352,7 +352,7 @@
                    False)
 
     def test_version_crossed_depends_on_1(self):
-        repo._type_source_cache[999999] = ('Note', 'cards', 999999)
+        repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
         self._test('Any X,AD,AE WHERE E eid %(x)s, E multisource_crossed_rel X, X in_state AD, AD name AE',
                    {'x': 999999},
                    {self.cards: {'X': s[0], 'AD': s[0], 'multisource_crossed_rel': s[0], 'x': s[0]},
@@ -360,7 +360,7 @@
                    True)
 
     def test_version_crossed_depends_on_2(self):
-        repo._type_source_cache[999999] = ('Note', 'system', 999999)
+        repo._type_source_cache[999999] = ('Note', 'system', 999999, 'system')
         self._test('Any X,AD,AE WHERE E eid %(x)s, E multisource_crossed_rel X, X in_state AD, AD name AE',
                    {'x': 999999},
                    {self.cards: {'X': s[0], 'AD': s[0]},
@@ -368,8 +368,8 @@
                     True)
 
     def test_simplified_var_3(self):
-        repo._type_source_cache[999999] = ('Note', 'cards', 999999)
-        repo._type_source_cache[999998] = ('State', 'cards', 999998)
+        repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
+        repo._type_source_cache[999998] = ('State', 'cards', 999998, 'cards')
         self._test('Any S,T WHERE S eid %(s)s, N eid %(n)s, N type T, N is Note, S is State',
                    {'n': 999999, 's': 999998},
                    {self.cards: {'s': s[0], 'N': s[0]}}, False)
@@ -1266,7 +1266,7 @@
                    {'x': ueid})
 
     def test_not_relation_no_split_external(self):
-        repo._type_source_cache[999999] = ('Note', 'cards', 999999)
+        repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
         # similar to the above test but with an eid coming from the external source.
         # the same plan may be used, since we won't find any record in the system source
         # linking 9999999 to a state
@@ -1297,7 +1297,7 @@
                      )])
 
     def test_external_attributes_and_relation(self):
-        repo._type_source_cache[999999] = ('Note', 'cards', 999999)
+        repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
         self._test('Any A,B,C,D WHERE A eid %(x)s,A creation_date B,A modification_date C, A todo_by D?',
                    [('FetchStep', [('Any A,B,C WHERE A eid 999999, A creation_date B, A modification_date C, A is Note',
                                     [{'A': 'Note', 'C': 'Datetime', 'B': 'Datetime'}])],
@@ -1314,7 +1314,7 @@
 
     def test_simplified_var(self):
         ueid = self.session.user.eid
-        repo._type_source_cache[999999] = ('Note', 'cards', 999999)
+        repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
         self._test('Any U WHERE U in_group G, (G name IN ("managers", "logilab") OR (X require_permission P?, P name "bla", P require_group G)), X eid %(x)s, U eid %(u)s',
                    [('OneFetchStep', [('Any %s WHERE %s in_group G, (G name IN("managers", "logilab")) OR (X require_permission P?, P name "bla", P require_group G), X eid 999999' % (ueid, ueid),
                                        [{'X': 'Note', 'G': 'CWGroup', 'P': 'CWPermission'}])],
@@ -1529,7 +1529,7 @@
                    {'E': ueid})
 
     def test_eid_dont_cross_relation_1(self):
-        repo._type_source_cache[999999] = ('Personne', 'system', 999999)
+        repo._type_source_cache[999999] = ('Personne', 'system', 999999, 'system')
         self._test('Any Y,YT WHERE X eid %(x)s, X fiche Y, Y title YT',
                    [('OneFetchStep', [('Any Y,YT WHERE X eid 999999, X fiche Y, Y title YT',
                                        [{'X': 'Personne', 'Y': 'Card', 'YT': 'String'}])],
@@ -1537,7 +1537,7 @@
                    {'x': 999999})
 
     def test_eid_dont_cross_relation_2(self):
-        repo._type_source_cache[999999] = ('Note', 'cards', 999999)
+        repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
         self.cards.dont_cross_relations.add('concerne')
         try:
             self._test('Any Y,S,YT,X WHERE Y concerne X, Y in_state S, X eid 999999, Y ref YT',
@@ -1552,7 +1552,7 @@
     # external source w/ .cross_relations == ['multisource_crossed_rel'] ######
 
     def test_crossed_relation_eid_1_invariant(self):
-        repo._type_source_cache[999999] = ('Note', 'system', 999999)
+        repo._type_source_cache[999999] = ('Note', 'system', 999999, 'system')
         self._test('Any Y WHERE X eid %(x)s, X multisource_crossed_rel Y',
                    [('OneFetchStep', [('Any Y WHERE 999999 multisource_crossed_rel Y', [{u'Y': 'Note'}])],
                       None, None, [self.system], {}, [])
@@ -1560,7 +1560,7 @@
                    {'x': 999999,})
 
     def test_crossed_relation_eid_1_needattr(self):
-        repo._type_source_cache[999999] = ('Note', 'system', 999999)
+        repo._type_source_cache[999999] = ('Note', 'system', 999999, 'system')
         self._test('Any Y,T WHERE X eid %(x)s, X multisource_crossed_rel Y, Y type T',
                    [('FetchStep', [('Any Y,T WHERE Y type T, Y is Note', [{'T': 'String', 'Y': 'Note'}])],
                      [self.cards, self.system], None,
@@ -1573,7 +1573,7 @@
                    {'x': 999999,})
 
     def test_crossed_relation_eid_2_invariant(self):
-        repo._type_source_cache[999999] = ('Note', 'cards', 999999)
+        repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
         self._test('Any Y WHERE X eid %(x)s, X multisource_crossed_rel Y',
                    [('OneFetchStep', [('Any Y WHERE 999999 multisource_crossed_rel Y, Y is Note', [{'Y': 'Note'}])],
                       None, None, [self.cards, self.system], {}, [])
@@ -1581,7 +1581,7 @@
                    {'x': 999999,})
 
     def test_crossed_relation_eid_2_needattr(self):
-        repo._type_source_cache[999999] = ('Note', 'cards', 999999)
+        repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
         self._test('Any Y,T WHERE X eid %(x)s, X multisource_crossed_rel Y, Y type T',
                    [('OneFetchStep', [('Any Y,T WHERE 999999 multisource_crossed_rel Y, Y type T, Y is Note',
                                        [{'T': 'String', 'Y': 'Note'}])],
@@ -1591,7 +1591,7 @@
                    {'x': 999999,})
 
     def test_crossed_relation_eid_not_1(self):
-        repo._type_source_cache[999999] = ('Note', 'system', 999999)
+        repo._type_source_cache[999999] = ('Note', 'system', 999999, 'system')
         self._test('Any Y WHERE X eid %(x)s, NOT X multisource_crossed_rel Y',
                    [('FetchStep', [('Any Y WHERE Y is Note', [{'Y': 'Note'}])],
                      [self.cards, self.system], None, {'Y': 'table0.C0'}, []),
@@ -1608,7 +1608,7 @@
 #                    {'x': 999999,})
 
     def test_crossed_relation_base_XXXFIXME(self):
-        repo._type_source_cache[999999] = ('Note', 'system', 999999)
+        repo._type_source_cache[999999] = ('Note', 'system', 999999, 'system')
         self._test('Any X,Y,T WHERE X multisource_crossed_rel Y, Y type T, X type T',
                    [('FetchStep', [('Any X,T WHERE X type T, X is Note', [{'T': 'String', 'X': 'Note'}])],
                      [self.cards, self.system], None,
@@ -1697,8 +1697,8 @@
     # edition queries tests ###################################################
 
     def test_insert_simplified_var_1(self):
-        repo._type_source_cache[999999] = ('Note', 'cards', 999999)
-        repo._type_source_cache[999998] = ('State', 'system', None)
+        repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
+        repo._type_source_cache[999998] = ('State', 'system', None, 'system')
         self._test('INSERT Note X: X in_state S, X type T WHERE S eid %(s)s, N eid %(n)s, N type T',
                    [('InsertStep',
                      [('InsertRelationsStep',
@@ -1710,8 +1710,8 @@
                    {'n': 999999, 's': 999998})
 
     def test_insert_simplified_var_2(self):
-        repo._type_source_cache[999999] = ('Note', 'cards', 999999)
-        repo._type_source_cache[999998] = ('State', 'system', None)
+        repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
+        repo._type_source_cache[999998] = ('State', 'system', None, 'system')
         self._test('INSERT Note X: X in_state S, X type T, X migrated_from N WHERE S eid %(s)s, N eid %(n)s, N type T',
                    [('InsertStep',
                      [('InsertRelationsStep',
@@ -1724,8 +1724,8 @@
                    {'n': 999999, 's': 999998})
 
     def test_insert_simplified_var_3(self):
-        repo._type_source_cache[999999] = ('Note', 'cards', 999999)
-        repo._type_source_cache[999998] = ('State', 'cards', 999998)
+        repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
+        repo._type_source_cache[999998] = ('State', 'cards', 999998, 'cards')
         self._test('INSERT Note X: X in_state S, X type T WHERE S eid %(s)s, N eid %(n)s, N type T',
                    [('InsertStep',
                      [('InsertRelationsStep',
@@ -1737,8 +1737,8 @@
                    {'n': 999999, 's': 999998})
 
     def test_insert_simplified_var_4(self):
-        repo._type_source_cache[999999] = ('Note', 'cards', 999999)
-        repo._type_source_cache[999998] = ('State', 'system', None)
+        repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
+        repo._type_source_cache[999998] = ('State', 'system', None, 'system')
         self._test('INSERT Note X: X in_state S, X type "bla", X migrated_from N WHERE S eid %(s)s, N eid %(n)s',
                    [('InsertStep',
                       [('InsertRelationsStep', [])]
@@ -1746,8 +1746,8 @@
                    {'n': 999999, 's': 999998})
 
     def test_insert_simplified_var_5(self):
-        repo._type_source_cache[999999] = ('Note', 'cards', 999999)
-        repo._type_source_cache[999998] = ('State', 'system', None)
+        repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
+        repo._type_source_cache[999998] = ('State', 'system', None, 'system')
         self._test('INSERT Note X: X in_state S, X type "bla", X migrated_from N WHERE S eid %(s)s, N eid %(n)s, A concerne N',
                    [('InsertStep',
                      [('InsertRelationsStep',
@@ -1784,7 +1784,7 @@
                    {'x': ueid, 'y': ueid})
 
     def test_delete_relation3(self):
-        repo._type_source_cache[999999] = ('Note', 'cards', 999999)
+        repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
         self._test('DELETE Y multisource_inlined_rel X WHERE X eid %(x)s, NOT (Y cw_source S, S name %(source)s)',
                    [('DeleteRelationsStep',
                      [('OneFetchStep',
@@ -1796,7 +1796,7 @@
                    {'x': 999999, 'source': 'cards'})
 
     def test_delete_entity1(self):
-        repo._type_source_cache[999999] = ('Note', 'system', 999999)
+        repo._type_source_cache[999999] = ('Note', 'system', 999999, 'system')
         self._test('DELETE Note X WHERE X eid %(x)s, NOT Y multisource_rel X',
                    [('DeleteEntitiesStep',
                      [('OneFetchStep', [('Any 999999 WHERE NOT EXISTS(Y multisource_rel 999999), Y is IN(Card, Note)',
@@ -1807,7 +1807,7 @@
                    {'x': 999999})
 
     def test_delete_entity2(self):
-        repo._type_source_cache[999999] = ('Note', 'system', 999999)
+        repo._type_source_cache[999999] = ('Note', 'system', 999999, 'system')
         self._test('DELETE Note X WHERE X eid %(x)s, NOT X multisource_inlined_rel Y',
                    [('DeleteEntitiesStep',
                      [('OneFetchStep', [('Any X WHERE X eid 999999, NOT X multisource_inlined_rel Y, X is Note, Y is IN(Affaire, Note)',
@@ -1872,7 +1872,7 @@
 #                     ])
 
     def test_ldap_user_related_to_invariant_and_dont_cross_rel(self):
-        self.repo._type_source_cache[999999] = ('Note', 'cards', 999999)
+        self.repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
         self.cards.dont_cross_relations.add('created_by')
         try:
             self._test('Any X,XL WHERE E eid %(x)s, E created_by X, X login XL',
@@ -1893,7 +1893,7 @@
             self.cards.dont_cross_relations.remove('created_by')
 
     def test_ambigous_cross_relation(self):
-        self.repo._type_source_cache[999999] = ('Note', 'cards', 999999)
+        self.repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
         self.cards.support_relations['see_also'] = True
         self.cards.cross_relations.add('see_also')
         try:
@@ -2044,7 +2044,7 @@
                     ])
 
     def test_source_conflict_1(self):
-        self.repo._type_source_cache[999999] = ('Note', 'cards', 999999)
+        self.repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
         with self.assertRaises(BadRQLQuery) as cm:
             self._test('Any X WHERE X cw_source S, S name "system", X eid %(x)s',
                        [], {'x': 999999})
@@ -2067,7 +2067,7 @@
 
 
     def test_ambigous_cross_relation_source_specified(self):
-        self.repo._type_source_cache[999999] = ('Note', 'cards', 999999)
+        self.repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
         self.cards.support_relations['see_also'] = True
         self.cards.cross_relations.add('see_also')
         try:
@@ -2198,7 +2198,7 @@
                     ])
 
     def test_nonregr7(self):
-        repo._type_source_cache[999999] = ('Note', 'cards', 999999)
+        repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
         self._test('Any S,SUM(DUR),SUM(I),(SUM(I) - SUM(DUR)),MIN(DI),MAX(DI) GROUPBY S ORDERBY S WHERE A is Affaire, A duration DUR, A invoiced I, A modification_date DI, A in_state S, S name SN, (EXISTS(A concerne WP, W multisource_rel WP)) OR (EXISTS(A concerne W)), W eid %(n)s',
                    [('FetchStep', [('Any WP WHERE 999999 multisource_rel WP, WP is Note', [{'WP': 'Note'}])],
                      [self.cards], None, {'WP': u'table0.C0'}, []),
@@ -2208,7 +2208,7 @@
                    {'n': 999999})
 
     def test_nonregr8(self):
-        repo._type_source_cache[999999] = ('Note', 'cards', 999999)
+        repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
         self._test('Any X,Z WHERE X eid %(x)s, X multisource_rel Y, Z concerne X',
                    [('FetchStep', [('Any 999999 WHERE 999999 multisource_rel Y, Y is Note',
                                     [{'Y': 'Note'}])],
@@ -2223,8 +2223,8 @@
                    {'x': 999999})
 
     def test_nonregr9(self):
-        repo._type_source_cache[999999] = ('Note', 'cards', 999999)
-        repo._type_source_cache[999998] = ('Note', 'cards', 999998)
+        repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
+        repo._type_source_cache[999998] = ('Note', 'cards', 999998, 'cards')
         self._test('SET X migrated_from Y WHERE X eid %(x)s, Y multisource_rel Z, Z eid %(z)s, Y migrated_from Z',
                    [('FetchStep', [('Any Y WHERE Y multisource_rel 999998, Y is Note', [{'Y': 'Note'}])],
                      [self.cards], None, {'Y': u'table0.C0'}, []),
@@ -2236,7 +2236,7 @@
                    {'x': 999999, 'z': 999998})
 
     def test_nonregr10(self):
-        repo._type_source_cache[999999] = ('CWUser', 'ldap', 999999)
+        repo._type_source_cache[999999] = ('CWUser', 'ldap', 999999, 'ldap')
         self._test('Any X,AA,AB ORDERBY AA WHERE E eid %(x)s, E owned_by X, X login AA, X modification_date AB',
                    [('FetchStep',
                      [('Any X,AA,AB WHERE X login AA, X modification_date AB, X is CWUser',
@@ -2254,7 +2254,7 @@
                    {'x': 999999})
 
     def test_nonregr11(self):
-        repo._type_source_cache[999999] = ('Bookmark', 'system', 999999)
+        repo._type_source_cache[999999] = ('Bookmark', 'system', 999999, 'system')
         self._test('SET X bookmarked_by Y WHERE X eid %(x)s, Y login "hop"',
                    [('UpdateStep',
                      [('OneFetchStep', [('DISTINCT Any Y WHERE Y login "hop", Y is CWUser', [{'Y': 'CWUser'}])],
@@ -2263,7 +2263,7 @@
                    {'x': 999999})
 
     def test_nonregr12(self):
-        repo._type_source_cache[999999] = ('Note', 'cards', 999999)
+        repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
         self._test('Any X ORDERBY Z DESC WHERE X modification_date Z, E eid %(x)s, E see_also X',
                    [('FetchStep', [('Any X,Z WHERE X modification_date Z, X is Note',
                                     [{'X': 'Note', 'Z': 'Datetime'}])],
@@ -2347,38 +2347,38 @@
                    {'x': self.session.user.eid})
 
     def test_nonregr14_1(self):
-        repo._type_source_cache[999999] = ('CWUser', 'ldap', 999999)
+        repo._type_source_cache[999999] = ('CWUser', 'ldap', 999999, 'ldap')
         self._test('Any X WHERE X eid %(x)s, X owned_by U, U eid %(u)s',
                    [('OneFetchStep', [('Any 999999 WHERE 999999 owned_by 999999', [{}])],
                      None, None, [self.system], {}, [])],
                    {'x': 999999, 'u': 999999})
 
     def test_nonregr14_2(self):
-        repo._type_source_cache[999999] = ('CWUser', 'ldap', 999999)
-        repo._type_source_cache[999998] = ('Note', 'system', 999998)
+        repo._type_source_cache[999999] = ('CWUser', 'ldap', 999999, 'ldap')
+        repo._type_source_cache[999998] = ('Note', 'system', 999998, 'system')
         self._test('Any X WHERE X eid %(x)s, X owned_by U, U eid %(u)s',
                    [('OneFetchStep', [('Any 999998 WHERE 999998 owned_by 999999', [{}])],
                      None, None, [self.system], {}, [])],
                    {'x': 999998, 'u': 999999})
 
     def test_nonregr14_3(self):
-        repo._type_source_cache[999999] = ('CWUser', 'system', 999999)
-        repo._type_source_cache[999998] = ('CWUser', 'ldap', 999998)
+        repo._type_source_cache[999999] = ('CWUser', 'system', 999999, 'system')
+        repo._type_source_cache[999998] = ('CWUser', 'ldap', 999998, 'ldap')
         self._test('Any X WHERE X eid %(x)s, X owned_by U, U eid %(u)s',
                    [('OneFetchStep', [('Any 999998 WHERE 999998 owned_by 999999', [{}])],
                      None, None, [self.system], {}, [])],
                    {'x': 999998, 'u': 999999})
 
     def test_nonregr_identity_no_source_access_1(self):
-        repo._type_source_cache[999999] = ('CWUser', 'ldap', 999998)
+        repo._type_source_cache[999999] = ('CWUser', 'ldap', 999998, 'ldap')
         self._test('Any S WHERE S identity U, S eid %(s)s, U eid %(u)s',
                    [('OneFetchStep', [('Any 999999 WHERE 999999 identity 999999', [{}])],
                      None, None, [self.system], {}, [])],
                    {'s': 999999, 'u': 999999})
 
     def test_nonregr_identity_no_source_access_2(self):
-        repo._type_source_cache[999999] = ('EmailAddress', 'system', 999999)
-        repo._type_source_cache[999998] = ('CWUser', 'ldap', 999998)
+        repo._type_source_cache[999999] = ('EmailAddress', 'system', 999999, 'system')
+        repo._type_source_cache[999998] = ('CWUser', 'ldap', 999998, 'ldap')
         self._test('Any X WHERE O use_email X, ((EXISTS(O identity U)) OR (EXISTS(O in_group G, G name IN("managers", "staff")))) OR (EXISTS(O in_group G2, U in_group G2, NOT G2 name "users")), X eid %(x)s, U eid %(u)s',
                    [('OneFetchStep', [('Any 999999 WHERE O use_email 999999, ((EXISTS(O identity 999998)) OR (EXISTS(O in_group G, G name IN("managers", "staff")))) OR (EXISTS(O in_group G2, 999998 in_group G2, NOT G2 name "users"))',
                                        [{'G': 'CWGroup', 'G2': 'CWGroup', 'O': 'CWUser'}])],
@@ -2386,7 +2386,7 @@
                    {'x': 999999, 'u': 999998})
 
     def test_nonregr_similar_subquery(self):
-        repo._type_source_cache[999999] = ('Personne', 'system', 999999)
+        repo._type_source_cache[999999] = ('Personne', 'system', 999999, 'system')
         self._test('Any T,TD,U,T,UL WITH T,TD,U,UL BEING ('
                    '(Any T,TD,U,UL WHERE X eid %(x)s, T comments X, T content TD, T created_by U?, U login UL)'
                    ' UNION '
@@ -2456,7 +2456,7 @@
 
 
     def test_linked_external_entities(self):
-        repo._type_source_cache[999999] = ('Tag', 'system', 999999)
+        repo._type_source_cache[999999] = ('Tag', 'system', 999999, 'system')
         self._test('Any X,XT WHERE X is Card, X title XT, T tags X, T eid %(t)s',
                    [('FetchStep',
                      [('Any X,XT WHERE X title XT, X is Card', [{'X': 'Card', 'XT': 'String'}])],
@@ -2472,7 +2472,7 @@
                    {'t': 999999})
 
     def test_version_depends_on(self):
-        self.repo._type_source_cache[999999] = ('Note', 'cards', 999999)
+        self.repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
         self._test('Any X,AD,AE WHERE E eid %(x)s, E migrated_from X, X in_state AD, AD name AE',
                    [('FetchStep', [('Any X,AD,AE WHERE X in_state AD, AD name AE, AD is State, X is Note',
                                     [{'AD': 'State', 'AE': 'String', 'X': 'Note'}])],
@@ -2488,7 +2488,7 @@
                    {'x': 999999})
 
     def test_version_crossed_depends_on_1(self):
-        self.repo._type_source_cache[999999] = ('Note', 'cards', 999999)
+        self.repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
         self._test('Any X,AD,AE WHERE E eid %(x)s, E multisource_crossed_rel X, X in_state AD, AD name AE',
                    [('FetchStep', [('Any X,AD,AE WHERE X in_state AD, AD name AE, AD is State, X is Note',
                                     [{'AD': 'State', 'AE': 'String', 'X': 'Note'}])],
@@ -2511,7 +2511,7 @@
                    {'x': 999999})
 
     def test_version_crossed_depends_on_2(self):
-        self.repo._type_source_cache[999999] = ('Note', 'system', 999999)
+        self.repo._type_source_cache[999999] = ('Note', 'system', 999999, 'system')
         self._test('Any X,AD,AE WHERE E eid %(x)s, E multisource_crossed_rel X, X in_state AD, AD name AE',
                    [('FetchStep', [('Any X,AD,AE WHERE X in_state AD, AD name AE, AD is State, X is Note',
                                     [{'AD': 'State', 'AE': 'String', 'X': 'Note'}])],
@@ -2587,7 +2587,7 @@
                        )
 
     def test_nonregr_dont_cross_rel_source_filtering_1(self):
-        self.repo._type_source_cache[999999] = ('Note', 'cards', 999999)
+        self.repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
         self._test('Any S WHERE E eid %(x)s, E in_state S, NOT S name "moved"',
                    [('OneFetchStep', [('Any S WHERE 999999 in_state S, NOT S name "moved", S is State',
                                        [{'S': 'State'}])],
@@ -2596,7 +2596,7 @@
                    {'x': 999999})
 
     def test_nonregr_dont_cross_rel_source_filtering_2(self):
-        self.repo._type_source_cache[999999] = ('Note', 'cards', 999999)
+        self.repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
         self._test('Any X,AA,AB WHERE E eid %(x)s, E in_state X, X name AA, X modification_date AB',
                    [('OneFetchStep', [('Any X,AA,AB WHERE 999999 in_state X, X name AA, X modification_date AB, X is State',
                                        [{'AA': 'String', 'AB': 'Datetime', 'X': 'State'}])],
@@ -2605,7 +2605,7 @@
                    {'x': 999999})
 
     def test_nonregr_eid_query(self):
-        self.repo._type_source_cache[999999] = ('Note', 'cards', 999999)
+        self.repo._type_source_cache[999999] = ('Note', 'cards', 999999, 'cards')
         self._test('Any X WHERE X eid 999999',
                    [('OneFetchStep', [('Any 999999', [{}])],
                      None, None, [self.system], {}, []
@@ -2707,17 +2707,17 @@
                     ])
 
     def test_fully_simplified_extsource(self):
-        self.repo._type_source_cache[999998] = ('Note', 'vcs', 999998)
-        self.repo._type_source_cache[999999] = ('Note', 'vcs', 999999)
+        self.repo._type_source_cache[999998] = ('Note', 'vcs', 999998, 'vcs')
+        self.repo._type_source_cache[999999] = ('Note', 'vcs', 999999, 'vcs')
         self._test('Any X, Y WHERE NOT X multisource_rel Y, X eid 999998, Y eid 999999',
                    [('OneFetchStep', [('Any 999998,999999 WHERE NOT EXISTS(999998 multisource_rel 999999)', [{}])],
                      None, None, [self.vcs], {}, [])
                     ])
 
     def test_nonregr_fully_simplified_extsource(self):
-        self.repo._type_source_cache[999998] = ('Note', 'vcs', 999998)
-        self.repo._type_source_cache[999999] = ('Note', 'vcs', 999999)
-        self.repo._type_source_cache[1000000] = ('Note', 'system', 1000000)
+        self.repo._type_source_cache[999998] = ('Note', 'vcs', 999998, 'vcs')
+        self.repo._type_source_cache[999999] = ('Note', 'vcs', 999999, 'vcs')
+        self.repo._type_source_cache[1000000] = ('Note', 'system', 1000000, 'system')
         self._test('DISTINCT Any T,FALSE,L,M WHERE L eid 1000000, M eid 999999, T eid 999998',
                    [('OneFetchStep', [('DISTINCT Any 999998,FALSE,1000000,999999', [{}])],
                      None, None, [self.system], {}, [])
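
Note: throughout this hunk series, repo._type_source_cache entries grow from
3-tuples to 4-tuples: besides the entity type, the mapped source URI and the
extid, the cache now also records the URI of the actual source hosting the
entity. A minimal sketch of the layout these tests assume (field names are
illustrative, not the cubicweb implementation):

    from collections import namedtuple

    # (entity type, mapped source URI, extid, actual source URI)
    TypeSourceInfo = namedtuple('TypeSourceInfo',
                                'etype source_uri extid actual_source')

    cache = {}
    cache[999999] = TypeSourceInfo('Note', 'cards', 999999, 'cards')
    assert cache[999999].actual_source == 'cards'
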
--- a/server/test/unittest_querier.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/server/test/unittest_querier.py	Tue Jun 28 16:33:53 2011 +0200
@@ -311,6 +311,14 @@
         seid = self.execute('State X WHERE X name "deactivated"')[0][0]
         rset = self.execute('Any U,L,S GROUPBY U,L,S WHERE X in_state S, U login L, S eid %s' % seid)
 
+    def test_select_groupby_funccall(self):
+        rset = self.execute('Any YEAR(CD), COUNT(X) GROUPBY YEAR(CD) WHERE X is CWUser, X creation_date CD')
+        self.assertListEqual(rset.rows, [[date.today().year, 2]])
+
+    def test_select_groupby_colnumber(self):
+        rset = self.execute('Any YEAR(CD), COUNT(X) GROUPBY 1 WHERE X is CWUser, X creation_date CD')
+        self.assertListEqual(rset.rows, [[date.today().year, 2]])
+
     def test_select_complex_orderby(self):
         rset1 = self.execute('Any N ORDERBY N WHERE X name N')
         self.assertEqual(sorted(rset1.rows), rset1.rows)
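
The two tests above check that a GROUPBY term may be a function call
(YEAR(CD)) or a positional column number, both grouping users by creation
year. A hedged sketch of what this boils down to in SQL, using an in-memory
sqlite database rather than the actual test backend:

    import sqlite3

    cnx = sqlite3.connect(':memory:')
    cnx.execute('CREATE TABLE cw_CWUser (eid INTEGER, creation_date TEXT)')
    cnx.executemany('INSERT INTO cw_CWUser VALUES (?, ?)',
                    [(1, '2011-06-28'), (2, '2011-06-28')])
    # GROUP BY 1 groups on the first output column, i.e. the same computed
    # expression as grouping on strftime('%Y', creation_date) directly
    rows = cnx.execute("SELECT strftime('%Y', creation_date), COUNT(eid) "
                       "FROM cw_CWUser GROUP BY 1").fetchall()
    assert rows == [('2011', 2)]
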
@@ -443,6 +451,15 @@
             self.assertEqual(rset.rows[0][0], result)
             self.assertEqual(rset.description, [('Int',)])
 
+    def test_regexp_based_pattern_matching(self):
+        peid1 = self.execute("INSERT Personne X: X nom 'bidule'")[0][0]
+        peid2 = self.execute("INSERT Personne X: X nom 'cidule'")[0][0]
+        rset = self.execute('Any X WHERE X is Personne, X nom REGEXP "^b"')
+        self.assertEqual(len(rset.rows), 1, rset.rows)
+        self.assertEqual(rset.rows[0][0], peid1)
+        rset = self.execute('Any X WHERE X is Personne, X nom REGEXP "idu"')
+        self.assertEqual(len(rset.rows), 2, rset.rows)
+
     def test_select_aggregat_count(self):
         rset = self.execute('Any COUNT(X)')
         self.assertEqual(len(rset.rows), 1)
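
test_regexp_based_pattern_matching exercises the new REGEXP operator: the
anchored pattern "^b" matches only 'bidule' while the unanchored "idu"
matches both rows, i.e. REGEXP searches within the value rather than
matching the whole string. The same semantics in plain Python, as a hedged
reference point:

    import re

    noms = ['bidule', 'cidule']
    # anchored pattern: only values starting with 'b'
    assert [n for n in noms if re.search('^b', n)] == ['bidule']
    # unanchored pattern: substring search
    assert [n for n in noms if re.search('idu', n)] == ['bidule', 'cidule']
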
@@ -768,7 +785,7 @@
     def test_select_boolean(self):
         rset = self.execute('Any N WHERE X is CWEType, X name N, X final %(val)s',
                             {'val': True})
-        self.assertEqual(sorted(r[0] for r in rset.rows), ['Boolean', 'Bytes',
+        self.assertEqual(sorted(r[0] for r in rset.rows), ['BigInt', 'Boolean', 'Bytes',
                                                            'Date', 'Datetime',
                                                            'Decimal', 'Float',
                                                            'Int', 'Interval',
@@ -776,7 +793,7 @@
                                                            'TZDatetime', 'TZTime',
                                                            'Time'])
         rset = self.execute('Any N WHERE X is CWEType, X name N, X final TRUE')
-        self.assertEqual(sorted(r[0] for r in rset.rows), ['Boolean', 'Bytes',
+        self.assertEqual(sorted(r[0] for r in rset.rows), ['BigInt', 'Boolean', 'Bytes',
                                                            'Date', 'Datetime',
                                                            'Decimal', 'Float',
                                                            'Int', 'Interval',
@@ -1099,7 +1116,7 @@
         #'INSERT Email X: X messageid "<1234>", X subject "test", X sender Y, X recipients Y'
         eeid, = self.o.execute(s, 'INSERT Email X: X messageid "<1234>", X subject "test", X sender Y, X recipients Y WHERE Y is EmailAddress')[0]
         self.o.execute(s, "DELETE Email X")
-        sqlc = s.pool['system']
+        sqlc = s.cnxset['system']
         sqlc.execute('SELECT * FROM recipients_relation')
         self.assertEqual(len(sqlc.fetchall()), 0)
         sqlc.execute('SELECT * FROM owned_by_relation WHERE eid_from=%s'%eeid)
@@ -1212,7 +1229,7 @@
         self.assertEqual(rset.description, [('CWUser',)])
         self.assertRaises(Unauthorized,
                           self.execute, "Any P WHERE X is CWUser, X login 'bob', X upassword P")
-        cursor = self.pool['system']
+        cursor = self.cnxset['system']
         cursor.execute("SELECT %supassword from %sCWUser WHERE %slogin='bob'"
                        % (SQL_PREFIX, SQL_PREFIX, SQL_PREFIX))
         passwd = str(cursor.fetchone()[0])
@@ -1227,7 +1244,7 @@
         self.assertEqual(rset.description[0][0], 'CWUser')
         rset = self.execute("SET X upassword %(pwd)s WHERE X is CWUser, X login 'bob'",
                             {'pwd': 'tutu'})
-        cursor = self.pool['system']
+        cursor = self.cnxset['system']
         cursor.execute("SELECT %supassword from %sCWUser WHERE %slogin='bob'"
                        % (SQL_PREFIX, SQL_PREFIX, SQL_PREFIX))
         passwd = str(cursor.fetchone()[0])
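
From this file on, the patch systematically renames the session "pool" to
"cnxset" (connections set): s.pool['system'] becomes s.cnxset['system'],
set_pool() becomes set_cnxset(), and the closed-session error message
changes accordingly. A hedged sketch of a backward-compatibility shim for
such a rename (class and attribute names are illustrative, not cubicweb
API):

    import warnings

    class Session(object):
        def __init__(self):
            self.cnxset = None                   # new name: connections set

        def set_cnxset(self):
            self.cnxset = {'system': object()}   # placeholder cursor source

        # aliases keeping code that still uses the old names working
        @property
        def pool(self):
            warnings.warn('session.pool is now session.cnxset',
                          DeprecationWarning, stacklevel=2)
            return self.cnxset

        def set_pool(self):
            warnings.warn('set_pool() is now set_cnxset()',
                          DeprecationWarning, stacklevel=2)
            return self.set_cnxset()
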
--- a/server/test/unittest_repository.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/server/test/unittest_repository.py	Tue Jun 28 16:33:53 2011 +0200
@@ -24,6 +24,7 @@
 import sys
 import threading
 import time
+import logging
 from copy import deepcopy
 from datetime import datetime
 
@@ -62,13 +63,13 @@
             table = SQL_PREFIX + 'CWEType'
             namecol = SQL_PREFIX + 'name'
             finalcol = SQL_PREFIX + 'final'
-            self.session.set_pool()
+            self.session.set_cnxset()
             cu = self.session.system_sql('SELECT %s FROM %s WHERE %s is NULL' % (
                 namecol, table, finalcol))
             self.assertEqual(cu.fetchall(), [])
             cu = self.session.system_sql('SELECT %s FROM %s WHERE %s=%%(final)s ORDER BY %s'
                                          % (namecol, table, finalcol, namecol), {'final': 'TRUE'})
-            self.assertEqual(cu.fetchall(), [(u'Boolean',), (u'Bytes',),
+            self.assertEqual(cu.fetchall(), [(u'BigInt',), (u'Boolean',), (u'Bytes',),
                                              (u'Date',), (u'Datetime',),
                                              (u'Decimal',),(u'Float',),
                                              (u'Int',),
@@ -259,7 +260,7 @@
         cnxid = repo.connect(self.admlogin, password=self.admpassword)
         # rollback state change which trigger TrInfo insertion
         session = repo._get_session(cnxid)
-        session.set_pool()
+        session.set_cnxset()
         user = session.user
         user.cw_adapt_to('IWorkflowable').fire_transition('deactivate')
         rset = repo.execute(cnxid, 'TrInfo T WHERE T wf_info_for X, X eid %(x)s', {'x': user.eid})
@@ -292,7 +293,7 @@
         try:
             with self.assertRaises(Exception) as cm:
                 run_transaction()
-            self.assertEqual(str(cm.exception), 'try to access pool on a closed session')
+            self.assertEqual(str(cm.exception), 'try to access connections set on a closed session')
         finally:
             t.join()
 
@@ -382,9 +383,9 @@
     def test_internal_api(self):
         repo = self.repo
         cnxid = repo.connect(self.admlogin, password=self.admpassword)
-        session = repo._get_session(cnxid, setpool=True)
+        session = repo._get_session(cnxid, setcnxset=True)
         self.assertEqual(repo.type_and_source_from_eid(2, session),
-                         ('CWGroup', 'system', None))
+                         ('CWGroup', 'system', None, 'system'))
         self.assertEqual(repo.type_from_eid(2, session), 'CWGroup')
         self.assertEqual(repo.source_from_eid(2, session).uri, 'system')
         self.assertEqual(repo.eid2extid(repo.system_source, 2, session), None)
@@ -394,7 +395,10 @@
 
     def test_public_api(self):
         self.assertEqual(self.repo.get_schema(), self.repo.schema)
-        self.assertEqual(self.repo.source_defs(), {'system': {'type': 'native', 'uri': 'system'}})
+        self.assertEqual(self.repo.source_defs(), {'system': {'type': 'native',
+                                                              'uri': 'system',
+                                                              'use-cwuri-as-url': False}
+                                                  })
         # .properties() return a result set
         self.assertEqual(self.repo.properties().rql, 'Any K,V WHERE P is CWProperty,P pkey K, P value V, NOT P for_user U')
 
@@ -402,7 +406,7 @@
         repo = self.repo
         cnxid = repo.connect(self.admlogin, password=self.admpassword)
         self.assertEqual(repo.user_info(cnxid), (6, 'admin', set([u'managers']), {}))
-        self.assertEqual(repo.describe(cnxid, 2), (u'CWGroup', u'system', None))
+        self.assertEqual(repo.describe(cnxid, 2), (u'CWGroup', u'system', None, 'system'))
         repo.close(cnxid)
         self.assertRaises(BadConnectionId, repo.user_info, cnxid)
         self.assertRaises(BadConnectionId, repo.describe, cnxid, 1)
@@ -519,38 +523,39 @@
 class DataHelpersTC(CubicWebTC):
 
     def test_create_eid(self):
-        self.session.set_pool()
+        self.session.set_cnxset()
         self.assert_(self.repo.system_source.create_eid(self.session))
 
     def test_source_from_eid(self):
-        self.session.set_pool()
+        self.session.set_cnxset()
         self.assertEqual(self.repo.source_from_eid(1, self.session),
                           self.repo.sources_by_uri['system'])
 
     def test_source_from_eid_raise(self):
-        self.session.set_pool()
+        self.session.set_cnxset()
         self.assertRaises(UnknownEid, self.repo.source_from_eid, -2, self.session)
 
     def test_type_from_eid(self):
-        self.session.set_pool()
+        self.session.set_cnxset()
         self.assertEqual(self.repo.type_from_eid(2, self.session), 'CWGroup')
 
     def test_type_from_eid_raise(self):
-        self.session.set_pool()
+        self.session.set_cnxset()
         self.assertRaises(UnknownEid, self.repo.type_from_eid, -2, self.session)
 
     def test_add_delete_info(self):
         entity = self.repo.vreg['etypes'].etype_class('Personne')(self.session)
         entity.eid = -1
         entity.complete = lambda x: None
-        self.session.set_pool()
+        self.session.set_cnxset()
         self.repo.add_info(self.session, entity, self.repo.system_source)
         cu = self.session.system_sql('SELECT * FROM entities WHERE eid = -1')
         data = cu.fetchall()
-        self.assertIsInstance(data[0][3], datetime)
+        self.assertIsInstance(data[0][4], datetime)
         data[0] = list(data[0])
-        data[0][3] = None
-        self.assertEqual(tuplify(data), [(-1, 'Personne', 'system', None, None)])
+        data[0][4] = None
+        self.assertEqual(tuplify(data), [(-1, 'Personne', 'system', 'system',
+                                          None, None)])
         self.repo.delete_info(self.session, entity, 'system', None)
         #self.repo.commit()
         cu = self.session.system_sql('SELECT * FROM entities WHERE eid = -1')
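
test_add_delete_info now expects six columns from the entities table
instead of five: a second source column (the actual source, 'system' twice
here) precedes the timestamp. A hedged sketch of the layout these
assertions imply, with column names inferred from the test rather than
copied from cubicweb:

    import sqlite3
    from datetime import datetime

    cnx = sqlite3.connect(':memory:')
    cnx.execute('CREATE TABLE entities '
                '(eid INTEGER, type TEXT, source TEXT, asource TEXT, '
                ' mtime TIMESTAMP, extid TEXT)')
    cnx.execute('INSERT INTO entities VALUES (?, ?, ?, ?, ?, ?)',
                (-1, 'Personne', 'system', 'system', datetime.now(), None))
    row = cnx.execute('SELECT * FROM entities WHERE eid = -1').fetchone()
    assert row[1:4] == ('Personne', 'system', 'system')
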
@@ -566,7 +571,7 @@
         self.commit()
         ts = datetime.now()
         self.assertEqual(len(self.execute('Personne X WHERE X has_text "tutu"')), 1)
-        self.session.set_pool()
+        self.session.set_cnxset()
         cu = self.session.system_sql('SELECT mtime, eid FROM entities WHERE eid = %s' % eidp)
         omtime = cu.fetchone()[0]
         # our sqlite datetime adapter ignores the seconds fraction, so we have to
@@ -575,7 +580,7 @@
         self.execute('SET X nom "tata" WHERE X eid %(x)s', {'x': eidp})
         self.commit()
         self.assertEqual(len(self.execute('Personne X WHERE X has_text "tutu"')), 1)
-        self.session.set_pool()
+        self.session.set_cnxset()
         cu = self.session.system_sql('SELECT mtime FROM entities WHERE eid = %s' % eidp)
         mtime = cu.fetchone()[0]
         self.failUnless(omtime < mtime)
@@ -646,7 +651,7 @@
         CubicWebTC.setUp(self)
         CALLED[:] = ()
 
-    def _after_relation_hook(self, pool, fromeid, rtype, toeid):
+    def _after_relation_hook(self, cnxset, fromeid, rtype, toeid):
         self.called.append((fromeid, rtype, toeid))
 
     def test_inline_relation(self):
@@ -704,13 +709,18 @@
 
 
 class PerformanceTest(CubicWebTC):
-    def setup_database(self):
-        import logging
+    def setUp(self):
+        super(PerformanceTest, self).setUp()
         logger = logging.getLogger('cubicweb.session')
         #logger.handlers = [logging.StreamHandler(sys.stdout)]
         logger.setLevel(logging.INFO)
         self.info = logger.info
 
+    def tearDown(self):
+        super(PerformanceTest, self).tearDown()
+        logger = logging.getLogger('cubicweb.session')
+        logger.setLevel(logging.CRITICAL)
+
     def test_composite_deletion(self):
         req = self.request()
         personnes = []
@@ -807,6 +817,7 @@
         req.cnx.commit()
         t1 = time.time()
         self.info('add relations: %.2gs', t1-t0)
+
     def test_session_add_relation_inlined(self):
         """ to be compared with test_session_add_relations"""
         req = self.request()
@@ -847,7 +858,7 @@
         p2 = req.create_entity('Personne', nom=u'Florent')
         w = req.create_entity('Affaire', ref=u'wc')
         w.set_relations(todo_by=[p1,p2])
-        w.clear_all_caches()
+        w.cw_clear_all_caches()
         self.commit()
         self.assertEqual(len(w.todo_by), 1)
         self.assertEqual(w.todo_by[0].eid, p2.eid)
@@ -860,7 +871,7 @@
         w.set_relations(todo_by=p1)
         self.commit()
         w.set_relations(todo_by=p2)
-        w.clear_all_caches()
+        w.cw_clear_all_caches()
         self.commit()
         self.assertEqual(len(w.todo_by), 1)
         self.assertEqual(w.todo_by[0].eid, p2.eid)
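
Entity cache invalidation moves from clear_all_caches() to
cw_clear_all_caches(), following the convention that Entity methods carry a
cw_ prefix so they cannot collide with attributes generated from the
application schema. A minimal sketch of why such a prefix matters
(illustrative, not the cubicweb Entity class):

    class Entity(object):
        def cw_clear_all_caches(self):
            # drop everything that is not framework (cw_-prefixed) state
            for attr in list(self.__dict__):
                if not attr.startswith('cw_'):
                    del self.__dict__[attr]

    e = Entity()
    e.todo_by = ['cached relation result']   # schema-driven attribute
    e.cw_clear_all_caches()
    assert not hasattr(e, 'todo_by')
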
--- a/server/test/unittest_rql2sql.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/server/test/unittest_rql2sql.py	Tue Jun 28 16:33:53 2011 +0200
@@ -544,6 +544,10 @@
 GROUP BY rel_todo_by0.eid_to
 ORDER BY 2 DESC'''),
 
+    ('Any R2 WHERE R2 concerne R, R eid RE, R2 eid > RE',
+     '''SELECT _R2.eid
+FROM concerne_relation AS rel_concerne0, entities AS _R2
+WHERE _R2.eid=rel_concerne0.eid_from AND _R2.eid>rel_concerne0.eid_to'''),
     ]
 
 ADVANCED_WITH_GROUP_CONCAT = [
@@ -1348,6 +1352,18 @@
                     '''SELECT SUBSTR(_P.cw_nom, 1, 1)
 FROM cw_Personne AS _P''')
 
+    def test_cast(self):
+        self._check("Any CAST(String, P) WHERE P is Personne",
+                    '''SELECT CAST(_P.cw_eid AS text)
+FROM cw_Personne AS _P''')
+
+    def test_regexp(self):
+        self._check("Any X WHERE X login REGEXP '[0-9].*'",
+                    '''SELECT _X.cw_eid
+FROM cw_CWUser AS _X
+WHERE _X.cw_login ~ [0-9].*
+''')
+
     def test_parser_parse(self):
         for t in self._parse(PARSER):
             yield t
@@ -1653,6 +1669,9 @@
         for t in self._parse(HAS_TEXT_LG_INDEXER):
             yield t
 
+    def test_regexp(self):
+        self.skipTest('regexp-based pattern matching not implemented in sqlserver')
+
     def test_or_having_fake_terms(self):
         self._check('Any X WHERE X is CWUser, X creation_date D HAVING YEAR(D) = "2010" OR D = NULL',
                     '''SELECT _X.cw_eid
@@ -1749,6 +1768,10 @@
         for t in self._parse(WITH_LIMIT):# + ADVANCED_WITH_LIMIT_OR_ORDERBY):
             yield t
 
+    def test_cast(self):
+        self._check("Any CAST(String, P) WHERE P is Personne",
+                    '''SELECT CAST(_P.cw_eid AS nvarchar(max))
+FROM cw_Personne AS _P''')
 
 
 class SqliteSQLGeneratorTC(PostgresSQLGeneratorTC):
@@ -1762,6 +1785,14 @@
                     '''SELECT MONTH(_P.cw_creation_date)
 FROM cw_Personne AS _P''')
 
+    def test_regexp(self):
+        self._check("Any X WHERE X login REGEXP '[0-9].*'",
+                    '''SELECT _X.cw_eid
+FROM cw_CWUser AS _X
+WHERE _X.cw_login REGEXP [0-9].*
+''')
+
+
     def test_union(self):
         for t in self._parse((
             ('(Any N ORDERBY 1 WHERE X name N, X is State)'
@@ -1902,6 +1933,18 @@
                     '''SELECT EXTRACT(MONTH from _P.cw_creation_date)
 FROM cw_Personne AS _P''')
 
+    def test_cast(self):
+        self._check("Any CAST(String, P) WHERE P is Personne",
+                    '''SELECT CAST(_P.cw_eid AS mediumtext)
+FROM cw_Personne AS _P''')
+
+    def test_regexp(self):
+        self._check("Any X WHERE X login REGEXP '[0-9].*'",
+                    '''SELECT _X.cw_eid
+FROM cw_CWUser AS _X
+WHERE _X.cw_login REGEXP [0-9].*
+''')
+
     def test_from_clause_needed(self):
         queries = [("Any 1 WHERE EXISTS(T is CWGroup, T name 'managers')",
                     '''SELECT 1
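
These rql2sql tests pin down how the new CAST and REGEXP constructs render
per backend: CAST(String, P) becomes CAST(... AS text) on PostgreSQL,
nvarchar(max) on SQL Server and mediumtext on MySQL, while REGEXP maps to
the ~ operator on PostgreSQL, stays REGEXP on sqlite/MySQL and is skipped
on sqlserver. A hedged sketch of such a per-backend dispatch (the table
mirrors the expected SQL above; the function is illustrative):

    TYPEMAP = {
        'postgres': {'String': 'text'},
        'sqlserver': {'String': 'nvarchar(max)'},
        'mysql': {'String': 'mediumtext'},
    }
    REGEXP_OP = {'postgres': '~', 'sqlite': 'REGEXP', 'mysql': 'REGEXP'}
    # no sqlserver entry: regexp-based matching unsupported there

    def render_cast(backend, rqltype, sqlexpr):
        return 'CAST(%s AS %s)' % (sqlexpr, TYPEMAP[backend][rqltype])

    assert render_cast('postgres', 'String', '_P.cw_eid') == \
           'CAST(_P.cw_eid AS text)'
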
--- a/server/test/unittest_security.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/server/test/unittest_security.py	Tue Jun 28 16:33:53 2011 +0200
@@ -221,7 +221,7 @@
         rset = cu.execute('Personne P')
         self.assertEqual(len(rset), 1)
         ent = rset.get_entity(0, 0)
-        session.set_pool() # necessary
+        session.set_cnxset() # necessary
         self.assertRaises(Unauthorized, ent.cw_check_perm, 'update')
         self.assertRaises(Unauthorized,
                           cu.execute, "SET P travaille S WHERE P is Personne, S is Societe")
@@ -579,7 +579,7 @@
         cnx = self.login('iaminusersgrouponly')
         session = self.session
         # needed to avoid check_perm error
-        session.set_pool()
+        session.set_cnxset()
         # needed to remove rql expr granting update perm to the user
         affaire_perms = self.schema['Affaire'].permissions.copy()
         self.schema['Affaire'].set_action_permissions('update', self.schema['Affaire'].get_groups('update'))
--- a/server/test/unittest_session.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/server/test/unittest_session.py	Tue Jun 28 16:33:53 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -15,13 +15,12 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""
+from __future__ import with_statement
 
-"""
 from logilab.common.testlib import TestCase, unittest_main, mock_object
 
 from cubicweb.devtools.testlib import CubicWebTC
-from cubicweb.server.session import _make_description
+from cubicweb.server.session import _make_description, hooks_control
 
 class Variable:
     def __init__(self, name):
@@ -46,11 +45,38 @@
         self.assertEqual(_make_description((Function('max', 'A'), Variable('B')), {}, solution),
                           ['Int','CWUser'])
 
+
 class InternalSessionTC(CubicWebTC):
     def test_dbapi_query(self):
         session = self.repo.internal_session()
         self.assertFalse(session.running_dbapi_query)
         session.close()
 
+
+class SessionTC(CubicWebTC):
+
+    def test_hooks_control(self):
+        session = self.session
+        self.assertEqual(session.hooks_mode, session.HOOKS_ALLOW_ALL)
+        self.assertEqual(session.disabled_hook_categories, set())
+        self.assertEqual(session.enabled_hook_categories, set())
+        self.assertEqual(len(session._tx_data), 1)
+        with hooks_control(session, session.HOOKS_DENY_ALL, 'metadata'):
+            self.assertEqual(session.hooks_mode, session.HOOKS_DENY_ALL)
+            self.assertEqual(session.disabled_hook_categories, set())
+            self.assertEqual(session.enabled_hook_categories, set(('metadata',)))
+            session.commit()
+            self.assertEqual(session.hooks_mode, session.HOOKS_DENY_ALL)
+            self.assertEqual(session.disabled_hook_categories, set())
+            self.assertEqual(session.enabled_hook_categories, set(('metadata',)))
+            session.rollback()
+            self.assertEqual(session.hooks_mode, session.HOOKS_DENY_ALL)
+            self.assertEqual(session.disabled_hook_categories, set())
+            self.assertEqual(session.enabled_hook_categories, set(('metadata',)))
+        # leaving context manager with no transaction running should reset the
+        # transaction local storage (and associated cnxset)
+        self.assertEqual(session._tx_data, {})
+        self.assertEqual(session.cnxset, None)
+
 if __name__ == '__main__':
     unittest_main()
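
The new SessionTC documents hooks_control: inside the block the session is
in HOOKS_DENY_ALL mode with only the 'metadata' category enabled, that mode
survives commit() and rollback(), and leaving the block with no transaction
running resets the transaction data and releases the cnxset. A hedged
sketch of the pattern as a generic context manager (not the cubicweb
implementation):

    from contextlib import contextmanager

    @contextmanager
    def hooks_control(session, mode, *categories):
        oldmode = session.hooks_mode
        session.hooks_mode = mode
        session.enabled_hook_categories = set(categories)
        try:
            yield session
        finally:
            session.hooks_mode = oldmode
            session.enabled_hook_categories = set()

    class FakeSession(object):
        HOOKS_ALLOW_ALL, HOOKS_DENY_ALL = 'all', 'deny'
        hooks_mode = HOOKS_ALLOW_ALL
        enabled_hook_categories = set()

    s = FakeSession()
    with hooks_control(s, s.HOOKS_DENY_ALL, 'metadata'):
        assert s.enabled_hook_categories == set(['metadata'])
    assert s.hooks_mode == s.HOOKS_ALLOW_ALL
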
--- a/server/test/unittest_undo.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/server/test/unittest_undo.py	Tue Jun 28 16:33:53 2011 +0200
@@ -153,8 +153,8 @@
         txuuid = self.commit()
         actions = self.cnx.transaction_info(txuuid).actions_list()
         self.assertEqual(len(actions), 1)
-        toto.clear_all_caches()
-        e.clear_all_caches()
+        toto.cw_clear_all_caches()
+        e.cw_clear_all_caches()
         errors = self.cnx.undo_transaction(txuuid)
         undotxuuid = self.commit()
         self.assertEqual(undotxuuid, None) # undo not undoable
@@ -195,7 +195,7 @@
         self.commit()
         errors = self.cnx.undo_transaction(txuuid)
         self.commit()
-        p.clear_all_caches()
+        p.cw_clear_all_caches()
         self.assertEqual(p.fiche[0].eid, c2.eid)
         self.assertEqual(len(errors), 1)
         self.assertEqual(errors[0],
@@ -235,7 +235,7 @@
         self.failIf(self.execute('Any X WHERE X eid %(x)s', {'x': c.eid}))
         self.failIf(self.execute('Any X WHERE X eid %(x)s', {'x': p.eid}))
         self.failIf(self.execute('Any X,Y WHERE X fiche Y'))
-        self.session.set_pool()
+        self.session.set_cnxset()
         for eid in (p.eid, c.eid):
             self.failIf(session.system_sql(
                 'SELECT * FROM entities WHERE eid=%s' % eid).fetchall())
--- a/sobjects/parsers.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/sobjects/parsers.py	Tue Jun 28 16:33:53 2011 +0200
@@ -31,26 +31,22 @@
 
 """
 
-import urllib2
-import StringIO
 import os.path as osp
-from cookielib import CookieJar
 from datetime import datetime, timedelta
-
-from lxml import etree
+from urllib import urlencode
+from cgi import parse_qs # in urlparse with python >= 2.6
 
 from logilab.common.date import todate, totime
 from logilab.common.textutils import splitstrip, text_to_dict
+from logilab.common.decorators import classproperty
 
 from yams.constraints import BASE_CONVERTERS
 from yams.schema import role_name as rn
 
-from cubicweb import ValidationError, typed_eid
+from cubicweb import ValidationError, RegistryException, typed_eid
+from cubicweb.view import Component
 from cubicweb.server.sources import datafeed
-
-def ensure_str_keys(dic):
-    for key in dic:
-        dic[str(key)] = dic.pop(key)
+from cubicweb.server.hook import match_rtype
 
 # XXX see cubicweb.cwvreg.YAMS_TO_PY
 # XXX see cubicweb.web.views.xmlrss.SERIALIZERS
@@ -72,15 +68,6 @@
     return time(seconds=int(ustr))
 DEFAULT_CONVERTERS['Interval'] = convert_interval
 
-# use a cookie enabled opener to use session cookie if any
-_OPENER = urllib2.build_opener()
-try:
-    from logilab.common import urllib2ext
-    _OPENER.add_handler(urllib2ext.HTTPGssapiAuthHandler())
-except ImportError: # python-kerberos not available
-    pass
-_OPENER.add_handler(urllib2.HTTPCookieProcessor(CookieJar()))
-
 def extract_typed_attrs(eschema, stringdict, converters=DEFAULT_CONVERTERS):
     typeddict = {}
     for rschema in eschema.subject_relations():
@@ -91,35 +78,6 @@
             typeddict[rschema.type] = converters[attrtype](stringdict[rschema])
     return typeddict
 
-def _parse_entity_etree(parent):
-    for node in list(parent):
-        try:
-            item = {'cwtype': unicode(node.tag),
-                    'cwuri': node.attrib['cwuri'],
-                    'eid': typed_eid(node.attrib['eid']),
-                    }
-        except KeyError:
-            # cw < 3.11 compat mode XXX
-            item = {'cwtype': unicode(node.tag),
-                    'cwuri': node.find('cwuri').text,
-                    'eid': typed_eid(node.find('eid').text),
-                    }
-        rels = {}
-        for child in node:
-            role = child.get('role')
-            if role:
-                # relation
-                related = rels.setdefault(role, {}).setdefault(child.tag, [])
-                related += [ritem for ritem, _ in _parse_entity_etree(child)]
-            else:
-                # attribute
-                item[child.tag] = unicode(child.text)
-        yield item, rels
-
-def build_search_rql(etype, attrs):
-    restrictions = ['X %(attr)s %%(%(attr)s)s'%{'attr': attr} for attr in attrs]
-    return 'Any X WHERE X is %s, %s' % (etype, ', '.join(restrictions))
-
 def rtype_role_rql(rtype, role):
     if role == 'object':
         return 'Y %s X WHERE X eid %%(x)s' % rtype
@@ -127,34 +85,40 @@
         return 'X %s Y WHERE X eid %%(x)s' % rtype
 
 
-def _check_no_option(action, options, eid, _):
-    if options:
-        msg = _("'%s' action doesn't take any options") % action
-        raise ValidationError(eid, {rn('options', 'subject'): msg})
+class CWEntityXMLParser(datafeed.DataFeedXMLParser):
+    """datafeed parser for the 'xml' entity view
 
-def _check_linkattr_option(action, options, eid, _):
-    if not 'linkattr' in options:
-        msg = _("'%s' action requires 'linkattr' option") % action
-        raise ValidationError(eid, {rn('options', 'subject'): msg})
+    Most of the logic is delegated to the following components:
+
+    * an "item builder" component, turning an etree xml node into a specific
+      python dictionary representing an entity
 
-
-class CWEntityXMLParser(datafeed.DataFeedParser):
-    """datafeed parser for the 'xml' entity view"""
-    __regid__ = 'cw.entityxml'
+    * "action" components, selected given an entity, a relation and its role in
+      the relation, and responsible to link the entity to given related items
+      (eg dictionnary)
 
-    action_options = {
-        'copy': _check_no_option,
-        'link-or-create': _check_linkattr_option,
-        'link': _check_linkattr_option,
-        }
+    So the parser only provides the glue and the connection to the
+    source.
+    """
+    __regid__ = 'cw.entityxml'
 
     def __init__(self, *args, **kwargs):
         super(CWEntityXMLParser, self).__init__(*args, **kwargs)
-        self.action_methods = {
-            'copy': self.related_copy,
-            'link-or-create': self.related_link_or_create,
-            'link': self.related_link,
-            }
+        self._parsed_urls = {}
+        self._processed_entities = set()
+
+    def select_linker(self, action, rtype, role, entity=None):
+        try:
+            return self._cw.vreg['components'].select(
+                'cw.entityxml.action.%s' % action, self._cw, entity=entity,
+                rtype=rtype, role=role, parser=self)
+        except RegistryException:
+            raise RegistryException('Unknown action %s' % action)
+
+    def list_actions(self):
+        reg = self._cw.vreg['components']
+        return sorted(clss[0].action for rid, clss in reg.iteritems()
+                      if rid.startswith('cw.entityxml.action.'))
 
     # mapping handling #########################################################
 
@@ -180,11 +144,15 @@
             raise ValidationError(schemacfg.eid, {rn('options', 'subject'): msg})
         try:
             action = options.pop('action')
-            self.action_options[action](action, options, schemacfg.eid, _)
+            linker = self.select_linker(action, rtype, role)
+            linker.check_options(options, schemacfg.eid)
         except KeyError:
             msg = _('"action" must be specified in options; allowed values are '
                     '%s') % ', '.join(self.action_methods)
             raise ValidationError(schemacfg.eid, {rn('options', 'subject'): msg})
+        except RegistryException:
+            msg = _('allowed values for "action" are %s') % ', '.join(self.list_actions())
+            raise ValidationError(schemacfg.eid, {rn('options', 'subject'): msg})
         if not checkonly:
             if role == 'subject':
                 etype = schemacfg.schema.stype.name
@@ -208,46 +176,25 @@
 
     # import handling ##########################################################
 
-    def process(self, url, partialcommit=True):
+    def process(self, url, raise_on_error=False, partialcommit=True):
         """IDataFeedParser main entry point"""
-        # XXX suppression support according to source configuration. If set, get
-        # all cwuri of entities from this source, and compare with newly
-        # imported ones
-        error = False
-        for item, rels in self.parse(url):
-            cwuri = item['cwuri']
-            try:
-                self.process_item(item, rels)
-                if partialcommit:
-                    # commit+set_pool instead of commit(reset_pool=False) to let
-                    # other a chance to get our pool
-                    self._cw.commit()
-                    self._cw.set_pool()
-            except ValidationError, exc:
-                if partialcommit:
-                    self.source.error('Skipping %s because of validation error %s' % (cwuri, exc))
-                    self._cw.rollback()
-                    self._cw.set_pool()
-                    error = True
-                else:
-                    raise
-        return error
-
-    def parse(self, url):
-        if not url.startswith('http'):
-            stream = StringIO.StringIO(url)
-        else:
-            for mappedurl in HOST_MAPPING:
-                if url.startswith(mappedurl):
-                    url = url.replace(mappedurl, HOST_MAPPING[mappedurl], 1)
-                    break
-            self.source.info('GET %s', url)
-            stream = _OPENER.open(url)
-        return _parse_entity_etree(etree.parse(stream).getroot())
+        super(CWEntityXMLParser, self).process(self.complete_url(url),
+                                               raise_on_error, partialcommit)
+    def parse_etree(self, parent):
+        for node in list(parent):
+            builder = self._cw.vreg['components'].select(
+                'cw.entityxml.item-builder', self._cw, node=node,
+                parser=self)
+            yield builder.build_item()
 
     def process_item(self, item, rels):
         entity = self.extid2entity(str(item.pop('cwuri')),  item.pop('cwtype'),
-                                   item=item)
+                                   cwsource=item.pop('cwsource'), item=item)
+        if entity is None:
+            return None
+        if entity.eid in self._processed_entities:
+            return entity
+        self._processed_entities.add(entity.eid)
         if not (self.created_during_pull(entity) or self.updated_during_pull(entity)):
             self.notify_updated(entity)
             item.pop('eid')
@@ -262,10 +209,11 @@
                                   rtype, role, entity.__regid__)
                 continue
             try:
-                actionmethod = self.action_methods[action]
-            except KeyError:
-                raise Exception('Unknown action %s' % action)
-            actionmethod(entity, rtype, role, related_items, rules)
+                linker = self.select_linker(action, rtype, role, entity)
+            except RegistryException:
+                self.source.error('no linker for action %s', action)
+            else:
+                linker.link_items(related_items, rules)
         return entity
 
     def before_entity_copy(self, entity, sourceparams):
@@ -273,112 +221,232 @@
         attrs = extract_typed_attrs(entity.e_schema, sourceparams['item'])
         entity.cw_edited.update(attrs)
 
-    def related_copy(self, entity, rtype, role, others, rules):
-        """implementation of 'copy' action
+    def complete_url(self, url, etype=None, add_relations=True):
+        """append to the url's query string information about relation that should
+        be included in the resulting xml, according to source mapping.
 
-        Takes no option.
+        If etype is not specified, try to guess it using the last path part of
+        the url.
         """
-        assert not any(x[1] for x in rules), "'copy' action takes no option"
-        ttypes = set([x[0] for x in rules])
-        others = [item for item in others if item['cwtype'] in ttypes]
-        eids = [] # local eids
-        if not others:
-            self._clear_relation(entity, rtype, role, ttypes)
-            return
-        for item in others:
-            item, _rels = self._complete_item(item)
-            other_entity = self.process_item(item, [])
-            eids.append(other_entity.eid)
-        self._set_relation(entity, rtype, role, eids)
+        try:
+            url, qs = url.split('?', 1)
+        except ValueError:
+            qs = ''
+        if etype is None:
+            try:
+                etype = url.rsplit('/', 1)[1]
+            except ValueError:
+                return url
+            try:
+                etype = self._cw.vreg.case_insensitive_etypes[etype]
+            except KeyError:
+                return url
+        params = parse_qs(qs)
+        if not 'vid' in params:
+            params['vid'] = ['xml']
+        if add_relations:
+            relations = params.setdefault('relation', [])
+            for rtype, role, _ in self.source.mapping.get(etype, ()):
+                reldef = '%s-%s' % (rtype, role)
+                if not reldef in relations:
+                    relations.append(reldef)
+        return url + '?' + self._cw.build_url_params(**params)
+
+    def complete_item(self, item, add_relations=True):
+        try:
+            return self._parsed_urls[(item['cwuri'], add_relations)]
+        except KeyError:
+            itemurl = self.complete_url(item['cwuri'], item['cwtype'],
+                                        add_relations)
+            item_rels = list(self.parse(itemurl))
+            assert len(item_rels) == 1, 'url %s expected to bring back one '\
+                   'and only one entity, got %s' % (itemurl, len(item_rels))
+            self._parsed_urls[(item['cwuri'], add_relations)] = item_rels[0]
+            return item_rels[0]
 
-    def related_link(self, entity, rtype, role, others, rules):
-        """implementation of 'link' action
+
+class CWEntityXMLItemBuilder(Component):
+    __regid__ = 'cw.entityxml.item-builder'
+
+    def __init__(self, _cw, parser, node, **kwargs):
+        super(CWEntityXMLItemBuilder, self).__init__(_cw, **kwargs)
+        self.parser = parser
+        self.node = node
+
+    def build_item(self):
+        node = self.node
+        item = dict(node.attrib.items())
+        item['cwtype'] = unicode(node.tag)
+        item.setdefault('cwsource', None)
+        try:
+            item['eid'] = typed_eid(item['eid'])
+        except KeyError:
+            # cw < 3.11 compat mode XXX
+            item['eid'] = typed_eid(node.find('eid').text)
+            item['cwuri'] = node.find('cwuri').text
+        rels = {}
+        for child in node:
+            role = child.get('role')
+            if role:
+                # relation
+                related = rels.setdefault(role, {}).setdefault(child.tag, [])
+                related += [ritem for ritem, _ in self.parser.parse_etree(child)]
+            else:
+                # attribute
+                item[child.tag] = unicode(child.text)
+        return item, rels
+
+
+class CWEntityXMLActionCopy(Component):
+    """implementation of cubicweb entity xml parser's'copy' action
 
-        requires an options to control search of the linked entity.
-        """
-        for ttype, options in rules:
-            assert 'linkattr' in options, (
-                "'link' action requires a list of attributes used to "
-                "search if the entity already exists")
-            self._related_link(entity, rtype, role, ttype, others, [options['linkattr']],
-                               create_when_not_found=False)
+    Takes no option.
+    """
+    __regid__ = 'cw.entityxml.action.copy'
+
+    def __init__(self, _cw, parser, rtype, role, entity=None, **kwargs):
+        super(CWEntityXMLActionCopy, self).__init__(_cw, **kwargs)
+        self.parser = parser
+        self.rtype = rtype
+        self.role = role
+        self.entity = entity
+
+    @classproperty
+    def action(cls):
+        return cls.__regid__.rsplit('.', 1)[-1]
+
+    def check_options(self, options, eid):
+        self._check_no_options(options, eid)
 
-    def related_link_or_create(self, entity, rtype, role, others, rules):
-        """implementation of 'link-or-create' action
+    def _check_no_options(self, options, eid, msg=None):
+        if options:
+            if msg is None:
+                msg = self._cw._("'%s' action doesn't take any options") % self.action
+            raise ValidationError(eid, {rn('options', 'subject'): msg})
+
+    def link_items(self, others, rules):
+        assert not any(x[1] for x in rules), "'copy' action takes no option"
+        ttypes = frozenset([x[0] for x in rules])
+        eids = [] # local eids
+        for item in others:
+            if item['cwtype'] in ttypes:
+                item = self.parser.complete_item(item)[0]
+                other_entity = self.parser.process_item(item, [])
+                if other_entity is not None:
+                    eids.append(other_entity.eid)
+        if eids:
+            self._set_relation(eids)
+        else:
+            self._clear_relation(ttypes)
 
-        requires an options to control search of the linked entity.
-        """
+    def _clear_relation(self, ttypes):
+        if not self.parser.created_during_pull(self.entity):
+            if len(ttypes) > 1:
+                typerestr = ', Y is IN(%s)' % ','.join(ttypes)
+            else:
+                typerestr = ', Y is %s' % ','.join(ttypes)
+            self._cw.execute('DELETE ' + rtype_role_rql(self.rtype, self.role) + typerestr,
+                             {'x': self.entity.eid})
+
+    def _set_relation(self, eids):
+        assert eids
+        rtype = self.rtype
+        rqlbase = rtype_role_rql(rtype, self.role)
+        eidstr = ','.join(str(eid) for eid in eids)
+        self._cw.execute('DELETE %s, NOT Y eid IN (%s)' % (rqlbase, eidstr),
+                         {'x': self.entity.eid})
+        if self.role == 'object':
+            rql = 'SET %s, Y eid IN (%s), NOT Y %s X' % (rqlbase, eidstr, rtype)
+        else:
+            rql = 'SET %s, Y eid IN (%s), NOT X %s Y' % (rqlbase, eidstr, rtype)
+        self._cw.execute(rql, {'x': self.entity.eid})
+
+
+class CWEntityXMLActionLink(CWEntityXMLActionCopy):
+    """implementation of cubicweb entity xml parser's'link' action
+
+    requires a 'linkattr' option to control search of the linked entity.
+    """
+    __regid__ = 'cw.entityxml.action.link'
+
+    def check_options(self, options, eid):
+        if not 'linkattr' in options:
+            msg = self._cw._("'%s' action requires 'linkattr' option") % self.action
+            raise ValidationError(eid, {rn('options', 'subject'): msg})
+
+    create_when_not_found = False
+
+    def link_items(self, others, rules):
         for ttype, options in rules:
-            assert 'linkattr' in options, (
-                "'link-or-create' action requires a list of attributes used to "
-                "search if the entity already exists")
-            self._related_link(entity, rtype, role, ttype, others, [options['linkattr']],
-                               create_when_not_found=True)
+            searchattrs = splitstrip(options.get('linkattr', ''))
+            self._related_link(ttype, others, searchattrs)
 
-    def _related_link(self, entity, rtype, role, ttype, others, searchattrs,
-                      create_when_not_found):
+    def _related_link(self, ttype, others, searchattrs):
         def issubset(x,y):
             return all(z in y for z in x)
         eids = [] # local eids
+        source = self.parser.source
         for item in others:
             if item['cwtype'] != ttype:
                 continue
             if not issubset(searchattrs, item):
-                item, _rels = self._complete_item(item, False)
+                item = self.parser.complete_item(item, False)[0]
                 if not issubset(searchattrs, item):
-                    self.source.error('missing attribute, got %s expected keys %s'
-                                      % item, searchattrs)
+                    source.error('missing attribute, got %s expected keys %s',
+                                 item, searchattrs)
                     continue
-            kwargs = dict((attr, item[attr]) for attr in searchattrs)
-            rql = build_search_rql(item['cwtype'], kwargs)
-            rset = self._cw.execute(rql, kwargs)
-            if len(rset) > 1:
-                self.source.error('ambiguous link: found %s entity %s with attributes %s',
-                                  len(rset), item['cwtype'], kwargs)
-            elif len(rset) == 1:
-                eids.append(rset[0][0])
-            elif create_when_not_found:
-                ensure_str_keys(kwargs) # XXX necessary with python < 2.6
+            # XXX str() needed with python < 2.6
+            kwargs = dict((str(attr), item[attr]) for attr in searchattrs)
+            targets = self._find_entities(item, kwargs)
+            if len(targets) > 1:
+                source.error('ambiguous link: found %s entity %s with attributes %s',
+                             len(targets), item['cwtype'], kwargs)
+            elif len(targets) == 1:
+                eids.append(targets[0].eid)
+            elif self.create_when_not_found:
                 eids.append(self._cw.create_entity(item['cwtype'], **kwargs).eid)
             else:
-                self.source.error('can not find %s entity with attributes %s',
-                                  item['cwtype'], kwargs)
-        if not eids:
-            self._clear_relation(entity, rtype, role, (ttype,))
+                source.error('can not find %s entity with attributes %s',
+                             item['cwtype'], kwargs)
+        if eids:
+            self._set_relation(eids)
         else:
-            self._set_relation(entity, rtype, role, eids)
+            self._clear_relation((ttype,))
 
-    def _complete_item(self, item, add_relations=True):
-        itemurl = item['cwuri'] + '?vid=xml'
-        if add_relations:
-            for rtype, role, _ in self.source.mapping.get(item['cwtype'], ()):
-                itemurl += '&relation=%s-%s' % (rtype, role)
-        item_rels = list(self.parse(itemurl))
-        assert len(item_rels) == 1
-        return item_rels[0]
+    def _find_entities(self, item, kwargs):
+        return tuple(self._cw.find_entities(item['cwtype'], **kwargs))
+
+
+class CWEntityXMLActionLinkInState(CWEntityXMLActionLink):
+    """custom implementation of cubicweb entity xml parser's'link' action for
+    in_state relation
+    """
+    __select__ = match_rtype('in_state')
 
-    def _clear_relation(self, entity, rtype, role, ttypes):
-        if entity.eid not in self.stats['created']:
-            if len(ttypes) > 1:
-                typerestr = ', Y is IN(%s)' % ','.join(ttypes)
-            else:
-                typerestr = ', Y is %s' % ','.join(ttypes)
-            self._cw.execute('DELETE ' + rtype_role_rql(rtype, role) + typerestr,
-                             {'x': entity.eid})
+    def check_options(self, options, eid):
+        super(CWEntityXMLActionLinkInState, self).check_options(options, eid)
+        if not 'name' in options['linkattr']:
+            msg = self._cw._("'%s' action for in_state relation should at least have 'linkattr=name' option") % self.action
+            raise ValidationError(eid, {rn('options', 'subject'): msg})
 
-    def _set_relation(self, entity, rtype, role, eids):
-        rqlbase = rtype_role_rql(rtype, role)
-        rql = 'DELETE %s' % rqlbase
-        if eids:
-            eidstr = ','.join(str(eid) for eid in eids)
-            rql += ', NOT Y eid IN (%s)' % eidstr
-        self._cw.execute(rql, {'x': entity.eid})
-        if eids:
-            if role == 'object':
-                rql = 'SET %s, Y eid IN (%s), NOT Y %s X' % (rqlbase, eidstr, rtype)
-            else:
-                rql = 'SET %s, Y eid IN (%s), NOT X %s Y' % (rqlbase, eidstr, rtype)
-            self._cw.execute(rql, {'x': entity.eid})
+    def _find_entities(self, item, kwargs):
+        assert 'name' in item # XXX else, complete_item
+        state_name = item['name']
+        wf = self.entity.cw_adapt_to('IWorkflowable').current_workflow
+        state = wf.state_by_name(state_name)
+        if state is None:
+            return ()
+        return (state,)
+
+
+class CWEntityXMLActionLinkOrCreate(CWEntityXMLActionLink):
+    """implementation of cubicweb entity xml parser's'link-or-create' action
+
+    requires a 'linkattr' option to control search of the linked entity.
+    """
+    __regid__ = 'cw.entityxml.action.link-or-create'
+    create_when_not_found = True
+
 
 def registration_callback(vreg):
     vreg.register_all(globals().values(), __name__)
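
After this refactoring, each action supported by the parser is a Component
registered under a 'cw.entityxml.action.<name>' regid; select_linker() and
list_actions() discover them through the registry, so a cube can add an
action without touching the parser itself. A hedged sketch of a third-party
action (the 'link-or-skip' name and behaviour are invented for
illustration; the subclassing pattern follows CWEntityXMLActionLinkOrCreate
above and assumes this module's CWEntityXMLActionLink is importable):

    class CWEntityXMLActionLinkOrSkip(CWEntityXMLActionLink):
        """link to existing entities only, never create missing ones"""
        __regid__ = 'cw.entityxml.action.link-or-skip'
        create_when_not_found = False
        # the register_all() call in registration_callback above picks
        # this class up automatically when it lives in this module
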
--- a/sobjects/test/unittest_parsers.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/sobjects/test/unittest_parsers.py	Tue Jun 28 16:33:53 2011 +0200
@@ -57,11 +57,14 @@
     <Tag cwuri="http://pouet.org/9" eid="9"/>
     <Tag cwuri="http://pouet.org/10" eid="10"/>
   </tags>
+  <in_state role="subject">
+    <State cwuri="http://pouet.org/11" eid="11" name="activated"/>
+  </in_state>
  </CWUser>
 </rset>
 '''.splitlines())
 
-RELATEDXML ={
+RELATEDXML = {
     'http://pouet.org/6': u'''
 <rset size="1">
  <EmailAddress eid="6" cwuri="http://pouet.org/6">
@@ -101,20 +104,47 @@
 ''',
     }
 
+
+OTHERXML = ''.join(u'''
+<rset size="1">
+ <CWUser eid="5" cwuri="http://pouet.org/5" cwsource="myfeed">
+  <login>sthenault</login>
+  <upassword>toto</upassword>
+  <last_login_time>2011-01-25 14:14:06</last_login_time>
+  <creation_date>2010-01-22 10:27:59</creation_date>
+  <modification_date>2011-01-25 14:14:06</modification_date>
+ </CWUser>
+</rset>
+'''.splitlines()
+)
 class CWEntityXMLParserTC(CubicWebTC):
-    def setup_database(self):
-        req = self.request()
-        source = req.create_entity('CWSource', name=u'myfeed', type=u'datafeed',
+    test_db_id = 'xmlparser'
+    @classmethod
+    def pre_setup_database(cls, session, config):
+        source = session.create_entity('CWSource', name=u'myfeed', type=u'datafeed',
                                    parser=u'cw.entityxml', url=BASEXML)
-        self.commit()
+        session.create_entity('CWSource', name=u'myotherfeed', type=u'datafeed',
+                              parser=u'cw.entityxml', url=OTHERXML)
+        session.commit()
         source.init_mapping([(('CWUser', 'use_email', '*'),
                               u'role=subject\naction=copy'),
                              (('CWUser', 'in_group', '*'),
                               u'role=subject\naction=link\nlinkattr=name'),
+                             (('CWUser', 'in_state', '*'),
+                              u'role=subject\naction=link\nlinkattr=name'),
                              (('*', 'tags', 'CWUser'),
                               u'role=object\naction=link-or-create\nlinkattr=name'),
                             ])
-        req.create_entity('Tag', name=u'hop')
+        session.create_entity('Tag', name=u'hop')
+
+    def test_complete_url(self):
+        dfsource = self.repo.sources_by_uri['myfeed']
+        parser = dfsource._get_parser(self.session)
+        self.assertEqual(parser.complete_url('http://www.cubicweb.org/cwuser'),
+                         'http://www.cubicweb.org/cwuser?relation=tags-object&relation=in_group-subject&relation=in_state-subject&relation=use_email-subject&vid=xml')
+        self.assertEqual(parser.complete_url('http://www.cubicweb.org/cwuser?vid=rdf&relation=hop'),
+                         'http://www.cubicweb.org/cwuser?relation=hop&relation=tags-object&relation=in_group-subject&relation=in_state-subject&relation=use_email-subject&vid=rdf')
+
 
     def test_actions(self):
         dfsource = self.repo.sources_by_uri['myfeed']
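
test_complete_url pins down the URL completion behaviour: vid=xml is added
when no vid is present (the explicit vid=rdf is preserved in the second
call) and one relation=<rtype>-<role> pair is appended per mapped relation.
A hedged sketch of that query-string handling with the same stdlib helpers
the module imports (parse_qs/urlencode); the real code also guesses the
entity type from the URL's last path segment:

    from cgi import parse_qs
    from urllib import urlencode

    def complete_url(url, mapped_relations):
        try:
            url, qs = url.split('?', 1)
        except ValueError:
            qs = ''
        params = parse_qs(qs)
        params.setdefault('vid', ['xml'])
        relations = params.setdefault('relation', [])
        for rtype, role in mapped_relations:
            reldef = '%s-%s' % (rtype, role)
            if reldef not in relations:
                relations.append(reldef)
        return url + '?' + urlencode(params, doseq=True)

    result = complete_url('http://www.cubicweb.org/cwuser',
                          [('use_email', 'subject')])
    assert 'vid=xml' in result and 'relation=use_email-subject' in result
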
@@ -122,6 +152,8 @@
                          {u'CWUser': {
                              (u'in_group', u'subject', u'link'): [
                                  (u'CWGroup', {u'linkattr': u'name'})],
+                             (u'in_state', u'subject', u'link'): [
+                                 (u'State', {u'linkattr': u'name'})],
                              (u'tags', u'object', u'link-or-create'): [
                                  (u'Tag', {u'linkattr': u'name'})],
                              (u'use_email', u'subject', u'copy'): [
@@ -139,11 +171,13 @@
         self.assertEqual(user.modification_date, datetime(2011, 01, 25, 14, 14, 06))
         self.assertEqual(user.cwuri, 'http://pouet.org/5')
         self.assertEqual(user.cw_source[0].name, 'myfeed')
+        self.assertEqual(user.absolute_url(), 'http://pouet.org/5')
         self.assertEqual(len(user.use_email), 1)
         # copy action
         email = user.use_email[0]
         self.assertEqual(email.address, 'syt@logilab.fr')
         self.assertEqual(email.cwuri, 'http://pouet.org/6')
+        self.assertEqual(email.absolute_url(), 'http://pouet.org/6')
         self.assertEqual(email.cw_source[0].name, 'myfeed')
         # link action
         self.assertFalse(self.execute('CWGroup X WHERE X name "unknown"'))
@@ -156,14 +190,67 @@
         self.assertEqual(tag.cwuri, 'http://testing.fr/cubicweb/%s' % tag.eid)
         self.assertEqual(tag.cw_source[0].name, 'system')
 
+        session.set_cnxset()
         stats = dfsource.pull_data(session, force=True, raise_on_error=True)
         self.assertEqual(stats['created'], set())
         self.assertEqual(len(stats['updated']), 2)
         self.repo._type_source_cache.clear()
         self.repo._extid_cache.clear()
+        session.set_cnxset()
         stats = dfsource.pull_data(session, force=True, raise_on_error=True)
         self.assertEqual(stats['created'], set())
         self.assertEqual(len(stats['updated']), 2)
+        session.commit()
+
+        # test move to system source
+        self.sexecute('SET X cw_source S WHERE X eid %(x)s, S name "system"', {'x': email.eid})
+        self.commit()
+        rset = self.sexecute('EmailAddress X WHERE X address "syt@logilab.fr"')
+        self.assertEqual(len(rset), 1)
+        e = rset.get_entity(0, 0)
+        self.assertEqual(e.eid, email.eid)
+        self.assertEqual(e.cw_metainformation(), {'source': {'type': u'native', 'uri': u'system',
+                                                             'use-cwuri-as-url': False},
+                                                  'type': 'EmailAddress',
+                                                  'extid': None})
+        self.assertEqual(e.cw_source[0].name, 'system')
+        self.assertEqual(e.reverse_use_email[0].login, 'sthenault')
+        self.commit()
+        # test everything is still fine after source synchronization
+        session.set_cnxset()
+        stats = dfsource.pull_data(session, force=True, raise_on_error=True)
+        rset = self.sexecute('EmailAddress X WHERE X address "syt@logilab.fr"')
+        self.assertEqual(len(rset), 1)
+        e = rset.get_entity(0, 0)
+        self.assertEqual(e.eid, email.eid)
+        self.assertEqual(e.cw_metainformation(), {'source': {'type': u'native', 'uri': u'system',
+                                                             'use-cwuri-as-url': False},
+                                                  'type': 'EmailAddress',
+                                                  'extid': None})
+        self.assertEqual(e.cw_source[0].name, 'system')
+        self.assertEqual(e.reverse_use_email[0].login, 'sthenault')
+        session.commit()
+
+        # test delete entity
+        e.cw_delete()
+        self.commit()
+        # test everything is still fine after source synchronization
+        session.set_cnxset()
+        stats = dfsource.pull_data(session, force=True, raise_on_error=True)
+        rset = self.sexecute('EmailAddress X WHERE X address "syt@logilab.fr"')
+        self.assertEqual(len(rset), 0)
+        rset = self.sexecute('Any X WHERE X use_email E, X login "sthenault"')
+        self.assertEqual(len(rset), 0)
+
+    def test_external_entity(self):
+        dfsource = self.repo.sources_by_uri['myotherfeed']
+        session = self.repo.internal_session()
+        stats = dfsource.pull_data(session, force=True, raise_on_error=True)
+        user = self.execute('CWUser X WHERE X login "sthenault"').get_entity(0, 0)
+        self.assertEqual(user.creation_date, datetime(2010, 01, 22, 10, 27, 59))
+        self.assertEqual(user.modification_date, datetime(2011, 01, 25, 14, 14, 06))
+        self.assertEqual(user.cwuri, 'http://pouet.org/5')
+        self.assertEqual(user.cw_source[0].name, 'myfeed')
 
 if __name__ == '__main__':
     from logilab.common.testlib import unittest_main
--- a/test/unittest_dbapi.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/test/unittest_dbapi.py	Tue Jun 28 16:33:53 2011 +0200
@@ -32,7 +32,8 @@
     def test_public_repo_api(self):
         cnx = self.login('anon')
         self.assertEqual(cnx.get_schema(), self.repo.schema)
-        self.assertEqual(cnx.source_defs(), {'system': {'type': 'native', 'uri': 'system'}})
+        self.assertEqual(cnx.source_defs(), {'system': {'type': 'native', 'uri': 'system',
+                                                        'use-cwuri-as-url': False}})
         self.restore_connection() # proper way to close cnx
         self.assertRaises(ProgrammingError, cnx.get_schema)
         self.assertRaises(ProgrammingError, cnx.source_defs)
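
Both the repository and dbapi tests now expect source_defs() to expose a
'use-cwuri-as-url' flag per source, and cw_metainformation() carries it
along; when the flag is set for a source, an entity's cwuri can serve
directly as its URL (compare the absolute_url() assertions returning
'http://pouet.org/5' in unittest_parsers above). A hedged sketch of that
choice (function and dict shapes mirror the tests, not the cubicweb code):

    def absolute_url(entity_cwuri, local_url, source_def):
        if source_def.get('use-cwuri-as-url'):
            return entity_cwuri          # external URI is the canonical URL
        return local_url                 # fall back to a local instance URL

    sysdef = {'type': 'native', 'uri': 'system', 'use-cwuri-as-url': False}
    assert absolute_url('http://pouet.org/5',
                        'http://testing.fr/cubicweb/cwuser/5',
                        sysdef) == 'http://testing.fr/cubicweb/cwuser/5'
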
--- a/test/unittest_entity.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/test/unittest_entity.py	Tue Jun 28 16:33:53 2011 +0200
@@ -572,7 +572,7 @@
         self.assertEqual(person.rest_path(), 'personne/doe')
         # ambiguity test
         person2 = req.create_entity('Personne', prenom=u'remi', nom=u'doe')
-        person.clear_all_caches()
+        person.cw_clear_all_caches()
         self.assertEqual(person.rest_path(), 'personne/eid/%s' % person.eid)
         self.assertEqual(person2.rest_path(), 'personne/eid/%s' % person2.eid)
         # unique attr with None value (wikiid in this case)
@@ -610,7 +610,9 @@
         req = self.request()
         note = req.create_entity('Note', type=u'z')
         metainf = note.cw_metainformation()
-        self.assertEqual(metainf, {'source': {'type': 'native', 'uri': 'system'}, 'type': u'Note', 'extid': None})
+        self.assertEqual(metainf, {'source': {'type': 'native', 'uri': 'system',
+                                              'use-cwuri-as-url': False},
+                                   'type': u'Note', 'extid': None})
         self.assertEqual(note.absolute_url(), 'http://testing.fr/cubicweb/note/%s' % note.eid)
         metainf['source'] = metainf['source'].copy()
         metainf['source']['base-url']  = 'http://cubicweb2.com/'
--- a/test/unittest_schema.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/test/unittest_schema.py	Tue Jun 28 16:33:53 2011 +0200
@@ -29,7 +29,7 @@
 from yams import BadSchemaDefinition
 from yams.constraints import SizeConstraint, StaticVocabularyConstraint
 from yams.buildobjs import RelationDefinition, EntityType, RelationType
-from yams.reader import PyFileReader
+from yams.reader import fill_schema
 
 from cubicweb.schema import (
     CubicWebSchema, CubicWebEntitySchema, CubicWebSchemaLoader,
@@ -158,7 +158,7 @@
         self.assert_(isinstance(schema, CubicWebSchema))
         self.assertEqual(schema.name, 'data')
         entities = sorted([str(e) for e in schema.entities()])
-        expected_entities = ['BaseTransition', 'Bookmark', 'Boolean', 'Bytes', 'Card',
+        expected_entities = ['BaseTransition', 'BigInt', 'Bookmark', 'Boolean', 'Bytes', 'Card',
                              'Date', 'Datetime', 'Decimal',
                              'CWCache', 'CWConstraint', 'CWConstraintType', 'CWEType',
                              'CWAttribute', 'CWGroup', 'EmailAddress', 'CWRelation',
@@ -208,7 +208,7 @@
 
                               'read_permission', 'relation_type', 'relations', 'require_group',
 
-                              'specializes', 'state_of', 'subworkflow', 'subworkflow_exit', 'subworkflow_state', 'surname', 'symmetric', 'synopsis',
+                              'specializes', 'state_of', 'subworkflow', 'subworkflow_exit', 'subworkflow_state', 'surname', 'symmetric', 'synchronizing', 'synopsis',
 
                               'tags', 'timestamp', 'title', 'to_entity', 'to_state', 'transition_of', 'travaille', 'type',
 
@@ -259,18 +259,23 @@
         self.assertEqual([x.expression for x in aschema.get_rqlexprs('update')],
                           ['U has_update_permission X'])
 
+    def test_nonregr_allowed_type_names(self):
+        schema = CubicWebSchema('Test Schema')
+        schema.add_entity_type(EntityType('NaN'))
+
+
 class BadSchemaTC(TestCase):
     def setUp(self):
         self.loader = CubicWebSchemaLoader()
         self.loader.defined = {}
         self.loader.loaded_files = []
         self.loader.post_build_callbacks = []
-        self.loader._pyreader = PyFileReader(self.loader)
 
     def _test(self, schemafile, msg):
         self.loader.handle_file(join(DATADIR, schemafile))
+        sch = self.loader.schemacls('toto')
         with self.assertRaises(BadSchemaDefinition) as cm:
-            self.loader._build_schema('toto', False)
+            fill_schema(sch, self.loader.defined, False)
         self.assertEqual(str(cm.exception), msg)
 
     def test_lowered_etype(self):
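
The BadSchemaTC changes reflect the yams API move from the loader's private
`_build_schema` method to the module-level `fill_schema` function: the schema
instance is now created explicitly and populated from the loader's `defined`
mapping. A condensed sketch of that sequence, using the test module's
`CubicWebSchemaLoader` and `DATADIR` (the schema file name is hypothetical):

    from os.path import join
    from yams.reader import fill_schema

    loader = CubicWebSchemaLoader()
    loader.defined = {}
    loader.loaded_files = []
    loader.post_build_callbacks = []
    loader.handle_file(join(DATADIR, 'bad_schema.py'))  # hypothetical file
    schema = loader.schemacls('toto')           # create an empty schema...
    fill_schema(schema, loader.defined, False)  # ...then fill it from defined
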
--- a/test/unittest_selectors.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/test/unittest_selectors.py	Tue Jun 28 16:33:53 2011 +0200
@@ -102,6 +102,10 @@
         self.assertIs(csel.search_selector(is_instance), sel)
         csel = AndSelector(Selector(), sel)
         self.assertIs(csel.search_selector(is_instance), sel)
+        self.assertIs(csel.search_selector((AndSelector, OrSelector)), csel)
+        self.assertIs(csel.search_selector((OrSelector, AndSelector)), csel)
+        self.assertIs(csel.search_selector((is_instance, score_entity)), sel)
+        self.assertIs(csel.search_selector((score_entity, is_instance)), sel)
 
     def test_inplace_and(self):
         selector = _1_()
@@ -193,7 +197,7 @@
 class WorkflowSelectorTC(CubicWebTC):
     def _commit(self):
         self.commit()
-        self.wf_entity.clear_all_caches()
+        self.wf_entity.cw_clear_all_caches()
 
     def setup_database(self):
         wf = self.shell().add_workflow("wf_test", 'StateFull', default=True)
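
The four new assertions in the search-selector test above rely on
`search_selector` now accepting a tuple of selector classes, with
`isinstance`-like semantics: the first node of the selector tree matching any
class in the tuple is returned, whether that is the composite itself or one
of its leaves. A sketch reusing the test's own objects:

    # `sel` is an is_instance selector and csel = AndSelector(Selector(), sel),
    # as built in test_search_selector above
    assert csel.search_selector((AndSelector, OrSelector)) is csel
    assert csel.search_selector((score_entity, is_instance)) is sel
    # order inside the tuple does not matter, only membership does
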
--- a/test/unittest_utils.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/test/unittest_utils.py	Tue Jun 28 16:33:53 2011 +0200
@@ -21,9 +21,12 @@
 import decimal
 import datetime
 
+
 from logilab.common.testlib import TestCase, DocTest, unittest_main
 
-from cubicweb.utils import make_uid, UStringIO, SizeConstrainedList, RepeatList
+from cubicweb.devtools.testlib import CubicWebTC
+from cubicweb.utils import (make_uid, UStringIO, SizeConstrainedList,
+                            RepeatList, HTMLHead)
 from cubicweb.entity import Entity
 
 try:
@@ -155,6 +158,102 @@
     def test_encoding_unknown_stuff(self):
         self.assertEqual(self.encode(TestCase), 'null')
 
+class HTMLHeadTC(CubicWebTC):
+    def test_concat_urls(self):
+        base_url = u'http://test.fr/data/'
+        head = HTMLHead(base_url)
+        urls = [base_url + u'bob1.js',
+                base_url + u'bob2.js',
+                base_url + u'bob3.js']
+        result = head.concat_urls(urls)
+        expected = u'http://test.fr/data/??bob1.js,bob2.js,bob3.js'
+        self.assertEqual(result, expected)
+
+    def test_group_urls(self):
+        base_url = u'http://test.fr/data/'
+        head = HTMLHead(base_url)
+        urls_spec = [(base_url + u'bob0.js', None),
+                     (base_url + u'bob1.js', None),
+                     (u'http://ext.com/bob2.js', None),
+                     (u'http://ext.com/bob3.js', None),
+                     (base_url + u'bob4.css', 'all'),
+                     (base_url + u'bob5.css', 'all'),
+                     (base_url + u'bob6.css', 'print'),
+                     (base_url + u'bob7.css', 'print'),
+                     (base_url + u'bob8.css', ('all', u'[if IE 8]')),
+                     (base_url + u'bob9.css', ('print', u'[if IE 8]'))
+                     ]
+        result = head.group_urls(urls_spec)
+        expected = [(base_url + u'??bob0.js,bob1.js', None),
+                    (u'http://ext.com/bob2.js', None),
+                    (u'http://ext.com/bob3.js', None),
+                    (base_url + u'??bob4.css,bob5.css', 'all'),
+                    (base_url + u'??bob6.css,bob7.css', 'print'),
+                    (base_url + u'bob8.css', ('all', u'[if IE 8]')),
+                    (base_url + u'bob9.css', ('print', u'[if IE 8]'))
+                    ]
+        self.assertEqual(list(result), expected)
+
+    def test_getvalue_with_concat(self):
+        base_url = u'http://test.fr/data/'
+        head = HTMLHead(base_url)
+        head.add_js(base_url + u'bob0.js')
+        head.add_js(base_url + u'bob1.js')
+        head.add_js(u'http://ext.com/bob2.js')
+        head.add_js(u'http://ext.com/bob3.js')
+        head.add_css(base_url + u'bob4.css')
+        head.add_css(base_url + u'bob5.css')
+        head.add_css(base_url + u'bob6.css', 'print')
+        head.add_css(base_url + u'bob7.css', 'print')
+        head.add_ie_css(base_url + u'bob8.css')
+        head.add_ie_css(base_url + u'bob9.css', 'print', u'[if lt IE 7]')
+        result = head.getvalue()
+        expected = u"""<head>
+<link rel="stylesheet" type="text/css" media="all" href="http://test.fr/data/??bob4.css,bob5.css"/>
+<link rel="stylesheet" type="text/css" media="print" href="http://test.fr/data/??bob6.css,bob7.css"/>
+<!--[if lt IE 8]>
+<link rel="stylesheet" type="text/css" media="all" href="http://test.fr/data/bob8.css"/>
+<!--[if lt IE 7]>
+<link rel="stylesheet" type="text/css" media="print" href="http://test.fr/data/bob9.css"/>
+<![endif]--> 
+<script type="text/javascript" src="http://test.fr/data/??bob0.js,bob1.js"></script>
+<script type="text/javascript" src="http://ext.com/bob2.js"></script>
+<script type="text/javascript" src="http://ext.com/bob3.js"></script>
+</head>
+"""
+        self.assertEqual(result, expected)
+
+    def test_getvalue_without_concat(self):
+        base_url = u'http://test.fr/data/'
+        head = HTMLHead()
+        head.add_js(base_url + u'bob0.js')
+        head.add_js(base_url + u'bob1.js')
+        head.add_js(u'http://ext.com/bob2.js')
+        head.add_js(u'http://ext.com/bob3.js')
+        head.add_css(base_url + u'bob4.css')
+        head.add_css(base_url + u'bob5.css')
+        head.add_css(base_url + u'bob6.css', 'print')
+        head.add_css(base_url + u'bob7.css', 'print')
+        head.add_ie_css(base_url + u'bob8.css')
+        head.add_ie_css(base_url + u'bob9.css', 'print', u'[if lt IE 7]')
+        result = head.getvalue()
+        expected = u"""<head>
+<link rel="stylesheet" type="text/css" media="all" href="http://test.fr/data/bob4.css"/>
+<link rel="stylesheet" type="text/css" media="all" href="http://test.fr/data/bob5.css"/>
+<link rel="stylesheet" type="text/css" media="print" href="http://test.fr/data/bob6.css"/>
+<link rel="stylesheet" type="text/css" media="print" href="http://test.fr/data/bob7.css"/>
+<!--[if lt IE 8]>
+<link rel="stylesheet" type="text/css" media="all" href="http://test.fr/data/bob8.css"/>
+<!--[if lt IE 7]>
+<link rel="stylesheet" type="text/css" media="print" href="http://test.fr/data/bob9.css"/>
+<![endif]--> 
+<script type="text/javascript" src="http://test.fr/data/bob0.js"></script>
+<script type="text/javascript" src="http://test.fr/data/bob1.js"></script>
+<script type="text/javascript" src="http://ext.com/bob2.js"></script>
+<script type="text/javascript" src="http://ext.com/bob3.js"></script>
+</head>
+"""
+        self.assertEqual(result, expected)
 
 class DocTest(DocTest):
     from cubicweb import utils as module
--- a/toolsutils.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/toolsutils.py	Tue Jun 28 16:33:53 2011 +0200
@@ -159,15 +159,11 @@
         print '-> set permissions to 0600 for %s' % filepath
     chmod(filepath, 0600)
 
-def read_config(config_file):
-    """read the instance configuration from a file and return it as a
-    dictionnary
-
-    :type config_file: str
-    :param config_file: path to the configuration file
-
-    :rtype: dict
-    :return: a dictionary with specified values associated to option names
+def read_config(config_file, raise_if_unreadable=False):
+    """read some simple configuration from `config_file` and return it as a
+    dictionary. If `raise_if_unreadable` is false (the default), an empty
+    dictionary will be returned if the file is inexistant or unreadable, else
+    :exc:`ExecutionError` will be raised.
     """
     from logilab.common.fileutils import lines
     config = current = {}
@@ -190,8 +186,12 @@
             value = value.strip()
             current[option] = value or None
     except IOError, ex:
-        warning('missing or non readable configuration file %s (%s)',
-                config_file, ex)
+        if raise_if_unreadable:
+            raise ExecutionError('%s. Are you logged in as the correct user '
+                                 'to use this instance?' % ex)
+        else:
+            warning('missing or non readable configuration file %s (%s)',
+                    config_file, ex)
     return config
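
A usage sketch of the new flag (the configuration path is hypothetical): by
default a missing file only logs a warning and yields an empty dict, while
`raise_if_unreadable=True` lets callers turn the condition into a hard
failure:

    from cubicweb.toolsutils import read_config

    config = read_config('/etc/cubicweb.d/myapp/sources')  # hypothetical path
    if not config:
        print 'no configuration found, falling back to defaults'

    try:
        read_config('/etc/cubicweb.d/myapp/sources', raise_if_unreadable=True)
    except Exception, ex:  # ExecutionError; broad catch keeps the sketch simple
        print 'abort: %s' % ex
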
 
 
--- a/uilib.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/uilib.py	Tue Jun 28 16:33:53 2011 +0200
@@ -62,12 +62,18 @@
         return value
     if attrtype == 'Date':
         return ustrftime(value, req.property_value('ui.date-format'))
-    if attrtype in ('Time', 'TZTime'):
+    if attrtype == 'Time':
         return ustrftime(value, req.property_value('ui.time-format'))
-    if attrtype in ('Datetime', 'TZDatetime'):
+    if attrtype == 'TZTime':
+        return ustrftime(value, req.property_value('ui.time-format')) + u' UTC'
+    if attrtype == 'Datetime':
         if displaytime:
             return ustrftime(value, req.property_value('ui.datetime-format'))
         return ustrftime(value, req.property_value('ui.date-format'))
+    if attrtype == 'TZDatetime':
+        if displaytime:
+            return ustrftime(value, req.property_value('ui.datetime-format')) + u' UTC'
+        return ustrftime(value, req.property_value('ui.date-format'))
     if attrtype == 'Boolean':
         if value:
             return req._('yes')
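
The split above lets TZ-aware values advertise their timezone: TZDatetime and
TZTime values are rendered like their naive counterparts, with an explicit
' UTC' suffix appended. A worked sketch of the TZDatetime branch, assuming a
'%Y/%m/%d %H:%M' datetime format (the helper name is illustrative; the real
code goes through `ustrftime` and the request's property values):

    from datetime import datetime

    def format_tzdatetime(value, fmt='%Y/%m/%d %H:%M'):
        # same formatting as a naive Datetime, plus an explicit UTC marker
        return unicode(value.strftime(fmt)) + u' UTC'

    print format_tzdatetime(datetime(2011, 6, 28, 16, 33))
    # -> 2011/06/28 16:33 UTC
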
--- a/utils.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/utils.py	Tue Jun 28 16:33:53 2011 +0200
@@ -51,20 +51,6 @@
     return str(key) + uuid4().hex
 
 
-def dump_class(cls, clsname):
-    """create copy of a class by creating an empty class inheriting
-    from the given cls.
-
-    Those class will be used as place holder for attribute and relation
-    description
-    """
-    # type doesn't accept unicode name
-    # return type.__new__(type, str(clsname), (cls,), {})
-    # __autogenerated__ attribute is just a marker
-    return type(str(clsname), (cls,), {'__autogenerated__': True,
-                                       '__doc__': cls.__doc__,
-                                       '__module__': cls.__module__})
-
 def support_args(callable, *argnames):
     """return true if the callable support given argument names"""
     if isinstance(callable, type):
@@ -241,7 +227,7 @@
     xhtml_safe_script_opening = u'<script type="text/javascript"><!--//--><![CDATA[//><!--\n'
     xhtml_safe_script_closing = u'\n//--><!]]></script>'
 
-    def __init__(self):
+    def __init__(self, datadir_url=None):
         super(HTMLHead, self).__init__()
         self.jsvars = []
         self.jsfiles = []
@@ -249,6 +235,7 @@
         self.ie_cssfiles = []
         self.post_inlined_scripts = []
         self.pagedata_unload = False
+        self.datadir_url = datadir_url
 
 
     def add_raw(self, rawheader):
@@ -285,7 +272,7 @@
         if jsfile not in self.jsfiles:
             self.jsfiles.append(jsfile)
 
-    def add_css(self, cssfile, media):
+    def add_css(self, cssfile, media='all'):
         """adds `cssfile` to the list of javascripts used in the webpage
 
         This function checks if the file has already been added
@@ -305,6 +292,45 @@
             self.post_inlined_scripts.append(self.js_unload_code)
             self.pagedata_unload = True
 
+    def concat_urls(self, urls):
+        """concatenates urls into one url usable by Apache mod_concat
+
+        This method returns the url without modifying it if there is only
+        one element in the list
+        :param urls: list of local urls/filenames to concatenate
+        """
+        if len(urls) == 1:
+            return urls[0]
+        len_prefix = len(self.datadir_url)
+        concated = u','.join(url[len_prefix:] for url in urls)
+        return (u'%s??%s' % (self.datadir_url, concated))
+
+    def group_urls(self, urls_spec):
+        """parses urls_spec in order to generate concatenated urls
+        for js and css includes
+
+        This method checks if the file is local and if it shares options
+        with direct neighbors
+        :param urls_spec: entire list of urls/filenames to inspect
+        """
+        concatable = []
+        prev_islocal = False
+        prev_key = None
+        for url, key in urls_spec:
+            islocal = url.startswith(self.datadir_url)
+            if concatable and (islocal != prev_islocal or key != prev_key):
+                yield (self.concat_urls(concatable), prev_key)
+                del concatable[:]
+            if not islocal:
+                yield (url, key)
+            else:
+                concatable.append(url)
+            prev_islocal = islocal
+            prev_key = key
+        if concatable:
+            yield (self.concat_urls(concatable), prev_key)
+
+
     def getvalue(self, skiphead=False):
         """reimplement getvalue to provide a consistent (and somewhat browser
         optimized, cf. http://stevesouders.com/cuzillion) order in external
@@ -322,18 +348,20 @@
                 w(vardecl + u'\n')
             w(self.xhtml_safe_script_closing)
         # 2/ css files
-        for cssfile, media in self.cssfiles:
+        for cssfile, media in (self.group_urls(self.cssfiles) if self.datadir_url else self.cssfiles):
             w(u'<link rel="stylesheet" type="text/css" media="%s" href="%s"/>\n' %
               (media, xml_escape(cssfile)))
         # 3/ ie css if necessary
         if self.ie_cssfiles:
-            for cssfile, media, iespec in self.ie_cssfiles:
+            ie_cssfiles = ((x, (y, z)) for x, y, z in self.ie_cssfiles)
+            for cssfile, (media, iespec) in (self.group_urls(ie_cssfiles) if self.datadir_url else ie_cssfiles):
                 w(u'<!--%s>\n' % iespec)
                 w(u'<link rel="stylesheet" type="text/css" media="%s" href="%s"/>\n' %
                   (media, xml_escape(cssfile)))
             w(u'<![endif]--> \n')
         # 4/ js files
-        for jsfile in self.jsfiles:
+        jsfiles = ((x, None) for x in self.jsfiles)
+        for jsfile, media in self.group_urls(jsfiles) if self.datadir_url else jsfiles:
             w(u'<script type="text/javascript" src="%s"></script>\n' %
               xml_escape(jsfile))
         # 5/ post inlined scripts (i.e. scripts depending on other JS files)
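
The `??` convention handled here follows Apache's mod_concat module:
everything after `??` is a comma-separated list of paths relative to the
prefix, served back as one concatenated response. A round-trip sketch with a
hypothetical datadir url:

    from cubicweb.utils import HTMLHead

    base = u'http://example.org/data/'    # hypothetical datadir url
    head = HTMLHead(base)                 # passing datadir_url enables concat
    concat = head.concat_urls([base + u'a.js', base + u'b.js'])
    assert concat == base + u'??a.js,b.js'

    # splitting such a url back into its resources (sketch helper, not part
    # of cubicweb; the client-side equivalent is cw.ajax._listResources)
    prefix, sep, tail = concat.partition(u'??')
    resources = [prefix + p for p in tail.split(u',')] if sep else [concat]
    assert resources == [base + u'a.js', base + u'b.js']
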
--- a/vregistry.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/vregistry.py	Tue Jun 28 16:33:53 2011 +0200
@@ -184,7 +184,10 @@
 
         raise :exc:`NoSelectableObject` if no object applies
         """
-        return self._select_best(self[__oid], *args, **kwargs)
+        obj = self._select_best(self[__oid], *args, **kwargs)
+        if obj is None:
+            raise NoSelectableObject(args, kwargs, self[__oid])
+        return obj
 
     def select_or_none(self, __oid, *args, **kwargs):
         """return the most specific object among those with the given oid
@@ -202,16 +205,18 @@
         context
         """
         for appobjects in self.itervalues():
-            try:
-                yield self._select_best(appobjects, *args, **kwargs)
-            except NoSelectableObject:
+            obj = self._select_best(appobjects, *args, **kwargs)
+            if obj is None:
                 continue
+            yield obj
 
     def _select_best(self, appobjects, *args, **kwargs):
         """return an instance of the most specific object according
         to parameters
 
-        raise `NoSelectableObject` if not object apply
+        return None if no object applies (don't raise `NoSelectableObject`
+        since that's costly when searching appobjects using `possible_objects`,
+        e.g. when searching for hooks).
         """
         if len(args) > 1:
             warn('[3.5] only the request param can not be named when calling select*',
@@ -224,7 +229,7 @@
             elif appobjectscore > 0 and appobjectscore == score:
                 winners.append(appobject)
         if winners is None:
-            raise NoSelectableObject(args, kwargs, appobjects)
+            return None
         if len(winners) > 1:
             # log in production environment / test, error while debugging
             msg = 'select ambiguity: %s\n(args: %s, kwargs: %s)'
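
With `_select_best` now returning None instead of raising, failed selections
cost nothing during bulk scans such as `possible_objects`, while `select`
keeps its raising contract. A usage sketch (the registry name and context
arguments are illustrative):

    # `vreg` is the application objects registry, `req`/`rset` a call context
    view = vreg['views'].select_or_none('primary', req, rset=rset)
    if view is None:
        pass  # nothing selectable, and no exception was built along the way

    from cubicweb import NoSelectableObject
    try:
        view = vreg['views'].select('primary', req, rset=rset)
    except NoSelectableObject:
        pass  # select() still raises, as reinstated in the hunk above
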
--- a/web/component.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/web/component.py	Tue Jun 28 16:33:53 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -57,8 +57,6 @@
     page_link_templ = u'<span class="slice"><a href="%s" title="%s">%s</a></span>'
     selected_page_link_templ = u'<span class="selectedSlice"><a href="%s" title="%s">%s</a></span>'
     previous_page_link_templ = next_page_link_templ = page_link_templ
-    no_previous_page_link = u'&lt;&lt;'
-    no_next_page_link = u'&gt;&gt;'
 
     def __init__(self, req, rset, **kwargs):
         super(NavigationComponent, self).__init__(req, rset=rset, **kwargs)
@@ -131,7 +129,37 @@
             return self.selected_page_link_templ % (url, content, content)
         return self.page_link_templ % (url, content, content)
 
-    def previous_link(self, path, params, content='&lt;&lt;', title=_('previous_results')):
+    @property
+    def prev_icon_url(self):
+        return xml_escape(self._cw.data_url('go_prev.png'))
+
+    @property
+    def next_icon_url(self):
+        return xml_escape(self._cw.data_url('go_next.png'))
+
+    @property
+    def no_previous_page_link(self):
+        return (u'<img src="%s" alt="%s" class="prevnext_nogo"/>' %
+                (self.prev_icon_url, self._cw._('there is no previous page')))
+
+    @property
+    def no_next_page_link(self):
+        return (u'<img src="%s" alt="%s" class="prevnext_nogo"/>' %
+                (self.next_icon_url, self._cw._('there is no next page')))
+
+    @property
+    def no_content_prev_link(self):
+        return (u'<img src="%s" alt="%s" class="prevnext"/>' % (
+                (self.prev_icon_url, self._cw._('no content prev link'))))
+
+    @property
+    def no_content_next_link(self):
+        return (u'<img src="%s" alt="%s" class="prevnext"/>' %
+                (self.next_icon_url, self._cw._('no content next link')))
+
+    def previous_link(self, path, params, content=None, title=_('previous_results')):
+        if not content:
+            content = self.no_content_prev_link
         start = self.starting_from
         if not start :
             return self.no_previous_page_link
@@ -140,7 +168,9 @@
         url = xml_escape(self.page_url(path, params, start, stop))
         return self.previous_page_link_templ % (url, title, content)
 
-    def next_link(self, path, params, content='&gt;&gt;', title=_('next_results')):
+    def next_link(self, path, params, content=None, title=_('next_results')):
+        if not content:
+            content = self.no_content_next_link
         start = self.starting_from + self.page_size
         if start >= self.total:
             return self.no_next_page_link
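
previous_link/next_link now default to icon images: called without explicit
content they fall back to the no_content_* image properties, and when there
is no page to go to they return the no_*_page_link variants, dimmed by the
prevnext_nogo CSS class added further down. A schematic sketch, with `nav`
standing for a selected NavigationComponent instance (hypothetical here):

    html = nav.previous_link('view', {})  # no explicit content given
    # -> on the first page: nav.no_previous_page_link (dimmed go_prev.png)
    # -> otherwise: a link whose body is nav.no_content_prev_link
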
--- a/web/controller.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/web/controller.py	Tue Jun 28 16:33:53 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -114,7 +114,7 @@
                           [recipient], body, subject)
         if not self._cw.vreg.config.sendmails([(msg, [recipient])]):
             msg = self._cw._('could not connect to the SMTP server')
-            url = self._cw.build_url(__message=msg)
+            url = self._cw.build_url(__message=msgid)
             raise Redirect(url)
 
     def reset(self):
@@ -123,8 +123,10 @@
         """
         newparams = {}
         # sets message if needed
-        if self._cw.message:
-            newparams['_cwmsgid'] = self._cw.set_redirect_message(self._cw.message)
+        # XXX - don't call .message twice since it pops the id
+        msg = self._cw.message
+        if msg:
+            newparams['_cwmsgid'] = self._cw.set_redirect_message(msg)
         if self._cw.form.has_key('__action_apply'):
             self._return_to_edition_view(newparams)
         if self._cw.form.has_key('__action_cancel'):
@@ -165,7 +167,7 @@
         elif self._edited_entity:
             # clear caches in case some attribute participating to the rest path
             # has been modified
-            self._edited_entity.clear_all_caches()
+            self._edited_entity.cw_clear_all_caches()
             path = self._edited_entity.rest_path()
         else:
             path = 'view'
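
The reset() fix matters because `_cw.message` is destructive: reading it pops
the stored message id from session data, so a second read comes back empty.
A before/after sketch:

    # anti-pattern (pre-fix): two attribute reads, the second one is empty
    if self._cw.message:                  # first read pops the message id
        broken = self._cw.set_redirect_message(self._cw.message)  # empty here

    # fixed pattern: read once, reuse the local binding
    msg = self._cw.message
    if msg:
        newparams['_cwmsgid'] = self._cw.set_redirect_message(msg)
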
--- a/web/data/cubicweb.ajax.js	Tue Jun 28 16:32:38 2011 +0200
+++ b/web/data/cubicweb.ajax.js	Tue Jun 28 16:33:53 2011 +0200
@@ -22,6 +22,9 @@
  *
  * dummy ultra minimalist implementation of deferred for jQuery
  */
+
+cw.ajax = new Namespace('cw.ajax');
+
 function Deferred() {
     this.__init__(this);
 }
@@ -86,6 +89,65 @@
 
 var JSON_BASE_URL = baseuri() + 'json?';
 
+
+jQuery.extend(cw.ajax, {
+    /* variant of jquery evalScript with cache: true in ajax call */
+    _evalscript: function ( i, elem ) {
+       if ( elem.src ) {
+           jQuery.ajax({
+               url: elem.src,
+               async: false,
+               cache: true,
+               dataType: "script"
+           });
+       } else {
+           jQuery.globalEval( elem.text || elem.textContent || elem.innerHTML || "" );
+       }
+       if ( elem.parentNode ) {
+           elem.parentNode.removeChild( elem );
+       }
+    },
+
+    evalscripts: function ( scripts ) {
+        if ( scripts.length ) {
+            jQuery.each(scripts, cw.ajax._evalscript);
+        }
+    },
+
+    /**
+     * returns the regexp match object if `url` is a mod_concat-like url
+     * (e.g. http://..../data??resource1.js,resource2.js), null otherwise
+     */
+    _modconcatLikeUrl: function(url) {
+        var base = baseuri();
+        if (!base.endswith('/')) { base += '/'; }
+        var modconcat_rgx = new RegExp('(' + base + 'data/([a-z0-9]+/)?)\\?\\?(.+)');
+        return modconcat_rgx.exec(url);
+    },
+
+    /**
+     * decomposes a mod_concat-like url into its corresponding list of
+     * resources' urls
+     * >>> _listResources('http://foo.com/data/??a.js,b.js,c.js')
+     * ['http://foo.com/data/a.js', 'http://foo.com/data/b.js', 'http://foo.com/data/c.js']
+     */
+    _listResources: function(src) {
+        var resources = [];
+        var groups = cw.ajax._modconcatLikeUrl(src);
+        if (groups == null) {
+            resources.push(src);
+        } else {
+            var dataurl = groups[1];
+            $.each(cw.utils.lastOf(groups).split(','),
+                 function() {
+                     resources.push(dataurl + this);
+                 }
+            );
+        }
+        return resources;
+    }
+});
+
 //============= utility function handling remote calls responses. ==============//
 function _loadAjaxHtmlHead($node, $head, tag, srcattr) {
     var jqtagfilter = tag + '[' + srcattr + ']';
@@ -93,28 +155,47 @@
         cw['loaded_'+srcattr] = [];
         var loaded = cw['loaded_'+srcattr];
         jQuery('head ' + jqtagfilter).each(function(i) {
-                   loaded.push(this.getAttribute(srcattr));
-               });
+            // tab1.push.apply(tab1, tab2) <=> tab1 += tab2 (python-wise)
+            loaded.push.apply(loaded, cw.ajax._listResources(this.getAttribute(srcattr)));
+        });
     } else {
         var loaded = cw['loaded_'+srcattr];
     }
     $node.find(tag).each(function(i) {
-        var url = this.getAttribute(srcattr);
+        var $srcnode = jQuery(this);
+        var url = $srcnode.attr(srcattr);
         if (url) {
-            if (jQuery.inArray(url, loaded) == -1) {
-                // take care to <script> tags: jQuery append method script nodes
-                // don't appears in the DOM (See comments on
-                // http://api.jquery.com/append/), which cause undesired
-                // duplicated load in our case. After trying to use bare DOM api
-                // to avoid this, we switched to handle a list of already loaded
-                // stuff ourselves, since bare DOM api gives bug with the
-                // server-response event, since we loose control on when the
-                // script is loaded (jQuery load it immediatly).
-                loaded.push(url);
-                jQuery(this).appendTo($head);
+            /* special handling of <script> tags: script nodes appended by jquery
+             * use uncached ajax calls and do not appear in the DOM
+             * (see the comments in response to Syt on http://api.jquery.com/append/),
+             * which causes undesired duplicate loads in our case. We now keep
+             * a list of already loaded resources ourselves, since the bare DOM api
+             * gives bugs with the server-response event, and we lose control over
+             * when the script is loaded (jQuery loads it immediately). */
+            var resources = cw.ajax._listResources(url);
+            var missingResources = $.grep(resources, function(resource) {
+                return $.inArray(resource, loaded) == -1;
+            });
+            loaded.push.apply(loaded, missingResources);
+            if (missingResources.length == 1) {
+                // only one resource missing: build a node with a single resource url
+                // (maybe the browser has it in cache already)
+                $srcnode.attr(srcattr, missingResources[0]);
+            } else if (missingResources.length > 1) {
+                // several resources missing: build a node with a concatenated
+                // resources url
+                var dataurl = cw.ajax._modconcatLikeUrl(url)[1];
+                var missing_path = $.map(missingResources, function(resource) {
+                    return resource.substring(dataurl.length);
+                });
+                $srcnode.attr(srcattr, dataurl + '??' + missing_path.join(','));
             }
-        } else {
-            jQuery(this).appendTo($head);
+            // === will work if both arguments are of the same type
+            if ( $srcnode.attr('type') === 'text/javascript' ) {
+                cw.ajax.evalscripts($srcnode);
+            } else {
+                $srcnode.appendTo($head);
+            }
         }
     });
     $node.find(jqtagfilter).remove();
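
_loadAjaxHtmlHead now reasons in terms of individual resources rather than
raw urls: a concatenated url is split, already-loaded entries are filtered
out, and only the missing ones are re-concatenated. A server-side Python
sketch of the same set logic (names are illustrative; the authoritative
client code is above):

    def missing_concat_url(url, loaded):
        """sketch of the recombination done in _loadAjaxHtmlHead above"""
        prefix, sep, tail = url.partition(u'??')
        resources = [prefix + p for p in tail.split(u',')] if sep else [url]
        missing = [r for r in resources if r not in loaded]
        loaded.extend(missing)
        if len(missing) <= 1:
            # zero or one missing resource: fetch it directly (browser
            # cache friendly), or nothing at all
            return missing[0] if missing else None
        return prefix + u'??' + u','.join(r[len(prefix):] for r in missing)

    loaded = [u'http://example.org/data/a.js']
    print missing_concat_url(u'http://example.org/data/??a.js,b.js,c.js', loaded)
    # -> http://example.org/data/??b.js,c.js
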
--- a/web/data/cubicweb.css	Tue Jun 28 16:32:38 2011 +0200
+++ b/web/data/cubicweb.css	Tue Jun 28 16:33:53 2011 +0200
@@ -120,6 +120,19 @@
   border: none;
 }
 
+
+img.prevnext {
+  width: 22px;
+  height: 22px;
+}
+
+img.prevnext_nogo {
+  width: 22px;
+  height: 22px;
+  filter:alpha(opacity=25); /* IE */
+  opacity:.25;
+}
+
 fieldset {
   border: none;
 }
--- a/web/data/cubicweb.facets.css	Tue Jun 28 16:32:38 2011 +0200
+++ b/web/data/cubicweb.facets.css	Tue Jun 28 16:33:53 2011 +0200
@@ -109,11 +109,25 @@
 div#facetLoading {
   display: none;
   position: fixed;
-  padding-left: 20px;
+  background: #f2f2f2;
   top: 400px;
   width: 200px;
-  height: 100px;
+  padding: 1em;
   font-size:      120%;
   font-weight:    bold;
   text-align:     center;
 }
+
+div.facetTitleSelected {
+  background: url("required.png") no-repeat right top;
+}
+
+table.filter {
+  background-color: #EBE8D9;
+  border: dotted grey 1px;
+}
+
+div.facet {
+  padding: 0;
+  margin: .3em !important;
+}
--- a/web/data/cubicweb.facets.js	Tue Jun 28 16:32:38 2011 +0200
+++ b/web/data/cubicweb.facets.js	Tue Jun 28 16:33:53 2011 +0200
@@ -238,6 +238,18 @@
     });
 }
 
+// change css class of facets that have a value selected
+function updateFacetTitles() {
+    $('.facet').each(function() {
+        var $divTitle = $(this).find('.facetTitle');
+        var facetSelected = $(this).find('.facetValueSelected');
+        if (facetSelected.length) {
+            $divTitle.addClass('facetTitleSelected');
+        } else {
+            $divTitle.removeClass('facetTitleSelected');
+        }
+    });
+}
 
 // we need to differentiate cases where initFacetBoxEvents is called with one
 // argument or without any argument. If we use `initFacetBoxEvents` as the
@@ -245,4 +257,34 @@
 // his, so we use this small anonymous function instead.
 jQuery(document).ready(function() {
     initFacetBoxEvents();
+    jQuery(cw).bind('facets-content-loaded', onFacetContentLoaded);
+    jQuery(cw).bind('facets-content-loading', onFacetFiltering);
+    jQuery(cw).bind('facets-content-loading', updateFacetTitles);
 });
+
+function showFacetLoading(parentid) {
+    var loadingWidth = 200; // px
+    var loadingHeight = 100; // px
+    var $msg = jQuery('#facetLoading');
+    var $parent = jQuery('#' + parentid);
+    var leftPos = $parent.offset().left + ($parent.width() - loadingWidth) / 2;
+    $parent.fadeTo('normal', 0.2);
+    $msg.css('left', leftPos).show();
+}
+
+function onFacetFiltering(event, divid /* ... */) {
+    showFacetLoading(divid);
+}
+
+function onFacetContentLoaded(event, divid, rql, vid, extraparams) {
+    jQuery('#facetLoading').hide();
+}
+
+jQuery(document).ready(function () {
+    if (jQuery('div.facetBody').length) {
+        var $loadingDiv = $(DIV({id:'facetLoading'},
+                                facetLoadingMsg));
+        $loadingDiv.corner();
+        $('body').append($loadingDiv);
+    }
+});
--- a/web/data/cubicweb.js	Tue Jun 28 16:32:38 2011 +0200
+++ b/web/data/cubicweb.js	Tue Jun 28 16:33:53 2011 +0200
@@ -308,6 +308,17 @@
     },
 
     /**
+     * returns the last element of an array-like object or undefined if empty
+     */
+    lastOf: function(array) {
+        if (array.length) {
+            return array[array.length-1];
+        } else {
+            return undefined;
+        }
+    },
+
+    /**
      * .. function:: difference(lst1, lst2)
      *
      * returns a list containing all elements in `lst1` that are not
--- a/web/data/cubicweb.old.css	Tue Jun 28 16:32:38 2011 +0200
+++ b/web/data/cubicweb.old.css	Tue Jun 28 16:33:53 2011 +0200
@@ -69,6 +69,18 @@
   text-align: center;
 }
 
+img.prevnext {
+  width: 22px;
+  height: 22px;
+}
+
+img.prevnext_nogo {
+  width: 22px;
+  height: 22px;
+  filter:alpha(opacity=25); /* IE */
+  opacity:.25;
+}
+
 p {
   margin: 0em 0px 0.2em;
   padding-top: 2px;
@@ -613,7 +625,7 @@
 
 span.selectedSlice a:visited,
 span.selectedSlice a {
-  color: #000;
+  background-color: #EBE8D9;
 }
 
 /* FIXME should be moved to cubes/folder */
Binary file web/data/go_next.png has changed
Binary file web/data/go_prev.png has changed
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/jquery.qtip.min.js	Tue Jun 28 16:33:53 2011 +0200
@@ -0,0 +1,15 @@
+/*
+ * jquery.qtip. The jQuery tooltip plugin
+ *
+ * Copyright (c) 2009 Craig Thompson
+ * http://craigsworks.com
+ *
+ * Licensed under MIT
+ * http://www.opensource.org/licenses/mit-license.php
+ *
+ * Launch  : February 2009
+ * Version : 1.0.0-rc3
+ * Released: Tuesday 12th May, 2009 - 00:00
+ * Debug: jquery.qtip.debug.js
+ */
+(function(f){f.fn.qtip=function(B,u){var y,t,A,s,x,w,v,z;if(typeof B=="string"){if(typeof f(this).data("qtip")!=="object"){f.fn.qtip.log.error.call(self,1,f.fn.qtip.constants.NO_TOOLTIP_PRESENT,false)}if(B=="api"){return f(this).data("qtip").interfaces[f(this).data("qtip").current]}else{if(B=="interfaces"){return f(this).data("qtip").interfaces}}}else{if(!B){B={}}if(typeof B.content!=="object"||(B.content.jquery&&B.content.length>0)){B.content={text:B.content}}if(typeof B.content.title!=="object"){B.content.title={text:B.content.title}}if(typeof B.position!=="object"){B.position={corner:B.position}}if(typeof B.position.corner!=="object"){B.position.corner={target:B.position.corner,tooltip:B.position.corner}}if(typeof B.show!=="object"){B.show={when:B.show}}if(typeof B.show.when!=="object"){B.show.when={event:B.show.when}}if(typeof B.show.effect!=="object"){B.show.effect={type:B.show.effect}}if(typeof B.hide!=="object"){B.hide={when:B.hide}}if(typeof B.hide.when!=="object"){B.hide.when={event:B.hide.when}}if(typeof B.hide.effect!=="object"){B.hide.effect={type:B.hide.effect}}if(typeof B.style!=="object"){B.style={name:B.style}}B.style=c(B.style);s=f.extend(true,{},f.fn.qtip.defaults,B);s.style=a.call({options:s},s.style);s.user=f.extend(true,{},B)}return f(this).each(function(){if(typeof B=="string"){w=B.toLowerCase();A=f(this).qtip("interfaces");if(typeof A=="object"){if(u===true&&w=="destroy"){while(A.length>0){A[A.length-1].destroy()}}else{if(u!==true){A=[f(this).qtip("api")]}for(y=0;y<A.length;y++){if(w=="destroy"){A[y].destroy()}else{if(A[y].status.rendered===true){if(w=="show"){A[y].show()}else{if(w=="hide"){A[y].hide()}else{if(w=="focus"){A[y].focus()}else{if(w=="disable"){A[y].disable(true)}else{if(w=="enable"){A[y].disable(false)}}}}}}}}}}}else{v=f.extend(true,{},s);v.hide.effect.length=s.hide.effect.length;v.show.effect.length=s.show.effect.length;if(v.position.container===false){v.position.container=f(document.body)}if(v.position.target===false){v.position.target=f(this)}if(v.show.when.target===false){v.show.when.target=f(this)}if(v.hide.when.target===false){v.hide.when.target=f(this)}t=f.fn.qtip.interfaces.length;for(y=0;y<t;y++){if(typeof f.fn.qtip.interfaces[y]=="undefined"){t=y;break}}x=new d(f(this),v,t);f.fn.qtip.interfaces[t]=x;if(typeof f(this).data("qtip")=="object"){if(typeof f(this).attr("qtip")==="undefined"){f(this).data("qtip").current=f(this).data("qtip").interfaces.length}f(this).data("qtip").interfaces.push(x)}else{f(this).data("qtip",{current:0,interfaces:[x]})}if(v.content.prerender===false&&v.show.when.event!==false&&v.show.ready!==true){v.show.when.target.bind(v.show.when.event+".qtip-"+t+"-create",{qtip:t},function(C){z=f.fn.qtip.interfaces[C.data.qtip];z.options.show.when.target.unbind(z.options.show.when.event+".qtip-"+C.data.qtip+"-create");z.cache.mouse={x:C.pageX,y:C.pageY};p.call(z);z.options.show.when.target.trigger(z.options.show.when.event)})}else{x.cache.mouse={x:v.show.when.target.offset().left,y:v.show.when.target.offset().top};p.call(x)}}})};function d(u,t,v){var s=this;s.id=v;s.options=t;s.status={animated:false,rendered:false,disabled:false,focused:false};s.elements={target:u.addClass(s.options.style.classes.target),tooltip:null,wrapper:null,content:null,contentWrapper:null,title:null,button:null,tip:null,bgiframe:null};s.cache={mouse:{},position:{},toggle:0};s.timers={};f.extend(s,s.options.api,{show:function(y){var x,z;if(!s.status.rendered){return 
f.fn.qtip.log.error.call(s,2,f.fn.qtip.constants.TOOLTIP_NOT_RENDERED,"show")}if(s.elements.tooltip.css("display")!=="none"){return s}s.elements.tooltip.stop(true,false);x=s.beforeShow.call(s,y);if(x===false){return s}function w(){if(s.options.position.type!=="static"){s.focus()}s.onShow.call(s,y);if(f.browser.msie){s.elements.tooltip.get(0).style.removeAttribute("filter")}}s.cache.toggle=1;if(s.options.position.type!=="static"){s.updatePosition(y,(s.options.show.effect.length>0))}if(typeof s.options.show.solo=="object"){z=f(s.options.show.solo)}else{if(s.options.show.solo===true){z=f("div.qtip").not(s.elements.tooltip)}}if(z){z.each(function(){if(f(this).qtip("api").status.rendered===true){f(this).qtip("api").hide()}})}if(typeof s.options.show.effect.type=="function"){s.options.show.effect.type.call(s.elements.tooltip,s.options.show.effect.length);s.elements.tooltip.queue(function(){w();f(this).dequeue()})}else{switch(s.options.show.effect.type.toLowerCase()){case"fade":s.elements.tooltip.fadeIn(s.options.show.effect.length,w);break;case"slide":s.elements.tooltip.slideDown(s.options.show.effect.length,function(){w();if(s.options.position.type!=="static"){s.updatePosition(y,true)}});break;case"grow":s.elements.tooltip.show(s.options.show.effect.length,w);break;default:s.elements.tooltip.show(null,w);break}s.elements.tooltip.addClass(s.options.style.classes.active)}return f.fn.qtip.log.error.call(s,1,f.fn.qtip.constants.EVENT_SHOWN,"show")},hide:function(y){var x;if(!s.status.rendered){return f.fn.qtip.log.error.call(s,2,f.fn.qtip.constants.TOOLTIP_NOT_RENDERED,"hide")}else{if(s.elements.tooltip.css("display")==="none"){return s}}clearTimeout(s.timers.show);s.elements.tooltip.stop(true,false);x=s.beforeHide.call(s,y);if(x===false){return s}function w(){s.onHide.call(s,y)}s.cache.toggle=0;if(typeof s.options.hide.effect.type=="function"){s.options.hide.effect.type.call(s.elements.tooltip,s.options.hide.effect.length);s.elements.tooltip.queue(function(){w();f(this).dequeue()})}else{switch(s.options.hide.effect.type.toLowerCase()){case"fade":s.elements.tooltip.fadeOut(s.options.hide.effect.length,w);break;case"slide":s.elements.tooltip.slideUp(s.options.hide.effect.length,w);break;case"grow":s.elements.tooltip.hide(s.options.hide.effect.length,w);break;default:s.elements.tooltip.hide(null,w);break}s.elements.tooltip.removeClass(s.options.style.classes.active)}return f.fn.qtip.log.error.call(s,1,f.fn.qtip.constants.EVENT_HIDDEN,"hide")},updatePosition:function(w,x){var C,G,L,J,H,E,y,I,B,D,K,A,F,z;if(!s.status.rendered){return f.fn.qtip.log.error.call(s,2,f.fn.qtip.constants.TOOLTIP_NOT_RENDERED,"updatePosition")}else{if(s.options.position.type=="static"){return 
f.fn.qtip.log.error.call(s,1,f.fn.qtip.constants.CANNOT_POSITION_STATIC,"updatePosition")}}G={position:{left:0,top:0},dimensions:{height:0,width:0},corner:s.options.position.corner.target};L={position:s.getPosition(),dimensions:s.getDimensions(),corner:s.options.position.corner.tooltip};if(s.options.position.target!=="mouse"){if(s.options.position.target.get(0).nodeName.toLowerCase()=="area"){J=s.options.position.target.attr("coords").split(",");for(C=0;C<J.length;C++){J[C]=parseInt(J[C])}H=s.options.position.target.parent("map").attr("name");E=f('img[usemap="#'+H+'"]:first').offset();G.position={left:Math.floor(E.left+J[0]),top:Math.floor(E.top+J[1])};switch(s.options.position.target.attr("shape").toLowerCase()){case"rect":G.dimensions={width:Math.ceil(Math.abs(J[2]-J[0])),height:Math.ceil(Math.abs(J[3]-J[1]))};break;case"circle":G.dimensions={width:J[2]+1,height:J[2]+1};break;case"poly":G.dimensions={width:J[0],height:J[1]};for(C=0;C<J.length;C++){if(C%2==0){if(J[C]>G.dimensions.width){G.dimensions.width=J[C]}if(J[C]<J[0]){G.position.left=Math.floor(E.left+J[C])}}else{if(J[C]>G.dimensions.height){G.dimensions.height=J[C]}if(J[C]<J[1]){G.position.top=Math.floor(E.top+J[C])}}}G.dimensions.width=G.dimensions.width-(G.position.left-E.left);G.dimensions.height=G.dimensions.height-(G.position.top-E.top);break;default:return f.fn.qtip.log.error.call(s,4,f.fn.qtip.constants.INVALID_AREA_SHAPE,"updatePosition");break}G.dimensions.width-=2;G.dimensions.height-=2}else{if(s.options.position.target.add(document.body).length===1){G.position={left:f(document).scrollLeft(),top:f(document).scrollTop()};G.dimensions={height:f(window).height(),width:f(window).width()}}else{if(typeof s.options.position.target.attr("qtip")!=="undefined"){G.position=s.options.position.target.qtip("api").cache.position}else{G.position=s.options.position.target.offset()}G.dimensions={height:s.options.position.target.outerHeight(),width:s.options.position.target.outerWidth()}}}y=f.extend({},G.position);if(G.corner.search(/right/i)!==-1){y.left+=G.dimensions.width}if(G.corner.search(/bottom/i)!==-1){y.top+=G.dimensions.height}if(G.corner.search(/((top|bottom)Middle)|center/)!==-1){y.left+=(G.dimensions.width/2)}if(G.corner.search(/((left|right)Middle)|center/)!==-1){y.top+=(G.dimensions.height/2)}}else{G.position=y={left:s.cache.mouse.x,top:s.cache.mouse.y};G.dimensions={height:1,width:1}}if(L.corner.search(/right/i)!==-1){y.left-=L.dimensions.width}if(L.corner.search(/bottom/i)!==-1){y.top-=L.dimensions.height}if(L.corner.search(/((top|bottom)Middle)|center/)!==-1){y.left-=(L.dimensions.width/2)}if(L.corner.search(/((left|right)Middle)|center/)!==-1){y.top-=(L.dimensions.height/2)}I=(f.browser.msie)?1:0;B=(f.browser.msie&&parseInt(f.browser.version.charAt(0))===6)?1:0;if(s.options.style.border.radius>0){if(L.corner.search(/Left/)!==-1){y.left-=s.options.style.border.radius}else{if(L.corner.search(/Right/)!==-1){y.left+=s.options.style.border.radius}}if(L.corner.search(/Top/)!==-1){y.top-=s.options.style.border.radius}else{if(L.corner.search(/Bottom/)!==-1){y.top+=s.options.style.border.radius}}}if(I){if(L.corner.search(/top/)!==-1){y.top-=I}else{if(L.corner.search(/bottom/)!==-1){y.top+=I}}if(L.corner.search(/left/)!==-1){y.left-=I}else{if(L.corner.search(/right/)!==-1){y.left+=I}}if(L.corner.search(/leftMiddle|rightMiddle/)!==-1){y.top-=1}}if(s.options.position.adjust.screen===true){y=o.call(s,y,G,L)}if(s.options.position.target==="mouse"&&s.options.position.adjust.mouse===true){if(s.options.position.adjust.screen===true&&s.elem
ents.tip){K=s.elements.tip.attr("rel")}else{K=s.options.position.corner.tooltip}y.left+=(K.search(/right/i)!==-1)?-6:6;y.top+=(K.search(/bottom/i)!==-1)?-6:6}if(!s.elements.bgiframe&&f.browser.msie&&parseInt(f.browser.version.charAt(0))==6){f("select, object").each(function(){A=f(this).offset();A.bottom=A.top+f(this).height();A.right=A.left+f(this).width();if(y.top+L.dimensions.height>=A.top&&y.left+L.dimensions.width>=A.left){k.call(s)}})}y.left+=s.options.position.adjust.x;y.top+=s.options.position.adjust.y;F=s.getPosition();if(y.left!=F.left||y.top!=F.top){z=s.beforePositionUpdate.call(s,w);if(z===false){return s}s.cache.position=y;if(x===true){s.status.animated=true;s.elements.tooltip.animate(y,200,"swing",function(){s.status.animated=false})}else{s.elements.tooltip.css(y)}s.onPositionUpdate.call(s,w);if(typeof w!=="undefined"&&w.type&&w.type!=="mousemove"){f.fn.qtip.log.error.call(s,1,f.fn.qtip.constants.EVENT_POSITION_UPDATED,"updatePosition")}}return s},updateWidth:function(w){var x;if(!s.status.rendered){return f.fn.qtip.log.error.call(s,2,f.fn.qtip.constants.TOOLTIP_NOT_RENDERED,"updateWidth")}else{if(w&&typeof w!=="number"){return f.fn.qtip.log.error.call(s,2,"newWidth must be of type number","updateWidth")}}x=s.elements.contentWrapper.siblings().add(s.elements.tip).add(s.elements.button);if(!w){if(typeof s.options.style.width.value=="number"){w=s.options.style.width.value}else{s.elements.tooltip.css({width:"auto"});x.hide();if(f.browser.msie){s.elements.wrapper.add(s.elements.contentWrapper.children()).css({zoom:"normal"})}w=s.getDimensions().width+1;if(!s.options.style.width.value){if(w>s.options.style.width.max){w=s.options.style.width.max}if(w<s.options.style.width.min){w=s.options.style.width.min}}}}if(w%2!==0){w-=1}s.elements.tooltip.width(w);x.show();if(s.options.style.border.radius){s.elements.tooltip.find(".qtip-betweenCorners").each(function(y){f(this).width(w-(s.options.style.border.radius*2))})}if(f.browser.msie){s.elements.wrapper.add(s.elements.contentWrapper.children()).css({zoom:"1"});s.elements.wrapper.width(w);if(s.elements.bgiframe){s.elements.bgiframe.width(w).height(s.getDimensions.height)}}return f.fn.qtip.log.error.call(s,1,f.fn.qtip.constants.EVENT_WIDTH_UPDATED,"updateWidth")},updateStyle:function(w){var z,A,x,y,B;if(!s.status.rendered){return f.fn.qtip.log.error.call(s,2,f.fn.qtip.constants.TOOLTIP_NOT_RENDERED,"updateStyle")}else{if(typeof w!=="string"||!f.fn.qtip.styles[w]){return f.fn.qtip.log.error.call(s,2,f.fn.qtip.constants.STYLE_NOT_DEFINED,"updateStyle")}}s.options.style=a.call(s,f.fn.qtip.styles[w],s.options.user.style);s.elements.content.css(q(s.options.style));if(s.options.content.title.text!==false){s.elements.title.css(q(s.options.style.title,true))}s.elements.contentWrapper.css({borderColor:s.options.style.border.color});if(s.options.style.tip.corner!==false){if(f("<canvas>").get(0).getContext){z=s.elements.tooltip.find(".qtip-tip canvas:first");x=z.get(0).getContext("2d");x.clearRect(0,0,300,300);y=z.parent("div[rel]:first").attr("rel");B=b(y,s.options.style.tip.size.width,s.options.style.tip.size.height);h.call(s,z,B,s.options.style.tip.color||s.options.style.border.color)}else{if(f.browser.msie){z=s.elements.tooltip.find('.qtip-tip 
[nodeName="shape"]');z.attr("fillcolor",s.options.style.tip.color||s.options.style.border.color)}}}if(s.options.style.border.radius>0){s.elements.tooltip.find(".qtip-betweenCorners").css({backgroundColor:s.options.style.border.color});if(f("<canvas>").get(0).getContext){A=g(s.options.style.border.radius);s.elements.tooltip.find(".qtip-wrapper canvas").each(function(){x=f(this).get(0).getContext("2d");x.clearRect(0,0,300,300);y=f(this).parent("div[rel]:first").attr("rel");r.call(s,f(this),A[y],s.options.style.border.radius,s.options.style.border.color)})}else{if(f.browser.msie){s.elements.tooltip.find('.qtip-wrapper [nodeName="arc"]').each(function(){f(this).attr("fillcolor",s.options.style.border.color)})}}}return f.fn.qtip.log.error.call(s,1,f.fn.qtip.constants.EVENT_STYLE_UPDATED,"updateStyle")},updateContent:function(A,y){var z,x,w;if(!s.status.rendered){return f.fn.qtip.log.error.call(s,2,f.fn.qtip.constants.TOOLTIP_NOT_RENDERED,"updateContent")}else{if(!A){return f.fn.qtip.log.error.call(s,2,f.fn.qtip.constants.NO_CONTENT_PROVIDED,"updateContent")}}z=s.beforeContentUpdate.call(s,A);if(typeof z=="string"){A=z}else{if(z===false){return}}if(f.browser.msie){s.elements.contentWrapper.children().css({zoom:"normal"})}if(A.jquery&&A.length>0){A.clone(true).appendTo(s.elements.content).show()}else{s.elements.content.html(A)}x=s.elements.content.find("img[complete=false]");if(x.length>0){w=0;x.each(function(C){f('<img src="'+f(this).attr("src")+'" />').load(function(){if(++w==x.length){B()}})})}else{B()}function B(){s.updateWidth();if(y!==false){if(s.options.position.type!=="static"){s.updatePosition(s.elements.tooltip.is(":visible"),true)}if(s.options.style.tip.corner!==false){n.call(s)}}}s.onContentUpdate.call(s);return f.fn.qtip.log.error.call(s,1,f.fn.qtip.constants.EVENT_CONTENT_UPDATED,"loadContent")},loadContent:function(w,z,A){var y;if(!s.status.rendered){return f.fn.qtip.log.error.call(s,2,f.fn.qtip.constants.TOOLTIP_NOT_RENDERED,"loadContent")}y=s.beforeContentLoad.call(s);if(y===false){return s}if(A=="post"){f.post(w,z,x)}else{f.get(w,z,x)}function x(B){s.onContentLoad.call(s);f.fn.qtip.log.error.call(s,1,f.fn.qtip.constants.EVENT_CONTENT_LOADED,"loadContent");s.updateContent(B)}return s},updateTitle:function(w){if(!s.status.rendered){return f.fn.qtip.log.error.call(s,2,f.fn.qtip.constants.TOOLTIP_NOT_RENDERED,"updateTitle")}else{if(!w){return f.fn.qtip.log.error.call(s,2,f.fn.qtip.constants.NO_CONTENT_PROVIDED,"updateTitle")}}returned=s.beforeTitleUpdate.call(s);if(returned===false){return s}if(s.elements.button){s.elements.button=s.elements.button.clone(true)}s.elements.title.html(w);if(s.elements.button){s.elements.title.prepend(s.elements.button)}s.onTitleUpdate.call(s);return f.fn.qtip.log.error.call(s,1,f.fn.qtip.constants.EVENT_TITLE_UPDATED,"updateTitle")},focus:function(A){var y,x,w,z;if(!s.status.rendered){return f.fn.qtip.log.error.call(s,2,f.fn.qtip.constants.TOOLTIP_NOT_RENDERED,"focus")}else{if(s.options.position.type=="static"){return f.fn.qtip.log.error.call(s,1,f.fn.qtip.constants.CANNOT_FOCUS_STATIC,"focus")}}y=parseInt(s.elements.tooltip.css("z-index"));x=6000+f("div.qtip[qtip]").length-1;if(!s.status.focused&&y!==x){z=s.beforeFocus.call(s,A);if(z===false){return s}f("div.qtip[qtip]").not(s.elements.tooltip).each(function(){if(f(this).qtip("api").status.rendered===true){w=parseInt(f(this).css("z-index"));if(typeof 
w=="number"&&w>-1){f(this).css({zIndex:parseInt(f(this).css("z-index"))-1})}f(this).qtip("api").status.focused=false}});s.elements.tooltip.css({zIndex:x});s.status.focused=true;s.onFocus.call(s,A);f.fn.qtip.log.error.call(s,1,f.fn.qtip.constants.EVENT_FOCUSED,"focus")}return s},disable:function(w){if(!s.status.rendered){return f.fn.qtip.log.error.call(s,2,f.fn.qtip.constants.TOOLTIP_NOT_RENDERED,"disable")}if(w){if(!s.status.disabled){s.status.disabled=true;f.fn.qtip.log.error.call(s,1,f.fn.qtip.constants.EVENT_DISABLED,"disable")}else{f.fn.qtip.log.error.call(s,1,f.fn.qtip.constants.TOOLTIP_ALREADY_DISABLED,"disable")}}else{if(s.status.disabled){s.status.disabled=false;f.fn.qtip.log.error.call(s,1,f.fn.qtip.constants.EVENT_ENABLED,"disable")}else{f.fn.qtip.log.error.call(s,1,f.fn.qtip.constants.TOOLTIP_ALREADY_ENABLED,"disable")}}return s},destroy:function(){var w,x,y;x=s.beforeDestroy.call(s);if(x===false){return s}if(s.status.rendered){s.options.show.when.target.unbind("mousemove.qtip",s.updatePosition);s.options.show.when.target.unbind("mouseout.qtip",s.hide);s.options.show.when.target.unbind(s.options.show.when.event+".qtip");s.options.hide.when.target.unbind(s.options.hide.when.event+".qtip");s.elements.tooltip.unbind(s.options.hide.when.event+".qtip");s.elements.tooltip.unbind("mouseover.qtip",s.focus);s.elements.tooltip.remove()}else{s.options.show.when.target.unbind(s.options.show.when.event+".qtip-create")}if(typeof s.elements.target.data("qtip")=="object"){y=s.elements.target.data("qtip").interfaces;if(typeof y=="object"&&y.length>0){for(w=0;w<y.length-1;w++){if(y[w].id==s.id){y.splice(w,1)}}}}delete f.fn.qtip.interfaces[s.id];if(typeof y=="object"&&y.length>0){s.elements.target.data("qtip").current=y.length-1}else{s.elements.target.removeData("qtip")}s.onDestroy.call(s);f.fn.qtip.log.error.call(s,1,f.fn.qtip.constants.EVENT_DESTROYED,"destroy");return s.elements.target},getPosition:function(){var w,x;if(!s.status.rendered){return f.fn.qtip.log.error.call(s,2,f.fn.qtip.constants.TOOLTIP_NOT_RENDERED,"getPosition")}w=(s.elements.tooltip.css("display")!=="none")?false:true;if(w){s.elements.tooltip.css({visiblity:"hidden"}).show()}x=s.elements.tooltip.offset();if(w){s.elements.tooltip.css({visiblity:"visible"}).hide()}return x},getDimensions:function(){var w,x;if(!s.status.rendered){return f.fn.qtip.log.error.call(s,2,f.fn.qtip.constants.TOOLTIP_NOT_RENDERED,"getDimensions")}w=(!s.elements.tooltip.is(":visible"))?true:false;if(w){s.elements.tooltip.css({visiblity:"hidden"}).show()}x={height:s.elements.tooltip.outerHeight(),width:s.elements.tooltip.outerWidth()};if(w){s.elements.tooltip.css({visiblity:"visible"}).hide()}return x}})}function p(){var s,w,u,t,v,y,x;s=this;s.beforeRender.call(s);s.status.rendered=true;s.elements.tooltip='<div qtip="'+s.id+'" class="qtip '+(s.options.style.classes.tooltip||s.options.style)+'"style="display:none; -moz-border-radius:0; -webkit-border-radius:0; border-radius:0;position:'+s.options.position.type+';">  <div class="qtip-wrapper" style="position:relative; overflow:hidden; text-align:left;">    <div class="qtip-contentWrapper" style="overflow:hidden;">       <div class="qtip-content 
'+s.options.style.classes.content+'"></div></div></div></div>';s.elements.tooltip=f(s.elements.tooltip);s.elements.tooltip.appendTo(s.options.position.container);s.elements.tooltip.data("qtip",{current:0,interfaces:[s]});s.elements.wrapper=s.elements.tooltip.children("div:first");s.elements.contentWrapper=s.elements.wrapper.children("div:first").css({background:s.options.style.background});s.elements.content=s.elements.contentWrapper.children("div:first").css(q(s.options.style));if(f.browser.msie){s.elements.wrapper.add(s.elements.content).css({zoom:1})}if(s.options.hide.when.event=="unfocus"){s.elements.tooltip.attr("unfocus",true)}if(typeof s.options.style.width.value=="number"){s.updateWidth()}if(f("<canvas>").get(0).getContext||f.browser.msie){if(s.options.style.border.radius>0){m.call(s)}else{s.elements.contentWrapper.css({border:s.options.style.border.width+"px solid "+s.options.style.border.color})}if(s.options.style.tip.corner!==false){e.call(s)}}else{s.elements.contentWrapper.css({border:s.options.style.border.width+"px solid "+s.options.style.border.color});s.options.style.border.radius=0;s.options.style.tip.corner=false;f.fn.qtip.log.error.call(s,2,f.fn.qtip.constants.CANVAS_VML_NOT_SUPPORTED,"render")}if((typeof s.options.content.text=="string"&&s.options.content.text.length>0)||(s.options.content.text.jquery&&s.options.content.text.length>0)){u=s.options.content.text}else{if(typeof s.elements.target.attr("title")=="string"&&s.elements.target.attr("title").length>0){u=s.elements.target.attr("title").replace("\\n","<br />");s.elements.target.attr("title","")}else{if(typeof s.elements.target.attr("alt")=="string"&&s.elements.target.attr("alt").length>0){u=s.elements.target.attr("alt").replace("\\n","<br />");s.elements.target.attr("alt","")}else{u=" ";f.fn.qtip.log.error.call(s,1,f.fn.qtip.constants.NO_VALID_CONTENT,"render")}}}if(s.options.content.title.text!==false){j.call(s)}s.updateContent(u);l.call(s);if(s.options.show.ready===true){s.show()}if(s.options.content.url!==false){t=s.options.content.url;v=s.options.content.data;y=s.options.content.method||"get";s.loadContent(t,v,y)}s.onRender.call(s);f.fn.qtip.log.error.call(s,1,f.fn.qtip.constants.EVENT_RENDERED,"render")}function m(){var F,z,t,B,x,E,u,G,D,y,w,C,A,s,v;F=this;F.elements.wrapper.find(".qtip-borderBottom, .qtip-borderTop").remove();t=F.options.style.border.width;B=F.options.style.border.radius;x=F.options.style.border.color||F.options.style.tip.color;E=g(B);u={};for(z in E){u[z]='<div rel="'+z+'" style="'+((z.search(/Left/)!==-1)?"left":"right")+":0; position:absolute; height:"+B+"px; width:"+B+'px; overflow:hidden; line-height:0.1px; font-size:1px">';if(f("<canvas>").get(0).getContext){u[z]+='<canvas height="'+B+'" width="'+B+'" style="vertical-align: top"></canvas>'}else{if(f.browser.msie){G=B*2+3;u[z]+='<v:arc stroked="false" fillcolor="'+x+'" startangle="'+E[z][0]+'" endangle="'+E[z][1]+'" style="width:'+G+"px; height:"+G+"px; margin-top:"+((z.search(/bottom/)!==-1)?-2:-1)+"px; margin-left:"+((z.search(/Right/)!==-1)?E[z][2]-3.5:-1)+'px; vertical-align:top; display:inline-block; behavior:url(#default#VML)"></v:arc>'}}u[z]+="</div>"}D=F.getDimensions().width-(Math.max(t,B)*2);y='<div class="qtip-betweenCorners" style="height:'+B+"px; width:"+D+"px; overflow:hidden; background-color:"+x+'; line-height:0.1px; font-size:1px;">';w='<div class="qtip-borderTop" dir="ltr" style="height:'+B+"px; margin-left:"+B+'px; line-height:0.1px; font-size:1px; 
padding:0;">'+u.topLeft+u.topRight+y;F.elements.wrapper.prepend(w);C='<div class="qtip-borderBottom" dir="ltr" style="height:'+B+"px; margin-left:"+B+'px; line-height:0.1px; font-size:1px; padding:0;">'+u.bottomLeft+u.bottomRight+y;F.elements.wrapper.append(C);if(f("<canvas>").get(0).getContext){F.elements.wrapper.find("canvas").each(function(){A=E[f(this).parent("[rel]:first").attr("rel")];r.call(F,f(this),A,B,x)})}else{if(f.browser.msie){F.elements.tooltip.append('<v:image style="behavior:url(#default#VML);"></v:image>')}}s=Math.max(B,(B+(t-B)));v=Math.max(t-B,0);F.elements.contentWrapper.css({border:"0px solid "+x,borderWidth:v+"px "+s+"px"})}function r(u,w,s,t){var v=u.get(0).getContext("2d");v.fillStyle=t;v.beginPath();v.arc(w[0],w[1],s,0,Math.PI*2,false);v.fill()}function e(v){var t,s,x,u,w;t=this;if(t.elements.tip!==null){t.elements.tip.remove()}s=t.options.style.tip.color||t.options.style.border.color;if(t.options.style.tip.corner===false){return}else{if(!v){v=t.options.style.tip.corner}}x=b(v,t.options.style.tip.size.width,t.options.style.tip.size.height);t.elements.tip='<div class="'+t.options.style.classes.tip+'" dir="ltr" rel="'+v+'" style="position:absolute; height:'+t.options.style.tip.size.height+"px; width:"+t.options.style.tip.size.width+'px; margin:0 auto; line-height:0.1px; font-size:1px;">';if(f("<canvas>").get(0).getContext){t.elements.tip+='<canvas height="'+t.options.style.tip.size.height+'" width="'+t.options.style.tip.size.width+'"></canvas>'}else{if(f.browser.msie){u=t.options.style.tip.size.width+","+t.options.style.tip.size.height;w="m"+x[0][0]+","+x[0][1];w+=" l"+x[1][0]+","+x[1][1];w+=" "+x[2][0]+","+x[2][1];w+=" xe";t.elements.tip+='<v:shape fillcolor="'+s+'" stroked="false" filled="true" path="'+w+'" coordsize="'+u+'" style="width:'+t.options.style.tip.size.width+"px; height:"+t.options.style.tip.size.height+"px; line-height:0.1px; display:inline-block; behavior:url(#default#VML); vertical-align:"+((v.search(/top/)!==-1)?"bottom":"top")+'"></v:shape>';t.elements.tip+='<v:image style="behavior:url(#default#VML);"></v:image>';t.elements.contentWrapper.css("position","relative")}}t.elements.tooltip.prepend(t.elements.tip+"</div>");t.elements.tip=t.elements.tooltip.find("."+t.options.style.classes.tip).eq(0);if(f("<canvas>").get(0).getContext){h.call(t,t.elements.tip.find("canvas:first"),x,s)}if(v.search(/top/)!==-1&&f.browser.msie&&parseInt(f.browser.version.charAt(0))===6){t.elements.tip.css({marginTop:-4})}n.call(t,v)}function h(t,v,s){var u=t.get(0).getContext("2d");u.fillStyle=s;u.beginPath();u.moveTo(v[0][0],v[0][1]);u.lineTo(v[1][0],v[1][1]);u.lineTo(v[2][0],v[2][1]);u.fill()}function n(u){var 
t,w,s,x,v;t=this;if(t.options.style.tip.corner===false||!t.elements.tip){return}if(!u){u=t.elements.tip.attr("rel")}w=positionAdjust=(f.browser.msie)?1:0;t.elements.tip.css(u.match(/left|right|top|bottom/)[0],0);if(u.search(/top|bottom/)!==-1){if(f.browser.msie){if(parseInt(f.browser.version.charAt(0))===6){positionAdjust=(u.search(/top/)!==-1)?-3:1}else{positionAdjust=(u.search(/top/)!==-1)?1:2}}if(u.search(/Middle/)!==-1){t.elements.tip.css({left:"50%",marginLeft:-(t.options.style.tip.size.width/2)})}else{if(u.search(/Left/)!==-1){t.elements.tip.css({left:t.options.style.border.radius-w})}else{if(u.search(/Right/)!==-1){t.elements.tip.css({right:t.options.style.border.radius+w})}}}if(u.search(/top/)!==-1){t.elements.tip.css({top:-positionAdjust})}else{t.elements.tip.css({bottom:positionAdjust})}}else{if(u.search(/left|right/)!==-1){if(f.browser.msie){positionAdjust=(parseInt(f.browser.version.charAt(0))===6)?1:((u.search(/left/)!==-1)?1:2)}if(u.search(/Middle/)!==-1){t.elements.tip.css({top:"50%",marginTop:-(t.options.style.tip.size.height/2)})}else{if(u.search(/Top/)!==-1){t.elements.tip.css({top:t.options.style.border.radius-w})}else{if(u.search(/Bottom/)!==-1){t.elements.tip.css({bottom:t.options.style.border.radius+w})}}}if(u.search(/left/)!==-1){t.elements.tip.css({left:-positionAdjust})}else{t.elements.tip.css({right:positionAdjust})}}}s="padding-"+u.match(/left|right|top|bottom/)[0];x=t.options.style.tip.size[(s.search(/left|right/)!==-1)?"width":"height"];t.elements.tooltip.css("padding",0);t.elements.tooltip.css(s,x);if(f.browser.msie&&parseInt(f.browser.version.charAt(0))==6){v=parseInt(t.elements.tip.css("margin-top"))||0;v+=parseInt(t.elements.content.css("margin-top"))||0;t.elements.tip.css({marginTop:v})}}function j(){var s=this;if(s.elements.title!==null){s.elements.title.remove()}s.elements.title=f('<div class="'+s.options.style.classes.title+'">').css(q(s.options.style.title,true)).css({zoom:(f.browser.msie)?1:0}).prependTo(s.elements.contentWrapper);if(s.options.content.title.text){s.updateTitle.call(s,s.options.content.title.text)}if(s.options.content.title.button!==false&&typeof s.options.content.title.button=="string"){s.elements.button=f('<a class="'+s.options.style.classes.button+'" style="float:right; position: relative"></a>').css(q(s.options.style.button,true)).html(s.options.content.title.button).prependTo(s.elements.title).click(function(t){if(!s.status.disabled){s.hide(t)}})}}function l(){var t,v,u,s;t=this;v=t.options.show.when.target;u=t.options.hide.when.target;if(t.options.hide.fixed){u=u.add(t.elements.tooltip)}if(t.options.hide.when.event=="inactive"){s=["click","dblclick","mousedown","mouseup","mousemove","mouseout","mouseenter","mouseleave","mouseover"];function y(z){if(t.status.disabled===true){return}clearTimeout(t.timers.inactive);t.timers.inactive=setTimeout(function(){f(s).each(function(){u.unbind(this+".qtip-inactive");t.elements.content.unbind(this+".qtip-inactive")});t.hide(z)},t.options.hide.delay)}}else{if(t.options.hide.fixed===true){t.elements.tooltip.bind("mouseover.qtip",function(){if(t.status.disabled===true){return}clearTimeout(t.timers.hide)})}}function x(z){if(t.status.disabled===true){return}if(t.options.hide.when.event=="inactive"){f(s).each(function(){u.bind(this+".qtip-inactive",y);t.elements.content.bind(this+".qtip-inactive",y)});y()}clearTimeout(t.timers.show);clearTimeout(t.timers.hide);t.timers.show=setTimeout(function(){t.show(z)},t.options.show.delay)}function 
w(z){if(t.status.disabled===true){return}if(t.options.hide.fixed===true&&t.options.hide.when.event.search(/mouse(out|leave)/i)!==-1&&f(z.relatedTarget).parents("div.qtip[qtip]").length>0){z.stopPropagation();z.preventDefault();clearTimeout(t.timers.hide);return false}clearTimeout(t.timers.show);clearTimeout(t.timers.hide);t.elements.tooltip.stop(true,true);t.timers.hide=setTimeout(function(){t.hide(z)},t.options.hide.delay)}if((t.options.show.when.target.add(t.options.hide.when.target).length===1&&t.options.show.when.event==t.options.hide.when.event&&t.options.hide.when.event!=="inactive")||t.options.hide.when.event=="unfocus"){t.cache.toggle=0;v.bind(t.options.show.when.event+".qtip",function(z){if(t.cache.toggle==0){x(z)}else{w(z)}})}else{v.bind(t.options.show.when.event+".qtip",x);if(t.options.hide.when.event!=="inactive"){u.bind(t.options.hide.when.event+".qtip",w)}}if(t.options.position.type.search(/(fixed|absolute)/)!==-1){t.elements.tooltip.bind("mouseover.qtip",t.focus)}if(t.options.position.target==="mouse"&&t.options.position.type!=="static"){v.bind("mousemove.qtip",function(z){t.cache.mouse={x:z.pageX,y:z.pageY};if(t.status.disabled===false&&t.options.position.adjust.mouse===true&&t.options.position.type!=="static"&&t.elements.tooltip.css("display")!=="none"){t.updatePosition(z)}})}}function o(u,v,A){var z,s,x,y,t,w;z=this;if(A.corner=="center"){return v.position}s=f.extend({},u);y={x:false,y:false};t={left:(s.left<f.fn.qtip.cache.screen.scroll.left),right:(s.left+A.dimensions.width+2>=f.fn.qtip.cache.screen.width+f.fn.qtip.cache.screen.scroll.left),top:(s.top<f.fn.qtip.cache.screen.scroll.top),bottom:(s.top+A.dimensions.height+2>=f.fn.qtip.cache.screen.height+f.fn.qtip.cache.screen.scroll.top)};x={left:(t.left&&(A.corner.search(/right/i)!=-1||(A.corner.search(/right/i)==-1&&!t.right))),right:(t.right&&(A.corner.search(/left/i)!=-1||(A.corner.search(/left/i)==-1&&!t.left))),top:(t.top&&A.corner.search(/top/i)==-1),bottom:(t.bottom&&A.corner.search(/bottom/i)==-1)};if(x.left){if(z.options.position.target!=="mouse"){s.left=v.position.left+v.dimensions.width}else{s.left=z.cache.mouse.x}y.x="Left"}else{if(x.right){if(z.options.position.target!=="mouse"){s.left=v.position.left-A.dimensions.width}else{s.left=z.cache.mouse.x-A.dimensions.width}y.x="Right"}}if(x.top){if(z.options.position.target!=="mouse"){s.top=v.position.top+v.dimensions.height}else{s.top=z.cache.mouse.y}y.y="top"}else{if(x.bottom){if(z.options.position.target!=="mouse"){s.top=v.position.top-A.dimensions.height}else{s.top=z.cache.mouse.y-A.dimensions.height}y.y="bottom"}}if(s.left<0){s.left=u.left;y.x=false}if(s.top<0){s.top=u.top;y.y=false}if(z.options.style.tip.corner!==false){s.corner=new String(A.corner);if(y.x!==false){s.corner=s.corner.replace(/Left|Right|Middle/,y.x)}if(y.y!==false){s.corner=s.corner.replace(/top|bottom/,y.y)}if(s.corner!==z.elements.tip.attr("rel")){e.call(z,s.corner)}}return s}function q(u,t){var v,s;v=f.extend(true,{},u);for(s in v){if(t===true&&s.search(/(tip|classes)/i)!==-1){delete v[s]}else{if(!t&&s.search(/(width|border|tip|title|classes|user)/i)!==-1){delete v[s]}}}return v}function c(s){if(typeof s.tip!=="object"){s.tip={corner:s.tip}}if(typeof s.tip.size!=="object"){s.tip.size={width:s.tip.size,height:s.tip.size}}if(typeof s.border!=="object"){s.border={width:s.border}}if(typeof s.width!=="object"){s.width={value:s.width}}if(typeof s.width.max=="string"){s.width.max=parseInt(s.width.max.replace(/([0-9]+)/i,"$1"))}if(typeof 
s.width.min=="string"){s.width.min=parseInt(s.width.min.replace(/([0-9]+)/i,"$1"))}if(typeof s.tip.size.x=="number"){s.tip.size.width=s.tip.size.x;delete s.tip.size.x}if(typeof s.tip.size.y=="number"){s.tip.size.height=s.tip.size.y;delete s.tip.size.y}return s}function a(){var s,t,u,x,v,w;s=this;u=[true,{}];for(t=0;t<arguments.length;t++){u.push(arguments[t])}x=[f.extend.apply(f,u)];while(typeof x[0].name=="string"){x.unshift(c(f.fn.qtip.styles[x[0].name]))}x.unshift(true,{classes:{tooltip:"qtip-"+(arguments[0].name||"defaults")}},f.fn.qtip.styles.defaults);v=f.extend.apply(f,x);w=(f.browser.msie)?1:0;v.tip.size.width+=w;v.tip.size.height+=w;if(v.tip.size.width%2>0){v.tip.size.width+=1}if(v.tip.size.height%2>0){v.tip.size.height+=1}if(v.tip.corner===true){v.tip.corner=(s.options.position.corner.tooltip==="center")?false:s.options.position.corner.tooltip}return v}function b(v,u,t){var s={bottomRight:[[0,0],[u,t],[u,0]],bottomLeft:[[0,0],[u,0],[0,t]],topRight:[[0,t],[u,0],[u,t]],topLeft:[[0,0],[0,t],[u,t]],topMiddle:[[0,t],[u/2,0],[u,t]],bottomMiddle:[[0,0],[u,0],[u/2,t]],rightMiddle:[[0,0],[u,t/2],[0,t]],leftMiddle:[[u,0],[u,t],[0,t/2]]};s.leftTop=s.bottomRight;s.rightTop=s.bottomLeft;s.leftBottom=s.topRight;s.rightBottom=s.topLeft;return s[v]}function g(s){var t;if(f("<canvas>").get(0).getContext){t={topLeft:[s,s],topRight:[0,s],bottomLeft:[s,0],bottomRight:[0,0]}}else{if(f.browser.msie){t={topLeft:[-90,90,0],topRight:[-90,90,-s],bottomLeft:[90,270,0],bottomRight:[90,270,-s]}}}return t}function k(){var s,t,u;s=this;u=s.getDimensions();t='<iframe class="qtip-bgiframe" frameborder="0" tabindex="-1" src="javascript:false" style="display:block; position:absolute; z-index:-1; filter:alpha(opacity=\'0\'); border: 1px solid red; height:'+u.height+"px; width:"+u.width+'px" />';s.elements.bgiframe=s.elements.wrapper.prepend(t).children(".qtip-bgiframe:first")}f(document).ready(function(){f.fn.qtip.cache={screen:{scroll:{left:f(window).scrollLeft(),top:f(window).scrollTop()},width:f(window).width(),height:f(window).height()}};var s;f(window).bind("resize scroll",function(t){clearTimeout(s);s=setTimeout(function(){if(t.type==="scroll"){f.fn.qtip.cache.screen.scroll={left:f(window).scrollLeft(),top:f(window).scrollTop()}}else{f.fn.qtip.cache.screen.width=f(window).width();f.fn.qtip.cache.screen.height=f(window).height()}for(i=0;i<f.fn.qtip.interfaces.length;i++){var u=f.fn.qtip.interfaces[i];if(u.status.rendered===true&&(u.options.position.type!=="static"||u.options.position.adjust.scroll&&t.type==="scroll"||u.options.position.adjust.resize&&t.type==="resize")){u.updatePosition(t,true)}}},100)});f(document).bind("mousedown.qtip",function(t){if(f(t.target).parents("div.qtip").length===0){f(".qtip[unfocus]").each(function(){var u=f(this).qtip("api");if(f(this).is(":visible")&&!u.status.disabled&&f(t.target).add(u.elements.target).length>1){u.hide(t)}})}})});f.fn.qtip.interfaces=[];f.fn.qtip.log={error:function(){return 
this}};f.fn.qtip.constants={};f.fn.qtip.defaults={content:{prerender:false,text:false,url:false,data:null,title:{text:false,button:false}},position:{target:false,corner:{target:"bottomRight",tooltip:"topLeft"},adjust:{x:0,y:0,mouse:true,screen:false,scroll:true,resize:true},type:"absolute",container:false},show:{when:{target:false,event:"mouseover"},effect:{type:"fade",length:100},delay:140,solo:false,ready:false},hide:{when:{target:false,event:"mouseout"},effect:{type:"fade",length:100},delay:0,fixed:false},api:{beforeRender:function(){},onRender:function(){},beforePositionUpdate:function(){},onPositionUpdate:function(){},beforeShow:function(){},onShow:function(){},beforeHide:function(){},onHide:function(){},beforeContentUpdate:function(){},onContentUpdate:function(){},beforeContentLoad:function(){},onContentLoad:function(){},beforeTitleUpdate:function(){},onTitleUpdate:function(){},beforeDestroy:function(){},onDestroy:function(){},beforeFocus:function(){},onFocus:function(){}}};f.fn.qtip.styles={defaults:{background:"white",color:"#111",overflow:"hidden",textAlign:"left",width:{min:0,max:250},padding:"5px 9px",border:{width:1,radius:0,color:"#d3d3d3"},tip:{corner:false,color:false,size:{width:13,height:13},opacity:1},title:{background:"#e1e1e1",fontWeight:"bold",padding:"7px 12px"},button:{cursor:"pointer"},classes:{target:"",tip:"qtip-tip",title:"qtip-title",button:"qtip-button",content:"qtip-content",active:"qtip-active"}},cream:{border:{width:3,radius:0,color:"#F9E98E"},title:{background:"#F0DE7D",color:"#A27D35"},background:"#FBF7AA",color:"#A27D35",classes:{tooltip:"qtip-cream"}},light:{border:{width:3,radius:0,color:"#E2E2E2"},title:{background:"#f1f1f1",color:"#454545"},background:"white",color:"#454545",classes:{tooltip:"qtip-light"}},dark:{border:{width:3,radius:0,color:"#303030"},title:{background:"#404040",color:"#f3f3f3"},background:"#505050",color:"#f3f3f3",classes:{tooltip:"qtip-dark"}},red:{border:{width:3,radius:0,color:"#CE6F6F"},title:{background:"#f28279",color:"#9C2F2F"},background:"#F79992",color:"#9C2F2F",classes:{tooltip:"qtip-red"}},green:{border:{width:3,radius:0,color:"#A9DB66"},title:{background:"#b9db8c",color:"#58792E"},background:"#CDE6AC",color:"#58792E",classes:{tooltip:"qtip-green"}},blue:{border:{width:3,radius:0,color:"#ADD9ED"},title:{background:"#D0E9F5",color:"#5E99BD"},background:"#E5F6FE",color:"#4D9FBF",classes:{tooltip:"qtip-blue"}}}})(jQuery);
\ No newline at end of file
--- a/web/facet.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/web/facet.py	Tue Jun 28 16:33:53 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -405,6 +405,10 @@
         """
         raise NotImplementedError
 
+    @property
+    def wdgclass(self):
+        raise NotImplementedError
+
 
 class VocabularyFacet(AbstractFacet):
     """This abstract class extend :class:`AbstractFacet` to use the
@@ -418,6 +422,10 @@
     """
     needs_update = True
 
+    @property
+    def wdgclass(self):
+        return FacetVocabularyWidget
+
     def get_widget(self):
         """Return the widget instance to use to display this facet.
 
@@ -427,7 +435,7 @@
         vocab = self.vocabulary()
         if len(vocab) <= 1:
             return None
-        wdg = FacetVocabularyWidget(self)
+        wdg = self.wdgclass(self)
         selected = frozenset(typed_eid(eid) for eid in self._cw.list_form_param(self.__regid__))
         for label, value in vocab:
             if value is None:
@@ -1051,18 +1059,22 @@
         self.facet = facet
         self.items = []
 
+    def height(self):
+        return len(self.items) + 1
+
     def append(self, item):
         self.items.append(item)
 
     def _render(self):
+        w = self.w
         title = xml_escape(self.facet.title)
         facetid = xml_escape(self.facet.__regid__)
-        self.w(u'<div id="%s" class="facet">\n' % facetid)
-        self.w(u'<div class="facetTitle" cubicweb:facetName="%s">%s</div>\n' %
-               (xml_escape(facetid), title))
+        w(u'<div id="%s" class="facet">\n' % facetid)
+        w(u'<div class="facetTitle" cubicweb:facetName="%s">%s</div>\n' %
+          (xml_escape(facetid), title))
         if self.facet.support_and():
             _ = self.facet._cw._
-            self.w(u'''<select name="%s" class="radio facetOperator" title="%s">
+            w(u'''<select name="%s" class="radio facetOperator" title="%s">
   <option value="OR">%s</option>
   <option value="AND">%s</option>
 </select>''' % (facetid + '_andor', _('and/or between different values'),
@@ -1072,11 +1084,11 @@
             cssclass += ' hidden'
         if len(self.items) > 6:
             cssclass += ' overflowed'
-        self.w(u'<div class="%s">\n' % cssclass)
+        w(u'<div class="%s">\n' % cssclass)
         for item in self.items:
-            item.render(w=self.w)
-        self.w(u'</div>\n')
-        self.w(u'</div>\n')
+            item.render(w=w)
+        w(u'</div>\n')
+        w(u'</div>\n')
 
 
 class FacetStringWidget(HTMLWidget):
@@ -1084,14 +1096,18 @@
         self.facet = facet
         self.value = None
 
+    def height(self):
+        return 3
+
     def _render(self):
+        w = self.w
         title = xml_escape(self.facet.title)
         facetid = xml_escape(self.facet.__regid__)
-        self.w(u'<div id="%s" class="facet">\n' % facetid)
-        self.w(u'<div class="facetTitle" cubicweb:facetName="%s">%s</div>\n' %
+        w(u'<div id="%s" class="facet">\n' % facetid)
+        w(u'<div class="facetTitle" cubicweb:facetName="%s">%s</div>\n' %
                (facetid, title))
-        self.w(u'<input name="%s" type="text" value="%s" />\n' % (facetid, self.value or u''))
-        self.w(u'</div>\n')
+        w(u'<input name="%s" type="text" value="%s" />\n' % (facetid, self.value or u''))
+        w(u'</div>\n')
 
 
 class FacetRangeWidget(HTMLWidget):
@@ -1124,7 +1140,11 @@
         self.minvalue = minvalue
         self.maxvalue = maxvalue
 
+    def height(self):
+        return 3
+
     def _render(self):
+        w = self.w
         facet = self.facet
         facet._cw.add_js('jquery.ui.js')
         facet._cw.add_css('jquery.ui.css')
@@ -1138,26 +1158,26 @@
             'formatter': self.formatter,
             })
         title = xml_escape(self.facet.title)
-        self.w(u'<div id="%s" class="facet">\n' % facetid)
-        self.w(u'<div class="facetTitle" cubicweb:facetName="%s">%s</div>\n' %
-               (facetid, title))
+        w(u'<div id="%s" class="facet">\n' % facetid)
+        w(u'<div class="facetTitle" cubicweb:facetName="%s">%s</div>\n' %
+          (facetid, title))
         cssclass = 'facetBody'
         if not self.facet.start_unfolded:
             cssclass += ' hidden'
-        self.w(u'<div class="%s">\n' % cssclass)
-        self.w(u'<span id="%s_inf"></span> - <span id="%s_sup"></span>'
-               % (sliderid, sliderid))
-        self.w(u'<input type="hidden" name="%s_inf" value="%s" />'
-               % (facetid, self.minvalue))
-        self.w(u'<input type="hidden" name="%s_sup" value="%s" />'
-               % (facetid, self.maxvalue))
-        self.w(u'<input type="hidden" name="min_%s_inf" value="%s" />'
-               % (facetid, self.minvalue))
-        self.w(u'<input type="hidden" name="max_%s_sup" value="%s" />'
-               % (facetid, self.maxvalue))
-        self.w(u'<div id="%s"></div>' % sliderid)
-        self.w(u'</div>\n')
-        self.w(u'</div>\n')
+        w(u'<div class="%s">\n' % cssclass)
+        w(u'<span id="%s_inf"></span> - <span id="%s_sup"></span>'
+          % (sliderid, sliderid))
+        w(u'<input type="hidden" name="%s_inf" value="%s" />'
+          % (facetid, self.minvalue))
+        w(u'<input type="hidden" name="%s_sup" value="%s" />'
+          % (facetid, self.maxvalue))
+        w(u'<input type="hidden" name="min_%s_inf" value="%s" />'
+          % (facetid, self.minvalue))
+        w(u'<input type="hidden" name="max_%s_sup" value="%s" />'
+          % (facetid, self.maxvalue))
+        w(u'<div id="%s"></div>' % sliderid)
+        w(u'</div>\n')
+        w(u'</div>\n')
 
 
 class DateFacetRangeWidget(FacetRangeWidget):
@@ -1189,6 +1209,7 @@
         self.selected = selected
 
     def _render(self):
+        w = self.w
         cssclass = 'facetValue facetCheckBox'
         if self.selected:
             cssclass += ' facetValueSelected'
@@ -1197,11 +1218,11 @@
         else:
             imgsrc = self._cw.data_url(self.unselected_img)
             imgalt = self._cw._('not selected')
-        self.w(u'<div class="%s" cubicweb:value="%s">\n'
-               % (cssclass, xml_escape(unicode(self.value))))
-        self.w(u'<img src="%s" alt="%s"/>&#160;' % (imgsrc, imgalt))
-        self.w(u'<a href="javascript: {}">%s</a>' % xml_escape(self.label))
-        self.w(u'</div>')
+        w(u'<div class="%s" cubicweb:value="%s">\n'
+          % (cssclass, xml_escape(unicode(self.value))))
+        w(u'<img src="%s" alt="%s"/>&#160;' % (imgsrc, imgalt))
+        w(u'<a href="javascript: {}">%s</a>' % xml_escape(self.label))
+        w(u'</div>')
 
 
 class CheckBoxFacetWidget(HTMLWidget):
@@ -1214,10 +1235,14 @@
         self.value = value
         self.selected = selected
 
+    def height(self):
+        return 2
+
     def _render(self):
+        w = self.w
         title = xml_escape(self.facet.title)
         facetid = xml_escape(self.facet.__regid__)
-        self.w(u'<div id="%s" class="facet">\n' % facetid)
+        w(u'<div id="%s" class="facet">\n' % facetid)
         cssclass = 'facetValue facetCheckBox'
         if self.selected:
             cssclass += ' facetValueSelected'
@@ -1226,14 +1251,14 @@
         else:
             imgsrc = self._cw.data_url(self.unselected_img)
             imgalt = self._cw._('not selected')
-        self.w(u'<div class="%s" cubicweb:value="%s">\n'
-               % (cssclass, xml_escape(unicode(self.value))))
-        self.w(u'<div class="facetCheckBoxWidget">')
-        self.w(u'<img src="%s" alt="%s" cubicweb:unselimg="true" />&#160;' % (imgsrc, imgalt))
-        self.w(u'<label class="facetTitle" cubicweb:facetName="%s"><a href="javascript: {}">%s</a></label>' % (facetid, title))
-        self.w(u'</div>\n')
-        self.w(u'</div>\n')
-        self.w(u'</div>\n')
+        w(u'<div class="%s" cubicweb:value="%s">\n'
+          % (cssclass, xml_escape(unicode(self.value))))
+        w(u'<div class="facetCheckBoxWidget">')
+        w(u'<img src="%s" alt="%s" cubicweb:unselimg="true" />&#160;' % (imgsrc, imgalt))
+        w(u'<label class="facetTitle" cubicweb:facetName="%s"><a href="javascript: {}">%s</a></label>' % (facetid, title))
+        w(u'</div>\n')
+        w(u'</div>\n')
+        w(u'</div>\n')
 
 
 class FacetSeparator(HTMLWidget):
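
The new `wdgclass` property means vocabulary facets no longer hard-code their widget: `get_widget()` now instantiates `self.wdgclass(self)`. A minimal sketch of a cube-side facet relying on this hook (all identifiers hypothetical, not part of this changeset):

    from cubicweb.web import facet

    class VersionFacet(facet.RelationFacet):
        __regid__ = 'mycube.version-facet'
        rtype = 'version_of'
        role = 'object'

        @property
        def wdgclass(self):
            # returning another class with the same rendering interface is
            # now enough to change how the whole facet is displayed
            return facet.FacetVocabularyWidget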
--- a/web/form.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/web/form.py	Tue Jun 28 16:33:53 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -112,7 +112,12 @@
                     if value:
                         self.add_hidden(param, value)
         if submitmsg is not None:
-            self.add_hidden(u'__message', submitmsg)
+            self.set_message(submitmsg)
+
+    def set_message(self, submitmsg):
+        """sets a submitmsg if exists, using _cwmsgid mechanism """
+        cwmsgid = self._cw.set_redirect_message(submitmsg)
+        self.add_hidden(u'_cwmsgid', cwmsgid)
 
     @property
     def root_form(self):
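
With `set_message`, the message text no longer travels in the form: it is stored server-side and only the generated `_cwmsgid` is added as a hidden input. A usage sketch, assuming ordinary cube code holding a request (identifiers hypothetical):

    form = self._cw.vreg['forms'].select('edition', self._cw, entity=entity)
    form.set_message(self._cw._('entity updated'))
    # the rendered form now contains
    #   <input type="hidden" name="_cwmsgid" value="..." />
    # instead of the deprecated __message hidden input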
--- a/web/formfields.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/web/formfields.py	Tue Jun 28 16:33:53 2011 +0200
@@ -37,6 +37,7 @@
 .. autoclass:: cubicweb.web.formfields.StringField()
 .. autoclass:: cubicweb.web.formfields.PasswordField()
 .. autoclass:: cubicweb.web.formfields.IntField()
+.. autoclass:: cubicweb.web.formfields.BigIntField()
 .. autoclass:: cubicweb.web.formfields.FloatField()
 .. autoclass:: cubicweb.web.formfields.BooleanField()
 .. autoclass:: cubicweb.web.formfields.DateField()
@@ -830,21 +831,25 @@
         return super(EditableFileField, self)._process_form_value(form)
 
 
-class IntField(Field):
-    """Use this field to edit integers (`Int` yams type). This field additionaly
-    support `min` and `max` attributes that specify a minimum and/or maximum
-    value for the integer (`None` meaning no boundary).
+class BigIntField(Field):
+    """Use this field to edit big integers (`BigInt` yams type). This field
+    additionally supports `min` and `max` attributes that specify a minimum and/or
+    maximum value for the integer (`None` meaning no boundary).
 
     Unless explicitly specified, the widget for this field will be a
     :class:`~cubicweb.web.formwidgets.TextInput`.
     """
+    default_text_input_size = 10
+
     def __init__(self, min=None, max=None, **kwargs):
-        super(IntField, self).__init__(**kwargs)
+        super(BigIntField, self).__init__(**kwargs)
         self.min = min
         self.max = max
+
+    def init_widget(self, widget):
+        super(BigIntField, self).init_widget(widget)
         if isinstance(self.widget, fw.TextInput):
-            self.widget.attrs.setdefault('size', 5)
-            self.widget.attrs.setdefault('maxlength', 15)
+            self.widget.attrs.setdefault('size', self.default_text_input_size)
 
     def _ensure_correctly_typed(self, form, value):
         if isinstance(value, basestring):
@@ -858,6 +863,19 @@
         return value
 
 
+class IntField(BigIntField):
+    """Use this field to edit integers (`Int` yams type). Similar to
+    :class:`~cubicweb.web.formfields.BigIntField` but sets a max length when the
+    text input widget is used (the default).
+    """
+    default_text_input_size = 5
+
+    def init_widget(self, widget):
+        super(IntField, self).init_widget(widget)
+        if isinstance(self.widget, fw.TextInput):
+            self.widget.attrs.setdefault('maxlength', 15)
+
+
 class BooleanField(Field):
     """Use this field to edit booleans (`Boolean` yams type).
 
@@ -1208,6 +1226,7 @@
 
     'Boolean':  BooleanField,
     'Int':      IntField,
+    'BigInt':   BigIntField,
     'Float':    FloatField,
     'Decimal':  StringField,
 
--- a/web/request.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/web/request.py	Tue Jun 28 16:33:53 2011 +0200
@@ -92,7 +92,7 @@
             self.uiprops = vreg.config.uiprops
             self.datadir_url = vreg.config.datadir_url
         # raw html headers that can be added from any view
-        self.html_headers = HTMLHead()
+        self.html_headers = HTMLHead(self.datadir_url)
         # form parameters
         self.setup_params(form)
         # dictionnary that may be used to store request data that has to be
@@ -214,6 +214,12 @@
             if param == '_cwmsgid':
                 self.set_message_id(val)
             elif param == '__message':
+                warn('[3.13] __message in request parameters is deprecated (it '
+                     'may only be given to .build_url). Seeing this message '
+                     'usually means your application holds some <form> where '
+                     'the __message hidden input should be replaced by a call '
+                     'to form.set_message, so the new _cwmsgid mechanism is '
+                     'properly used', DeprecationWarning)
                 self.set_message(val)
             else:
                 self.form[param] = val
@@ -256,7 +262,7 @@
         """used by AutomaticWebTest to clear html headers between tests on
         the same resultset
         """
-        self.html_headers = HTMLHead()
+        self.html_headers = HTMLHead(self.datadir_url)
         return self
 
     # web state helpers #######################################################
@@ -264,7 +270,7 @@
     @property
     def message(self):
         try:
-            return self.session.data.pop(self._msgid, '')
+            return self.session.data.pop(self._msgid, u'')
         except AttributeError:
             try:
                 return self._msg
@@ -283,6 +289,7 @@
         return make_uid()
 
     def set_redirect_message(self, msg):
+        # TODO - this should probably be merged with append_to_redirect_message
         assert isinstance(msg, unicode)
         msgid = self.redirect_message_id()
         self.session.data[msgid] = msg
@@ -292,7 +299,7 @@
         msgid = self.redirect_message_id()
         currentmsg = self.session.data.get(msgid)
         if currentmsg is not None:
-            currentmsg = '%s %s' % (currentmsg, msg)
+            currentmsg = u'%s %s' % (currentmsg, msg)
         else:
             currentmsg = msg
         self.session.data[msgid] = currentmsg
@@ -415,7 +422,8 @@
 
     @cached # so it's writed only once
     def fckeditor_config(self):
-        self.add_js('fckeditor/fckeditor.js')
+        fckeditor_url = self.build_url('fckeditor/fckeditor.js')
+        self.add_js(fckeditor_url, localfile=False)
         self.html_headers.define_var('fcklang', self.lang)
         self.html_headers.define_var('fckconfigpath',
                                      self.data_url('cubicweb.fckcwconfig.js'))
@@ -625,6 +633,16 @@
 
     # urls/path management ####################################################
 
+    def build_url(self, *args, **kwargs):
+        """return an absolute URL using params dictionary key/values as URL
+        parameters. Values are automatically URL quoted, and the
+        publishing method to use may be specified or will be guessed.
+        """
+        if '__message' in kwargs:
+            msg = kwargs.pop('__message')
+            kwargs['_cwmsgid'] = self.set_redirect_message(msg)
+        return super(CubicWebRequestBase, self).build_url(*args, **kwargs)
+
     def url(self, includeparams=True):
         """return currently accessed url"""
         return self.base_url() + self.relative_path(includeparams)
@@ -890,10 +908,20 @@
 def _parse_accept_header(raw_header, value_parser=None, value_sort_key=None):
     """returns an ordered list accepted types
 
-    returned value is a list of 2-tuple (value, score), ordered
-    by score. Exact type of `value` will depend on what `value_parser`
-    will reutrn. if `value_parser` is None, then the raw value, as found
-    in the http header, is used.
+    :param value_parser: a function to parse a raw accept chunk. If None
+    is provided, the function defaults to identity. If a function is provided,
+    it must accept 2 parameters ``value`` and ``other_params``. ``value`` is
+    the value found before the first ';', ``other_params`` is a dictionary
+    built from all other chunks after this first ';'
+
+    :param value_sort_key: a key function to sort values found in the accept
+    header. This function will be passed a 3-tuple
+    (raw_value, parsed_value, score). If None is provided, the default
+    sort key is 1./score
+
+    :return: a list of 3-tuples (raw_value, parsed_value, score),
+    ordered by score. ``parsed_value`` will be the return value of
+    ``value_parser(raw_value)``
     """
     if value_sort_key is None:
         value_sort_key = lambda infos: 1./infos[-1]
@@ -928,7 +956,7 @@
     'text/html;level=1', `mimetypeinfo` will be ('text', '*', {'level': '1'})
     """
     try:
-        media_type, media_subtype = value.strip().split('/')
+        media_type, media_subtype = value.strip().split('/', 1)
     except ValueError: # safety belt : '/' should always be present
         media_type = value.strip()
         media_subtype = '*'
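
A usage sketch of the `_parse_accept_header` contract documented above; the `value_parser` below is a stand-in, not the module's own mimetype parser:

    def value_parser(value, other_params):
        # value is the chunk before the first ';', other_params the rest
        return value.lower(), other_params

    header = 'application/xml;q=0.8, text/html;q=0.9'
    accepted = _parse_accept_header(header, value_parser)
    # accepted is a list of (raw_value, parsed_value, score) 3-tuples; with
    # the default sort key (1./score), the text/html entry comes out first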
--- a/web/test/unittest_views_basecontrollers.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/web/test/unittest_views_basecontrollers.py	Tue Jun 28 16:33:53 2011 +0200
@@ -194,7 +194,7 @@
                     'use_email-object:'+emaileid: peid,
                     }
         path, params = self.expect_redirect_publish(req, 'edit')
-        email.clear_all_caches()
+        email.cw_clear_all_caches()
         self.assertEqual(email.address, 'adim@logilab.fr')
 
 
@@ -238,7 +238,7 @@
                 }
         with self.assertRaises(ValidationError) as cm:
             self.ctrl_publish(req)
-        self.assertEqual(cm.exception.errors, {'amount-subject': 'value must be >= 0'})
+        self.assertEqual(cm.exception.errors, {'amount-subject': 'value -10 must be >= 0'})
         req = self.request(rollbackfirst=True)
         req.form = {'eid': ['X'],
                     '__type:X': 'Salesterm',
@@ -248,7 +248,7 @@
                     }
         with self.assertRaises(ValidationError) as cm:
             self.ctrl_publish(req)
-        self.assertEqual(cm.exception.errors, {'amount-subject': 'value must be <= 100'})
+        self.assertEqual(cm.exception.errors, {'amount-subject': 'value 110 must be <= 100'})
         req = self.request(rollbackfirst=True)
         req.form = {'eid': ['X'],
                     '__type:X': 'Salesterm',
--- a/web/test/unittest_views_editforms.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/web/test/unittest_views_editforms.py	Tue Jun 28 16:33:53 2011 +0200
@@ -64,6 +64,7 @@
                                ])
         self.assertListEqual(rbc(e, 'main', 'metadata'),
                               [('last_login_time', 'subject'),
+                               ('cw_source', 'subject'),
                                ('creation_date', 'subject'),
                                ('cwuri', 'subject'),
                                ('modification_date', 'subject'),
@@ -119,7 +120,8 @@
                               [('nom', 'subject'),
                                ])
         self.assertListEqual(rbc(e, 'main', 'metadata'),
-                              [('creation_date', 'subject'),
+                              [('cw_source', 'subject'),
+                               ('creation_date', 'subject'),
                                ('cwuri', 'subject'),
                                ('modification_date', 'subject'),
                                ('created_by', 'subject'),
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/test/unittest_views_xmlrss.py	Tue Jun 28 16:33:53 2011 +0200
@@ -0,0 +1,38 @@
+from cubicweb.devtools.testlib import CubicWebTC
+from cubicweb.web.views.xmlrss import SERIALIZERS
+class EntityXMLViewTC(CubicWebTC):
+    """see also cw.sobjects.test.unittest_parsers"""
+    def test(self):
+        req = self.request(relation=['tags-object', 'in_group-subject',
+                                     'in_state-subject', 'use_email-subject'])
+        self.assertMultiLineEqual(
+            req.user.view('xml'),
+            '''\
+<CWUser eid="6" cwuri="None6" cwsource="system">
+  <login>admin</login>
+  <upassword/>
+  <firstname/>
+  <surname/>
+  <last_login_time/>
+  <creation_date>%(cdate)s</creation_date>
+  <modification_date>%(mdate)s</modification_date>
+  <tags role="object">
+  </tags>
+  <in_group role="subject">
+    <CWGroup eid="%(group_eid)s" cwuri="None%(group_eid)s"/>
+  </in_group>
+  <in_state role="subject">
+    <State eid="%(state_eid)s" cwuri="None%(state_eid)s" name="activated"/>
+  </in_state>
+  <use_email role="subject">
+  </use_email>
+</CWUser>
+''' % {'cdate': SERIALIZERS['Datetime'](req.user.creation_date),
+       'mdate': SERIALIZERS['Datetime'](req.user.modification_date),
+       'state_eid': req.user.in_state[0].eid,
+       'group_eid': req.user.in_group[0].eid})
+
+
+if __name__ == '__main__':
+    from logilab.common.testlib import unittest_main
+    unittest_main()
--- a/web/views/basecontrollers.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/web/views/basecontrollers.py	Tue Jun 28 16:33:53 2011 +0200
@@ -102,7 +102,7 @@
         msg = self._cw._('you have been logged out')
         # force base_url so on dual http/https configuration, we generate an url
         # on the http version of the site
-        return self._cw.build_url('view', vid='index', __message=msg,
+        return self._cw.build_url('view', vid='loggedout',
                                   base_url=self._cw.vreg.config['base-url'])
 
 
--- a/web/views/basetemplates.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/web/views/basetemplates.py	Tue Jun 28 16:33:53 2011 +0200
@@ -25,7 +25,7 @@
 
 from cubicweb.appobject import objectify_selector
 from cubicweb.selectors import match_kwargs, no_cnx, anonymous_user
-from cubicweb.view import View, MainTemplate, NOINDEX, NOFOLLOW
+from cubicweb.view import View, MainTemplate, NOINDEX, NOFOLLOW, StartupView
 from cubicweb.utils import UStringIO
 from cubicweb.schema import display_name
 from cubicweb.web import component, formfields as ff, formwidgets as fw
@@ -66,19 +66,19 @@
         self.wview('logform', rset=self.cw_rset, id='loginBox', klass='')
 
 
-class LoggedOutTemplate(LogInOutTemplate):
+class LoggedOutTemplate(StartupView):
     __regid__ = 'loggedout'
+    __select__ = anonymous_user()
     title = 'logged out'
 
-    def content(self, w):
-        # FIXME Deprecated code ?
+    def call(self):
         msg = self._cw._('you have been logged out')
-        w(u'<h2>%s</h2>\n' % msg)
-        if self._cw.vreg.config.anonymous_user()[0]:
-            indexurl = self._cw.build_url('view', vid='index', __message=msg)
-            w(u'<p><a href="%s">%s</a><p>' % (
-                xml_escape(indexurl),
-                self._cw._('go back to the index page')))
+        if self._cw.cnx:
+            comp = self._cw.vreg['components'].select('applmessages', self._cw)
+            comp.render(w=self.w, msg=msg)
+            self.wview('index')
+        else:
+            self.w(u'<h2>%s</h2>' % msg)
 
 
 @objectify_selector
--- a/web/views/cwsources.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/web/views/cwsources.py	Tue Jun 28 16:33:53 2011 +0200
@@ -229,7 +229,8 @@
 
 class CWSourceManagementView(StartupView):
     __regid__ = 'cw.source-management'
-    rql = ('Any S, ST, SN ORDERBY SN WHERE S is CWSource, S name SN, S type ST')
+    rql = ('Any S, ST, SP, SD, SN ORDERBY SN WHERE S is CWSource, S name SN, S type ST, '
+           'S latest_retrieval SD, S parser SP')
     title = _('data sources management')
 
     def call(self, **kwargs):
@@ -240,4 +241,4 @@
                 self._cw.build_url('add/%s' % eschema),
                 self._cw._('add a CWSource')))
             self.w(u'<div class="clear"></div>')
-        self.wview('table', self._cw.execute(self.rql), displaycols=range(2))
+        self.wview('table', self._cw.execute(self.rql), displaycols=range(4))
--- a/web/views/facets.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/web/views/facets.py	Tue Jun 28 16:33:53 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -73,6 +73,7 @@
         req = self._cw
         req.add_js( self.needs_js )
         req.add_css( self.needs_css)
+        req.html_headers.define_var('facetLoadingMsg', req._('facet-loading-msg'))
         if self.roundcorners:
             req.html_headers.add_onload('jQuery(".facet").corner("tl br 10px");')
         rset, vid, divid, paginate = self._get_context()
@@ -202,6 +203,11 @@
     rtype = 'has_text'
     role = 'subject'
     order = 0
+
+    @property
+    def wdgclass(self):
+        return facet.FacetStringWidget
+
     @property
     def title(self):
         return self._cw._('has_text')
@@ -212,7 +218,7 @@
         default implentation expects a .vocabulary method on the facet and
         return a combobox displaying this vocabulary
         """
-        return facet.FacetStringWidget(self)
+        return self.wdgclass(self)
 
     def add_rql_restrictions(self):
         """add restriction for this facet into the rql syntax tree"""
--- a/web/views/navigation.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/web/views/navigation.py	Tue Jun 28 16:33:53 2011 +0200
@@ -40,10 +40,10 @@
         self.clean_params(params)
         basepath = self._cw.relative_path(includeparams=False)
         self.w(u'<div class="pagination">')
-        self.w(u'%s&#160;' % self.previous_link(basepath, params))
+        self.w(self.previous_link(basepath, params))
         self.w(u'[&#160;%s&#160;]' %
                u'&#160;| '.join(self.iter_page_links(basepath, params)))
-        self.w(u'&#160;%s' % self.next_link(basepath, params))
+        self.w(u'&#160;&#160;%s' % self.next_link(basepath, params))
         self.w(u'</div>')
 
     def index_display(self, start, stop):
@@ -74,12 +74,12 @@
         basepath = self._cw.relative_path(includeparams=False)
         w = self.w
         w(u'<div class="pagination">')
-        w(u'%s&#160;' % self.previous_link(basepath, params))
+        w(self.previous_link(basepath, params))
         w(u'<select onchange="javascript: document.location=this.options[this.selectedIndex].value">')
         for option in self.iter_page_links(basepath, params):
             w(option)
         w(u'</select>')
-        w(u'&#160;%s' % self.next_link(basepath, params))
+        w(u'&#160;&#160;%s' % self.next_link(basepath, params))
         w(u'</div>')
 
 
--- a/web/views/owl.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/web/views/owl.py	Tue Jun 28 16:33:53 2011 +0200
@@ -40,6 +40,7 @@
 
                 'Boolean': 'xsd:boolean',
                 'Int': 'xsd:int',
+                'BigInt': 'xsd:int',
                 'Float': 'xsd:float',
                 'Decimal' : 'xsd:decimal',
 
--- a/web/views/plots.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/web/views/plots.py	Tue Jun 28 16:33:53 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -33,14 +33,14 @@
     """accept result set with at least one line and two columns of result
     all columns after second must be of numerical types"""
     for etype in rset.description[0]:
-        if etype not in ('Int', 'Float'):
+        if etype not in ('Int', 'BigInt', 'Float'):
             return 0
     return 1
 
 @objectify_selector
 def second_column_is_number(cls, req, rset=None, *args, **kwargs):
     etype = rset.description[0][1]
-    if etype not  in ('Int', 'Float'):
+    if etype not  in ('Int', 'BigInt', 'Float'):
         return 0
     return 1
 
@@ -50,7 +50,7 @@
     if etypes[0] not in ('Date', 'Datetime', 'TZDatetime'):
         return 0
     for etype in etypes[1:]:
-        if etype not in ('Int', 'Float'):
+        if etype not in ('Int', 'BigInt', 'Float'):
             return 0
     return 1
 
--- a/web/views/sparql.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/web/views/sparql.py	Tue Jun 28 16:33:53 2011 +0200
@@ -80,6 +80,7 @@
 
     'Boolean': 'boolean',
     'Int': 'integer',
+    'BigInt': 'integer',
     'Float': 'float',
 
     'Datetime': 'dateTime',
--- a/web/views/tableview.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/web/views/tableview.py	Tue Jun 28 16:33:53 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -22,7 +22,7 @@
 
 from logilab.mtconverter import xml_escape
 
-from cubicweb.selectors import nonempty_rset, match_form_params
+from cubicweb.selectors import nonempty_rset
 from cubicweb.utils import make_uid, json_dumps
 from cubicweb.view import EntityView, AnyRsetView
 from cubicweb import tags
@@ -31,6 +31,7 @@
 from cubicweb.web.component import Link
 from cubicweb.web.htmlwidgets import (TableWidget, TableColumn, MenuWidget,
                                       PopupBoxMenu)
+from cubicweb.web import facet
 from cubicweb.web.facet import prepare_facets_rqlst, filter_hiddens
 
 class TableView(AnyRsetView):
@@ -42,6 +43,7 @@
     __regid__ = 'table'
     title = _('table')
     finalview = 'final'
+    wdg_stack_size = 8
 
     def form_filter(self, divid, displaycols, displayactions, displayfilter,
                     paginate, hidden=True):
@@ -72,6 +74,8 @@
         w = self.w
         self._cw.add_css('cubicweb.facets.css')
         self._cw.add_js( ('cubicweb.ajax.js', 'cubicweb.facets.js'))
+        self._cw.html_headers.define_var('facetLoadingMsg',
+                                         self._cw._('facet-loading-msg'))
         # drop False / None values from vidargs
         vidargs = dict((k, v) for k, v in vidargs.iteritems() if v)
         w(u'<form method="post" cubicweb:facetargs="%s" action="">' %
@@ -81,12 +85,36 @@
         w(u'<input type="hidden" name="fromformfilter" value="1" />')
         filter_hiddens(w, facets=','.join(wdg.facet.__regid__ for wdg in fwidgets),
                        baserql=baserql)
+        self._build_form_table(fwidgets)
+
+    def _facet_widget_sort(self, fwidgets):
+        fwidgets.sort(key=lambda x: x.height())
+
+    def _build_form_table(self, fwidgets):
+        # sort by widget height
+        w = self.w
+        self._facet_widget_sort(fwidgets)
         w(u'<table class="filter">\n')
+        widget_queue = []
+        queue_size = 0
         w(u'<tr>\n')
         for wdg in fwidgets:
+            height = wdg.height()
+            if queue_size + height <= self.wdg_stack_size:
+                widget_queue.append(wdg)
+                queue_size += height
+                continue
             w(u'<td>')
-            wdg.render(w=w)
-            w(u'</td>\n')
+            for queued in widget_queue:
+                queued.render(w=w)
+            w(u'</td>')
+            widget_queue = [wdg]
+            queue_size = height
+        if widget_queue:
+            w(u'<td>')
+            for queued in widget_queue:
+                queued.render(w=w)
+            w(u'</td>')
         w(u'</tr>\n')
         w(u'</table>\n')
         w(u'</fieldset>\n')
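
`_build_form_table` lays widgets out greedily: after sorting by the new `height()`, widgets are stacked into one table cell until adding the next one would push the accumulated height past `wdg_stack_size` (8 by default), then a new cell is started. The same logic as a standalone sketch:

    # greedy column packing mirroring _build_form_table above
    def pack(heights, stack_size=8):
        columns, queue, size = [], [], 0
        for height in sorted(heights):
            if size + height <= stack_size:
                queue.append(height)
                size += height
            else:
                columns.append(queue)
                queue, size = [height], height
        if queue:
            columns.append(queue)
        return columns

    print pack([3, 3, 2, 7, 4])   # -> [[2, 3, 3], [4], [7]]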
--- a/web/views/urlpublishing.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/web/views/urlpublishing.py	Tue Jun 28 16:33:53 2011 +0200
@@ -260,9 +260,8 @@
             else:
                 try:
                     action = actionsreg._select_best(actions, req, rset=rset)
+                    if action is not None:
+                        raise Redirect(action.url())
                 except RegistryException:
-                    continue
-                else:
-                    # XXX avoid redirect
-                    raise Redirect(action.url())
+                    pass # continue searching
         raise PathDontMatch()
--- a/web/views/xmlrss.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/web/views/xmlrss.py	Tue Jun 28 16:33:53 2011 +0200
@@ -73,11 +73,13 @@
 class XMLItemView(EntityView):
     __regid__ = 'xmlitem'
 
-    def cell_call(self, row, col):
-        """ element as an item for an xml feed """
-        entity = self.cw_rset.complete_entity(row, col)
-        self.w(u'<%s eid="%s" cwuri="%s">\n'
-               % (entity.e_schema, entity.eid, xml_escape(entity.cwuri)))
+    def entity_call(self, entity):
+        """element as an item for an xml feed"""
+        entity.complete()
+        source = entity.cw_metainformation()['source']['uri']
+        self.w(u'<%s eid="%s" cwuri="%s" cwsource="%s">\n'
+               % (entity.__regid__, entity.eid, xml_escape(entity.cwuri),
+                  xml_escape(source)))
         for rschema, attrschema in entity.e_schema.attribute_definitions():
             attr = rschema.type
             if attr in ('eid', 'cwuri'):
@@ -114,17 +116,31 @@
                 continue
             self.w(u'  <%s role="%s">\n' % (rtype, role))
             for related in entity.related(rtype, role, entities=True):
-                # XXX put unique attributes as xml attribute, they are much
-                # probably used to search existing entities in client data feed,
-                # and putting it here may avoid an extra request to get those
-                # attributes values
-                self.w(u'    <%s eid="%s" cwuri="%s"/>\n'
-                       % (related.e_schema, related.eid,
-                          xml_escape(related.cwuri)))
+                related.view('xmlrelateditem', w=self.w)
             self.w(u'  </%s>\n' % rtype)
         self.w(u'</%s>\n' % (entity.e_schema))
 
 
+class XMLRelatedItemView(EntityView):
+    __regid__ = 'xmlrelateditem'
+
+    def entity_call(self, entity):
+        # XXX put unique attributes as xml attribute, they are much probably
+        # used to search existing entities in client data feed, and putting it
+        # here may avoid an extra request to get those attributes values
+        self.w(u'    <%s eid="%s" cwuri="%s"/>\n'
+               % (entity.e_schema, entity.eid, xml_escape(entity.cwuri)))
+
+
+class XMLRelatedItemStateView(XMLRelatedItemView):
+    __select__ = is_instance('State')
+
+    def entity_call(self, entity):
+        self.w(u'    <%s eid="%s" cwuri="%s" name="%s"/>\n'
+               % (entity.e_schema, entity.eid, xml_escape(entity.cwuri),
+                  xml_escape(entity.name)))
+
+
 class XMLRsetView(AnyRsetView):
     """dumps raw rset as xml"""
     __regid__ = 'rsetxml'
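
Following the `State` specialization above, a cube can enrich related items of any entity type in the XML feed by registering a more specific 'xmlrelateditem' view. A sketch for `CWGroup` (hypothetical, not part of this changeset):

    from logilab.mtconverter import xml_escape
    from cubicweb.selectors import is_instance
    from cubicweb.web.views.xmlrss import XMLRelatedItemView

    class XMLRelatedItemGroupView(XMLRelatedItemView):
        __select__ = is_instance('CWGroup')

        def entity_call(self, entity):
            # also expose the group name to spare clients an extra request
            self.w(u'    <%s eid="%s" cwuri="%s" name="%s"/>\n'
                   % (entity.e_schema, entity.eid, xml_escape(entity.cwuri),
                      xml_escape(entity.name)))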
--- a/web/webconfig.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/web/webconfig.py	Tue Jun 28 16:33:53 2011 +0200
@@ -300,19 +300,17 @@
         if not (self.repairing or self.creating):
             self.global_set_option('base-url', baseurl)
         httpsurl = self['https-url']
+        if (self.debugmode or self.mode == 'test'):
+            datadir_path = 'data/'
+        else:
+            datadir_path = 'data/%s/' % self.instance_md5_version()
         if httpsurl:
             if httpsurl[-1] != '/':
                 httpsurl += '/'
                 if not self.repairing:
                     self.global_set_option('https-url', httpsurl)
-            if self.debugmode:
-                self.https_datadir_url = httpsurl + 'data/'
-            else:
-                self.https_datadir_url = httpsurl + 'data%s/' % self.instance_md5_version()
-        if self.debugmode:
-            self.datadir_url = baseurl + 'data/'
-        else:
-            self.datadir_url = baseurl + 'data%s/' % self.instance_md5_version()
+            self.https_datadir_url = httpsurl + datadir_path
+        self.datadir_url = baseurl + datadir_path
 
     def _build_ui_properties(self):
         # self.datadir_url[:-1] to remove trailing /
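
Besides deduplicating the http/https branches, this change nests the md5 version under `data/` instead of suffixing it (`data/<md5>/` rather than `data<md5>/`). Illustrative values, assuming a hypothetical base-url:

    # debug or test mode : datadir_url = http://example.org/data/
    # production         : datadir_url = http://example.org/data/<instance-md5>/
    # the nested layout is what the gen-static-datadir command below relies on
    # when it symlinks the md5-versioned path inside the merged directory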
--- a/web/webctl.py	Tue Jun 28 16:32:38 2011 +0200
+++ b/web/webctl.py	Tue Jun 28 16:33:53 2011 +0200
@@ -21,9 +21,22 @@
 
 __docformat__ = "restructuredtext en"
 
+import os, os.path as osp
+from shutil import copy
+
 from logilab.common.shellutils import ASK
 
-from cubicweb.toolsutils import CommandHandler, underline_title
+from cubicweb import ExecutionError
+from cubicweb.cwctl import CWCTL
+from cubicweb.cwconfig import CubicWebConfiguration as cwcfg
+from cubicweb.toolsutils import Command, CommandHandler, underline_title
+
+
+try:
+    from os import symlink as linkdir
+except ImportError:
+    from shutil import copytree as linkdir
+
 
 class WebCreateHandler(CommandHandler):
     cmdname = 'create'
@@ -43,3 +56,57 @@
 
     def postcreate(self, *args, **kwargs):
         """hooks called once instance's initialization has been completed"""
+
+
+class GenStaticDataDir(Command):
+    """Create a directory merging all data directory content from cubes and CW.
+    """
+    name = 'gen-static-datadir'
+    arguments = '<instance> [dirpath]'
+    min_args = 1
+    max_args = 2
+
+    options = ()
+
+    def run(self, args):
+        appid = args.pop(0)
+        config = cwcfg.config_for(appid)
+        if args:
+            dest = args[0]
+        else:
+            dest = osp.join(config.appdatahome, 'data')
+        if osp.exists(dest):
+            raise ExecutionError('Directory %s already exists. '
+                                 'Remove it first.' % dest)
+        config.quick_start = True # notify this is not a regular start
+        # list all resources (no matter their order)
+        resources = set()
+        for datadir in self._datadirs(config):
+            for dirpath, dirnames, filenames in os.walk(datadir):
+                rel_dirpath = dirpath[len(datadir)+1:]
+                resources.update(osp.join(rel_dirpath, f) for f in filenames)
+        # locate resources and copy them to destination
+        for resource in resources:
+            dirname = osp.dirname(resource)
+            dest_resource = osp.join(dest, dirname)
+            if not osp.isdir(dest_resource):
+                os.makedirs(dest_resource)
+            resource_dir, resource_path = config.locate_resource(resource)
+            copy(osp.join(resource_dir, resource_path), dest_resource)
+        # handle md5 version subdirectory
+        linkdir(dest, osp.join(dest, config.instance_md5_version()))
+        print ('You can use the apache rewrite rule below:\n'
+               'RewriteRule ^/data/(.*) %s/$1 [L]' % dest)
+
+    def _datadirs(self, config):
+        repo = config.repository()
+        if config._cubes is None:
+            # web only config
+            config.init_cubes(repo.get_cubes())
+        for cube in repo.get_cubes():
+            cube_datadir = osp.join(cwcfg.cube_dir(cube), 'data')
+            if osp.isdir(cube_datadir):
+                yield cube_datadir
+        yield osp.join(config.shared_dir(), 'data')
+
+CWCTL.register(GenStaticDataDir)
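
A usage sketch for the new command (instance name and target path hypothetical):

    cubicweb-ctl gen-static-datadir myinstance /var/www/myinstance/static
    # then have apache serve /data/ through the printed RewriteRule, bypassing
    # the CubicWeb process for static resources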