3.17 is the new stable
author David Douard <david.douard@logilab.fr>
Fri, 14 Jun 2013 16:26:25 +0200
branch stable
changeset 9013 b4bcabf55e77
parent 9012 2cf127d4f5fd (current diff)
parent 9010 1f3d4d829e63 (diff)
child 9014 dfa4da8a53a0
child 9128 d988eec2d5d3
3.17 is the new stable
.hgtags
__pkginfo__.py
cubicweb.spec
cwctl.py
debian/changelog
selectors.py
server/serverconfig.py
server/test/unittest_ldapuser.py
web/data/cubicweb.gmap.js
web/data/cubicweb.iprogress.css
web/data/cubicweb.iprogress.js
web/data/cubicweb.mailform.css
web/data/gmap.utility.labeledmarker.js
web/data/gmap_blue_marker.png
web/test/unittest_views_embeding.py
web/views/iprogress.py
--- a/.hgtags	Fri Jun 14 16:13:24 2013 +0200
+++ b/.hgtags	Fri Jun 14 16:26:25 2013 +0200
@@ -291,9 +291,19 @@
 ee860c51f56bd65c4f6ea363462c02700d1dab5a cubicweb-version-3.16.3
 ee860c51f56bd65c4f6ea363462c02700d1dab5a cubicweb-debian-version-3.16.3-1
 ee860c51f56bd65c4f6ea363462c02700d1dab5a cubicweb-centos-version-3.16.3-1
+cc1a0aad580cf93d26959f97d8d6638e786c1082 cubicweb-version-3.17.0
+22be40c492e9034483bfec379ca11462ea97825b cubicweb-debian-version-3.17.0-1
+09a0c7ea6c3cb97bbbeed3795b3c3715ceb9566b cubicweb-debian-version-3.17.0-2
 041804bc48e91e440a5b573ceb0df5bf22863b80 cubicweb-version-3.16.4
 041804bc48e91e440a5b573ceb0df5bf22863b80 cubicweb-debian-version-3.16.4-1
 041804bc48e91e440a5b573ceb0df5bf22863b80 cubicweb-centos-version-3.16.4-1
 810a05fba1a46ab893b6cadac109097a047f8355 cubicweb-version-3.16.5
 810a05fba1a46ab893b6cadac109097a047f8355 cubicweb-debiann-version-3.16.5-1
 810a05fba1a46ab893b6cadac109097a047f8355 cubicweb-centos-version-3.16.5-1
+f98d1c46ed9fd5db5262cf5be1c8e159c90efc8b cubicweb-version-3.17.1
+f98d1c46ed9fd5db5262cf5be1c8e159c90efc8b cubicweb-version-3.17.1
+73f2ad404716cd211b735e67ee16875f1fff7374 cubicweb-debian-version-3.17.1-1
+f98d1c46ed9fd5db5262cf5be1c8e159c90efc8b cubicweb-debian-version-3.17.1-1
+f98d1c46ed9fd5db5262cf5be1c8e159c90efc8b cubicweb-centos-version-3.17.1-1
+195e519fe97c8d1a5ab5ccb21bf7c88e5801b657 cubicweb-version-3.17.2
+195e519fe97c8d1a5ab5ccb21bf7c88e5801b657 cubicweb-debian-version-3.17.2-1
--- a/__init__.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/__init__.py	Fri Jun 14 16:26:25 2013 +0200
@@ -38,10 +38,10 @@
 import sys, os, logging
 from StringIO import StringIO
 
+from logilab.common.deprecation import deprecated
 from logilab.common.logging_ext import set_log_methods
 from yams.constraints import BASE_CONVERTERS
 
-
 if os.environ.get('APYCOT_ROOT'):
     logging.basicConfig(level=logging.CRITICAL)
 else:
@@ -57,8 +57,9 @@
 from logilab.common.registry import ObjectNotFound, NoSelectableObject, RegistryNotFound
 
 # convert eid to the right type, raise ValueError if it's not a valid eid
-typed_eid = int
-
+@deprecated('[3.17] typed_eid() was removed. replace it with int() when needed.')
+def typed_eid(eid):
+    return int(eid)
 
 #def log_thread(f, w, a):
 #    print f.f_code.co_filename, f.f_code.co_name
--- a/__pkginfo__.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/__pkginfo__.py	Fri Jun 14 16:26:25 2013 +0200
@@ -22,7 +22,7 @@
 
 modname = distname = "cubicweb"
 
-numversion = (3, 16, 5)
+numversion = (3, 17, 2)
 version = '.'.join(str(num) for num in numversion)
 
 description = "a repository of entities / relations for knowledge management"
@@ -43,7 +43,7 @@
     'logilab-common': '>= 0.59.0',
     'logilab-mtconverter': '>= 0.8.0',
     'rql': '>= 0.31.2',
-    'yams': '>= 0.36.0',
+    'yams': '>= 0.37.0',
     #gettext                    # for xgettext, msgcat, etc...
     # web dependancies
     'simplejson': '>= 2.0.9',
@@ -51,7 +51,7 @@
     'Twisted': '',
     # XXX graphviz
     # server dependencies
-    'logilab-database': '>= 1.8.2',
+    'logilab-database': '>= 1.10',
     'pysqlite': '>= 2.5.5', # XXX install pysqlite2
     'passlib': '',
     }
--- a/appobject.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/appobject.py	Fri Jun 14 16:26:25 2013 +0200
@@ -34,7 +34,6 @@
 from logging import getLogger
 
 from logilab.common.deprecation import deprecated, class_renamed
-from logilab.common.decorators import classproperty
 from logilab.common.logging_ext import set_log_methods
 
 # first line imports for bw compat
--- a/cubicweb.spec	Fri Jun 14 16:13:24 2013 +0200
+++ b/cubicweb.spec	Fri Jun 14 16:26:25 2013 +0200
@@ -7,7 +7,7 @@
 %endif
 
 Name:           cubicweb
-Version:        3.16.5
+Version:        3.17.2
 Release:        logilab.1%{?dist}
 Summary:        CubicWeb is a semantic web application framework
 Source0:        http://download.logilab.org/pub/cubicweb/cubicweb-%{version}.tar.gz
@@ -23,8 +23,8 @@
 Requires:       %{python}-logilab-common >= 0.59.0
 Requires:       %{python}-logilab-mtconverter >= 0.8.0
 Requires:       %{python}-rql >= 0.31.2
-Requires:       %{python}-yams >= 0.36.0
-Requires:       %{python}-logilab-database >= 1.9.0
+Requires:       %{python}-yams >= 0.37.0
+Requires:       %{python}-logilab-database >= 1.10.0
 Requires:       %{python}-passlib
 Requires:       %{python}-lxml
 Requires:       %{python}-twisted-web
--- a/cwconfig.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/cwconfig.py	Fri Jun 14 16:26:25 2013 +0200
@@ -565,19 +565,27 @@
                         todo.append(depcube)
         return cubes
 
-    @classmethod
-    def reorder_cubes(cls, cubes):
+    def reorder_cubes(self, cubes):
         """reorder cubes from the top level cubes to inner dependencies
         cubes
         """
         from logilab.common.graph import ordered_nodes, UnorderableGraph
+        # See help string for 'ui-cube' in web/webconfig.py for the reasons
+        # behind this hack.
+        uicube = self.get('ui-cube', None)
         graph = {}
+        if uicube:
+            graph[uicube] = set()
         for cube in cubes:
             cube = CW_MIGRATION_MAP.get(cube, cube)
-            graph[cube] = set(dep for dep in cls.cube_dependencies(cube)
+            graph[cube] = set(dep for dep in self.cube_dependencies(cube)
                               if dep in cubes)
-            graph[cube] |= set(dep for dep in cls.cube_recommends(cube)
+            graph[cube] |= set(dep for dep in self.cube_recommends(cube)
                                if dep in cubes)
+            if uicube and cube != uicube \
+                    and cube not in self.cube_dependencies(uicube) \
+                    and cube not in self.cube_recommends(uicube):
+                graph[cube].add(uicube)
         try:
             return ordered_nodes(graph)
         except UnorderableGraph as ex:
--- a/cwctl.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/cwctl.py	Fri Jun 14 16:26:25 2013 +0200
@@ -38,10 +38,9 @@
     def getpgid():
         """win32 getpgid implementation"""
 
-
-
 from logilab.common.clcommands import CommandLine
 from logilab.common.shellutils import ASK
+from logilab.common.configuration import merge_options
 
 from cubicweb import ConfigurationError, ExecutionError, BadCommandUsage
 from cubicweb.utils import support_args
@@ -205,9 +204,12 @@
 class ListCommand(Command):
     """List configurations, cubes and instances.
 
-    list available configurations, installed cubes, and registered instances
+    List available configurations, installed cubes, and registered instances.
+
+    If given, the optional argument allows to restrict listing only a category of items.
     """
     name = 'list'
+    arguments = '[all|cubes|configurations|instances]'
     options = (
         ('verbose',
          {'short': 'v', 'action' : 'store_true',
@@ -216,92 +218,107 @@
 
     def run(self, args):
         """run the command with its specific arguments"""
-        if args:
+        if not args:
+            mode = 'all'
+        elif len(args) == 1:
+            mode = args[0]
+        else:
             raise BadCommandUsage('Too many arguments')
+
         from cubicweb.migration import ConfigurationProblem
-        print 'CubicWeb %s (%s mode)' % (cwcfg.cubicweb_version(), cwcfg.mode)
-        print
-        print 'Available configurations:'
-        for config in CONFIGURATIONS:
-            print '*', config.name
-            for line in config.__doc__.splitlines():
-                line = line.strip()
-                if not line:
-                    continue
-                print '   ', line
-        print
-        cfgpb = ConfigurationProblem(cwcfg)
-        try:
-            cubesdir = pathsep.join(cwcfg.cubes_search_path())
-            namesize = max(len(x) for x in cwcfg.available_cubes())
-        except ConfigurationError as ex:
-            print 'No cubes available:', ex
-        except ValueError:
-            print 'No cubes available in %s' % cubesdir
-        else:
-            print 'Available cubes (%s):' % cubesdir
-            for cube in cwcfg.available_cubes():
-                try:
-                    tinfo = cwcfg.cube_pkginfo(cube)
-                    tversion = tinfo.version
-                    cfgpb.add_cube(cube, tversion)
-                except (ConfigurationError, AttributeError) as ex:
-                    tinfo = None
-                    tversion = '[missing cube information: %s]' % ex
-                print '* %s %s' % (cube.ljust(namesize), tversion)
-                if self.config.verbose:
-                    if tinfo:
-                        descr = getattr(tinfo, 'description', '')
-                        if not descr:
-                            descr = getattr(tinfo, 'short_desc', '')
+
+        if mode == 'all':
+            print 'CubicWeb %s (%s mode)' % (cwcfg.cubicweb_version(), cwcfg.mode)
+            print
+
+        if mode in ('all', 'config', 'configurations'):
+            print 'Available configurations:'
+            for config in CONFIGURATIONS:
+                print '*', config.name
+                for line in config.__doc__.splitlines():
+                    line = line.strip()
+                    if not line:
+                        continue
+                    print '   ', line
+            print
+
+        if mode in ('all', 'cubes'):
+            cfgpb = ConfigurationProblem(cwcfg)
+            try:
+                cubesdir = pathsep.join(cwcfg.cubes_search_path())
+                namesize = max(len(x) for x in cwcfg.available_cubes())
+            except ConfigurationError as ex:
+                print 'No cubes available:', ex
+            except ValueError:
+                print 'No cubes available in %s' % cubesdir
+            else:
+                print 'Available cubes (%s):' % cubesdir
+                for cube in cwcfg.available_cubes():
+                    try:
+                        tinfo = cwcfg.cube_pkginfo(cube)
+                        tversion = tinfo.version
+                        cfgpb.add_cube(cube, tversion)
+                    except (ConfigurationError, AttributeError) as ex:
+                        tinfo = None
+                        tversion = '[missing cube information: %s]' % ex
+                    print '* %s %s' % (cube.ljust(namesize), tversion)
+                    if self.config.verbose:
+                        if tinfo:
+                            descr = getattr(tinfo, 'description', '')
+                            if not descr:
+                                descr = getattr(tinfo, 'short_desc', '')
+                                if descr:
+                                    warn('[3.8] short_desc is deprecated, update %s'
+                                         ' pkginfo' % cube, DeprecationWarning)
+                                else:
+                                    descr = tinfo.__doc__
                             if descr:
-                                warn('[3.8] short_desc is deprecated, update %s'
-                                     ' pkginfo' % cube, DeprecationWarning)
-                            else:
-                                descr = tinfo.__doc__
-                        if descr:
-                            print '    '+ '    \n'.join(descr.splitlines())
-                    modes = detect_available_modes(cwcfg.cube_dir(cube))
-                    print '    available modes: %s' % ', '.join(modes)
-        print
-        try:
-            regdir = cwcfg.instances_dir()
-        except ConfigurationError as ex:
-            print 'No instance available:', ex
+                                print '    '+ '    \n'.join(descr.splitlines())
+                        modes = detect_available_modes(cwcfg.cube_dir(cube))
+                        print '    available modes: %s' % ', '.join(modes)
             print
-            return
-        instances = list_instances(regdir)
-        if instances:
-            print 'Available instances (%s):' % regdir
-            for appid in instances:
-                modes = cwcfg.possible_configurations(appid)
-                if not modes:
-                    print '* %s (BROKEN instance, no configuration found)' % appid
-                    continue
-                print '* %s (%s)' % (appid, ', '.join(modes))
-                try:
-                    config = cwcfg.config_for(appid, modes[0])
-                except Exception as exc:
-                    print '    (BROKEN instance, %s)' % exc
-                    continue
-        else:
-            print 'No instance available in %s' % regdir
-        print
-        # configuration management problem solving
-        cfgpb.solve()
-        if cfgpb.warnings:
-            print 'Warnings:\n', '\n'.join('* '+txt for txt in cfgpb.warnings)
-        if cfgpb.errors:
-            print 'Errors:'
-            for op, cube, version, src in cfgpb.errors:
-                if op == 'add':
-                    print '* cube', cube,
-                    if version:
-                        print ' version', version,
-                    print 'is not installed, but required by %s' % src
-                else:
-                    print '* cube %s version %s is installed, but version %s is required by %s' % (
-                        cube, cfgpb.cubes[cube], version, src)
+
+        if mode in ('all', 'instances'):
+            try:
+                regdir = cwcfg.instances_dir()
+            except ConfigurationError as ex:
+                print 'No instance available:', ex
+                print
+                return
+            instances = list_instances(regdir)
+            if instances:
+                print 'Available instances (%s):' % regdir
+                for appid in instances:
+                    modes = cwcfg.possible_configurations(appid)
+                    if not modes:
+                        print '* %s (BROKEN instance, no configuration found)' % appid
+                        continue
+                    print '* %s (%s)' % (appid, ', '.join(modes))
+                    try:
+                        config = cwcfg.config_for(appid, modes[0])
+                    except Exception as exc:
+                        print '    (BROKEN instance, %s)' % exc
+                        continue
+            else:
+                print 'No instance available in %s' % regdir
+            print
+
+        if mode == 'all':
+            # configuration management problem solving
+            cfgpb.solve()
+            if cfgpb.warnings:
+                print 'Warnings:\n', '\n'.join('* '+txt for txt in cfgpb.warnings)
+            if cfgpb.errors:
+                print 'Errors:'
+                for op, cube, version, src in cfgpb.errors:
+                    if op == 'add':
+                        print '* cube', cube,
+                        if version:
+                            print ' version', version,
+                        print 'is not installed, but required by %s' % src
+                    else:
+                        print '* cube %s version %s is installed, but version %s is required by %s' % (
+                            cube, cfgpb.cubes[cube], version, src)
 
 def check_options_consistency(config):
     if config.automatic and config.config_level > 0:
@@ -347,6 +364,12 @@
           ' "list" command. Default to "all-in-one", e.g. an installation '
           'embedding both the RQL repository and the web server.',
           }),
+        ('no-db-create',
+         {'short': 'S',
+          'action': 'store_true',
+          'default': False,
+          'help': 'stop after creation and do not continue with db-create',
+          }),
         )
 
     def run(self, args):
@@ -415,7 +438,8 @@
             print 'set %s as owner of the data directory' % config['uid']
             chown(config.appdatahome, config['uid'])
         print '\n-> creation done for %s\n' % repr(config.apphome)[1:-1]
-        helper.postcreate(self.config.automatic, self.config.config_level)
+        if not self.config.no_db_create:
+            helper.postcreate(self.config.automatic, self.config.config_level)
 
     def _handle_win32(self, config, appid):
         if sys.platform != 'win32':
@@ -811,7 +835,6 @@
     name = 'versions'
 
     def versions_instance(self, appid):
-        from logilab.common.changelog import Version
         config = cwcfg.config_for(appid)
         # should not raise error if db versions don't match fs versions
         config.repairing = True
@@ -822,7 +845,6 @@
         for key in sorted(vcconf):
             print key+': %s.%s.%s' % vcconf[key]
 
-
 class ShellCommand(Command):
     """Run an interactive migration shell on an instance. This is a python shell
     with enhanced migration commands predefined in the namespace. An additional
@@ -989,6 +1011,33 @@
         for cube in cwcfg.available_cubes():
             print cube
 
+class ConfigureInstanceCommand(InstanceCommand):
+    """Configure instance.
+
+    <instance>...
+      identifier of the instance to configure.
+    """
+    name = 'configure'
+    actionverb = 'configured'
+
+    options = merge_options(InstanceCommand.options +
+                            (('param',
+                              {'short': 'p', 'type' : 'named', 'metavar' : 'key1:value1,key2:value2',
+                               'default': None,
+                               'help': 'set <key> to <value> in configuration file.',
+                               }),
+                             ))
+
+    def configure_instance(self, appid):
+        if self.config.param is not None:
+            appcfg = cwcfg.config_for(appid)
+            for key, value in self.config.param.iteritems():
+                try:
+                    appcfg.global_set_option(key, value)
+                except KeyError:
+                    raise ConfigurationError('unknown configuration key "%s" for mode %s' % (key, appcfg.name))
+            appcfg.save()
+
 for cmdcls in (ListCommand,
                CreateInstanceCommand, DeleteInstanceCommand,
                StartInstanceCommand, StopInstanceCommand, RestartInstanceCommand,
@@ -998,10 +1047,10 @@
                ShellCommand,
                RecompileInstanceCatalogsCommand,
                ListInstancesCommand, ListCubesCommand,
+               ConfigureInstanceCommand,
                ):
     CWCTL.register(cmdcls)
 
-
 def run(args):
     """command line tool"""
     import os
--- a/cwvreg.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/cwvreg.py	Fri Jun 14 16:26:25 2013 +0200
@@ -197,14 +197,14 @@
 from os.path import join, dirname, realpath
 from warnings import warn
 from datetime import datetime, date, time, timedelta
-from functools import partial, reduce
+from functools import reduce
 
 from logilab.common.decorators import cached, clear_cache
 from logilab.common.deprecation import deprecated, class_deprecated
 from logilab.common.modutils import cleanup_sys_modules
 from logilab.common.registry import (
     RegistryStore, Registry, obj_registries,
-    ObjectNotFound, NoSelectableObject, RegistryNotFound)
+    ObjectNotFound, RegistryNotFound)
 
 from rql import RQLHelper
 from yams.constraints import BASE_CONVERTERS
--- a/dataimport.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/dataimport.py	Fri Jun 14 16:26:25 2013 +0200
@@ -70,10 +70,11 @@
 import sys
 import threading
 import traceback
+import warnings
 import cPickle
 import os.path as osp
+import inspect
 from collections import defaultdict
-from contextlib import contextmanager
 from copy import copy
 from datetime import date, datetime
 from time import asctime
@@ -323,7 +324,6 @@
     return [(k, len(v)) for k, v in buckets.items()
             if k is not None and len(v) > 1]
 
-
 # sql generator utility functions #############################################
 
 
@@ -396,7 +396,7 @@
                     columns = list(data[0])
                 execmany_func(cu, statement, data, table, columns, encoding)
             except Exception:
-                print 'unable to copy data into table %s', table
+                print 'unable to copy data into table %s' % table
                 # Error in import statement, save data in dump_output_dir
                 if dump_output_dir is not None:
                     pdata = {'data': data, 'statement': statement,
@@ -431,7 +431,16 @@
         # If an error is raised, do not continue.
         formatted_row = []
         for col in columns:
-            value = row[col]
+            try:
+                value = row[col]
+            except KeyError:
+                warnings.warn(u"Column %s is not accessible in row %s" 
+                              % (col, row), RuntimeWarning)
+                # XXX 'value' set to None so that the import does not end in 
+                # error. 
+                # Instead, the extra keys are set to NULL from the 
+                # database point of view.
+                value = None
             if value is None:
                 value = 'NULL'
             elif isinstance(value, (long, int, float)):
@@ -506,7 +515,7 @@
         item['eid'] = data['eid']
         return item
 
-    def relate(self, eid_from, rtype, eid_to, inlined=False):
+    def relate(self, eid_from, rtype, eid_to, **kwargs):
         """Add new relation"""
         relation = eid_from, rtype, eid_to
         self.relations.add(relation)
@@ -523,6 +532,18 @@
         """
         pass
 
+    def flush(self):
+        """The method is provided so that all stores share a common API.
+        It just tries to call the commit method.
+        """
+        print 'starting flush'
+        try:
+            self.commit()
+        except:
+            print 'failed to flush'
+        else:
+            print 'flush done'
+
     def rql(self, *args):
         if self._rql is not None:
             return self._rql(*args)
@@ -538,76 +559,6 @@
     def nb_inserted_relations(self):
         return len(self.relations)
 
-    @deprecated("[3.7] index support will disappear")
-    def build_index(self, name, type, func=None, can_be_empty=False):
-        """build internal index for further search"""
-        index = {}
-        if func is None or not callable(func):
-            func = lambda x: x['eid']
-        for eid in self.types[type]:
-            index.setdefault(func(self.eids[eid]), []).append(eid)
-        if not can_be_empty:
-            assert index, "new index '%s' cannot be empty" % name
-        self.indexes[name] = index
-
-    @deprecated("[3.7] index support will disappear")
-    def build_rqlindex(self, name, type, key, rql, rql_params=False,
-                       func=None, can_be_empty=False):
-        """build an index by rql query
-
-        rql should return eid in first column
-        ctl.store.build_index('index_name', 'users', 'login', 'Any U WHERE U is CWUser')
-        """
-        self.types[type] = []
-        rset = self.rql(rql, rql_params or {})
-        if not can_be_empty:
-            assert rset, "new index type '%s' cannot be empty (0 record found)" % type
-        for entity in rset.entities():
-            getattr(entity, key) # autopopulate entity with key attribute
-            self.eids[entity.eid] = dict(entity)
-            if entity.eid not in self.types[type]:
-                self.types[type].append(entity.eid)
-
-        # Build index with specified key
-        func = lambda x: x[key]
-        self.build_index(name, type, func, can_be_empty=can_be_empty)
-
-    @deprecated("[3.7] index support will disappear")
-    def fetch(self, name, key, unique=False, decorator=None):
-        """index fetcher method
-
-        decorator is a callable method or an iterator of callable methods (usually a lambda function)
-        decorator=lambda x: x[:1] (first value is returned)
-        decorator=lambda x: x.lower (lowercased value is returned)
-
-        decorator is handy when you want to improve index keys but without
-        changing the original field
-
-        Same check functions can be reused here.
-        """
-        eids = self.indexes[name].get(key, [])
-        if decorator is not None:
-            if not hasattr(decorator, '__iter__'):
-                decorator = (decorator,)
-            for f in decorator:
-                eids = f(eids)
-        if unique:
-            assert len(eids) == 1, u'expected a single one value for key "%s" in index "%s". Got %i' % (key, name, len(eids))
-            eids = eids[0]
-        return eids
-
-    @deprecated("[3.7] index support will disappear")
-    def find(self, type, key, value):
-        for idx in self.types[type]:
-            item = self.items[idx]
-            if item[key] == value:
-                yield item
-
-    @deprecated("[3.7] checkpoint() deprecated. use commit() instead")
-    def checkpoint(self):
-        self.commit()
-
-
 class RQLObjectStore(ObjectStore):
     """ObjectStore that works with an actual RQL repository (production mode)"""
     _rql = None # bw compat
@@ -630,10 +581,6 @@
         self.session = session
         self._commit = commit or session.commit
 
-    @deprecated("[3.7] checkpoint() deprecated. use commit() instead")
-    def checkpoint(self):
-        self.commit()
-
     def commit(self):
         txuuid = self._commit()
         self.session.set_cnxset()
@@ -657,9 +604,9 @@
                                       for k in item)
         return self.rql(query, item)[0][0]
 
-    def relate(self, eid_from, rtype, eid_to, inlined=False):
+    def relate(self, eid_from, rtype, eid_to, **kwargs):
         eid_from, rtype, eid_to = super(RQLObjectStore, self).relate(
-            eid_from, rtype, eid_to)
+            eid_from, rtype, eid_to, **kwargs)
         self.rql('SET X %s Y WHERE X eid %%(x)s, Y eid %%(y)s' % rtype,
                  {'x': int(eid_from), 'y': int(eid_to)})
 
@@ -809,8 +756,8 @@
         self._nb_inserted_relations = 0
         self.rql = session.execute
         # deactivate security
-        session.set_read_security(False)
-        session.set_write_security(False)
+        session.read_security = False
+        session.write_security = False
 
     def create_entity(self, etype, **kwargs):
         for k, v in kwargs.iteritems():
@@ -825,20 +772,23 @@
         session = self.session
         self.source.add_entity(session, entity)
         self.source.add_info(session, entity, self.source, None, complete=False)
+        kwargs = dict()
+        if inspect.getargspec(self.add_relation).keywords:
+            kwargs['subjtype'] = entity.cw_etype
         for rtype, targeteids in rels.iteritems():
             # targeteids may be a single eid or a list of eids
             inlined = self.rschema(rtype).inlined
             try:
                 for targeteid in targeteids:
                     self.add_relation(session, entity.eid, rtype, targeteid,
-                                      inlined)
+                                      inlined, **kwargs)
             except TypeError:
                 self.add_relation(session, entity.eid, rtype, targeteids,
-                                  inlined)
+                                  inlined, **kwargs)
         self._nb_inserted_entities += 1
         return entity
 
-    def relate(self, eid_from, rtype, eid_to):
+    def relate(self, eid_from, rtype, eid_to, **kwargs):
         assert not rtype.startswith('reverse_')
         self.add_relation(self.session, eid_from, rtype, eid_to,
                           self.rschema(rtype).inlined)
@@ -962,12 +912,12 @@
         """Flush data to the database"""
         self.source.flush()
 
-    def relate(self, subj_eid, rtype, obj_eid, subjtype=None):
+    def relate(self, subj_eid, rtype, obj_eid, **kwargs):
         if subj_eid is None or obj_eid is None:
             return
         # XXX Could subjtype be inferred ?
         self.source.add_relation(self.session, subj_eid, rtype, obj_eid,
-                                 self.rschema(rtype).inlined, subjtype)
+                                 self.rschema(rtype).inlined, **kwargs)
 
     def drop_indexes(self, etype):
         """Drop indexes for a given entity type"""
@@ -1081,18 +1031,20 @@
                                encoding=self.dbencoding)
         except:
             print 'failed to flush'
+        else:
+            print 'flush done'
         finally:
             _entities_sql.clear()
             _relations_sql.clear()
             _insertdicts.clear()
             _inlined_relations_sql.clear()
-            print 'flush done'
 
     def add_relation(self, session, subject, rtype, object,
-                     inlined=False, subjtype=None):
+                     inlined=False, **kwargs):
         if inlined:
             _sql = self._sql.inlined_relations
             data = {'cw_eid': subject, SQL_PREFIX + rtype: object}
+            subjtype = kwargs.get('subjtype')
             if subjtype is None:
                 # Try to infer it
                 targets = [t.type for t in
@@ -1102,7 +1054,9 @@
                 else:
                     raise ValueError('You should give the subject etype for '
                                      'inlined relation %s'
-                                     ', as it cannot be inferred' % rtype)
+                                     ', as it cannot be inferred: '
+                                     'this type is given as keyword argument '
+                                     '``subjtype``'% rtype)
             statement = self.sqlgen.update(SQL_PREFIX + subjtype,
                                            data, ['cw_eid'])
         else:
@@ -1117,13 +1071,13 @@
     def add_entity(self, session, entity):
         with self._storage_handler(entity, 'added'):
             attrs = self.preprocess_entity(entity)
-            rtypes = self._inlined_rtypes_cache.get(entity.__regid__, ())
+            rtypes = self._inlined_rtypes_cache.get(entity.cw_etype, ())
             if isinstance(rtypes, str):
                 rtypes = (rtypes,)
             for rtype in rtypes:
                 if rtype not in attrs:
                     attrs[rtype] = None
-            sql = self.sqlgen.insert(SQL_PREFIX + entity.__regid__, attrs)
+            sql = self.sqlgen.insert(SQL_PREFIX + entity.cw_etype, attrs)
             self._sql.eid_insertdicts[entity.eid] = attrs
             self._append_to_entities(sql, attrs)
 
@@ -1156,7 +1110,7 @@
             assert isinstance(extid, str)
             extid = b64encode(extid)
         uri = 'system' if source.copy_based_source else source.uri
-        attrs = {'type': entity.__regid__, 'eid': entity.eid, 'extid': extid,
+        attrs = {'type': entity.cw_etype, 'eid': entity.eid, 'extid': extid,
                  'source': uri, 'asource': source.uri, 'mtime': datetime.utcnow()}
         self._handle_insert_entity_sql(session, self.sqlgen.insert('entities', attrs), attrs)
         # insert core relations: is, is_instance_of and cw_source
@@ -1175,7 +1129,7 @@
             self._handle_is_relation_sql(session, 'INSERT INTO cw_source_relation(eid_from,eid_to) VALUES (%s,%s)',
                                          (entity.eid, source.eid))
         # now we can update the full text index
-        if self.do_fti and self.need_fti_indexation(entity.__regid__):
+        if self.do_fti and self.need_fti_indexation(entity.cw_etype):
             if complete:
                 entity.complete(entity.e_schema.indexable_attributes())
             self.index_entity(session, entity=entity)
--- a/dbapi.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/dbapi.py	Fri Jun 14 16:26:25 2013 +0200
@@ -415,6 +415,7 @@
         """return the definition of sources used by the repository."""
         return self.cnx.source_defs()
 
+    @deprecated('[3.17] do not use hijack_user. create new Session object')
     def hijack_user(self, user):
         """return a fake request/session using specified user"""
         req = DBAPIRequest(self.vreg)
--- a/debian/changelog	Fri Jun 14 16:13:24 2013 +0200
+++ b/debian/changelog	Fri Jun 14 16:26:25 2013 +0200
@@ -1,3 +1,27 @@
+cubicweb (3.17.2-1) unstable; urgency=low
+
+  * new upstream release
+
+ -- David Douard <david.douard@logilab.fr>  Thu, 13 Jun 2013 17:32:18 +0200
+
+cubicweb (3.17.1-1) unstable; urgency=low
+
+  * new upstream release
+
+ -- David Douard <david.douard@logilab.fr>  Thu, 06 Jun 2013 12:28:49 +0200
+
+cubicweb (3.17.0-2) unstable; urgency=low
+
+  * fix yams Depends on cubicweb-common
+
+ -- Pierre-Yves David <pierre-yves.david@logilab.fr>  Fri, 03 May 2013 16:26:50 +0200
+
+cubicweb (3.17.0-1) unstable; urgency=low
+
+  * new upstream release
+
+ -- Pierre-Yves David <pierre-yves.david@logilab.fr>  Mon, 29 Apr 2013 11:20:56 +0200
+
 cubicweb (3.16.5-1) unstable; urgency=low
 
   * new upstream release
--- a/debian/control	Fri Jun 14 16:13:24 2013 +0200
+++ b/debian/control	Fri Jun 14 16:26:25 2013 +0200
@@ -16,7 +16,7 @@
  python-unittest2,
  python-logilab-mtconverter,
  python-rql,
- python-yams,
+ python-yams (>= 0.37),
  python-lxml,
 Standards-Version: 3.9.1
 Homepage: http://www.cubicweb.org
@@ -25,8 +25,15 @@
 Package: cubicweb
 Architecture: all
 XB-Python-Version: ${python:Versions}
-Depends: ${misc:Depends}, ${python:Depends}, cubicweb-server (= ${source:Version}), cubicweb-twisted (= ${source:Version})
-Recommends: postgresql | mysql | sqlite3
+Depends:
+ ${misc:Depends},
+ ${python:Depends},
+ cubicweb-server (= ${source:Version}),
+ cubicweb-twisted (= ${source:Version})
+Recommends:
+ postgresql
+ | mysql
+ | sqlite3
 Description: the complete CubicWeb framework
  CubicWeb is a semantic web application framework.
  .
@@ -42,9 +49,21 @@
 Conflicts: cubicweb-multisources
 Replaces: cubicweb-multisources
 Provides: cubicweb-multisources
-Depends: ${misc:Depends}, ${python:Depends}, cubicweb-common (= ${source:Version}), cubicweb-ctl (= ${source:Version}), python-logilab-database (>= 1.8.2), cubicweb-postgresql-support | cubicweb-mysql-support | python-pysqlite2, python-passlib
-Recommends: pyro (<< 4.0.0), cubicweb-documentation (= ${source:Version})
-Suggests: python-zmq
+Depends:
+ ${misc:Depends},
+ ${python:Depends},
+ cubicweb-common (= ${source:Version}),
+ cubicweb-ctl (= ${source:Version}),
+ python-logilab-database (>= 1.10.0),
+ cubicweb-postgresql-support
+ | cubicweb-mysql-support
+ | python-pysqlite2,
+ python-passlib
+Recommends:
+ pyro (<< 4.0.0),
+ cubicweb-documentation (= ${source:Version})
+Suggests:
+ python-zmq
 Description: server part of the CubicWeb framework
  CubicWeb is a semantic web application framework.
  .
@@ -56,7 +75,10 @@
 Package: cubicweb-postgresql-support
 Architecture: all
 # postgresql-client packages for backup/restore of non local database
-Depends: ${misc:Depends}, python-psycopg2, postgresql-client
+Depends:
+ ${misc:Depends},
+ python-psycopg2,
+ postgresql-client
 Description: postgres support for the CubicWeb framework
  CubicWeb is a semantic web application framework.
  .
@@ -66,7 +88,10 @@
 Package: cubicweb-mysql-support
 Architecture: all
 # mysql-client packages for backup/restore of non local database
-Depends: ${misc:Depends}, python-mysqldb, mysql-client
+Depends:
+ ${misc:Depends},
+ python-mysqldb,
+ mysql-client
 Description: mysql support for the CubicWeb framework
  CubicWeb is a semantic web application framework.
  .
@@ -78,8 +103,15 @@
 Architecture: all
 XB-Python-Version: ${python:Versions}
 Provides: cubicweb-web-frontend
-Depends: ${misc:Depends}, ${python:Depends}, cubicweb-web (= ${source:Version}), cubicweb-ctl (= ${source:Version}), python-twisted-web
-Recommends: pyro (<< 4.0.0), cubicweb-documentation (= ${source:Version})
+Depends:
+ ${misc:Depends},
+ ${python:Depends},
+ cubicweb-web (= ${source:Version}),
+ cubicweb-ctl (= ${source:Version}),
+ python-twisted-web
+Recommends:
+ pyro (<< 4.0.0),
+ cubicweb-documentation (= ${source:Version})
 Description: twisted-based web interface for the CubicWeb framework
  CubicWeb is a semantic web application framework.
  .
@@ -92,8 +124,18 @@
 Package: cubicweb-web
 Architecture: all
 XB-Python-Version: ${python:Versions}
-Depends: ${misc:Depends}, ${python:Depends}, cubicweb-common (= ${source:Version}), python-simplejson (>= 2.0.9)
-Recommends: python-docutils (>= 0.6), python-vobject, fckeditor, python-fyzz, python-imaging, python-rdflib
+Depends:
+ ${misc:Depends},
+ ${python:Depends},
+ cubicweb-common (= ${source:Version}),
+ python-simplejson (>= 2.0.9)
+Recommends:
+ python-docutils (>= 0.6),
+ python-vobject,
+ fckeditor,
+ python-fyzz,
+ python-imaging,
+ python-rdflib
 Description: web interface library for the CubicWeb framework
  CubicWeb is a semantic web application framework.
  .
@@ -107,8 +149,19 @@
 Package: cubicweb-common
 Architecture: all
 XB-Python-Version: ${python:Versions}
-Depends: ${misc:Depends}, ${python:Depends}, graphviz, gettext, python-logilab-mtconverter (>= 0.8.0), python-logilab-common (>= 0.59.0), python-yams (>= 0.36.0), python-rql (>= 0.31.2), python-lxml
-Recommends: python-simpletal (>= 4.0), python-crypto
+Depends:
+ ${misc:Depends},
+ ${python:Depends},
+ graphviz,
+ gettext,
+ python-logilab-mtconverter (>= 0.8.0),
+ python-logilab-common (>= 0.59.0),
+ python-yams (>= 0.37.0),
+ python-rql (>= 0.31.2),
+ python-lxml
+Recommends:
+ python-simpletal (>= 4.0),
+ python-crypto
 Conflicts: cubicweb-core
 Replaces: cubicweb-core
 Description: common library for the CubicWeb framework
@@ -121,7 +174,10 @@
 Package: cubicweb-ctl
 Architecture: all
 XB-Python-Version: ${python:Versions}
-Depends: ${misc:Depends}, ${python:Depends}, cubicweb-common (= ${source:Version})
+Depends:
+ ${misc:Depends},
+ ${python:Depends},
+ cubicweb-common (= ${source:Version})
 Description: tool to manage the CubicWeb framework
  CubicWeb is a semantic web application framework.
  .
@@ -133,8 +189,15 @@
 Package: cubicweb-dev
 Architecture: all
 XB-Python-Version: ${python:Versions}
-Depends: ${misc:Depends}, ${python:Depends}, cubicweb-server (= ${source:Version}), cubicweb-web (= ${source:Version}), python-pysqlite2
-Suggests: w3c-dtd-xhtml, xvfb
+Depends:
+ ${misc:Depends},
+ ${python:Depends},
+ cubicweb-server (= ${source:Version}),
+ cubicweb-web (= ${source:Version}),
+ python-pysqlite2
+Suggests:
+ w3c-dtd-xhtml,
+ xvfb
 Description: tests suite and development tools for the CubicWeb framework
  CubicWeb is a semantic web application framework.
  .
@@ -144,7 +207,8 @@
 
 Package: cubicweb-documentation
 Architecture: all
-Recommends: doc-base
+Recommends:
+ doc-base
 Description: documentation for the CubicWeb framework
  CubicWeb is a semantic web application framework.
  .
--- a/devtools/__init__.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/devtools/__init__.py	Fri Jun 14 16:26:25 2013 +0200
@@ -28,15 +28,14 @@
 import warnings
 from hashlib import sha1 # pylint: disable=E0611
 from datetime import timedelta
-from os.path import (abspath, join, exists, basename, dirname, normpath, split,
-                     isfile, isabs, splitext, isdir, expanduser)
+from os.path import (abspath, join, exists, split, isabs, isdir)
 from functools import partial
 
 from logilab.common.date import strptime
 from logilab.common.decorators import cached, clear_cache
 
-from cubicweb import ConfigurationError, ExecutionError, BadConnectionId
-from cubicweb import CW_SOFTWARE_ROOT, schema, cwconfig
+from cubicweb import ExecutionError, BadConnectionId
+from cubicweb import schema, cwconfig
 from cubicweb.server.serverconfig import ServerConfiguration
 from cubicweb.etwist.twconfig import TwistedConfiguration
 
--- a/devtools/devctl.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/devtools/devctl.py	Fri Jun 14 16:26:25 2013 +0200
@@ -122,7 +122,6 @@
 
 
 def _generate_schema_pot(w, vreg, schema, libconfig=None):
-    from copy import deepcopy
     from cubicweb.i18n import add_msg
     from cubicweb.schema import NO_I18NCONTEXT, CONSTRAINTS
     w('# schema pot file, generated on %s\n'
--- a/devtools/fake.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/devtools/fake.py	Fri Jun 14 16:26:25 2013 +0200
@@ -25,7 +25,6 @@
 from cubicweb.req import RequestSessionBase
 from cubicweb.cwvreg import CWRegistryStore
 from cubicweb.web.request import CubicWebRequestBase
-from cubicweb.web.http_headers import Headers
 
 from cubicweb.devtools import BASE_URL, BaseApptestConfiguration
 
@@ -163,10 +162,6 @@
 
     # for use with enabled_security context manager
     read_security = write_security = True
-    def init_security(self, *args):
-        return None, None
-    def reset_security(self, *args):
-        return
 
 class FakeRepo(object):
     querier = None
--- a/devtools/htmlparser.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/devtools/htmlparser.py	Fri Jun 14 16:26:25 2013 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -19,10 +19,12 @@
 
 import re
 import sys
+from xml import sax
+from cStringIO import StringIO
 
 from lxml import etree
 
-from logilab.common.deprecation import class_deprecated
+from logilab.common.deprecation import class_deprecated, class_renamed
 
 from cubicweb.view import STRICT_DOCTYPE, TRANSITIONAL_DOCTYPE
 
@@ -31,22 +33,67 @@
 
 ERR_COUNT = 0
 
-class Validator(object):
+_REM_SCRIPT_RGX = re.compile(r"<script[^>]*>.*?</script>", re.U|re.M|re.I|re.S)
+def _remove_script_tags(data):
+    """Remove the script (usually javascript) tags to help the lxml
+    XMLParser / HTMLParser do their job. Without that, they choke on
+    tags embedded in JS strings.
+    """
+    # Notice we may want to use lxml cleaner, but it's far too intrusive:
+    #
+    # cleaner = Cleaner(scripts=True,
+    #                   javascript=False,
+    #                   comments=False,
+    #                   style=False,
+    #                   links=False,
+    #                   meta=False,
+    #                   page_structure=False,
+    #                   processing_instructions=False,
+    #                   embedded=False,
+    #                   frames=False,
+    #                   forms=False,
+    #                   annoying_tags=False,
+    #                   remove_tags=(),
+    #                   remove_unknown_tags=False,
+    #                   safe_attrs_only=False,
+    #                   add_nofollow=False)
+    # >>> cleaner.clean_html('<body></body>')
+    # '<span></span>'
+    # >>> cleaner.clean_html('<!DOCTYPE html><body></body>')
+    # '<html><body></body></html>'
+    # >>> cleaner.clean_html('<body><div/></body>')
+    # '<div></div>'
+    # >>> cleaner.clean_html('<html><body><div/><br></body><html>')
+    # '<html><body><div></div><br></body></html>'
+    # >>> cleaner.clean_html('<html><body><div/><br><span></body><html>')
+    # '<html><body><div></div><br><span></span></body></html>'
+    #
+    # using that, we'll miss most actual validation error we want to
+    # catch. For now, use dumb regexp
+    return _REM_SCRIPT_RGX.sub('', data)
 
-    def parse_string(self, data, sysid=None):
+
+class Validator(object):
+    """ base validator API """
+    parser = None
+
+    def parse_string(self, source):
+        etree = self._parse(self.preprocess_data(source))
+        return PageInfo(source, etree)
+
+    def preprocess_data(self, data):
+        return data
+
+    def _parse(self, pdata):
         try:
-            data = self.preprocess_data(data)
-            return PageInfo(data, etree.fromstring(data, self.parser))
+            return etree.fromstring(pdata, self.parser)
         except etree.XMLSyntaxError as exc:
             def save_in(fname=''):
                 file(fname, 'w').write(data)
-            new_exc = AssertionError(u'invalid xml %s' % exc)
+            new_exc = AssertionError(u'invalid document: %s' % exc)
             new_exc.position = exc.position
             raise new_exc
 
-    def preprocess_data(self, data):
-        return data
-
 
 class DTDValidator(Validator):
     def __init__(self):
@@ -60,7 +107,7 @@
             return data
         # parse using transitional DTD
         data = data.replace(STRICT_DOCTYPE, TRANSITIONAL_DOCTYPE)
-        tree = etree.fromstring(data, self.parser)
+        tree = self._parse(data)
         namespace = tree.nsmap.get(None)
         # this is the list of authorized child tags for <blockquote> nodes
         expected = 'p h1 h2 h3 h4 h5 h6 div ul ol dl pre hr blockquote address ' \
@@ -79,20 +126,64 @@
             STRICT_DOCTYPE, data)
 
 
-class SaxOnlyValidator(Validator):
+class XMLValidator(Validator):
+    """XML validator, checks that XML is well-formed and used XMLNS are defined"""
 
     def __init__(self):
         Validator.__init__(self)
         self.parser = etree.XMLParser()
 
+SaxOnlyValidator = class_renamed('SaxOnlyValidator',
+                                 XMLValidator,
+                                 '[3.17] you should use the '
+                                 'XMLValidator class instead')
 
-class XMLDemotingValidator(SaxOnlyValidator):
+
+class XMLSyntaxValidator(Validator):
+    """XML syntax validator, check XML is well-formed"""
+
+    class MySaxErrorHandler(sax.ErrorHandler):
+        """override default handler to avoid choking because of unknown entity"""
+        def fatalError(self, exception):
+            # XXX check entity in htmlentitydefs
+            if not str(exception).endswith('undefined entity'):
+                raise exception
+    _parser = sax.make_parser()
+    _parser.setContentHandler(sax.handler.ContentHandler())
+    _parser.setErrorHandler(MySaxErrorHandler())
+
+    def __init__(self):
+        super(XMLSyntaxValidator, self).__init__()
+        # XMLParser() wants xml namespaces defined
+        # XMLParser(recover=True) will accept almost anything
+        #
+        # -> use the later but preprocess will check xml well-formness using a
+        #    dumb SAX parser
+        self.parser = etree.XMLParser(recover=True)
+
+    def preprocess_data(self, data):
+        return _remove_script_tags(data)
+
+    def _parse(self, data):
+        inpsrc = sax.InputSource()
+        inpsrc.setByteStream(StringIO(data))
+        try:
+            self._parser.parse(inpsrc)
+        except sax.SAXParseException, exc:
+            new_exc = AssertionError(u'invalid document: %s' % exc)
+            new_exc.position = (exc._linenum, exc._colnum)
+            raise new_exc
+        return super(XMLSyntaxValidator, self)._parse(data)
+
+
+class XMLDemotingValidator(XMLValidator):
     """ some views produce html instead of xhtml, using demote_to_html
 
     this is typically related to the use of external dependencies
     which do not produce valid xhtml (google maps, ...)
     """
     __metaclass__ = class_deprecated
+    __deprecation_warning__ = '[3.10] this is now handled in testlib.py'
 
     def preprocess_data(self, data):
         if data.startswith('<?xml'):
@@ -106,8 +197,10 @@
 
     def __init__(self):
         Validator.__init__(self)
-        self.parser = etree.HTMLParser()
+        self.parser = etree.HTMLParser(recover=False)
 
+    def preprocess_data(self, data):
+        return _remove_script_tags(data)
 
 
 class PageInfo(object):
@@ -115,7 +208,6 @@
     def __init__(self, source, root):
         self.source = source
         self.etree = root
-        self.source = source
         self.raw_text = u''.join(root.xpath('//text()'))
         self.namespace = self.etree.nsmap
         self.default_ns = self.namespace.get(None)
@@ -234,4 +326,8 @@
                     continue
         return False
 
-VALMAP = {None: None, 'dtd': DTDValidator, 'xml': SaxOnlyValidator}
+VALMAP = {None: None,
+          'dtd': DTDValidator,
+          'xml': XMLValidator,
+          'html': HTMLValidator,
+          }
--- a/devtools/httptest.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/devtools/httptest.py	Fri Jun 14 16:26:25 2013 +0200
@@ -78,10 +78,6 @@
     def pyro_enabled(self):
         return False
 
-    def load_configuration(self):
-        super(CubicWebServerConfig, self).load_configuration()
-        self.global_set_option('force-html-content-type', True)
-
 
 class CubicWebServerTC(CubicWebTC):
     """Class for running test web server. See :class:`CubicWebServerConfig`.
--- a/devtools/qunit.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/devtools/qunit.py	Fri Jun 14 16:26:25 2013 +0200
@@ -29,7 +29,6 @@
 from logilab.common.shellutils import getlogin
 
 import cubicweb
-from cubicweb.view import StartupView
 from cubicweb.web.controller import Controller
 from cubicweb.devtools.httptest import CubicWebServerTC
 
--- a/devtools/repotest.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/devtools/repotest.py	Fri Jun 14 16:26:25 2013 +0200
@@ -262,8 +262,8 @@
         u = self.repo._build_user(self.session, self.session.user.eid)
         u._groups = set(groups)
         s = Session(u, self.repo)
-        s._threaddata.cnxset = self.cnxset
-        s._threaddata.ctx_count = 1
+        s._tx.cnxset = self.cnxset
+        s._tx.ctx_count = 1
         # register session to ensure it gets closed
         self._dumb_sessions.append(s)
         return s
@@ -311,7 +311,8 @@
             del self.repo.sources_by_uri[source.uri]
         undo_monkey_patch()
         for session in self._dumb_sessions:
-            session._threaddata.cnxset = None
+            if session._tx.cnxset is not None:
+                session._tx.cnxset = None
             session.close()
 
     def _prepare_plan(self, rql, kwargs=None):
--- a/devtools/test/unittest_dbfill.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/devtools/test/unittest_dbfill.py	Fri Jun 14 16:26:25 2013 +0200
@@ -24,7 +24,6 @@
 
 from logilab.common.testlib import TestCase, unittest_main
 
-from cubicweb.schema import Schema, EntitySchema
 from cubicweb.devtools.fill import ValueGenerator, make_tel
 from cubicweb.devtools import ApptestConfiguration
 
--- a/devtools/test/unittest_fill.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/devtools/test/unittest_fill.py	Fri Jun 14 16:26:25 2013 +0200
@@ -18,12 +18,10 @@
 """unit tests for cubicweb.devtools.fill module
 
 """
+import re
 
 from logilab.common.testlib import TestCase, unittest_main
 
-from cubicweb.schema import Schema, EntitySchema
-
-import re
 from cubicweb.devtools.fill import ValueGenerator, _ValueGenerator
 
 ISODATE_SRE = re.compile('(?P<year>\d{4})-(?P<month>\d{2})-(?P<day>\d{2})$')
--- a/devtools/test/unittest_httptest.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/devtools/test/unittest_httptest.py	Fri Jun 14 16:26:25 2013 +0200
@@ -20,7 +20,7 @@
 import httplib
 
 from logilab.common.testlib import Tags
-from cubicweb.devtools.httptest import CubicWebServerTC, CubicWebServerConfig
+from cubicweb.devtools.httptest import CubicWebServerTC
 
 
 class TwistedCWAnonTC(CubicWebServerTC):
--- a/devtools/test/unittest_qunit.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/devtools/test/unittest_qunit.py	Fri Jun 14 16:26:25 2013 +0200
@@ -1,5 +1,5 @@
 from logilab.common.testlib import unittest_main
-from cubicweb.devtools.qunit import make_qunit_html, QUnitTestCase
+from cubicweb.devtools.qunit import QUnitTestCase
 
 from os import path as osp
 
--- a/devtools/test/unittest_testlib.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/devtools/test/unittest_testlib.py	Fri Jun 14 16:26:25 2013 +0200
@@ -97,7 +97,9 @@
 class HTMLPageInfoTC(TestCase):
     """test cases for PageInfo"""
     def setUp(self):
-        parser = htmlparser.DTDValidator()
+        parser = htmlparser.HTMLValidator()
+        # disable cleanup that would remove doctype
+        parser.preprocess_data = lambda data: data
         self.page_info = parser.parse_string(HTML_PAGE2)
 
     def test_source1(self):
--- a/devtools/testlib.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/devtools/testlib.py	Fri Jun 14 16:26:25 2013 +0200
@@ -18,7 +18,6 @@
 """this module contains base classes and utilities for cubicweb tests"""
 __docformat__ = "restructuredtext en"
 
-import os
 import sys
 import re
 import urlparse
@@ -40,15 +39,14 @@
 from logilab.common.deprecation import deprecated, class_deprecated
 from logilab.common.shellutils import getlogin
 
-from cubicweb import ValidationError, NoSelectableObject, AuthenticationError
+from cubicweb import ValidationError, NoSelectableObject
 from cubicweb import cwconfig, dbapi, devtools, web, server
 from cubicweb.utils import json
 from cubicweb.sobjects import notification
 from cubicweb.web import Redirect, application
-from cubicweb.server.session import Session
 from cubicweb.server.hook import SendMailOp
 from cubicweb.devtools import SYSTEM_ENTITIES, SYSTEM_RELATIONS, VIEW_VALIDATORS
-from cubicweb.devtools import BASE_URL, fake, htmlparser, DEFAULT_EMPTY_DB_ID
+from cubicweb.devtools import fake, htmlparser, DEFAULT_EMPTY_DB_ID
 from cubicweb.utils import json
 
 # low-level utilities ##########################################################
@@ -812,8 +810,8 @@
         # snippets
         #'text/html': DTDValidator,
         #'application/xhtml+xml': DTDValidator,
-        'application/xml': htmlparser.SaxOnlyValidator,
-        'text/xml': htmlparser.SaxOnlyValidator,
+        'application/xml': htmlparser.XMLValidator,
+        'text/xml': htmlparser.XMLValidator,
         'application/json': JsonValidator,
         'text/plain': None,
         'text/comma-separated-values': None,
@@ -891,8 +889,12 @@
                     content_type = view.content_type
         if content_type is None:
             content_type = 'text/html'
-        if content_type in ('text/html', 'application/xhtml+xml'):
-            if output and output.startswith('<?xml'):
+        if content_type in ('text/html', 'application/xhtml+xml') and output:
+            if output.startswith('<!DOCTYPE html>'):
+                # only check XML well-formness since HTMLValidator isn't html5
+                # compatible and won't like various other extensions
+                default_validator = htmlparser.XMLSyntaxValidator
+            elif output.startswith('<?xml'):
                 default_validator = htmlparser.DTDValidator
             else:
                 default_validator = htmlparser.HTMLValidator
@@ -940,6 +942,7 @@
                     # define filter
                     if isinstance(content, str):
                         content = unicode(content, sys.getdefaultencoding(), 'replace')
+                    content = validator.preprocess_data(content)
                     content = content.splitlines()
                     width = int(log(len(content), 10)) + 1
                     line_template = " %" + ("%i" % width) + "i: %s"
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/3.17.rst	Fri Jun 14 16:26:25 2013 +0200
@@ -0,0 +1,60 @@
+What's new in CubicWeb 3.17?
+============================
+
+New functionalities
+--------------------
+
+* add a command to compare db schema and file system schema
+  (see `#464991 <http://www.cubicweb.org/464991>`_)
+
+* Add CubicWebRequestBase.content with the content of the HTTP request
+  (see `#2742453 <http://www.cubicweb.org/2742453>`_)
+
+* Add directive bookmark to ReST rendering
+  (see `#2545595 <http://www.cubicweb.org/ticket/2545595>`_)
+
+* Allow user defined final type
+  (see `#124342 <https://www.logilab.org/ticket/124342>`_)
+
+
+API changes
+-----------
+
+* drop typed_eid() in favour of int() (see `#2742462 <http://www.cubicweb.org/2742462>`_)
+
+* The SIOC views and adapters have been removed from CubicWeb and moved to the
+  `sioc` cube.
+
+* The web page embedding views and adapters have been removed from CubicWeb and
+  moved to the `embed` cube.
+
+* The email sending views and controllers have been removed from CubicWeb and
+  moved to the `massmailing` cube.
+
+* ``RenderAndSendNotificationView`` is deprecated in favor of
+  ``ActualNotificationOp``; the new operation uses the more efficient *data*
+  idiom.
+
+* Looping tasks can now have an interval <= ``0``. A negative interval disables
+  the looping task entirely.
+
+* We now serve html instead of xhtml.
+  (see `#2065651 <http://www.cubicweb.org/ticket/2065651>`_)
+
+
+Deprecation
+---------------------
+
+* ``ldapuser`` has been deprecated. It will be fully dropped in the next
+  version. If you are still using ldapuser, switch to ``ldapfeed`` **NOW**!
+
+* ``hijack_user`` has been deprecated. It will be dropped soon.
+
+Deprecated Code Drops
+----------------------
+
+* The progress views and adapters have been removed from CubicWeb. These
+  classes were deprecated since 3.14.0. They are still available in the
+  `iprogress` cube.
+
+* API deprecated since 3.7 have been dropped.
--- a/doc/book/en/admin/pyro.rst	Fri Jun 14 16:13:24 2013 +0200
+++ b/doc/book/en/admin/pyro.rst	Fri Jun 14 16:26:25 2013 +0200
@@ -53,7 +53,7 @@
     cnx.commit()
 
 Calling :meth:`cubicweb.dbapi.load_appobjects`, will populate the
-cubicweb registrires (see :ref:`VRegistryIntro`) with the application
+cubicweb registries (see :ref:`VRegistryIntro`) with the application
 objects installed on the host where the script runs. You'll then be
 allowed to use the ORM goodies and custom entity methods and views. Of
 course this is optional, without it you can still get the repository
--- a/doc/book/en/conf.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/doc/book/en/conf.py	Fri Jun 14 16:26:25 2013 +0200
@@ -31,7 +31,6 @@
 # All configuration values have a default value; values that are commented out
 # serve to show the default value.
 
-import sys, os
 from os import path as osp
 
 path = __file__
--- a/doc/book/en/devrepo/repo/notifications.rst	Fri Jun 14 16:13:24 2013 +0200
+++ b/doc/book/en/devrepo/repo/notifications.rst	Fri Jun 14 16:26:25 2013 +0200
@@ -3,4 +3,27 @@
 Notifications management
 ========================
 
-.. XXX FILLME
+CubicWeb provides a machinery to ease notifications handling. To use it for a
+notification:
+
+* write a view inheriting from
+  :class:`~cubicweb.sobjects.notification.NotificationView`.  The usual view API
+  is used to generate the email (plain text) content, and additional
+  :meth:`~cubicweb.sobjects.notification.NotificationView.subject` and
+  :meth:`~cubicweb.sobjects.notification.NotificationView.recipients` methods
+  are used to build the email's subject and
+  recipients. :class:`NotificationView` provides default implementation for both
+  methods.
+
+* write a hook for the event that should trigger this notification, select the
+  view (without rendering it), and give it to
+  :func:`cubicweb.hooks.notification.notify_on_commit` so that the notification
+  will be sent if the transaction succeeds.
+
+
+.. XXX explain recipient finder and provide example
+
+API details
+~~~~~~~~~~~
+.. autoclass:: cubicweb.sobjects.notification.NotificationView
+.. autofunction:: cubicweb.hooks.notification.notify_on_commit
--- a/doc/book/en/devrepo/repo/sessions.rst	Fri Jun 14 16:13:24 2013 +0200
+++ b/doc/book/en/devrepo/repo/sessions.rst	Fri Jun 14 16:26:25 2013 +0200
@@ -199,3 +199,8 @@
      if hasattr(req.cnx, 'foo_user') and req.foo_user:
          return 1
      return 0
+
+Full API Session
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. autoclass:: cubicweb.server.session.Session
--- a/doc/book/en/intro/concepts.rst	Fri Jun 14 16:13:24 2013 +0200
+++ b/doc/book/en/intro/concepts.rst	Fri Jun 14 16:26:25 2013 +0200
@@ -29,8 +29,7 @@
 
 .. note::
 
- The command :command:`cubicweb-ctl list` displays the list of cubes
- installed on your system.
+ The command :command:`cubicweb-ctl list` displays the list of available cubes.
 
 .. _`CubicWeb.org Forge`: http://www.cubicweb.org/project/
 .. _`cubicweb-blog`: http://www.cubicweb.org/project/cubicweb-blog
@@ -89,7 +88,7 @@
 state of an object changes. See :ref:`HookIntro` below.
 
 .. [1] not to be confused with a Mercurial repository or a Debian repository.
-.. _`Python Remote Objects`: http://pyro.sourceforge.net/
+.. _`Python Remote Objects`: http://pythonhosted.org/Pyro4/
 
 .. _WebEngineIntro:
 
--- a/doc/book/en/intro/history.rst	Fri Jun 14 16:13:24 2013 +0200
+++ b/doc/book/en/intro/history.rst	Fri Jun 14 16:26:25 2013 +0200
@@ -28,5 +28,5 @@
 and energy originally put in the design of the framework.
 
 
-.. _Narval: http://www.logilab.org/project/narval
+.. _Narval: http://www.logilab.org/project/narval-moved
 .. _Logilab: http://www.logilab.fr/
--- a/doc/book/en/tutorials/base/conclusion.rst	Fri Jun 14 16:13:24 2013 +0200
+++ b/doc/book/en/tutorials/base/conclusion.rst	Fri Jun 14 16:26:25 2013 +0200
@@ -3,7 +3,7 @@
 What's next?
 ------------
 
-In this tutorial, we have seen have you can, right after the installation of
+In this tutorial, we have seen that you can, right after the installation of
 |cubicweb|, build a web application in a few minutes by defining a data model as
 assembling cubes. You get a working application that you can then customize there
 and there while keeping something that works. This is important in agile
--- a/doc/book/en/tutorials/index.rst	Fri Jun 14 16:13:24 2013 +0200
+++ b/doc/book/en/tutorials/index.rst	Fri Jun 14 16:26:25 2013 +0200
@@ -18,3 +18,4 @@
    base/index
    advanced/index
    tools/windmill.rst
+   textreports/index
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/book/en/tutorials/textreports/index.rst	Fri Jun 14 16:26:25 2013 +0200
@@ -0,0 +1,13 @@
+.. -*- coding: utf-8 -*-
+
+Writing text reports with RestructuredText
+==========================================
+
+|cubicweb| offers several text formats for the RichString type used in schemas,
+including restructuredtext.
+
+Three additional restructuredtext roles are defined by |cubicweb|:
+
+.. autofunction:: cubicweb.ext.rest.eid_reference_role
+.. autofunction:: cubicweb.ext.rest.rql_role
+.. autofunction:: cubicweb.ext.rest.bookmark_role
--- a/doc/tools/generate_modules.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/doc/tools/generate_modules.py	Fri Jun 14 16:26:25 2013 +0200
@@ -19,8 +19,6 @@
 
 """
 
-import sys
-
 EXCLUDE_DIRS = ('test', 'tests', 'examples', 'data', 'doc', 'dist',
                 '.hg', 'migration')
 if __name__ == '__main__':
--- a/doc/tools/pyjsrest.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/doc/tools/pyjsrest.py	Fri Jun 14 16:26:25 2013 +0200
@@ -136,9 +136,7 @@
     'cubicweb.preferences',
     'cubicweb.edition',
     'cubicweb.reledit',
-    'cubicweb.iprogress',
     'cubicweb.rhythm',
-    'cubicweb.gmap',
     'cubicweb.timeline-ext',
 ]
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/tutorials/dataimport/data_import_tutorial.rst	Fri Jun 14 16:26:25 2013 +0200
@@ -0,0 +1,646 @@
+Importing relational data into a CubicWeb instance
+==================================================
+
+Introduction
+~~~~~~~~~~~~
+
+This tutorial explains how to import data from an external source (e.g. a collection of files) 
+into a CubicWeb cube instance.
+
+First, once we know the format of the data we wish to import, we devise a 
+*data model*, that is, a CubicWeb (Yams) schema which reflects the way the data
+is structured. This schema is implemented in the ``schema.py`` file.
+In this tutorial, we will describe such a schema for a particular data set, 
+the Diseasome data (see below).
+
+Once the schema is defined, we create a cube and an instance. 
+The cube is a specification of an application, whereas an instance 
+is the application per se. 
+
+Once the schema is defined and the instance is created, the import can be performed, via
+the following steps:
+
+1. Build a custom parser for the data to be imported. Thus, one obtains a Python
+   memory representation of the data.
+
+2. Map the parsed data to the data model defined in ``schema.py``.
+
+3. Perform the actual import of the data. This comes down to "populating"
+   the data model with the memory representation obtained at 1, according to
+   the mapping defined at 2.
+
+This tutorial illustrates all the above steps in the context of relational data
+stored in the RDF format.
+
+More specifically, we describe the import of Diseasome_ RDF/OWL data.
+
+.. _Diseasome: http://datahub.io/dataset/fu-berlin-diseasome
+
+Building a data model
+~~~~~~~~~~~~~~~~~~~~~
+
+The first thing to do when using CubicWeb for creating an application from scratch
+is to devise a *data model*, that is, a relational representation of the problem to be
+modeled or of the structure of the data to be imported. 
+
+In such a schema, we define
+an entity type (``EntityType`` objects) for each type of entity to import. Each such type
+has several attributes. If the attributes are of known CubicWeb (Yams) types, viz. numbers,
+strings or characters, then they are defined as attributes, as e.g. ``attribute = Int()``
+for an attribute named ``attribute`` which is an integer. 
+
+Each such type also has a set of
+relations, which are defined like the attributes, except that they represent, in fact,
+relations between the entities of the type under discussion and the objects of a type which
+is specified in the relation definition. 
+
+For example, for the Diseasome data, we have two types of entities, genes and diseases.
+Thus, we create two classes which inherit from ``EntityType``::
+
+    class Disease(EntityType):
+        # Corresponds to http://www.w3.org/2000/01/rdf-schema#label
+        label = String(maxsize=512, fulltextindexed=True)
+        ...
+
+        #Corresponds to http://www4.wiwiss.fu-berlin.de/diseasome/resource/diseasome/associatedGene
+        associated_genes = SubjectRelation('Gene', cardinality='**')
+        ...
+
+        #Corresponds to 'http://www4.wiwiss.fu-berlin.de/diseasome/resource/diseasome/chromosomalLocation'
+        chromosomal_location = SubjectRelation('ExternalUri', cardinality='?*', inlined=True)
+
+
+    class Gene(EntityType):
+        ...
+
+In this schema, there are attributes whose values are numbers or strings. Thus, they are 
+defined by using the CubicWeb / Yams primitive types, e.g., ``label = String(maxsize=512)``. 
+These types can have several constraints or attributes, such as ``maxsize``. 
+There are also relations, either between the entity types themselves, or between them
+and a CubicWeb type, ``ExternalUri``. The latter defines a class of URI objects in 
+CubicWeb. For instance, the ``chromosomal_location`` attribute is a relation between 
+a ``Disease`` entity and an ``ExternalUri`` entity. The relation is marked by the CubicWeb /
+Yams ``SubjectRelation`` method. The latter can have several optional keyword arguments, such as
+``cardinality`` which specifies the number of subjects and objects related by the relation type 
+specified. For example, the ``'?*'`` cardinality in the ``chromosomal_location`` relation type says
+that zero or more ``Disease`` entities are related to zero or one ``ExternalUri`` entities.
+In other words, a ``Disease`` entity is related to at most one ``ExternalUri`` entity via the
+``chromosomal_location`` relation type, and that we can have zero or more ``Disease`` entities in the
+data base. 
+For a relation between the entity types themselves, the ``associated_genes`` between a ``Disease``
+entity and a ``Gene`` entity is defined, so that any number of ``Gene`` entities can be associated
+to a ``Disease``, and there can be any number of ``Disease`` s if a ``Gene`` exists.
+
+Of course, before being able to use the CubicWeb / Yams built-in objects, we need to import them::
+
+    
+    from yams.buildobjs import EntityType, SubjectRelation, String, Int
+    from cubicweb.schemas.base import ExternalUri
+
+Building a custom data parser
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The data we wish to import is structured in the RDF format,
+as a text file containing a set of lines. 
+On each line, there are three fields. 
+The first two fields are URIs ("Universal Resource Identifiers"). 
+The third field is either an URI or a string. Each field bears a particular meaning:
+
+- the leftmost field is an URI that holds the entity to be imported. 
+  Note that the entities defined in the data model (i.e., in ``schema.py``) should 
+  correspond to the entities whose URIs are specified in the import file.
+
+- the middle field is an URI that holds a relation whose subject is the  entity 
+  defined by the leftmost field. Note that this should also correspond
+  to the definitions in the data model.
+
+- the rightmost field is either an URI or a string. When this field is an URI, 
+  it gives the object of the relation defined by the middle field.
+  When the rightmost field is a string, the middle field is interpreted as an attribute
+  of the subject (introduced by the leftmost field) and the rightmost field is
+  interpreted as the value of the attribute.
+
+Note however that some attributes (i.e. relations whose objects are strings) 
+have their objects defined as strings followed by ``^^`` and by another URI;
+we ignore this part.
+
+Let us show some examples:
+
+- of a line holding an attribute definition:
+  ``<http://www4.wiwiss.fu-berlin.de/diseasome/resource/genes/CYP17A1> 
+  <http://www.w3.org/2000/01/rdf-schema#label> "CYP17A1" .``
+  The line contains the definition of the ``label`` attribute of an
+  entity of type ``gene``. The value of ``label`` is '``CYP17A1``'.
+
+- of a line holding a relation definition:
+  ``<http://www4.wiwiss.fu-berlin.de/diseasome/resource/diseases/1> 
+  <http://www4.wiwiss.fu-berlin.de/diseasome/resource/diseasome/associatedGene> 
+  <http://www4.wiwiss.fu-berlin.de/diseasome/resource/genes/HADH2> .``
+  The line contains the definition of the ``associatedGene`` relation between
+  a ``disease`` subject entity identified by ``1`` and a ``gene`` object 
+  entity defined by ``HADH2``.
+
+Thus, for parsing the data, we can (:note: see the ``diseasome_parser`` module):
+
+1. define a couple of regular expressions for parsing the two kinds of lines, 
+   ``RE_ATTS`` for parsing the attribute definitions, and ``RE_RELS`` for parsing
+   the relation definitions.
+
+2. define a function that iterates through the lines of the file and retrieves
+   (``yield`` s) a (subject, relation, object) tuple for each line.
+   We called it ``_retrieve_structure`` in the ``diseasome_parser`` module.
+   The function needs the file name and the types for which information
+   should be retrieved.
+
+Alternatively, instead of hand-making the parser, one could use the RDF parser provided
+in the ``dataio`` cube.
+
+.. XXX To further study and detail the ``dataio`` cube usage.
+
+Once we get to have the (subject, relation, object) triples, we need to map them into
+the data model.
+
+
+Mapping the data to the schema
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+In the case of diseasome data, we can just define two dictionaries for mapping
+the names of the relations as extracted by the parser, to the names of the relations
+as defined in the ``schema.py`` data model. In the ``diseasome_parser`` module 
+they are called ``MAPPING_ATTS`` and ``MAPPING_RELS``. 
+Given that the relation and attribute names are given in CamelCase in the original data,
+mappings are necessary if we follow PEP 8 when naming the attributes in the data model.
+For example, the RDF relation ``chromosomalLocation`` is mapped into the schema relation 
+``chromosomal_location``.
+
+Once these mappings have been defined, we just iterate over the (subject, relation, object)
+tuples provided by the parser and we extract the entities, with their attributes and relations.
+For each entity, we thus have a dictionary with two keys, ``attributes`` and ``relations``.
+The value associated to the ``attributes`` key is a dictionary containing (attribute: value) 
+pairs, where "value" is a string, plus the ``cwuri`` key / attribute holding the URI of 
+the entity itself.
+The value associated to the ``relations`` key is a dictionary containing (relation: value)
+pairs, where "value" is an URI.
+This is implemented in the ``entities_from_rdf`` interface function of the module 
+``diseasome_parser``. This function provides an iterator on the dictionaries containing
+the ``attributes`` and ``relations`` keys for all entities.
+
+However, this is a simple case. In real life, things can get much more complicated, and the 
+mapping can be far from trivial, especially when several data sources (which can follow 
+different formatting and even structuring conventions) must be mapped into the same data model.
+
+Importing the data
+~~~~~~~~~~~~~~~~~~
+
+The data import code should be placed in a Python module. Let us call it 
+``diseasome_import.py``. Then, this module should be called via
+``cubicweb-ctl``, as follows::
+
+    cubicweb-ctl shell diseasome_import.py -- <other arguments e.g. data file>
+
+In the import module, we should use a *store* for doing the import.
+A store is an object which provides three kinds of methods for
+importing data:
+
+- a method for importing the entities, along with the values
+  of their attributes.
+- a method for importing the relations between the entities.
+- a method for committing the imports to the database.
+
+In CubicWeb, we have four stores:
+
+1. ``ObjectStore``: base class for the stores in CubicWeb.
+   It only provides a skeleton for all other stores and
+   provides the means for creating the memory structures
+   (dictionaries) that hold the entities and the relations
+   between them.
+
+2. ``RQLObjectStore``: store which uses the RQL language for performing
+   database insertions and updates. It relies on all the CubicWeb hooks 
+   machinery, especially for dealing with security issues (database access
+   permissions).
+
+3. ``NoHookRQLObjectStore``: store which uses the RQL language for
+   performing database insertions and updates, but for which 
+   all hooks are deactivated. This implies that 
+   certain checks with respect to the CubicWeb / Yams schema 
+   (data model) are not performed. However, all SQL queries 
+   obtained from the RQL ones are executed in a sequential
+   manner, one query per inserted entity.
+
+4. ``SQLGenObjectStore``: store which uses the SQL language directly. 
+   It inserts entities either sequentially, by executing an SQL query 
+   for each entity, or directly by using one PostGreSQL ``COPY FROM`` 
+   query for a set of similarly structured entities. 
+
+For really massive imports (millions or billions of entities), there
+is a cube ``dataio`` which contains another store, called 
+``MassiveObjectStore``. This store is similar to ``SQLGenObjectStore``,
+except that anything related to CubicWeb is bypassed. That is, even the
+CubicWeb EID entity identifiers are not handled. This store is the fastest,
+but has a slightly different API from the other four stores mentioned above.
+Moreover, it has an important limitation, in that it doesn't insert inlined [#]_
+relations in the database. 
+
+.. [#] An inlined relation is a relation defined in the schema
+       with the keyword argument ``inlined=True``. Such a relation
+       is inserted in the database as an attribute of the entity
+       whose subject it is.
+
+In the following section we will see how to import data by using the stores
+in CubicWeb's ``dataimport`` module.
+
+Using the stores in ``dataimport``
+++++++++++++++++++++++++++++++++++
+
+``ObjectStore`` is seldom used in real life for importing data, since it is
+only the base store for the other stores and it doesn't perform an actual
+import of the data. Nevertheless, the other three stores, which import data,
+are based on ``ObjectStore`` and provide the same API.
+
+All three stores ``RQLObjectStore``, ``NoHookRQLObjectStore`` and
+``SQLGenObjectStore`` provide exactly the same API for importing data, that is
+entities and relations, in an SQL database. 
+
+Before using a store, one must import the ``dataimport`` module and then initialize 
+the store, with the current ``session`` as a parameter::
+
+    import cubicweb.dataimport as cwdi
+    ...
+
+    store = cwdi.RQLObjectStore(session)
+
+Each such store provides three methods for data import:
+
+#. ``create_entity(Etype, **attributes)``, which allows us to add
+   an entity of the Yams type ``Etype`` to the database. This entity's attributes
+   are specified in the ``attributes`` dictionary. The method returns the entity 
+   created in the database. For example, we add two entities,
+   a person, of ``Person`` type, and a location, of ``Location`` type::
+
+        person = store.create_entity('Person', name='Toto', age='18', height='190')
+
+        location = store.create_entity('Location', town='Paris', arrondissement='13')
+
+#. ``relate(subject_eid, r_type, object_eid)``, which allows us to add a relation
+   of the Yams type ``r_type`` to the database. The relation's subject is an entity
+   whose EID is ``subject_eid``; its object is another entity, whose EID is 
+   ``object_eid``.  For example [#]_::
+
+        store.relate(person.eid(), 'lives_in', location.eid(), **kwargs)
+
+   ``kwargs`` is only used by the ``SQLGenObjectStore``'s ``relate`` method and is here
+   to allow us to specify the type of the subject of the relation, when the relation is
+   defined as inlined in the schema. 
+
+.. [#] The ``eid`` method of an entity defined via ``create_entity`` returns
+       the entity identifier as assigned by CubicWeb when creating the entity.
+       This only works for entities defined via the stores in the CubicWeb's
+       ``dataimport`` module.
+
+    The keyword argument that is understood by ``SQLGenObjectStore`` is called 
+   ``subjtype`` and holds the type of the subject entity. For the example considered here,
+   this comes to having [#]_::
+
+        store.relate(person.eid(), 'lives_in', location.eid(), subjtype=person.cw_etype)
+
+   If ``subjtype`` is not specified, then the store tries to infer the type of the subject.
+   However, this doesn't always work, e.g. when there are several possible subject types
+   for a given relation type. 
+
+.. [#] The ``cw_etype`` attribute of an entity defined via ``create_entity`` holds
+       the type of the entity just created. This only works for entities defined via
+       the stores in the CubicWeb's ``dataimport`` module. In the example considered
+       here, ``person.cw_etype`` holds ``'Person'``.
+    
+   All the other stores but ``SQLGenObjectStore`` ignore the ``kwargs`` parameters.
+
+#. ``flush()``, which allows us to perform the actual commit into the database, along
+   with some cleanup operations. Ideally, this method should be called as often as 
+   possible, that is after each insertion in the database, so that database sessions
+   are kept as atomic as possible. In practice, we usually call this method twice: 
+   first, after all the entities have been created, second, after all relations have
+   been created. 
+
+   Note however that before each commit the database insertions
+   have to be consistent with the schema. Thus, if, for instance,
+   an entity has an attribute defined through a relation (viz.
+   a ``SubjectRelation``) with a ``"1"`` or ``"+"`` object 
+   cardinality, we have to create the entity under discussion,
+   the object entity of the relation under discussion, and the
+   relation itself, before committing the additions to the database.
+
+   The ``flush`` method is simply called as::
+
+        store.flush()
+
+
+Using the ``MassiveObjectStore`` in the ``dataio`` cube
++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+This store, available in the ``dataio`` cube, allows us to
+fully dispense with the CubicWeb import mechanisms and hence
+to interact directly with the database server, via SQL queries.
+
+Moreover, these queries rely on PostGreSQL's ``COPY FROM`` instruction
+to create several entities in a single query. This brings tremendous 
+performance improvements with respect to the RQL-based data insertion
+procedures.
+
+However, the API of this store is slightly different from the API of
+the stores in CubicWeb's ``dataimport`` module.
+
+Before using the store, one has to import the ``dataio`` cube's 
+``dataimport`` module, then initialize the store by giving it the
+``session`` parameter::
+
+    from cubes.dataio import dataimport as mcwdi
+    ...
+
+    store = mcwdi.MassiveObjectStore(session)
+
+The ``MassiveObjectStore`` provides six methods for inserting data
+into the database:
+
+#. ``init_rtype_table(SubjEtype, r_type, ObjEtype)``, which specifies the
+   creation of the tables associated to the relation types in the database.
+   Each such table has three columns, the type of the subject entity, the
+   type of the relation (that is, the name of the attribute in the subject
+   entity which is defined via the relation), and the type of the object
+   entity. For example::
+
+        store.init_rtype_table('Person', 'lives_in', 'Location')
+
+   Please note that these tables can be created before the entities, since
+   they only specify their types, not their unique identifiers.
+
+#. ``create_entity(Etype, **attributes)``, which allows us to add new entities,
+   whose attributes are given in the ``attributes`` dictionary. 
+   Please note however that, by default, this method does *not* return 
+   the created entity. The method is called, for example, as in::
+
+        store.create_entity('Person', name='Toto', age='18', height='190', 
+                            uri='http://link/to/person/toto_18_190')
+        store.create_entity('Location', town='Paris', arrondissement='13',
+                            uri='http://link/to/location/paris_13')
+   
+   In order to be able to link these entities via the relations when needed,
+   we must provide ourselves a means for uniquely identifying the entities.
+   In general, this is done via URIs, stored in attributes like ``uri`` or
+   ``cwuri``. The name of the attribute is irrelevant as long as its value is
+   unique for each entity.
+
+#. ``relate_by_iid(subject_iid, r_type, object_iid)`` allows us to actually 
+   relate the entities uniquely identified by ``subject_iid`` and 
+   ``object_iid`` via a relation of type ``r_type``. For example::
+
+        store.relate_by_iid('http://link/to/person/toto_18_190',
+                            'lives_in',
+                            'http://link/to/location/paris_13')
+
+   Please note that this method does *not* work for inlined relations!
+
+#. ``convert_relations(SubjEtype, r_type, ObjEtype, subj_iid_attribute,
+   obj_iid_attribute)``
+   allows us to actually insert
+   the relations in the database. At one call of this method, one inserts
+   all the relations of type ``r_type`` between entities of given types.
+   ``subj_iid_attribute`` and ``obj_iid_attribute`` are the names
+   of the attributes which store the unique identifiers of the entities,
+   as assigned by the user. These names can be identical, as long as
+   their values are unique. For example, for inserting all relations
+   of type ``lives_in`` between ``Person`` and ``Location`` entities,
+   we write::
+        
+        store.convert_relations('Person', 'lives_in', 'Location', 'uri', 'uri')
+
+#. ``flush()`` performs the actual commit in the database. It only needs 
+   to be called after ``create_entity`` and ``relate_by_iid`` calls. 
+   Please note that ``relate_by_iid`` does *not* perform insertions into
+   the database, hence calling ``flush()`` for it would have no effect.
+
+#. ``cleanup()`` performs database cleanups, by removing temporary tables.
+   It should only be called at the end of the import.
+
+
+
+.. XXX to add smth on the store's parameter initialization.
+
+
+
+Application to the Diseasome data
++++++++++++++++++++++++++++++++++
+
+Import setup
+############
+
+We define an import function, ``diseasome_import``, which does basically four things:
+
+#. creates and initializes the store to be used, via a line such as::
+    
+        store = cwdi.SQLGenObjectStore(session)
+   
+   where ``cwdi`` is the imported ``cubicweb.dataimport`` or 
+   ``cubes.dataio.dataimport``.
+
+#. calls the diseasome parser, that is, the ``entities_from_rdf`` function in the 
+   ``diseasome_parser`` module and iterates on its result, in a line such as::
+        
+        for entity, relations in parser.entities_from_rdf(filename, ('gene', 'disease')):
+        
+   where ``parser`` is the imported ``diseasome_parser`` module, and ``filename`` is the 
+   name of the file containing the data (with its path), e.g. ``../data/diseasome_dump.nt``.
+
+#. creates the entities to be inserted in the database; for Diseasome, there are two 
+   kinds of entities:
+   
+   #. entities defined in the data model, viz. ``Gene`` and ``Disease`` in our case.
+   #. entities which are built in CubicWeb / Yams, viz. ``ExternalUri`` which define
+      URIs.
+   
+   As we are working with RDF data, each entity is defined through a series of URIs. Hence,
+   each "relational attribute" [#]_ of an entity is defined via an URI, that is, in CubicWeb
+   terms, via an ``ExternalUri`` entity. The entities are created, in the loop presented above,
+   as such::
+        
+        ent = store.create_entity(etype, **entity)
+        
+   where ``etype`` is the appropriate entity type, either ``Gene`` or ``Disease``.
+
+.. [#] By "relational attribute" we denote an attribute (of an entity) which
+       is defined through a relation, e.g. the ``chromosomal_location`` attribute
+       of ``Disease`` entities, which is defined through a relation between a
+       ``Disease`` and an ``ExternalUri``.
+   
+   The ``ExternalUri`` entities are as many as URIs in the data file. For them, we define a unique
+   attribute, ``uri``, which holds the URI under discussion::
+        
+        extu = store.create_entity('ExternalUri', uri="http://path/of/the/uri")
+
+#. creates the relations between the entities. We have relations between:
+   
+   #. entities defined in the schema, e.g. between ``Disease`` and ``Gene``
+      entities, such as the ``associated_genes`` relation defined for 
+      ``Disease`` entities.
+   #. entities defined in the schema and ``ExternalUri`` entities, such as ``gene_id``.
+   
+   The way relations are added to the database depends on the store: 
+   
+   - for the stores in the CubicWeb ``dataimport`` module, we only use 
+     ``store.relate``, in 
+     another loop, on the relations (that is, a 
+     loop inside the preceding one, mentioned at step 2)::
+        
+        for rtype, rels in relations.iteritems():
+            ...
+            
+            store.relate(ent.eid(), rtype, extu.eid(), **kwargs)
+        
+     where ``kwargs`` is a dictionary designed to accommodate the need for specifying
+     the type of the subject entity of the relation, when the relation is inlined and
+     ``SQLGenObjectStore`` is used. For example::
+            
+            ...
+            store.relate(ent.eid(), 'chromosomal_location', extu.eid(), subjtype='Disease')
+   
+   - for the ``MassiveObjectStore`` in the ``dataio`` cube's ``dataimport`` module, 
+     the relations are created in three steps:
+     
+     #. first, a table is created for each relation type, as in::
+            
+            ...
+            store.init_rtype_table(ent.cw_etype, rtype, extu.cw_etype)
+            
+        which comes down to lines such as::
+            
+            store.init_rtype_table('Disease', 'associated_genes', 'Gene')
+            store.init_rtype_table('Gene', 'gene_id', 'ExternalUri')
+            
+     #. second, the URI of each entity will be used as its identifier, in the 
+        ``relate_by_iid`` method, such as::
+            
+            disease_uri = 'http://www4.wiwiss.fu-berlin.de/diseasome/resource/diseases/3'
+            gene_uri = 'http://www4.wiwiss.fu-berlin.de/diseasome/resource/genes/HSD3B2'
+            store.relate_by_iid(disease_uri, 'associated_genes', gene_uri)
+            
+     #. third, the relations for each relation type will be added to the database, 
+        via the ``convert_relations`` method, such as in::
+            
+            store.convert_relations('Disease', 'associated_genes', 'Gene', 'cwuri', 'cwuri')
+            
+        and::
+            
+            store.convert_relations('Gene', 'gene_id', 'ExternalUri', 'cwuri', 'uri')
+            
+        where ``cwuri`` and ``uri`` are the attributes which store the URIs of the entities
+        defined in the data model, and of the ``ExternalUri`` entities, respectively.
+
+#. flushes all relations and entities::
+    
+    store.flush()
+
+   which performs the actual commit of the inserted entities and relations in the database.
+
+If the ``MassiveObjectStore`` is used, then a cleanup of temporary SQL tables should be performed
+at the end of the import::
+
+    store.cleanup()
+
+Timing benchmarks
+#################
+
+In order to time the import script, we just decorate the import function with the ``timed``
+decorator::
+    
+    from logilab.common.decorators import timed
+    ...
+    
+    @timed
+    def diseasome_import(session, filename):
+        ...
+
+After running the import function as shown in the "Importing the data" section, we obtain two time measurements::
+
+    diseasome_import clock: ... / time: ...
+
+Here, the meanings of these measurements are [#]_:
+
+- ``clock`` is the time spent by CubicWeb, on the server side (i.e. hooks and data pre- / post-processing on SQL 
+  queries),
+
+- ``time`` is the sum between ``clock`` and the time spent in PostGreSQL.
+
+.. [#] The meanings of the ``clock`` and ``time`` measurements, when using the ``@timed``
+       decorators, were taken from `a blog post on massive data import in CubicWeb`_.
+
+.. _a blog post on massive data import in CubicWeb: http://www.cubicweb.org/blogentry/2116712
+
+The import function is put in an import module, named ``diseasome_import`` here. The module is called
+directly from the CubicWeb shell, as follows::
+
+    cubicweb-ctl shell diseasome_instance diseasome_import.py \
+    -- -df diseasome_import_file.nt -st StoreName
+
+The module accepts two arguments:
+
+- the data file, introduced by ``-df [--datafile]``, and
+- the store, introduced by ``-st [--store]``.
+
+The timings (in seconds) for different stores are given in the following table, for 
+importing 4213 ``Disease`` entities and 3919 ``Gene`` entities with the import module
+just described:
+
++--------------------------+------------------------+--------------------------------+------------+
+| Store                    | CubicWeb time (clock)  | PostGreSQL time (time - clock) | Total time |
++==========================+========================+================================+============+
+| ``RQLObjectStore``       | 225.98                 | 62.05                          | 288.03     |
++--------------------------+------------------------+--------------------------------+------------+
+| ``NoHookRQLObjectStore`` | 62.73                  | 51.38                          | 114.11     |
++--------------------------+------------------------+--------------------------------+------------+
+| ``SQLGenObjectStore``    | 20.41                  | 11.03                          | 31.44      |
++--------------------------+------------------------+--------------------------------+------------+
+| ``MassiveObjectStore``   | 4.84                   | 6.93                           | 11.77      |
++--------------------------+------------------------+--------------------------------+------------+
+
+
+Conclusions
+~~~~~~~~~~~
+
+In this tutorial we have seen how to import data in a CubicWeb application instance. We have first seen how to
+create a schema, then how to create a parser of the data and a mapping of the data to the schema.
+Finally, we have seen four ways of importing data into CubicWeb.
+
+Three of those are integrated into CubicWeb, namely the ``RQLObjectStore``, ``NoHookRQLObjectStore`` and
+``SQLGenObjectStore`` stores, which have a common API:
+
+- ``RQLObjectStore`` is by far the slowest, especially its time spent on the 
+  CubicWeb side, and so it should be used only for small amounts of 
+  "sensitive" data (i.e. where security is a concern).
+
+- ``NoHookRQLObjectStore`` slashes by almost four the time spent on the CubicWeb side, 
+  but is also quite slow; on the PostGres side it is as slow as the previous store. 
+  It should be used for data where security is not a concern,
+  but consistency (with the data model) is.
+
+- ``SQLGenObjectStore`` slashes by three the time spent on the CubicWeb side and by five the time 
+  spent on the PostGreSQL side. It should be used for relatively great amounts of data, where
+  security and data consistency are not a concern. Compared to the previous store, it has the
+  disadvantage that, for inlined relations, we must specify their subjects' types.
+
+For really huge amounts of data there is a fourth store, ``MassiveObjectStore``, available
+from the ``dataio`` cube. It provides a blazing performance with respect to all other stores:
+it is almost 25 times faster than ``RQLObjectStore`` and almost three times faster than 
+``SQLGenObjectStore``. However, it has a few usage caveats that should be taken into account:
+
+#. it cannot insert relations defined as inlined in the schema,
+#. no security or consistency check is performed on the data,
+#. its API is slightly different from the other stores.
+
+Hence, this store should be used when security and data consistency are not a concern,
+and there are no inlined relations in the schema.
+
+
+
+
+
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/tutorials/dataimport/diseasome_import.py	Fri Jun 14 16:26:25 2013 +0200
@@ -0,0 +1,195 @@
+# -*- coding: utf-8 -*-
+# copyright 2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr -- mailto:contact@logilab.fr
+#
+# This program is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""This module imports the Diseasome data into a CubicWeb instance.
+"""
+
+# Python imports
+import sys
+import argparse
+
+# Logilab import, for timing
+from logilab.common.decorators import timed
+
+# CubicWeb imports
+import cubicweb.dataimport as cwdi
+from cubes.dataio import dataimport as mcwdi
+
+# Diseasome parser import
+import diseasome_parser as parser
+
+def _is_of_class(instance, class_name):
+    """Helper function to determine whether an instance is
+    of a specified class or not.
+    Returns True if this is the case and False otherwise.
+    """
+    if instance.__class__.__name__ == class_name:
+        return True
+    else:
+        return False
+
+@timed
+def diseasome_import(session, file_name, store):
+    """Main function for importing Diseasome data.
+
+    It uses the Diseasome data parser to get the contents of the
+    data from a file, then uses a store for importing the data
+    into a CubicWeb instance.
+
+    >>> diseasome_import(session, 'file_name', Store)
+
+    """
+    exturis = dict(session.execute('Any U, X WHERE X is ExternalUri, X uri U'))
+    uri_to_eid = {}
+    uri_to_etype = {}
+    all_relations = {}
+    etypes = {('http://www4.wiwiss.fu-berlin.de/'
+               'diseasome/resource/diseasome/genes'): 'Gene',
+              ('http://www4.wiwiss.fu-berlin.de/'
+               'diseasome/resource/diseasome/diseases'): 'Disease'}
+    # Read the parsed data
+    for entity, relations in parser.entities_from_rdf(file_name, 
+                                                      ('gene', 'disease')):
+        uri = entity.get('cwuri', None)
+        types = list(relations.get('types', []))
+        if not types:
+            continue
+        etype = etypes.get(types[0])
+        if not etype:
+            sys.stderr.write('Entity type %s not recognized.\n' % types[0])
+            sys.stderr.flush()
+        if _is_of_class(store, 'MassiveObjectStore'):
+            for relation in (set(relations).intersection(('classes', 
+                            'possible_drugs', 'omim', 'omim_page', 
+                            'chromosomal_location', 'same_as', 'gene_id',
+                            'hgnc_id', 'hgnc_page'))):
+                store.init_rtype_table(etype, relation, 'ExternalUri')
+            for relation in set(relations).intersection(('subtype_of',)):
+                store.init_rtype_table(etype, relation, 'Disease')
+            for relation in set(relations).intersection(('associated_genes',)):
+                store.init_rtype_table(etype, relation, 'Gene')
+        # Create the entities
+        ent = store.create_entity(etype, **entity)
+        if not _is_of_class(store, 'MassiveObjectStore'):
+            uri_to_eid[uri] = ent.eid
+            uri_to_etype[uri] = ent.cw_etype
+        else:
+            uri_to_eid[uri] = uri
+            uri_to_etype[uri] = etype
+        # Store relations for after
+        all_relations[uri] = relations
+    # Perform a first commit, of the entities
+    store.flush()
+    kwargs = {}
+    for uri, relations in all_relations.iteritems():
+        from_eid = uri_to_eid.get(uri)
+        # ``subjtype`` should be initialized if ``SQLGenObjectStore`` is used
+        # and there are inlined relations in the schema.
+        # If ``subjtype`` is not given, while ``SQLGenObjectStore`` is used
+        # and there are inlined relations in the schema, the store
+        # tries to infer the type of the subject, but this does not always 
+        # work, e.g. when there are several object types for the relation.
+        # ``subjtype`` is ignored for other stores, or if there are no
+        # inlined relations in the schema.
+        kwargs['subjtype'] = uri_to_etype.get(uri)
+        if not from_eid:
+            continue
+        for rtype, rels in relations.iteritems():
+            if rtype in ('classes', 'possible_drugs', 'omim', 'omim_page',
+                         'chromosomal_location', 'same_as', 'gene_id',
+                         'hgnc_id', 'hgnc_page'):
+                for rel in list(rels):
+                    if rel not in exturis:
+                        # Create the "ExternalUri" entities, which are the
+                        # objects of the relations
+                        extu = store.create_entity('ExternalUri', uri=rel)
+                        if not _is_of_class(store, 'MassiveObjectStore'):
+                            rel_eid = extu.eid
+                        else:
+                            # For the "MassiveObjectStore", the EIDs are 
+                            # in fact the URIs.
+                            rel_eid = rel
+                        exturis[rel] = rel_eid
+                    else:
+                        rel_eid = exturis[rel]
+                    # Create the relations that have "ExternalUri"s as objects
+                    if not _is_of_class(store, 'MassiveObjectStore'):
+                        store.relate(from_eid, rtype, rel_eid, **kwargs)
+                    else:
+                        store.relate_by_iid(from_eid, rtype, rel_eid)
+            elif rtype in ('subtype_of', 'associated_genes'):
+                for rel in list(rels):
+                    to_eid = uri_to_eid.get(rel)
+                    if to_eid:
+                        # Create relations that have objects of other type 
+                        # than "ExternalUri"
+                        if not _is_of_class(store, 'MassiveObjectStore'):
+                            store.relate(from_eid, rtype, to_eid, **kwargs)
+                        else:
+                            store.relate_by_iid(from_eid, rtype, to_eid)
+                    else:
+                        sys.stderr.write('Missing entity with URI %s '
+                                         'for relation %s\n' % (rel, rtype))
+                        sys.stderr.flush()
+    # Perform a second commit, of the "ExternalUri" entities.
+    # when the stores in the CubicWeb ``dataimport`` module are used,
+    # relations are also committed.
+    store.flush()
+    # If the ``MassiveObjectStore`` is used, then entity and relation metadata
+    # are pushed as well. By metadata we mean information on the creation
+    # time and author.
+    if _is_of_class(store, 'MassiveObjectStore'):
+        store.flush_meta_data()
+        for relation in ('classes', 'possible_drugs', 'omim', 'omim_page', 
+                         'chromosomal_location', 'same_as'):
+            # Afterwards, relations are actually created in the database.
+            store.convert_relations('Disease', relation, 'ExternalUri',
+                                    'cwuri', 'uri')
+        store.convert_relations('Disease', 'subtype_of', 'Disease', 
+                                'cwuri', 'cwuri')
+        store.convert_relations('Disease', 'associated_genes', 'Gene', 
+                                'cwuri', 'cwuri')
+        for relation in ('gene_id', 'hgnc_id', 'hgnc_page', 'same_as'):
+            store.convert_relations('Gene', relation, 'ExternalUri', 
+                                    'cwuri', 'uri')
+        # Clean up temporary tables in the database
+        store.cleanup()
+
+if __name__ == '__main__':
+    # Change sys.argv so that ``cubicweb-ctl shell`` can work out the options
+    # we give to our ``diseasome_import.py`` script.
+    sys.argv = [arg for 
+                arg in sys.argv[sys.argv.index("--") - 1:] if arg != "--"]
+    PARSER = argparse.ArgumentParser(description="Import Diseasome data")
+    PARSER.add_argument("-df", "--datafile", type=str,
+                        help="RDF data file name")
+    PARSER.add_argument("-st", "--store", type=str,
+                        default="RQLObjectStore",
+                        help="data import store")
+    ARGS = PARSER.parse_args()
+    if ARGS.datafile:
+        FILENAME = ARGS.datafile
+        if ARGS.store in (st + "ObjectStore" for 
+                          st in ("RQL", "NoHookRQL", "SQLGen")):
+            IMPORT_STORE = getattr(cwdi, ARGS.store)(session)
+        elif ARGS.store == "MassiveObjectStore":
+            IMPORT_STORE = mcwdi.MassiveObjectStore(session)
+        else:
+            sys.exit("Import store unknown")
+        diseasome_import(session, FILENAME, IMPORT_STORE)
+    else:
+        sys.exit("Data file not found or not specified")
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/tutorials/dataimport/diseasome_parser.py	Fri Jun 14 16:26:25 2013 +0200
@@ -0,0 +1,100 @@
+# -*- coding: utf-8 -*-
+
+"""
+Diseasome data import module.
+Its interface is the ``entities_from_rdf`` function.
+"""
+
+import re
+RE_RELS = re.compile(r'^<(.*?)>\s<(.*?)>\s<(.*?)>\s*\.')
+RE_ATTS = re.compile(r'^<(.*?)>\s<(.*?)>\s"(.*)"(\^\^<(.*?)>|)\s*\.')
+
+MAPPING_ATTS = {'bio2rdfSymbol': 'bio2rdf_symbol',
+                'label': 'label',
+                'name': 'name',
+                'classDegree': 'class_degree',
+                'degree': 'degree',
+                'size': 'size'}
+
+MAPPING_RELS = {'geneId': 'gene_id',
+                'hgncId': 'hgnc_id', 
+                'hgncIdPage': 'hgnc_page', 
+                'sameAs': 'same_as', 
+                'class': 'classes', 
+                'diseaseSubtypeOf': 'subtype_of', 
+                'associatedGene': 'associated_genes', 
+                'possibleDrug': 'possible_drugs',
+                'type': 'types',
+                'omim': 'omim', 
+                'omimPage': 'omim_page', 
+                'chromosomalLocation': 'chromosomal_location'}
+
+def _retrieve_reltype(uri):
+    """
+    Retrieve a relation type from an URI.
+
+    Internal function which takes an URI containing a relation type as input
+    and returns the name of the relation.
+    If no URI string is given, then the function returns None.
+    """
+    if uri:
+        return uri.rsplit('/', 1)[-1].rsplit('#', 1)[-1]
+
+def _retrieve_etype(tri_uri):
+    """
+    Retrieve entity type from a triple of URIs.
+
+    Internal function which takes a tuple of three URIs as input
+    and returns the type of the entity, as obtained from the
+    first member of the tuple.
+    """
+    if tri_uri:
+        return tri_uri.split('> <')[0].rsplit('/', 2)[-2].rstrip('s')
+
+def _retrieve_structure(filename, etypes):
+    """
+    Retrieve a (subject, relation, object) tuples iterator from a file.
+
+    Internal function which takes as input a file name and a tuple of 
+    entity types, and returns an iterator of (subject, relation, object)
+    tuples.
+    """
+    with open(filename) as fil:
+        for line in fil:
+            if _retrieve_etype(line) not in etypes:
+                continue
+            match = RE_RELS.match(line)
+            if not match:
+                match = RE_ATTS.match(line)
+            subj = match.group(1)
+            relation = _retrieve_reltype(match.group(2))
+            obj = match.group(3)
+            yield subj, relation, obj
+
+def entities_from_rdf(filename, etypes):
+    """
+    Return entities from an RDF file.
+
+    Module interface function which takes as input a file name and
+    a tuple of entity types, and returns an iterator on the 
+    attributes and relations of each entity. The attributes
+    and relations are retrieved as dictionaries.
+    
+    >>> for entities, relations in entities_from_rdf('data_file', 
+                                                     ('type_1', 'type_2')):
+        ...
+    """
+    entities = {}
+    for subj, rel, obj in _retrieve_structure(filename, etypes):
+        entities.setdefault(subj, {})
+        entities[subj].setdefault('attributes', {})
+        entities[subj].setdefault('relations', {})
+        entities[subj]['attributes'].setdefault('cwuri', unicode(subj))
+        if rel in MAPPING_ATTS:
+            entities[subj]['attributes'].setdefault(MAPPING_ATTS[rel], 
+                                                    unicode(obj))
+        if rel in MAPPING_RELS:
+            entities[subj]['relations'].setdefault(MAPPING_RELS[rel], set())
+            entities[subj]['relations'][MAPPING_RELS[rel]].add(unicode(obj))
+    return ((ent.get('attributes'), ent.get('relations')) 
+            for ent in entities.itervalues())
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/tutorials/dataimport/schema.py	Fri Jun 14 16:26:25 2013 +0200
@@ -0,0 +1,135 @@
+# -*- coding: utf-8 -*-
+# copyright 2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr -- mailto:contact@logilab.fr
+#
+# This program is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""cubicweb-diseasome schema"""
+
+from yams.buildobjs import EntityType, SubjectRelation, String, Int
+
+
+class Disease(EntityType):
+    """Disease entity definition.
+
+    A Disease entity is characterized by several attributes which are 
+    defined by URIs:
+
+    - a name, which we define as a CubicWeb / Yams String object
+    - a label, also defined as a Yams String
+    - a class degree, defined as a Yams Int (that is, an integer)
+    - a degree, also defined as a Yams Int
+    - size, also defined as an Int
+    - classes, defined as a set containing zero, one or several objects 
+      identified by their URIs, that is, objects of type ``ExternalUri``
+    - subtype_of, defined as a set containing zero, one or several
+      objects of type ``Disease``
+    - associated_genes, defined as a set containing zero, one or several
+      objects of type ``Gene``, that is, of genes associated to the
+      disease
+    - possible_drugs, defined as a set containing zero, one or several
+      objects, identified by their URIs, that is, of type ``ExternalUri``
+    - omim and omim_page are identifiers in the OMIM (Online Mendelian
+      Inheritance in Man) database, which contains an inventory of "human
+      genes and genetic phenotypes" 
+      (see http://www.ncbi.nlm.nih.gov/omim). Given that a disease
+      only has unique omim and omim_page identifiers, when it has them,
+      these attributes have been defined through relations such that
+      for each disease there is at most one omim and one omim_page. 
+      Each such identifier is defined through an URI, that is, through
+      an ``ExternalUri`` entity.
+      That is, these relations are of cardinality "?*". For optimization
+      purposes, one might be tempted to define them as inlined, by setting
+      the ``inlined`` keyword argument to ``True``.
+    - chromosomal_location is also defined through a relation of 
+      cardinality "?*", since any disease has at most one chromosomal
+      location associated to it.
+    - same_as is also defined through an URI, and hence through a
+      relation having ``ExternalUri`` entities as objects.
+
+    For more information on this data set and the data set itself, 
+    please consult http://datahub.io/dataset/fu-berlin-diseasome.
+    """
+    # Corresponds to http://www4.wiwiss.fu-berlin.de/diseasome/resource/
+    # diseasome/name
+    name = String(maxsize=256, fulltextindexed=True)
+    # Corresponds to http://www.w3.org/2000/01/rdf-schema#label
+    label = String(maxsize=512, fulltextindexed=True)
+    # Corresponds to http://www4.wiwiss.fu-berlin.de/diseasome/resource/
+    # diseasome/classDegree
+    class_degree = Int()
+    # Corresponds to http://www4.wiwiss.fu-berlin.de/diseasome/resource/
+    # diseasome/degree
+    degree = Int()
+    # Corresponds to http://www4.wiwiss.fu-berlin.de/diseasome/resource/
+    # diseasome/size
+    size = Int()
+    #Corresponds to http://www4.wiwiss.fu-berlin.de/diseasome/resource/
+    # diseasome/class
+    classes = SubjectRelation('ExternalUri', cardinality='**')
+    # Corresponds to http://www4.wiwiss.fu-berlin.de/diseasome/resource/
+    # diseasome/diseaseSubtypeOf
+    subtype_of = SubjectRelation('Disease', cardinality='**')
+    # Corresponds to http://www4.wiwiss.fu-berlin.de/diseasome/resource/
+    # diseasome/associatedGene
+    associated_genes = SubjectRelation('Gene', cardinality='**')
+    #Corresponds to http://www4.wiwiss.fu-berlin.de/diseasome/resource/
+    # diseasome/possibleDrug
+    possible_drugs = SubjectRelation('ExternalUri', cardinality='**')
+    #Corresponds to http://www4.wiwiss.fu-berlin.de/diseasome/resource/
+    # diseasome/omim
+    omim = SubjectRelation('ExternalUri', cardinality='?*', inlined=True)
+    #Corresponds to http://www4.wiwiss.fu-berlin.de/diseasome/resource/
+    # diseasome/omimPage
+    omim_page = SubjectRelation('ExternalUri', cardinality='?*', inlined=True)
+    #Corresponds to 'http://www4.wiwiss.fu-berlin.de/diseasome/resource/
+    # diseasome/chromosomalLocation'
+    chromosomal_location = SubjectRelation('ExternalUri', cardinality='?*',
+                                           inlined=True)
+    #Corresponds to http://www.w3.org/2002/07/owl#sameAs
+    same_as = SubjectRelation('ExternalUri', cardinality='**')
+
+
+class Gene(EntityType):
+    """Gene entity defintion.
+
+    A gene is characterized by the following attributes:
+
+    - label, defined through a Yams String.
+    - bio2rdf_symbol, also defined as a Yams String, since it is 
+      just an identifier.
+    - gene_id is an URI identifying a gene, hence it is defined
+      as a relation with an ``ExternalUri`` object.
+    - a pair of unique identifiers in the HUGO Gene Nomenclature
+      Committee (http://www.genenames.org/). They are defined
+      as ``ExternalUri`` entities as well.
+    - same_as is also defined through an URI, and hence through a
+      relation having ``ExternalUri`` entities as objects.
+    """
+    # Corresponds to http://www.w3.org/2000/01/rdf-schema#label
+    label = String(maxsize=512, fulltextindexed=True)
+    # Corresponds to http://www4.wiwiss.fu-berlin.de/diseasome/resource/
+    # diseasome/geneId
+    gene_id = SubjectRelation('ExternalUri', cardinality='**')
+    # Corresponds to http://www4.wiwiss.fu-berlin.de/diseasome/resource/
+    # diseasome/hgncId
+    hgnc_id = SubjectRelation('ExternalUri', cardinality='**')
+    # Corresponds to http://www4.wiwiss.fu-berlin.de/diseasome/resource/
+    # diseasome/hgncIdPage
+    hgnc_page = SubjectRelation('ExternalUri', cardinality='**')
+    # Corresponds to http://www4.wiwiss.fu-berlin.de/diseasome/resource/
+    # diseasome/bio2rdfSymbol
+    bio2rdf_symbol = String(maxsize=64, fulltextindexed=True)
+    #Corresponds to http://www.w3.org/2002/07/owl#sameAs
+    same_as = SubjectRelation('ExternalUri', cardinality='**')
--- a/entities/__init__.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/entities/__init__.py	Fri Jun 14 16:26:25 2013 +0200
@@ -19,12 +19,10 @@
 
 __docformat__ = "restructuredtext en"
 
-from warnings import warn
 
-from logilab.common.deprecation import deprecated
-from logilab.common.decorators import cached
+from logilab.common.decorators import classproperty
 
-from cubicweb import Unauthorized, typed_eid
+from cubicweb import Unauthorized
 from cubicweb.entity import Entity
 
 
@@ -60,6 +58,11 @@
 
     # meta data api ###########################################################
 
+    @classproperty
+    def cw_etype(self):
+        """entity Etype as a string"""
+        return self.__regid__
+
     def dc_title(self):
         """return a suitable *unicode* title for this entity"""
         for rschema, attrschema in self.e_schema.attribute_definitions():
--- a/entities/adapters.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/entities/adapters.py	Fri Jun 14 16:26:25 2013 +0200
@@ -26,12 +26,11 @@
 
 from logilab.mtconverter import TransformError
 from logilab.common.decorators import cached
-from logilab.common.deprecation import class_deprecated
 
 from cubicweb import ValidationError, view
 from cubicweb.predicates import (implements, is_instance, relation_possible,
                                 match_exception)
-from cubicweb.interfaces import IDownloadable, ITree, IProgress, IMileStone
+from cubicweb.interfaces import IDownloadable, ITree
 
 
 class IEmailableAdapter(view.EntityAdapter):
@@ -330,7 +329,7 @@
             _done = set()
         for child in self.children():
             if child.eid in _done:
-                self.error('loop in %s tree: %s', child.__regid__.lower(), child)
+                self.error('loop in %s tree: %s', child.cw_etype.lower(), child)
                 continue
             yield child
             _done.add(child.eid)
@@ -357,7 +356,7 @@
         entity = adapter.entity
         while entity is not None:
             if entity.eid in path:
-                self.error('loop in %s tree: %s', entity.__regid__.lower(), entity)
+                self.error('loop in %s tree: %s', entity.cw_etype.lower(), entity)
                 break
             path.append(entity.eid)
             try:
@@ -404,117 +403,3 @@
                       "%(cls)s is deprecated") % {'cls': cls.__name__}
         warn(msg, DeprecationWarning, stacklevel=2)
         return type.__call__(cls, *args, **kwargs)
-
-
-class IProgressAdapter(view.EntityAdapter):
-    """something that has a cost, a state and a progression.
-
-    You should at least override progress_info an in_progress methods on
-    concrete implementations.
-    """
-    __metaclass__ = adapter_deprecated
-    __deprecation_warning__ = '[3.14] IProgressAdapter has been moved to iprogress cube'
-    __needs_bw_compat__ = True
-    __regid__ = 'IProgress'
-    __select__ = implements(IProgress, warn=False) # XXX for bw compat, should be abstract
-
-    @property
-    @view.implements_adapter_compat('IProgress')
-    def cost(self):
-        """the total cost"""
-        return self.progress_info()['estimated']
-
-    @property
-    @view.implements_adapter_compat('IProgress')
-    def revised_cost(self):
-        return self.progress_info().get('estimatedcorrected', self.cost)
-
-    @property
-    @view.implements_adapter_compat('IProgress')
-    def done(self):
-        """what is already done"""
-        return self.progress_info()['done']
-
-    @property
-    @view.implements_adapter_compat('IProgress')
-    def todo(self):
-        """what remains to be done"""
-        return self.progress_info()['todo']
-
-    @view.implements_adapter_compat('IProgress')
-    def progress_info(self):
-        """returns a dictionary describing progress/estimated cost of the
-        version.
-
-        - mandatory keys are (''estimated', 'done', 'todo')
-
-        - optional keys are ('notestimated', 'notestimatedcorrected',
-          'estimatedcorrected')
-
-        'noestimated' and 'notestimatedcorrected' should default to 0
-        'estimatedcorrected' should default to 'estimated'
-        """
-        raise NotImplementedError
-
-    @view.implements_adapter_compat('IProgress')
-    def finished(self):
-        """returns True if status is finished"""
-        return not self.in_progress()
-
-    @view.implements_adapter_compat('IProgress')
-    def in_progress(self):
-        """returns True if status is not finished"""
-        raise NotImplementedError
-
-    @view.implements_adapter_compat('IProgress')
-    def progress(self):
-        """returns the % progress of the task item"""
-        try:
-            return 100. * self.done / self.revised_cost
-        except ZeroDivisionError:
-            # total cost is 0 : if everything was estimated, task is completed
-            if self.progress_info().get('notestimated'):
-                return 0.
-            return 100
-
-    @view.implements_adapter_compat('IProgress')
-    def progress_class(self):
-        return ''
-
-
-class IMileStoneAdapter(IProgressAdapter):
-    __metaclass__ = adapter_deprecated
-    __deprecation_warning__ = '[3.14] IMileStoneAdapter has been moved to iprogress cube'
-    __needs_bw_compat__ = True
-    __regid__ = 'IMileStone'
-    __select__ = implements(IMileStone, warn=False) # XXX for bw compat, should be abstract
-
-    parent_type = None # specify main task's type
-
-    @view.implements_adapter_compat('IMileStone')
-    def get_main_task(self):
-        """returns the main ITask entity"""
-        raise NotImplementedError
-
-    @view.implements_adapter_compat('IMileStone')
-    def initial_prevision_date(self):
-        """returns the initial expected end of the milestone"""
-        raise NotImplementedError
-
-    @view.implements_adapter_compat('IMileStone')
-    def eta_date(self):
-        """returns expected date of completion based on what remains
-        to be done
-        """
-        raise NotImplementedError
-
-    @view.implements_adapter_compat('IMileStone')
-    def completion_date(self):
-        """returns date on which the subtask has been completed"""
-        raise NotImplementedError
-
-    @view.implements_adapter_compat('IMileStone')
-    def contractors(self):
-        """returns the list of persons supposed to work on this task"""
-        raise NotImplementedError
-
--- a/entities/lib.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/entities/lib.py	Fri Jun 14 16:26:25 2013 +0200
@@ -23,8 +23,6 @@
 from urlparse import urlsplit, urlunsplit
 from datetime import datetime
 
-from logilab.common.deprecation import deprecated
-
 from cubicweb import UnknownProperty
 from cubicweb.entity import _marker
 from cubicweb.entities import AnyEntity, fetch_config
--- a/entities/sources.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/entities/sources.py	Fri Jun 14 16:26:25 2013 +0200
@@ -27,7 +27,6 @@
 from logilab.common.configuration import OptionError
 from logilab.mtconverter import xml_escape
 
-from cubicweb import ValidationError
 from cubicweb.entities import AnyEntity, fetch_config
 
 class _CWSourceCfgMixIn(object):
@@ -124,7 +123,7 @@
     fetch_attrs, cw_fetch_order = fetch_config(['cw_for_source', 'cw_schema', 'options'])
 
     def dc_title(self):
-        return self._cw._(self.__regid__) + ' #%s' % self.eid
+        return self._cw._(self.cw_etype) + ' #%s' % self.eid
 
     @property
     def schema(self):
--- a/entities/test/unittest_base.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/entities/test/unittest_base.py	Fri Jun 14 16:26:25 2013 +0200
@@ -25,7 +25,6 @@
 
 from cubicweb.devtools.testlib import CubicWebTC
 
-from cubicweb import ValidationError
 from cubicweb.interfaces import IMileStone, ICalendarable
 from cubicweb.entities import AnyEntity
 
@@ -48,8 +47,13 @@
         self.assertEqual(entity.dc_creator(), u'member')
 
     def test_type(self):
+        #dc_type may be translated
         self.assertEqual(self.member.dc_type(), 'CWUser')
 
+    def test_cw_etype(self):
+        #cw_etype is never translated
+        self.assertEqual(self.member.cw_etype, 'CWUser')
+
     def test_entity_meta_attributes(self):
         # XXX move to yams
         self.assertEqual(self.schema['CWUser'].meta_attributes(), {})
@@ -172,7 +176,7 @@
                 self.assertEqual(eclass.__bases__[0].__bases__, (Foo,))
         # check Division eclass is still selected for plain Division entities
         eclass = self.select_eclass('Division')
-        self.assertEqual(eclass.__regid__, 'Division')
+        self.assertEqual(eclass.cw_etype, 'Division')
 
 if __name__ == '__main__':
     unittest_main()
--- a/entities/wfobjs.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/entities/wfobjs.py	Fri Jun 14 16:26:25 2013 +0200
@@ -24,7 +24,6 @@
 
 __docformat__ = "restructuredtext en"
 
-from warnings import warn
 
 from logilab.common.decorators import cached, clear_cache
 from logilab.common.deprecation import deprecated
@@ -186,7 +185,7 @@
     fetch_attrs, cw_fetch_order = fetch_config(['name', 'type'])
 
     def __init__(self, *args, **kwargs):
-        if self.__regid__ == 'BaseTransition':
+        if self.cw_etype == 'BaseTransition':
             raise WorkflowException('should not be instantiated')
         super(BaseTransition, self).__init__(*args, **kwargs)
 
@@ -449,10 +448,10 @@
         """return the default workflow for entities of this type"""
         # XXX CWEType method
         wfrset = self._cw.execute('Any WF WHERE ET default_workflow WF, '
-                                  'ET name %(et)s', {'et': self.entity.__regid__})
+                                  'ET name %(et)s', {'et': self.entity.cw_etype})
         if wfrset:
             return wfrset.get_entity(0, 0)
-        self.warning("can't find any workflow for %s", self.entity.__regid__)
+        self.warning("can't find any workflow for %s", self.entity.cw_etype)
         return None
 
     @property
--- a/entity.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/entity.py	Fri Jun 14 16:26:25 2013 +0200
@@ -26,18 +26,17 @@
 from logilab.common.decorators import cached
 from logilab.common.deprecation import deprecated
 from logilab.common.registry import yes
-from logilab.mtconverter import TransformData, TransformError, xml_escape
+from logilab.mtconverter import TransformData, xml_escape
 
 from rql.utils import rqlvar_maker
 from rql.stmts import Select
 from rql.nodes import (Not, VariableRef, Constant, make_relation,
                        Relation as RqlRelation)
 
-from cubicweb import Unauthorized, typed_eid, neg_role
+from cubicweb import Unauthorized, neg_role
 from cubicweb.utils import support_args
 from cubicweb.rset import ResultSet
 from cubicweb.appobject import AppObject
-from cubicweb.req import _check_cw_unsafe
 from cubicweb.schema import (RQLVocabularyConstraint, RQLConstraint,
                              GeneratedConstraint)
 from cubicweb.rqlrewrite import RQLRewriter
@@ -555,7 +554,10 @@
         return '<Entity %s %s %s at %s>' % (
             self.e_schema, self.eid, list(self.cw_attr_cache), id(self))
 
-    def __cmp__(self, other):
+    def __lt__(self, other):
+        raise NotImplementedError('comparison not implemented for %s' % self.__class__)
+
+    def __eq__(self, other):
         raise NotImplementedError('comparison not implemented for %s' % self.__class__)
 
     def _cw_update_attr_cache(self, attrcache):
@@ -627,7 +629,7 @@
         meaning that the entity has to be created
         """
         try:
-            typed_eid(self.eid)
+            int(self.eid)
             return True
         except (ValueError, TypeError):
             return False
@@ -793,7 +795,7 @@
         for rtype in self.skip_copy_for:
             skip_copy_for['subject'].add(rtype)
             warn('[3.14] skip_copy_for on entity classes (%s) is deprecated, '
-                 'use cw_skip_for instead with list of couples (rtype, role)' % self.__regid__,
+                 'use cw_skip_for instead with list of couples (rtype, role)' % self.cw_etype,
                  DeprecationWarning)
         for rtype, role in self.cw_skip_copy_for:
             assert role in ('subject', 'object'), role
@@ -845,7 +847,7 @@
     def as_rset(self): # XXX .cw_as_rset
         """returns a resultset containing `self` information"""
         rset = ResultSet([(self.eid,)], 'Any X WHERE X eid %(x)s',
-                         {'x': self.eid}, [(self.__regid__,)])
+                         {'x': self.eid}, [(self.cw_etype,)])
         rset.req = self._cw
         return rset
 
@@ -1287,7 +1289,6 @@
         an entity or eid, a list of entities or eids, or None (meaning that all
         relations of the given type from or to this object should be deleted).
         """
-        _check_cw_unsafe(kwargs)
         assert kwargs
         assert self.cw_is_saved(), "should not call set_attributes while entity "\
                "hasn't been saved yet"
@@ -1397,10 +1398,6 @@
 
     @deprecated('[3.10] use entity.cw_attr_cache[attr]')
     def __getitem__(self, key):
-        if key == 'eid':
-            warn('[3.7] entity["eid"] is deprecated, use entity.eid instead',
-                 DeprecationWarning, stacklevel=2)
-            return self.eid
         return self.cw_attr_cache[key]
 
     @deprecated('[3.10] use entity.cw_attr_cache.get(attr[, default])')
@@ -1424,15 +1421,10 @@
         the attribute to skip_security since we don't want to check security
         for such attributes set by hooks.
         """
-        if attr == 'eid':
-            warn('[3.7] entity["eid"] = value is deprecated, use entity.eid = value instead',
-                 DeprecationWarning, stacklevel=2)
-            self.eid = value
-        else:
-            try:
-                self.cw_edited[attr] = value
-            except AttributeError:
-                self.cw_attr_cache[attr] = value
+        try:
+            self.cw_edited[attr] = value
+        except AttributeError:
+            self.cw_attr_cache[attr] = value
 
     @deprecated('[3.10] use del entity.cw_edited[attr]')
     def __delitem__(self, attr):
--- a/etwist/http.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/etwist/http.py	Fri Jun 14 16:26:25 2013 +0200
@@ -8,8 +8,6 @@
 
 __docformat__ = "restructuredtext en"
 
-from cubicweb.web.http_headers import Headers
-
 class HTTPResponse(object):
     """An object representing an HTTP Response to be sent to the client.
     """
--- a/etwist/request.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/etwist/request.py	Fri Jun 14 16:26:25 2013 +0200
@@ -19,14 +19,8 @@
 
 __docformat__ = "restructuredtext en"
 
-from datetime import datetime
 
-from twisted.web import http
-
-from cubicweb.web import DirectResponse
 from cubicweb.web.request import CubicWebRequestBase
-from cubicweb.web.httpcache import GMTOFFSET
-from cubicweb.web.http_headers import Headers
 
 
 class CubicWebTwistedRequestAdapter(CubicWebRequestBase):
@@ -39,6 +33,7 @@
                 self.form[key] = (name, stream)
             else:
                 self.form[key] = (unicode(name, self.encoding), stream)
+        self.content = self._twreq.content # stream
 
     def http_method(self):
         """returns 'POST', 'GET', 'HEAD', etc."""
--- a/etwist/server.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/etwist/server.py	Fri Jun 14 16:26:25 2013 +0200
@@ -19,35 +19,25 @@
 __docformat__ = "restructuredtext en"
 
 import sys
-import os
-import os.path as osp
 import select
 import traceback
 import threading
-import re
-from hashlib import md5 # pylint: disable=E0611
-from os.path import join
-from time import mktime
-from datetime import date, timedelta
 from urlparse import urlsplit, urlunsplit
 from cgi import FieldStorage, parse_header
 
 from twisted.internet import reactor, task, threads
-from twisted.internet.defer import maybeDeferred
 from twisted.web import http, server
-from twisted.web import static, resource
+from twisted.web import resource
 from twisted.web.server import NOT_DONE_YET
 
 
 from logilab.mtconverter import xml_escape
 from logilab.common.decorators import monkeypatch
 
-from cubicweb import (AuthenticationError, ConfigurationError,
-                      CW_EVENT_MANAGER, CubicWebException)
+from cubicweb import ConfigurationError, CW_EVENT_MANAGER
 from cubicweb.utils import json_dumps
 from cubicweb.web import DirectResponse
 from cubicweb.web.application import CubicWebPublisher
-from cubicweb.web.http_headers import generateDateTime
 from cubicweb.etwist.request import CubicWebTwistedRequestAdapter
 from cubicweb.etwist.http import HTTPResponse
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/etwist/test/data/views.py	Fri Jun 14 16:26:25 2013 +0200
@@ -0,0 +1,29 @@
+# copyright 2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+"""only for unit tests !"""
+
+from cubicweb.view import View
+from cubicweb.predicates import match_http_method
+
+class PutView(View):
+    __regid__ = 'put'
+    __select__ = match_http_method('PUT')
+    binary = True
+
+    def call(self):
+        self.w(self._cw.content.read())
--- a/etwist/test/unittest_server.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/etwist/test/unittest_server.py	Fri Jun 14 16:26:25 2013 +0200
@@ -19,6 +19,7 @@
 import os, os.path as osp, glob
 
 from cubicweb.devtools.testlib import CubicWebTC
+from cubicweb.devtools.httptest import CubicWebServerTC
 from cubicweb.etwist.server import host_prefixed_baseurl
 
 
@@ -53,6 +54,13 @@
         self._check('http://localhost:8080/hg/', 'code.cubicweb.org',
                     'http://localhost:8080/hg/')
 
+
+class ETwistHTTPTC(CubicWebServerTC):
+    def test_put_content(self):
+        body = 'hop'
+        response = self.web_request('?vid=put', method='PUT', body=body)
+        self.assertEqual(body, response.body)
+
 if __name__ == '__main__':
     from logilab.common.testlib import unittest_main
     unittest_main()
--- a/etwist/twconfig.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/etwist/twconfig.py	Fri Jun 14 16:26:25 2013 +0200
@@ -28,10 +28,10 @@
 
 from os.path import join
 
-from logilab.common.configuration import Method
+from logilab.common.configuration import Method, merge_options
 
 from cubicweb.cwconfig import CONFIGURATIONS
-from cubicweb.web.webconfig import WebConfiguration, merge_options
+from cubicweb.web.webconfig import WebConfiguration
 
 
 class TwistedConfiguration(WebConfiguration):
@@ -103,8 +103,8 @@
         return join(self.apphome, '%s-%s.py' % (self.appid, self.name))
 
     def default_base_url(self):
-        from socket import gethostname
-        return 'http://%s:%s/' % (self['host'] or gethostname(), self['port'] or 8080)
+        from socket import getfqdn
+        return 'http://%s:%s/' % (self['host'] or getfqdn(), self['port'] or 8080)
 
 
 CONFIGURATIONS.append(TwistedConfiguration)
--- a/ext/rest.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/ext/rest.py	Fri Jun 14 16:26:25 2013 +0200
@@ -36,6 +36,7 @@
 from itertools import chain
 from logging import getLogger
 from os.path import join
+from urlparse import urlsplit
 
 from docutils import statemachine, nodes, utils, io
 from docutils.core import Publisher
@@ -128,6 +129,63 @@
     set_classes(options)
     return [nodes.raw('', content, format='html')], []
 
+def bookmark_role(role, rawtext, text, lineno, inliner, options={}, content=[]):
+    """:bookmark:`<bookmark-eid>` or :bookmark:`<eid>:<vid>`
+
+    Example: :bookmark:`1234:table`
+
+    Replace the directive with the output of applying the view to the resultset
+    returned by the query stored in the bookmark. By default, the view is the one
+    stored in the bookmark, but it can be overridden by the directive as in the
+    example above.
+
+    "X eid %(userid)s" can be used in the RQL query stored in the Bookmark, for
+    this query will be executed with the argument {'userid': _cw.user.eid}.
+    """
+    _cw = inliner.document.settings.context._cw
+    text = text.strip()
+    try:
+        if ':' in text:
+            eid, vid = text.rsplit(u':', 1)
+            eid = int(eid)
+        else:
+            eid, vid = int(text), None
+    except ValueError:
+        msg = inliner.reporter.error(
+            'EID number must be a positive number; "%s" is invalid.'
+            % text, line=lineno)
+        prb = inliner.problematic(rawtext, rawtext, msg)
+        return [prb], [msg]
+    try:
+        bookmark = _cw.entity_from_eid(eid)
+    except UnknownEid:
+        msg = inliner.reporter.error('Unknown EID %s.' % text, line=lineno)
+        prb = inliner.problematic(rawtext, rawtext, msg)
+        return [prb], [msg]
+    try:
+        params = dict(_cw.url_parse_qsl(urlsplit(bookmark.path).query))
+        rql = params['rql']
+        if vid is None:
+            vid = params.get('vid')
+    except (ValueError, KeyError), exc:
+        msg = inliner.reporter.error('Could not parse bookmark path %s [%s].'
+                                     % (bookmark.path, exc), line=lineno)
+        prb = inliner.problematic(rawtext, rawtext, msg)
+        return [prb], [msg]
+    try:
+        rset = _cw.execute(rql, {'userid': _cw.user.eid})
+        if rset:
+            if vid is None:
+                vid = vid_from_rset(_cw, rset, _cw.vreg.schema)
+        else:
+            vid = 'noresult'
+        view = _cw.vreg['views'].select(vid, _cw, rset=rset)
+        content = view.render()
+    except Exception, exc:
+        content = 'An error occured while interpreting directive bookmark: %r' % exc
+    set_classes(options)
+    return [nodes.raw('', content, format='html')], []
+
 def winclude_directive(name, arguments, options, content, lineno,
                        content_offset, block_text, state, state_machine):
     """Include a reST file as part of the content of this reST file.
@@ -323,6 +381,7 @@
     _INITIALIZED = True
     register_canonical_role('eid', eid_reference_role)
     register_canonical_role('rql', rql_role)
+    register_canonical_role('bookmark', bookmark_role)
     directives.register_directive('winclude', winclude_directive)
     if pygments_directive is not None:
         directives.register_directive('sourcecode', pygments_directive)
--- a/ext/test/unittest_rest.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/ext/test/unittest_rest.py	Fri Jun 14 16:26:25 2013 +0200
@@ -75,5 +75,12 @@
         out = rest_publish(context, ':rql:`Any X WHERE X is CWUser`')
         self.assertEqual(out, u'<p><h1>CWUser_plural</h1><div class="section"><a href="http://testing.fr/cubicweb/cwuser/admin" title="">admin</a></div><div class="section"><a href="http://testing.fr/cubicweb/cwuser/anon" title="">anon</a></div></p>\n')
 
+    def test_bookmark_role(self):
+        context = self.context()
+        rset = self.execute('INSERT Bookmark X: X title "hello", X path "/view?rql=Any X WHERE X is CWUser"')
+        eid = rset[0][0]
+        out = rest_publish(context, ':bookmark:`%s`' % eid)
+        self.assertEqual(out, u'<p><h1>CWUser_plural</h1><div class="section"><a href="http://testing.fr/cubicweb/cwuser/admin" title="">admin</a></div><div class="section"><a href="http://testing.fr/cubicweb/cwuser/anon" title="">anon</a></div></p>\n')
+
 if __name__ == '__main__':
     unittest_main()
--- a/hooks/metadata.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/hooks/metadata.py	Fri Jun 14 16:26:25 2013 +0200
@@ -158,7 +158,7 @@
         entity = self.entity
         extid = entity.cw_metainformation()['extid']
         repo._type_source_cache[entity.eid] = (
-            entity.__regid__, self.newsource.uri, None, self.newsource.uri)
+            entity.cw_etype, self.newsource.uri, None, self.newsource.uri)
         if self.oldsource.copy_based_source:
             uri = 'system'
         else:
@@ -216,7 +216,7 @@
             # but has been moved, ignore it'.
             self._cw.system_sql('UPDATE entities SET eid=-eid WHERE eid=%(eid)s',
                                 {'eid': self.eidfrom})
-            attrs = {'type': entity.__regid__, 'eid': entity.eid, 'extid': None,
+            attrs = {'type': entity.cw_etype, 'eid': entity.eid, 'extid': None,
                      'source': 'system', 'asource': 'system',
                      'mtime': datetime.now()}
             self._cw.system_sql(syssource.sqlgen.insert('entities', attrs), attrs)
--- a/hooks/notification.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/hooks/notification.py	Fri Jun 14 16:26:25 2013 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -20,23 +20,61 @@
 __docformat__ = "restructuredtext en"
 
 from logilab.common.textutils import normalize_text
+from logilab.common.deprecation import deprecated
 
 from cubicweb import RegistryNotFound
 from cubicweb.predicates import is_instance
 from cubicweb.server import hook
 from cubicweb.sobjects.supervising import SupervisionMailOp
 
-class RenderAndSendNotificationView(hook.Operation):
-    """delay rendering of notification view until precommit"""
-    view = None # make pylint happy
+
+@deprecated('[3.17] use notify_on_commit instead')
+def RenderAndSendNotificationView(session, view, viewargs=None):
+    notify_on_commit(session, view, viewargs)
+
+
+def notify_on_commit(session, view, viewargs=None):
+    """register a notification view (see
+    :class:`~cubicweb.sobjects.notification.NotificationView`) to be sent at
+    post-commit time, ie only if the transaction has succeeded.
+
+    `viewargs` is an optional dictionary containing extra argument to be given
+    to :meth:`~cubicweb.sobjects.notification.NotificationView.render_and_send`
+    """
+    if viewargs is None:
+        viewargs = {}
+    notif_op = _RenderAndSendNotificationOp.get_instance(session)
+    notif_op.add_data((view, viewargs))
+
+
+class _RenderAndSendNotificationOp(hook.DataOperationMixIn, hook.Operation):
+    """End of the notification chain. Do render and send views after commit
+
+    All others Operations end up adding data to this Operation.
+    The notification are done on ``postcommit_event`` to make sure to prevent
+    sending notification about rollbacked data.
+    """
+
+    containercls = list
 
     def postcommit_event(self):
-        view = self.view
-        if view.cw_rset is not None and not view.cw_rset:
-            return # entity added and deleted in the same transaction (cache effect)
-        if view.cw_rset and self.session.deleted_in_transaction(view.cw_rset[view.cw_row or 0][view.cw_col or 0]):
-            return # entity added and deleted in the same transaction
-        self.view.render_and_send(**getattr(self, 'viewargs', {}))
+        deleted = self.session.deleted_in_transaction
+        for view, viewargs in self.get_data():
+            if view.cw_rset is not None:
+                if not view.cw_rset:
+                    # entity added and deleted in the same transaction
+                    # (cache effect)
+                    continue
+                elif deleted(view.cw_rset[view.cw_row or 0][view.cw_col or 0]):
+                    # entity added and deleted in the same transaction
+                    continue
+            try:
+                view.render_and_send(**viewargs)
+            except Exception:
+                # error in post commit are not propagated
+                # We keep this logic here to prevent a small notification error
+                # to prevent them all.
+                self.exception('Notification failed')
 
 
 class NotificationHook(hook.Hook):
@@ -73,9 +111,10 @@
         # #103822)
         if comment and entity.comment_format != 'text/rest':
             comment = normalize_text(comment, 80)
-        RenderAndSendNotificationView(self._cw, view=view, viewargs={
-            'comment': comment, 'previous_state': entity.previous_state.name,
-            'current_state': entity.new_state.name})
+        viewargs = {'comment': comment,
+                    'previous_state': entity.previous_state.name,
+                    'current_state': entity.new_state.name}
+        notify_on_commit(self._cw, view, viewargs=viewargs)
 
 class RelationChangeHook(NotificationHook):
     __regid__ = 'notifyrelationchange'
@@ -91,7 +130,7 @@
                                 rset=rset, row=0)
         if view is None:
             return
-        RenderAndSendNotificationView(self._cw, view=view)
+        notify_on_commit(self._cw, view)
 
 
 class EntityChangeHook(NotificationHook):
@@ -106,18 +145,21 @@
         view = self.select_view('notif_%s' % self.event, rset=rset, row=0)
         if view is None:
             return
-        RenderAndSendNotificationView(self._cw, view=view)
+        notify_on_commit(self._cw, view)
 
 
 class EntityUpdatedNotificationOp(hook.SingleLastOperation):
+    """scrap all changed entity to prepare a Notification Operation for them"""
 
     def precommit_event(self):
+        # precommit event that creates postcommit operation
         session = self.session
         for eid in session.transaction_data['changes']:
             view = session.vreg['views'].select('notif_entity_updated', session,
                                                 rset=session.eid_rset(eid),
                                                 row=0)
-            RenderAndSendNotificationView(session, view=view)
+            notify_on_commit(self.session, view,
+                    viewargs={'changes': session.transaction_data['changes'][eid]})
 
 
 class EntityUpdateHook(NotificationHook):
@@ -198,5 +240,5 @@
             # missing required relation
             title = '#%s' % self.entity.eid
         self._cw.transaction_data.setdefault('pendingchanges', []).append(
-            ('delete_entity', (self.entity.eid, self.entity.__regid__, title)))
+            ('delete_entity', (self.entity.eid, self.entity.cw_etype, title)))
         return True
--- a/hooks/syncschema.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/hooks/syncschema.py	Fri Jun 14 16:26:25 2013 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -216,7 +216,10 @@
         # commit event should not raise error, while set_schema has chances to
         # do so because it triggers full vreg reloading
         try:
-            repo.set_schema(repo.schema, rebuildinfered=rebuildinfered)
+            if rebuildinfered:
+                repo.schema.rebuild_infered_relations()
+            # trigger vreg reload
+            repo.set_schema(repo.schema)
             # CWUser class might have changed, update current session users
             cwuser_cls = self.session.vreg['etypes'].etype_class('CWUser')
             for session in repo._sessions.itervalues():
--- a/hooks/test/unittest_syncschema.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/hooks/test/unittest_syncschema.py	Fri Jun 14 16:26:25 2013 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -34,7 +34,7 @@
 
     def setUp(self):
         super(SchemaModificationHooksTC, self).setUp()
-        self.repo.fill_schema()
+        self.repo.set_schema(self.repo.deserialize_schema(), resetvreg=False)
         self.__class__.schema_eids = schema_eids_idx(self.repo.schema)
 
     def index_exists(self, etype, attr, unique=False):
--- a/interfaces.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/interfaces.py	Fri Jun 14 16:26:25 2013 +0200
@@ -148,34 +148,6 @@
     def marker_icon(self):
         """returns the icon that should be used as the marker"""
 
-# XXX deprecates in favor of ISIOCItemAdapter
-class ISiocItem(Interface):
-    """interface for entities which may be represented as an ISIOC item"""
-
-    def isioc_content(self):
-        """return item's content"""
-
-    def isioc_container(self):
-        """return container entity"""
-
-    def isioc_type(self):
-        """return container type (post, BlogPost, MailMessage)"""
-
-    def isioc_replies(self):
-        """return replies items"""
-
-    def isioc_topics(self):
-        """return topics items"""
-
-# XXX deprecates in favor of ISIOCContainerAdapter
-class ISiocContainer(Interface):
-    """interface for entities which may be represented as an ISIOC container"""
-
-    def isioc_type(self):
-        """return container type (forum, Weblog, MailingList)"""
-
-    def isioc_items(self):
-        """return contained items"""
 
 # XXX deprecates in favor of IEmailableAdapter
 class IFeed(Interface):
--- a/mail.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/mail.py	Fri Jun 14 16:26:25 2013 +0200
@@ -27,9 +27,6 @@
 from email.header import Header
 from socket import gethostname
 
-from cubicweb.view import EntityView
-from cubicweb.entity import Entity
-
 def header(ustring):
     return Header(ustring.encode('UTF-8'), 'UTF-8')
 
@@ -142,140 +139,3 @@
         image = MIMEImage(data)
         image.add_header('Content-ID', '<%s>' % htmlId)
         self.attach(image)
-
-
-class NotificationView(EntityView):
-    """abstract view implementing the "email" API (eg to simplify sending
-    notification)
-    """
-    # XXX refactor this class to work with len(rset) > 1
-
-    msgid_timestamp = True
-
-    # to be defined on concrete sub-classes
-    content = None # body of the mail
-    message = None # action verb of the subject
-
-    # this is usually the method to call
-    def render_and_send(self, **kwargs):
-        """generate and send an email message for this view"""
-        delayed = kwargs.pop('delay_to_commit', None)
-        for recipients, msg in self.render_emails(**kwargs):
-            if delayed is None:
-                self.send(recipients, msg)
-            elif delayed:
-                self.send_on_commit(recipients, msg)
-            else:
-                self.send_now(recipients, msg)
-
-    def cell_call(self, row, col=0, **kwargs):
-        self.w(self._cw._(self.content) % self.context(**kwargs))
-
-    def render_emails(self, **kwargs):
-        """generate and send emails for this view (one per recipient)"""
-        self._kwargs = kwargs
-        recipients = self.recipients()
-        if not recipients:
-            self.info('skipping %s notification, no recipients', self.__regid__)
-            return
-        if self.cw_rset is not None:
-            entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0)
-            # if the view is using timestamp in message ids, no way to reference
-            # previous email
-            if not self.msgid_timestamp:
-                refs = [self.construct_message_id(eid)
-                        for eid in entity.cw_adapt_to('INotifiable').notification_references(self)]
-            else:
-                refs = ()
-            msgid = self.construct_message_id(entity.eid)
-        else:
-            refs = ()
-            msgid = None
-        req = self._cw
-        self.user_data = req.user_data()
-        origlang = req.lang
-        for something in recipients:
-            if isinstance(something, Entity):
-                # hi-jack self._cw to get a session for the returned user
-                self._cw = self._cw.hijack_user(something)
-                emailaddr = something.cw_adapt_to('IEmailable').get_email()
-            else:
-                emailaddr, lang = something
-                self._cw.set_language(lang)
-            # since the same view (eg self) may be called multiple time and we
-            # need a fresh stream at each iteration, reset it explicitly
-            self.w = None
-            # XXX call render before subject to set .row/.col attributes on the
-            #     view
-            try:
-                content = self.render(row=0, col=0, **kwargs)
-                subject = self.subject()
-            except SkipEmail:
-                continue
-            except Exception as ex:
-                # shouldn't make the whole transaction fail because of rendering
-                # error (unauthorized or such) XXX check it doesn't actually
-                # occurs due to rollback on such error
-                self.exception(str(ex))
-                continue
-            msg = format_mail(self.user_data, [emailaddr], content, subject,
-                              config=self._cw.vreg.config, msgid=msgid, references=refs)
-            yield [emailaddr], msg
-        # restore language
-        req.set_language(origlang)
-
-    # recipients / email sending ###############################################
-
-    def recipients(self):
-        """return a list of either 2-uple (email, language) or user entity to
-        who this email should be sent
-        """
-        finder = self._cw.vreg['components'].select(
-            'recipients_finder', self._cw, rset=self.cw_rset,
-            row=self.cw_row or 0, col=self.cw_col or 0)
-        return finder.recipients()
-
-    def send_now(self, recipients, msg):
-        self._cw.vreg.config.sendmails([(msg, recipients)])
-
-    def send_on_commit(self, recipients, msg):
-        raise NotImplementedError
-
-    send = send_now
-
-    # email generation helpers #################################################
-
-    def construct_message_id(self, eid):
-        return construct_message_id(self._cw.vreg.config.appid, eid,
-                                    self.msgid_timestamp)
-
-    def format_field(self, attr, value):
-        return ':%(attr)s: %(value)s' % {'attr': attr, 'value': value}
-
-    def format_section(self, attr, value):
-        return '%(attr)s\n%(ul)s\n%(value)s\n' % {
-            'attr': attr, 'ul': '-'*len(attr), 'value': value}
-
-    def subject(self):
-        entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0)
-        subject = self._cw._(self.message)
-        etype = entity.dc_type()
-        eid = entity.eid
-        login = self.user_data['login']
-        return self._cw._('%(subject)s %(etype)s #%(eid)s (%(login)s)') % locals()
-
-    def context(self, **kwargs):
-        entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0)
-        for key, val in kwargs.iteritems():
-            if val and isinstance(val, unicode) and val.strip():
-               kwargs[key] = self._cw._(val)
-        kwargs.update({'user': self.user_data['login'],
-                       'eid': entity.eid,
-                       'etype': entity.dc_type(),
-                       'url': entity.absolute_url(),
-                       'title': entity.dc_long_title(),})
-        return kwargs
-
-
-class SkipEmail(Exception):
-    """raise this if you decide to skip an email during its generation"""
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/misc/migration/3.17.0_Any.py	Fri Jun 14 16:26:25 2013 +0200
@@ -0,0 +1,1 @@
+add_attribute('CWAttribute', 'extra_props')
--- a/misc/migration/3.3.5_Any.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/misc/migration/3.3.5_Any.py	Fri Jun 14 16:26:25 2013 +0200
@@ -1,8 +1,1 @@
-# some entities have been added before schema entities, fix the 'is' and
-# 'is_instance_of' relations
-for rtype in ('is', 'is_instance_of'):
-    sql('INSERT INTO %s_relation '
-        'SELECT X.eid, ET.cw_eid FROM entities as X, cw_CWEType as ET '
-        'WHERE X.type=ET.cw_name AND NOT EXISTS('
-        '      SELECT 1 from is_relation '
-        '      WHERE eid_from=X.eid AND eid_to=ET.cw_eid)' % rtype)
+raise NotImplementedError("Cannot migrate such an old version. Use intermediate Cubiweb version (try 3.16.x)")
--- a/misc/migration/3.4.0_Any.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/misc/migration/3.4.0_Any.py	Fri Jun 14 16:26:25 2013 +0200
@@ -1,2 +1,1 @@
-drop_attribute('CWEType', 'meta')
-drop_attribute('CWRType', 'meta')
+raise NotImplementedError("Cannot migrate such an old version. Use intermediate Cubiweb version (try 3.16.x)")
--- a/misc/migration/3.4.0_common.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/misc/migration/3.4.0_common.py	Fri Jun 14 16:26:25 2013 +0200
@@ -1,6 +1,1 @@
-from os.path import join
-from cubicweb.toolsutils import create_dir
-
-option_renamed('pyro-application-id', 'pyro-instance-id')
-
-create_dir(join(config.appdatahome, 'backup'))
+raise NotImplementedError("Cannot migrate such an old version. Use intermediate Cubiweb version (try 3.16.x)")
--- a/misc/migration/3.4.3_Any.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/misc/migration/3.4.3_Any.py	Fri Jun 14 16:26:25 2013 +0200
@@ -1,2 +1,1 @@
-# sync and restart to make sure cwuri does not appear in forms
-sync_schema_props_perms()
+raise NotImplementedError("Cannot migrate such an old version. Use intermediate Cubiweb version (try 3.16.x)")
--- a/misc/migration/3.5.0_Any.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/misc/migration/3.5.0_Any.py	Fri Jun 14 16:26:25 2013 +0200
@@ -1,10 +1,1 @@
-add_relation_type('prefered_form')
-
-rql('SET X prefered_form Y WHERE Y canonical TRUE, X identical_to Y')
-commit()
-
-drop_attribute('EmailAddress', 'canonical')
-drop_relation_definition('EmailAddress', 'identical_to', 'EmailAddress')
-
-if 'see_also' in schema:
-    sync_schema_props_perms('see_also', syncprops=False, syncrdefs=False)
+raise NotImplementedError("Cannot migrate such an old version. Use intermediate Cubiweb version (try 3.16.x)")
--- a/misc/migration/3.5.10_Any.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/misc/migration/3.5.10_Any.py	Fri Jun 14 16:26:25 2013 +0200
@@ -1,5 +1,1 @@
-sync_schema_props_perms('state_of')
-sync_schema_props_perms('transition_of')
-for etype in ('State', 'BaseTransition', 'Transition', 'WorkflowTransition'):
-    sync_schema_props_perms((etype, 'name', 'String'))
-
+raise NotImplementedError("Cannot migrate such an old version. Use intermediate Cubiweb version (try 3.16.x)")
--- a/misc/migration/3.5.3_Any.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/misc/migration/3.5.3_Any.py	Fri Jun 14 16:26:25 2013 +0200
@@ -1,7 +1,1 @@
-# type attribute might already be there if migrating from
-# version < 3.5 to version >= 3.5.3, BaseTransition being added
-# in bootstrap_migration
-if versions_map['cubicweb'][0] >= (3, 5, 0):
-    add_attribute('BaseTransition', 'type')
-    sync_schema_props_perms('state_of')
-    sync_schema_props_perms('transition_of')
+raise NotImplementedError("Cannot migrate such an old version. Use intermediate Cubiweb version (try 3.16.x)")
--- a/misc/migration/3.6.1_Any.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/misc/migration/3.6.1_Any.py	Fri Jun 14 16:26:25 2013 +0200
@@ -1,2 +1,1 @@
-sync_schema_props_perms(syncprops=False)
-sync_schema_props_perms('destination_state', syncperms=False)
+raise NotImplementedError("Cannot migrate such an old version. Use intermediate Cubiweb version (try 3.16.x)")
--- a/misc/migration/bootstrapmigration_repository.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/misc/migration/bootstrapmigration_repository.py	Fri Jun 14 16:26:25 2013 +0200
@@ -34,6 +34,26 @@
     ss.execschemarql(rql, rdef, ss.rdef2rql(rdef, CSTRMAP, groupmap=None))
     commit(ask_confirm=False)
 
+if applcubicwebversion < (3, 17, 0) and cubicwebversion >= (3, 17, 0):
+    try:
+        add_cube('sioc', update_database=False)
+    except ImportError:
+        if not confirm('In cubicweb 3.17 sioc views have been moved to the sioc '
+                       'cube, which is not installed.  Continue anyway?'):
+            raise
+    try:
+        add_cube('embed', update_database=False)
+    except ImportError:
+        if not confirm('In cubicweb 3.17 embedding views have been moved to the embed '
+                       'cube, which is not installed.  Continue anyway?'):
+            raise
+    try:
+        add_cube('geocoding', update_database=False)
+    except ImportError:
+        if not confirm('In cubicweb 3.17 geocoding views have been moved to the geocoding '
+                       'cube, which is not installed.  Continue anyway?'):
+            raise
+
 if applcubicwebversion <= (3, 13, 0) and cubicwebversion >= (3, 13, 1):
     sql('ALTER TABLE entities ADD asource VARCHAR(64)')
     sql('UPDATE entities SET asource=cw_name  '
--- a/misc/scripts/drop_external_entities.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/misc/scripts/drop_external_entities.py	Fri Jun 14 16:26:25 2013 +0200
@@ -10,14 +10,14 @@
     try:
         suri = ecnx.describe(meta['extid'])[1]
     except UnknownEid:
-        print 'cant describe', e.__regid__, e.eid, meta
+        print 'cant describe', e.cw_etype, e.eid, meta
         continue
     if suri != 'system':
         try:
-            print 'deleting', e.__regid__, e.eid, suri, e.dc_title().encode('utf8')
+            print 'deleting', e.cw_etype, e.eid, suri, e.dc_title().encode('utf8')
             repo.delete_info(session, e, suri, scleanup=e.eid)
         except UnknownEid:
-            print '  cant delete', e.__regid__, e.eid, meta
+            print '  cant delete', e.cw_etype, e.eid, meta
 
 
 commit()
--- a/misc/scripts/ldapuser2ldapfeed.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/misc/scripts/ldapuser2ldapfeed.py	Fri Jun 14 16:26:25 2013 +0200
@@ -39,7 +39,7 @@
 extids = set()
 duplicates = []
 for entity in rql('Any X WHERE X cw_source S, S eid %(s)s', {'s': source.eid}).entities():
-    etype = entity.__regid__
+    etype = entity.cw_etype
     if not source.support_entity(etype):
         print "source doesn't support %s, delete %s" % (etype, entity.eid)
         todelete[etype].append(entity)
--- a/misc/scripts/pyroforge2datafeed.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/misc/scripts/pyroforge2datafeed.py	Fri Jun 14 16:26:25 2013 +0200
@@ -47,7 +47,7 @@
 todelete = {}
 host = source.config['base-url'].split('://')[1]
 for entity in rql('Any X WHERE X cw_source S, S eid %(s)s', {'s': source.eid}).entities():
-        etype = entity.__regid__
+        etype = entity.cw_etype
         if not source.support_entity(etype):
             print "source doesn't support %s, delete %s" % (etype, entity.eid)
         elif etype in DONT_GET_BACK_ETYPES:
@@ -84,8 +84,8 @@
 for mappart in rql('Any X,SCH WHERE X cw_schema SCH, X cw_for_source S, S eid %(s)s',
                    {'s': source.eid}).entities():
     schemaent = mappart.cw_schema[0]
-    if schemaent.__regid__ != 'CWEType':
-        assert schemaent.__regid__ == 'CWRType'
+    if schemaent.cw_etype != 'CWEType':
+        assert schemaent.cw_etype == 'CWRType'
         sch = schema._eid_index[schemaent.eid]
         for rdef in sch.rdefs.itervalues():
             if not source.support_entity(rdef.subject) \
--- a/mixins.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/mixins.py	Fri Jun 14 16:26:25 2013 +0200
@@ -236,7 +236,7 @@
         self.close_item(entity)
 
     def open_item(self, entity):
-        self.w(u'<li class="%s">\n' % entity.__regid__.lower())
+        self.w(u'<li class="%s">\n' % entity.cw_etype.lower())
     def close_item(self, entity):
         self.w(u'</li>\n')
 
--- a/predicates.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/predicates.py	Fri Jun 14 16:26:25 2013 +0200
@@ -1489,6 +1489,15 @@
         return frozenset(req.form)
 
 
+class match_http_method(ExpectedValuePredicate):
+    """Return non-zero score if one of the HTTP methods specified as
+    initializer arguments is the HTTP method of the request (GET, POST, ...).
+    """
+
+    def __call__(self, cls, req, **kwargs):
+        return int(req.http_method() in self.expected)
+
+
 class match_edited_type(ExpectedValuePredicate):
     """return non-zero if main edited entity type is the one specified as
     initializer argument, or is among initializer arguments if `mode` == 'any'.
--- a/req.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/req.py	Fri Jun 14 16:26:25 2013 +0200
@@ -29,7 +29,7 @@
 from logilab.common.deprecation import deprecated
 from logilab.common.date import ustrftime, strptime, todate, todatetime
 
-from cubicweb import Unauthorized, NoSelectableObject, typed_eid, uilib
+from cubicweb import Unauthorized, NoSelectableObject, uilib
 from cubicweb.rset import ResultSet
 
 ONESECOND = timedelta(0, 1, 0)
@@ -38,12 +38,6 @@
 class FindEntityError(Exception):
     """raised when find_one_entity() can not return one and only one entity"""
 
-def _check_cw_unsafe(kwargs):
-    if kwargs.pop('_cw_unsafe', False):
-        warn('[3.7] _cw_unsafe argument is deprecated, now unsafe by '
-             'default, control it using cw_[read|write]_security.',
-             DeprecationWarning, stacklevel=3)
-
 class Cache(dict):
     def __init__(self):
         super(Cache, self).__init__()
@@ -74,6 +68,7 @@
         # cache result of execution for (rql expr / eids),
         # should be emptied on commit/rollback of the server session / web
         # connection
+        self.user = None
         self.local_perm_cache = {}
         self._ = unicode
 
@@ -114,7 +109,7 @@
         (we have the eid, we can suppose it exists and user has access to the
         entity)
         """
-        eid = typed_eid(eid)
+        eid = int(eid)
         if etype is None:
             etype = self.describe(eid)[0]
         rset = ResultSet([(eid,)], 'Any X WHERE X eid %(x)s', {'x': eid},
@@ -154,7 +149,6 @@
         ...               works_for=c)
 
         """
-        _check_cw_unsafe(kwargs)
         cls = self.vreg['etypes'].etype_class(etype)
         return cls.cw_instantiate(self.execute, **kwargs)
 
--- a/rqlrewrite.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/rqlrewrite.py	Fri Jun 14 16:26:25 2013 +0200
@@ -30,7 +30,7 @@
 from logilab.common import tempattr
 from logilab.common.graph import has_path
 
-from cubicweb import Unauthorized, typed_eid
+from cubicweb import Unauthorized
 
 
 def add_types_restriction(schema, rqlst, newroot=None, solutions=None):
@@ -220,7 +220,7 @@
             vi = {}
             self.varinfos.append(vi)
             try:
-                vi['const'] = typed_eid(selectvar)
+                vi['const'] = int(selectvar)
                 vi['rhs_rels'] = vi['lhs_rels'] = {}
             except ValueError:
                 try:
--- a/schema.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/schema.py	Fri Jun 14 16:26:25 2013 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -90,11 +90,9 @@
 
 _LOGGER = getLogger('cubicweb.schemaloader')
 
-# schema entities created from serialized schema have an eid rproperty
+# entity and relation schema created from serialized schema have an eid
 ybo.ETYPE_PROPERTIES += ('eid',)
 ybo.RTYPE_PROPERTIES += ('eid',)
-ybo.RDEF_PROPERTIES += ('eid',)
-
 
 PUB_SYSTEM_ENTITY_PERMS = {
     'read':   ('managers', 'users', 'guests',),
@@ -296,6 +294,9 @@
 
 
 RelationDefinitionSchema._RPROPERTIES['eid'] = None
+# remember rproperties defined at this point. Others will have to be serialized in
+# CWAttribute.extra_props
+KNOWN_RPROPERTIES = RelationDefinitionSchema.ALL_PROPERTIES()
 
 def rql_expression(self, expression, mainvars=None, eid=None):
     """rql expression factory"""
@@ -700,10 +701,15 @@
     def __repr__(self):
         return '%s(%s)' % (self.__class__.__name__, self.full_rql)
 
-    def __cmp__(self, other):
+    def __lt__(self, other):
         if hasattr(other, 'expression'):
-            return cmp(other.expression, self.expression)
-        return -1
+            return self.expression < other.expression
+        return True
+
+    def __eq__(self, other):
+        if hasattr(other, 'expression'):
+            return self.expression == other.expression
+        return False
 
     def __deepcopy__(self, memo):
         return self.__class__(self.expression, self.mainvars)
--- a/schemas/bootstrap.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/schemas/bootstrap.py	Fri Jun 14 16:26:25 2013 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -21,7 +21,7 @@
 __docformat__ = "restructuredtext en"
 _ = unicode
 
-from yams.buildobjs import (EntityType, RelationType, RelationDefinition,
+from yams.buildobjs import (EntityType, RelationType, RelationDefinition, Bytes,
                             SubjectRelation, RichString, String, Boolean, Int)
 from cubicweb.schema import (
     RQLConstraint,
@@ -84,6 +84,7 @@
     fulltextindexed = Boolean(description=_('index this attribute\'s value in the plain text index'))
     internationalizable = Boolean(description=_('is this attribute\'s value translatable'))
     defaultval = String(maxsize=256)
+    extra_props = Bytes(description=_('additional type specific properties'))
 
     description = RichString(internationalizable=True,
                              description=_('semantic description of this attribute'))
--- a/selectors.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/selectors.py	Fri Jun 14 16:26:25 2013 +0200
@@ -35,8 +35,6 @@
 EClassSelector = class_renamed('EClassSelector', EClassPredicate)
 EntitySelector = class_renamed('EntitySelector', EntityPredicate)
 
-# XXX pre 3.7? bw compat
-
 
 class on_transition(is_in_state):
     """Return 1 if entity is in one of the transitions given as argument list
--- a/server/checkintegrity.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/server/checkintegrity.py	Fri Jun 14 16:26:25 2013 +0200
@@ -329,7 +329,7 @@
                 else:
                     rql = 'Any X WHERE NOT Y %s X, X is %s' % (rschema, etype)
                 for entity in session.execute(rql).entities():
-                    sys.stderr.write(msg % (entity.__regid__, entity.eid, role, rschema))
+                    sys.stderr.write(msg % (entity.cw_etype, entity.eid, role, rschema))
                     if fix:
                         #if entity.cw_describe()['source']['uri'] == 'system': XXX
                         entity.cw_delete() # XXX this is BRUTAL!
@@ -350,7 +350,7 @@
                 rql = 'Any X WHERE X %s NULL, X is %s, X cw_source S, S name "system"' % (
                     rschema, rdef.subject)
                 for entity in session.execute(rql).entities():
-                    sys.stderr.write(msg % (entity.__regid__, entity.eid, rschema))
+                    sys.stderr.write(msg % (entity.cw_etype, entity.eid, rschema))
                     if fix:
                         entity.cw_delete()
                     notify_fixed(fix)
--- a/server/edition.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/server/edition.py	Fri Jun 14 16:26:25 2013 +0200
@@ -48,9 +48,12 @@
         # dict|set keyable
         return hash(id(self))
 
-    def __cmp__(self, other):
+    def __lt__(self, other):
         # we don't want comparison by value inherited from dict
-        return cmp(id(self), id(other))
+        return id(self) < id(other)
+
+    def __eq__(self, other):
+        return id(self) == id(other)
 
     def __setitem__(self, attr, value):
         assert attr != 'eid'
--- a/server/ldaputils.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/server/ldaputils.py	Fri Jun 14 16:26:25 2013 +0200
@@ -32,6 +32,8 @@
 
 from __future__ import division # XXX why?
 
+from datetime import datetime
+
 import ldap
 from ldap.ldapobject import ReconnectLDAPObject
 from ldap.filter import filter_format
@@ -88,8 +90,8 @@
 
         ('user-base-dn',
          {'type' : 'string',
-          'default': 'ou=People,dc=logilab,dc=fr',
-          'help': 'base DN to lookup for users',
+          'default': '',
+          'help': 'base DN to lookup for users; disable user importation mechanism if unset',
           'group': 'ldap-source', 'level': 1,
           }),
         ('user-scope',
@@ -160,10 +162,9 @@
         self.user_base_scope = globals()[typedconfig['user-scope']]
         self.user_login_attr = typedconfig['user-login-attr']
         self.user_default_groups = typedconfig['user-default-group']
-        self.user_attrs = typedconfig['user-attrs-map']
-        self.user_rev_attrs = {'eid': 'dn'}
-        for ldapattr, cwattr in self.user_attrs.items():
-            self.user_rev_attrs[cwattr] = ldapattr
+        self.user_attrs = {'dn': 'eid', 'modifyTimestamp': 'modification_date'}
+        self.user_attrs.update(typedconfig['user-attrs-map'])
+        self.user_rev_attrs = dict((v, k) for k, v in self.user_attrs.iteritems())
         self.base_filters = [filter_format('(%s=%s)', ('objectClass', o))
                              for o in typedconfig['user-classes']]
         if typedconfig['user-filter']:
@@ -328,7 +329,6 @@
             else:
                 itemdict = self._process_ldap_item(rec_dn, items)
                 result.append(itemdict)
-        #print '--->', result
         self.debug('ldap built results %s', len(result))
         return result
 
@@ -342,11 +342,14 @@
                 if not value.startswith('{SSHA}'):
                     value = utils.crypt_password(value)
                 itemdict[key] = Binary(value)
+            elif self.user_attrs.get(key) == 'modification_date':
+                itemdict[key] = datetime.strptime(value[0], '%Y%m%d%H%M%SZ')
             else:
-                for i, val in enumerate(value):
-                    value[i] = unicode(val, 'utf-8', 'replace')
-                if isinstance(value, list) and len(value) == 1:
+                value = [unicode(val, 'utf-8', 'replace') for val in value]
+                if len(value) == 1:
                     itemdict[key] = value = value[0]
+                else:
+                    itemdict[key] = value
         return itemdict
 
     def _process_no_such_object(self, session, dn):
--- a/server/migractions.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/server/migractions.py	Fri Jun 14 16:26:25 2013 +0200
@@ -300,8 +300,8 @@
         if self.config is not None:
             session = self.repo._get_session(self.cnx.sessionid)
             if session.cnxset is None:
-                session.set_read_security(False)
-                session.set_write_security(False)
+                session.read_security = False
+                session.write_security = False
             session.set_cnxset()
             return session
         # no access to session on remote instance
@@ -1515,14 +1515,6 @@
         if commit:
             self.commit()
 
-    @deprecated("[3.7] use session.disable_hook_categories('integrity')")
-    def cmd_deactivate_verification_hooks(self):
-        self.session.disable_hook_categories('integrity')
-
-    @deprecated("[3.7] use session.enable_hook_categories('integrity')")
-    def cmd_reactivate_verification_hooks(self):
-        self.session.enable_hook_categories('integrity')
-
     @deprecated("[3.15] use rename_relation_type(oldname, newname)")
     def cmd_rename_relation(self, oldname, newname, commit=True):
         self.cmd_rename_relation_type(oldname, newname, commit)
--- a/server/querier.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/server/querier.py	Fri Jun 14 16:26:25 2013 +0200
@@ -31,7 +31,7 @@
 from yams import BASE_TYPES
 
 from cubicweb import ValidationError, Unauthorized, QueryError, UnknownEid
-from cubicweb import Binary, server, typed_eid
+from cubicweb import Binary, server
 from cubicweb.rset import ResultSet
 
 from cubicweb.utils import QueryCache, RepeatList
@@ -392,7 +392,7 @@
             for var in rqlst.defined_vars.itervalues():
                 if var.stinfo['constnode'] is not None:
                     eid = var.stinfo['constnode'].eval(self.args)
-                    varkwargs[var.name] = typed_eid(eid)
+                    varkwargs[var.name] = int(eid)
         # dictionary of variables restricted for security reason
         localchecks = {}
         restricted_vars = set()
@@ -564,11 +564,11 @@
         for subj, rtype, obj in self.relation_defs():
             # if a string is given into args instead of an int, we get it here
             if isinstance(subj, basestring):
-                subj = typed_eid(subj)
+                subj = int(subj)
             elif not isinstance(subj, (int, long)):
                 subj = subj.entity.eid
             if isinstance(obj, basestring):
-                obj = typed_eid(obj)
+                obj = int(obj)
             elif not isinstance(obj, (int, long)):
                 obj = obj.entity.eid
             if repo.schema.rschema(rtype).inlined:
--- a/server/repository.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/server/repository.py	Fri Jun 14 16:26:25 2013 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -50,7 +50,7 @@
                       UnknownEid, AuthenticationError, ExecutionError,
                       ETypeNotSupportedBySources, MultiSourcesError,
                       BadConnectionId, Unauthorized, ValidationError,
-                      RepositoryError, UniqueTogetherError, typed_eid, onevent)
+                      RepositoryError, UniqueTogetherError, onevent)
 from cubicweb import cwvreg, schema, server
 from cubicweb.server import ShuttingDown, utils, hook, pool, querier, sources
 from cubicweb.server.session import Session, InternalSession, InternalManager
@@ -167,6 +167,9 @@
 
         self.pyro_registered = False
         self.pyro_uri = None
+        # every pyro client is handled in its own thread; map these threads to
+        # the session we opened for them so we can clean up when they go away
+        self._pyro_sessions = {}
         self.app_instances_bus = NullEventBus()
         self.info('starting repository from %s', self.config.apphome)
         # dictionary of opened sessions
@@ -192,9 +195,12 @@
         self._type_source_cache = {}
         # cache (extid, source uri) -> eid
         self._extid_cache = {}
-        # open some connections set
+        # open some connection sets
         if config.init_cnxset_pool:
             self.init_cnxset_pool()
+        # the hooks manager
+        self.hm = hook.HooksManager(self.vreg)
+        # registry hook to fix user class on registry reload
         @onevent('after-registry-reload', self)
         def fix_user_classes(self):
             # After registery reload the 'CWUser' class used for CWEtype
@@ -206,49 +212,55 @@
                     session.user.__class__ = usercls
 
     def init_cnxset_pool(self):
+        """should be called bootstrap_repository, as this is what it does"""
         config = self.config
         self._cnxsets_pool = Queue.Queue()
+        # 0. init a cnxset that will be used to fetch bootstrap information from
+        #    the database
         self._cnxsets_pool.put_nowait(pool.ConnectionsSet(self.sources))
+        # 1. set used cubes
+        if config.creating or not config.read_instance_schema:
+            config.bootstrap_cubes()
+        else:
+            self.set_schema(self.config.load_bootstrap_schema(), resetvreg=False)
+            config.init_cubes(self.get_cubes())
+        # 2. load schema
         if config.quick_start:
-            # quick start, usually only to get a minimal repository to get cubes
+            # quick start: only to get a minimal repository to get cubes
             # information (eg dump/restore/...)
-            config._cubes = ()
-            # only load hooks and entity classes in the registry
+            #
+            # restrict appobject_path to only load hooks and entity classes in
+            # the registry
             config.cube_appobject_path = set(('hooks', 'entities'))
             config.cubicweb_appobject_path = set(('hooks', 'entities'))
-            self.set_schema(config.load_schema())
+            # limit connections pool to 1
             config['connections-pool-size'] = 1
-            # will be reinitialized later from cubes found in the database
-            config._cubes = None
-        elif config.creating or not config.read_instance_schema:
+        if config.quick_start or config.creating or not config.read_instance_schema:
+            # load schema from the file system
             if not config.creating:
-                # test start: use the file system schema (quicker)
                 self.warning("set fs instance'schema")
-            config.bootstrap_cubes()
             self.set_schema(config.load_schema())
         else:
             # normal start: load the instance schema from the database
-            self.fill_schema()
-        if not config.creating:
-            self.init_sources_from_database()
-            if 'CWProperty' in self.schema:
-                self.vreg.init_properties(self.properties())
-        else:
+            self.info('loading schema from the repository')
+            self.set_schema(self.deserialize_schema())
+        # 3. initialize data sources
+        if config.creating:
             # call init_creating so that for instance native source can
             # configurate tsearch according to postgres version
             for source in self.sources:
                 source.init_creating()
-        # close initialization connetions set and reopen fresh ones for proper
-        # initialization now that we know cubes
+        else:
+            self.init_sources_from_database()
+            if 'CWProperty' in self.schema:
+                self.vreg.init_properties(self.properties())
+        # 4. close initialization connection set and reopen fresh ones for
+        #    proper initialization
         self._get_cnxset().close(True)
-        # list of available_cnxsets (we can't iterate on Queue instance)
-        self.cnxsets = []
+        self.cnxsets = [] # list of available cnxsets (can't iterate on a Queue)
         for i in xrange(config['connections-pool-size']):
             self.cnxsets.append(pool.ConnectionsSet(self.sources))
             self._cnxsets_pool.put_nowait(self.cnxsets[-1])
-        if config.quick_start:
-            config.init_cubes(self.get_cubes())
-        self.hm = hook.HooksManager(self.vreg)
 
     # internals ###############################################################
 
@@ -313,13 +325,9 @@
         source_config['type'] = type
         return sources.get_source(type, source_config, self, eid)
 
-    def set_schema(self, schema, resetvreg=True, rebuildinfered=True):
-        if rebuildinfered:
-            schema.rebuild_infered_relations()
+    def set_schema(self, schema, resetvreg=True):
         self.info('set schema %s %#x', schema.name, id(schema))
         if resetvreg:
-            if self.config._cubes is None:
-                self.config.init_cubes(self.get_cubes())
             # trigger full reload of all appobjects
             self.vreg.set_schema(schema)
         else:
@@ -331,12 +339,10 @@
             source.set_schema(schema)
         self.schema = schema
 
-    def fill_schema(self):
-        """load schema from the repository"""
+    def deserialize_schema(self):
+        """load schema from the database"""
         from cubicweb.server.schemaserial import deserialize_schema
-        self.info('loading schema from the repository')
         appschema = schema.CubicWebSchema(self.config.appid)
-        self.set_schema(self.config.load_bootstrap_schema(), resetvreg=False)
         self.debug('deserializing db schema into %s %#x', appschema.name, id(appschema))
         with self.internal_session() as session:
             try:
@@ -349,8 +355,7 @@
                 raise Exception('Is the database initialised ? (cause: %s)' %
                                 (ex.args and ex.args[0].strip() or 'unknown')), \
                                 None, sys.exc_info()[-1]
-        self.set_schema(appschema)
-
+        return appschema
 
     def _prepare_startup(self):
         """Prepare "Repository as a server" for startup.
@@ -754,6 +759,12 @@
             # try to get a user object
             user = self.authenticate_user(session, login, **kwargs)
         session = Session(user, self, cnxprops)
+        if threading.currentThread() in self._pyro_sessions:
+            # assume no pyro client does one get_repository followed by
+            # multiple repo.connect
+            assert self._pyro_sessions[threading.currentThread()] == None
+            self.debug('record session %s', session)
+            self._pyro_sessions[threading.currentThread()] = session
         user._cw = user.cw_rset.req = session
         user.cw_clear_relation_cache()
         self._sessions[session.id] = session
@@ -844,7 +855,7 @@
         self.debug('begin commit for session %s', sessionid)
         try:
             session = self._get_session(sessionid)
-            session.set_tx_data(txid)
+            session.set_tx(txid)
             return session.commit()
         except (ValidationError, Unauthorized):
             raise
@@ -857,7 +868,7 @@
         self.debug('begin rollback for session %s', sessionid)
         try:
             session = self._get_session(sessionid)
-            session.set_tx_data(txid)
+            session.set_tx(txid)
             session.rollback()
         except Exception:
             self.exception('unexpected error')
@@ -874,6 +885,8 @@
         # during `session_close` hooks
         session.commit()
         session.close()
+        if threading.currentThread() in self._pyro_sessions:
+            self._pyro_sessions[threading.currentThread()] = None
         del self._sessions[sessionid]
         self.info('closed session %s for user %s', sessionid, session.user.login)
 
@@ -1014,7 +1027,7 @@
         except KeyError:
             raise BadConnectionId('No such session %s' % sessionid)
         if setcnxset:
-            session.set_tx_data(txid) # must be done before set_cnxset
+            session.set_tx(txid) # must be done before set_cnxset
             session.set_cnxset()
         return session
 
@@ -1027,7 +1040,7 @@
         uri)` for the entity of the given `eid`
         """
         try:
-            eid = typed_eid(eid)
+            eid = int(eid)
         except ValueError:
             raise UnknownEid(eid)
         try:
@@ -1055,7 +1068,7 @@
         rqlcache = self.querier._rql_cache
         for eid in eids:
             try:
-                etype, uri, extid, auri = etcache.pop(typed_eid(eid)) # may be a string in some cases
+                etype, uri, extid, auri = etcache.pop(int(eid)) # may be a string in some cases
                 rqlcache.pop( ('%s X WHERE X eid %s' % (etype, eid),), None)
                 extidcache.pop((extid, uri), None)
             except KeyError:
@@ -1084,7 +1097,7 @@
                     key, args[key]))
             cachekey.append(etype)
             # ensure eid is correctly typed in args
-            args[key] = typed_eid(args[key])
+            args[key] = int(args[key])
         return tuple(cachekey)
 
     def eid2extid(self, source, eid, session=None):
@@ -1177,7 +1190,7 @@
                     hook.CleanupDeletedEidsCacheOp.get_instance(session).add_data(entity.eid)
                     self.system_source.delete_info_multi(session, [entity], uri)
                     if source.should_call_hooks:
-                        session._threaddata.pending_operations = pending_operations
+                        session._tx.pending_operations = pending_operations
             raise
 
     def add_info(self, session, entity, source, extid=None, complete=True):
@@ -1336,7 +1349,7 @@
                 suri = 'system'
             extid = source.get_extid(entity)
             self._extid_cache[(str(extid), suri)] = entity.eid
-        self._type_source_cache[entity.eid] = (entity.__regid__, suri, extid,
+        self._type_source_cache[entity.eid] = (entity.cw_etype, suri, extid,
                                                source.uri)
         return extid
 
@@ -1350,13 +1363,13 @@
         entity._cw_is_saved = False # entity has an eid but is not yet saved
         # init edited_attributes before calling before_add_entity hooks
         entity.cw_edited = edited
-        source = self.locate_etype_source(entity.__regid__)
+        source = self.locate_etype_source(entity.cw_etype)
         # allocate an eid to the entity before calling hooks
         entity.eid = self.system_source.create_eid(session)
         # set caches asap
         extid = self.init_entity_caches(session, entity, source)
         if server.DEBUG & server.DBG_REPO:
-            print 'ADD entity', self, entity.__regid__, entity.eid, edited
+            print 'ADD entity', self, entity.cw_etype, entity.eid, edited
         prefill_entity_caches(entity)
         if source.should_call_hooks:
             self.hm.call_hooks('before_add_entity', session, entity=entity)
@@ -1389,7 +1402,7 @@
         """
         entity = edited.entity
         if server.DEBUG & server.DBG_REPO:
-            print 'UPDATE entity', entity.__regid__, entity.eid, \
+            print 'UPDATE entity', entity.cw_etype, entity.eid, \
                   entity.cw_attr_cache, edited
         hm = self.hm
         eschema = entity.e_schema
@@ -1635,22 +1648,23 @@
         # into the pyro name server
         if self._use_pyrons():
             self.looping_task(60*10, self._ensure_pyro_ns)
+        pyro_sessions = self._pyro_sessions
         # install hacky function to free cnxset
-        self.looping_task(60, self._cleanup_pyro)
+        def handleConnection(conn, tcpserver, sessions=pyro_sessions):
+            sessions[threading.currentThread()] = None
+            return tcpserver.getAdapter().__class__.handleConnection(tcpserver.getAdapter(), conn, tcpserver)
+        daemon.getAdapter().handleConnection = handleConnection
+        def removeConnection(conn, sessions=pyro_sessions):
+            daemon.__class__.removeConnection(daemon, conn)
+            session = sessions.pop(threading.currentThread(), None)
+            if session is None:
+                # client was not yet connected to the repo
+                return
+            if not session.closed:
+                session.close()
+        daemon.removeConnection = removeConnection
         return daemon
 
-    def _cleanup_pyro(self):
-        """Very hacky function to cleanup session left by dead Pyro thread.
-
-        There is no clean pyro callback to detect this.
-        """
-        for session in self._sessions.values():
-            for thread, cnxset in session._threads_in_transaction.copy():
-                if not thread.isAlive():
-                    self.warning('Freeing cnxset used by dead pyro threads: %',
-                                 thread)
-                    session._free_thread_cnxset(thread, cnxset)
-
     def _ensure_pyro_ns(self):
         if not self._use_pyrons():
             return
--- a/server/schemaserial.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/server/schemaserial.py	Fri Jun 14 16:26:25 2013 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -21,16 +21,18 @@
 
 import os
 from itertools import chain
+import json
 
 from logilab.common.shellutils import ProgressBar
 
 from yams import BadSchemaDefinition, schema as schemamod, buildobjs as ybo
 
-from cubicweb import CW_SOFTWARE_ROOT, typed_eid
-from cubicweb.schema import (CONSTRAINTS, ETYPE_NAME_MAP,
+from cubicweb import CW_SOFTWARE_ROOT, Binary
+from cubicweb.schema import (KNOWN_RPROPERTIES, CONSTRAINTS, ETYPE_NAME_MAP,
                              VIRTUAL_RTYPES, PURE_VIRTUAL_RTYPES)
 from cubicweb.server import sqlutils
 
+
 def group_mapping(cursor, interactive=True):
     """create a group mapping from an rql cursor
 
@@ -58,7 +60,7 @@
                 if not value:
                     continue
                 try:
-                    eid = typed_eid(value)
+                    eid = int(value)
                 except ValueError:
                     print 'eid should be an integer'
                     continue
@@ -195,7 +197,14 @@
             if rdefs is not None:
                 ertidx[rdefeid] = rdefs
                 set_perms(rdefs, permsidx)
-
+    # Get the type parameters for additional base types.
+    try:
+        extra_props = dict(session.execute('Any X, XTP WHERE X is CWAttribute, '
+                                           'X extra_props XTP'))
+    except Exception:
+        session.critical('Previous CRITICAL notification about extra_props is not '
+                         'a problem if you are migrating to cubicweb 3.17')
+        extra_props = {} # not yet in the schema (introduced by 3.17 migration)
     for values in session.execute(
         'Any X,SE,RT,OE,CARD,ORD,DESC,IDX,FTIDX,I18N,DFLT WHERE X is CWAttribute,'
         'X relation_type RT, X cardinality CARD, X ordernum ORD, X indexed IDX,'
@@ -203,10 +212,12 @@
         'X fulltextindexed FTIDX, X from_entity SE, X to_entity OE',
         build_descr=False):
         rdefeid, seid, reid, oeid, card, ord, desc, idx, ftidx, i18n, default = values
+        typeparams = extra_props.get(rdefeid)
+        typeparams = json.load(typeparams) if typeparams else {}
         _add_rdef(rdefeid, seid, reid, oeid,
                   cardinality=card, description=desc, order=ord,
                   indexed=idx, fulltextindexed=ftidx, internationalizable=i18n,
-                  default=default)
+                  default=default, **typeparams)
     for values in session.execute(
         'Any X,SE,RT,OE,CARD,ORD,DESC,C WHERE X is CWRelation, X relation_type RT,'
         'X cardinality CARD, X ordernum ORD, X description DESC, '
@@ -509,10 +520,14 @@
 def _rdef_values(rdef):
     amap = {'order': 'ordernum', 'default': 'defaultval'}
     values = {}
-    for prop, default in rdef.rproperty_defs(rdef.object).iteritems():
+    extra = {}
+    for prop in rdef.rproperty_defs(rdef.object):
         if prop in ('eid', 'constraints', 'uid', 'infered', 'permissions'):
             continue
         value = getattr(rdef, prop)
+        if prop not in KNOWN_RPROPERTIES:
+            extra[prop] = value
+            continue
         # XXX type cast really necessary?
         if prop in ('indexed', 'fulltextindexed', 'internationalizable'):
             value = bool(value)
@@ -526,6 +541,8 @@
             if not isinstance(value, unicode):
                 value = unicode(value)
         values[amap.get(prop, prop)] = value
+    if extra:
+        values['extra_props'] = Binary(json.dumps(extra))
     relations = ['X %s %%(%s)s' % (attr, attr) for attr in sorted(values)]
     return relations, values
 
--- a/server/serverconfig.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/server/serverconfig.py	Fri Jun 14 16:26:25 2013 +0200
@@ -27,7 +27,7 @@
 from logilab.common.decorators import wproperty, cached
 
 from cubicweb.toolsutils import read_config, restrict_perms_to_user
-from cubicweb.cwconfig import CONFIGURATIONS, CubicWebConfiguration, merge_options
+from cubicweb.cwconfig import CONFIGURATIONS, CubicWebConfiguration
 from cubicweb.server import SOURCE_TYPES
 
 
@@ -96,7 +96,7 @@
     cubicweb_appobject_path = CubicWebConfiguration.cubicweb_appobject_path | set(['sobjects', 'hooks'])
     cube_appobject_path = CubicWebConfiguration.cube_appobject_path | set(['sobjects', 'hooks'])
 
-    options = merge_options((
+    options = lgconfig.merge_options((
         # ctl configuration
         ('host',
          {'type' : 'string',
@@ -333,7 +333,7 @@
             sconfig = sourcescfg[section]
             if isinstance(sconfig, dict):
                 # get a Configuration object
-                assert section == 'system'
+                assert section == 'system', '%r is not system' % section
                 _sconfig = SourceConfiguration(
                     self, options=SOURCE_TYPES['native'].options)
                 for attr, val in sconfig.items():
@@ -342,7 +342,7 @@
                     except lgconfig.OptionError:
                         # skip adapter, may be present on pre 3.10 instances
                         if attr != 'adapter':
-                            self.error('skip unknown option %s in sources file')
+                            self.error('skip unknown option %s in sources file' % attr)
                 sconfig = _sconfig
             stream.write('[%s]\n%s\n' % (section, generate_source_config(sconfig)))
         restrict_perms_to_user(sourcesfile)
--- a/server/serverctl.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/server/serverctl.py	Fri Jun 14 16:26:25 2013 +0200
@@ -28,12 +28,12 @@
 import subprocess
 
 from logilab.common import nullobject
-from logilab.common.configuration import Configuration
+from logilab.common.configuration import Configuration, merge_options
 from logilab.common.shellutils import ASK, generate_password
 
 from cubicweb import AuthenticationError, ExecutionError, ConfigurationError
 from cubicweb.toolsutils import Command, CommandHandler, underline_title
-from cubicweb.cwctl import CWCTL, check_options_consistency
+from cubicweb.cwctl import CWCTL, check_options_consistency, ConfigureInstanceCommand
 from cubicweb.server import SOURCE_TYPES
 from cubicweb.server.repository import Repository
 from cubicweb.server.serverconfig import (
@@ -1065,12 +1065,65 @@
             if val:
                 print key, ':', val
 
+class SchemaDiffCommand(Command):
+    """Generate a diff between schema and fsschema description.
+
+    <instance>
+      the identifier of the instance
+    <diff-tool>
+      the name of the diff tool to compare the two generated files.
+    """
+    name = 'schema-diff'
+    arguments = '<instance> <diff-tool>'
+    min_args = max_args = 2
+
+    def run(self, args):
+        from yams.diff import schema_diff
+        appid = args.pop(0)
+        diff_tool = args.pop(0)
+        config = ServerConfiguration.config_for(appid)
+        repo, cnx = repo_cnx(config)
+        session = repo._get_session(cnx.sessionid, setcnxset=True)
+        fsschema = config.load_schema(expand_cubes=True)
+        schema_diff(repo.schema, fsschema, diff_tool)
+
 
 for cmdclass in (CreateInstanceDBCommand, InitInstanceCommand,
                  GrantUserOnInstanceCommand, ResetAdminPasswordCommand,
                  StartRepositoryCommand,
                  DBDumpCommand, DBRestoreCommand, DBCopyCommand,
                  AddSourceCommand, CheckRepositoryCommand, RebuildFTICommand,
-                 SynchronizeInstanceSchemaCommand, SynchronizeSourceCommand
+                 SynchronizeInstanceSchemaCommand, SynchronizeSourceCommand, SchemaDiffCommand,
                  ):
     CWCTL.register(cmdclass)
+
+# extend configure command to set options in sources config file ###############
+
+db_options = (
+    ('db',
+     {'short': 'd', 'type' : 'named', 'metavar' : 'key1:value1,key2:value2',
+      'default': None,
+      'help': 'set <key> to <value> in "source" configuration file.',
+      }),
+    )
+
+ConfigureInstanceCommand.options = merge_options(
+        ConfigureInstanceCommand.options + db_options)
+
+configure_instance = ConfigureInstanceCommand.configure_instance
+def configure_instance2(self, appid):
+    configure_instance(self, appid)
+    if self.config.db is not None:
+        appcfg = ServerConfiguration.config_for(appid)
+        srccfg = appcfg.read_sources_file()
+        for key, value in self.config.db.iteritems():
+            try:
+                srccfg['system'][key] = value
+            except KeyError:
+                raise ConfigurationError('unknown configuration key "%s" for source' % key)
+        admcfg = Configuration(options=USER_OPTIONS)
+        admcfg['login'] = srccfg['admin']['login']
+        admcfg['password'] = srccfg['admin']['password']
+        srccfg['admin'] = admcfg
+        appcfg.write_sources_file(srccfg)
+ConfigureInstanceCommand.configure_instance = configure_instance2
--- a/server/session.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/server/session.py	Fri Jun 14 16:26:25 2013 +0200
@@ -48,28 +48,28 @@
 
 @objectify_predicate
 def is_user_session(cls, req, **kwargs):
-    """repository side only predicate returning 1 if the session is a regular
-    user session and not an internal session
-    """
+    """return 1 when session is not internal.
+
+    This predicate can only be used repository side only. """
     return not req.is_internal_session
 
 @objectify_predicate
 def is_internal_session(cls, req, **kwargs):
-    """repository side only predicate returning 1 if the session is not a regular
-    user session but an internal session
-    """
+    """return 1 when session is not internal.
+
+    This predicate can only be used repository side only. """
     return req.is_internal_session
 
 @objectify_predicate
 def repairing(cls, req, **kwargs):
-    """repository side only predicate returning 1 if the session is not a regular
-    user session but an internal session
-    """
+    """return 1 when repository is running in repair mode"""
     return req.vreg.config.repairing
 
 
 class transaction(object):
-    """context manager to enter a transaction for a session: when exiting the
+    """Ensure that the transaction is either commited or rollbacked at exit
+
+    Context manager to enter a transaction for a session: when exiting the
     `with` block on exception, call `session.rollback()`, else call
     `session.commit()` on normal exit
     """
@@ -87,8 +87,16 @@
         else:
             self.session.commit(free_cnxset=self.free_cnxset)
 
+@deprecated('[3.17] use <object>.allow/deny_all_hooks_but instead')
+def hooks_control(obj, mode, *categories):
+    assert mode in  (HOOKS_ALLOW_ALL, HOOKS_DENY_ALL)
+    if mode == HOOKS_ALLOW_ALL:
+        return obj.allow_all_hooks_but(*categories)
+    elif mode == HOOKS_DENY_ALL:
+        return obj.deny_all_hooks_but(*categories)
 
-class hooks_control(object):
+
+class _hooks_control(object):
     """context manager to control activated hooks categories.
 
     If mode is session.`HOOKS_DENY_ALL`, given hooks categories will
@@ -99,10 +107,10 @@
 
     .. sourcecode:: python
 
-       with hooks_control(self.session, self.session.HOOKS_ALLOW_ALL, 'integrity'):
+       with _hooks_control(self.session, self.session.HOOKS_ALLOW_ALL, 'integrity'):
            # ... do stuff with all but 'integrity' hooks activated
 
-       with hooks_control(self.session, self.session.HOOKS_DENY_ALL, 'integrity'):
+       with _hooks_control(self.session, self.session.HOOKS_DENY_ALL, 'integrity'):
            # ... do stuff with none but 'integrity' hooks activated
 
     This is an internal api, you should rather use
@@ -111,45 +119,612 @@
     methods.
     """
     def __init__(self, session, mode, *categories):
+        assert mode in (HOOKS_ALLOW_ALL, HOOKS_DENY_ALL)
         self.session = session
+        self.tx = session._tx
         self.mode = mode
         self.categories = categories
+        self.oldmode = None
+        self.changes = ()
 
     def __enter__(self):
-        self.oldmode, self.changes = self.session.init_hooks_mode_categories(
-            self.mode, self.categories)
+        self.oldmode = self.tx.hooks_mode
+        self.tx.hooks_mode = self.mode
+        if self.mode is HOOKS_DENY_ALL:
+            self.changes = self.tx.enable_hook_categories(*self.categories)
+        else:
+            self.changes = self.tx.disable_hook_categories(*self.categories)
+        self.tx.ctx_count += 1
 
     def __exit__(self, exctype, exc, traceback):
-        self.session.reset_hooks_mode_categories(self.oldmode, self.mode, self.changes)
-
+        self.tx.ctx_count -= 1
+        if self.tx.ctx_count == 0:
+            self.session._clear_thread_storage(self.tx)
+        else:
+            try:
+                if self.categories:
+                    if self.mode is HOOKS_DENY_ALL:
+                        self.tx.disable_hook_categories(*self.categories)
+                    else:
+                        self.tx.enable_hook_categories(*self.categories)
+            finally:
+                self.tx.hooks_mode = self.oldmode
 
-class security_enabled(object):
-    """context manager to control security w/ session.execute, since by
-    default security is disabled on queries executed on the repository
+@deprecated('[3.17] use <object>.security_enabled instead')
+def security_enabled(obj, *args, **kwargs):
+    return obj.security_enabled(*args, **kwargs)
+
+class _security_enabled(object):
+    """context manager to control security w/ session.execute,
+
+    By default security is disabled on queries executed on the repository
     side.
     """
     def __init__(self, session, read=None, write=None):
         self.session = session
+        self.tx = session._tx
         self.read = read
         self.write = write
+        self.oldread = None
+        self.oldwrite = None
 
     def __enter__(self):
-        self.oldread, self.oldwrite = self.session.init_security(
-            self.read, self.write)
+        if self.read is None:
+            self.oldread = None
+        else:
+            self.oldread = self.tx.read_security
+            self.tx.read_security = self.read
+        if self.write is None:
+            self.oldwrite = None
+        else:
+            self.oldwrite = self.tx.write_security
+            self.tx.write_security = self.write
+        self.tx.ctx_count += 1
 
     def __exit__(self, exctype, exc, traceback):
-        self.session.reset_security(self.oldread, self.oldwrite)
+        self.tx.ctx_count -= 1
+        if self.tx.ctx_count == 0:
+            self.session._clear_thread_storage(self.tx)
+        else:
+            if self.oldread is not None:
+                self.tx.read_security = self.oldread
+            if self.oldwrite is not None:
+                self.tx.write_security = self.oldwrite
+
+HOOKS_ALLOW_ALL = object()
+HOOKS_DENY_ALL = object()
+DEFAULT_SECURITY = object() # evaluated to true by design
+
+class SessionClosedError(RuntimeError):
+    pass
+
+class CnxSetTracker(object):
+    """Keep track of which transaction use which cnxset.
+
+    There should be one of these objects per session, plus another one for
+    the internal session.
+
+    Session objects are responsible for creating their CnxSetTracker object.
+
+    Transactions should use the :meth:`record` and :meth:`forget` methods to
+    inform the tracker of the cnxset they have acquired.
+
+    .. automethod:: cubicweb.server.session.CnxSetTracker.record
+    .. automethod:: cubicweb.server.session.CnxSetTracker.forget
+
+    Sessions use the :meth:`close` and :meth:`wait` methods when closing.
+
+    .. automethod:: cubicweb.server.session.CnxSetTracker.close
+    .. automethod:: cubicweb.server.session.CnxSetTracker.wait
+
+    This object itself is threadsafe. It also requires callers to acquire its
+    lock in some situations.
+    """
+
+    def __init__(self):
+        self._active = True
+        self._condition = threading.Condition()
+        self._record = {}
+
+    def __enter__(self):
+        self._condition.__enter__()
+
+    def __exit__(self, *args):
+        self._condition.__exit__(*args)
+
+    def record(self, txid, cnxset):
+        """Inform the tracker that a txid have acquired a cnxset
+
+        This method is to be used by Transaction objects.
+
+        This method fails when:
+        - The txid already has a recorded cnxset.
+        - The tracker is not active anymore.
+
+        Notes about the caller:
+        (1) It is responsible for retrieving a cnxset.
+        (2) It must be prepared to release the cnxset if the
+            `cnxsettracker.forget` call fails.
+        (3) It should acquire the tracker lock until the very end of the operation.
+        (4) However, it takes care to lock the CnxSetTracker object after having
+            retrieved the cnxset to prevent deadlock.
+
+        A typical usage looks like::
+
+        cnxset = repo._get_cnxset() # (1)
+        try:
+            with cnxset_tracker: # (3) and (4)
+                cnxset_tracker.record(caller.id, cnxset)
+                # (3') operation ends when caller is in expected state only
+                caller.cnxset = cnxset
+        except Exception:
+            repo._free_cnxset(cnxset) # (2)
+            raise
+        """
+        # dubious since the caller is supposed to have acquired it anyway.
+        with self._condition:
+            if not self._active:
+                raise SessionClosedError('Closed')
+            old = self._record.get(txid)
+            if old is not None:
+                raise ValueError('"%s" already have a cnx_set (%r)'
+                                 % (txid, old))
+            self._record[txid] = cnxset
+
+    def forget(self, txid, cnxset):
+        """Inform the tracker that a txid have release a cnxset
+
+        This method is to be used by Transaction objects.
+
+        This method fails when:
+        - The cnxset for the txid does not match the recorded one.
+
+        Notes about the caller:
+        (1) It is responsible for releasing the cnxset.
+        (2) It should acquire the tracker lock during the operation to ensure
+            the internal tracker state is always accurate regarding its own state.
+
+        A typical usage looks like::
+
+        cnxset = caller.cnxset
+        try:
+            with cnxset_tracker:
+                # (2) you can not have caller.cnxset out of sync with
+                #     cnxset_tracker state while unlocked
+                caller.cnxset = None
+                cnxset_tracker.forget(caller.id, cnxset)
+        finally:
+            cnxset = repo._free_cnxset(cnxset) # (1)
+        """
+        with self._condition:
+            old = self._record.get(txid, None)
+            if old is not cnxset:
+                raise ValueError('recorded cnxset for "%s" mismatch: %r != %r'
+                                 % (txid, old, cnxset))
+            self._record.pop(txid)
+            self._condition.notify_all()
+
+    def close(self):
+        """Marks the tracker as inactive.
+
+        This method is to be used by Session objects.
+
+        An inactive tracker does not accept new records anymore.
+        """
+        with self._condition:
+            self._active = False
+
+    def wait(self, timeout=10):
+        """Wait for all recorded cnxset to be released
+
+        This method is to be used by Session objects.
+
+        Returns a tuple of the transaction ids that remain open.
+        """
+        with self._condition:
+            if  self._active:
+                raise RuntimeError('Cannot wait on active tracker.'
+                                   ' Call tracker.close() first')
+            while self._record and timeout > 0:
+                start = time()
+                self._condition.wait(timeout)
+                timeout -= time() - start
+            return tuple(self._record)
+
+class Transaction(object):
+    """Repository Transaction
+
+    Holds all transaction related data
+
+    Database connections resource:
+
+      :attr:`running_dbapi_query`, boolean flag telling if the executing query
+      is coming from a dbapi connection or is a query from within the repository
+
+      :attr:`cnxset`, the connections set to use to execute queries on sources.
+      If the transaction is read only, the connection set may be freed between
+      actual query. This allows multiple transaction with a reasonable low
+      connection set pool size. control mechanism is detailed below
+
+    .. automethod:: cubicweb.server.session.Transaction.set_cnxset
+    .. automethod:: cubicweb.server.session.Transaction.free_cnxset
+
+      :attr:`mode`, string telling the connections set handling mode, may be one
+      of 'read' (connections set may be freed), 'write' (some write was done in
+      the connections set, it can't be freed before end of the transaction),
+      'transaction' (we want to keep the connections set during all the
+      transaction, with or without writing)
+
+    Internal transaction data:
+
+      :attr:`data` is a dictionary containing some shared data
+      cleared at the end of the transaction. Hooks and operations may put
+      arbitrary data in there, and this may also be used as a communication
+      channel between the client and the repository.
+
+      :attr:`pending_operations`, ordered list of operations to be processed on
+      commit/rollback
+
+      :attr:`commit_state`, describing the transaction commit state, may be one
+      of None (not yet committing), 'precommit' (calling precommit event on
+      operations), 'postcommit' (calling postcommit event on operations),
+      'uncommitable' (some :exc:`ValidationError` or :exc:`Unauthorized` error
+      has been raised during the transaction and so it must be rollbacked).
+
+    Hooks controls:
+
+      :attr:`hooks_mode`, may be either `HOOKS_ALLOW_ALL` or `HOOKS_DENY_ALL`.
+
+      :attr:`enabled_hook_cats`, when :attr:`hooks_mode` is
+      `HOOKS_DENY_ALL`, this set contains hooks categories that are enabled.
+
+      :attr:`disabled_hook_cats`, when :attr:`hooks_mode` is
+      `HOOKS_ALLOW_ALL`, this set contains hooks categories that are disabled.
+
+    Security level Management:
+
+      :attr:`read_security` and :attr:`write_security`, boolean flags telling if
+      read/write security is currently activated.
+
+    """
+
+    def __init__(self, txid, session, rewriter):
+        #: transaction unique id
+        self.transactionid = txid
+        #: reentrance handling
+        self.ctx_count = 0
+
+        #: server.Repository object
+        self.repo = session.repo
+        self.vreg = self.repo.vreg
+
+        #: connection handling mode
+        self.mode = session.default_mode
+        #: connection set used to execute queries on sources
+        self._cnxset = None
+        #: CnxSetTracker used to report cnxset usage
+        self._cnxset_tracker = session._cnxset_tracker
+        #: is this transaction from a client or internal to the repo
+        self.running_dbapi_query = True
+
+        #: dict containing arbitrary data cleared at the end of the transaction
+        self.data = {}
+        #: ordered list of operations to be processed on commit/rollback
+        self.pending_operations = []
+        #: (None, 'precommit', 'postcommit', 'uncommitable')
+        self.commit_state = None
+
+        ### hook control attribute
+        self.hooks_mode = HOOKS_ALLOW_ALL
+        self.disabled_hook_cats = set()
+        self.enabled_hook_cats = set()
+        self.pruned_hooks_cache = {}
+
+
+        ### security control attributes
+        self._read_security = DEFAULT_SECURITY # handled by a property
+        self.write_security = DEFAULT_SECURITY
+
+        # undo control
+        config = session.repo.config
+        if config.creating or config.repairing or session.is_internal_session:
+            self.undo_actions = False
+        else:
+            self.undo_actions = config['undo-enabled']
+
+        # RQLRewriter are not thread safe
+        self._rewriter = rewriter
+
+    @property
+    def transaction_data(self):
+        return self.data
 
 
-class TransactionData(object):
-    def __init__(self, txid):
-        self.transactionid = txid
-        self.ctx_count = 0
+    def clear(self):
+        """reset internal data"""
+        self.data = {}
+        #: ordered list of operations to be processed on commit/rollback
+        self.pending_operations = []
+        #: (None, 'precommit', 'postcommit', 'uncommitable')
+        self.commit_state = None
+        self.pruned_hooks_cache = {}
+    # Connection Set Management ###############################################
+    @property
+    def cnxset(self):
+        return self._cnxset
+
+    @cnxset.setter
+    def cnxset(self, new_cnxset):
+        with self._cnxset_tracker:
+            old_cnxset = self._cnxset
+            if new_cnxset is old_cnxset:
+                return #nothing to do
+            if old_cnxset is not None:
+                self._cnxset = None
+                self.ctx_count -= 1
+                self._cnxset_tracker.forget(self.transactionid, old_cnxset)
+            if new_cnxset is not None:
+                self._cnxset_tracker.record(self.transactionid, new_cnxset)
+                self._cnxset = new_cnxset
+                self.ctx_count += 1
+
+    def set_cnxset(self):
+        """the transaction need a connections set to execute some queries"""
+        if self.cnxset is None:
+            cnxset = self.repo._get_cnxset()
+            try:
+                self.cnxset = cnxset
+                try:
+                    cnxset.cnxset_set()
+                except:
+                    self.cnxset = None
+                    raise
+            except:
+                self.repo._free_cnxset(cnxset)
+                raise
+        return self.cnxset
+
+    def free_cnxset(self, ignoremode=False):
+        """the transaction is no longer using its connections set, at least for some time"""
+        # cnxset may be none if no operation has been done since last commit
+        # or rollback
+        cnxset = self.cnxset
+        if cnxset is not None and (ignoremode or self.mode == 'read'):
+            try:
+                self.cnxset = None
+            finally:
+                cnxset.cnxset_freed()
+                self.repo._free_cnxset(cnxset)
+
+
+    # Entity cache management #################################################
+    #
+    # The transaction entity cache is held in tx.data; it is removed at the
+    # end of the transaction (commit and rollback)
+    #
+    # XXX transaction level caching may be a pb with multiple repository
+    # instances, but 1. this is probably not the only one :$ and 2. it may be
+    # an acceptable risk. Anyway we could activate it or not according to a
+    # configuration option
+
+    def set_entity_cache(self, entity):
+        """Add `entity` to the transaction entity cache"""
+        ecache = self.data.setdefault('ecache', {})
+        ecache.setdefault(entity.eid, entity)
+
+    def entity_cache(self, eid):
+        """get cache entity for `eid`"""
+        return self.data['ecache'][eid]
+
+    def cached_entities(self):
+        """return the whole entity cache"""
+        return self.data.get('ecache', {}).values()
+
+    def drop_entity_cache(self, eid=None):
+        """drop entity from the cache
+
+        If eid is None, the whole cache is dropped"""
+        if eid is None:
+            self.data.pop('ecache', None)
+        else:
+            del self.data['ecache'][eid]
+
+    # Tracking of entity added of removed in the transaction ##################
+    #
+    # Those are functions that allow cheap calls from clients in other processes.
+
+    def deleted_in_transaction(self, eid):
+        """return True if the entity of the given eid is being deleted in the
+        current transaction
+        """
+        return eid in self.data.get('pendingeids', ())
+
+    def added_in_transaction(self, eid):
+        """return True if the entity of the given eid is being created in the
+        current transaction
+        """
+        return eid in self.data.get('neweids', ())
+
+    # Operation management ####################################################
+
+    def add_operation(self, operation, index=None):
+        """add an operation to be executed at the end of the transaction"""
+        if index is None:
+            self.pending_operations.append(operation)
+        else:
+            self.pending_operations.insert(index, operation)
+
+    # Hooks control ###########################################################
+
+    def disable_hook_categories(self, *categories):
+        """disable the given hook categories:
+
+        - on HOOKS_DENY_ALL mode, ensure those categories are not enabled
+        - on HOOKS_ALLOW_ALL mode, ensure those categories are disabled
+        """
+        changes = set()
+        self.pruned_hooks_cache.clear()
+        categories = set(categories)
+        if self.hooks_mode is HOOKS_DENY_ALL:
+            enabledcats = self.enabled_hook_cats
+            changes = enabledcats & categories
+            enabledcats -= changes # changes is small hence faster
+        else:
+            disabledcats = self.disabled_hook_cats
+            changes = categories - disabledcats
+            disabledcats |= changes # changes is small hence faster
+        return tuple(changes)
+
+    def enable_hook_categories(self, *categories):
+        """enable the given hook categories:
+
+        - on HOOKS_DENY_ALL mode, ensure those categories are enabled
+        - on HOOKS_ALLOW_ALL mode, ensure those categories are not disabled
+        """
+        changes = set()
+        self.pruned_hooks_cache.clear()
+        categories = set(categories)
+        if self.hooks_mode is HOOKS_DENY_ALL:
+            enabledcats = self.enabled_hook_cats
+            changes = categories - enabledcats
+            enabledcats |= changes # changes is small hence faster
+        else:
+            disabledcats = self.disabled_hook_cats
+            changes = disabledcats & categories
+            disabledcats -= changes # changes is small hence faster
+        return tuple(changes)
+
+    def is_hook_category_activated(self, category):
+        """return a boolean telling if the given category is currently activated
+        or not
+        """
+        if self.hooks_mode is HOOKS_DENY_ALL:
+            return category in self.enabled_hook_cats
+        return category not in self.disabled_hook_cats
+
+    def is_hook_activated(self, hook):
+        """return a boolean telling if the given hook class is currently
+        activated or not
+        """
+        return self.is_hook_category_activated(hook.category)
+
+    # Security management #####################################################
+    @property
+    def read_security(self):
+        return self._read_security
+
+    @read_security.setter
+    def read_security(self, activated):
+        oldmode = self._read_security
+        self._read_security = activated
+        # running_dbapi_query used to detect hooks triggered by a 'dbapi' query
+        # (eg not issued on the session). This is tricky since we the execution
+        # model of a (write) user query is:
+        #
+        # repository.execute (security enabled)
+        #  \-> querier.execute
+        #       \-> repo.glob_xxx (add/update/delete entity/relation)
+        #            \-> deactivate security before calling hooks
+        #                 \-> WE WANT TO CHECK QUERY NATURE HERE
+        #                      \-> potentially, other calls to querier.execute
+        #
+        # so we can't rely on simply checking session.read_security, but
+        # recalling the first transition from DEFAULT_SECURITY to something
+        # else (False actually) is not perfect but should be enough
+        #
+        # also reset running_dbapi_query to true when we go back to
+        # DEFAULT_SECURITY
+        self.running_dbapi_query = (oldmode is DEFAULT_SECURITY
+                                    or activated is DEFAULT_SECURITY)
+
+    # undo support ############################################################
+
+    def ertype_supports_undo(self, ertype):
+        return self.undo_actions and ertype not in NO_UNDO_TYPES
+
+    def transaction_uuid(self, set=True):
+        uuid = self.data.get('tx_uuid')
+        if set and uuid is None:
+            raise KeyError
+        return uuid
+
+    def transaction_inc_action_counter(self):
+        num = self.data.setdefault('tx_action_count', 0) + 1
+        self.data['tx_action_count'] = num
+        return num
+    # db-api like interface ###################################################
+
+    def source_defs(self):
+        return self.repo.source_defs()
+
+    def describe(self, eid, asdict=False):
+        """return a tuple (type, sourceuri, extid) for the entity with id <eid>"""
+        metas = self.repo.type_and_source_from_eid(eid, self)
+        if asdict:
+            return dict(zip(('type', 'source', 'extid', 'asource'), metas))
+        # XXX :-1 for cw compat, use asdict=True for full information
+        return metas[:-1]
+
+
+    def source_from_eid(self, eid):
+        """return the source where the entity with id <eid> is located"""
+        return self.repo.source_from_eid(eid, self)
+
+    # resource accessors ######################################################
+
+    def system_sql(self, sql, args=None, rollback_on_failure=True):
+        """return a sql cursor on the system database"""
+        if sql.split(None, 1)[0].upper() != 'SELECT':
+            self.mode = 'write'
+        source = self.cnxset.source('system')
+        try:
+            return source.doexec(self, sql, args, rollback=rollback_on_failure)
+        except (source.OperationalError, source.InterfaceError):
+            if not rollback_on_failure:
+                raise
+            source.warning("trying to reconnect")
+            self.cnxset.reconnect(source)
+            return source.doexec(self, sql, args, rollback=rollback_on_failure)
+
+    def rtype_eids_rdef(self, rtype, eidfrom, eidto):
+        # use type_and_source_from_eid instead of type_from_eid for optimization
+        # (avoid two extra methods call)
+        subjtype = self.repo.type_and_source_from_eid(eidfrom, self)[0]
+        objtype = self.repo.type_and_source_from_eid(eidto, self)[0]
+        return self.vreg.schema.rschema(rtype).rdefs[(subjtype, objtype)]
+
+
+def tx_attr(attr_name, writable=False):
+    """return a property to forward attribute access to transaction.
+
+    This is to be used by session"""
+    args = {}
+    def attr_from_tx(session):
+        return getattr(session._tx, attr_name)
+    args['fget'] = attr_from_tx
+    if writable:
+        def write_attr(session, value):
+            return setattr(session._tx, attr_name, value)
+        args['fset'] = write_attr
+    return property(**args)
+
+def tx_meth(meth_name):
+    """return a function forwarding calls to transaction.
+
+    This is to be used by session"""
+    def meth_from_tx(session, *args, **kwargs):
+        return getattr(session._tx, meth_name)(*args, **kwargs)
+    return meth_from_tx
 
 
 class Session(RequestSessionBase):
-    """Repository usersession, tie a session id, user, connections set and
-    other session data all together.
+    """Repository user session
+
+    This tie all together:
+     * session id,
+     * user,
+     * connections set,
+     * other session data.
 
     About session storage / transactions
     ------------------------------------
@@ -161,20 +736,17 @@
       :attr:`data` is a dictionary containing shared data, used to communicate
       extra information between the client and the repository
 
-      :attr:`_tx_data` is a dictionary of :class:`TransactionData` instance, one
+      :attr:`_txs` is a dictionary of :class:`TransactionData` instance, one
       for each running transaction. The key is the transaction id. By default
       the transaction id is the thread name but it can be otherwise (per dbapi
       cursor for instance, or per thread name *from another process*).
 
-      :attr:`__threaddata` is a thread local storage whose `txdata` attribute
-      refers to the proper instance of :class:`TransactionData` according to the
+      :attr:`__threaddata` is a thread local storage whose `tx` attribute
+      refers to the proper instance of :class:`Transaction` according to the
       transaction.
 
-      :attr:`_threads_in_transaction` is a set of (thread, connections set)
-      referencing threads that currently hold a connections set for the session.
-
-    You should not have to use neither :attr:`_txdata` nor :attr:`__threaddata`,
-    simply access transaction data transparently through the :attr:`_threaddata`
+    You should not have to use neither :attr:`_tx` nor :attr:`__threaddata`,
+    simply access transaction data transparently through the :attr:`_tx`
     property. Also, you usually don't have to access it directly since current
     transaction's data may be accessed/modified through properties / methods:
 
@@ -184,11 +756,24 @@
       this may also be used as a communication channel between the client and
       the repository.
 
+    .. automethod:: cubicweb.server.session.Session.get_shared_data
+    .. automethod:: cubicweb.server.session.Session.set_shared_data
+    .. automethod:: cubicweb.server.session.Session.added_in_transaction
+    .. automethod:: cubicweb.server.session.Session.deleted_in_transaction
+
+    Transaction state information:
+
+      :attr:`running_dbapi_query`, boolean flag telling if the executing query
+      is coming from a dbapi connection or is a query from within the repository
+
       :attr:`cnxset`, the connections set to use to execute queries on sources.
       During a transaction, the connection set may be freed so that is may be
       used by another session as long as no writing is done. This means we can
       have multiple sessions with a reasonably low connections set pool size.
 
+    .. automethod:: cubicweb.server.session.Session.set_cnxset
+    .. automethod:: cubicweb.server.session.Session.free_cnxset
+
       :attr:`mode`, string telling the connections set handling mode, may be one
       of 'read' (connections set may be freed), 'write' (some write was done in
       the connections set, it can't be freed before end of the transaction),
@@ -204,9 +789,20 @@
       'uncommitable' (some :exc:`ValidationError` or :exc:`Unauthorized` error
       has been raised during the transaction and so it must be rollbacked).
 
+    .. automethod:: cubicweb.server.session.Session.commit
+    .. automethod:: cubicweb.server.session.Session.rollback
+    .. automethod:: cubicweb.server.session.Session.close
+    .. automethod:: cubicweb.server.session.Session.closed
+
+    Security level Management:
+
       :attr:`read_security` and :attr:`write_security`, boolean flags telling if
       read/write security is currently activated.
 
+    .. automethod:: cubicweb.server.session.Session.security_enabled
+
+    Hooks Management:
+
       :attr:`hooks_mode`, may be either `HOOKS_ALLOW_ALL` or `HOOKS_DENY_ALL`.
 
       :attr:`enabled_hook_categories`, when :attr:`hooks_mode` is
@@ -215,12 +811,23 @@
       :attr:`disabled_hook_categories`, when :attr:`hooks_mode` is
       `HOOKS_ALLOW_ALL`, this set contains hooks categories that are disabled.
 
+    .. automethod:: cubicweb.server.session.Session.deny_all_hooks_but
+    .. automethod:: cubicweb.server.session.Session.allow_all_hooks_but
+    .. automethod:: cubicweb.server.session.Session.is_hook_category_activated
+    .. automethod:: cubicweb.server.session.Session.is_hook_activated
 
-      :attr:`running_dbapi_query`, boolean flag telling if the executing query
-      is coming from a dbapi connection or is a query from within the repository
+    Data manipulation:
+
+    .. automethod:: cubicweb.server.session.Session.add_relation
+    .. automethod:: cubicweb.server.session.Session.add_relations
+    .. automethod:: cubicweb.server.session.Session.delete_relation
 
-    .. automethod:: cubicweb.server.session.deny_all_hooks_but
-    .. automethod:: cubicweb.server.session.all_all_hooks_but
+    Other:
+
+    .. automethod:: cubicweb.server.session.Session.call_service
+
+
+
     """
     is_request = False
     is_internal_session = False
@@ -232,11 +839,6 @@
         self.repo = repo
         self.timestamp = time()
         self.default_mode = 'read'
-        # undo support
-        if repo.config.creating or repo.config.repairing or self.is_internal_session:
-            self.undo_actions = False
-        else:
-            self.undo_actions = repo.config['undo-enabled']
         # short cut to querier .execute method
         self._execute = repo.querier.execute
         # shared data, used to communicate extra information between the client
@@ -244,17 +846,54 @@
         self.data = {}
         # i18n initialization
         self.set_language(user.prefered_language())
-        # internals
-        self._tx_data = {}
+        ### internals
+        # Transactions of this session
+        self._txs = {}
+        # Data local to the thread
         self.__threaddata = threading.local()
-        self._threads_in_transaction = set()
+        self._cnxset_tracker = CnxSetTracker()
         self._closed = False
-        self._closed_lock = threading.Lock()
+        self._lock = threading.RLock()
 
     def __unicode__(self):
         return '<session %s (%s 0x%x)>' % (
             unicode(self.user.login), self.id, id(self))
 
+    def get_tx(self, txid):
+        """return the <txid> transaction attached to this session
+
+        Transaction is created if necessary"""
+        with self._lock: # no transaction exists with the same id
+            try:
+                if self.closed:
+                    raise SessionClosedError('try to access connections set on a closed session %s' % self.id)
+                tx = self._txs[txid]
+            except KeyError:
+                rewriter = RQLRewriter(self)
+                tx = Transaction(txid, self, rewriter)
+                self._txs[txid] = tx
+        return tx
+
+    def set_tx(self, txid=None):
+        """set the default transaction of the current thread to <txid>
+
+        Transaction is created if necessary"""
+        if txid is None:
+            txid = threading.currentThread().getName()
+        self.__threaddata.tx = self.get_tx(txid)
+
+    @property
+    def _tx(self):
+        """default transaction for current session in current thread"""
+        try:
+            return self.__threaddata.tx
+        except AttributeError:
+            self.set_tx()
+            return self.__threaddata.tx
+
+    def get_option_value(self, option, foreid=None):
+        return self.repo.get_option_value(option, foreid)
+
     def transaction(self, free_cnxset=True):
         """return context manager to enter a transaction for the session: when
         exiting the `with` block on exception, call `session.rollback()`, else
@@ -265,40 +904,20 @@
         """
         return transaction(self, free_cnxset)
 
-    def set_tx_data(self, txid=None):
-        if txid is None:
-            txid = threading.currentThread().getName()
-        try:
-            self.__threaddata.txdata = self._tx_data[txid]
-        except KeyError:
-            self.__threaddata.txdata = self._tx_data[txid] = TransactionData(txid)
 
-    @property
-    def _threaddata(self):
-        try:
-            return self.__threaddata.txdata
-        except AttributeError:
-            self.set_tx_data()
-            return self.__threaddata.txdata
-
-    def get_option_value(self, option, foreid=None):
-        return self.repo.get_option_value(option, foreid)
-
+    @deprecated('[3.17] do not use hijack_user. create new Session object')
     def hijack_user(self, user):
         """return a fake request/session using specified user"""
         session = Session(user, self.repo)
-        threaddata = session._threaddata
-        threaddata.cnxset = self.cnxset
-        # we attributed a connections set, need to update ctx_count else it will be freed
-        # while undesired
-        threaddata.ctx_count = 1
+        tx = session._tx
+        tx.cnxset = self.cnxset
         # share pending_operations, else operation added in the hi-jacked
         # session such as SendMailOp won't ever be processed
-        threaddata.pending_operations = self.pending_operations
-        # everything in transaction_data should be copied back but the entity
+        tx.pending_operations = self.pending_operations
+        # everything in tx.data should be copied back except the entity
+        # type cache, which we drop to avoid security problems
-        threaddata.transaction_data = self.transaction_data.copy()
-        threaddata.transaction_data.pop('ecache', None)
+        tx.data = self._tx.data.copy()
+        tx.data.pop('ecache', None)
         return session
 
     def add_relation(self, fromeid, rtype, toeid):
@@ -323,7 +942,7 @@
         '''
         edited_entities = {}
         relations_dict = {}
-        with security_enabled(self, False, False):
+        with self.security_enabled(False, False):
             for rtype, eids in relations:
                 if self.vreg.schema[rtype].inlined:
                     for fromeid, toeid in eids:
@@ -352,7 +971,7 @@
         You may use this in hooks when you know both eids of the relation you
         want to delete.
         """
-        with security_enabled(self, False, False):
+        with self.security_enabled(False, False):
             if self.vreg.schema[rtype].inlined:
                 entity = self.entity_from_eid(fromeid)
                 entity.cw_attr_cache[rtype] = None
@@ -429,266 +1048,37 @@
 
     # resource accessors ######################################################
 
-    def system_sql(self, sql, args=None, rollback_on_failure=True):
-        """return a sql cursor on the system database"""
-        if sql.split(None, 1)[0].upper() != 'SELECT':
-            self.mode = 'write'
-        source = self.cnxset.source('system')
-        try:
-            return source.doexec(self, sql, args, rollback=rollback_on_failure)
-        except (source.OperationalError, source.InterfaceError):
-            if not rollback_on_failure:
-                raise
-            source.warning("trying to reconnect")
-            self.cnxset.reconnect(source)
-            return source.doexec(self, sql, args, rollback=rollback_on_failure)
-
-    def deleted_in_transaction(self, eid):
-        """return True if the entity of the given eid is being deleted in the
-        current transaction
-        """
-        return eid in self.transaction_data.get('pendingeids', ())
-
-    def added_in_transaction(self, eid):
-        """return True if the entity of the given eid is being created in the
-        current transaction
-        """
-        return eid in self.transaction_data.get('neweids', ())
-
-    def rtype_eids_rdef(self, rtype, eidfrom, eidto):
-        # use type_and_source_from_eid instead of type_from_eid for optimization
-        # (avoid two extra methods call)
-        subjtype = self.repo.type_and_source_from_eid(eidfrom, self)[0]
-        objtype = self.repo.type_and_source_from_eid(eidto, self)[0]
-        return self.vreg.schema.rschema(rtype).rdefs[(subjtype, objtype)]
+    system_sql = tx_meth('system_sql')
+    deleted_in_transaction = tx_meth('deleted_in_transaction')
+    added_in_transaction = tx_meth('added_in_transaction')
+    rtype_eids_rdef = tx_meth('rtype_eids_rdef')
 
     # security control #########################################################
 
-    DEFAULT_SECURITY = object() # evaluated to true by design
 
     def security_enabled(self, read=None, write=None):
-        return security_enabled(self, read=read, write=write)
-
-    def init_security(self, read, write):
-        if read is None:
-            oldread = None
-        else:
-            oldread = self.set_read_security(read)
-        if write is None:
-            oldwrite = None
-        else:
-            oldwrite = self.set_write_security(write)
-        self._threaddata.ctx_count += 1
-        return oldread, oldwrite
-
-    def reset_security(self, read, write):
-        txstore = self._threaddata
-        txstore.ctx_count -= 1
-        if txstore.ctx_count == 0:
-            self._clear_thread_storage(txstore)
-        else:
-            if read is not None:
-                self.set_read_security(read)
-            if write is not None:
-                self.set_write_security(write)
-
-    @property
-    def read_security(self):
-        """return a boolean telling if read security is activated or not"""
-        txstore = self._threaddata
-        if txstore is None:
-            return self.DEFAULT_SECURITY
-        try:
-            return txstore.read_security
-        except AttributeError:
-            txstore.read_security = self.DEFAULT_SECURITY
-            return txstore.read_security
-
-    def set_read_security(self, activated):
-        """[de]activate read security, returning the previous value set for
-        later restoration.
+        return _security_enabled(self, read=read, write=write)
 
-        you should usually use the `security_enabled` context manager instead
-        of this to change security settings.
-        """
-        txstore = self._threaddata
-        if txstore is None:
-            return self.DEFAULT_SECURITY
-        oldmode = getattr(txstore, 'read_security', self.DEFAULT_SECURITY)
-        txstore.read_security = activated
-        # dbapi_query used to detect hooks triggered by a 'dbapi' query (eg not
-        # issued on the session). This is tricky since we the execution model of
-        # a (write) user query is:
-        #
-        # repository.execute (security enabled)
-        #  \-> querier.execute
-        #       \-> repo.glob_xxx (add/update/delete entity/relation)
-        #            \-> deactivate security before calling hooks
-        #                 \-> WE WANT TO CHECK QUERY NATURE HERE
-        #                      \-> potentially, other calls to querier.execute
-        #
-        # so we can't rely on simply checking session.read_security, but
-        # recalling the first transition from DEFAULT_SECURITY to something
-        # else (False actually) is not perfect but should be enough
-        #
-        # also reset dbapi_query to true when we go back to DEFAULT_SECURITY
-        txstore.dbapi_query = (oldmode is self.DEFAULT_SECURITY
-                               or activated is self.DEFAULT_SECURITY)
-        return oldmode
-
-    @property
-    def write_security(self):
-        """return a boolean telling if write security is activated or not"""
-        txstore = self._threaddata
-        if txstore is None:
-            return self.DEFAULT_SECURITY
-        try:
-            return txstore.write_security
-        except AttributeError:
-            txstore.write_security = self.DEFAULT_SECURITY
-            return txstore.write_security
-
-    def set_write_security(self, activated):
-        """[de]activate write security, returning the previous value set for
-        later restoration.
-
-        you should usually use the `security_enabled` context manager instead
-        of this to change security settings.
-        """
-        txstore = self._threaddata
-        if txstore is None:
-            return self.DEFAULT_SECURITY
-        oldmode = getattr(txstore, 'write_security', self.DEFAULT_SECURITY)
-        txstore.write_security = activated
-        return oldmode
-
-    @property
-    def running_dbapi_query(self):
-        """return a boolean telling if it's triggered by a db-api query or by
-        a session query.
-
-        To be used in hooks, else may have a wrong value.
-        """
-        return getattr(self._threaddata, 'dbapi_query', True)
+    read_security = tx_attr('read_security', writable=True)
+    write_security = tx_attr('write_security', writable=True)
+    running_dbapi_query = tx_attr('running_dbapi_query')
 
     # hooks activation control #################################################
     # all hooks should be activated during normal execution
 
-    HOOKS_ALLOW_ALL = object()
-    HOOKS_DENY_ALL = object()
-
     def allow_all_hooks_but(self, *categories):
-        return hooks_control(self, self.HOOKS_ALLOW_ALL, *categories)
+        return _hooks_control(self, HOOKS_ALLOW_ALL, *categories)
     def deny_all_hooks_but(self, *categories):
-        return hooks_control(self, self.HOOKS_DENY_ALL, *categories)
-
-    @property
-    def hooks_mode(self):
-        return getattr(self._threaddata, 'hooks_mode', self.HOOKS_ALLOW_ALL)
-
-    def set_hooks_mode(self, mode):
-        assert mode is self.HOOKS_ALLOW_ALL or mode is self.HOOKS_DENY_ALL
-        oldmode = getattr(self._threaddata, 'hooks_mode', self.HOOKS_ALLOW_ALL)
-        self._threaddata.hooks_mode = mode
-        return oldmode
-
-    def init_hooks_mode_categories(self, mode, categories):
-        oldmode = self.set_hooks_mode(mode)
-        if mode is self.HOOKS_DENY_ALL:
-            changes = self.enable_hook_categories(*categories)
-        else:
-            changes = self.disable_hook_categories(*categories)
-        self._threaddata.ctx_count += 1
-        return oldmode, changes
+        return _hooks_control(self, HOOKS_DENY_ALL, *categories)
 
-    def reset_hooks_mode_categories(self, oldmode, mode, categories):
-        txstore = self._threaddata
-        txstore.ctx_count -= 1
-        if txstore.ctx_count == 0:
-            self._clear_thread_storage(txstore)
-        else:
-            try:
-                if categories:
-                    if mode is self.HOOKS_DENY_ALL:
-                        return self.disable_hook_categories(*categories)
-                    else:
-                        return self.enable_hook_categories(*categories)
-            finally:
-                self.set_hooks_mode(oldmode)
-
-    @property
-    def disabled_hook_categories(self):
-        try:
-            return getattr(self._threaddata, 'disabled_hook_cats')
-        except AttributeError:
-            cats = self._threaddata.disabled_hook_cats = set()
-            return cats
-
-    @property
-    def enabled_hook_categories(self):
-        try:
-            return getattr(self._threaddata, 'enabled_hook_cats')
-        except AttributeError:
-            cats = self._threaddata.enabled_hook_cats = set()
-            return cats
+    hooks_mode = tx_attr('hooks_mode')
 
-    def disable_hook_categories(self, *categories):
-        """disable the given hook categories:
-
-        - on HOOKS_DENY_ALL mode, ensure those categories are not enabled
-        - on HOOKS_ALLOW_ALL mode, ensure those categories are disabled
-        """
-        changes = set()
-        self.pruned_hooks_cache.clear()
-        if self.hooks_mode is self.HOOKS_DENY_ALL:
-            enabledcats = self.enabled_hook_categories
-            for category in categories:
-                if category in enabledcats:
-                    enabledcats.remove(category)
-                    changes.add(category)
-        else:
-            disabledcats = self.disabled_hook_categories
-            for category in categories:
-                if category not in disabledcats:
-                    disabledcats.add(category)
-                    changes.add(category)
-        return tuple(changes)
-
-    def enable_hook_categories(self, *categories):
-        """enable the given hook categories:
-
-        - on HOOKS_DENY_ALL mode, ensure those categories are enabled
-        - on HOOKS_ALLOW_ALL mode, ensure those categories are not disabled
-        """
-        changes = set()
-        self.pruned_hooks_cache.clear()
-        if self.hooks_mode is self.HOOKS_DENY_ALL:
-            enabledcats = self.enabled_hook_categories
-            for category in categories:
-                if category not in enabledcats:
-                    enabledcats.add(category)
-                    changes.add(category)
-        else:
-            disabledcats = self.disabled_hook_categories
-            for category in categories:
-                if category in disabledcats:
-                    disabledcats.remove(category)
-                    changes.add(category)
-        return tuple(changes)
-
-    def is_hook_category_activated(self, category):
-        """return a boolean telling if the given category is currently activated
-        or not
-        """
-        if self.hooks_mode is self.HOOKS_DENY_ALL:
-            return category in self.enabled_hook_categories
-        return category not in self.disabled_hook_categories
-
-    def is_hook_activated(self, hook):
-        """return a boolean telling if the given hook class is currently
-        activated or not
-        """
-        return self.is_hook_category_activated(hook.category)
+    disabled_hook_categories = tx_attr('disabled_hook_cats')
+    enabled_hook_categories = tx_attr('enabled_hook_cats')
+    disable_hook_categories = tx_meth('disable_hook_categories')
+    enable_hook_categories = tx_meth('enable_hook_categories')
+    is_hook_category_activated = tx_meth('is_hook_category_activated')
+    is_hook_activated = tx_meth('is_hook_activated')
 
     # connection management ###################################################
 
@@ -712,85 +1102,37 @@
         else: # mode == 'write'
             self.default_mode = 'read'
 
-    def get_mode(self):
-        return getattr(self._threaddata, 'mode', self.default_mode)
-    def set_mode(self, value):
-        self._threaddata.mode = value
-    mode = property(get_mode, set_mode,
-                    doc='transaction mode (read/write/transaction), resetted to'
-                    ' default_mode on commit / rollback')
-
-    def get_commit_state(self):
-        return getattr(self._threaddata, 'commit_state', None)
-    def set_commit_state(self, value):
-        self._threaddata.commit_state = value
-    commit_state = property(get_commit_state, set_commit_state)
+    mode = tx_attr('mode', writable=True)
+    commit_state = tx_attr('commit_state', writable=True)
 
     @property
     def cnxset(self):
         """connections set, set according to transaction mode for each query"""
         if self._closed:
             self.free_cnxset(True)
-            raise Exception('try to access connections set on a closed session %s' % self.id)
-        return getattr(self._threaddata, 'cnxset', None)
+            raise SessionClosedError('try to access connections set on a closed session %s' % self.id)
+        return self._tx.cnxset
 
     def set_cnxset(self):
         """the session need a connections set to execute some queries"""
-        with self._closed_lock:
+        with self._lock: # can probably be removed
             if self._closed:
                 self.free_cnxset(True)
-                raise Exception('try to set connections set on a closed session %s' % self.id)
-            if self.cnxset is None:
-                # get connections set first to avoid race-condition
-                self._threaddata.cnxset = cnxset = self.repo._get_cnxset()
-                self._threaddata.ctx_count += 1
-                try:
-                    cnxset.cnxset_set()
-                except Exception:
-                    self._threaddata.cnxset = None
-                    self.repo._free_cnxset(cnxset)
-                    raise
-                self._threads_in_transaction.add(
-                    (threading.currentThread(), cnxset) )
-            return self._threaddata.cnxset
-
-    def _free_thread_cnxset(self, thread, cnxset, force_close=False):
-        try:
-            self._threads_in_transaction.remove( (thread, cnxset) )
-        except KeyError:
-            # race condition on cnxset freeing (freed by commit or rollback vs
-            # close)
-            pass
-        else:
-            if force_close:
-                cnxset.reconnect()
-            else:
-                cnxset.cnxset_freed()
-            # free cnxset once everything is done to avoid race-condition
-            self.repo._free_cnxset(cnxset)
-
-    def free_cnxset(self, ignoremode=False):
-        """the session is no longer using its connections set, at least for some time"""
-        # cnxset may be none if no operation has been done since last commit
-        # or rollback
-        cnxset = getattr(self._threaddata, 'cnxset', None)
-        if cnxset is not None and (ignoremode or self.mode == 'read'):
-            # even in read mode, we must release the current transaction
-            self._free_thread_cnxset(threading.currentThread(), cnxset)
-            del self._threaddata.cnxset
-            self._threaddata.ctx_count -= 1
+                raise SessionClosedError('try to set connections set on a closed session %s' % self.id)
+            return self._tx.set_cnxset()
+    free_cnxset = tx_meth('free_cnxset')
 
     def _touch(self):
         """update latest session usage timestamp and reset mode to read"""
         self.timestamp = time()
-        self.local_perm_cache.clear() # XXX simply move in transaction_data, no?
+        self.local_perm_cache.clear() # XXX simply move in tx.data, no?
 
     # shared data handling ###################################################
 
     def get_shared_data(self, key, default=None, pop=False, txdata=False):
         """return value associated to `key` in session data"""
         if txdata:
-            data = self.transaction_data
+            data = self._tx.data
         else:
             data = self.data
         if pop:
@@ -801,7 +1143,7 @@
     def set_shared_data(self, key, value, txdata=False):
         """set value associated to `key` in session data"""
         if txdata:
-            self.transaction_data[key] = value
+            self._tx.data[key] = value
         else:
             self.data[key] = value
 
@@ -819,28 +1161,10 @@
         """return a rql cursor"""
         return self
 
-    def set_entity_cache(self, entity):
-        # XXX session level caching may be a pb with multiple repository
-        #     instances, but 1. this is probably not the only one :$ and 2. it
-        #     may be an acceptable risk. Anyway we could activate it or not
-        #     according to a configuration option
-        try:
-            self.transaction_data['ecache'].setdefault(entity.eid, entity)
-        except KeyError:
-            self.transaction_data['ecache'] = ecache = {}
-            ecache[entity.eid] = entity
-
-    def entity_cache(self, eid):
-        return self.transaction_data['ecache'][eid]
-
-    def cached_entities(self):
-        return self.transaction_data.get('ecache', {}).values()
-
-    def drop_entity_cache(self, eid=None):
-        if eid is None:
-            self.transaction_data.pop('ecache', None)
-        else:
-            del self.transaction_data['ecache'][eid]
+    set_entity_cache  = tx_meth('set_entity_cache')
+    entity_cache      = tx_meth('entity_cache')
+    cache_entities    = tx_meth('cached_entities')
+    drop_entity_cache = tx_meth('drop_entity_cache')
 
     def from_controller(self):
         """return the id (string) of the controller issuing the request (no
@@ -848,22 +1172,10 @@
         """
         return 'view'
 
-    def source_defs(self):
-        return self.repo.source_defs()
+    source_defs = tx_meth('source_defs')
+    describe = tx_meth('describe')
+    source_from_eid = tx_meth('source_from_eid')
 
-    def describe(self, eid, asdict=False):
-        """return a tuple (type, sourceuri, extid) for the entity with id <eid>"""
-        metas = self.repo.type_and_source_from_eid(eid, self)
-        if asdict:
-            return dict(zip(('type', 'source', 'extid', 'asource'), metas))
-       # XXX :-1 for cw compat, use asdict=True for full information
-        return metas[:-1]
-
-    # db-api like interface ###################################################
-
-    def source_from_eid(self, eid):
-        """return the source where the entity with id <eid> is located"""
-        return self.repo.source_from_eid(eid, self)
 
     def execute(self, rql, kwargs=None, eid_key=None, build_descr=True):
         """db-api like method directly linked to the querier execute method.
@@ -884,34 +1196,29 @@
         by _touch
         """
         try:
-            txstore = self.__threaddata.txdata
+            tx = self.__threaddata.tx
         except AttributeError:
             pass
         else:
             if free_cnxset:
                 self.free_cnxset()
-                if txstore.ctx_count == 0:
-                    self._clear_thread_storage(txstore)
+                if tx.ctx_count == 0:
+                    self._clear_thread_storage(tx)
                 else:
-                    self._clear_tx_storage(txstore)
+                    self._clear_tx_storage(tx)
             else:
-                self._clear_tx_storage(txstore)
+                self._clear_tx_storage(tx)
 
-    def _clear_thread_storage(self, txstore):
-        self._tx_data.pop(txstore.transactionid, None)
+    def _clear_thread_storage(self, tx):
+        self._txs.pop(tx.transactionid, None)
         try:
-            del self.__threaddata.txdata
+            del self.__threaddata.tx
         except AttributeError:
             pass
 
-    def _clear_tx_storage(self, txstore):
-        for name in ('commit_state', 'transaction_data',
-                     'pending_operations', '_rewriter',
-                     'pruned_hooks_cache'):
-            try:
-                delattr(txstore, name)
-            except AttributeError:
-                continue
+    def _clear_tx_storage(self, tx):
+        tx.clear()
+        tx._rewriter = RQLRewriter(self)
 
     def commit(self, free_cnxset=True, reset_pool=None):
         """commit the current session's transaction"""
@@ -937,7 +1244,7 @@
         debug = server.DEBUG & server.DBG_OPS
         try:
             # by default, operations are executed with security turned off
-            with security_enabled(self, False, False):
+            with self.security_enabled(False, False):
                 processed = []
                 self.commit_state = 'precommit'
                 if debug:
@@ -1008,7 +1315,7 @@
                  DeprecationWarning, stacklevel=2)
             free_cnxset = reset_pool
         # don't use self.cnxset, rollback may be called with _closed == True
-        cnxset = getattr(self._threaddata, 'cnxset', None)
+        cnxset = self._tx.cnxset
         if cnxset is None:
             self._clear_thread_data()
             self._touch()
@@ -1016,7 +1323,7 @@
             return
         try:
             # by default, operations are executed with security turned off
-            with security_enabled(self, False, False):
+            with self.security_enabled(False, False):
                 while self.pending_operations:
                     try:
                         operation = self.pending_operations.pop(0)
@@ -1033,97 +1340,65 @@
             self._clear_thread_data(free_cnxset)
 
     def close(self):
-        """do not close connections set on session close, since they are shared now"""
-        with self._closed_lock:
+        # do not close connections set on session close, since they are shared now
+        tracker = self._cnxset_tracker
+        with self._lock:
             self._closed = True
-        # copy since _threads_in_transaction maybe modified while waiting
-        for thread, cnxset in self._threads_in_transaction.copy():
-            if thread is threading.currentThread():
-                continue
-            self.info('waiting for thread %s', thread)
-            # do this loop/break instead of a simple join(10) in case thread is
-            # the main thread (in which case it will be removed from
-            # self._threads_in_transaction but still be alive...)
-            for i in xrange(10):
-                thread.join(1)
-                if not (thread.isAlive() and
-                        (thread, cnxset) in self._threads_in_transaction):
-                    break
-            else:
-                self.error('thread %s still alive after 10 seconds, will close '
-                           'session anyway', thread)
-                self._free_thread_cnxset(thread, cnxset, force_close=True)
+        tracker.close()
         self.rollback()
+        self.info('waiting for open transaction of session: %s', self)
+        timeout = 10
+        pendings = tracker.wait(timeout)
+        if pendings:
+            self.error('%i transaction still alive after 10 seconds, will close '
+                       'session anyway', len(pendings))
+            for txid in pendings:
+                tx = self._txs.get(txid)
+                if tx is not None:
+                    # drop tx.cnxset
+                    with tracker:
+                        try:
+                            cnxset = tx.cnxset
+                            if cnxset is None:
+                                continue
+                            tx.cnxset = None
+                        except RuntimeError:
+                            msg = 'issue while force free of cnxset in %s'
+                            self.error(msg, tx)
+                    # cnxset.reconnect() do an hard reset of the cnxset
+                    # it force it to be freed
+                    cnxset.reconnect()
+                    self.repo._free_cnxset(cnxset)
         del self.__threaddata
-        del self._tx_data
+        del self._txs
 
     @property
     def closed(self):
-        return not hasattr(self, '_tx_data')
+        return not hasattr(self, '_txs')
 
     # transaction data/operations management ##################################
 
-    @property
-    def transaction_data(self):
-        try:
-            return self._threaddata.transaction_data
-        except AttributeError:
-            self._threaddata.transaction_data = {}
-            return self._threaddata.transaction_data
-
-    @property
-    def pending_operations(self):
-        try:
-            return self._threaddata.pending_operations
-        except AttributeError:
-            self._threaddata.pending_operations = []
-            return self._threaddata.pending_operations
-
-    @property
-    def pruned_hooks_cache(self):
-        try:
-            return self._threaddata.pruned_hooks_cache
-        except AttributeError:
-            self._threaddata.pruned_hooks_cache = {}
-            return self._threaddata.pruned_hooks_cache
-
-    def add_operation(self, operation, index=None):
-        """add an operation"""
-        if index is None:
-            self.pending_operations.append(operation)
-        else:
-            self.pending_operations.insert(index, operation)
+    transaction_data = tx_attr('data')
+    pending_operations = tx_attr('pending_operations')
+    pruned_hooks_cache = tx_attr('pruned_hooks_cache')
+    add_operation      = tx_meth('add_operation')
 
     # undo support ############################################################
 
-    def ertype_supports_undo(self, ertype):
-        return self.undo_actions  and ertype not in NO_UNDO_TYPES
+    ertype_supports_undo = tx_meth('ertype_supports_undo')
+    transaction_inc_action_counter = tx_meth('transaction_inc_action_counter')
 
     def transaction_uuid(self, set=True):
         try:
-            return self.transaction_data['tx_uuid']
+            return self._tx.transaction_uuid(set=set)
         except KeyError:
-            if not set:
-                return
-            self.transaction_data['tx_uuid'] = uuid = uuid4().hex
+            self._tx.data['tx_uuid'] = uuid = uuid4().hex
             self.repo.system_source.start_undoable_transaction(self, uuid)
             return uuid
 
-    def transaction_inc_action_counter(self):
-        num = self.transaction_data.setdefault('tx_action_count', 0) + 1
-        self.transaction_data['tx_action_count'] = num
-        return num
-
     # querier helpers #########################################################
 
-    @property
-    def rql_rewriter(self):
-        # in thread local storage since the rewriter isn't thread safe
-        try:
-            return self._threaddata._rewriter
-        except AttributeError:
-            self._threaddata._rewriter = RQLRewriter(self)
-            return self._threaddata._rewriter
+    rql_rewriter = tx_attr('_rewriter')
 
     # deprecated ###############################################################
 
@@ -1144,32 +1419,15 @@
     def reset_pool(self):
         return self.free_cnxset()
 
-    @deprecated("[3.7] execute is now unsafe by default in hooks/operation. You"
-                " can also control security with the security_enabled context "
-                "manager")
-    def unsafe_execute(self, rql, kwargs=None, eid_key=None, build_descr=True,
-                       propagate=False):
-        """like .execute but with security checking disabled (this method is
-        internal to the server, it's not part of the db-api)
-        """
-        with security_enabled(self, read=False, write=False):
-            return self.execute(rql, kwargs, eid_key, build_descr)
-
-    @property
-    @deprecated("[3.7] is_super_session is deprecated, test "
-                "session.read_security and or session.write_security")
-    def is_super_session(self):
-        return not self.read_security or not self.write_security
-
-    @deprecated("[3.7] session is actual session")
-    def actual_session(self):
-        """return the original parent session if any, else self"""
-        return self
-
     # these are overridden by set_log_methods below
     # only defining here to prevent pylint from complaining
     info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None
 
+Session.HOOKS_ALLOW_ALL = HOOKS_ALLOW_ALL
+Session.HOOKS_DENY_ALL = HOOKS_DENY_ALL
+Session.DEFAULT_SECURITY = DEFAULT_SECURITY
+
+
 
 class InternalSession(Session):
     """special session created internaly by the repository"""
@@ -1195,7 +1453,7 @@
         if self.repo.shutting_down:
             self.free_cnxset(True)
             raise ShuttingDown('repository is shutting down')
-        return getattr(self._threaddata, 'cnxset', None)
+        return self._tx.cnxset
 
 
 class InternalManager(object):
--- a/server/sources/__init__.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/server/sources/__init__.py	Fri Jun 14 16:26:25 2013 +0200
@@ -140,17 +140,20 @@
         return '<%s %s source %s @%#x>' % (self.uri, self.__class__.__name__,
                                            self.eid, id(self))
 
-    def __cmp__(self, other):
+    def __lt__(self, other):
         """simple comparison function to get predictable source order, with the
         system source at last
         """
         if self.uri == other.uri:
-            return 0
+            return False
         if self.uri == 'system':
-            return 1
+            return False
         if other.uri == 'system':
-            return -1
-        return cmp(self.uri, other.uri)
+            return True
+        return self.uri < other.uri
+
+    def __eq__(self, other):
+        return self.uri == other.uri
 
     def backup(self, backupfile, confirm, format='native'):
         """method called to create a backup of source's data"""
--- a/server/sources/extlite.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/server/sources/extlite.py	Fri Jun 14 16:26:25 2013 +0200
@@ -247,7 +247,7 @@
         entity is deleted.
         """
         attrs = {'cw_eid': entity.eid}
-        sql = self.sqladapter.sqlgen.delete(SQL_PREFIX + entity.__regid__, attrs)
+        sql = self.sqladapter.sqlgen.delete(SQL_PREFIX + entity.cw_etype, attrs)
         self.doexec(session, sql, attrs)
 
     def local_add_relation(self, session, subject, rtype, object):
--- a/server/sources/ldapfeed.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/server/sources/ldapfeed.py	Fri Jun 14 16:26:25 2013 +0200
@@ -17,10 +17,21 @@
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """cubicweb ldap feed source"""
 
-from cubicweb.cwconfig import merge_options
+import ldap
+from ldap.filter import filter_format
+
+from logilab.common.configuration import merge_options
+
 from cubicweb.server.sources import datafeed
-from cubicweb.server import ldaputils
+from cubicweb.server import ldaputils, utils
+from cubicweb import Binary
+
+_ = unicode
 
+# search scopes
+ldapscope = {'BASE': ldap.SCOPE_BASE,
+             'ONELEVEL': ldap.SCOPE_ONELEVEL,
+             'SUBTREE': ldap.SCOPE_SUBTREE}
 
 class LDAPFeedSource(ldaputils.LDAPSourceMixIn,
                      datafeed.DataFeedSource):
@@ -31,7 +42,65 @@
     support_entities = {'CWUser': False}
     use_cwuri_as_url = False
 
+    options_group = (
+        ('group-base-dn',
+         {'type' : 'string',
+          'default': '',
+          'help': 'base DN to lookup for groups; disable group importation mechanism if unset',
+          'group': 'ldap-source', 'level': 1,
+          }),
+        ('group-scope',
+         {'type' : 'choice',
+          'default': 'ONELEVEL',
+          'choices': ('BASE', 'ONELEVEL', 'SUBTREE'),
+          'help': 'group search scope (valid values: "BASE", "ONELEVEL", "SUBTREE")',
+          'group': 'ldap-source', 'level': 1,
+          }),
+        ('group-classes',
+         {'type' : 'csv',
+          'default': ('top', 'posixGroup'),
+          'help': 'classes of group',
+          'group': 'ldap-source', 'level': 1,
+          }),
+        ('group-filter',
+         {'type': 'string',
+          'default': '',
+          'help': 'additional filters to be set in the ldap query to find valid groups',
+          'group': 'ldap-source', 'level': 2,
+          }),
+        ('group-attrs-map',
+         {'type' : 'named',
+          'default': {'cn': 'name', 'memberUid': 'member'},
+          'help': 'map from ldap group attributes to cubicweb attributes',
+          'group': 'ldap-source', 'level': 1,
+          }),
+    )
+
     options = merge_options(datafeed.DataFeedSource.options
-                            + ldaputils.LDAPSourceMixIn.options,
-                            optgroup='ldap-source')
+                            + ldaputils.LDAPSourceMixIn.options
+                            + options_group,
+                            optgroup='ldap-source',)
 
+    def update_config(self, source_entity, typedconfig):
+        """update configuration from source entity. `typedconfig` is config
+        properly typed with defaults set
+        """
+        super(LDAPFeedSource, self).update_config(source_entity, typedconfig)
+        self.group_base_dn = str(typedconfig['group-base-dn'])
+        self.group_base_scope = ldapscope[typedconfig['group-scope']]
+        self.group_attrs = typedconfig['group-attrs-map']
+        self.group_attrs = {'dn': 'eid', 'modifyTimestamp': 'modification_date'}
+        self.group_attrs.update(typedconfig['group-attrs-map'])
+        self.group_rev_attrs = dict((v, k) for k, v in self.group_attrs.iteritems())
+        self.group_base_filters = [filter_format('(%s=%s)', ('objectClass', o))
+                                   for o in typedconfig['group-classes']]
+        if typedconfig['group-filter']:
+            self.group_base_filters.append(typedconfig['group-filter'])
+
+    def _process_ldap_item(self, dn, iterator):
+        itemdict = super(LDAPFeedSource, self)._process_ldap_item(dn, iterator)
+        # we expect memberUid to be a list of user ids, make sure of it
+        member = self.group_rev_attrs['member']
+        if isinstance(itemdict.get(member), basestring):
+            itemdict[member] = [itemdict[member]]
+        return itemdict
--- a/server/sources/ldapuser.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/server/sources/ldapuser.py	Fri Jun 14 16:26:25 2013 +0200
@@ -27,6 +27,7 @@
 
 from rql.nodes import Relation, VariableRef, Constant, Function
 
+import warnings
 from cubicweb import UnknownEid, RepositoryError
 from cubicweb.server import ldaputils
 from cubicweb.server.utils import cartesian_product
@@ -45,6 +46,11 @@
               }
 
 
+# module is lazily imported
+warnings.warn('Imminent drop of ldapuser. Switch to ldapfeed now!',
+              DeprecationWarning)
+
+
 class LDAPUserSource(ldaputils.LDAPSourceMixIn, AbstractSource):
     """LDAP read-only CWUser source"""
     support_entities = {'CWUser': False}
--- a/server/sources/native.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/server/sources/native.py	Fri Jun 14 16:26:25 2013 +0200
@@ -197,7 +197,7 @@
     sentity, oentity = entities
     try:
         rschema = session.vreg.schema.rschema(rtype)
-        rdef = rschema.rdefs[(sentity.__regid__, oentity.__regid__)]
+        rdef = rschema.rdefs[(sentity.cw_etype, oentity.cw_etype)]
     except KeyError:
         raise _UndoException(session._(
             "Can't restore relation %(rtype)s between %(subj)s and "
@@ -370,8 +370,12 @@
     def backup(self, backupfile, confirm, format='native'):
         """method called to create a backup of the source's data"""
         if format == 'portable':
-            self.repo.fill_schema()
-            self.set_schema(self.repo.schema)
+            # ensure the schema is the one stored in the database: if repository
+            # started in quick_start mode, the file system's one has been loaded
+            # so force reload
+            if self.repo.config.quick_start:
+                self.repo.set_schema(self.repo.deserialize_schema(),
+                                     resetvreg=False)
             helper = DatabaseIndependentBackupRestore(self)
             self.close_source_connections()
             try:
@@ -630,38 +634,38 @@
         """add a new entity to the source"""
         with self._storage_handler(entity, 'added'):
             attrs = self.preprocess_entity(entity)
-            sql = self.sqlgen.insert(SQL_PREFIX + entity.__regid__, attrs)
+            sql = self.sqlgen.insert(SQL_PREFIX + entity.cw_etype, attrs)
             self.doexec(session, sql, attrs)
-            if session.ertype_supports_undo(entity.__regid__):
+            if session.ertype_supports_undo(entity.cw_etype):
                 self._record_tx_action(session, 'tx_entity_actions', 'C',
-                                       etype=entity.__regid__, eid=entity.eid)
+                                       etype=entity.cw_etype, eid=entity.eid)
 
     def update_entity(self, session, entity):
         """replace an entity in the source"""
         with self._storage_handler(entity, 'updated'):
             attrs = self.preprocess_entity(entity)
-            if session.ertype_supports_undo(entity.__regid__):
+            if session.ertype_supports_undo(entity.cw_etype):
                 changes = self._save_attrs(session, entity, attrs)
                 self._record_tx_action(session, 'tx_entity_actions', 'U',
-                                       etype=entity.__regid__, eid=entity.eid,
+                                       etype=entity.cw_etype, eid=entity.eid,
                                        changes=self._binary(dumps(changes)))
-            sql = self.sqlgen.update(SQL_PREFIX + entity.__regid__, attrs,
+            sql = self.sqlgen.update(SQL_PREFIX + entity.cw_etype, attrs,
                                      ['cw_eid'])
             self.doexec(session, sql, attrs)
 
     def delete_entity(self, session, entity):
         """delete an entity from the source"""
         with self._storage_handler(entity, 'deleted'):
-            if session.ertype_supports_undo(entity.__regid__):
+            if session.ertype_supports_undo(entity.cw_etype):
                 attrs = [SQL_PREFIX + r.type
                          for r in entity.e_schema.subject_relations()
                          if (r.final or r.inlined) and not r in VIRTUAL_RTYPES]
                 changes = self._save_attrs(session, entity, attrs)
                 self._record_tx_action(session, 'tx_entity_actions', 'D',
-                                       etype=entity.__regid__, eid=entity.eid,
+                                       etype=entity.cw_etype, eid=entity.eid,
                                        changes=self._binary(dumps(changes)))
             attrs = {'cw_eid': entity.eid}
-            sql = self.sqlgen.delete(SQL_PREFIX + entity.__regid__, attrs)
+            sql = self.sqlgen.delete(SQL_PREFIX + entity.cw_etype, attrs)
             self.doexec(session, sql, attrs)
 
     def add_relation(self, session, subject, rtype, object, inlined=False):
@@ -978,7 +982,7 @@
             assert isinstance(extid, str)
             extid = b64encode(extid)
         uri = 'system' if source.copy_based_source else source.uri
-        attrs = {'type': entity.__regid__, 'eid': entity.eid, 'extid': extid,
+        attrs = {'type': entity.cw_etype, 'eid': entity.eid, 'extid': extid,
                  'source': uri, 'asource': source.uri, 'mtime': datetime.utcnow()}
         self._handle_insert_entity_sql(session, self.sqlgen.insert('entities', attrs), attrs)
         # insert core relations: is, is_instance_of and cw_source
@@ -997,7 +1001,7 @@
             self._handle_is_relation_sql(session, 'INSERT INTO cw_source_relation(eid_from,eid_to) VALUES (%s,%s)',
                                          (entity.eid, source.eid))
         # now we can update the full text index
-        if self.do_fti and self.need_fti_indexation(entity.__regid__):
+        if self.do_fti and self.need_fti_indexation(entity.cw_etype):
             if complete:
                 entity.complete(entity.e_schema.indexable_attributes())
             self.index_entity(session, entity=entity)
@@ -1009,7 +1013,7 @@
             # one indexable attribute
             self.index_entity(session, entity=entity)
         # update entities.mtime.
-        # XXX Only if entity.__regid__ in self.multisources_etypes?
+        # XXX Only if entity.cw_etype in self.multisources_etypes?
         attrs = {'eid': entity.eid, 'mtime': datetime.utcnow()}
         self.doexec(session, self.sqlgen.update('entities', attrs, ['eid']), attrs)
 
@@ -1191,7 +1195,7 @@
         attributes of the entity
         """
         restr = {'cw_eid': entity.eid}
-        sql = self.sqlgen.select(SQL_PREFIX + entity.__regid__, restr, attrs)
+        sql = self.sqlgen.select(SQL_PREFIX + entity.cw_etype, restr, attrs)
         cu = self.doexec(session, sql, restr)
         values = dict(zip(attrs, cu.fetchone()))
         # ensure backend specific binary are converted back to string
@@ -1302,7 +1306,7 @@
         # restore record in entities (will update fti if needed)
         self.add_info(session, entity, self, None, True)
         # remove record from deleted_entities if entity's type is multi-sources
-        if entity.__regid__ in self.multisources_etypes:
+        if entity.cw_etype in self.multisources_etypes:
             self.doexec(session,
                         'DELETE FROM deleted_entities WHERE eid=%s' % eid)
         self.repo.hm.call_hooks('after_add_entity', session, entity=entity)
@@ -1365,7 +1369,7 @@
         # XXX check removal of inlined relation?
         # delete the entity
         attrs = {'cw_eid': eid}
-        sql = self.sqlgen.delete(SQL_PREFIX + entity.__regid__, attrs)
+        sql = self.sqlgen.delete(SQL_PREFIX + entity.cw_etype, attrs)
         self.doexec(session, sql, attrs)
         # remove record from entities (will update fti if needed)
         self.delete_info_multi(session, [entity], self.uri)
@@ -1385,7 +1389,7 @@
         self._reedit_entity(entity, action.changes, err)
         entity.cw_edited.check()
         self.repo.hm.call_hooks('before_update_entity', session, entity=entity)
-        sql = self.sqlgen.update(SQL_PREFIX + entity.__regid__, action.changes,
+        sql = self.sqlgen.update(SQL_PREFIX + entity.cw_etype, action.changes,
                                  ['cw_eid'])
         self.doexec(session, sql, action.changes)
         self.repo.hm.call_hooks('after_update_entity', session, entity=entity)
@@ -1403,7 +1407,7 @@
             rschema = rdef.rtype
             if rschema.inlined:
                 sql = 'SELECT 1 FROM cw_%s WHERE cw_eid=%s and cw_%s=%s'\
-                      % (sentity.__regid__, subj, rtype, obj)
+                      % (sentity.cw_etype, subj, rtype, obj)
             else:
                 sql = 'SELECT 1 FROM %s_relation WHERE eid_from=%s and eid_to=%s'\
                       % (rtype, subj, obj)
--- a/server/sources/remoterql.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/server/sources/remoterql.py	Fri Jun 14 16:26:25 2013 +0200
@@ -415,7 +415,7 @@
             self._query_cache.clear()
             return
         cu = session.cnxset[self.uri]
-        cu.execute('DELETE %s X WHERE X eid %%(x)s' % entity.__regid__,
+        cu.execute('DELETE %s X WHERE X eid %%(x)s' % entity.cw_etype,
                    {'x': self.repo.eid2extid(self, entity.eid, session)})
         self._query_cache.clear()
 
--- a/server/sources/rql2sql.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/server/sources/rql2sql.py	Fri Jun 14 16:26:25 2013 +0200
@@ -1506,15 +1506,14 @@
         value = constant.value
         if constant.type == 'etype':
             return value
-        if constant.type == 'Int': # XXX Float?
+        # don't substitute int, causes pb when used as sorting column number
+        if constant.type == 'Int':
             return str(value)
         if constant.type in ('Date', 'Datetime'):
             rel = constant.relation()
             if rel is not None:
                 rel._q_needcast = value
             return self.keyword_map[value]()
-        if constant.type == 'Boolean':
-            return str(self.dbhelper.boolean_value(value))
         if constant.type == 'Substitute':
             try:
                 # we may found constant from simplified var in varmap
--- a/server/sources/storages.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/server/sources/storages.py	Fri Jun 14 16:26:25 2013 +0200
@@ -239,7 +239,7 @@
         sysource = entity._cw.cnxset.source('system')
         cu = sysource.doexec(entity._cw,
                              'SELECT cw_%s FROM cw_%s WHERE cw_eid=%s' % (
-                             attr, entity.__regid__, entity.eid))
+                             attr, entity.cw_etype, entity.eid))
         rawvalue = cu.fetchone()[0]
         if rawvalue is None: # no previous value
             return None
@@ -253,7 +253,7 @@
         session = entity._cw
         source = session.repo.system_source
         attrs = source.preprocess_entity(entity)
-        sql = source.sqlgen.update('cw_' + entity.__regid__, attrs,
+        sql = source.sqlgen.update('cw_' + entity.cw_etype, attrs,
                                    ['cw_eid'])
         source.doexec(session, sql, attrs)
         entity.cw_edited = None
--- a/server/sqlutils.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/server/sqlutils.py	Fri Jun 14 16:26:25 2013 +0200
@@ -297,12 +297,16 @@
         """
         attrs = {}
         eschema = entity.e_schema
+        converters = getattr(self.dbhelper, 'TYPE_CONVERTERS', {})
         for attr, value in entity.cw_edited.iteritems():
             if value is not None and eschema.subjrels[attr].final:
                 atype = str(entity.e_schema.destination(attr))
-                if atype == 'Boolean':
-                    value = self.dbhelper.boolean_value(value)
-                elif atype == 'Password':
+                if atype in converters:
+                    # It is easier to modify preprocess_entity rather
+                    # than add_entity (native) as this behavior
+                    # may also be used for update.
+                    value = converters[atype](value)
+                elif atype == 'Password': # XXX could be done using a TYPE_CONVERTERS callback
                     # if value is a Binary instance, this mean we got it
                     # from a query result and so it is already encrypted
                     if isinstance(value, Binary):
@@ -310,16 +314,6 @@
                     else:
                         value = crypt_password(value)
                     value = self._binary(value)
-                # XXX needed for sqlite but I don't think it is for other backends
-                # Note: use is __class__ since issubclass(datetime, date)
-                elif atype in ('Datetime', 'TZDatetime') and type(value) is date:
-                    value = todatetime(value)
-                elif atype == 'Date' and isinstance(value, datetime):
-                    value = todate(value)
-                elif atype == 'TZDatetime' and getattr(value, 'tzinfo', None):
-                    value = utcdatetime(value)
-                elif atype == 'TZTime' and getattr(value, 'tzinfo', None):
-                    value = utctime(value)
                 elif isinstance(value, Binary):
                     value = self._binary(value.getvalue())
             attrs[SQL_PREFIX+str(attr)] = value
--- a/server/ssplanner.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/server/ssplanner.py	Fri Jun 14 16:26:25 2013 +0200
@@ -22,7 +22,7 @@
 from rql.stmts import Union, Select
 from rql.nodes import Constant, Relation
 
-from cubicweb import QueryError, typed_eid
+from cubicweb import QueryError
 from cubicweb.schema import VIRTUAL_RTYPES
 from cubicweb.rqlrewrite import add_types_restriction
 from cubicweb.server.edition import EditedEntity
@@ -84,7 +84,7 @@
             and rel.children[1].operator == '='):
             lhs, rhs = rel.get_variable_parts()
             if isinstance(rhs, Constant):
-                eid = typed_eid(rhs.eval(plan.args))
+                eid = int(rhs.eval(plan.args))
                 # check read permission here since it may not be done by
                 # the generated select substep if not emited (eg nothing
                 # to be selected)
@@ -521,7 +521,7 @@
         """execute this step"""
         results = self.execute_child()
         if results:
-            todelete = frozenset(typed_eid(eid) for eid, in results)
+            todelete = frozenset(int(eid) for eid, in results)
             session = self.plan.session
             session.repo.glob_delete_entities(session, todelete)
         return results
@@ -567,7 +567,7 @@
                 lhsval = _handle_relterm(lhsinfo, row, newrow)
                 rhsval = _handle_relterm(rhsinfo, row, newrow)
                 if rschema.final or rschema.inlined:
-                    eid = typed_eid(lhsval)
+                    eid = int(lhsval)
                     try:
                         edited = edefs[eid]
                     except KeyError:
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/data-schemaserial/bootstrap_cubes	Fri Jun 14 16:26:25 2013 +0200
@@ -0,0 +1,1 @@
+card,comment,folder,tag,basket,email,file,localperms
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/data-schemaserial/schema.py	Fri Jun 14 16:26:25 2013 +0200
@@ -0,0 +1,256 @@
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+
+from yams.buildobjs import (EntityType, RelationType, RelationDefinition,
+                            SubjectRelation, RichString, String, Int, Float,
+                            Boolean, Datetime, TZDatetime, Bytes)
+from yams.constraints import SizeConstraint
+from cubicweb.schema import (WorkflowableEntityType,
+                             RQLConstraint, RQLUniqueConstraint,
+                             ERQLExpression, RRQLExpression)
+
+from yams.buildobjs import make_type
+BabarTestType = make_type('BabarTestType')
+
+
+class Affaire(WorkflowableEntityType):
+    __permissions__ = {
+        'read':   ('managers',
+                   ERQLExpression('X owned_by U'), ERQLExpression('X concerne S?, S owned_by U')),
+        'add':    ('managers', ERQLExpression('X concerne S, S owned_by U')),
+        'update': ('managers', 'owners', ERQLExpression('X in_state S, S name in ("pitetre", "en cours")')),
+        'delete': ('managers', 'owners', ERQLExpression('X concerne S, S owned_by U')),
+        }
+
+    ref = String(fulltextindexed=True, indexed=True,
+                 constraints=[SizeConstraint(16)])
+    sujet = String(fulltextindexed=True,
+                   constraints=[SizeConstraint(256)])
+    descr = RichString(fulltextindexed=True,
+                       description=_('more detailed description'))
+
+    duration = Int()
+    invoiced = Float()
+    opt_attr = Bytes()
+
+    depends_on = SubjectRelation('Affaire')
+    require_permission = SubjectRelation('CWPermission')
+    concerne = SubjectRelation(('Societe', 'Note'))
+    todo_by = SubjectRelation('Personne', cardinality='?*')
+    documented_by = SubjectRelation('Card')
+
+
+class Societe(EntityType):
+    __unique_together__ = [('nom', 'type', 'cp')]
+    __permissions__ = {
+        'read': ('managers', 'users', 'guests'),
+        'update': ('managers', 'owners', ERQLExpression('U login L, X nom L')),
+        'delete': ('managers', 'owners', ERQLExpression('U login L, X nom L')),
+        'add': ('managers', 'users',)
+        }
+
+    nom  = String(maxsize=64, fulltextindexed=True)
+    web  = String(maxsize=128)
+    type  = String(maxsize=128) # attribute in common with Note
+    tel  = Int()
+    fax  = Int()
+    rncs = String(maxsize=128)
+    ad1  = String(maxsize=128)
+    ad2  = String(maxsize=128)
+    ad3  = String(maxsize=128)
+    cp   = String(maxsize=12)
+    ville= String(maxsize=32)
+
+
+class Division(Societe):
+    __specializes_schema__ = True
+
+class SubDivision(Division):
+    __specializes_schema__ = True
+
+class travaille_subdivision(RelationDefinition):
+    subject = 'Personne'
+    object = 'SubDivision'
+
+from cubicweb.schemas.base import CWUser
+CWUser.get_relations('login').next().fulltextindexed = True
+
+class Note(WorkflowableEntityType):
+    date = String(maxsize=10)
+    type = String(maxsize=6)
+    para = String(maxsize=512,
+                  __permissions__ = {
+                      'read':   ('managers', 'users', 'guests'),
+                      'update': ('managers', ERQLExpression('X in_state S, S name "todo"')),
+                      })
+
+    migrated_from = SubjectRelation('Note')
+    attachment = SubjectRelation('File')
+    inline1 = SubjectRelation('Affaire', inlined=True, cardinality='?*',
+                              constraints=[RQLUniqueConstraint('S type T, S inline1 A1, A1 todo_by C, '
+                                                              'Y type T, Y inline1 A2, A2 todo_by C',
+                                                               'S,Y')])
+    todo_by = SubjectRelation('CWUser')
+
+class Personne(EntityType):
+    __unique_together__ = [('nom', 'prenom', 'inline2')]
+    nom    = String(fulltextindexed=True, required=True, maxsize=64)
+    prenom = String(fulltextindexed=True, maxsize=64)
+    sexe   = String(maxsize=1, default='M', fulltextindexed=True)
+    promo  = String(vocabulary=('bon','pasbon'))
+    titre  = String(fulltextindexed=True, maxsize=128)
+    adel   = String(maxsize=128)
+    ass    = String(maxsize=128)
+    web    = String(maxsize=128)
+    tel    = Int()
+    fax    = Int()
+    datenaiss = Datetime()
+    tzdatenaiss = TZDatetime()
+    test   = Boolean(__permissions__={
+        'read': ('managers', 'users', 'guests'),
+        'update': ('managers',),
+        })
+    description = String()
+    firstname = String(fulltextindexed=True, maxsize=64)
+
+    concerne = SubjectRelation('Affaire')
+    connait = SubjectRelation('Personne')
+    inline2 = SubjectRelation('Affaire', inlined=True, cardinality='?*')
+
+    custom_field_of_jungle = BabarTestType(jungle_speed=42)
+
+
+class Old(EntityType):
+    name = String()
+
+
+class connait(RelationType):
+    symmetric = True
+
+class concerne(RelationType):
+    __permissions__ = {
+        'read':   ('managers', 'users', 'guests'),
+        'add':    ('managers', RRQLExpression('U has_update_permission S')),
+        'delete': ('managers', RRQLExpression('O owned_by U')),
+        }
+
+class travaille(RelationDefinition):
+    __permissions__ = {
+        'read':   ('managers', 'users', 'guests'),
+        'add':    ('managers', RRQLExpression('U has_update_permission S')),
+        'delete': ('managers', RRQLExpression('O owned_by U')),
+        }
+    subject = 'Personne'
+    object = 'Societe'
+
+class comments(RelationDefinition):
+    subject = 'Comment'
+    object = 'Personne'
+
+class fiche(RelationDefinition):
+    inlined = True
+    subject = 'Personne'
+    object = 'Card'
+    cardinality = '??'
+
+class multisource_inlined_rel(RelationDefinition):
+    inlined = True
+    cardinality = '?*'
+    subject = ('Card', 'Note')
+    object = ('Affaire', 'Note')
+
+class multisource_rel(RelationDefinition):
+    subject = ('Card', 'Note')
+    object = 'Note'
+
+class multisource_crossed_rel(RelationDefinition):
+    subject = ('Card', 'Note')
+    object = 'Note'
+
+
+class see_also_1(RelationDefinition):
+    name = 'see_also'
+    subject = object = 'Folder'
+
+class see_also_2(RelationDefinition):
+    name = 'see_also'
+    subject = ('Bookmark', 'Note')
+    object = ('Bookmark', 'Note')
+
+class evaluee(RelationDefinition):
+    subject = ('Personne', 'CWUser', 'Societe')
+    object = ('Note')
+
+class ecrit_par(RelationType):
+    inlined = True
+
+class ecrit_par_1(RelationDefinition):
+    name = 'ecrit_par'
+    subject = 'Note'
+    object ='Personne'
+    constraints = [RQLConstraint('E concerns P, S version_of P')]
+    cardinality = '?*'
+
+class ecrit_par_2(RelationDefinition):
+    name = 'ecrit_par'
+    subject = 'Note'
+    object ='CWUser'
+    cardinality='?*'
+
+
+class copain(RelationDefinition):
+    subject = object = 'CWUser'
+
+class tags(RelationDefinition):
+    subject = 'Tag'
+    object = ('CWUser', 'CWGroup', 'State', 'Note', 'Card', 'Affaire')
+
+class filed_under(RelationDefinition):
+    subject = ('Note', 'Affaire')
+    object = 'Folder'
+
+class require_permission(RelationDefinition):
+    subject = ('Card', 'Note', 'Personne')
+    object = 'CWPermission'
+
+class require_state(RelationDefinition):
+    subject = 'CWPermission'
+    object = 'State'
+
+class personne_composite(RelationDefinition):
+    subject='Personne'
+    object='Personne'
+    composite='subject'
+
+class personne_inlined(RelationDefinition):
+    subject='Personne'
+    object='Personne'
+    cardinality='?*'
+    inlined=True
+
+
+class login_user(RelationDefinition):
+    subject = 'Personne'
+    object = 'CWUser'
+    cardinality = '??'
+
+class ambiguous_inlined(RelationDefinition):
+    subject = ('Affaire', 'Note')
+    object = 'CWUser'
+    inlined = True
+    cardinality = '?*'
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/data-schemaserial/site_cubicweb.py	Fri Jun 14 16:26:25 2013 +0200
@@ -0,0 +1,30 @@
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+
+from logilab.database import FunctionDescr
+from logilab.database.sqlite import register_sqlite_pyfunc
+from rql.utils import register_function
+
+class DUMB_SORT(FunctionDescr):
+    pass
+
+register_function(DUMB_SORT)
+def dumb_sort(something):
+    return something
+register_sqlite_pyfunc(dumb_sort)
+
--- a/server/test/data/ldap_test.ldif	Fri Jun 14 16:13:24 2013 +0200
+++ b/server/test/data/ldap_test.ldif	Fri Jun 14 16:26:25 2013 +0200
@@ -10,13 +10,31 @@
 ou: People
 structuralObjectClass: organizationalUnit
 
+dn: ou=Group,dc=cubicweb,dc=test
+objectClass: organizationalUnit
+ou: Group
+
+dn: cn=logilab,ou=Group,dc=cubicweb,dc=test
+gidNumber: 2000
+objectClass: posixGroup
+objectClass: top
+cn: logilab
+memberUid: adim
+
+dn: cn=dir,ou=Group,dc=cubicweb,dc=test
+gidNumber: 2002
+objectClass: posixGroup
+objectClass: top
+cn: dir
+memberUid: adim
+memberUid: syt
+
 dn: uid=syt,ou=People,dc=cubicweb,dc=test
 loginShell: /bin/bash
-objectClass: inetOrgPerson
+objectClass: OpenLDAPperson
 objectClass: posixAccount
 objectClass: top
 objectClass: shadowAccount
-structuralObjectClass: inetOrgPerson
 cn: Sylvain Thenault
 sn: Thenault
 shadowMax: 99999
@@ -35,7 +53,7 @@
 
 dn: uid=adim,ou=People,dc=cubicweb,dc=test
 loginShell: /bin/bash
-objectClass: inetOrgPerson
+objectClass: OpenLDAPperson
 objectClass: posixAccount
 objectClass: top
 objectClass: shadowAccount
@@ -46,7 +64,6 @@
 uid: adim
 homeDirectory: /home/adim
 uidNumber: 1006
-structuralObjectClass: inetOrgPerson
 givenName: Adrien
 telephoneNumber: 109
 displayName: adimascio
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/unittest_ldapsource.py	Fri Jun 14 16:26:25 2013 +0200
@@ -0,0 +1,802 @@
+# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+"""cubicweb.server.sources.ldapusers unit and functional tests"""
+
+import os
+import sys
+import shutil
+import time
+from os.path import join, exists
+import subprocess
+import tempfile
+
+from logilab.common.testlib import TestCase, unittest_main, mock_object, Tags
+
+from cubicweb import AuthenticationError
+from cubicweb.devtools.testlib import CubicWebTC
+from cubicweb.devtools.repotest import RQLGeneratorTC
+from cubicweb.devtools.httptest import get_available_port
+from cubicweb.devtools import get_test_db_handler
+
+from cubicweb.server.sources.ldapuser import GlobTrFunc, UnknownEid, RQL2LDAPFilter
+
+CONFIG_LDAPFEED = u'''
+user-base-dn=ou=People,dc=cubicweb,dc=test
+group-base-dn=ou=Group,dc=cubicweb,dc=test
+user-attrs-map=uid=login,mail=email,userPassword=upassword
+group-attrs-map=cn=name,memberUid=member
+'''
+CONFIG_LDAPUSER = u'''
+user-base-dn=ou=People,dc=cubicweb,dc=test
+user-attrs-map=uid=login,mail=email,userPassword=upassword
+'''
+
+URL = None
+
+def create_slapd_configuration(cls):
+    global URL
+    slapddir = tempfile.mkdtemp('cw-unittest-ldap')
+    config = cls.config
+    slapdconf = join(config.apphome, "slapd.conf")
+    confin = file(join(config.apphome, "slapd.conf.in")).read()
+    confstream = file(slapdconf, 'w')
+    confstream.write(confin % {'apphome': config.apphome, 'testdir': slapddir})
+    confstream.close()
+    # fill ldap server with some data
+    ldiffile = join(config.apphome, "ldap_test.ldif")
+    config.info('Initing ldap database')
+    cmdline = ['/usr/sbin/slapadd', '-f', slapdconf, '-l', ldiffile, '-c']
+    PIPE = subprocess.PIPE
+    slapproc = subprocess.Popen(cmdline, stdout=PIPE, stderr=PIPE)
+    stdout, stderr = slapproc.communicate()
+    if slapproc.returncode:
+        print >> sys.stderr, ('slapadd returned with status: %s'
+                              % slapproc.returncode)
+        sys.stdout.write(stdout)
+        sys.stderr.write(stderr)
+
+    #ldapuri = 'ldapi://' + join(basedir, "ldapi").replace('/', '%2f')
+    port = get_available_port(xrange(9000, 9100))
+    host = 'localhost:%s' % port
+    ldapuri = 'ldap://%s' % host
+    cmdline = ["/usr/sbin/slapd", "-f",  slapdconf,  "-h",  ldapuri, "-d", "0"]
+    config.info('Starting slapd:', ' '.join(cmdline))
+    PIPE = subprocess.PIPE
+    cls.slapd_process = subprocess.Popen(cmdline, stdout=PIPE, stderr=PIPE)
+    time.sleep(0.2)
+    if cls.slapd_process.poll() is None:
+        config.info('slapd started with pid %s', cls.slapd_process.pid)
+    else:
+        raise EnvironmentError('Cannot start slapd with cmdline="%s" (from directory "%s")' %
+                               (" ".join(cmdline), os.getcwd()))
+    URL = u'ldap://%s' % host
+    return slapddir
+
+def terminate_slapd(cls):
+    config = cls.config
+    if cls.slapd_process and cls.slapd_process.returncode is None:
+        config.info('terminating slapd')
+        if hasattr(cls.slapd_process, 'terminate'):
+            cls.slapd_process.terminate()
+        else:
+            import os, signal
+            os.kill(cls.slapd_process.pid, signal.SIGTERM)
+        stdout, stderr = cls.slapd_process.communicate()
+        if cls.slapd_process.returncode:
+            print >> sys.stderr, ('slapd returned with status: %s'
+                                  % cls.slapd_process.returncode)
+            sys.stdout.write(stdout)
+            sys.stderr.write(stderr)
+        config.info('DONE')
+
+
+class LDAPFeedTestBase(CubicWebTC):
+    test_db_id = 'ldap-feed'
+    loglevel = 'ERROR'
+
+    @classmethod
+    def setUpClass(cls):
+        from cubicweb.cwctl import init_cmdline_log_threshold
+        init_cmdline_log_threshold(cls.config, cls.loglevel)
+        cls._tmpdir = create_slapd_configuration(cls)
+
+    @classmethod
+    def tearDownClass(cls):
+        terminate_slapd(cls)
+        try:
+            shutil.rmtree(cls._tmpdir)
+        except:
+            pass
+
+    @classmethod
+    def pre_setup_database(cls, session, config):
+        session.create_entity('CWSource', name=u'ldap', type=u'ldapfeed', parser=u'ldapfeed',
+                              url=URL, config=CONFIG_LDAPFEED)
+
+        session.commit()
+        return cls._pull(session)
+
+    @classmethod
+    def _pull(cls, session):
+        with session.repo.internal_session() as isession:
+            lfsource = isession.repo.sources_by_uri['ldap']
+            stats = lfsource.pull_data(isession, force=True, raise_on_error=True)
+            isession.commit()
+            return stats
+
+    def pull(self):
+        return self._pull(self.session)
+
+    def setup_database(self):
+        if self.test_db_id == 'ldap-feed':
+            with self.session.repo.internal_session(safe=True) as session:
+                session.execute('DELETE Any E WHERE E cw_source S, S name "ldap"')
+                session.commit()
+        if self.test_db_id == 'ldap-feed':
+            src = self.sexecute('CWSource S WHERE S name "ldap"').get_entity(0,0)
+            src.cw_set(config=CONFIG_LDAPFEED)
+        self.session.commit()
+        self.pull()
+
+    def delete_ldap_entry(self, dn):
+        """
+        delete an LDAP entity
+        """
+        modcmd = ['dn: %s'%dn, 'changetype: delete']
+        self._ldapmodify(modcmd)
+
+    def update_ldap_entry(self, dn, mods):
+        """
+        modify one or more attributes of an LDAP entity
+        """
+        modcmd = ['dn: %s'%dn, 'changetype: modify']
+        for (kind, key), values in mods.iteritems():
+            modcmd.append('%s: %s' % (kind, key))
+            if isinstance(values, basestring):
+                values = [values]
+            for value in values:
+                modcmd.append('%s: %s'%(key, value))
+            modcmd.append('-')
+        self._ldapmodify(modcmd)
+
+    def _ldapmodify(self, modcmd):
+        uri = self.repo.sources_by_uri['ldap'].urls[0]
+        updatecmd = ['ldapmodify', '-H', uri, '-v', '-x', '-D',
+                     'cn=admin,dc=cubicweb,dc=test', '-w', 'cw']
+        PIPE = subprocess.PIPE
+        p = subprocess.Popen(updatecmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
+        p.stdin.write('\n'.join(modcmd))
+        p.stdin.close()
+        if p.wait():
+            raise RuntimeError("ldap update failed: %s"%('\n'.join(p.stderr.readlines())))
+
+class CheckWrongGroup(LDAPFeedTestBase):
+    """
+    A testcase for situations where the default group for CWUser
+    created from LDAP is wrongly configured.
+    """
+
+    def test_wrong_group(self):
+        with self.session.repo.internal_session(safe=True) as session:
+            source = self.session.execute('CWSource S WHERE S type="ldapfeed"').get_entity(0,0)
+            config = source.repo_source.check_config(source)
+            # inject a bogus group here, along with at least a valid one
+            config['user-default-group'] = ('thisgroupdoesnotexists','users')
+            source.repo_source.update_config(source, config)
+            session.commit(free_cnxset=False)
+            # here we emitted an error log entry
+            stats = source.repo_source.pull_data(session, force=True, raise_on_error=True)
+            session.commit()
+
+
+
+class LDAPFeedUserTC(LDAPFeedTestBase):
+    """
+    A testcase for CWUser support in ldapfeed (basic tests and authentication).
+    """
+
+    def assertMetadata(self, entity):
+        self.assertTrue(entity.creation_date)
+        self.assertTrue(entity.modification_date)
+
+    def test_authenticate(self):
+        source = self.repo.sources_by_uri['ldap']
+        self.session.set_cnxset()
+        # ensure we won't be logged against
+        self.assertRaises(AuthenticationError,
+                          source.authenticate, self.session, 'toto', 'toto')
+        self.assertTrue(source.authenticate(self.session, 'syt', 'syt'))
+        self.assertTrue(self.repo.connect('syt', password='syt'))
+
+    def test_base(self):
+        # check a known one
+        rset = self.sexecute('CWUser X WHERE X login %(login)s', {'login': 'syt'})
+        e = rset.get_entity(0, 0)
+        self.assertEqual(e.login, 'syt')
+        e.complete()
+        self.assertMetadata(e)
+        self.assertEqual(e.firstname, None)
+        self.assertEqual(e.surname, None)
+        self.assertTrue('users' in [g.name for g in e.in_group])
+        self.assertEqual(e.owned_by[0].login, 'syt')
+        self.assertEqual(e.created_by, ())
+        addresses = [pe.address for pe in e.use_email]
+        addresses.sort()
+        self.assertEqual(['sylvain.thenault@logilab.fr', 'syt@logilab.fr'],
+                         addresses)
+        self.assertIn(e.primary_email[0].address, ['sylvain.thenault@logilab.fr',
+                                                   'syt@logilab.fr'])
+        # email content should be indexed on the user
+        rset = self.sexecute('CWUser X WHERE X has_text "thenault"')
+        self.assertEqual(rset.rows, [[e.eid]])
+
+    def test_copy_to_system_source(self):
+        "make sure we can 'convert' an LDAP user into a system one"
+        source = self.repo.sources_by_uri['ldap']
+        eid = self.sexecute('CWUser X WHERE X login %(login)s', {'login': 'syt'})[0][0]
+        self.sexecute('SET X cw_source S WHERE X eid %(x)s, S name "system"', {'x': eid})
+        self.commit()
+        source.reset_caches()
+        rset = self.sexecute('CWUser X WHERE X login %(login)s', {'login': 'syt'})
+        self.assertEqual(len(rset), 1)
+        e = rset.get_entity(0, 0)
+        self.assertEqual(e.eid, eid)
+        self.assertEqual(e.cw_metainformation(), {'source': {'type': u'native',
+                                                             'uri': u'system',
+                                                             'use-cwuri-as-url': False},
+                                                  'type': 'CWUser',
+                                                  'extid': None})
+        self.assertEqual(e.cw_source[0].name, 'system')
+        self.assertTrue(e.creation_date)
+        self.assertTrue(e.modification_date)
+        source.pull_data(self.session)
+        rset = self.sexecute('CWUser X WHERE X login %(login)s', {'login': 'syt'})
+        self.assertEqual(len(rset), 1)
+        self.assertTrue(self.repo.system_source.authenticate(
+                self.session, 'syt', password='syt'))
+        # make sure the pull from ldap have not "reverted" user as a ldap-feed user
+        self.assertEqual(e.cw_metainformation(), {'source': {'type': u'native',
+                                                             'uri': u'system',
+                                                             'use-cwuri-as-url': False},
+                                                  'type': 'CWUser',
+                                                  'extid': None})
+        # and that the password stored in the system source is not empty or so
+        user = self.execute('CWUser U WHERE U login "syt"').get_entity(0, 0)
+        user.cw_clear_all_caches()
+        pwd = self.session.system_sql("SELECT cw_upassword FROM cw_cwuser WHERE cw_login='syt';").fetchall()[0][0]
+        self.assertIsNotNone(pwd)
+        self.assertTrue(str(pwd))
+
+
+
+class LDAPFeedUserDeletionTC(LDAPFeedTestBase):
+    """
+    A testcase for situations where users are deleted from or
+    unavailable in the LDAP database.
+    """
+    def test_a_filter_inactivate(self):
+        """ filtered out people should be deactivated, unable to authenticate """
+        source = self.session.execute('CWSource S WHERE S type="ldapfeed"').get_entity(0,0)
+        config = source.repo_source.check_config(source)
+        # filter with adim's phone number
+        config['user-filter'] = u'(%s=%s)' % ('telephoneNumber', '109')
+        source.repo_source.update_config(source, config)
+        self.commit()
+        self.pull()
+        self.assertRaises(AuthenticationError, self.repo.connect, 'syt', password='syt')
+        self.assertEqual(self.execute('Any N WHERE U login "syt", '
+                                      'U in_state S, S name N').rows[0][0],
+                         'deactivated')
+        self.assertEqual(self.execute('Any N WHERE U login "adim", '
+                                      'U in_state S, S name N').rows[0][0],
+                         'activated')
+        # unfilter, syt should be activated again
+        config['user-filter'] = u''
+        source.repo_source.update_config(source, config)
+        self.commit()
+        self.pull()
+        self.assertEqual(self.execute('Any N WHERE U login "syt", '
+                                      'U in_state S, S name N').rows[0][0],
+                         'activated')
+        self.assertEqual(self.execute('Any N WHERE U login "adim", '
+                                      'U in_state S, S name N').rows[0][0],
+                         'activated')
+
+    def test_delete(self):
+        """ delete syt, pull, check deactivation, repull,
+        read syt, pull, check activation
+        """
+        self.delete_ldap_entry('uid=syt,ou=People,dc=cubicweb,dc=test')
+        self.pull()
+        self.assertRaises(AuthenticationError, self.repo.connect, 'syt', password='syt')
+        self.assertEqual(self.execute('Any N WHERE U login "syt", '
+                                      'U in_state S, S name N').rows[0][0],
+                         'deactivated')
+        # check that it doesn't choke
+        self.pull()
+        # reset the ldap database
+        self.tearDownClass()
+        self.setUpClass()
+        self.pull()
+        self.assertEqual(self.execute('Any N WHERE U login "syt", '
+                                      'U in_state S, S name N').rows[0][0],
+                         'activated')
+
+    def test_reactivate_deleted(self):
+        # test that reactivating the user BY HAND isn't enough to
+        # authenticate, as the native source refuses to authenticate
+        # users from other sources
+        self.delete_ldap_entry('uid=syt,ou=People,dc=cubicweb,dc=test')
+        self.pull()
+        # reactivate user (which source is still ldap-feed)
+        user = self.execute('CWUser U WHERE U login "syt"').get_entity(0, 0)
+        user.cw_adapt_to('IWorkflowable').fire_transition('activate')
+        self.commit()
+        with self.assertRaises(AuthenticationError):
+            self.repo.connect('syt', password='syt')
+
+        # ok now let's try to make it a system user
+        self.sexecute('SET X cw_source S WHERE X eid %(x)s, S name "system"', {'x': user.eid})
+        self.commit()
+        # and that we can now authenticate again
+        self.assertRaises(AuthenticationError, self.repo.connect, 'syt', password='toto')
+        self.assertTrue(self.repo.connect('syt', password='syt'))
+
+class LDAPFeedGroupTC(LDAPFeedTestBase):
+    """
+    A testcase for group support in ldapfeed.
+    """
+
+    def test_groups_exist(self):
+        rset = self.sexecute('CWGroup X WHERE X name "dir"')
+        self.assertEqual(len(rset), 1)
+
+        rset = self.sexecute('CWGroup X WHERE X cw_source S, S name "ldap"')
+        self.assertEqual(len(rset), 2)
+
+    def test_group_deleted(self):
+        rset = self.sexecute('CWGroup X WHERE X name "dir"')
+        self.assertEqual(len(rset), 1)
+
+    def test_in_group(self):
+        rset = self.sexecute('CWGroup X WHERE X name %(name)s', {'name': 'dir'})
+        dirgroup = rset.get_entity(0, 0)
+        self.assertEqual(set(['syt', 'adim']),
+                         set([u.login for u in dirgroup.reverse_in_group]))
+        rset = self.sexecute('CWGroup X WHERE X name %(name)s', {'name': 'logilab'})
+        logilabgroup = rset.get_entity(0, 0)
+        self.assertEqual(set(['adim']),
+                         set([u.login for u in logilabgroup.reverse_in_group]))
+
+    def test_group_member_added(self):
+        self.pull()
+        rset = self.sexecute('Any L WHERE U in_group G, G name %(name)s, U login L',
+                             {'name': 'logilab'})
+        self.assertEqual(len(rset), 1)
+        self.assertEqual(rset[0][0], 'adim')
+
+        try:
+            self.update_ldap_entry('cn=logilab,ou=Group,dc=cubicweb,dc=test',
+                                   {('add', 'memberUid'): ['syt']})
+            time.sleep(1.1) # timestamp precision is 1s
+            self.pull()
+
+            rset = self.sexecute('Any L WHERE U in_group G, G name %(name)s, U login L',
+                                 {'name': 'logilab'})
+            self.assertEqual(len(rset), 2)
+            members = set([u[0] for u in rset])
+            self.assertEqual(set(['adim', 'syt']), members)
+
+        finally:
+            # back to normal ldap setup
+            self.tearDownClass()
+            self.setUpClass()
+
+    def test_group_member_deleted(self):
+        self.pull() # ensure we are sync'ed
+        rset = self.sexecute('Any L WHERE U in_group G, G name %(name)s, U login L',
+                             {'name': 'logilab'})
+        self.assertEqual(len(rset), 1)
+        self.assertEqual(rset[0][0], 'adim')
+
+        try:
+            self.update_ldap_entry('cn=logilab,ou=Group,dc=cubicweb,dc=test',
+                                   {('delete', 'memberUid'): ['adim']})
+            time.sleep(1.1) # timestamp precision is 1s
+            self.pull()
+
+            rset = self.sexecute('Any L WHERE U in_group G, G name %(name)s, U login L',
+                                 {'name': 'logilab'})
+            self.assertEqual(len(rset), 0)
+        finally:
+            # back to normal ldap setup
+            self.tearDownClass()
+            self.setUpClass()
+
+
+class LDAPUserSourceTC(LDAPFeedTestBase):
+    test_db_id = 'ldap-user'
+    tags = CubicWebTC.tags | Tags(('ldap'))
+
+    @classmethod
+    def pre_setup_database(cls, session, config):
+        session.create_entity('CWSource', name=u'ldap', type=u'ldapuser',
+                              url=URL, config=CONFIG_LDAPUSER)
+        session.commit()
+        # XXX keep it there
+        session.execute('CWUser U')
+
+    def assertMetadata(self, entity):
+        self.assertEqual(entity.creation_date, None)
+        self.assertEqual(entity.modification_date, None)
+
+    def test_synchronize(self):
+        source = self.repo.sources_by_uri['ldap']
+        source.synchronize()
+
+    def test_base(self):
+        # check a known one
+        rset = self.sexecute('CWUser X WHERE X login %(login)s', {'login': 'syt'})
+        e = rset.get_entity(0, 0)
+        self.assertEqual(e.login, 'syt')
+        e.complete()
+        self.assertMetadata(e)
+        self.assertEqual(e.firstname, None)
+        self.assertEqual(e.surname, None)
+        self.assertEqual(e.in_group[0].name, 'users')
+        self.assertEqual(e.owned_by[0].login, 'syt')
+        self.assertEqual(e.created_by, ())
+        addresses = [pe.address for pe in e.use_email]
+        addresses.sort()
+        # should have two elements, but ldapuser seems buggy. It's going to be dropped anyway.
+        self.assertEqual(['sylvain.thenault@logilab.fr',], # 'syt@logilab.fr'],
+                         addresses)
+        self.assertIn(e.primary_email[0].address,
+                      ['sylvain.thenault@logilab.fr', 'syt@logilab.fr'])
+        # email content should be indexed on the user
+        rset = self.sexecute('CWUser X WHERE X has_text "thenault"')
+        self.assertEqual(rset.rows, [[e.eid]])
+
+    def test_not(self):
+        eid = self.sexecute('CWUser X WHERE X login %(login)s', {'login': 'syt'})[0][0]
+        rset = self.sexecute('CWUser X WHERE NOT X eid %s' % eid)
+        self.assert_(rset)
+        self.assert_(not eid in (r[0] for r in rset))
+
+    def test_multiple(self):
+        seid = self.sexecute('CWUser X WHERE X login %(login)s', {'login': 'syt'})[0][0]
+        aeid = self.sexecute('CWUser X WHERE X login %(login)s', {'login': 'adim'})[0][0]
+        rset = self.sexecute('CWUser X, Y WHERE X login %(syt)s, Y login %(adim)s',
+                            {'syt': 'syt', 'adim': 'adim'})
+        self.assertEqual(rset.rows, [[seid, aeid]])
+        rset = self.sexecute('Any X,Y,L WHERE X login L, X login %(syt)s, Y login %(adim)s',
+                            {'syt': 'syt', 'adim': 'adim'})
+        self.assertEqual(rset.rows, [[seid, aeid, 'syt']])
+
+    def test_in(self):
+        seid = self.sexecute('CWUser X WHERE X login %(login)s', {'login': 'syt'})[0][0]
+        aeid = self.sexecute('CWUser X WHERE X login %(login)s', {'login': 'adim'})[0][0]
+        rset = self.sexecute('Any X,L ORDERBY L WHERE X login IN("%s", "%s"), X login L' % ('syt', 'adim'))
+        self.assertEqual(rset.rows, [[aeid, 'adim'], [seid, 'syt']])
+
+    def test_relations(self):
+        eid = self.sexecute('CWUser X WHERE X login %(login)s', {'login': 'syt'})[0][0]
+        rset = self.sexecute('Any X,E WHERE X is CWUser, X login L, X primary_email E')
+        self.assert_(eid in (r[0] for r in rset))
+        rset = self.sexecute('Any X,L,E WHERE X is CWUser, X login L, X primary_email E')
+        self.assert_('syt' in (r[1] for r in rset))
+
+    def test_count(self):
+        nbusers = self.sexecute('Any COUNT(X) WHERE X is CWUser')[0][0]
+        # just check this is a possible number
+        self.assert_(nbusers > 1, nbusers)
+        self.assert_(nbusers < 30, nbusers)
+
+    def test_upper(self):
+        eid = self.sexecute('CWUser X WHERE X login %(login)s', {'login': 'syt'})[0][0]
+        rset = self.sexecute('Any UPPER(L) WHERE X eid %s, X login L' % eid)
+        self.assertEqual(rset[0][0], 'syt'.upper())
+
+    def test_unknown_attr(self):
+        eid = self.sexecute('CWUser X WHERE X login %(login)s', {'login': 'syt'})[0][0]
+        rset = self.sexecute('Any L,C,M WHERE X eid %s, X login L, '
+                            'X creation_date C, X modification_date M' % eid)
+        self.assertEqual(rset[0][0], 'syt')
+        self.assertEqual(rset[0][1], None)
+        self.assertEqual(rset[0][2], None)
+
+    def test_sort(self):
+        logins = [l for l, in self.sexecute('Any L ORDERBY L WHERE X login L')]
+        self.assertEqual(logins, sorted(logins))
+
+    def test_lower_sort(self):
+        logins = [l for l, in self.sexecute('Any L ORDERBY lower(L) WHERE X login L')]
+        self.assertEqual(logins, sorted(logins))
+
+    def test_or(self):
+        rset = self.sexecute('DISTINCT Any X WHERE X login %(login)s OR (X in_group G, G name "managers")',
+                            {'login': 'syt'})
+        self.assertEqual(len(rset), 2, rset.rows) # syt + admin
+
+    def test_nonregr_set_owned_by(self):
+        # test that when a user coming from ldap is triggering a transition
+        # the related TrInfo has correct owner information
+        self.sexecute('SET X in_group G WHERE X login %(syt)s, G name "managers"', {'syt': 'syt'})
+        self.commit()
+        syt = self.sexecute('CWUser X WHERE X login %(login)s', {'login': 'syt'}).get_entity(0, 0)
+        self.assertEqual([g.name for g in syt.in_group], ['managers', 'users'])
+        cnx = self.login('syt', password='syt')
+        cu = cnx.cursor()
+        adim = cu.execute('CWUser X WHERE X login %(login)s', {'login': 'adim'}).get_entity(0, 0)
+        iworkflowable = adim.cw_adapt_to('IWorkflowable')
+        iworkflowable.fire_transition('deactivate')
+        try:
+            cnx.commit()
+            adim.cw_clear_all_caches()
+            self.assertEqual(adim.in_state[0].name, 'deactivated')
+            trinfo = iworkflowable.latest_trinfo()
+            self.assertEqual(trinfo.owned_by[0].login, 'syt')
+            # select from_state to skip the user's creation TrInfo
+            rset = self.sexecute('Any U ORDERBY D DESC WHERE WF wf_info_for X,'
+                                'WF creation_date D, WF from_state FS,'
+                                'WF owned_by U?, X eid %(x)s',
+                                {'x': adim.eid})
+            self.assertEqual(rset.rows, [[syt.eid]])
+        finally:
+            # restore db state
+            self.restore_connection()
+            adim = self.sexecute('CWUser X WHERE X login %(login)s', {'login': 'adim'}).get_entity(0, 0)
+            adim.cw_adapt_to('IWorkflowable').fire_transition('activate')
+            self.sexecute('DELETE X in_group G WHERE X login %(syt)s, G name "managers"', {'syt': 'syt'})
+
+    def test_same_column_names(self):
+        self.sexecute('Any X, Y WHERE X copain Y, X login "comme", Y login "cochon"')
+
+    def test_multiple_entities_from_different_sources(self):
+        req = self.request()
+        self.create_user(req, 'cochon')
+        self.assertTrue(self.sexecute('Any X,Y WHERE X login %(syt)s, Y login "cochon"', {'syt': 'syt'}))
+
+    def test_exists1(self):
+        self.session.set_cnxset()
+        self.session.create_entity('CWGroup', name=u'bougloup1')
+        self.session.create_entity('CWGroup', name=u'bougloup2')
+        self.sexecute('SET U in_group G WHERE G name ~= "bougloup%", U login "admin"')
+        self.sexecute('SET U in_group G WHERE G name = "bougloup1", U login %(syt)s', {'syt': 'syt'})
+        rset = self.sexecute('Any L,SN ORDERBY L WHERE X in_state S, '
+                             'S name SN, X login L, EXISTS(X in_group G, G name ~= "bougloup%")')
+        self.assertEqual(rset.rows, [['admin', 'activated'], ['syt', 'activated']])
+
+    def test_exists2(self):
+        req = self.request()
+        self.create_user(req, 'comme')
+        self.create_user(req, 'cochon')
+        self.sexecute('SET X copain Y WHERE X login "comme", Y login "cochon"')
+        rset = self.sexecute('Any GN ORDERBY GN WHERE X in_group G, G name GN, '
+                             '(G name "managers" OR EXISTS(X copain T, T login in ("comme", "cochon")))')
+        self.assertEqual(rset.rows, [['managers'], ['users']])
+
+    def test_exists3(self):
+        req = self.request()
+        self.create_user(req, 'comme')
+        self.create_user(req, 'cochon')
+        self.sexecute('SET X copain Y WHERE X login "comme", Y login "cochon"')
+        self.assertTrue(self.sexecute('Any X, Y WHERE X copain Y, X login "comme", Y login "cochon"'))
+        self.sexecute('SET X copain Y WHERE X login %(syt)s, Y login "cochon"', {'syt': 'syt'})
+        self.assertTrue(self.sexecute('Any X, Y WHERE X copain Y, X login %(syt)s, Y login "cochon"', {'syt': 'syt'}))
+        rset = self.sexecute('Any GN,L WHERE X in_group G, X login L, G name GN, G name "managers" '
+                             'OR EXISTS(X copain T, T login in ("comme", "cochon"))')
+        self.assertEqual(sorted(rset.rows), [['managers', 'admin'], ['users', 'comme'], ['users', 'syt']])
+
+    def test_exists4(self):
+        req = self.request()
+        self.create_user(req, 'comme')
+        self.create_user(req, 'cochon', groups=('users', 'guests'))
+        self.create_user(req, 'billy')
+        self.sexecute('SET X copain Y WHERE X login "comme", Y login "cochon"')
+        self.sexecute('SET X copain Y WHERE X login "cochon", Y login "cochon"')
+        self.sexecute('SET X copain Y WHERE X login "comme", Y login "billy"')
+        self.sexecute('SET X copain Y WHERE X login %(syt)s, Y login "billy"', {'syt': 'syt'})
+        # search for group name, login where
+        #   CWUser copain with "comme" or "cochon" AND same login as the copain
+        # OR
+        #   CWUser in_state activated AND not copain with billy
+        #
+        # SO we expect everybody but "comme" and "syt"
+        rset= self.sexecute('Any GN,L WHERE X in_group G, X login L, G name GN, '
+                           'EXISTS(X copain T, T login L, T login in ("comme", "cochon")) OR '
+                           'EXISTS(X in_state S, S name "activated", NOT X copain T2, T2 login "billy")')
+        all = self.sexecute('Any GN, L WHERE X in_group G, X login L, G name GN')
+        all.rows.remove(['users', 'comme'])
+        all.rows.remove(['users', 'syt'])
+        self.assertEqual(sorted(rset.rows), sorted(all.rows))
+
+    def test_exists5(self):
+        req = self.request()
+        self.create_user(req, 'comme')
+        self.create_user(req, 'cochon', groups=('users', 'guests'))
+        self.create_user(req, 'billy')
+        self.sexecute('SET X copain Y WHERE X login "comme", Y login "cochon"')
+        self.sexecute('SET X copain Y WHERE X login "cochon", Y login "cochon"')
+        self.sexecute('SET X copain Y WHERE X login "comme", Y login "billy"')
+        self.sexecute('SET X copain Y WHERE X login %(syt)s, Y login "cochon"', {'syt': 'syt'})
+        rset= self.sexecute('Any L WHERE X login L, '
+                           'EXISTS(X copain T, T login in ("comme", "cochon")) AND '
+                           'NOT EXISTS(X copain T2, T2 login "billy")')
+        self.assertEqual(sorted(rset.rows), [['cochon'], ['syt']])
+        rset= self.sexecute('Any GN,L WHERE X in_group G, X login L, G name GN, '
+                           'EXISTS(X copain T, T login in ("comme", "cochon")) AND '
+                           'NOT EXISTS(X copain T2, T2 login "billy")')
+        self.assertEqual(sorted(rset.rows), [['guests', 'cochon'],
+                                              ['users', 'cochon'],
+                                              ['users', 'syt']])
+
+    def test_cd_restriction(self):
+        rset = self.sexecute('CWUser X WHERE X creation_date > "2009-02-01"')
+        # admin/anon but no ldap user since it doesn't support creation_date
+        self.assertEqual(sorted(e.login for e in rset.entities()),
+                          ['admin', 'anon'])
+
+    def test_union(self):
+        afeids = self.sexecute('State X')
+        ueids = self.sexecute('CWUser X')
+        rset = self.sexecute('(Any X WHERE X is State) UNION (Any X WHERE X is CWUser)')
+        self.assertEqual(sorted(r[0] for r in rset.rows),
+                          sorted(r[0] for r in afeids + ueids))
+
+    def _init_security_test(self):
+        req = self.request()
+        self.create_user(req, 'iaminguestsgrouponly', groups=('guests',))
+        cnx = self.login('iaminguestsgrouponly')
+        return cnx.cursor()
+
+    def test_security1(self):
+        cu = self._init_security_test()
+        rset = cu.execute('CWUser X WHERE X login %(login)s', {'login': 'syt'})
+        self.assertEqual(rset.rows, [])
+        rset = cu.execute('Any X WHERE X login "iaminguestsgrouponly"')
+        self.assertEqual(len(rset.rows), 1)
+
+    def test_security2(self):
+        cu = self._init_security_test()
+        rset = cu.execute('Any X WHERE X has_text %(syt)s', {'syt': 'syt'})
+        self.assertEqual(rset.rows, [])
+        rset = cu.execute('Any X WHERE X has_text "iaminguestsgrouponly"')
+        self.assertEqual(len(rset.rows), 1)
+
+    def test_security3(self):
+        cu = self._init_security_test()
+        rset = cu.execute('Any F WHERE X has_text %(syt)s, X firstname F', {'syt': 'syt'})
+        self.assertEqual(rset.rows, [])
+        rset = cu.execute('Any F WHERE X has_text "iaminguestsgrouponly", X firstname F')
+        self.assertEqual(rset.rows, [[None]])
+
+    def test_nonregr1(self):
+        self.sexecute('Any X,AA ORDERBY AA DESC WHERE E eid %(x)s, E owned_by X, '
+                     'X modification_date AA',
+                     {'x': self.session.user.eid})
+
+    def test_nonregr2(self):
+        self.sexecute('Any X,L,AA WHERE E eid %(x)s, E owned_by X, '
+                     'X login L, X modification_date AA',
+                     {'x': self.session.user.eid})
+
+    def test_nonregr3(self):
+        self.sexecute('Any X,AA ORDERBY AA DESC WHERE E eid %(x)s, '
+                     'X modification_date AA',
+                     {'x': self.session.user.eid})
+
+    def test_nonregr4(self):
+        emaileid = self.sexecute('INSERT EmailAddress X: X address "toto@logilab.org"')[0][0]
+        self.sexecute('Any X,AA WHERE X use_email Y, Y eid %(x)s, X modification_date AA',
+                     {'x': emaileid})
+
+    def test_nonregr5(self):
+        # original jpl query:
+        # Any X, NOW - CD, P WHERE P is Project, U interested_in P, U is CWUser,
+        # U login "sthenault", X concerns P, X creation_date CD ORDERBY CD DESC LIMIT 5
+        rql = ('Any X, NOW - CD, P ORDERBY CD DESC LIMIT 5 WHERE P bookmarked_by U, '
+               'U login "%s", P is X, X creation_date CD') % self.session.user.login
+        self.sexecute(rql, )#{'x': })
+
+    def test_nonregr6(self):
+        self.sexecute('Any B,U,UL GROUPBY B,U,UL WHERE B created_by U?, B is File '
+                     'WITH U,UL BEING (Any U,UL WHERE ME eid %(x)s, (EXISTS(U identity ME) '
+                     'OR (EXISTS(U in_group G, G name IN("managers", "staff")))) '
+                     'OR (EXISTS(U in_group H, ME in_group H, NOT H name "users")), U login UL, U is CWUser)',
+                     {'x': self.session.user.eid})
+
+class GlobTrFuncTC(TestCase):
+
+    def test_count(self):
+        trfunc = GlobTrFunc('count', 0)
+        res = trfunc.apply([[1], [2], [3], [4]])
+        self.assertEqual(res, [[4]])
+        trfunc = GlobTrFunc('count', 1)
+        res = trfunc.apply([[1, 2], [2, 4], [3, 6], [1, 5]])
+        self.assertEqual(res, [[1, 2], [2, 1], [3, 1]])
+
+    def test_sum(self):
+        trfunc = GlobTrFunc('sum', 0)
+        res = trfunc.apply([[1], [2], [3], [4]])
+        self.assertEqual(res, [[10]])
+        trfunc = GlobTrFunc('sum', 1)
+        res = trfunc.apply([[1, 2], [2, 4], [3, 6], [1, 5]])
+        self.assertEqual(res, [[1, 7], [2, 4], [3, 6]])
+
+    def test_min(self):
+        trfunc = GlobTrFunc('min', 0)
+        res = trfunc.apply([[1], [2], [3], [4]])
+        self.assertEqual(res, [[1]])
+        trfunc = GlobTrFunc('min', 1)
+        res = trfunc.apply([[1, 2], [2, 4], [3, 6], [1, 5]])
+        self.assertEqual(res, [[1, 2], [2, 4], [3, 6]])
+
+    def test_max(self):
+        trfunc = GlobTrFunc('max', 0)
+        res = trfunc.apply([[1], [2], [3], [4]])
+        self.assertEqual(res, [[4]])
+        trfunc = GlobTrFunc('max', 1)
+        res = trfunc.apply([[1, 2], [2, 4], [3, 6], [1, 5]])
+        self.assertEqual(res, [[1, 5], [2, 4], [3, 6]])
+
+
+class RQL2LDAPFilterTC(RQLGeneratorTC):
+
+    tags = RQLGeneratorTC.tags | Tags(('ldap'))
+
+    @property
+    def schema(self):
+        """return the application schema"""
+        return self._schema
+
+    def setUp(self):
+        self.handler = get_test_db_handler(LDAPUserSourceTC.config)
+        self.handler.build_db_cache('ldap-rqlgenerator', LDAPUserSourceTC.pre_setup_database)
+        self.handler.restore_database('ldap-rqlgenerator')
+        self._repo = repo = self.handler.get_repo()
+        self._schema = repo.schema
+        super(RQL2LDAPFilterTC, self).setUp()
+        ldapsource = repo.sources[-1]
+        self.cnxset = repo._get_cnxset()
+        session = mock_object(cnxset=self.cnxset)
+        self.o = RQL2LDAPFilter(ldapsource, session)
+        self.ldapclasses = ''.join(ldapsource.base_filters)
+
+    def tearDown(self):
+        self._repo.turn_repo_off()
+        super(RQL2LDAPFilterTC, self).tearDown()
+
+    def test_base(self):
+        rqlst = self._prepare('CWUser X WHERE X login "toto"').children[0]
+        self.assertEqual(self.o.generate(rqlst, 'X')[1],
+                          '(&%s(uid=toto))' % self.ldapclasses)
+
+    def test_kwargs(self):
+        rqlst = self._prepare('CWUser X WHERE X login %(x)s').children[0]
+        self.o._args = {'x': "toto"}
+        self.assertEqual(self.o.generate(rqlst, 'X')[1],
+                          '(&%s(uid=toto))' % self.ldapclasses)
+
+    def test_get_attr(self):
+        rqlst = self._prepare('Any X WHERE E firstname X, E eid 12').children[0]
+        self.assertRaises(UnknownEid, self.o.generate, rqlst, 'E')
+
+
+if __name__ == '__main__':
+    unittest_main()
--- a/server/test/unittest_ldapuser.py	Fri Jun 14 16:13:24 2013 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,632 +0,0 @@
-# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-#
-# This file is part of CubicWeb.
-#
-# CubicWeb is free software: you can redistribute it and/or modify it under the
-# terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option)
-# any later version.
-#
-# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""cubicweb.server.sources.ldapusers unit and functional tests"""
-
-import os
-import shutil
-import time
-from os.path import join, exists
-import subprocess
-import tempfile
-
-from logilab.common.testlib import TestCase, unittest_main, mock_object, Tags
-
-from cubicweb import AuthenticationError
-from cubicweb.devtools.testlib import CubicWebTC
-from cubicweb.devtools.repotest import RQLGeneratorTC
-from cubicweb.devtools.httptest import get_available_port
-from cubicweb.devtools import get_test_db_handler
-
-from cubicweb.server.sources.ldapuser import GlobTrFunc, UnknownEid, RQL2LDAPFilter
-
-CONFIG = u'user-base-dn=ou=People,dc=cubicweb,dc=test'
-URL = None
-
-def create_slapd_configuration(cls):
-    global URL
-    slapddir = tempfile.mkdtemp('cw-unittest-ldap')
-    config = cls.config
-    slapdconf = join(config.apphome, "slapd.conf")
-    confin = file(join(config.apphome, "slapd.conf.in")).read()
-    confstream = file(slapdconf, 'w')
-    confstream.write(confin % {'apphome': config.apphome, 'testdir': slapddir})
-    confstream.close()
-    # fill ldap server with some data
-    ldiffile = join(config.apphome, "ldap_test.ldif")
-    config.info('Initing ldap database')
-    cmdline = "/usr/sbin/slapadd -f %s -l %s -c" % (slapdconf, ldiffile)
-    subprocess.call(cmdline, shell=True)
-
-    #ldapuri = 'ldapi://' + join(basedir, "ldapi").replace('/', '%2f')
-    port = get_available_port(xrange(9000, 9100))
-    host = 'localhost:%s' % port
-    ldapuri = 'ldap://%s' % host
-    cmdline = ["/usr/sbin/slapd", "-f",  slapdconf,  "-h",  ldapuri, "-d", "0"]
-    config.info('Starting slapd:', ' '.join(cmdline))
-    cls.slapd_process = subprocess.Popen(cmdline)
-    time.sleep(0.2)
-    if cls.slapd_process.poll() is None:
-        config.info('slapd started with pid %s' % cls.slapd_process.pid)
-    else:
-        raise EnvironmentError('Cannot start slapd with cmdline="%s" (from directory "%s")' %
-                               (" ".join(cmdline), os.getcwd()))
-    URL = u'ldap://%s' % host
-    return slapddir
-
-def terminate_slapd(cls):
-    config = cls.config
-    if cls.slapd_process and cls.slapd_process.returncode is None:
-        config.info('terminating slapd')
-        if hasattr(cls.slapd_process, 'terminate'):
-            cls.slapd_process.terminate()
-        else:
-            import os, signal
-            os.kill(cls.slapd_process.pid, signal.SIGTERM)
-        cls.slapd_process.wait()
-        config.info('DONE')
-
-class LDAPTestBase(CubicWebTC):
-    loglevel = 'ERROR'
-
-    @classmethod
-    def setUpClass(cls):
-        from cubicweb.cwctl import init_cmdline_log_threshold
-        init_cmdline_log_threshold(cls.config, cls.loglevel)
-        cls._tmpdir = create_slapd_configuration(cls)
-
-    @classmethod
-    def tearDownClass(cls):
-        terminate_slapd(cls)
-        try:
-            shutil.rmtree(cls._tmpdir)
-        except:
-            pass
-
-class CheckWrongGroup(LDAPTestBase):
-
-    def test_wrong_group(self):
-        self.session.create_entity('CWSource', name=u'ldapuser', type=u'ldapfeed', parser=u'ldapfeed',
-                                   url=URL, config=CONFIG)
-        self.commit()
-        with self.session.repo.internal_session(safe=True) as session:
-            source = self.session.execute('CWSource S WHERE S type="ldapfeed"').get_entity(0,0)
-            config = source.repo_source.check_config(source)
-            # inject a bogus group here, along with at least a valid one
-            config['user-default-group'] = ('thisgroupdoesnotexists','users')
-            source.repo_source.update_config(source, config)
-            session.commit(free_cnxset=False)
-            # here we emitted an error log entry
-            stats = source.repo_source.pull_data(session, force=True, raise_on_error=True)
-            session.commit()
-
-class DeleteStuffFromLDAPFeedSourceTC(LDAPTestBase):
-    test_db_id = 'ldap-feed'
-
-    @classmethod
-    def pre_setup_database(cls, session, config):
-        session.create_entity('CWSource', name=u'ldapuser', type=u'ldapfeed', parser=u'ldapfeed',
-                              url=URL, config=CONFIG)
-        session.commit()
-        with session.repo.internal_session(safe=True) as isession:
-            lfsource = isession.repo.sources_by_uri['ldapuser']
-            stats = lfsource.pull_data(isession, force=True, raise_on_error=True)
-
-    def _pull(self):
-        with self.session.repo.internal_session() as isession:
-            lfsource = isession.repo.sources_by_uri['ldapuser']
-            stats = lfsource.pull_data(isession, force=True, raise_on_error=True)
-            isession.commit()
-
-    def test_a_filter_inactivate(self):
-        """ filtered out people should be deactivated, unable to authenticate """
-        source = self.session.execute('CWSource S WHERE S type="ldapfeed"').get_entity(0,0)
-        config = source.repo_source.check_config(source)
-        # filter with adim's phone number
-        config['user-filter'] = u'(%s=%s)' % ('telephoneNumber', '109')
-        source.repo_source.update_config(source, config)
-        self.commit()
-        self._pull()
-        self.assertRaises(AuthenticationError, self.repo.connect, 'syt', password='syt')
-        self.assertEqual(self.execute('Any N WHERE U login "syt", '
-                                      'U in_state S, S name N').rows[0][0],
-                         'deactivated')
-        self.assertEqual(self.execute('Any N WHERE U login "adim", '
-                                      'U in_state S, S name N').rows[0][0],
-                         'activated')
-        # unfilter, syt should be activated again
-        config['user-filter'] = u''
-        source.repo_source.update_config(source, config)
-        self.commit()
-        self._pull()
-        self.assertEqual(self.execute('Any N WHERE U login "syt", '
-                                      'U in_state S, S name N').rows[0][0],
-                         'activated')
-        self.assertEqual(self.execute('Any N WHERE U login "adim", '
-                                      'U in_state S, S name N').rows[0][0],
-                         'activated')
-
-    def test_delete(self):
-        """ delete syt, pull, check deactivation, repull,
-        readd syt, pull, check activation
-        """
-        uri = self.repo.sources_by_uri['ldapuser'].urls[0]
-        deletecmd = ("ldapdelete -H %s 'uid=syt,ou=People,dc=cubicweb,dc=test' "
-                     "-v -x -D cn=admin,dc=cubicweb,dc=test -w'cw'" % uri)
-        os.system(deletecmd)
-        self._pull()
-        self.assertRaises(AuthenticationError, self.repo.connect, 'syt', password='syt')
-        self.assertEqual(self.execute('Any N WHERE U login "syt", '
-                                      'U in_state S, S name N').rows[0][0],
-                         'deactivated')
-        # check that it doesn't choke
-        self._pull()
-        # reset the fscking ldap thing
-        self.tearDownClass()
-        self.setUpClass()
-        self._pull()
-        self.assertEqual(self.execute('Any N WHERE U login "syt", '
-                                      'U in_state S, S name N').rows[0][0],
-                         'activated')
-        # test reactivating the user isn't enough to authenticate, as the native source
-        # refuse to authenticate user from other sources
-        os.system(deletecmd)
-        self._pull()
-        user = self.execute('CWUser U WHERE U login "syt"').get_entity(0, 0)
-        user.cw_adapt_to('IWorkflowable').fire_transition('activate')
-        self.commit()
-        self.assertRaises(AuthenticationError, self.repo.connect, 'syt', password='syt')
-
-class LDAPFeedSourceTC(LDAPTestBase):
-    test_db_id = 'ldap-feed'
-
-    @classmethod
-    def pre_setup_database(cls, session, config):
-        session.create_entity('CWSource', name=u'ldapuser', type=u'ldapfeed', parser=u'ldapfeed',
-                              url=URL, config=CONFIG)
-        session.commit()
-        isession = session.repo.internal_session(safe=True)
-        lfsource = isession.repo.sources_by_uri['ldapuser']
-        stats = lfsource.pull_data(isession, force=True, raise_on_error=True)
-
-    def setUp(self):
-        super(LDAPFeedSourceTC, self).setUp()
-        # ldap source url in the database may use a different port as the one
-        # just attributed
-        lfsource = self.repo.sources_by_uri['ldapuser']
-        lfsource.urls = [URL]
-
-    def assertMetadata(self, entity):
-        self.assertTrue(entity.creation_date)
-        self.assertTrue(entity.modification_date)
-
-    def test_authenticate(self):
-        source = self.repo.sources_by_uri['ldapuser']
-        self.session.set_cnxset()
-        # ensure we won't be logged against
-        self.assertRaises(AuthenticationError,
-                          source.authenticate, self.session, 'toto', 'toto')
-        self.assertTrue(source.authenticate(self.session, 'syt', 'syt'))
-        self.assertTrue(self.repo.connect('syt', password='syt'))
-
-    def test_base(self):
-        # check a known one
-        rset = self.sexecute('CWUser X WHERE X login %(login)s', {'login': 'syt'})
-        e = rset.get_entity(0, 0)
-        self.assertEqual(e.login, 'syt')
-        e.complete()
-        self.assertMetadata(e)
-        self.assertEqual(e.firstname, None)
-        self.assertEqual(e.surname, None)
-        self.assertEqual(e.in_group[0].name, 'users')
-        self.assertEqual(e.owned_by[0].login, 'syt')
-        self.assertEqual(e.created_by, ())
-        self.assertEqual(e.primary_email[0].address, 'Sylvain Thenault')
-        # email content should be indexed on the user
-        rset = self.sexecute('CWUser X WHERE X has_text "thenault"')
-        self.assertEqual(rset.rows, [[e.eid]])
-
-    def test_copy_to_system_source(self):
-        source = self.repo.sources_by_uri['ldapuser']
-        eid = self.sexecute('CWUser X WHERE X login %(login)s', {'login': 'syt'})[0][0]
-        self.sexecute('SET X cw_source S WHERE X eid %(x)s, S name "system"', {'x': eid})
-        self.commit()
-        source.reset_caches()
-        rset = self.sexecute('CWUser X WHERE X login %(login)s', {'login': 'syt'})
-        self.assertEqual(len(rset), 1)
-        e = rset.get_entity(0, 0)
-        self.assertEqual(e.eid, eid)
-        self.assertEqual(e.cw_metainformation(), {'source': {'type': u'native',
-                                                             'uri': u'system',
-                                                             'use-cwuri-as-url': False},
-                                                  'type': 'CWUser',
-                                                  'extid': None})
-        self.assertEqual(e.cw_source[0].name, 'system')
-        self.assertTrue(e.creation_date)
-        self.assertTrue(e.modification_date)
-        source.pull_data(self.session)
-        rset = self.sexecute('CWUser X WHERE X login %(login)s', {'login': 'syt'})
-        self.assertEqual(len(rset), 1)
-        self.assertTrue(self.repo.system_source.authenticate(
-                self.session, 'syt', password='syt'))
-
-
-class LDAPUserSourceTC(LDAPFeedSourceTC):
-    test_db_id = 'ldap-user'
-    tags = CubicWebTC.tags | Tags(('ldap'))
-
-    @classmethod
-    def pre_setup_database(cls, session, config):
-        session.create_entity('CWSource', name=u'ldapuser', type=u'ldapuser',
-                              url=URL, config=CONFIG)
-        session.commit()
-        # XXX keep it there
-        session.execute('CWUser U')
-
-    def assertMetadata(self, entity):
-        self.assertEqual(entity.creation_date, None)
-        self.assertEqual(entity.modification_date, None)
-
-    def test_synchronize(self):
-        source = self.repo.sources_by_uri['ldapuser']
-        source.synchronize()
-
-    def test_base(self):
-        # check a known one
-        rset = self.sexecute('CWUser X WHERE X login %(login)s', {'login': 'syt'})
-        e = rset.get_entity(0, 0)
-        self.assertEqual(e.login, 'syt')
-        e.complete()
-        self.assertMetadata(e)
-        self.assertEqual(e.firstname, None)
-        self.assertEqual(e.surname, None)
-        self.assertEqual(e.in_group[0].name, 'users')
-        self.assertEqual(e.owned_by[0].login, 'syt')
-        self.assertEqual(e.created_by, ())
-        self.assertEqual(e.primary_email[0].address, 'Sylvain Thenault')
-        # email content should be indexed on the user
-        rset = self.sexecute('CWUser X WHERE X has_text "thenault"')
-        self.assertEqual(rset.rows, [[e.eid]])
-
-    def test_not(self):
-        eid = self.sexecute('CWUser X WHERE X login %(login)s', {'login': 'syt'})[0][0]
-        rset = self.sexecute('CWUser X WHERE NOT X eid %s' % eid)
-        self.assert_(rset)
-        self.assert_(not eid in (r[0] for r in rset))
-
-    def test_multiple(self):
-        seid = self.sexecute('CWUser X WHERE X login %(login)s', {'login': 'syt'})[0][0]
-        aeid = self.sexecute('CWUser X WHERE X login %(login)s', {'login': 'adim'})[0][0]
-        rset = self.sexecute('CWUser X, Y WHERE X login %(syt)s, Y login %(adim)s',
-                            {'syt': 'syt', 'adim': 'adim'})
-        self.assertEqual(rset.rows, [[seid, aeid]])
-        rset = self.sexecute('Any X,Y,L WHERE X login L, X login %(syt)s, Y login %(adim)s',
-                            {'syt': 'syt', 'adim': 'adim'})
-        self.assertEqual(rset.rows, [[seid, aeid, 'syt']])
-
-    def test_in(self):
-        seid = self.sexecute('CWUser X WHERE X login %(login)s', {'login': 'syt'})[0][0]
-        aeid = self.sexecute('CWUser X WHERE X login %(login)s', {'login': 'adim'})[0][0]
-        rset = self.sexecute('Any X,L ORDERBY L WHERE X login IN("%s", "%s"), X login L' % ('syt', 'adim'))
-        self.assertEqual(rset.rows, [[aeid, 'adim'], [seid, 'syt']])
-
-    def test_relations(self):
-        eid = self.sexecute('CWUser X WHERE X login %(login)s', {'login': 'syt'})[0][0]
-        rset = self.sexecute('Any X,E WHERE X is CWUser, X login L, X primary_email E')
-        self.assert_(eid in (r[0] for r in rset))
-        rset = self.sexecute('Any X,L,E WHERE X is CWUser, X login L, X primary_email E')
-        self.assert_('syt' in (r[1] for r in rset))
-
-    def test_count(self):
-        nbusers = self.sexecute('Any COUNT(X) WHERE X is CWUser')[0][0]
-        # just check this is a possible number
-        self.assert_(nbusers > 1, nbusers)
-        self.assert_(nbusers < 30, nbusers)
-
-    def test_upper(self):
-        eid = self.sexecute('CWUser X WHERE X login %(login)s', {'login': 'syt'})[0][0]
-        rset = self.sexecute('Any UPPER(L) WHERE X eid %s, X login L' % eid)
-        self.assertEqual(rset[0][0], 'syt'.upper())
-
-    def test_unknown_attr(self):
-        eid = self.sexecute('CWUser X WHERE X login %(login)s', {'login': 'syt'})[0][0]
-        rset = self.sexecute('Any L,C,M WHERE X eid %s, X login L, '
-                            'X creation_date C, X modification_date M' % eid)
-        self.assertEqual(rset[0][0], 'syt')
-        self.assertEqual(rset[0][1], None)
-        self.assertEqual(rset[0][2], None)
-
-    def test_sort(self):
-        logins = [l for l, in self.sexecute('Any L ORDERBY L WHERE X login L')]
-        self.assertEqual(logins, sorted(logins))
-
-    def test_lower_sort(self):
-        logins = [l for l, in self.sexecute('Any L ORDERBY lower(L) WHERE X login L')]
-        self.assertEqual(logins, sorted(logins))
-
-    def test_or(self):
-        rset = self.sexecute('DISTINCT Any X WHERE X login %(login)s OR (X in_group G, G name "managers")',
-                            {'login': 'syt'})
-        self.assertEqual(len(rset), 2, rset.rows) # syt + admin
-
-    def test_nonregr_set_owned_by(self):
-        # test that when a user coming from ldap is triggering a transition
-        # the related TrInfo has correct owner information
-        self.sexecute('SET X in_group G WHERE X login %(syt)s, G name "managers"', {'syt': 'syt'})
-        self.commit()
-        syt = self.sexecute('CWUser X WHERE X login %(login)s', {'login': 'syt'}).get_entity(0, 0)
-        self.assertEqual([g.name for g in syt.in_group], ['managers', 'users'])
-        cnx = self.login('syt', password='syt')
-        cu = cnx.cursor()
-        adim = cu.execute('CWUser X WHERE X login %(login)s', {'login': 'adim'}).get_entity(0, 0)
-        iworkflowable = adim.cw_adapt_to('IWorkflowable')
-        iworkflowable.fire_transition('deactivate')
-        try:
-            cnx.commit()
-            adim.cw_clear_all_caches()
-            self.assertEqual(adim.in_state[0].name, 'deactivated')
-            trinfo = iworkflowable.latest_trinfo()
-            self.assertEqual(trinfo.owned_by[0].login, 'syt')
-            # select from_state to skip the user's creation TrInfo
-            rset = self.sexecute('Any U ORDERBY D DESC WHERE WF wf_info_for X,'
-                                'WF creation_date D, WF from_state FS,'
-                                'WF owned_by U?, X eid %(x)s',
-                                {'x': adim.eid})
-            self.assertEqual(rset.rows, [[syt.eid]])
-        finally:
-            # restore db state
-            self.restore_connection()
-            adim = self.sexecute('CWUser X WHERE X login %(login)s', {'login': 'adim'}).get_entity(0, 0)
-            adim.cw_adapt_to('IWorkflowable').fire_transition('activate')
-            self.sexecute('DELETE X in_group G WHERE X login %(syt)s, G name "managers"', {'syt': 'syt'})
-
-    def test_same_column_names(self):
-        self.sexecute('Any X, Y WHERE X copain Y, X login "comme", Y login "cochon"')
-
-    def test_multiple_entities_from_different_sources(self):
-        req = self.request()
-        self.create_user(req, 'cochon')
-        self.assertTrue(self.sexecute('Any X,Y WHERE X login %(syt)s, Y login "cochon"', {'syt': 'syt'}))
-
-    def test_exists1(self):
-        self.session.set_cnxset()
-        self.session.create_entity('CWGroup', name=u'bougloup1')
-        self.session.create_entity('CWGroup', name=u'bougloup2')
-        self.sexecute('SET U in_group G WHERE G name ~= "bougloup%", U login "admin"')
-        self.sexecute('SET U in_group G WHERE G name = "bougloup1", U login %(syt)s', {'syt': 'syt'})
-        rset = self.sexecute('Any L,SN ORDERBY L WHERE X in_state S, '
-                             'S name SN, X login L, EXISTS(X in_group G, G name ~= "bougloup%")')
-        self.assertEqual(rset.rows, [['admin', 'activated'], ['syt', 'activated']])
-
-    def test_exists2(self):
-        req = self.request()
-        self.create_user(req, 'comme')
-        self.create_user(req, 'cochon')
-        self.sexecute('SET X copain Y WHERE X login "comme", Y login "cochon"')
-        rset = self.sexecute('Any GN ORDERBY GN WHERE X in_group G, G name GN, '
-                             '(G name "managers" OR EXISTS(X copain T, T login in ("comme", "cochon")))')
-        self.assertEqual(rset.rows, [['managers'], ['users']])
-
-    def test_exists3(self):
-        req = self.request()
-        self.create_user(req, 'comme')
-        self.create_user(req, 'cochon')
-        self.sexecute('SET X copain Y WHERE X login "comme", Y login "cochon"')
-        self.assertTrue(self.sexecute('Any X, Y WHERE X copain Y, X login "comme", Y login "cochon"'))
-        self.sexecute('SET X copain Y WHERE X login %(syt)s, Y login "cochon"', {'syt': 'syt'})
-        self.assertTrue(self.sexecute('Any X, Y WHERE X copain Y, X login %(syt)s, Y login "cochon"', {'syt': 'syt'}))
-        rset = self.sexecute('Any GN,L WHERE X in_group G, X login L, G name GN, G name "managers" '
-                             'OR EXISTS(X copain T, T login in ("comme", "cochon"))')
-        self.assertEqual(sorted(rset.rows), [['managers', 'admin'], ['users', 'comme'], ['users', 'syt']])
-
-    def test_exists4(self):
-        req = self.request()
-        self.create_user(req, 'comme')
-        self.create_user(req, 'cochon', groups=('users', 'guests'))
-        self.create_user(req, 'billy')
-        self.sexecute('SET X copain Y WHERE X login "comme", Y login "cochon"')
-        self.sexecute('SET X copain Y WHERE X login "cochon", Y login "cochon"')
-        self.sexecute('SET X copain Y WHERE X login "comme", Y login "billy"')
-        self.sexecute('SET X copain Y WHERE X login %(syt)s, Y login "billy"', {'syt': 'syt'})
-        # search for group name, login where
-        #   CWUser copain with "comme" or "cochon" AND same login as the copain
-        # OR
-        #   CWUser in_state activated AND not copain with billy
-        #
-        # SO we expect everybody but "comme" and "syt"
-        rset= self.sexecute('Any GN,L WHERE X in_group G, X login L, G name GN, '
-                           'EXISTS(X copain T, T login L, T login in ("comme", "cochon")) OR '
-                           'EXISTS(X in_state S, S name "activated", NOT X copain T2, T2 login "billy")')
-        all = self.sexecute('Any GN, L WHERE X in_group G, X login L, G name GN')
-        all.rows.remove(['users', 'comme'])
-        all.rows.remove(['users', 'syt'])
-        self.assertEqual(sorted(rset.rows), sorted(all.rows))
-
-    def test_exists5(self):
-        req = self.request()
-        self.create_user(req, 'comme')
-        self.create_user(req, 'cochon', groups=('users', 'guests'))
-        self.create_user(req, 'billy')
-        self.sexecute('SET X copain Y WHERE X login "comme", Y login "cochon"')
-        self.sexecute('SET X copain Y WHERE X login "cochon", Y login "cochon"')
-        self.sexecute('SET X copain Y WHERE X login "comme", Y login "billy"')
-        self.sexecute('SET X copain Y WHERE X login %(syt)s, Y login "cochon"', {'syt': 'syt'})
-        rset= self.sexecute('Any L WHERE X login L, '
-                           'EXISTS(X copain T, T login in ("comme", "cochon")) AND '
-                           'NOT EXISTS(X copain T2, T2 login "billy")')
-        self.assertEqual(sorted(rset.rows), [['cochon'], ['syt']])
-        rset= self.sexecute('Any GN,L WHERE X in_group G, X login L, G name GN, '
-                           'EXISTS(X copain T, T login in ("comme", "cochon")) AND '
-                           'NOT EXISTS(X copain T2, T2 login "billy")')
-        self.assertEqual(sorted(rset.rows), [['guests', 'cochon'],
-                                              ['users', 'cochon'],
-                                              ['users', 'syt']])
-
-    def test_cd_restriction(self):
-        rset = self.sexecute('CWUser X WHERE X creation_date > "2009-02-01"')
-        # admin/anon but no ldap user since it doesn't support creation_date
-        self.assertEqual(sorted(e.login for e in rset.entities()),
-                          ['admin', 'anon'])
-
-    def test_union(self):
-        afeids = self.sexecute('State X')
-        ueids = self.sexecute('CWUser X')
-        rset = self.sexecute('(Any X WHERE X is State) UNION (Any X WHERE X is CWUser)')
-        self.assertEqual(sorted(r[0] for r in rset.rows),
-                          sorted(r[0] for r in afeids + ueids))
-
-    def _init_security_test(self):
-        req = self.request()
-        self.create_user(req, 'iaminguestsgrouponly', groups=('guests',))
-        cnx = self.login('iaminguestsgrouponly')
-        return cnx.cursor()
-
-    def test_security1(self):
-        cu = self._init_security_test()
-        rset = cu.execute('CWUser X WHERE X login %(login)s', {'login': 'syt'})
-        self.assertEqual(rset.rows, [])
-        rset = cu.execute('Any X WHERE X login "iaminguestsgrouponly"')
-        self.assertEqual(len(rset.rows), 1)
-
-    def test_security2(self):
-        cu = self._init_security_test()
-        rset = cu.execute('Any X WHERE X has_text %(syt)s', {'syt': 'syt'})
-        self.assertEqual(rset.rows, [])
-        rset = cu.execute('Any X WHERE X has_text "iaminguestsgrouponly"')
-        self.assertEqual(len(rset.rows), 1)
-
-    def test_security3(self):
-        cu = self._init_security_test()
-        rset = cu.execute('Any F WHERE X has_text %(syt)s, X firstname F', {'syt': 'syt'})
-        self.assertEqual(rset.rows, [])
-        rset = cu.execute('Any F WHERE X has_text "iaminguestsgrouponly", X firstname F')
-        self.assertEqual(rset.rows, [[None]])
-
-    def test_nonregr1(self):
-        self.sexecute('Any X,AA ORDERBY AA DESC WHERE E eid %(x)s, E owned_by X, '
-                     'X modification_date AA',
-                     {'x': self.session.user.eid})
-
-    def test_nonregr2(self):
-        self.sexecute('Any X,L,AA WHERE E eid %(x)s, E owned_by X, '
-                     'X login L, X modification_date AA',
-                     {'x': self.session.user.eid})
-
-    def test_nonregr3(self):
-        self.sexecute('Any X,AA ORDERBY AA DESC WHERE E eid %(x)s, '
-                     'X modification_date AA',
-                     {'x': self.session.user.eid})
-
-    def test_nonregr4(self):
-        emaileid = self.sexecute('INSERT EmailAddress X: X address "toto@logilab.org"')[0][0]
-        self.sexecute('Any X,AA WHERE X use_email Y, Y eid %(x)s, X modification_date AA',
-                     {'x': emaileid})
-
-    def test_nonregr5(self):
-        # original jpl query:
-        # Any X, NOW - CD, P WHERE P is Project, U interested_in P, U is CWUser,
-        # U login "sthenault", X concerns P, X creation_date CD ORDERBY CD DESC LIMIT 5
-        rql = ('Any X, NOW - CD, P ORDERBY CD DESC LIMIT 5 WHERE P bookmarked_by U, '
-               'U login "%s", P is X, X creation_date CD') % self.session.user.login
-        self.sexecute(rql, )#{'x': })
-
-    def test_nonregr6(self):
-        self.sexecute('Any B,U,UL GROUPBY B,U,UL WHERE B created_by U?, B is File '
-                     'WITH U,UL BEING (Any U,UL WHERE ME eid %(x)s, (EXISTS(U identity ME) '
-                     'OR (EXISTS(U in_group G, G name IN("managers", "staff")))) '
-                     'OR (EXISTS(U in_group H, ME in_group H, NOT H name "users")), U login UL, U is CWUser)',
-                     {'x': self.session.user.eid})
-
-class GlobTrFuncTC(TestCase):
-
-    def test_count(self):
-        trfunc = GlobTrFunc('count', 0)
-        res = trfunc.apply([[1], [2], [3], [4]])
-        self.assertEqual(res, [[4]])
-        trfunc = GlobTrFunc('count', 1)
-        res = trfunc.apply([[1, 2], [2, 4], [3, 6], [1, 5]])
-        self.assertEqual(res, [[1, 2], [2, 1], [3, 1]])
-
-    def test_sum(self):
-        trfunc = GlobTrFunc('sum', 0)
-        res = trfunc.apply([[1], [2], [3], [4]])
-        self.assertEqual(res, [[10]])
-        trfunc = GlobTrFunc('sum', 1)
-        res = trfunc.apply([[1, 2], [2, 4], [3, 6], [1, 5]])
-        self.assertEqual(res, [[1, 7], [2, 4], [3, 6]])
-
-    def test_min(self):
-        trfunc = GlobTrFunc('min', 0)
-        res = trfunc.apply([[1], [2], [3], [4]])
-        self.assertEqual(res, [[1]])
-        trfunc = GlobTrFunc('min', 1)
-        res = trfunc.apply([[1, 2], [2, 4], [3, 6], [1, 5]])
-        self.assertEqual(res, [[1, 2], [2, 4], [3, 6]])
-
-    def test_max(self):
-        trfunc = GlobTrFunc('max', 0)
-        res = trfunc.apply([[1], [2], [3], [4]])
-        self.assertEqual(res, [[4]])
-        trfunc = GlobTrFunc('max', 1)
-        res = trfunc.apply([[1, 2], [2, 4], [3, 6], [1, 5]])
-        self.assertEqual(res, [[1, 5], [2, 4], [3, 6]])
-
-
-class RQL2LDAPFilterTC(RQLGeneratorTC):
-
-    tags = RQLGeneratorTC.tags | Tags(('ldap'))
-
-    @property
-    def schema(self):
-        """return the application schema"""
-        return self._schema
-
-    def setUp(self):
-        self.handler = get_test_db_handler(LDAPUserSourceTC.config)
-        self.handler.build_db_cache('ldap-user', LDAPUserSourceTC.pre_setup_database)
-        self.handler.restore_database('ldap-user')
-        self._repo = repo = self.handler.get_repo()
-        self._schema = repo.schema
-        super(RQL2LDAPFilterTC, self).setUp()
-        ldapsource = repo.sources[-1]
-        self.cnxset = repo._get_cnxset()
-        session = mock_object(cnxset=self.cnxset)
-        self.o = RQL2LDAPFilter(ldapsource, session)
-        self.ldapclasses = ''.join(ldapsource.base_filters)
-
-    def tearDown(self):
-        self._repo.turn_repo_off()
-        super(RQL2LDAPFilterTC, self).tearDown()
-
-    def test_base(self):
-        rqlst = self._prepare('CWUser X WHERE X login "toto"').children[0]
-        self.assertEqual(self.o.generate(rqlst, 'X')[1],
-                          '(&%s(uid=toto))' % self.ldapclasses)
-
-    def test_kwargs(self):
-        rqlst = self._prepare('CWUser X WHERE X login %(x)s').children[0]
-        self.o._args = {'x': "toto"}
-        self.assertEqual(self.o.generate(rqlst, 'X')[1],
-                          '(&%s(uid=toto))' % self.ldapclasses)
-
-    def test_get_attr(self):
-        rqlst = self._prepare('Any X WHERE E firstname X, E eid 12').children[0]
-        self.assertRaises(UnknownEid, self.o.generate, rqlst, 'E')
-
-
-if __name__ == '__main__':
-    unittest_main()
--- a/server/test/unittest_migractions.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/server/test/unittest_migractions.py	Fri Jun 14 16:26:25 2013 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -17,7 +17,6 @@
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """unit tests for module cubicweb.server.migractions"""
 
-from copy import deepcopy
 from datetime import date
 from os.path import join
 
@@ -31,6 +30,8 @@
 from cubicweb.server.sqlutils import SQL_PREFIX
 from cubicweb.server.migractions import *
 
+import cubicweb.devtools
+
 migrschema = None
 def tearDownModule(*args):
     global migrschema
@@ -40,17 +41,17 @@
 
 class MigrationCommandsTC(CubicWebTC):
 
+    configcls = cubicweb.devtools.TestServerConfiguration
+
     tags = CubicWebTC.tags | Tags(('server', 'migration', 'migractions'))
 
     @classmethod
     def _init_repo(cls):
         super(MigrationCommandsTC, cls)._init_repo()
         # we have to read schema from the database to get eid for schema entities
+        cls.repo.set_schema(cls.repo.deserialize_schema(), resetvreg=False)
+        # hack to read the schema from data/migrschema
         config = cls.config
-        config._cubes = None
-        cls.repo.fill_schema()
-        cls.origschema = deepcopy(cls.repo.schema)
-        # hack to read the schema from data/migrschema
         config.appid = join('data', 'migratedapp')
         config._apphome = cls.datapath('migratedapp')
         global migrschema
@@ -73,8 +74,8 @@
 
     def test_add_attribute_int(self):
         self.assertFalse('whatever' in self.schema)
-        self.request().create_entity('Note')
-        self.commit()
+        self.session.create_entity('Note')
+        self.session.commit(free_cnxset=False)
         orderdict = dict(self.mh.rqlexec('Any RTN, O WHERE X name "Note", RDEF from_entity X, '
                                          'RDEF relation_type RT, RDEF ordernum O, RT name RTN'))
         self.mh.cmd_add_attribute('Note', 'whatever')
@@ -83,10 +84,10 @@
         self.assertEqual(self.schema['whatever'].objects(), ('Int',))
         self.assertEqual(self.schema['Note'].default('whatever'), 2)
         # test default value set on existing entities
-        note = self.execute('Note X').get_entity(0, 0)
+        note = self.session.execute('Note X').get_entity(0, 0)
         self.assertEqual(note.whatever, 2)
         # test default value set for next entities
-        self.assertEqual(self.request().create_entity('Note').whatever, 2)
+        self.assertEqual(self.session.create_entity('Note').whatever, 2)
         # test attribute order
         orderdict2 = dict(self.mh.rqlexec('Any RTN, O WHERE X name "Note", RDEF from_entity X, '
                                           'RDEF relation_type RT, RDEF ordernum O, RT name RTN'))
@@ -107,8 +108,8 @@
 
     def test_add_attribute_varchar(self):
         self.assertFalse('whatever' in self.schema)
-        self.request().create_entity('Note')
-        self.commit()
+        self.session.create_entity('Note')
+        self.session.commit(free_cnxset=False)
         self.assertFalse('shortpara' in self.schema)
         self.mh.cmd_add_attribute('Note', 'shortpara')
         self.assertTrue('shortpara' in self.schema)
@@ -118,11 +119,10 @@
         notesql = self.mh.sqlexec("SELECT sql FROM sqlite_master WHERE type='table' and name='%sNote'" % SQL_PREFIX)[0][0]
         fields = dict(x.strip().split()[:2] for x in notesql.split('(', 1)[1].rsplit(')', 1)[0].split(','))
         self.assertEqual(fields['%sshortpara' % SQL_PREFIX], 'varchar(64)')
-        req = self.request()
         # test default value set on existing entities
-        self.assertEqual(req.execute('Note X').get_entity(0, 0).shortpara, 'hop')
+        self.assertEqual(self.session.execute('Note X').get_entity(0, 0).shortpara, 'hop')
         # test default value set for next entities
-        self.assertEqual(req.create_entity('Note').shortpara, 'hop')
+        self.assertEqual(self.session.create_entity('Note').shortpara, 'hop')
         self.mh.rollback()
 
     def test_add_datetime_with_default_value_attribute(self):
@@ -195,9 +195,9 @@
         self.mh.cmd_add_entity_type('Folder2')
         self.assertTrue('Folder2' in self.schema)
         self.assertTrue('Old' in self.schema)
-        self.assertTrue(self.execute('CWEType X WHERE X name "Folder2"'))
+        self.assertTrue(self.session.execute('CWEType X WHERE X name "Folder2"'))
         self.assertTrue('filed_under2' in self.schema)
-        self.assertTrue(self.execute('CWRType X WHERE X name "filed_under2"'))
+        self.assertTrue(self.session.execute('CWRType X WHERE X name "filed_under2"'))
         self.schema.rebuild_infered_relations()
         self.assertEqual(sorted(str(rs) for rs in self.schema['Folder2'].subject_relations()),
                           ['created_by', 'creation_date', 'cw_source', 'cwuri',
@@ -225,15 +225,15 @@
         done = wf.add_state(u'done')
         wf.add_transition(u'redoit', done, todo)
         wf.add_transition(u'markasdone', todo, done)
-        self.commit()
+        self.session.commit(free_cnxset=False)
         eschema = self.schema.eschema('Folder2')
         self.mh.cmd_drop_entity_type('Folder2')
         self.assertFalse('Folder2' in self.schema)
-        self.assertFalse(self.execute('CWEType X WHERE X name "Folder2"'))
+        self.assertFalse(self.session.execute('CWEType X WHERE X name "Folder2"'))
         # test automatic workflow deletion
-        self.assertFalse(self.execute('Workflow X WHERE NOT X workflow_of ET'))
-        self.assertFalse(self.execute('State X WHERE NOT X state_of WF'))
-        self.assertFalse(self.execute('Transition X WHERE NOT X transition_of WF'))
+        self.assertFalse(self.session.execute('Workflow X WHERE NOT X workflow_of ET'))
+        self.assertFalse(self.session.execute('State X WHERE NOT X state_of WF'))
+        self.assertFalse(self.session.execute('Transition X WHERE NOT X transition_of WF'))
 
     def test_rename_entity_type(self):
         entity = self.mh.create_entity('Old', name=u'old')
@@ -268,7 +268,7 @@
         self.mh.create_entity('Personne', nom=u'tot')
         self.mh.create_entity('Affaire')
         self.mh.rqlexec('SET X concerne2 Y WHERE X is Personne, Y is Affaire')
-        self.commit()
+        self.session.commit(free_cnxset=False)
         self.mh.cmd_drop_relation_definition('Personne', 'concerne2', 'Affaire')
         self.assertTrue('concerne2' in self.schema)
         self.mh.cmd_drop_relation_definition('Personne', 'concerne2', 'Note')
@@ -290,7 +290,7 @@
         self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()),
                           ['Affaire', 'Division', 'Note', 'Societe', 'SubDivision'])
         # trick: overwrite self.maxeid to avoid deletion of just reintroduced types
-        self.maxeid = self.execute('Any MAX(X)')[0][0]
+        self.maxeid = self.session.execute('Any MAX(X)')[0][0]
 
     def test_drop_relation_definition_with_specialization(self):
         self.assertEqual(sorted(str(e) for e in self.schema['concerne'].subjects()),
@@ -314,7 +314,7 @@
         self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()),
                           ['Affaire', 'Division', 'Note', 'Societe', 'SubDivision'])
         # trick: overwrite self.maxeid to avoid deletion of just reintroduced types
-        self.maxeid = self.execute('Any MAX(X)')[0][0]
+        self.maxeid = self.session.execute('Any MAX(X)')[0][0]
 
     def test_rename_relation(self):
         self.skipTest('implement me')
@@ -495,8 +495,8 @@
                                           ('Note', 'Bookmark')]))
                 self.assertEqual(sorted(schema['see_also'].subjects()), ['Bookmark', 'Folder', 'Note'])
                 self.assertEqual(sorted(schema['see_also'].objects()), ['Bookmark', 'Folder', 'Note'])
-                self.assertEqual(self.execute('Any X WHERE X pkey "system.version.email"').rowcount, 0)
-                self.assertEqual(self.execute('Any X WHERE X pkey "system.version.file"').rowcount, 0)
+                self.assertEqual(self.session.execute('Any X WHERE X pkey "system.version.email"').rowcount, 0)
+                self.assertEqual(self.session.execute('Any X WHERE X pkey "system.version.file"').rowcount, 0)
             except :
                 import traceback
                 traceback.print_exc()
@@ -520,16 +520,16 @@
             self.assertEqual(sorted(schema['see_also'].objects()), ['Bookmark', 'EmailThread', 'Folder', 'Note'])
             from cubes.email.__pkginfo__ import version as email_version
             from cubes.file.__pkginfo__ import version as file_version
-            self.assertEqual(self.execute('Any V WHERE X value V, X pkey "system.version.email"')[0][0],
+            self.assertEqual(self.session.execute('Any V WHERE X value V, X pkey "system.version.email"')[0][0],
                               email_version)
-            self.assertEqual(self.execute('Any V WHERE X value V, X pkey "system.version.file"')[0][0],
+            self.assertEqual(self.session.execute('Any V WHERE X value V, X pkey "system.version.file"')[0][0],
                               file_version)
             # trick: overwrite self.maxeid to avoid deletion of just reintroduced
             #        types (and their associated tables!)
-            self.maxeid = self.execute('Any MAX(X)')[0][0]
+            self.maxeid = self.session.execute('Any MAX(X)')[0][0]
             # why this commit is necessary is unclear to me (though without it
             # next test may fail complaining of missing tables
-            self.commit()
+            self.session.commit(free_cnxset=False)
 
 
     @tag('longrun')
@@ -554,10 +554,10 @@
             self.assertTrue('email' in self.config.cubes())
             # trick: overwrite self.maxeid to avoid deletion of just reintroduced
             #        types (and their associated tables!)
-            self.maxeid = self.execute('Any MAX(X)')[0][0]
+            self.maxeid = self.session.execute('Any MAX(X)')[0][0]
             # why this commit is necessary is unclear to me (though without it
             # next test may fail complaining of missing tables
-            self.commit()
+            self.session.commit(free_cnxset=False)
 
     def test_remove_dep_cube(self):
         with self.assertRaises(ConfigurationError) as cm:
@@ -577,16 +577,16 @@
                           ['Note', 'Text'])
         self.assertEqual(self.schema['Text'].specializes().type, 'Para')
         # test columns have been actually added
-        text = self.execute('INSERT Text X: X para "hip", X summary "hop", X newattr "momo"').get_entity(0, 0)
-        note = self.execute('INSERT Note X: X para "hip", X shortpara "hop", X newattr "momo", X unique_id "x"').get_entity(0, 0)
-        aff = self.execute('INSERT Affaire X').get_entity(0, 0)
-        self.assertTrue(self.execute('SET X newnotinlined Y WHERE X eid %(x)s, Y eid %(y)s',
+        text = self.session.execute('INSERT Text X: X para "hip", X summary "hop", X newattr "momo"').get_entity(0, 0)
+        note = self.session.execute('INSERT Note X: X para "hip", X shortpara "hop", X newattr "momo", X unique_id "x"').get_entity(0, 0)
+        aff = self.session.execute('INSERT Affaire X').get_entity(0, 0)
+        self.assertTrue(self.session.execute('SET X newnotinlined Y WHERE X eid %(x)s, Y eid %(y)s',
                                      {'x': text.eid, 'y': aff.eid}))
-        self.assertTrue(self.execute('SET X newnotinlined Y WHERE X eid %(x)s, Y eid %(y)s',
+        self.assertTrue(self.session.execute('SET X newnotinlined Y WHERE X eid %(x)s, Y eid %(y)s',
                                      {'x': note.eid, 'y': aff.eid}))
-        self.assertTrue(self.execute('SET X newinlined Y WHERE X eid %(x)s, Y eid %(y)s',
+        self.assertTrue(self.session.execute('SET X newinlined Y WHERE X eid %(x)s, Y eid %(y)s',
                                      {'x': text.eid, 'y': aff.eid}))
-        self.assertTrue(self.execute('SET X newinlined Y WHERE X eid %(x)s, Y eid %(y)s',
+        self.assertTrue(self.session.execute('SET X newinlined Y WHERE X eid %(x)s, Y eid %(y)s',
                                      {'x': note.eid, 'y': aff.eid}))
         # XXX remove specializes by ourselves, else tearDown fails when removing
         # Para because of Note inheritance. This could be fixed by putting the
@@ -598,8 +598,8 @@
         # specialization relationship...
         self.session.data['rebuild-infered'] = True
         try:
-            self.execute('DELETE X specializes Y WHERE Y name "Para"')
-            self.commit()
+            self.session.execute('DELETE X specializes Y WHERE Y name "Para"')
+            self.session.commit(free_cnxset=False)
         finally:
             self.session.data['rebuild-infered'] = False
         self.assertEqual(sorted(et.type for et in self.schema['Para'].specialized_by()),
--- a/server/test/unittest_repository.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/server/test/unittest_repository.py	Fri Jun 14 16:26:25 2013 +0200
@@ -1,5 +1,5 @@
 # -*- coding: iso-8859-1 -*-
-# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -29,6 +29,9 @@
 from logilab.common.testlib import TestCase, unittest_main
 
 from yams.constraints import UniqueConstraint
+from yams import register_base_type, unregister_base_type
+
+from logilab.database import get_db_helper
 
 from cubicweb import (BadConnectionId, RepositoryError, ValidationError,
                       UnknownEid, AuthenticationError, Unauthorized, QueryError)
@@ -41,6 +44,7 @@
 from cubicweb.server.sqlutils import SQL_PREFIX
 from cubicweb.server.hook import Hook
 from cubicweb.server.sources import native
+from cubicweb.server.session import SessionClosedError
 
 
 class RepositoryTC(CubicWebTC):
@@ -57,44 +61,6 @@
                           'type': u'violates unique_together constraints (cp, nom, type)'},
                      wraperr.exception.args[1])
 
-    def test_fill_schema(self):
-        origshema = self.repo.schema
-        try:
-            self.repo.schema = CubicWebSchema(self.repo.config.appid)
-            self.repo.config._cubes = None # avoid assertion error
-            self.repo.config.repairing = True # avoid versions checking
-            self.repo.fill_schema()
-            table = SQL_PREFIX + 'CWEType'
-            namecol = SQL_PREFIX + 'name'
-            finalcol = SQL_PREFIX + 'final'
-            self.session.set_cnxset()
-            cu = self.session.system_sql('SELECT %s FROM %s WHERE %s is NULL' % (
-                namecol, table, finalcol))
-            self.assertEqual(cu.fetchall(), [])
-            cu = self.session.system_sql('SELECT %s FROM %s WHERE %s=%%(final)s ORDER BY %s'
-                                         % (namecol, table, finalcol, namecol), {'final': True})
-            self.assertEqual(cu.fetchall(), [(u'BigInt',), (u'Boolean',), (u'Bytes',),
-                                             (u'Date',), (u'Datetime',),
-                                             (u'Decimal',),(u'Float',),
-                                             (u'Int',),
-                                             (u'Interval',), (u'Password',),
-                                             (u'String',),
-                                             (u'TZDatetime',), (u'TZTime',), (u'Time',)])
-            sql = ("SELECT etype.cw_eid, etype.cw_name, cstr.cw_eid, rel.eid_to "
-                   "FROM cw_CWUniqueTogetherConstraint as cstr, "
-                   "     relations_relation as rel, "
-                   "     cw_CWEType as etype "
-                   "WHERE cstr.cw_eid = rel.eid_from "
-                   "  AND cstr.cw_constraint_of = etype.cw_eid "
-                   "  AND etype.cw_name = 'Personne' "
-                   ";")
-            cu = self.session.system_sql(sql)
-            rows = cu.fetchall()
-            self.assertEqual(len(rows), 3)
-            self.test_unique_together()
-        finally:
-            self.repo.set_schema(origshema)
-
     def test_unique_together(self):
         person = self.repo.schema.eschema('Personne')
         self.assertEqual(len(person._unique_together), 1)
@@ -297,7 +263,7 @@
             repo.execute(cnxid, 'DELETE CWUser X WHERE X login "toto"')
             repo.commit(cnxid)
         try:
-            with self.assertRaises(Exception) as cm:
+            with self.assertRaises(SessionClosedError) as cm:
                 run_transaction()
             self.assertEqual(str(cm.exception), 'try to access connections set on a closed session %s' % cnxid)
         finally:
@@ -317,7 +283,8 @@
                                'constrained_by',
                                'cardinality', 'ordernum',
                                'indexed', 'fulltextindexed', 'internationalizable',
-                               'defaultval', 'description', 'description_format'])
+                               'defaultval', 'extra_props',
+                               'description', 'description_format'])
 
         self.assertEqual(schema.eschema('CWEType').main_attribute(), 'name')
         self.assertEqual(schema.eschema('State').main_attribute(), 'name')
@@ -382,7 +349,10 @@
             self.assertTrue(user._cw.vreg)
             from cubicweb.entities import authobjs
             self.assertIsInstance(user._cw.user, authobjs.CWUser)
+            # make sure the tcp connection is closed properly; yes, it's disgusting.
+            adapter = cnx._repo.adapter
             cnx.close()
+            adapter.release()
             done.append(True)
         finally:
             # connect monkey patch some method by default, remove them
@@ -584,6 +554,87 @@
             req.create_entity('Affaire', ref=u'AFF02')
             req.execute('SET A duration 10 WHERE A is Affaire')
 
+class SchemaDeserialTC(CubicWebTC):
+
+    appid = 'data-schemaserial'
+
+    @classmethod
+    def setUpClass(cls):
+        register_base_type('BabarTestType', ('jungle_speed',))
+        helper = get_db_helper('sqlite')
+        helper.TYPE_MAPPING['BabarTestType'] = 'TEXT'
+        helper.TYPE_CONVERTERS['BabarTestType'] = lambda x: '"%s"' % x
+        super(SchemaDeserialTC, cls).setUpClass()
+
+
+    @classmethod
+    def tearDownClass(cls):
+        unregister_base_type('BabarTestType')
+        helper = get_db_helper('sqlite')
+        helper.TYPE_MAPPING.pop('BabarTestType', None)
+        helper.TYPE_CONVERTERS.pop('BabarTestType', None)
+        super(SchemaDeserialTC, cls).tearDownClass()
+
+    def test_deserialization_base(self):
+        """Check the following deserialization
+
+        * all CWEType have a name
+        * Final type
+        * CWUniqueTogetherConstraint
+        * _unique_together content"""
+        origshema = self.repo.schema
+        try:
+            self.repo.config.repairing = True # avoid version checking
+            self.repo.set_schema(self.repo.deserialize_schema())
+            table = SQL_PREFIX + 'CWEType'
+            namecol = SQL_PREFIX + 'name'
+            finalcol = SQL_PREFIX + 'final'
+            self.session.set_cnxset()
+            cu = self.session.system_sql('SELECT %s FROM %s WHERE %s is NULL' % (
+                namecol, table, finalcol))
+            self.assertEqual(cu.fetchall(), [])
+            cu = self.session.system_sql('SELECT %s FROM %s WHERE %s=%%(final)s ORDER BY %s'
+                                         % (namecol, table, finalcol, namecol), {'final': True})
+            self.assertEqual(cu.fetchall(), [(u'BabarTestType',),
+                                             (u'BigInt',), (u'Boolean',), (u'Bytes',),
+                                             (u'Date',), (u'Datetime',),
+                                             (u'Decimal',),(u'Float',),
+                                             (u'Int',),
+                                             (u'Interval',), (u'Password',),
+                                             (u'String',),
+                                             (u'TZDatetime',), (u'TZTime',), (u'Time',)])
+            sql = ("SELECT etype.cw_eid, etype.cw_name, cstr.cw_eid, rel.eid_to "
+                   "FROM cw_CWUniqueTogetherConstraint as cstr, "
+                   "     relations_relation as rel, "
+                   "     cw_CWEType as etype "
+                   "WHERE cstr.cw_eid = rel.eid_from "
+                   "  AND cstr.cw_constraint_of = etype.cw_eid "
+                   "  AND etype.cw_name = 'Personne' "
+                   ";")
+            cu = self.session.system_sql(sql)
+            rows = cu.fetchall()
+            self.assertEqual(len(rows), 3)
+            person = self.repo.schema.eschema('Personne')
+            self.assertEqual(len(person._unique_together), 1)
+            self.assertItemsEqual(person._unique_together[0],
+                                  ('nom', 'prenom', 'inline2'))
+
+        finally:
+            self.repo.set_schema(origshema)
+
+    def test_custom_attribute_param(self):
+        origshema = self.repo.schema
+        try:
+            self.repo.config.repairing = True # avoid version checking
+            self.repo.set_schema(self.repo.deserialize_schema())
+            pes = self.repo.schema['Personne']
+            attr = pes.rdef('custom_field_of_jungle')
+            self.assertIn('jungle_speed', vars(attr))
+            self.assertEqual(42, attr.jungle_speed)
+        finally:
+            self.repo.set_schema(origshema)
+
+
 
 class DataHelpersTC(CubicWebTC):
 
--- a/server/test/unittest_rql2sql.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/server/test/unittest_rql2sql.py	Fri Jun 14 16:26:25 2013 +0200
@@ -106,12 +106,12 @@
     ("Personne P WHERE P test TRUE",
      '''SELECT _P.cw_eid
 FROM cw_Personne AS _P
-WHERE _P.cw_test=TRUE'''),
+WHERE _P.cw_test=True'''),
 
     ("Personne P WHERE P test false",
      '''SELECT _P.cw_eid
 FROM cw_Personne AS _P
-WHERE _P.cw_test=FALSE'''),
+WHERE _P.cw_test=False'''),
 
     ("Personne P WHERE P eid -1",
      '''SELECT -1'''),
@@ -532,7 +532,7 @@
     ("Any X WHERE X eid 0, X test TRUE",
      '''SELECT _X.cw_eid
 FROM cw_Personne AS _X
-WHERE _X.cw_eid=0 AND _X.cw_test=TRUE'''),
+WHERE _X.cw_eid=0 AND _X.cw_test=True'''),
 
     ('Any 1 WHERE X in_group G, X is CWUser',
      '''SELECT 1
@@ -1756,7 +1756,7 @@
 class SqlServer2005SQLGeneratorTC(PostgresSQLGeneratorTC):
     backend = 'sqlserver2005'
     def _norm_sql(self, sql):
-        return sql.strip().replace(' SUBSTR', ' SUBSTRING').replace(' || ', ' + ').replace(' ILIKE ', ' LIKE ').replace('TRUE', '1').replace('FALSE', '0')
+        return sql.strip().replace(' SUBSTR', ' SUBSTRING').replace(' || ', ' + ').replace(' ILIKE ', ' LIKE ')
 
     def test_has_text(self):
         for t in self._parse(HAS_TEXT_LG_INDEXER):
@@ -1941,7 +1941,7 @@
     backend = 'sqlite'
 
     def _norm_sql(self, sql):
-        return sql.strip().replace(' ILIKE ', ' LIKE ').replace('TRUE', '1').replace('FALSE', '0')
+        return sql.strip().replace(' ILIKE ', ' LIKE ')
 
     def test_date_extraction(self):
         self._check("Any MONTH(D) WHERE P is Personne, P creation_date D",
@@ -2108,7 +2108,7 @@
     backend = 'mysql'
 
     def _norm_sql(self, sql):
-        sql = sql.strip().replace(' ILIKE ', ' LIKE ').replace('TRUE', '1').replace('FALSE', '0')
+        sql = sql.strip().replace(' ILIKE ', ' LIKE ')
         newsql = []
         latest = None
         for line in sql.splitlines(False):
--- a/server/test/unittest_schemaserial.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/server/test/unittest_schemaserial.py	Fri Jun 14 16:26:25 2013 +0200
@@ -25,17 +25,32 @@
 from cubicweb.schema import CubicWebSchemaLoader
 from cubicweb.devtools import TestServerConfiguration
 
+from logilab.database import get_db_helper
+from yams import register_base_type, unregister_base_type
+
 def setUpModule(*args):
+    register_base_type('BabarTestType', ('jungle_speed',))
+    helper = get_db_helper('sqlite')
+    helper.TYPE_MAPPING['BabarTestType'] = 'TEXT'
+    helper.TYPE_CONVERTERS['BabarTestType'] = lambda x: '"%s"' % x
+
     global schema, config
     loader = CubicWebSchemaLoader()
-    config = TestServerConfiguration('data', apphome=Schema2RQLTC.datadir)
+    apphome = Schema2RQLTC.datadir + '-schemaserial'
+    config = TestServerConfiguration('data', apphome=apphome)
     config.bootstrap_cubes()
     schema = loader.load(config)
 
+
 def tearDownModule(*args):
     global schema, config
     del schema, config
 
+    unregister_base_type('BabarTestType')
+    helper = get_db_helper('sqlite')
+    helper.TYPE_MAPPING.pop('BabarTestType', None)
+    helper.TYPE_CONVERTERS.pop('BabarTestType', None)
+
 from cubicweb.server.schemaserial import *
 from cubicweb.server.schemaserial import _erperms2rql as erperms2rql
 
@@ -72,6 +87,13 @@
                                ('SET X specializes ET WHERE X eid %(x)s, ET eid %(et)s',
                                 {'et': None, 'x': None})])
 
+    def test_esche2rql_custom_type(self):
+        expected = [('INSERT CWEType X: X description %(description)s,X final %(final)s,X name %(name)s',
+                     {'description': u'',
+                     'name': u'BabarTestType', 'final': True},)]
+        got = list(eschema2rql(schema.eschema('BabarTestType')))
+        self.assertListEqual(expected, got)
+
     def test_rschema2rql1(self):
         self.assertListEqual(list(rschema2rql(schema.rschema('relation_type'), cstrtypemap)),
                              [
@@ -136,6 +158,42 @@
              {'x': None, 'ct': u'StaticVocabularyConstraint_eid', 'value': u"u'?*', u'1*', u'+*', u'**', u'?+', u'1+', u'++', u'*+', u'?1', u'11', u'+1', u'*1', u'??', u'1?', u'+?', u'*?'"}),
             ])
 
+    def test_rschema2rql_custom_type(self):
+        expected = [('INSERT CWRType X: X description %(description)s,X final %(final)s,'
+                     'X fulltext_container %(fulltext_container)s,X inlined %(inlined)s,'
+                     'X name %(name)s,X symmetric %(symmetric)s',
+                     {'description': u'',
+                      'final': True,
+                      'fulltext_container': None,
+                      'inlined': False,
+                      'name': u'custom_field_of_jungle',
+                      'symmetric': False}),
+                     ('INSERT CWAttribute X: X cardinality %(cardinality)s,'
+                      'X defaultval %(defaultval)s,X description %(description)s,'
+                      'X extra_props %(extra_props)s,X indexed %(indexed)s,'
+                      'X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,'
+                      'X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s',
+                      {'cardinality': u'?1',
+                       'defaultval': None,
+                       'description': u'',
+                       'extra_props': '{"jungle_speed": 42}',
+                       'indexed': False,
+                       'oe': None,
+                       'ordernum': 19,
+                       'rt': None,
+                       'se': None})]
+
+        got = list(rschema2rql(schema.rschema('custom_field_of_jungle'), cstrtypemap))
+        self.assertEqual(2, len(got))
+        # this is a custom type attribute with an extra parameter
+        self.assertIn('extra_props', got[1][1])
+        # extra_props is serialized as a Binary; unwrap it before comparing
+        extra_props = got[1][1]['extra_props']
+        from cubicweb import Binary
+        self.assertIsInstance(extra_props, Binary)
+        got[1][1]['extra_props'] = got[1][1]['extra_props'].getvalue()
+        self.assertListEqual(expected, got)
+
     def test_rdef2rql(self):
         self.assertListEqual(list(rdef2rql(schema['description_format'].rdefs[('CWRType', 'String')], cstrtypemap)),
                               [
--- a/server/test/unittest_session.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/server/test/unittest_session.py	Fri Jun 14 16:26:25 2013 +0200
@@ -17,6 +17,7 @@
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 
 from cubicweb.devtools.testlib import CubicWebTC
+from cubicweb.server.session import HOOKS_ALLOW_ALL, HOOKS_DENY_ALL
 
 class InternalSessionTC(CubicWebTC):
     def test_dbapi_query(self):
@@ -29,36 +30,36 @@
 
     def test_hooks_control(self):
         session = self.session
-        self.assertEqual(session.hooks_mode, session.HOOKS_ALLOW_ALL)
-        self.assertEqual(session.disabled_hook_categories, set())
-        self.assertEqual(session.enabled_hook_categories, set())
-        self.assertEqual(len(session._tx_data), 1)
+        self.assertEqual(HOOKS_ALLOW_ALL, session.hooks_mode)
+        self.assertEqual(set(), session.disabled_hook_categories)
+        self.assertEqual(set(), session.enabled_hook_categories)
+        self.assertEqual(1, len(session._txs))
         with session.deny_all_hooks_but('metadata'):
-            self.assertEqual(session.hooks_mode, session.HOOKS_DENY_ALL)
-            self.assertEqual(session.disabled_hook_categories, set())
-            self.assertEqual(session.enabled_hook_categories, set(('metadata',)))
+            self.assertEqual(HOOKS_DENY_ALL, session.hooks_mode)
+            self.assertEqual(set(), session.disabled_hook_categories)
+            self.assertEqual(set(('metadata',)), session.enabled_hook_categories)
             session.commit()
-            self.assertEqual(session.hooks_mode, session.HOOKS_DENY_ALL)
-            self.assertEqual(session.disabled_hook_categories, set())
-            self.assertEqual(session.enabled_hook_categories, set(('metadata',)))
+            self.assertEqual(HOOKS_DENY_ALL, session.hooks_mode)
+            self.assertEqual(set(), session.disabled_hook_categories)
+            self.assertEqual(set(('metadata',)), session.enabled_hook_categories)
             session.rollback()
-            self.assertEqual(session.hooks_mode, session.HOOKS_DENY_ALL)
-            self.assertEqual(session.disabled_hook_categories, set())
-            self.assertEqual(session.enabled_hook_categories, set(('metadata',)))
+            self.assertEqual(HOOKS_DENY_ALL, session.hooks_mode)
+            self.assertEqual(set(), session.disabled_hook_categories)
+            self.assertEqual(set(('metadata',)), session.enabled_hook_categories)
             with session.allow_all_hooks_but('integrity'):
-                self.assertEqual(session.hooks_mode, session.HOOKS_ALLOW_ALL)
-                self.assertEqual(session.disabled_hook_categories, set(('integrity',)))
-                self.assertEqual(session.enabled_hook_categories, set(('metadata',))) # not changed in such case
-            self.assertEqual(session.hooks_mode, session.HOOKS_DENY_ALL)
-            self.assertEqual(session.disabled_hook_categories, set())
-            self.assertEqual(session.enabled_hook_categories, set(('metadata',)))
+                self.assertEqual(HOOKS_ALLOW_ALL, session.hooks_mode)
+                self.assertEqual(set(('integrity',)), session.disabled_hook_categories)
+                self.assertEqual(set(('metadata',)), session.enabled_hook_categories) # not changed in such case
+            self.assertEqual(HOOKS_DENY_ALL, session.hooks_mode)
+            self.assertEqual(set(), session.disabled_hook_categories)
+            self.assertEqual(set(('metadata',)), session.enabled_hook_categories)
         # leaving context manager with no transaction running should reset the
         # transaction local storage (and associated cnxset)
-        self.assertEqual(session._tx_data, {})
-        self.assertEqual(session.cnxset, None)
-        self.assertEqual(session.hooks_mode, session.HOOKS_ALLOW_ALL)
-        self.assertEqual(session.disabled_hook_categories, set())
-        self.assertEqual(session.enabled_hook_categories, set())
+        self.assertEqual({}, session._txs)
+        self.assertEqual(None, session.cnxset)
+        self.assertEqual(HOOKS_ALLOW_ALL, session.hooks_mode, session.HOOKS_ALLOW_ALL)
+        self.assertEqual(set(), session.disabled_hook_categories)
+        self.assertEqual(set(), session.enabled_hook_categories)
 
 
 if __name__ == '__main__':
--- a/server/test/unittest_undo.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/server/test/unittest_undo.py	Fri Jun 14 16:26:25 2013 +0200
@@ -19,6 +19,8 @@
 
 from cubicweb import ValidationError
 from cubicweb.devtools.testlib import CubicWebTC
+import cubicweb.server.session
+from cubicweb.server.session import Transaction as OldTransaction
 from cubicweb.transaction import *
 
 from cubicweb.server.sources.native import UndoTransactionException, _UndoException
@@ -28,12 +30,19 @@
 
     def setup_database(self):
         req = self.request()
-        self.session.undo_actions = True
         self.toto = self.create_user(req, 'toto', password='toto', groups=('users',),
                                      commit=False)
         self.txuuid = self.commit()
 
+    def setUp(self):
+        class Transaction(OldTransaction):
+            """Force undo feature to be turned on in all case"""
+            undo_actions = property(lambda tx: True, lambda x, y:None)
+        cubicweb.server.session.Transaction = Transaction
+        super(UndoableTransactionTC, self).setUp()
+
     def tearDown(self):
+        cubicweb.server.session.Transaction = OldTransaction
         self.restore_connection()
         self.session.undo_support = set()
         super(UndoableTransactionTC, self).tearDown()
--- a/server/utils.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/server/utils.py	Fri Jun 14 16:26:25 2013 +0200
@@ -145,8 +145,8 @@
 class LoopTask(object):
     """threaded task restarting itself once executed"""
     def __init__(self, tasks_manager, interval, func, args):
-        if interval <= 0:
-            raise ValueError('Loop task interval must be > 0 '
+        if interval < 0:
+            raise ValueError('Loop task interval must be >= 0 '
                              '(current value: %f for %s)' % \
                              (interval, func_name(func)))
         self._tasks_manager = tasks_manager
@@ -219,7 +219,13 @@
 
     def add_looping_task(self, interval, func, *args):
         """register a function to be called every `interval` seconds.
+
+        If interval is negative, no looping task is registered.
         """
+        if interval < 0:
+            self.debug('looping task %s ignored due to interval %f < 0',
+                       func_name(func), interval)
+            return
         task = LoopTask(self, interval, func, args)
         if self.running:
             self._start_task(task)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/skeleton/DISTNAME.spec.tmpl	Fri Jun 14 16:26:25 2013 +0200
@@ -0,0 +1,46 @@
+# for el5, force use of python2.6
+%%if 0%%{?el5}
+%%define python python26
+%%define __python /usr/bin/python2.6
+%%else
+%%define python python
+%%define __python /usr/bin/python
+%%endif
+%%{!?_python_sitelib: %%define _python_sitelib %%(%%{__python} -c "from distutils.sysconfig import get_python_lib; print get_python_lib()")}
+
+Name:           %(distname)s
+Version:        0.1.0
+Release:        logilab.1%%{?dist}
+Summary:        %(shortdesc)s
+Group:          Applications/Internet
+License:        %(license)s
+Source0:        %(distname)s-%%{version}.tar.gz
+
+BuildArch:      noarch
+BuildRoot:      %%{_tmppath}/%%{name}-%%{version}-%%{release}-buildroot
+
+BuildRequires:  %%{python} %%{python}-setuptools
+Requires:       cubicweb >= %(version)s
+
+%%description
+%(longdesc)s
+
+%%prep
+%%setup -q -n %(distname)s-%%{version}
+%%if 0%%{?el5}
+# change the python version in shebangs
+find . -name '*.py' -type f -print0 |  xargs -0 sed -i '1,3s;^#!.*python.*$;#! /usr/bin/python2.6;'
+%%endif
+
+%%install
+NO_SETUPTOOLS=1 %%{__python} setup.py --quiet install --no-compile --prefix=%%{_prefix} --root="$RPM_BUILD_ROOT"
+# remove generated .egg-info file
+rm -rf $RPM_BUILD_ROOT/usr/lib/python*
+
+
+%%clean
+rm -rf $RPM_BUILD_ROOT
+
+%%files
+%%defattr(-, root, root)
+/*
--- a/skeleton/debian/control.tmpl	Fri Jun 14 16:13:24 2013 +0200
+++ b/skeleton/debian/control.tmpl	Fri Jun 14 16:26:25 2013 +0200
@@ -2,9 +2,9 @@
 Section: web
 Priority: optional
 Maintainer: %(author)s <%(author-email)s>
-Build-Depends: debhelper (>= 7), python (>=2.5), python-support
+Build-Depends: debhelper (>= 7), python (>= 2.6), python-support
 Standards-Version: 3.9.3
-XS-Python-Version: >= 2.5
+XS-Python-Version: >= 2.6
 
 Package: %(distname)s
 Architecture: all
--- a/sobjects/cwxmlparser.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/sobjects/cwxmlparser.py	Fri Jun 14 16:26:25 2013 +0200
@@ -42,7 +42,7 @@
 from yams.constraints import BASE_CONVERTERS
 from yams.schema import role_name as rn
 
-from cubicweb import ValidationError, RegistryException, typed_eid
+from cubicweb import ValidationError, RegistryException
 from cubicweb.view import Component
 from cubicweb.server.sources import datafeed
 from cubicweb.server.hook import match_rtype
@@ -218,7 +218,7 @@
         return entity
 
     def process_relations(self, entity, rels):
-        etype = entity.__regid__
+        etype = entity.cw_etype
         for (rtype, role, action), rules in self.source.mapping.get(etype, {}).iteritems():
             try:
                 related_items = rels[role][rtype]
@@ -326,10 +326,10 @@
         item['cwtype'] = unicode(node.tag)
         item.setdefault('cwsource', None)
         try:
-            item['eid'] = typed_eid(item['eid'])
+            item['eid'] = int(item['eid'])
         except KeyError:
             # cw < 3.11 compat mode XXX
-            item['eid'] = typed_eid(node.find('eid').text)
+            item['eid'] = int(node.find('eid').text)
             item['cwuri'] = node.find('cwuri').text
         rels = {}
         for child in node:
--- a/sobjects/ldapparser.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/sobjects/ldapparser.py	Fri Jun 14 16:26:25 2013 +0200
@@ -32,7 +32,7 @@
     __regid__ = 'ldapfeed'
     # attributes that may appears in source user_attrs dict which are not
     # attributes of the cw user
-    non_attribute_keys = set(('email',))
+    non_attribute_keys = set(('email', 'eid', 'member', 'modification_date'))
 
     @cachedproperty
     def searchfilterstr(self):
@@ -40,27 +40,57 @@
         return '(&%s)' % ''.join(self.source.base_filters)
 
     @cachedproperty
-    def source_entities_by_extid(self):
+    def searchgroupfilterstr(self):
+        """ ldap search string, including user-filter """
+        return '(&%s)' % ''.join(self.source.group_base_filters)
+
+    @cachedproperty
+    def user_source_entities_by_extid(self):
+        source = self.source
+        if source.user_base_dn.strip():
+            attrs = map(str, source.user_attrs.keys())
+            return dict((userdict['dn'], userdict)
+                        for userdict in source._search(self._cw,
+                                                       source.user_base_dn,
+                                                       source.user_base_scope,
+                                                       self.searchfilterstr,
+                                                       attrs))
+        return {}
+
+    @cachedproperty
+    def group_source_entities_by_extid(self):
         source = self.source
-        return dict((userdict['dn'], userdict)
-                    for userdict in source._search(self._cw,
-                                                   source.user_base_dn,
-                                                   source.user_base_scope,
-                                                   self.searchfilterstr))
+        if source.group_base_dn.strip():
+            attrs = map(str, ['modifyTimestamp'] + source.group_attrs.keys())
+            return dict((groupdict['dn'], groupdict)
+                        for groupdict in source._search(self._cw,
+                                                        source.group_base_dn,
+                                                        source.group_base_scope,
+                                                        self.searchgroupfilterstr,
+                                                        attrs))
+        return {}
+
+    def _process(self, etype, sdict):
+        self.warning('fetched %s %s', etype, sdict)
+        extid = sdict['dn']
+        entity = self.extid2entity(extid, etype, **sdict)
+        if entity is not None and not self.created_during_pull(entity):
+            self.notify_updated(entity)
+            attrs = self.ldap2cwattrs(sdict, etype)
+            self.update_if_necessary(entity, attrs)
+            if etype == 'CWUser':
+                self._process_email(entity, sdict)
+            if etype == 'CWGroup':
+                self._process_membership(entity, sdict)
 
     def process(self, url, raise_on_error=False):
         """IDataFeedParser main entry point"""
         self.debug('processing ldapfeed source %s %s', self.source, self.searchfilterstr)
-        for userdict in self.source_entities_by_extid.itervalues():
-            self.warning('fetched user %s', userdict)
-            extid = userdict['dn']
-            entity = self.extid2entity(extid, 'CWUser', **userdict)
-            if entity is not None and not self.created_during_pull(entity):
-                self.notify_updated(entity)
-                attrs = self.ldap2cwattrs(userdict)
-                self.update_if_necessary(entity, attrs)
-                self._process_email(entity, userdict)
-
+        for userdict in self.user_source_entities_by_extid.itervalues():
+            self._process('CWUser', userdict)
+        self.debug('processing ldapfeed source %s %s', self.source, self.searchgroupfilterstr)
+        for groupdict in self.group_source_entities_by_extid.itervalues():
+            self._process('CWGroup', groupdict)
 
     def handle_deletion(self, config, session, myuris):
         if config['delete-entities']:
@@ -85,7 +115,7 @@
         # disable read security to allow password selection
         with entity._cw.security_enabled(read=False):
             entity.complete(tuple(attrs))
-        if entity.__regid__ == 'CWUser':
+        if entity.cw_etype == 'CWUser':
             wf = entity.cw_adapt_to('IWorkflowable')
             if wf.state == 'deactivated':
                 wf.fire_transition('activate')
@@ -98,41 +128,56 @@
                 entity.cw_set(**attrs)
                 self.notify_updated(entity)
 
-    def ldap2cwattrs(self, sdict, tdict=None):
+    def ldap2cwattrs(self, sdict, etype, tdict=None):
+        """ Transform dictionary of LDAP attributes to CW
+        etype must be CWUser or CWGroup """
         if tdict is None:
             tdict = {}
-        for sattr, tattr in self.source.user_attrs.iteritems():
+        if etype == 'CWUser':
+            items = self.source.user_attrs.iteritems()
+        elif etype == 'CWGroup':
+            items = self.source.group_attrs.iteritems()
+        for sattr, tattr in items:
             if tattr not in self.non_attribute_keys:
                 try:
                     tdict[tattr] = sdict[sattr]
                 except KeyError:
                     raise ConfigurationError('source attribute %s is not present '
                                              'in the source, please check the '
-                                             'user-attrs-map field' % sattr)
+                                             '%s-attrs-map field' %
+                                             (sattr, etype[2:].lower()))
         return tdict
 
     def before_entity_copy(self, entity, sourceparams):
-        if entity.__regid__ == 'EmailAddress':
+        etype = entity.cw_etype
+        if etype == 'EmailAddress':
             entity.cw_edited['address'] = sourceparams['address']
         else:
-            self.ldap2cwattrs(sourceparams, entity.cw_edited)
-            pwd = entity.cw_edited.get('upassword')
-            if not pwd:
-                # generate a dumb password if not fetched from ldap (see
-                # userPassword)
-                pwd = crypt_password(generate_password())
-                entity.cw_edited['upassword'] = Binary(pwd)
+            self.ldap2cwattrs(sourceparams, etype, tdict=entity.cw_edited)
+            if etype == 'CWUser':
+                pwd = entity.cw_edited.get('upassword')
+                if not pwd:
+                    # generate a dumb password if not fetched from ldap (see
+                    # userPassword)
+                    pwd = crypt_password(generate_password())
+                    entity.cw_edited['upassword'] = Binary(pwd)
         return entity
 
     def after_entity_copy(self, entity, sourceparams):
         super(DataFeedLDAPAdapter, self).after_entity_copy(entity, sourceparams)
-        if entity.__regid__ == 'EmailAddress':
+        etype = entity.cw_etype
+        if etype == 'EmailAddress':
             return
-        groups = filter(None, [self._get_group(name)
-                               for name in self.source.user_default_groups])
-        if groups:
-            entity.cw_set(in_group=groups)
-        self._process_email(entity, sourceparams)
+        # all CWUsers must be processed before CWGroups so that the in_group
+        # relation is set correctly in _associate_ldapusers
+        elif etype == 'CWUser':
+            groups = filter(None, [self._get_group(name)
+                                   for name in self.source.user_default_groups])
+            if groups:
+                entity.cw_set(in_group=groups)
+            self._process_email(entity, sourceparams)
+        elif etype == 'CWGroup':
+            self._process_membership(entity, sourceparams)
 
     def is_deleted(self, extidplus, etype, eid):
         try:
@@ -141,7 +186,7 @@
             # for some reason extids here tend to come in both forms, e.g:
             # dn, dn@@Babar
             extid = extidplus
-        return extid not in self.source_entities_by_extid
+        return extid not in self.user_source_entities_by_extid
 
     def _process_email(self, entity, userdict):
         try:
@@ -159,10 +204,7 @@
                 emailextid = userdict['dn'] + '@@' + emailaddr
                 email = self.extid2entity(emailextid, 'EmailAddress',
                                           address=emailaddr)
-                if entity.primary_email:
-                    entity.cw_set(use_email=email)
-                else:
-                    entity.cw_set(primary_email=email)
+                entity.cw_set(use_email=email)
             elif self.sourceuris:
                 # pop from sourceuris anyway, else email may be removed by the
                 # source once import is finished
@@ -170,13 +212,26 @@
                 self.sourceuris.pop(uri, None)
             # XXX else check use_email relation?
 
+    def _process_membership(self, entity, sourceparams):
+        """ Find existing CWUsers with the same login as the memberUids in the
+        CWGroup entity and create the in_group relationship """
+        mdate = sourceparams.get('modification_date')
+        if (not mdate or mdate > entity.modification_date):
+            self._cw.execute('DELETE U in_group G WHERE G eid %(g)s',
+                             {'g':entity.eid})
+            members = sourceparams.get(self.source.group_rev_attrs['member'])
+            if members:
+                members = ["'%s'" % e for e in members]
+                rql = 'SET U in_group G WHERE G eid %%(g)s, U login IN (%s)' % ','.join(members)
+                self._cw.execute(rql, {'g':entity.eid,  })
+
     @cached
     def _get_group(self, name):
         try:
             return self._cw.execute('Any X WHERE X is CWGroup, X name %(name)s',
                                     {'name': name}).get_entity(0, 0)
         except IndexError:
-            self.error('group %r referenced by source configuration %r does not exist'
-                       % (name, self.source.uri))
+            self.error('group %r referenced by source configuration %r does not exist',
+                       name, self.source.uri)
             return None
 
--- a/sobjects/notification.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/sobjects/notification.py	Fri Jun 14 16:26:25 2013 +0200
@@ -26,9 +26,11 @@
 from logilab.common.deprecation import class_renamed, class_moved, deprecated
 from logilab.common.registry import yes
 
-from cubicweb.view import Component
-from cubicweb.mail import NotificationView as BaseNotificationView, SkipEmail
+from cubicweb.entity import Entity
+from cubicweb.view import Component, EntityView
 from cubicweb.server.hook import SendMailOp
+from cubicweb.mail import construct_message_id, format_mail
+from cubicweb.server.session import Session
 
 
 class RecipientsFinder(Component):
@@ -59,14 +61,147 @@
 
 # abstract or deactivated notification views and mixin ########################
 
-class NotificationView(BaseNotificationView):
-    """overriden to delay actual sending of mails to a commit operation by
-    default
+
+class SkipEmail(Exception):
+    """raise this if you decide to skip an email during its generation"""
+
+
+class NotificationView(EntityView):
+    """abstract view implementing the "email" API (eg to simplify sending
+    notification)
     """
+    # XXX refactor this class to work with len(rset) > 1
+
+    msgid_timestamp = True
+
+    # to be defined on concrete sub-classes
+    content = None # body of the mail
+    message = None # action verb of the subject
+
+    # this is usually the method to call
+    def render_and_send(self, **kwargs):
+        """generate and send an email message for this view"""
+        delayed = kwargs.pop('delay_to_commit', None)
+        for recipients, msg in self.render_emails(**kwargs):
+            if delayed is None:
+                self.send(recipients, msg)
+            elif delayed:
+                self.send_on_commit(recipients, msg)
+            else:
+                self.send_now(recipients, msg)
+
+    def cell_call(self, row, col=0, **kwargs):
+        self.w(self._cw._(self.content) % self.context(**kwargs))
+
+    def render_emails(self, **kwargs):
+        """generate and send emails for this view (one per recipient)"""
+        self._kwargs = kwargs
+        recipients = self.recipients()
+        if not recipients:
+            self.info('skipping %s notification, no recipients', self.__regid__)
+            return
+        if self.cw_rset is not None:
+            entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0)
+            # if the view is using timestamp in message ids, no way to reference
+            # previous email
+            if not self.msgid_timestamp:
+                refs = [self.construct_message_id(eid)
+                        for eid in entity.cw_adapt_to('INotifiable').notification_references(self)]
+            else:
+                refs = ()
+            msgid = self.construct_message_id(entity.eid)
+        else:
+            refs = ()
+            msgid = None
+        req = self._cw
+        self.user_data = req.user_data()
+        origlang = req.lang
+        for something in recipients:
+            if isinstance(something, Entity):
+                # hi-jack self._cw to get a session for the returned user
+                self._cw = Session(something, self._cw.repo)
+                self._cw.set_cnxset()
+                emailaddr = something.cw_adapt_to('IEmailable').get_email()
+            else:
+                emailaddr, lang = something
+                self._cw.set_language(lang)
+            # since the same view (eg self) may be called multiple times and we
+            # need a fresh stream at each iteration, reset it explicitly
+            self.w = None
+            # XXX call render before subject to set .row/.col attributes on the
+            #     view
+            try:
+                content = self.render(row=0, col=0, **kwargs)
+                subject = self.subject()
+            except SkipEmail:
+                continue
+            except Exception as ex:
+                # shouldn't make the whole transaction fail because of rendering
+                # error (unauthorized or such) XXX check it doesn't actually
+                # occur due to rollback on such error
+                self.exception(str(ex))
+                continue
+            msg = format_mail(self.user_data, [emailaddr], content, subject,
+                              config=self._cw.vreg.config, msgid=msgid, references=refs)
+            yield [emailaddr], msg
+            if isinstance(something, Entity):
+                self._cw.commit()
+                self._cw.close()
+                self._cw = req
+        # restore language
+        req.set_language(origlang)
+
+    # recipients / email sending ###############################################
+
+    def recipients(self):
+        """return a list of either 2-tuples (email, language) or user entities
+        to whom this email should be sent
+        """
+        finder = self._cw.vreg['components'].select(
+            'recipients_finder', self._cw, rset=self.cw_rset,
+            row=self.cw_row or 0, col=self.cw_col or 0)
+        return finder.recipients()
+
+    def send_now(self, recipients, msg):
+        self._cw.vreg.config.sendmails([(msg, recipients)])
+
     def send_on_commit(self, recipients, msg):
         SendMailOp(self._cw, recipients=recipients, msg=msg)
     send = send_on_commit
 
+    # email generation helpers #################################################
+
+    def construct_message_id(self, eid):
+        return construct_message_id(self._cw.vreg.config.appid, eid,
+                                    self.msgid_timestamp)
+
+    def format_field(self, attr, value):
+        return ':%(attr)s: %(value)s' % {'attr': attr, 'value': value}
+
+    def format_section(self, attr, value):
+        return '%(attr)s\n%(ul)s\n%(value)s\n' % {
+            'attr': attr, 'ul': '-'*len(attr), 'value': value}
+
+    def subject(self):
+        entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0)
+        subject = self._cw._(self.message)
+        etype = entity.dc_type()
+        eid = entity.eid
+        login = self.user_data['login']
+        return self._cw._('%(subject)s %(etype)s #%(eid)s (%(login)s)') % locals()
+
+    def context(self, **kwargs):
+        entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0)
+        for key, val in kwargs.iteritems():
+            if val and isinstance(val, unicode) and val.strip():
+               kwargs[key] = self._cw._(val)
+        kwargs.update({'user': self.user_data['login'],
+                       'eid': entity.eid,
+                       'etype': entity.dc_type(),
+                       'url': entity.absolute_url(),
+                       'title': entity.dc_long_title(),})
+        return kwargs
+
 
 class StatusChangeMixIn(object):
     __regid__ = 'notif_status_change'
@@ -157,9 +292,8 @@
 url: %(url)s
 """
 
-    def context(self, **kwargs):
+    def context(self, changes=(), **kwargs):
         context = super(EntityUpdatedNotificationView, self).context(**kwargs)
-        changes = self._cw.transaction_data['changes'][self.cw_rset[0][0]]
         _ = self._cw._
         formatted_changes = []
         entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0)
@@ -195,12 +329,3 @@
         entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0)
         return  u'%s #%s (%s)' % (self._cw.__('Updated %s' % entity.e_schema),
                                   entity.eid, self.user_data['login'])
-
-
-from cubicweb.hooks.notification import RenderAndSendNotificationView
-from cubicweb.mail import parse_message_id
-
-NormalizedTextView = class_renamed('NormalizedTextView', ContentAddedView)
-RenderAndSendNotificationView = class_moved(RenderAndSendNotificationView)
-parse_message_id = deprecated('parse_message_id is now defined in cubicweb.mail')(parse_message_id)
-
--- a/sobjects/test/unittest_notification.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/sobjects/test/unittest_notification.py	Fri Jun 14 16:26:25 2013 +0200
@@ -60,9 +60,9 @@
             msgid1 = construct_message_id('testapp', eid, 12)
             self.assertNotEqual(msgid1, '<@testapp.%s>' % gethostname())
 
+class NotificationTC(CubicWebTC):
 
-class RecipientsFinderTC(CubicWebTC):
-    def test(self):
+    def test_recipients_finder(self):
         urset = self.execute('CWUser X WHERE X login "admin"')
         self.execute('INSERT EmailAddress X: X address "admin@logilab.fr", U primary_email X '
                      'WHERE U eid %(x)s', {'x': urset[0][0]})
@@ -79,13 +79,11 @@
         self.set_option('default-dest-addrs', 'abcd@logilab.fr, efgh@logilab.fr')
         self.assertEqual(finder.recipients(), [('abcd@logilab.fr', 'en'), ('efgh@logilab.fr', 'en')])
 
-
-class StatusChangeViewsTC(CubicWebTC):
-
     def test_status_change_view(self):
         req = self.request()
         u = self.create_user(req, 'toto')
-        u.cw_adapt_to('IWorkflowable').fire_transition('deactivate', comment=u'yeah')
+        iwfable = u.cw_adapt_to('IWorkflowable')
+        iwfable.fire_transition('deactivate', comment=u'yeah')
         self.assertFalse(MAILBOX)
         self.commit()
         self.assertEqual(len(MAILBOX), 1)
@@ -99,7 +97,8 @@
 
 url: http://testing.fr/cubicweb/cwuser/toto
 ''')
-        self.assertEqual(email.subject, 'status changed CWUser #%s (admin)' % u.eid)
+        self.assertEqual(email.subject,
+                         'status changed CWUser #%s (admin)' % u.eid)
 
 if __name__ == '__main__':
     unittest_main()
--- a/sobjects/textparsers.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/sobjects/textparsers.py	Fri Jun 14 16:26:25 2013 +0200
@@ -26,7 +26,7 @@
 
 import re
 
-from cubicweb import UnknownEid, typed_eid
+from cubicweb import UnknownEid
 from cubicweb.view import Component
 
 
@@ -66,7 +66,7 @@
     def parse(self, caller, text):
         for trname, eid in self.instr_rgx.findall(text):
             try:
-                entity = self._cw.entity_from_eid(typed_eid(eid))
+                entity = self._cw.entity_from_eid(int(eid))
             except UnknownEid:
                 self.error("can't get entity with eid %s", eid)
                 continue
--- a/test/unittest_rset.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/test/unittest_rset.py	Fri Jun 14 16:26:25 2013 +0200
@@ -363,7 +363,7 @@
                     ('CWGroup', 'users'))
         for entity in rset.entities(): # test get_entity for each row actually
             etype, n = expected[entity.cw_row]
-            self.assertEqual(entity.__regid__, etype)
+            self.assertEqual(entity.cw_etype, etype)
             attr = etype == 'Bookmark' and 'title' or 'name'
             self.assertEqual(entity.cw_attr_cache[attr], n)
 
@@ -385,7 +385,7 @@
         self.assertEqual(rtype, 'title')
         self.assertEqual(entity.title, 'aaaa')
         entity, rtype = rset.related_entity(1, 1)
-        self.assertEqual(entity.__regid__, 'CWGroup')
+        self.assertEqual(entity.cw_etype, 'CWGroup')
         self.assertEqual(rtype, 'name')
         self.assertEqual(entity.name, 'guests')
 
--- a/test/unittest_schema.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/test/unittest_schema.py	Fri Jun 14 16:26:25 2013 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -186,7 +186,7 @@
                               'data', 'data_encoding', 'data_format', 'data_name', 'default_workflow', 'defaultval', 'delete_permission',
                               'description', 'description_format', 'destination_state', 'dirige',
 
-                              'ecrit_par', 'eid', 'end_timestamp', 'evaluee', 'expression', 'exprtype',
+                              'ecrit_par', 'eid', 'end_timestamp', 'evaluee', 'expression', 'exprtype', 'extra_props',
 
                               'fabrique_par', 'final', 'firstname', 'for_user', 'fournit',
                               'from_entity', 'from_state', 'fulltext_container', 'fulltextindexed',
--- a/test/unittest_vregistry.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/test/unittest_vregistry.py	Fri Jun 14 16:26:25 2013 +0200
@@ -54,23 +54,23 @@
 
 
     def test_load_subinterface_based_appobjects(self):
-        self.vreg.register_objects([join(BASE, 'web', 'views', 'iprogress.py')])
-        # check progressbar was kicked
-        self.assertFalse(self.vreg['views'].get('progressbar'))
+        self.vreg.register_objects([join(BASE, 'web', 'views', 'idownloadable.py')])
+        # check downloadlink was kicked
+        self.assertFalse(self.vreg['views'].get('downloadlink'))
         # we've to emulate register_objects to add custom MyCard objects
         path = [join(BASE, 'entities', '__init__.py'),
                 join(BASE, 'entities', 'adapters.py'),
-                join(BASE, 'web', 'views', 'iprogress.py')]
+                join(BASE, 'web', 'views', 'idownloadable.py')]
         filemods = self.vreg.init_registration(path, None)
         for filepath, modname in filemods:
             self.vreg.load_file(filepath, modname)
-        class CardIProgressAdapter(EntityAdapter):
-            __regid__ = 'IProgress'
+        class CardIDownloadableAdapter(EntityAdapter):
+            __regid__ = 'IDownloadable'
         self.vreg._loadedmods[__name__] = {}
-        self.vreg.register(CardIProgressAdapter)
+        self.vreg.register(CardIDownloadableAdapter)
         self.vreg.initialization_completed()
         # check progressbar isn't kicked
-        self.assertEqual(len(self.vreg['views']['progressbar']), 1)
+        self.assertEqual(len(self.vreg['views']['downloadlink']), 1)
 
     def test_properties(self):
         self.vreg.reset()
--- a/utils.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/utils.py	Fri Jun 14 16:26:25 2013 +0200
@@ -229,11 +229,8 @@
     jQuery(window).unload(unloadPageData);
     pageDataUnloaded = true;
 }'''
-    # Making <script> tag content work properly with all possible
-    # content-types (xml/html) and all possible browsers is very
-    # tricky, see http://www.hixie.ch/advocacy/xhtml for an in-depth discussion
-    xhtml_safe_script_opening = u'<script type="text/javascript"><!--//--><![CDATA[//><!--\n'
-    xhtml_safe_script_closing = u'\n//--><!]]></script>'
+    script_opening = u'<script type="text/javascript">\n'
+    script_closing = u'\n</script>'
 
     def __init__(self, req):
         super(HTMLHead, self).__init__()
@@ -263,10 +260,7 @@
     def add_post_inline_script(self, content):
         self.post_inlined_scripts.append(content)
 
-    def add_onload(self, jscode, jsoncall=_MARKER):
-        if jsoncall is not _MARKER:
-            warn('[3.7] specifying jsoncall is not needed anymore',
-                 DeprecationWarning, stacklevel=2)
+    def add_onload(self, jscode):
         self.add_post_inline_script(u"""$(cw).one('server-response', function(event) {
 %s});""" % jscode)
 
@@ -347,14 +341,14 @@
         w = self.write
         # 1/ variable declaration if any
         if self.jsvars:
-            w(self.xhtml_safe_script_opening)
+            w(self.script_opening)
             for var, value, override in self.jsvars:
                 vardecl = u'%s = %s;' % (var, json.dumps(value))
                 if not override:
                     vardecl = (u'if (typeof %s == "undefined") {%s}' %
                                (var, vardecl))
                 w(vardecl + u'\n')
-            w(self.xhtml_safe_script_closing)
+            w(self.script_closing)
         # 2/ css files
         ie_cssfiles = ((x, (y, z)) for x, y, z in self.ie_cssfiles)
         if self.datadir_url and self._cw.vreg.config['concat-resources']:
@@ -400,9 +394,9 @@
                     w(xml_escape(script))
                     w(u'</pre>')
             else:
-                w(self.xhtml_safe_script_opening)
+                w(self.script_opening)
                 w(u'\n\n'.join(self.post_inlined_scripts))
-                w(self.xhtml_safe_script_closing)
+                w(self.script_closing)
         header = super(HTMLHead, self).getvalue()
         if skiphead:
             return header
@@ -425,20 +419,17 @@
         # main stream
         self.body = UStringIO()
         self.doctype = u''
-        # xmldecl and html opening tag
-        self.xmldecl = u'<?xml version="1.0" encoding="%s"?>\n' % req.encoding
-        self._namespaces = [('xmlns', 'http://www.w3.org/1999/xhtml'),
-                            ('xmlns:cubicweb','http://www.logilab.org/2008/cubicweb')]
-        self._htmlattrs = [('xml:lang', req.lang),
-                           ('lang', req.lang)]
+        self._htmlattrs = [('lang', req.lang)]
         # keep main_stream's reference on req for easier text/html demoting
         req.main_stream = self
 
+    @deprecated('[3.17] there are no namespaces in html, xhtml is not served any longer')
     def add_namespace(self, prefix, uri):
-        self._namespaces.append( (prefix, uri) )
+        pass
 
+    @deprecated('[3.17] there are no namespaces in html, xhtml is not served any longer')
     def set_namespaces(self, namespaces):
-        self._namespaces = namespaces
+        pass
 
     def add_htmlattr(self, attrname, attrvalue):
         self._htmlattrs.append( (attrname, attrvalue) )
@@ -446,10 +437,11 @@
     def set_htmlattrs(self, attrs):
         self._htmlattrs = attrs
 
-    def set_doctype(self, doctype, reset_xmldecl=True):
+    def set_doctype(self, doctype, reset_xmldecl=None):
         self.doctype = doctype
-        if reset_xmldecl:
-            self.xmldecl = u''
+        if reset_xmldecl is not None:
+            warn('[3.17] xhtml is no more supported',
+                 DeprecationWarning, stacklevel=2)
 
     def write(self, data):
         """StringIO interface: this method will be assigned to self.w
@@ -459,17 +451,17 @@
     @property
     def htmltag(self):
         attrs = ' '.join('%s="%s"' % (attr, xml_escape(value))
-                         for attr, value in (self._namespaces + self._htmlattrs))
+                         for attr, value in self._htmlattrs)
         if attrs:
             return '<html %s>' % attrs
         return '<html>'
 
     def getvalue(self):
         """writes HTML headers, closes </head> tag and writes HTML body"""
-        return u'%s\n%s\n%s\n%s\n%s\n</html>' % (self.xmldecl, self.doctype,
-                                                 self.htmltag,
-                                                 self.head.getvalue(),
-                                                 self.body.getvalue())
+        return u'%s\n%s\n%s\n%s\n</html>' % (self.doctype,
+                                             self.htmltag,
+                                             self.head.getvalue(),
+                                             self.body.getvalue())
 
 try:
     # may not be there if cubicweb-web not installed
@@ -567,14 +559,6 @@
     return 'javascript: ' + PERCENT_IN_URLQUOTE_RE.sub(r'%25', javascript_code)
 
 
-@deprecated('[3.7] merge_dicts is deprecated')
-def merge_dicts(dict1, dict2):
-    """update a copy of `dict1` with `dict2`"""
-    dict1 = dict(dict1)
-    dict1.update(dict2)
-    return dict1
-
-
 def parse_repo_uri(uri):
     """ transform a command line uri into a (protocol, hostport, appid), e.g:
     <myapp>                      -> 'inmemory', None, '<myapp>'
--- a/view.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/view.py	Fri Jun 14 16:26:25 2013 +0200
@@ -42,50 +42,11 @@
 NOINDEX = u'<meta name="ROBOTS" content="NOINDEX" />'
 NOFOLLOW = u'<meta name="ROBOTS" content="NOFOLLOW" />'
 
-CW_XHTML_EXTENSIONS = '''[
-  <!ATTLIST html xmlns:cubicweb CDATA  #FIXED \'http://www.logilab.org/2008/cubicweb\'  >
-
-<!ENTITY % coreattrs
- "id          ID            #IMPLIED
-  class       CDATA         #IMPLIED
-  style       CDATA         #IMPLIED
-  title       CDATA         #IMPLIED
+TRANSITIONAL_DOCTYPE_NOEXT = u'<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">\n'
+TRANSITIONAL_DOCTYPE = TRANSITIONAL_DOCTYPE_NOEXT # bw compat
 
- cubicweb:accesskey         CDATA   #IMPLIED
- cubicweb:actualrql         CDATA   #IMPLIED
- cubicweb:dataurl           CDATA   #IMPLIED
- cubicweb:facetName         CDATA   #IMPLIED
- cubicweb:facetargs         CDATA   #IMPLIED
- cubicweb:fallbackvid       CDATA   #IMPLIED
- cubicweb:fname             CDATA   #IMPLIED
- cubicweb:initfunc          CDATA   #IMPLIED
- cubicweb:inputid           CDATA   #IMPLIED
- cubicweb:inputname         CDATA   #IMPLIED
- cubicweb:limit             CDATA   #IMPLIED
- cubicweb:loadtype          CDATA   #IMPLIED
- cubicweb:loadurl           CDATA   #IMPLIED
- cubicweb:maxlength         CDATA   #IMPLIED
- cubicweb:required          CDATA   #IMPLIED
- cubicweb:rooteid           CDATA   #IMPLIED
- cubicweb:rql               CDATA   #IMPLIED
- cubicweb:size              CDATA   #IMPLIED
- cubicweb:sortvalue         CDATA   #IMPLIED
- cubicweb:target            CDATA   #IMPLIED
- cubicweb:tindex            CDATA   #IMPLIED
- cubicweb:tlunit            CDATA   #IMPLIED
- cubicweb:type              CDATA   #IMPLIED
- cubicweb:unselimg          CDATA   #IMPLIED
- cubicweb:uselabel          CDATA   #IMPLIED
- cubicweb:value             CDATA   #IMPLIED
- cubicweb:variables         CDATA   #IMPLIED
- cubicweb:vid               CDATA   #IMPLIED
- cubicweb:wdgtype           CDATA   #IMPLIED
-  "> ] '''
-
-TRANSITIONAL_DOCTYPE = u'<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd" %s>\n' % CW_XHTML_EXTENSIONS
-TRANSITIONAL_DOCTYPE_NOEXT = u'<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">\n'
-STRICT_DOCTYPE = u'<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd" %s>\n' % CW_XHTML_EXTENSIONS
 STRICT_DOCTYPE_NOEXT = u'<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">\n'
+STRICT_DOCTYPE = STRICT_DOCTYPE_NOEXT # bw compat
 
 # base view object ############################################################
 
@@ -510,11 +471,7 @@
     one to display error if the first one failed
     """
 
-    @property
-    def doctype(self):
-        if self._cw.xhtml_browser():
-            return STRICT_DOCTYPE
-        return STRICT_DOCTYPE_NOEXT
+    doctype = '<!DOCTYPE html>'
 
     def set_stream(self, w=None):
         if self.w is not None:
--- a/web/application.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/web/application.py	Fri Jun 14 16:26:25 2013 +0200
@@ -273,6 +273,10 @@
 class CubicWebPublisher(object):
     """the publisher is a singleton hold by the web frontend, and is responsible
     to publish HTTP request.
+
+    The http server will call its main entry point ``application.handle_request``.
+
+    .. automethod:: cubicweb.web.application.CubicWebPublisher.main_handle_request
     """
 
     def __init__(self, config,
@@ -339,6 +343,15 @@
 
 
     def main_handle_request(self, req, path):
+        """Process an HTTP request
+
+        Arguments are:
+        - a Request object
+        - the path of the requested URL
+
+        It returns the content of the HTTP response. HTTP headers and status
+        are set on the Request object.
+        """
         if not isinstance(req, CubicWebRequestBase):
             warn('[3.15] Application entry poin arguments are now (req, path) '
                  'not (path, req)', DeprecationWarning, 2)
--- a/web/box.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/web/box.py	Fri Jun 14 16:26:25 2013 +0200
@@ -174,7 +174,7 @@
         self._cw.add_js('cubicweb.ajax.js')
         entity = self.cw_rset.get_entity(row, col)
         title = display_name(self._cw, self.rtype, get_role(self),
-                             context=entity.__regid__)
+                             context=entity.cw_etype)
         box = SideBoxWidget(title, self.__regid__)
         related = self.related_boxitems(entity)
         unrelated = self.unrelated_boxitems(entity)
--- a/web/component.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/web/component.py	Fri Jun 14 16:26:25 2013 +0200
@@ -462,7 +462,7 @@
             eid = entity.eid
         else:
             eid = None
-            form['etype'] = entity.__regid__
+            form['etype'] = entity.cw_etype
             form['tempEid'] = entity.eid
         args = [json_dumps(x) for x in (registry, oid, eid, params)]
         return self._cw.ajax_replace_url(
@@ -546,7 +546,7 @@
         for _, eid in field.vocabulary(form):
             if eid not in skip:
                 entity = self._cw.entity_from_eid(eid)
-                if filteretype is None or entity.__regid__ == filteretype:
+                if filteretype is None or entity.cw_etype == filteretype:
                     entities.append(entity)
         return entities
 
@@ -562,7 +562,7 @@
 
     def render_title(self, w):
         w(display_name(self._cw, self.rtype, role(self),
-                       context=self.entity.__regid__))
+                       context=self.entity.cw_etype))
 
     def render_body(self, w):
         self._cw.add_js('cubicweb.ajax.js')
@@ -614,7 +614,7 @@
 
     def render_title(self, w):
         w(self.rdef.rtype.display_name(self._cw, self.role,
-                                       context=self.entity.__regid__))
+                                       context=self.entity.cw_etype))
 
     def render_body(self, w):
         req = self._cw
--- a/web/data/cubicweb.edition.js	Fri Jun 14 16:13:24 2013 +0200
+++ b/web/data/cubicweb.edition.js	Fri Jun 14 16:26:25 2013 +0200
@@ -543,7 +543,8 @@
  *
  * .. note::
  *
- *    this is a hack to make the XHTML compliant.
+ *    This was a hack to make form loop handling XHTML compliant.
+ *    Since we do not care about xhtml any longer, this may go away.
  *
  * .. note::
  *
@@ -551,8 +552,10 @@
  *
  * .. note::
  *
- *    there is a XHTML module allowing iframe elements but there
- *    is still the problem of the form's `target` attribute
+ *    The form's `target` attribute should probably become a simple data-target
+ *    immediately generated server-side.
+ *    Since we don't do xhtml any longer, the iframe should probably be either
+ *    reconsidered or at least emitted server-side.
  */
 function setFormsTarget(node) {
     var $node = jQuery(node || document.body);
--- a/web/data/cubicweb.facets.js	Fri Jun 14 16:13:24 2013 +0200
+++ b/web/data/cubicweb.facets.js	Fri Jun 14 16:26:25 2013 +0200
@@ -174,44 +174,34 @@
                 $($('#'+divid).get(0).parentNode).append($loadingDiv);
             }
             form.find('div.facet').each(function() {
-                var facet = $(this);
-                facet.find('div.facetCheckBox').each(function(i) {
+                var $facet = $(this);
+                $facet.find('div.facetCheckBox').each(function(i) {
                     this.setAttribute('cubicweb:idx', i);
                 });
-                facet.find('div.facetCheckBox').click(function() {
+                $facet.find('div.facetCheckBox').click(function() {
                     var $this = $(this);
                     // NOTE : add test on the facet operator (i.e. OR, AND)
                     // if ($this.hasClass('facetValueDisabled')){
                     //          return
                     // }
                     if ($this.hasClass('facetValueSelected')) {
-                        $this.removeClass('facetValueSelected');
-                        $this.find('img').each(function(i) {
-                            if (this.getAttribute('cubicweb:unselimg')) {
-                                this.setAttribute('src', UNSELECTED_BORDER_IMG);
-                            }
-                            else {
-                                this.setAttribute('src', UNSELECTED_IMG);
-                            }
-                            this.setAttribute('alt', (_("not selected")));
-                        });
+                        facetCheckBoxUnselect($this);
                     } else {
-                        $(this).addClass('facetValueSelected');
-                        var $img = $(this).find('img');
-                        $img.attr('src', SELECTED_IMG).attr('alt', (_("selected")));
+                        facetCheckBoxSelect($this);
                     }
-                    facetCheckBoxReorder(facet);
+                    facetCheckBoxReorder($facet);
                     buildRQL.apply(null, jsfacetargs);
                 });
-                facet.find('select.facetOperator').change(function() {
-                    var nbselected = facet.find('div.facetValueSelected').length;
+                $facet.find('select.facetOperator').change(function() {
+                    var nbselected = $facet.find('div.facetValueSelected').length;
                     if (nbselected >= 2) {
                         buildRQL.apply(null, jsfacetargs);
                     }
                 });
-                facet.find('div.facetTitle.hideFacetBody').click(function() {
-                    facet.find('div.facetBody').toggleClass('hidden').toggleClass('opened');
+                $facet.find('div.facetTitle.hideFacetBody').click(function() {
+                    $facet.find('div.facetBody').toggleClass('hidden').toggleClass('opened');
                     $(this).toggleClass('opened');
+
                 });
 
             });
@@ -219,6 +209,28 @@
     });
 }
 
+// facetCheckBoxSelect: select the given facet checkbox item (.facetValue
+// class)
+function facetCheckBoxSelect($item) {
+    $item.addClass('facetValueSelected');
+    $item.find('img').attr('src', SELECTED_IMG).attr('alt', (_("selected")));
+}
+
+// facetCheckBoxUnselect: unselect the given facet checkbox item (.facetValue
+// class)
+function facetCheckBoxUnselect($item) {
+    $item.removeClass('facetValueSelected');
+    $item.find('img').each(function(i) {
+        if (this.getAttribute('cubicweb:unselimg')) {
+            this.setAttribute('src', UNSELECTED_BORDER_IMG);
+        }
+        else {
+            this.setAttribute('src', UNSELECTED_IMG);
+        }
+        this.setAttribute('alt', (_("not selected")));
+    });
+}
+
 // facetCheckBoxReorder: reorder all items according to cubicweb:idx attribute
 function facetCheckBoxReorder($facet) {
     var sortfunc = function (a, b) {
--- a/web/data/cubicweb.gmap.js	Fri Jun 14 16:13:24 2013 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,87 +0,0 @@
-/**
- *  :organization: Logilab
- *  :copyright: 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
- *  :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
- */
-
-Widgets.GMapWidget = defclass('GMapWidget', null, {
-    __init__: function(wdgnode) {
-        // Assume we have imported google maps JS
-        if (GBrowserIsCompatible()) {
-            var uselabelstr = wdgnode.getAttribute('cubicweb:uselabel');
-            var uselabel = true;
-            if (uselabelstr) {
-                if (uselabelstr == 'True') {
-                    uselabel = true;
-                }
-                else {
-                    uselabel = false;
-                }
-            }
-            var map = new GMap2(wdgnode);
-            map.addControl(new GSmallMapControl());
-            var jsonurl = wdgnode.getAttribute('cubicweb:loadurl');
-            var self = this; // bind this to a local variable
-            jQuery.getJSON(jsonurl, function(geodata) {
-                var zoomLevel;
-                var center;
-                var latlngbounds = new GLatLngBounds( );
-                for (var i = 0; i < geodata.markers.length; i++) {
-                    var marker = geodata.markers[i];
-                    var latlng = new GLatLng(marker.latitude, marker.longitude);
-                    latlngbounds.extend( latlng );
-                }
-                if (geodata.zoomlevel) {
-                    zoomLevel = geodata.zoomlevel;
-                } else {
-                    zoomLevel = map.getBoundsZoomLevel( latlngbounds ) - 1;
-                }
-                if (geodata.center) {
-                    center = new GLatng(geodata.center.latitude, geodata.center.longitude);
-                } else {
-                    center = latlngbounds.getCenter();
-                }
-                map.setCenter(center, zoomLevel);
-                for (var i = 0; i < geodata.markers.length; i++) {
-                    var marker = geodata.markers[i];
-                    self.createMarker(map, marker, i + 1, uselabel);
-                }
-            });
-            jQuery(wdgnode).after(this.legendBox);
-        } else { // incompatible browser
-            jQuery.unload(GUnload);
-        }
-    },
-
-    createMarker: function(map, marker, i, uselabel) {
-        var point = new GLatLng(marker.latitude, marker.longitude);
-        var icon = new GIcon();
-        icon.image = marker.icon[0];
-        icon.iconSize = new GSize(marker.icon[1][0], marker.icon[1][1]);
-        icon.iconAnchor = new GPoint(marker.icon[2][0], marker.icon[2][1]);
-        if (marker.icon[3]) {
-            icon.shadow4 = marker.icon[3];
-        }
-        if (typeof LabeledMarker == "undefined") {
-            var gmarker = new GMarker(point, {
-                icon: icon,
-                title: marker.title
-            });
-        } else {
-            var gmarker = new LabeledMarker(point, {
-                icon: icon,
-                title: marker.title,
-                labelText: uselabel ? '<strong>' + i + '</strong>': '',
-                labelOffset: new GSize(2, - 32)
-            });
-        }
-        map.addOverlay(gmarker);
-        GEvent.addListener(gmarker, 'click', function() {
-            jQuery.post(marker.bubbleUrl, function(data) {
-                map.openInfoWindowHtml(point, data);
-            });
-        });
-    }
-
-});
-
--- a/web/data/cubicweb.iprogress.css	Fri Jun 14 16:13:24 2013 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,78 +0,0 @@
-/*
- *  :organization: Logilab
- *  :copyright: 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
- *  :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
- */
-
-/******************************************************************************/
-/* progressbar                                                                */
-/******************************************************************************/
-
-.done { background:red }
-
-.inprogress { background:green }
-
-.overpassed { background: yellow}
-
-
-canvas.progressbar {
-  border:1px solid black;
-}
-
-.progressbarback {
-  border: 1px solid #000000;
-  background: transparent;
-  height: 10px;
-  width: 100px;
-}
-
-/******************************************************************************/
-/* progress table                                                             */
-/******************************************************************************/
-
-table.progress {
- /* The default table view */
-  margin: 10px 0px 1em;
-  width: 100%;
-  font-size: 0.9167em;
-}
-
-table.progress th {
-  white-space: nowrap;
-  font-weight: bold;
-  background: %(listingHeaderBgColor)s;
-  padding: 2px 4px;
-  font-size:8pt;
-}
-
-table.progress th,
-table.progress td {
-  border: 1px solid %(listingBorderColor)s;
-}
-
-table.progress td {
-  text-align: right;
-  padding: 2px 3px;
-}
-
-table.progress th.tdleft,
-table.progress td.tdleft {
-  text-align: left;
-  padding: 2px 3px 2px 5px;
-}
-
-table.progress tr.highlighted {
-  background-color: %(listingHighlightedBgColor)s;
-}
-
-table.progress tr.highlighted .progressbarback {
-  border: 1px solid %(listingHighlightedBgColor)s;
-}
-
-table.progress .progressbarback {
-  border: 1px solid #777;
-}
-
-.progress_data {
-  padding-right: 3px;
-}
\ No newline at end of file
--- a/web/data/cubicweb.iprogress.js	Fri Jun 14 16:13:24 2013 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,63 +0,0 @@
-function ProgressBar() {
-    this.budget = 100;
-    this.todo = 100;
-    this.done = 100;
-    this.color_done = "green";
-    this.color_budget = "blue";
-    this.color_todo = "#cccccc"; //  grey
-    this.height = 16;
-    this.middle = this.height / 2;
-    this.radius = 4;
-}
-
-ProgressBar.prototype.draw_one_rect = function(ctx, pos, color, fill) {
-    ctx.beginPath();
-    ctx.lineWidth = 1;
-    ctx.strokeStyle = color;
-    if (fill) {
-        ctx.fillStyle = color;
-        ctx.fillRect(0, 0, pos, this.middle * 2);
-    } else {
-        ctx.lineWidth = 2;
-        ctx.strokeStyle = "black";
-        ctx.moveTo(pos, 0);
-        ctx.lineTo(pos, this.middle * 2);
-        ctx.stroke();
-    }
-};
-
-ProgressBar.prototype.draw_one_circ = function(ctx, pos, color) {
-    ctx.beginPath();
-    ctx.lineWidth = 2;
-    ctx.strokeStyle = color;
-    ctx.moveTo(0, this.middle);
-    ctx.lineTo(pos, this.middle);
-    ctx.arc(pos, this.middle, this.radius, 0, Math.PI * 2, true);
-    ctx.stroke();
-};
-
-ProgressBar.prototype.draw_circ = function(ctx) {
-    this.draw_one_circ(ctx, this.budget, this.color_budget);
-    this.draw_one_circ(ctx, this.todo, this.color_todo);
-    this.draw_one_circ(ctx, this.done, this.color_done);
-};
-
-ProgressBar.prototype.draw_rect = function(ctx) {
-    this.draw_one_rect(ctx, this.todo, this.color_todo, true);
-    this.draw_one_rect(ctx, this.done, this.color_done, true);
-    this.draw_one_rect(ctx, this.budget, this.color_budget, false);
-};
-
-function draw_progressbar(cid, done, todo, budget, color) {
-    var canvas = document.getElementById(cid);
-    if (canvas.getContext) {
-        var ctx = canvas.getContext("2d");
-        var bar = new ProgressBar();
-        bar.budget = budget;
-        bar.todo = todo;
-        bar.done = done;
-        bar.color_done = color;
-        bar.draw_rect(ctx);
-    }
-}
-
--- a/web/data/cubicweb.mailform.css	Fri Jun 14 16:13:24 2013 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,66 +0,0 @@
-/* styles for the email form (views/massmailing.py)
- *
- *  :organization: Logilab
- *  :copyright: 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
- *  :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
- */
-
-form#sendmail {
-  border: 1px solid #DBDCE3;
-  background-color: #E9F5F7;
-  font-family:Verdana,Tahoma,Arial,sans-serif;
-  padding: 1em 1ex;
-}
-
-table.headersform {
-  width: 100%;
-}
-
-form#sendmail td#buttonbar {
-  padding: 0.5ex 0ex;
-}
-
-table.headersform td.hlabel {
-  padding-top: 0.5ex;
-  color: #444444;
-  text-align: right;
-}
-
-table.headersform td.hvalue {
-  padding-top: 0.5ex;
-  padding-left: 0.5em;
-  width: 100%;
-}
-
-table.headersform td.hvalue input#mailsubj {
-  width: 47em; 
-}
-
-form#sendmail div#toolbar {
-  margin: 0.5em 0em;
-  height: 29px;
-}
-
-form#sendmail div#toolbar ul {
-  list-style-image: none;
-  list-style-position: outside;
-  list-style-type:none;
-  margin:0px;
-  padding:0px;
-  /* border: 1px solid #DBDCE3; */
-}
-
-form#sendmail div#toolbar li {
-  background: none;
-  padding-left: 1em;
-  float: left;
-}
-
-form#sendmail div#toolbar li a {
-  font-family: Verdana,Tahoma,Arial,sans-serif;
-  color: #444444;
-}
-
-div#substitutions {
-  padding-left: 1em;
-}
--- a/web/data/gmap.utility.labeledmarker.js	Fri Jun 14 16:13:24 2013 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,2 +0,0 @@
-/* http://code.google.com/p/gmaps-utility-library/source/browse/trunk/labeledmarker/ */
-function LabeledMarker(latlng,opt_opts){this.latlng_=latlng;this.opts_=opt_opts;this.labelText_=opt_opts.labelText||"";this.labelClass_=opt_opts.labelClass||"LabeledMarker_markerLabel";this.labelOffset_=opt_opts.labelOffset||new GSize(0,0);this.clickable_=opt_opts.clickable||true;this.title_=opt_opts.title||"";this.labelVisibility_=true;if(opt_opts.draggable){opt_opts.draggable=false}GMarker.apply(this,arguments)};LabeledMarker.prototype=new GMarker(new GLatLng(0,0));LabeledMarker.prototype.initialize=function(map){GMarker.prototype.initialize.apply(this,arguments);this.map_=map;this.div_=document.createElement("div");this.div_.className=this.labelClass_;this.div_.innerHTML=this.labelText_;this.div_.style.position="absolute";this.div_.style.cursor="pointer";this.div_.title=this.title_;map.getPane(G_MAP_MARKER_PANE).appendChild(this.div_);if(this.clickable_){function newEventPassthru(obj,event){return function(){GEvent.trigger(obj,event)}}var eventPassthrus=['click','dblclick','mousedown','mouseup','mouseover','mouseout'];for(var i=0;i<eventPassthrus.length;i++){var name=eventPassthrus[i];GEvent.addDomListener(this.div_,name,newEventPassthru(this,name))}}};LabeledMarker.prototype.redraw=function(force){GMarker.prototype.redraw.apply(this,arguments);this.redrawLabel_()};LabeledMarker.prototype.redrawLabel_=function(){var p=this.map_.fromLatLngToDivPixel(this.latlng_);var z=GOverlay.getZIndex(this.latlng_.lat());this.div_.style.left=(p.x+this.labelOffset_.width)+"px";this.div_.style.top=(p.y+this.labelOffset_.height)+"px";this.div_.style.zIndex=z};LabeledMarker.prototype.remove=function(){GEvent.clearInstanceListeners(this.div_);if(this.div_.outerHTML){this.div_.outerHTML=""}if(this.div_.parentNode){this.div_.parentNode.removeChild(this.div_)}this.div_=null;GMarker.prototype.remove.apply(this,arguments)};LabeledMarker.prototype.copy=function(){return new 
LabeledMarker(this.latlng_,this.opts_)};LabeledMarker.prototype.show=function(){GMarker.prototype.show.apply(this,arguments);if(this.labelVisibility_){this.showLabel()}else{this.hideLabel()}};LabeledMarker.prototype.hide=function(){GMarker.prototype.hide.apply(this,arguments);this.hideLabel()};LabeledMarker.prototype.setLatLng=function(latlng){this.latlng_=latlng;GMarker.prototype.setLatLng.apply(this,arguments);this.redrawLabel_()};LabeledMarker.prototype.setLabelVisibility=function(visibility){this.labelVisibility_=visibility;if(!this.isHidden()){if(this.labelVisibility_){this.showLabel()}else{this.hideLabel()}}};LabeledMarker.prototype.getLabelVisibility=function(){return this.labelVisibility_};LabeledMarker.prototype.hideLabel=function(){this.div_.style.visibility='hidden'};LabeledMarker.prototype.showLabel=function(){this.div_.style.visibility='visible'};
Binary file web/data/gmap_blue_marker.png has changed
--- a/web/facet.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/web/facet.py	Fri Jun 14 16:26:25 2013 +0200
@@ -64,7 +64,7 @@
 
 from rql import nodes, utils
 
-from cubicweb import Unauthorized, typed_eid
+from cubicweb import Unauthorized
 from cubicweb.schema import display_name
 from cubicweb.uilib import css_em_num_value, domid
 from cubicweb.utils import make_uid
@@ -500,8 +500,7 @@
         return FacetVocabularyWidget
 
     def get_selected(self):
-        return frozenset(typed_eid(eid)
-                         for eid in self._cw.list_form_param(self.__regid__))
+        return frozenset(int(eid) for eid in self._cw.list_form_param(self.__regid__))
 
     def get_widget(self):
         """Return the widget instance to use to display this facet.
--- a/web/formwidgets.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/web/formwidgets.py	Fri Jun 14 16:26:25 2013 +0200
@@ -488,7 +488,7 @@
   </tr>
 </table>
 """
-    add_button = ('<input type="button" id="cwinoutadd" class="wdgButton cwinoutadd" '
+    add_button = ('<input type="button" class="wdgButton cwinoutadd" '
                   'value="&gt;&gt;" size="10" />')
     remove_button = ('<input type="button" class="wdgButton cwinoutremove" '
                      'value="&lt;&lt;" size="10" />')
--- a/web/request.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/web/request.py	Fri Jun 14 16:26:25 2013 +0200
@@ -23,6 +23,7 @@
 import random
 import base64
 import urllib
+from StringIO import StringIO
 from hashlib import sha1 # pylint: disable=E0611
 from Cookie import SimpleCookie
 from calendar import timegm
@@ -41,7 +42,7 @@
 from cubicweb.dbapi import DBAPIRequest
 from cubicweb.uilib import remove_html_tags, js
 from cubicweb.utils import SizeConstrainedList, HTMLHead, make_uid
-from cubicweb.view import STRICT_DOCTYPE, TRANSITIONAL_DOCTYPE_NOEXT
+from cubicweb.view import TRANSITIONAL_DOCTYPE_NOEXT
 from cubicweb.web import (INTERNAL_FIELD_VALUE, LOGGER, NothingToEdit,
                           RequestError, StatusResponse)
 from cubicweb.web.httpcache import GMTOFFSET, get_validators
@@ -116,6 +117,8 @@
             self._headers_in.addRawHeader(k, v)
         #: form parameters
         self.setup_params(form)
+        #: received body
+        self.content = StringIO()
         #: dictionary that may be used to store request data that has to be
         #: shared among various components used to publish the request (views,
         #: controller, application...)
@@ -899,29 +902,26 @@
         values = _parse_accept_header(accepteds, value_parser, value_sort_key)
         return (raw_value for (raw_value, parsed_value, score) in values)
 
+    @deprecated('[3.17] demote_to_html is deprecated as we always serve html')
     def demote_to_html(self):
         """helper method to dynamically set request content type to text/html
 
         The global doctype and xmldec must also be changed otherwise the browser
         will display '<[' at the beginning of the page
         """
-        if not self.vreg.config['force-html-content-type']:
-            if not hasattr(self, 'main_stream'):
-                raise Exception("Can't demote to html from an ajax context. You "
-                                "should change force-html-content-type to yes "
-                                "in the instance configuration file.")
-            self.set_content_type('text/html')
-            self.main_stream.set_doctype(TRANSITIONAL_DOCTYPE_NOEXT)
+        pass
+
 
     # xml doctype #############################################################
 
-    def set_doctype(self, doctype, reset_xmldecl=True):
+    def set_doctype(self, doctype, reset_xmldecl=None):
         """helper method to dynamically change page doctype
 
         :param doctype: the new doctype, e.g. '<!DOCTYPE html>'
-        :param reset_xmldecl: if True, remove the '<?xml version="1.0"?>'
-                              declaration from the page
         """
+        if reset_xmldecl is not None:
+            warn('[3.17] reset_xmldecl is deprecated as we only serve html',
+                 DeprecationWarning, stacklevel=2)
         self.main_stream.set_doctype(doctype, reset_xmldecl)
 
     # page data management ####################################################
@@ -962,6 +962,7 @@
         useragent = self.useragent()
         return useragent and 'MSIE' in useragent
 
+    @deprecated('[3.17] xhtml_browser is deprecated (xhtml is no longer served)')
     def xhtml_browser(self):
         """return True if the browser is considered as xhtml compatible.
 
@@ -969,28 +970,11 @@
         application/xhtml+xml, this method will always return False, even though
         this is semantically different
         """
-        if self.vreg.config['force-html-content-type']:
-            return False
-        useragent = self.useragent()
-        # * MSIE/Konqueror does not support xml content-type
-        # * Opera supports xhtml and handles namespaces properly but it breaks
-        #   jQuery.attr()
-        if useragent and ('MSIE' in useragent or 'KHTML' in useragent
-                          or 'Opera' in useragent):
-            return False
-        return True
+        return False
 
     def html_content_type(self):
-        if self.xhtml_browser():
-            return 'application/xhtml+xml'
         return 'text/html'
 
-    def document_surrounding_div(self):
-        if self.xhtml_browser():
-            return (u'<?xml version="1.0"?>\n' + STRICT_DOCTYPE + # XXX encoding ?
-                    u'<div xmlns="http://www.w3.org/1999/xhtml" xmlns:cubicweb="http://www.logilab.org/2008/cubicweb">')
-        return u'<div>'
-
     @deprecated('[3.9] use req.uiprops[rid]')
     def external_resource(self, rid, default=_MARKER):
         """return a path to an external resource, using its identifier
--- a/web/test/test_views.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/web/test/test_views.py	Fri Jun 14 16:26:25 2013 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -16,7 +16,7 @@
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """automatic tests"""
-
+from cubicweb.devtools import htmlparser
 from cubicweb.devtools.testlib import CubicWebTC, AutoPopulateTest, AutomaticWebTest
 from cubicweb.view import AnyRsetView
 
@@ -27,6 +27,14 @@
         'Any COUNT(X) WHERE X is CWUser',
         ]
 
+    def to_test_etypes(self):
+        # We do not really want to test cube views here. So we can drop testing 
+        # some EntityType. The two Blog types below require the sioc cube that 
+        # we do not want to add as a dependency.
+        etypes = super(AutomaticWebTest, self).to_test_etypes()
+        etypes -= set(('Blog', 'BlogEntry'))
+        return etypes
+
 
 class SomeView(AnyRsetView):
     __regid__ = 'someview'
@@ -51,7 +59,7 @@
         self.assertFalse('jquery.tablesorter.js' in self.view('oneline', rset))
         # but should be included by the tableview
         rset = self.execute('Any P,F,S LIMIT 1 WHERE P is CWUser, P firstname F, P surname S')
-        self.assertTrue('jquery.tablesorter.js' in self.view('table', rset))
+        self.assertIn('jquery.tablesorter.js', self.view('table', rset).source)
 
     def test_js_added_only_once(self):
         self.vreg._loadedmods[__name__] = {}
--- a/web/test/unittest_form.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/web/test/unittest_form.py	Fri Jun 14 16:26:25 2013 +0200
@@ -122,14 +122,6 @@
         data = form.render(row=0, rtype='content', formid='base', action='edit_rtype')
         self.assertTrue('content_format' in data)
 
-    # form view tests #########################################################
-
-    def test_massmailing_formview(self):
-        self.execute('INSERT EmailAddress X: X address L + "@cubicweb.org", '
-                     'U use_email X WHERE U is CWUser, U login L')
-        rset = self.execute('CWUser X')
-        self.view('massmailing', rset, template=None)
-
 
     # form tests ##############################################################
 
--- a/web/test/unittest_idownloadable.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/web/test/unittest_idownloadable.py	Fri Jun 14 16:26:25 2013 +0200
@@ -146,7 +146,7 @@
         finally:
             self.app.error_handler = errhdlr
         get = req.headers_out.getRawHeaders
-        self.assertEqual(['application/xhtml+xml'],
+        self.assertEqual(['text/html;charset=UTF-8'],
                          get('content-type'))
         self.assertEqual(None,
                          get('content-disposition'))
--- a/web/test/unittest_views_actions.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/web/test/unittest_views_actions.py	Fri Jun 14 16:26:25 2013 +0200
@@ -30,16 +30,6 @@
         vaction = [action for action in actions if action.__regid__ == 'view'][0]
         self.assertEqual(vaction.url(), 'http://testing.fr/cubicweb/view?rql=CWUser%20X')
 
-    def test_sendmail_action(self):
-        req = self.request()
-        rset = self.execute('Any X WHERE X login "admin"', req=req)
-        actions = self.vreg['actions'].poss_visible_objects(req, rset=rset)
-        self.assertTrue([action for action in actions if action.__regid__ == 'sendemail'])
-        self.login('anon')
-        req = self.request()
-        rset = self.execute('Any X WHERE X login "anon"', req=req)
-        actions = self.vreg['actions'].poss_visible_objects(req, rset=rset)
-        self.assertFalse([action for action in actions if action.__regid__ == 'sendemail'])
 
 if __name__ == '__main__':
     unittest_main()
--- a/web/test/unittest_views_basecontrollers.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/web/test/unittest_views_basecontrollers.py	Fri Jun 14 16:26:25 2013 +0200
@@ -32,6 +32,8 @@
 from cubicweb.utils import json_dumps
 from cubicweb.uilib import rql_for_eid
 from cubicweb.web import INTERNAL_FIELD_VALUE, Redirect, RequestError, RemoteCallFailed
+import cubicweb.server.session
+from cubicweb.server.session import Transaction as OldTransaction
 from cubicweb.entities.authobjs import CWUser
 from cubicweb.web.views.autoform import get_pending_inserts, get_pending_deletes
 from cubicweb.web.views.basecontrollers import JSonController, xhtmlize, jsonize
@@ -533,18 +535,6 @@
             p.__class__.skip_copy_for = old_skips
 
 
-class EmbedControllerTC(CubicWebTC):
-
-    def test_nonregr_embed_publish(self):
-        # This test looks a bit stupid but at least it will probably
-        # fail if the controller API changes and if EmbedController is not
-        # updated (which is what happened before this test)
-        req = self.request()
-        req.form['url'] = 'http://www.logilab.fr/'
-        controller = self.vreg['controllers'].select('embed', req)
-        result = controller.publish(rset=None)
-
-
 class ReportBugControllerTC(CubicWebTC):
 
     def test_usable_by_guest(self):
@@ -554,21 +544,6 @@
         self.vreg['controllers'].select('reportbug', self.request(description='hop'))
 
 
-class SendMailControllerTC(CubicWebTC):
-
-    def test_not_usable_by_guest(self):
-        self.assertRaises(NoSelectableObject,
-                          self.vreg['controllers'].select, 'sendmail', self.request())
-        self.vreg['controllers'].select('sendmail',
-                                        self.request(subject='toto',
-                                                     recipient='toto@logilab.fr',
-                                                     mailbody='hop'))
-        self.login('anon')
-        self.assertRaises(NoSelectableObject,
-                          self.vreg['controllers'].select, 'sendmail', self.request())
-
-
-
 class AjaxControllerTC(CubicWebTC):
     tested_controller = 'ajax'
 
@@ -591,11 +566,6 @@
         rset = self.john.as_rset()
         rset.req = req
         source = ctrl.publish()
-        self.assertTrue(source.startswith('<?xml version="1.0"?>\n' + STRICT_DOCTYPE +
-                                          u'<div xmlns="http://www.w3.org/1999/xhtml" xmlns:cubicweb="http://www.logilab.org/2008/cubicweb">')
-                        )
-        req.xhtml_browser = lambda: False
-        source = ctrl.publish()
         self.assertTrue(source.startswith('<div>'))
 
 #     def test_json_exec(self):
@@ -769,9 +739,7 @@
         def js_foo(self):
             return u'hello'
         res, req = self.remote_call('foo')
-        self.assertEqual(res,
-                         '<?xml version="1.0"?>\n' + STRICT_DOCTYPE +
-                         u'<div xmlns="http://www.w3.org/1999/xhtml" xmlns:cubicweb="http://www.logilab.org/2008/cubicweb">hello</div>')
+        self.assertEqual(u'<div>hello</div>', res)
 
     def test_monkeypatch_jsoncontroller_jsonize(self):
         self.assertRaises(RemoteCallFailed, self.remote_call, 'foo')
@@ -793,9 +761,20 @@
 
 class UndoControllerTC(CubicWebTC):
 
+    def setUp(self):
+        class Transaction(OldTransaction):
+            """Force undo feature to be turned on in all case"""
+            undo_actions = property(lambda tx: True, lambda x, y:None)
+        cubicweb.server.session.Transaction = Transaction
+        super(UndoControllerTC, self).setUp()
+
+    def tearDown(self):
+        super(UndoControllerTC, self).tearDown()
+        cubicweb.server.session.Transaction = OldTransaction
+
+
     def setup_database(self):
         req = self.request()
-        self.session.undo_actions = True
         self.toto = self.create_user(req, 'toto', password='toto', groups=('users',),
                                      commit=False)
         self.txuuid_toto = self.commit()
--- a/web/test/unittest_views_basetemplates.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/web/test/unittest_views_basetemplates.py	Fri Jun 14 16:26:25 2013 +0200
@@ -15,14 +15,15 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+
 from cubicweb.devtools.testlib import CubicWebTC
-from cubicweb.devtools.htmlparser import DTDValidator
+from cubicweb.devtools.htmlparser import XMLValidator
 
 
 class LogFormTemplateTC(CubicWebTC):
 
     def _login_labels(self):
-        valid = self.content_type_validators.get('text/html', DTDValidator)()
+        valid = self.content_type_validators.get('text/html', XMLValidator)()
         req = self.request()
         req.cnx.anonymous_connection = True
         page = valid.parse_string(self.vreg['views'].main_template(self.request(), 'login'))
--- a/web/test/unittest_views_baseviews.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/web/test/unittest_views_baseviews.py	Fri Jun 14 16:26:25 2013 +0200
@@ -21,7 +21,7 @@
 
 from cubicweb.devtools.testlib import CubicWebTC
 from cubicweb.utils import json
-from cubicweb.view import StartupView, TRANSITIONAL_DOCTYPE_NOEXT
+from cubicweb.view import StartupView, TRANSITIONAL_DOCTYPE
 from cubicweb.web.htmlwidgets import TableWidget
 from cubicweb.web.views import vid_from_rset
 
@@ -133,31 +133,26 @@
             html_source = self.view('my-view').source
             source_lines = [line.strip() for line in html_source.splitlines(False)
                             if line.strip()]
-            self.assertListEqual(source_lines[:2],
-                                 ['<!DOCTYPE html>',
-                                  '<html xmlns="http://www.w3.org/1999/xhtml" xmlns:cubicweb="http://www.logilab.org/2008/cubicweb" xml:lang="en" lang="en">'])
+            self.assertListEqual(['<!DOCTYPE html>', '<html lang="en">'], source_lines[:2])
 
     def test_set_doctype_no_reset_xmldecl(self):
         """
         tests `cubicweb.web.request.CubicWebRequestBase.set_doctype`
         with no xmldecl reset
         """
-        html_doctype = TRANSITIONAL_DOCTYPE_NOEXT.strip()
+        html_doctype = TRANSITIONAL_DOCTYPE.strip()
         class MyView(StartupView):
             __regid__ = 'my-view'
             def call(self):
                 self._cw.set_doctype(html_doctype, reset_xmldecl=False)
-                self._cw.main_stream.set_namespaces([('xmlns', 'http://www.w3.org/1999/xhtml')])
                 self._cw.main_stream.set_htmlattrs([('lang', 'cz')])
 
         with self.temporary_appobjects(MyView):
             html_source = self.view('my-view').source
             source_lines = [line.strip() for line in html_source.splitlines(False)
                             if line.strip()]
-            self.assertListEqual(source_lines[:3],
-                                 ['<?xml version="1.0" encoding="UTF-8"?>',
-                                  html_doctype,
-                                  '<html xmlns="http://www.w3.org/1999/xhtml" lang="cz">'])
+            self.assertListEqual([html_doctype, '<html lang="cz">', '<head>'],
+                                 source_lines[:3])
 
 if __name__ == '__main__':
     unittest_main()
--- a/web/test/unittest_views_embeding.py	Fri Jun 14 16:13:24 2013 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,53 +0,0 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-#
-# This file is part of CubicWeb.
-#
-# CubicWeb is free software: you can redistribute it and/or modify it under the
-# terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option)
-# any later version.
-#
-# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""
-
-"""
-
-from logilab.common.testlib import TestCase, unittest_main
-
-from cubicweb.web.views.embedding import prefix_links
-
-class UILIBTC(TestCase):
-
-
-    def test_prefix_links(self):
-        """suppose we are embedding http://embedded.com/page1.html"""
-        orig = ['<a href="http://www.perdu.com">perdu ?</a>',
-        '<a href="http://embedded.com/page1.html">perdu ?</a>',
-        '<a href="/page2.html">perdu ?</a>',
-        '<a href="page3.html">perdu ?</a>',
-        '<img src="http://www.perdu.com/img.png"/>',
-        '<img src="/img.png"/>',
-        '<img src="img.png"/>',
-        ]
-        expected = ['<a href="PREFIXhttp%3A%2F%2Fwww.perdu.com">perdu ?</a>',
-        '<a href="PREFIXhttp%3A%2F%2Fembedded.com%2Fpage1.html">perdu ?</a>',
-        '<a href="PREFIXhttp%3A%2F%2Fembedded.com%2Fpage2.html">perdu ?</a>',
-        '<a href="PREFIXhttp%3A%2F%2Fembedded.com%2Fpage3.html">perdu ?</a>',
-        '<img src="http://www.perdu.com/img.png"/>',
-        '<img src="http://embedded.com/img.png"/>',
-        '<img src="http://embedded.com/img.png"/>',
-        ]
-        for orig_a, expected_a in zip(orig, expected):
-            got = prefix_links(orig_a, 'PREFIX', 'http://embedded.com/page1.html')
-            self.assertEqual(got, expected_a)
-
-if __name__ == '__main__':
-    unittest_main()
-
--- a/web/views/ajaxcontroller.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/web/views/ajaxcontroller.py	Fri Jun 14 16:26:25 2013 +0200
@@ -268,7 +268,7 @@
                 return content
             elif self.output_type == 'xhtml':
                 self._cw.set_content_type(self._cw.html_content_type())
-                return ''.join((self._cw.document_surrounding_div(),
+                return ''.join((u'<div>',
                                 content.strip(), u'</div>'))
             elif self.output_type == 'json':
                 self._cw.set_content_type('application/json')
--- a/web/views/autoform.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/web/views/autoform.py	Fri Jun 14 16:26:25 2013 +0200
@@ -127,7 +127,7 @@
 from logilab.common.decorators import iclassmethod, cached
 from logilab.common.deprecation import deprecated
 
-from cubicweb import typed_eid, neg_role, uilib
+from cubicweb import neg_role, uilib
 from cubicweb.schema import display_name
 from cubicweb.view import EntityView
 from cubicweb.predicates import (
@@ -272,7 +272,7 @@
                          **kwargs)
 
     def form_title(self, entity, i18nctx):
-        return self._cw.pgettext(i18nctx, entity.__regid__)
+        return self._cw.pgettext(i18nctx, entity.cw_etype)
 
     def add_hiddens(self, form, entity):
         """to ease overriding (see cubes.vcsfile.views.forms for instance)"""
@@ -415,7 +415,7 @@
         subjs, rtype, objs = rstr.split(':')
         for subj in subjs.split('_'):
             for obj in objs.split('_'):
-                yield typed_eid(subj), rtype, typed_eid(obj)
+                yield int(subj), rtype, int(obj)
 
 def delete_relations(req, rdefs):
     """delete relations from the repository"""
@@ -460,12 +460,12 @@
 def _add_pending(req, eidfrom, rel, eidto, kind):
     key = 'pending_%s' % kind
     pendings = req.session.data.setdefault(key, set())
-    pendings.add( (typed_eid(eidfrom), rel, typed_eid(eidto)) )
+    pendings.add( (int(eidfrom), rel, int(eidto)) )
 
 def _remove_pending(req, eidfrom, rel, eidto, kind):
     key = 'pending_%s' % kind
     pendings = req.session.data[key]
-    pendings.remove( (typed_eid(eidfrom), rel, typed_eid(eidto)) )
+    pendings.remove( (int(eidfrom), rel, int(eidto)) )
 
 @ajaxfunc(output_type='json')
 def remove_pending_insert(self, (eidfrom, rel, eidto)):
@@ -498,7 +498,7 @@
         for rschema, role, related in field.relations_table(form):
             # already linked entities
             if related:
-                label = rschema.display_name(req, role, context=form.edited_entity.__regid__)
+                label = rschema.display_name(req, role, context=form.edited_entity.cw_etype)
                 w(u'<tr><th class="labelCol">%s</th>' % label)
                 w(u'<td>')
                 w(u'<ul>')
@@ -606,13 +606,13 @@
         for pendingid in pending_inserts:
             eidfrom, rtype, eidto = pendingid.split(':')
             pendingid = 'id' + pendingid
-            if typed_eid(eidfrom) == entity.eid: # subject
+            if int(eidfrom) == entity.eid: # subject
                 label = display_name(form._cw, rtype, 'subject',
-                                     entity.__regid__)
+                                     entity.cw_etype)
                 reid = eidto
             else:
                 label = display_name(form._cw, rtype, 'object',
-                                     entity.__regid__)
+                                     entity.cw_etype)
                 reid = eidfrom
             jscall = "javascript: cancelPendingInsert('%s', 'tr', null, %s);" \
                      % (pendingid, entity.eid)
@@ -852,7 +852,7 @@
         for rschema, _, role in self._relations_by_section('relations',
                                                            strict=True):
             result.append( (rschema.display_name(self.edited_entity._cw, role,
-                                                 self.edited_entity.__regid__),
+                                                 self.edited_entity.cw_etype),
                             rschema, role) )
         return sorted(result)
 
--- a/web/views/basecontrollers.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/web/views/basecontrollers.py	Fri Jun 14 16:26:25 2013 +0200
@@ -27,7 +27,7 @@
 from logilab.common.deprecation import deprecated
 
 from cubicweb import (NoSelectableObject, ObjectNotFound, ValidationError,
-                      AuthenticationError, typed_eid, UndoTransactionException,
+                      AuthenticationError, UndoTransactionException,
                       Forbidden)
 from cubicweb.utils import json_dumps
 from cubicweb.predicates import (authenticated_user, anonymous_user,
@@ -54,7 +54,7 @@
     def wrapper(self, *args, **kwargs):
         self._cw.set_content_type(self._cw.html_content_type())
         result = func(self, *args, **kwargs)
-        return ''.join((self._cw.document_surrounding_div(), result.strip(),
+        return ''.join((u'<div>', result.strip(),
                         u'</div>'))
     wrapper.__name__ = func.__name__
     return wrapper
@@ -176,7 +176,7 @@
         if not '__linkto' in req.form:
             return
         if eid is None:
-            eid = typed_eid(req.form['eid'])
+            eid = int(req.form['eid'])
         for linkto in req.list_form_param('__linkto', pop=True):
             rtype, eids, target = linkto.split(':')
             assert target in ('subject', 'object')
@@ -186,7 +186,7 @@
             else:
                 rql = 'SET Y %s X WHERE X eid %%(x)s, Y eid %%(y)s' % rtype
             for teid in eids:
-                req.execute(rql, {'x': eid, 'y': typed_eid(teid)})
+                req.execute(rql, {'x': eid, 'y': int(teid)})
 
 
 def _validation_error(req, ex):
@@ -271,7 +271,6 @@
         return ajax_controller.publish(rset)
 
 
-# XXX move to massmailing
 class MailBugReportController(Controller):
     __regid__ = 'reportbug'
     __select__ = match_form_params('description')
--- a/web/views/basetemplates.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/web/views/basetemplates.py	Fri Jun 14 16:26:25 2013 +0200
@@ -108,7 +108,7 @@
         if (('__notemplate' in self._cw.form)
             and view.templatable
             and view.content_type == self._cw.html_content_type()):
-            view.w(self._cw.document_surrounding_div())
+            view.w(u'<div>')
             view.render()
             view.w(u'</div>')
         else:
--- a/web/views/baseviews.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/web/views/baseviews.py	Fri Jun 14 16:26:25 2013 +0200
@@ -157,7 +157,7 @@
 class InContextView(EntityView):
     """:__regid__: *incontext*
 
-    This view is used whenthe entity should be considered as displayed in its
+    This view is used when the entity should be considered as displayed in its
     context. By default it produces the result of ``entity.dc_title()`` wrapped in a
     link leading to the primary view of the entity.
     """
@@ -593,7 +593,7 @@
         year, month = key
         label = u'%s %s [%s]' % (self._cw._(calendar.MONTHNAMES[int(month)-1]),
                                  year, len(items))
-        etypes = set(entity.__regid__ for entity in items)
+        etypes = set(entity.cw_etype for entity in items)
         vtitle = '%s %s' % (', '.join(display_name(self._cw, etype, 'plural')
                                       for etype in etypes),
                             label)
@@ -620,7 +620,7 @@
         if key[0] is None:
             return
         label = u'%s [%s]' % (key[0], len(items))
-        etypes = set(entity.__regid__ for entity in items)
+        etypes = set(entity.cw_etype for entity in items)
         vtitle = self._cw._('%(etype)s by %(author)s') % {
             'etype': ', '.join(display_name(self._cw, etype, 'plural')
                                for etype in etypes),
--- a/web/views/bookmark.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/web/views/bookmark.py	Fri Jun 14 16:26:25 2013 +0200
@@ -22,7 +22,7 @@
 
 from logilab.mtconverter import xml_escape
 
-from cubicweb import Unauthorized, typed_eid
+from cubicweb import Unauthorized
 from cubicweb.predicates import is_instance, one_line_rset
 from cubicweb.web import action, component, htmlwidgets, formwidgets as fw
 from cubicweb.web.views import uicfg, primary
@@ -137,4 +137,4 @@
 @ajaxfunc
 def delete_bookmark(self, beid):
     rql = 'DELETE B bookmarked_by U WHERE B eid %(b)s, U eid %(u)s'
-    self._cw.execute(rql, {'b': typed_eid(beid), 'u' : self._cw.user.eid})
+    self._cw.execute(rql, {'b': int(beid), 'u' : self._cw.user.eid})
--- a/web/views/calendar.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/web/views/calendar.py	Fri Jun 14 16:26:25 2013 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -188,7 +188,6 @@
         }
 
     def call(self):
-        self._cw.demote_to_html()
         self._cw.add_css(('fullcalendar.css', 'cubicweb.calendar.css'))
         self._cw.add_js(('jquery.ui.js', 'fullcalendar.min.js', 'jquery.qtip.min.js', 'fullcalendar.locale.js'))
         self.calendar_id = 'cal' + make_uid('uid')
--- a/web/views/editcontroller.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/web/views/editcontroller.py	Fri Jun 14 16:26:25 2013 +0200
@@ -25,7 +25,7 @@
 
 from rql.utils import rqlvar_maker
 
-from cubicweb import Binary, ValidationError, typed_eid
+from cubicweb import Binary, ValidationError
 from cubicweb.view import EntityAdapter, implements_adapter_compat
 from cubicweb.predicates import is_instance
 from cubicweb.web import (INTERNAL_FIELD_VALUE, RequestError, NothingToEdit,
@@ -67,7 +67,7 @@
 
 def valerror_eid(eid):
     try:
-        return typed_eid(eid)
+        return int(eid)
     except (ValueError, TypeError):
         return eid
 
@@ -221,7 +221,7 @@
             todelete = self._cw.list_form_param('__delete', formparams, pop=True)
             autoform.delete_relations(self._cw, todelete)
         if '__cloned_eid' in formparams:
-            entity.copy_relations(typed_eid(formparams['__cloned_eid']))
+            entity.copy_relations(int(formparams['__cloned_eid']))
         if is_main_entity: # only execute linkto for the main entity
             self.execute_linkto(entity.eid)
         return eid
--- a/web/views/editviews.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/web/views/editviews.py	Fri Jun 14 16:26:25 2013 +0200
@@ -23,7 +23,6 @@
 from logilab.common.decorators import cached
 from logilab.mtconverter import xml_escape
 
-from cubicweb import typed_eid
 from cubicweb.view import EntityView, StartupView
 from cubicweb.predicates import (one_line_rset, non_final_entity,
                                  match_search_state)
@@ -53,7 +52,7 @@
     def filter_box_context_info(self):
         entity = self.cw_rset.get_entity(0, 0)
         role, eid, rtype, etype = self._cw.search_state[1]
-        assert entity.eid == typed_eid(eid)
+        assert entity.eid == int(eid)
         # the default behaviour is to fetch all unrelated entities and display
         # them. Use fetch_order and not fetch_unrelated_order as sort method
         # since the latter is mainly there to select relevant items in the combo
--- a/web/views/embedding.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/web/views/embedding.py	Fri Jun 14 16:26:25 2013 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -19,172 +19,20 @@
 functionality.
 """
 
-__docformat__ = "restructuredtext en"
-_ = unicode
-
-import re
-from urlparse import urljoin
-from urllib2 import urlopen, Request, HTTPError
-from urllib import quote as urlquote # XXX should use view.url_quote method
-
-from logilab.mtconverter import guess_encoding
-
-from cubicweb.predicates import (one_line_rset, score_entity, implements,
-                                adaptable, match_search_state)
-from cubicweb.interfaces import IEmbedable
-from cubicweb.view import NOINDEX, NOFOLLOW, EntityAdapter, implements_adapter_compat
-from cubicweb.uilib import soup2xhtml
-from cubicweb.web.controller import Controller
-from cubicweb.web.action import Action
-from cubicweb.web.views import basetemplates
-
-
-class IEmbedableAdapter(EntityAdapter):
-    """interface for embedable entities"""
-    __needs_bw_compat__ = True
-    __regid__ = 'IEmbedable'
-    __select__ = implements(IEmbedable, warn=False) # XXX for bw compat, should be abstract
-
-    @implements_adapter_compat('IEmbedable')
-    def embeded_url(self):
-        """embed action interface"""
-        raise NotImplementedError
-
-
-class ExternalTemplate(basetemplates.TheMainTemplate):
-    """template embeding an external web pages into CubicWeb web interface
-    """
-    __regid__ = 'external'
+from logilab.common.deprecation import class_moved, moved
 
-    def call(self, body):
-        # XXX fallback to HTML 4 mode when embeding ?
-        self.set_request_content_type()
-        self._cw.search_state = ('normal',)
-        self.template_header(self.content_type, None, self._cw._('external page'),
-                             [NOINDEX, NOFOLLOW])
-        self.content_header()
-        self.w(body)
-        self.content_footer()
-        self.template_footer()
-
-
-class EmbedController(Controller):
-    __regid__ = 'embed'
-    template = 'external'
-
-    def publish(self, rset=None):
-        req = self._cw
-        if 'custom_css' in req.form:
-            req.add_css(req.form['custom_css'])
-        embedded_url = req.form['url']
-        allowed = self._cw.vreg.config['embed-allowed']
-        _ = req._
-        if allowed is None or not allowed.match(embedded_url):
-            body = '<h2>%s</h2><h3>%s</h3>' % (
-                _('error while embedding page'),
-                _('embedding this url is forbidden'))
-        else:
-            prefix = req.build_url(self.__regid__, url='')
-            authorization = req.get_header('Authorization')
-            if authorization:
-                headers = {'Authorization' : authorization}
-            else:
-                headers = {}
-            try:
-                body = embed_external_page(embedded_url, prefix,
-                                           headers, req.form.get('custom_css'))
-                body = soup2xhtml(body, self._cw.encoding)
-            except HTTPError as err:
-                body = '<h2>%s</h2><h3>%s</h3>' % (
-                    _('error while embedding page'), err)
-        rset = self.process_rql()
-        return self._cw.vreg['views'].main_template(req, self.template,
-                                                    rset=rset, body=body)
-
+try:
+    from cubes.embed.views import *
 
-def entity_has_embedable_url(entity):
-    """return 1 if the entity provides an allowed embedable url"""
-    url = entity.cw_adapt_to('IEmbedable').embeded_url()
-    if not url or not url.strip():
-        return 0
-    allowed = entity._cw.vreg.config['embed-allowed']
-    if allowed is None or not allowed.match(url):
-        return 0
-    return 1
-
-
-class EmbedAction(Action):
-    """display an 'embed' link on entity implementing `embeded_url` method
-    if the returned url match embeding configuration
-    """
-    __regid__ = 'embed'
-    __select__ = (one_line_rset() & match_search_state('normal')
-                  & adaptable('IEmbedable')
-                  & score_entity(entity_has_embedable_url))
-
-    title = _('embed')
-
-    def url(self, row=0):
-        entity = self.cw_rset.get_entity(row, 0)
-        url = urljoin(self._cw.base_url(), entity.cw_adapt_to('IEmbedable').embeded_url())
-        if 'rql' in self._cw.form:
-            return self._cw.build_url('embed', url=url, rql=self._cw.form['rql'])
-        return self._cw.build_url('embed', url=url)
-
-
-
-# functions doing necessary substitutions to embed an external html page ######
-
-
-BODY_RGX = re.compile('<body.*?>(.*?)</body>', re.I | re.S | re.U)
-HREF_RGX = re.compile('<a\s+href="([^"]*)"', re.I | re.S | re.U)
-SRC_RGX = re.compile('<img\s+src="([^"]*)"', re.I | re.S | re.U)
-
-
-class replace_href:
-    def __init__(self, prefix, custom_css=None):
-        self.prefix = prefix
-        self.custom_css = custom_css
-
-    def __call__(self, match):
-        original_url = match.group(1)
-        url = self.prefix + urlquote(original_url, safe='')
-        if self.custom_css is not None:
-            if '?' in url:
-                url = '%s&amp;custom_css=%s' % (url, self.custom_css)
-            else:
-                url = '%s?custom_css=%s' % (url, self.custom_css)
-        return '<a href="%s"' % url
-
-
-class absolutize_links:
-    def __init__(self, embedded_url, tag, custom_css=None):
-        self.embedded_url = embedded_url
-        self.tag = tag
-        self.custom_css = custom_css
-
-    def __call__(self, match):
-        original_url = match.group(1)
-        if '://' in original_url:
-            return match.group(0) # leave it unchanged
-        return '%s="%s"' % (self.tag, urljoin(self.embedded_url, original_url))
-
-
-def prefix_links(body, prefix, embedded_url, custom_css=None):
-    filters = ((HREF_RGX, absolutize_links(embedded_url, '<a href', custom_css)),
-               (SRC_RGX, absolutize_links(embedded_url, '<img src')),
-               (HREF_RGX, replace_href(prefix, custom_css)))
-    for rgx, repl in filters:
-        body = rgx.sub(repl, body)
-    return body
-
-
-def embed_external_page(url, prefix, headers=None, custom_css=None):
-    req = Request(url, headers=(headers or {}))
-    content = urlopen(req).read()
-    page_source = unicode(content, guess_encoding(content), 'replace')
-    page_source = page_source
-    match = BODY_RGX.search(page_source)
-    if match is None:
-        return page_source
-    return prefix_links(match.group(1), prefix, url, custom_css)
+    IEmbedableAdapter = class_moved(IEmbedableAdapter, message='[3.17] IEmbedableAdapter moved to cubes.embed.views')
+    ExternalTemplate = class_moved(ExternalTemplate, message='[3.17] ExternalTemplate moved to cubes.embed.views')
+    EmbedController = class_moved(EmbedController, message='[3.17] EmbedController moved to cubes.embed.views')
+    entity_has_embedable_url = moved('cubes.embed.views', 'entity_has_embedable_url')
+    EmbedAction = class_moved(EmbedAction, message='[3.17] EmbedAction moved to cubes.embed.views')
+    replace_href = class_moved(replace_href, message='[3.17] replace_href moved to cubes.embed.views')
+    embed_external_page = moved('cubes.embed.views', 'embed_external_page')
+    absolutize_links = class_moved(absolutize_links, message='[3.17] absolutize_links moved to cubes.embed.views')
+    prefix_links = moved('cubes.embed.views', 'prefix_links')
+except ImportError:
+    from cubicweb.web import LOGGER
+    LOGGER.warning('[3.17] embedding extracted to cube embed that was not found. try installing it.')
--- a/web/views/forms.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/web/views/forms.py	Fri Jun 14 16:26:25 2013 +0200
@@ -51,7 +51,7 @@
 from logilab.common.textutils import splitstrip
 from logilab.common.deprecation import deprecated
 
-from cubicweb import ValidationError, typed_eid
+from cubicweb import ValidationError
 from cubicweb.utils import support_args
 from cubicweb.predicates import non_final_entity, match_kwargs, one_line_rset
 from cubicweb.web import RequestError, ProcessFormError
@@ -358,7 +358,7 @@
             'autoform_field', self._cw, entity=self.edited_entity)
         self.uicfg_affk = self._cw.vreg['uicfg'].select(
             'autoform_field_kwargs', self._cw, entity=self.edited_entity)
-        self.add_hidden('__type', self.edited_entity.__regid__, eidparam=True)
+        self.add_hidden('__type', self.edited_entity.cw_etype, eidparam=True)
         self.add_hidden('eid', self.edited_entity.eid)
         # mainform default to true in parent, hence default to True
         if kwargs.get('mainform', True) or kwargs.get('mainentity', False):
@@ -404,7 +404,7 @@
         linked_to = {}
         for linkto in self._cw.list_form_param('__linkto'):
             ltrtype, eid, ltrole = linkto.split(':')
-            linked_to.setdefault((ltrtype, ltrole), []).append(typed_eid(eid))
+            linked_to.setdefault((ltrtype, ltrole), []).append(int(eid))
         return linked_to
 
     def session_key(self):
@@ -436,7 +436,7 @@
         # created entity)
         assert eid or eid == 0, repr(eid) # 0 is a valid eid
         try:
-            return typed_eid(eid)
+            return int(eid)
         except ValueError:
             try:
                 return self._cw.data['eidmap'][eid]
--- a/web/views/igeocodable.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/web/views/igeocodable.py	Fri Jun 14 16:26:25 2013 +0200
@@ -17,121 +17,21 @@
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """Specific views for entities implementing IGeocodable"""
 
-__docformat__ = "restructuredtext en"
-
-from cubicweb.interfaces import IGeocodable
-from cubicweb.view import EntityView, EntityAdapter, implements_adapter_compat
-from cubicweb.predicates import implements, adaptable
-from cubicweb.utils import json_dumps
-
-class IGeocodableAdapter(EntityAdapter):
-    """interface required by geocoding views such as gmap-view"""
-    __needs_bw_compat__ = True
-    __regid__ = 'IGeocodable'
-    __select__ = implements(IGeocodable, warn=False) # XXX for bw compat, should be abstract
-
-    @property
-    @implements_adapter_compat('IGeocodable')
-    def latitude(self):
-        """returns the latitude of the entity in degree (-90 < float < +90)"""
-        raise NotImplementedError
+try:
+    from cubes.geocoding.views import (IGeocodableAdapter,
+                                       GeocodingJsonView,
+                                       GoogleMapBubbleView,
+                                       GoogleMapsView,
+                                       GoogeMapsLegend)
 
-    @property
-    @implements_adapter_compat('IGeocodable')
-    def longitude(self):
-        """returns the longitude of the entity in degree (-180 < float < +180)"""
-        raise NotImplementedError
-
-    @implements_adapter_compat('IGeocodable')
-    def marker_icon(self):
-        """returns the icon that should be used as the marker.
-
-        an icon is defined by a 4-uple:
-
-          (icon._url, icon.size,  icon.iconAnchor, icon.shadow)
-        """
-        return (self._cw.uiprops['GMARKER_ICON'], (20, 34), (4, 34), None)
-
-
-class GeocodingJsonView(EntityView):
-    __regid__ = 'geocoding-json'
-    __select__ = adaptable('IGeocodable')
-
-    binary = True
-    templatable = False
-    content_type = 'application/json'
+    from logilab.common.deprecation import class_moved
 
-    def call(self):
-        zoomlevel = self._cw.form.pop('zoomlevel', None)
-        extraparams = self._cw.form.copy()
-        extraparams.pop('vid', None)
-        extraparams.pop('rql', None)
-        markers = []
-        for entity in self.cw_rset.entities():
-            igeocodable = entity.cw_adapt_to('IGeocodable')
-            # remove entities that don't define latitude and longitude
-            if not (igeocodable.latitude and igeocodable.longitude):
-                continue
-            markers.append(self.build_marker_data(entity, igeocodable,
-                                                  extraparams))
-        if not markers:
-            return
-        geodata = {
-            'markers': markers,
-            }
-        if zoomlevel:
-            geodata['zoomlevel'] = int(zoomlevel)
-        self.w(json_dumps(geodata))
-
-    def build_marker_data(self, entity, igeocodable, extraparams):
-        return {'latitude': igeocodable.latitude,
-                'longitude': igeocodable.longitude,
-                'icon': igeocodable.marker_icon(),
-                'title': entity.dc_long_title(),
-                'bubbleUrl': entity.absolute_url(
-                    vid='gmap-bubble', __notemplate=1, **extraparams),
-                }
-
-
-class GoogleMapBubbleView(EntityView):
-    __regid__ = 'gmap-bubble'
-    __select__ = adaptable('IGeocodable')
-
-    def cell_call(self, row, col):
-        entity = self.cw_rset.get_entity(row, col)
-        self.w(u'<div>%s</div>' % entity.view('oneline'))
-        # FIXME: we should call something like address-view if available
-
-
-class GoogleMapsView(EntityView):
-    __regid__ = 'gmap-view'
-    __select__ = adaptable('IGeocodable')
-
-    paginable = False
-
-    def call(self, gmap_key, width=400, height=400, uselabel=True, urlparams=None):
-        self._cw.demote_to_html()
-        self._cw.add_js('http://maps.google.com/maps?sensor=false&file=api&v=2&key=%s'
-                        % gmap_key, localfile=False)
-        self._cw.add_js( ('cubicweb.widgets.js', 'cubicweb.gmap.js', 'gmap.utility.labeledmarker.js') )
-        rql = self.cw_rset.printable_rql()
-        if urlparams is None:
-            loadurl = self._cw.build_url(rql=rql, vid='geocoding-json')
-        else:
-            loadurl = self._cw.build_url(rql=rql, vid='geocoding-json', **urlparams)
-        self.w(u'<div style="width: %spx; height: %spx;" class="widget gmap" '
-               u'cubicweb:wdgtype="GMapWidget" cubicweb:loadtype="auto" '
-               u'cubicweb:loadurl="%s" cubicweb:uselabel="%s"> </div>'
-               % (width, height, loadurl, uselabel))
-
-
-class GoogeMapsLegend(EntityView):
-    __regid__ = 'gmap-legend'
-
-    def call(self):
-        self.w(u'<ol>')
-        for rowidx in xrange(len(self.cw_rset)):
-            self.w(u'<li>')
-            self.wview('listitem', self.cw_rset, row=rowidx, col=0)
-            self.w(u'</li>')
-        self.w(u'</ol>')
+    msg = '[3.17] cubicweb.web.views.igeocodable moved to cubes.geocoding.views'
+    IGeocodableAdapter = class_moved(IGeocodableAdapter, message=msg)
+    GeocodingJsonView = class_moved(GeocodingJsonView, message=msg)
+    GoogleMapBubbleView = class_moved(GoogleMapBubbleView, message=msg)
+    GoogleMapsView = class_moved(GoogleMapsView, message=msg)
+    GoogeMapsLegend = class_moved(GoogeMapsLegend, message=msg)
+except ImportError:
+    from cubicweb.web import LOGGER
+    LOGGER.warning('[3.17] igeocoding extracted to cube geocoding that was not found. try installing it.')
--- a/web/views/iprogress.py	Fri Jun 14 16:13:24 2013 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,260 +0,0 @@
-# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-#
-# This file is part of CubicWeb.
-#
-# CubicWeb is free software: you can redistribute it and/or modify it under the
-# terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option)
-# any later version.
-#
-# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""Specific views for entities implementing IProgress/IMileStone"""
-
-__docformat__ = "restructuredtext en"
-_ = unicode
-
-from math import floor
-
-from logilab.common.deprecation import class_deprecated
-from logilab.mtconverter import xml_escape
-
-from cubicweb.utils import make_uid
-from cubicweb.predicates import adaptable
-from cubicweb.schema import display_name
-from cubicweb.view import EntityView
-from cubicweb.web.views.tableview import EntityAttributesTableView
-
-
-class ProgressTableView(EntityAttributesTableView):
-    """The progress table view is able to display progress information
-    of any object implement IMileStone.
-
-    The default layout is composoed of 7 columns : parent task,
-    milestone, state, estimated date, cost, progressbar, and todo_by
-
-    The view accepts an optional ``columns`` paramater that lets you
-    remove or reorder some of those columns.
-
-    To add new columns, you should extend this class, define a new
-    ``columns`` class attribute and implement corresponding
-    build_COLNAME_cell methods
-
-    header_for_COLNAME methods allow to customize header's label
-    """
-    __metaclass__ = class_deprecated
-    __deprecation_warning__ = '[3.14] %(cls)s is deprecated'
-
-    __regid__ = 'progress_table_view'
-    __select__ = adaptable('IMileStone')
-    title = _('task progression')
-    table_css = "progress"
-    css_files = ('cubicweb.iprogress.css',)
-
-    # default columns of the table
-    columns = (_('project'), _('milestone'), _('state'), _('eta_date'),
-               _('cost'), _('progress'), _('todo_by'))
-
-    def cell_call(self, row, col):
-        _ = self._cw._
-        entity = self.cw_rset.get_entity(row, col)
-        infos = {}
-        for col in self.columns:
-            meth = getattr(self, 'build_%s_cell' % col, None)
-            # find the build method or try to find matching attribute
-            if meth:
-                content = meth(entity)
-            else:
-                content = entity.printable_value(col)
-            infos[col] = content
-        cssclass = entity.cw_adapt_to('IMileStone').progress_class()
-        self.w(u"""<tr class="%s" onmouseover="$(this).addClass('highlighted');"
-            onmouseout="$(this).removeClass('highlighted')">""" % cssclass)
-        line = u''.join(u'<td>%%(%s)s</td>' % col for col in self.columns)
-        self.w(line % infos)
-        self.w(u'</tr>\n')
-
-    ## header management ######################################################
-
-    def header_for_project(self, sample):
-        """use entity's parent type as label"""
-        return display_name(self._cw, sample.cw_adapt_to('IMileStone').parent_type)
-
-    def header_for_milestone(self, sample):
-        """use entity's type as label"""
-        return display_name(self._cw, sample.__regid__)
-
-    ## cell management ########################################################
-    def build_project_cell(self, entity):
-        """``project`` column cell renderer"""
-        project = entity.cw_adapt_to('IMileStone').get_main_task()
-        if project:
-            return project.view('incontext')
-        return self._cw._('no related project')
-
-    def build_milestone_cell(self, entity):
-        """``milestone`` column cell renderer"""
-        return entity.view('incontext')
-
-    def build_state_cell(self, entity):
-        """``state`` column cell renderer"""
-        return xml_escape(entity.cw_adapt_to('IWorkflowable').printable_state)
-
-    def build_eta_date_cell(self, entity):
-        """``eta_date`` column cell renderer"""
-        imilestone = entity.cw_adapt_to('IMileStone')
-        if imilestone.finished():
-            return self._cw.format_date(imilestone.completion_date())
-        formated_date = self._cw.format_date(imilestone.initial_prevision_date())
-        if imilestone.in_progress():
-            eta_date = self._cw.format_date(imilestone.eta_date())
-            _ = self._cw._
-            if formated_date:
-                formated_date += u' (%s %s)' % (_('expected:'), eta_date)
-            else:
-                formated_date = u'%s %s' % (_('expected:'), eta_date)
-        return formated_date
-
-    def build_todo_by_cell(self, entity):
-        """``todo_by`` column cell renderer"""
-        imilestone = entity.cw_adapt_to('IMileStone')
-        return u', '.join(p.view('outofcontext') for p in imilestone.contractors())
-
-    def build_cost_cell(self, entity):
-        """``cost`` column cell renderer"""
-        _ = self._cw._
-        imilestone = entity.cw_adapt_to('IMileStone')
-        pinfo = imilestone.progress_info()
-        totalcost = pinfo.get('estimatedcorrected', pinfo['estimated'])
-        missing = pinfo.get('notestimatedcorrected', pinfo.get('notestimated', 0))
-        costdescr = []
-        if missing:
-            # XXX: link to unestimated entities
-            costdescr.append(_('%s not estimated') % missing)
-        estimated = pinfo['estimated']
-        if estimated and estimated != totalcost:
-            costdescr.append(_('initial estimation %s') % estimated)
-        if costdescr:
-            return u'%s (%s)' % (totalcost, ', '.join(costdescr))
-        return unicode(totalcost)
-
-    def build_progress_cell(self, entity):
-        """``progress`` column cell renderer"""
-        return entity.view('progressbar')
-
-
-class InContextProgressTableView(ProgressTableView):
-    """this views redirects to ``progress_table_view`` but removes
-    the ``project`` column
-    """
-    __metaclass__ = class_deprecated
-    __deprecation_warning__ = '[3.14] %(cls)s is deprecated'
-    __regid__ = 'ic_progress_table_view'
-
-    def call(self, columns=None):
-        view = self._cw.vreg['views'].select('progress_table_view', self._cw,
-                                         rset=self.cw_rset)
-        columns = list(columns or view.columns)
-        try:
-            columns.remove('project')
-        except ValueError:
-            self.info('[ic_progress_table_view] could not remove project from columns')
-        view.render(w=self.w, columns=columns)
-
-
-class ProgressBarView(EntityView):
-    """displays a progress bar"""
-    __metaclass__ = class_deprecated
-    __deprecation_warning__ = '[3.14] %(cls)s is deprecated'
-    __regid__ = 'progressbar'
-    __select__ = adaptable('IProgress')
-
-    title = _('progress bar')
-
-    precision = 0.1
-    red_threshold = 1.1
-    orange_threshold = 1.05
-    yellow_threshold = 1
-
-    @classmethod
-    def overrun(cls, iprogress):
-        done = iprogress.done or 0
-        todo = iprogress.todo or 0
-        budget = iprogress.revised_cost or 0
-        if done + todo > budget:
-            overrun = done + todo - budget
-        else:
-            overrun = 0
-        if overrun < cls.precision:
-            overrun = 0
-        return overrun
-
-    @classmethod
-    def overrun_percentage(cls, iprogress):
-        budget = iprogress.revised_cost or 0
-        if budget == 0:
-            return 0
-        return cls.overrun(iprogress) * 100. / budget
-
-    def cell_call(self, row, col):
-        self._cw.add_css('cubicweb.iprogress.css')
-        self._cw.add_js('cubicweb.iprogress.js')
-        entity = self.cw_rset.get_entity(row, col)
-        iprogress = entity.cw_adapt_to('IProgress')
-        done = iprogress.done or 0
-        todo = iprogress.todo or 0
-        budget = iprogress.revised_cost or 0
-        if budget == 0:
-            pourcent = 100
-        else:
-            pourcent = done*100./budget
-        if pourcent > 100.1:
-            color = 'red'
-        elif todo+done > self.red_threshold*budget:
-            color = 'red'
-        elif todo+done > self.orange_threshold*budget:
-            color = 'orange'
-        elif todo+done > self.yellow_threshold*budget:
-            color = 'yellow'
-        else:
-            color = 'green'
-        if pourcent < 0:
-            pourcent = 0
-
-        if floor(done) == done or done>100:
-            done_str = '%i' % done
-        else:
-            done_str = '%.1f' % done
-        if floor(budget) == budget or budget>100:
-            budget_str = '%i' % budget
-        else:
-            budget_str = '%.1f' % budget
-
-        title = u'%s/%s = %i%%' % (done_str, budget_str, pourcent)
-        short_title = title
-        overrunpercent = self.overrun_percentage(iprogress)
-        if overrunpercent:
-            overrun = self.overrun(iprogress)
-            title += u' overrun +%sj (+%i%%)' % (overrun, overrunpercent)
-            if floor(overrun) == overrun or overrun > 100:
-                short_title += u' +%i' % overrun
-            else:
-                short_title += u' +%.1f' % overrun
-        # write bars
-        maxi = max(done+todo, budget)
-        if maxi == 0:
-            maxi = 1
-        cid = make_uid('progress_bar')
-        self._cw.html_headers.add_onload(
-            'draw_progressbar("canvas%s", %i, %i, %i, "%s");' %
-            (cid, int(100.*done/maxi), int(100.*(done+todo)/maxi),
-             int(100.*budget/maxi), color))
-        self.w(u'%s<br/>'
-               u'<canvas class="progressbar" id="canvas%s" width="100" height="10"></canvas>'
-               % (xml_escape(short_title), cid))
--- a/web/views/isioc.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/web/views/isioc.py	Fri Jun 14 16:26:25 2013 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -20,144 +20,16 @@
 http://sioc-project.org
 """
 
-__docformat__ = "restructuredtext en"
-_ = unicode
-
-from logilab.mtconverter import xml_escape
-
-from cubicweb.view import EntityView, EntityAdapter, implements_adapter_compat
-from cubicweb.predicates import implements, adaptable
-from cubicweb.interfaces import ISiocItem, ISiocContainer
-
-
-class ISIOCItemAdapter(EntityAdapter):
-    """interface for entities which may be represented as an ISIOC items"""
-    __needs_bw_compat__ = True
-    __regid__ = 'ISIOCItem'
-    __select__ = implements(ISiocItem, warn=False) # XXX for bw compat, should be abstract
-
-    @implements_adapter_compat('ISIOCItem')
-    def isioc_content(self):
-        """return item's content"""
-        raise NotImplementedError
-
-    @implements_adapter_compat('ISIOCItem')
-    def isioc_container(self):
-        """return container entity"""
-        raise NotImplementedError
-
-    @implements_adapter_compat('ISIOCItem')
-    def isioc_type(self):
-        """return container type (post, BlogPost, MailMessage)"""
-        raise NotImplementedError
+from logilab.common.deprecation import class_moved
 
-    @implements_adapter_compat('ISIOCItem')
-    def isioc_replies(self):
-        """return replies items"""
-        raise NotImplementedError
-
-    @implements_adapter_compat('ISIOCItem')
-    def isioc_topics(self):
-        """return topics items"""
-        raise NotImplementedError
-
-
-class ISIOCContainerAdapter(EntityAdapter):
-    """interface for entities which may be represented as an ISIOC container"""
-    __needs_bw_compat__ = True
-    __regid__ = 'ISIOCContainer'
-    __select__ = implements(ISiocContainer, warn=False) # XXX for bw compat, should be abstract
-
-    @implements_adapter_compat('ISIOCContainer')
-    def isioc_type(self):
-        """return container type (forum, Weblog, MailingList)"""
-        raise NotImplementedError
-
-    @implements_adapter_compat('ISIOCContainer')
-    def isioc_items(self):
-        """return contained items"""
-        raise NotImplementedError
-
-
-class SIOCView(EntityView):
-    __regid__ = 'sioc'
-    __select__ = adaptable('ISIOCItem', 'ISIOCContainer')
-    title = _('sioc')
-    templatable = False
-    content_type = 'text/xml'
+try:
+    from cubes.sioc.views import *
 
-    def call(self):
-        self.w(u'<?xml version="1.0" encoding="%s"?>\n' % self._cw.encoding)
-        self.w(u'''<rdf:RDF
-             xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
-             xmlns:rdfs="http://www.w3.org/2000/01/rdf-schema#"
-             xmlns:owl="http://www.w3.org/2002/07/owl#"
-             xmlns:foaf="http://xmlns.com/foaf/0.1/"
-             xmlns:sioc="http://rdfs.org/sioc/ns#"
-             xmlns:sioctype="http://rdfs.org/sioc/types#"
-             xmlns:dcterms="http://purl.org/dc/terms/">\n''')
-        for i in xrange(self.cw_rset.rowcount):
-            self.cell_call(i, 0)
-        self.w(u'</rdf:RDF>\n')
-
-    def cell_call(self, row, col):
-        self.wview('sioc_element', self.cw_rset, row=row, col=col)
-
-class SIOCContainerView(EntityView):
-    __regid__ = 'sioc_element'
-    __select__ = adaptable('ISIOCContainer')
-    templatable = False
-    content_type = 'text/xml'
-
-    def cell_call(self, row, col):
-        entity = self.cw_rset.complete_entity(row, col)
-        isioc = entity.cw_adapt_to('ISIOCContainer')
-        isioct = isioc.isioc_type()
-        self.w(u'<sioc:%s rdf:about="%s">\n'
-               % (isioct, xml_escape(entity.absolute_url())))
-        self.w(u'<dcterms:title>%s</dcterms:title>'
-               % xml_escape(entity.dc_title()))
-        self.w(u'<dcterms:created>%s</dcterms:created>'
-               % entity.creation_date) # XXX format
-        self.w(u'<dcterms:modified>%s</dcterms:modified>'
-               % entity.modification_date) # XXX format
-        self.w(u'<!-- FIXME : here be items -->')#entity.isioc_items()
-        self.w(u'</sioc:%s>\n' % isioct)
-
-
-class SIOCItemView(EntityView):
-    __regid__ = 'sioc_element'
-    __select__ = adaptable('ISIOCItem')
-    templatable = False
-    content_type = 'text/xml'
-
-    def cell_call(self, row, col):
-        entity = self.cw_rset.complete_entity(row, col)
-        isioc = entity.cw_adapt_to('ISIOCItem')
-        isioct = isioc.isioc_type()
-        self.w(u'<sioc:%s rdf:about="%s">\n'
-               % (isioct, xml_escape(entity.absolute_url())))
-        self.w(u'<dcterms:title>%s</dcterms:title>'
-               % xml_escape(entity.dc_title()))
-        self.w(u'<dcterms:created>%s</dcterms:created>'
-               % entity.creation_date) # XXX format
-        self.w(u'<dcterms:modified>%s</dcterms:modified>'
-               % entity.modification_date) # XXX format
-        content = isioc.isioc_content()
-        if content:
-            self.w(u'<sioc:content>%s</sioc:content>' % xml_escape(content))
-        container = isioc.isioc_container()
-        if container:
-            self.w(u'<sioc:has_container rdf:resource="%s"/>\n'
-                   % xml_escape(container.absolute_url()))
-        if entity.creator:
-            self.w(u'<sioc:has_creator>\n')
-            self.w(u'<sioc:User rdf:about="%s">\n'
-                   % xml_escape(entity.creator.absolute_url()))
-            self.w(entity.creator.view('foaf'))
-            self.w(u'</sioc:User>\n')
-            self.w(u'</sioc:has_creator>\n')
-        self.w(u'<!-- FIXME : here be topics -->')#entity.isioc_topics()
-        self.w(u'<!-- FIXME : here be replies -->')#entity.isioc_replies()
-        self.w(u' </sioc:%s>\n' % isioct)
-
+    ISIOCItemAdapter = class_moved(ISIOCItemAdapter, message='[3.17] ISIOCItemAdapter moved to cubes.sioc.views')
+    ISIOCContainerAdapter = class_moved(ISIOCContainerAdapter, message='[3.17] ISIOCContainerAdapter moved to cubes.sioc.views')
+    SIOCView = class_moved(SIOCView, message='[3.17] SIOCView moved to cubes.sioc.views')
+    SIOCContainerView = class_moved(SIOCContainerView, message='[3.17] SIOCContainerView moved to cubes.sioc.views')
+    SIOCItemView = class_moved(SIOCItemView, message='[3.17] SIOCItemView moved to cubes.sioc.views')
+except ImportError:
+    from cubicweb.web import LOGGER
+    LOGGER.warning('[3.17] isioc extracted to cube sioc that was not found. try installing it.')
--- a/web/views/json.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/web/views/json.py	Fri Jun 14 16:26:25 2013 +0200
@@ -114,7 +114,7 @@
             entity.complete() # fetch all attributes
             # hack to add extra metadata
             entity.cw_attr_cache.update({
-                    '__cwetype__': entity.__regid__,
+                    '__cwetype__': entity.cw_etype,
                     })
             entities.append(entity)
         self.wdata(entities)
--- a/web/views/magicsearch.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/web/views/magicsearch.py	Fri Jun 14 16:26:25 2013 +0200
@@ -29,7 +29,7 @@
 from rql.utils import rqlvar_maker
 from rql.nodes import Relation
 
-from cubicweb import Unauthorized, typed_eid
+from cubicweb import Unauthorized
 from cubicweb.view import Component
 from cubicweb.web.views.ajaxcontroller import ajaxfunc
 
@@ -254,7 +254,7 @@
         """
         # if this is an integer, then directly go to eid
         try:
-            eid = typed_eid(word)
+            eid = int(word)
             return 'Any X WHERE X eid %(x)s', {'x': eid}, 'x'
         except ValueError:
             etype = self._get_entity_type(word)
--- a/web/views/massmailing.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/web/views/massmailing.py	Fri Jun 14 16:26:25 2013 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -17,161 +17,24 @@
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """Mass mailing handling: send mail to entities adaptable to IEmailable"""
 
-__docformat__ = "restructuredtext en"
-_ = unicode
-
-import operator
-from functools import reduce
-
-from cubicweb.predicates import (is_instance, authenticated_user,
-                                adaptable, match_form_params)
-from cubicweb.view import EntityView
-from cubicweb.web import (Redirect, stdmsgs, controller, action,
-                          form, formfields as ff)
-from cubicweb.web.formwidgets import CheckBox, TextInput, AjaxWidget, ImgButton
-from cubicweb.web.views import forms, formrenderers
-
-
-class SendEmailAction(action.Action):
-    __regid__ = 'sendemail'
-    # XXX should check email is set as well
-    __select__ = (action.Action.__select__
-                  & authenticated_user()
-                  & adaptable('IEmailable'))
-
-    title = _('send email')
-    category = 'mainactions'
-
-    def url(self):
-        params = {'vid': 'massmailing', '__force_display': 1}
-        if 'rql' in self._cw.form:
-            params['rql'] = self._cw.form['rql']
-        return self._cw.build_url(self._cw.relative_path(includeparams=False),
-                                  **params)
-
-
-def recipient_vocabulary(form, field):
-    vocab = [(entity.cw_adapt_to('IEmailable').get_email(), unicode(entity.eid))
-             for entity in form.cw_rset.entities()]
-    return [(label, value) for label, value in vocab if label]
+try:
+    from cubes.massmailing.views import (SendEmailAction,
+                                         recipient_vocabulary,
+                                         MassMailingForm,
+                                         MassMailingFormRenderer,
+                                         MassMailingFormView,
+                                         SendMailController)
 
 
-class MassMailingForm(forms.FieldsForm):
-    __regid__ = 'massmailing'
-
-    needs_js = ('cubicweb.edition.js', 'cubicweb.widgets.js',)
-    needs_css = ('cubicweb.mailform.css')
-    domid = 'sendmail'
-    action = 'sendmail'
-
-    sender = ff.StringField(widget=TextInput({'disabled': 'disabled'}),
-                            label=_('From:'),
-                            value=lambda form, field: '%s <%s>' % (
-                                form._cw.user.dc_title(),
-                                form._cw.user.cw_adapt_to('IEmailable').get_email()))
-    recipient = ff.StringField(widget=CheckBox(), label=_('Recipients:'),
-                               choices=recipient_vocabulary,
-                               value= lambda form, field: [entity.eid for entity in form.cw_rset.entities()
-                                                           if entity.cw_adapt_to('IEmailable').get_email()])
-    subject = ff.StringField(label=_('Subject:'), max_length=256)
-    mailbody = ff.StringField(widget=AjaxWidget(wdgtype='TemplateTextField',
-                                                inputid='mailbody'))
-
-    form_buttons = [ImgButton('sendbutton', "javascript: $('#sendmail').submit()",
-                              _('send email'), 'SEND_EMAIL_ICON'),
-                    ImgButton('cancelbutton', "javascript: history.back()",
-                              _(stdmsgs.BUTTON_CANCEL[0]), stdmsgs.BUTTON_CANCEL[1])]
-    form_renderer_id = __regid__
-
-    def __init__(self, *args, **kwargs):
-        super(MassMailingForm, self).__init__(*args, **kwargs)
-        field = self.field_by_name('mailbody')
-        field.widget.attrs['cubicweb:variables'] = ','.join(self.get_allowed_substitutions())
-
-    def get_allowed_substitutions(self):
-        attrs = []
-        for coltype in self.cw_rset.column_types(0):
-            entity = self._cw.vreg['etypes'].etype_class(coltype)(self._cw)
-            attrs.append(entity.cw_adapt_to('IEmailable').allowed_massmail_keys())
-        return sorted(reduce(operator.and_, attrs))
-
-    def build_substitutions_help(self):
-        insertLink = u'<a href="javascript: cw.widgets.insertText(\'%%(%s)s\', \'emailarea\');">%%(%s)s</a>'
-        substs = (u'<div class="substitution">%s</div>' % (insertLink % (subst, subst))
-                  for subst in self.get_allowed_substitutions())
-        helpmsg = self._cw._('You can use any of the following substitutions in your text')
-        return u'<div id="substitutions"><span>%s</span>%s</div>' % (
-            helpmsg, u'\n'.join(substs))
-
-
-class MassMailingFormRenderer(formrenderers.FormRenderer):
-    __regid__ = 'massmailing'
+    from logilab.common.deprecation import class_moved, moved
 
-    def _render_fields(self, fields, w, form):
-        w(u'<table class="headersform">')
-        for field in fields:
-            if field.name == 'mailbody':
-                w(u'</table>')
-                self._render_toolbar(w, form)
-                w(u'<table>')
-                w(u'<tr><td><div>')
-            else:
-                w(u'<tr>')
-                w(u'<td class="hlabel">%s</td>' % self.render_label(form, field))
-                w(u'<td class="hvalue">')
-            w(field.render(form, self))
-            if field.name == 'mailbody':
-                w(u'</div></td>')
-                w(u'<td>%s</td>' % form.build_substitutions_help())
-                w(u'</tr>')
-            else:
-                w(u'</td></tr>')
-        w(u'</table>')
-
-    def _render_toolbar(self, w, form):
-        w(u'<div id="toolbar">')
-        w(u'<ul>')
-        for button in form.form_buttons:
-            w(u'<li>%s</li>' % button.render(form))
-        w(u'</ul>')
-        w(u'</div>')
-
-    def render_buttons(self, w, form):
-        pass
-
-
-class MassMailingFormView(form.FormViewMixIn, EntityView):
-    __regid__ = 'massmailing'
-    __select__ = authenticated_user() & adaptable('IEmailable')
-
-    def call(self):
-        form = self._cw.vreg['forms'].select('massmailing', self._cw,
-                                             rset=self.cw_rset)
-        form.render(w=self.w)
-
-
-class SendMailController(controller.Controller):
-    __regid__ = 'sendmail'
-    __select__ = authenticated_user() & match_form_params('recipient', 'mailbody', 'subject')
-
-    def recipients(self):
-        """returns an iterator on email's recipients as entities"""
-        eids = self._cw.form['recipient']
-        # eids may be a string if only one recipient was specified
-        if isinstance(eids, basestring):
-            rset = self._cw.execute('Any X WHERE X eid %(x)s', {'x': eids})
-        else:
-            rset = self._cw.execute('Any X WHERE X eid in (%s)' % (','.join(eids)))
-        return rset.entities()
-
-    def publish(self, rset=None):
-        # XXX this allows users with access to an cubicweb instance to use it as
-        # a mail relay
-        body = self._cw.form['mailbody']
-        subject = self._cw.form['subject']
-        for recipient in self.recipients():
-            iemailable = recipient.cw_adapt_to('IEmailable')
-            text = body % iemailable.as_email_context()
-            self.sendmail(iemailable.get_email(), subject, text)
-        url = self._cw.build_url(__message=self._cw._('emails successfully sent'))
-        raise Redirect(url)
+    msg = '[3.17] cubicweb.web.views.massmailing moved to cubes.massmailing.views'
+    SendEmailAction = class_moved(SendEmailAction, message=msg)
+    recipient_vocabulary = moved('cubes.massmailing.views', 'recipient_vocabulary')
+    MassMailingForm = class_moved(MassMailingForm, message=msg)
+    MassMailingFormRenderer = class_moved(MassMailingFormRenderer, message=msg)
+    MassMailingFormView = class_moved(MassMailingFormView, message=msg)
+    SendMailController = class_moved(SendMailController, message=msg)
+except ImportError:
+    from cubicweb.web import LOGGER
+    LOGGER.warning('[3.17] massmailing extracted to cube massmailing that was not found. try installing it.')
--- a/web/views/primary.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/web/views/primary.py	Fri Jun 14 16:26:25 2013 +0200
@@ -362,7 +362,7 @@
                 label = self._cw._(dispctrl['label'])
             else:
                 label = display_name(self._cw, rschema.type, role,
-                                     context=entity.__regid__)
+                                     context=entity.cw_etype)
             return label
         return u''
 
--- a/web/views/reledit.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/web/views/reledit.py	Fri Jun 14 16:26:25 2013 +0200
@@ -29,7 +29,7 @@
 from logilab.common.deprecation import deprecated, class_renamed
 from logilab.common.decorators import cached
 
-from cubicweb import neg_role, typed_eid
+from cubicweb import neg_role
 from cubicweb.schema import display_name
 from cubicweb.utils import json, json_dumps
 from cubicweb.predicates import non_final_entity, match_kwargs
@@ -402,7 +402,7 @@
     req = self._cw
     args = dict((x, req.form[x])
                 for x in ('formid', 'rtype', 'role', 'reload', 'action'))
-    rset = req.eid_rset(typed_eid(self._cw.form['eid']))
+    rset = req.eid_rset(int(self._cw.form['eid']))
     try:
         args['reload'] = json.loads(args['reload'])
     except ValueError: # not true/false, an absolute url
--- a/web/views/treeview.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/web/views/treeview.py	Fri Jun 14 16:26:25 2013 +0200
@@ -68,7 +68,7 @@
         self.close_item(entity)
 
     def open_item(self, entity):
-        self.w(u'<li class="%s">\n' % entity.__regid__.lower())
+        self.w(u'<li class="%s">\n' % entity.cw_etype.lower())
     def close_item(self, entity):
         self.w(u'</li>\n')
 
--- a/web/views/urlpublishing.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/web/views/urlpublishing.py	Fri Jun 14 16:26:25 2013 +0200
@@ -59,7 +59,7 @@
 
 from rql import TypeResolverException
 
-from cubicweb import RegistryException, typed_eid
+from cubicweb import RegistryException
 from cubicweb.web import NotFound, Redirect, component
 
 
@@ -165,7 +165,7 @@
         if len(parts) != 1:
             raise PathDontMatch()
         try:
-            rset = req.execute('Any X WHERE X eid %(x)s', {'x': typed_eid(parts[0])})
+            rset = req.execute('Any X WHERE X eid %(x)s', {'x': int(parts[0])})
         except ValueError:
             raise PathDontMatch()
         if rset.rowcount == 0:
@@ -222,7 +222,7 @@
                                     'x', 'Substitute')
         if attrname == 'eid':
             try:
-                rset = req.execute(st.as_string(), {'x': typed_eid(value)})
+                rset = req.execute(st.as_string(), {'x': int(value)})
             except (ValueError, TypeResolverException):
                 # conflicting eid/type
                 raise PathDontMatch()
--- a/web/views/urlrewrite.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/web/views/urlrewrite.py	Fri Jun 14 16:26:25 2013 +0200
@@ -19,7 +19,6 @@
 
 import re
 
-from cubicweb import typed_eid
 from cubicweb.uilib import domid
 from cubicweb.appobject import AppObject
 
@@ -186,7 +185,7 @@
                     except KeyError:
                         kwargs[key] = value
                     if cachekey is not None and key in cachekey:
-                        kwargs[key] = typed_eid(value)
+                        kwargs[key] = int(value)
             if setuser:
                 kwargs['u'] = req.user.eid
             for param in rqlformparams:
--- a/web/views/wdoc.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/web/views/wdoc.py	Fri Jun 14 16:26:25 2013 +0200
@@ -15,7 +15,8 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""inline help system, using ReST file in products `wdoc` directory
+"""inline help system, rendering ReST files in the `wdoc` subdirectory of
+CubicWeb and cubes
 
 """
 __docformat__ = "restructuredtext en"
--- a/web/views/xmlrss.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/web/views/xmlrss.py	Fri Jun 14 16:26:25 2013 +0200
@@ -78,7 +78,7 @@
         entity.complete()
         source = entity.cw_metainformation()['source']['uri']
         self.w(u'<%s eid="%s" cwuri="%s" cwsource="%s">\n'
-               % (entity.__regid__, entity.eid, xml_escape(entity.cwuri),
+               % (entity.cw_etype, entity.eid, xml_escape(entity.cwuri),
                   xml_escape(source)))
         for rschema, attrschema in entity.e_schema.attribute_definitions():
             attr = rschema.type
--- a/web/webconfig.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/web/webconfig.py	Fri Jun 14 16:26:25 2013 +0200
@@ -28,10 +28,11 @@
 
 from logilab.common.decorators import cached, cachedproperty
 from logilab.common.deprecation import deprecated
+from logilab.common.configuration import merge_options
 
 from cubicweb import ConfigurationError
 from cubicweb.toolsutils import read_config
-from cubicweb.cwconfig import CubicWebConfiguration, register_persistent_options, merge_options
+from cubicweb.cwconfig import CubicWebConfiguration, register_persistent_options
 
 
 register_persistent_options( (
@@ -110,6 +111,14 @@
           'group': 'web', 'level': 3,
           }),
         # web configuration
+        ('ui-cube',
+         {'type' : 'string',
+          'default': None,
+          'help': 'the name of the UI cube that will be loaded before all other '\
+          'cubes. Setting this value to None will instruct cubicweb not to load '\
+          'any extra cube.',
+          'group': 'web', 'level': 3,
+          }),
         ('https-url',
          {'type' : 'string',
           'default': None,
@@ -162,13 +171,6 @@
           'transparent to the user. Default to 5min.',
           'group': 'web', 'level': 3,
           }),
-        ('force-html-content-type',
-         {'type' : 'yn',
-          'default': False,
-          'help': 'force text/html content type for your html pages instead of cubicweb user-agent based'\
-          'deduction of an appropriate content type',
-          'group': 'web', 'level': 3,
-          }),
         ('embed-allowed',
          {'type' : 'regexp',
           'default': None,
--- a/wsgi/request.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/wsgi/request.py	Fri Jun 14 16:26:25 2013 +0200
@@ -38,13 +38,14 @@
 
 
 class CubicWebWsgiRequest(CubicWebRequestBase):
-    """most of this code COMES FROM DJANO
+    """most of this code COMES FROM DJANGO
     """
 
     def __init__(self, environ, vreg):
         self.environ = environ
         self.path = environ['PATH_INFO']
         self.method = environ['REQUEST_METHOD'].upper()
+        self.content = environ['wsgi.input']
 
         headers_in = dict((normalize_header(k[5:]), v) for k, v in self.environ.items()
                           if k.startswith('HTTP_'))
--- a/xy.py	Fri Jun 14 16:13:24 2013 +0200
+++ b/xy.py	Fri Jun 14 16:26:25 2013 +0200
@@ -23,7 +23,6 @@
 xy.register_prefix('dc', 'http://purl.org/dc/elements/1.1/')
 xy.register_prefix('foaf', 'http://xmlns.com/foaf/0.1/')
 xy.register_prefix('doap', 'http://usefulinc.com/ns/doap#')
-xy.register_prefix('sioc', 'http://rdfs.org/sioc/ns#')
 xy.register_prefix('owl', 'http://www.w3.org/2002/07/owl#')
 xy.register_prefix('dcterms', 'http://purl.org/dc/terms/')