Merge 3.23 branch

author     Denis Laxalde <denis.laxalde@logilab.fr>
date       Thu, 20 Oct 2016 18:28:46 +0200
changeset  11724 0fe3cf5c06b3
parent     11723 d8072617af3b (diff)
parent     11722 4b52c358b0ff (current diff)
child      11725 904ee9cd0cf9

files:
    cubicweb/cwconfig.py
    cubicweb/misc/migration/3.23.0_Any.py
    debian/changelog
--- a/.hgignore	Wed Oct 19 22:31:32 2016 +0200
+++ b/.hgignore	Thu Oct 20 18:28:46 2016 +0200
@@ -16,9 +16,10 @@
 .*/data.*/database/.*
 .*/data/ldapdb/.*
 .*/data/uicache/
-.*/data/cubes/.*/i18n/.*\.po
+.*/data/libpython/cubicweb_.*/i18n/.*\.po
 ^doc/html/
 ^doc/doctrees/
 ^doc/book/en/devweb/js_api/
 ^doc/_build
 ^doc/js_api/
+test-results.xml
--- a/MANIFEST.in	Wed Oct 19 22:31:32 2016 +0200
+++ b/MANIFEST.in	Thu Oct 20 18:28:46 2016 +0200
@@ -1,25 +1,30 @@
 include README
+include README.pyramid.rst
 include COPYING
 include COPYING.LESSER
 include pylintrc
+include jshintrc
 include tox.ini
 include bin/cubicweb-*
 include man/cubicweb-ctl.1
 
 include doc/*.rst
+include doc/*.txt
 include doc/Makefile
 recursive-include doc/book *
 recursive-include doc/tools *.py
 recursive-include doc/tutorials *.rst *.py
-include doc/api/*.rst
+recursive-include doc/api *.rst
 recursive-include doc/_themes *
 recursive-include doc/_static *
 include doc/_templates/*.html
 include doc/changes/*.rst
-recursive-include doc/dev .txt *.rst
+recursive-include doc/dev *.txt *.rst
 recursive-include doc/images *.png *.svg
 include doc/conf.py
 
+include cubicweb/devtools/fix_po_encoding
+
 recursive-include cubicweb/misc *.py *.png *.display
 
 include cubicweb/web/views/*.pt
@@ -30,23 +35,38 @@
 recursive-include cubicweb/i18n *.pot *.po
 recursive-include cubicweb/schemas *.py *.sql
 
+recursive-include requirements *.txt
+
 recursive-include cubicweb/test/data bootstrap_cubes *.py *.sql
-recursive-include cubicweb/entities/test/data bootstrap_cubes *.py
-recursive-include cubicweb/sobjects/test/data bootstrap_cubes *.py
-recursive-include cubicweb/hooks/test/data bootstrap_cubes *.py
-recursive-include cubicweb/server/test/data bootstrap_cubes *.py source* *.conf.in *.ldif
+recursive-include cubicweb/test/data-rewrite bootstrap_cubes *.py
+recursive-include cubicweb/test/data_schemareader *.py
+recursive-include cubicweb/dataimport/test/data *.py *.csv *.txt
+recursive-include cubicweb/dataimport/test/data-massimport *.py
 recursive-include cubicweb/devtools/test/data bootstrap_cubes *.py *.txt *.js *.po.ref
+recursive-include cubicweb/entities/test/data bootstrap_cubes *.py
+recursive-include cubicweb/etwist/test/data *.py
+recursive-include cubicweb/ext/test/data *.py
+recursive-include cubicweb/hooks/test/data-computed *.py
+recursive-include cubicweb/hooks/test/data bootstrap_cubes *.py
+recursive-include cubicweb/sobjects/test/data bootstrap_cubes *.py
+recursive-include cubicweb/server/test/data bootstrap_cubes *.py source* *.conf.in *.ldif
+recursive-include cubicweb/server/test/data-cwep002 *.py
+recursive-include cubicweb/server/test/datacomputed *.py
+recursive-include cubicweb/server/test/data-schema2sql bootstrap_cubes toignore
+recursive-include cubicweb/server/test/data-migractions bootstrap_cubes *.py
+recursive-include cubicweb/server/test/data-schemaserial *.py
+include cubicweb/web/test/testutils.js
 recursive-include cubicweb/web/test/data bootstrap_cubes pouet.css *.py
-recursive-include cubicweb/etwist/test/data *.py
+recursive-include cubicweb/web/test/data/static/jstests *.js *.html *.css *.json
+recursive-include cubicweb/web/test/windmill *.py
 
-recursive-include cubicweb/web/test/jstests *.js *.html *.css *.json
-recursive-include cubicweb/web/test/windmill *.py
+include cubicweb/web/data/jquery-treeview/*.md
 
 recursive-include cubicweb/skeleton *.py *.css *.js *.po compat *.in *.tmpl rules tox.ini
 
 prune doc/book/en/.static
 prune doc/book/fr/.static
-prune doc/html/_sources/
+prune doc/html/_sources
 prune cubicweb/misc/cwfs
 prune doc/js_api
 global-exclude *.pyc
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/README.pyramid.rst	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,170 @@
+
+pyramid_cubicweb_ is one specific way of integrating CubicWeb_ with a
+Pyramid_ web application.
+
+Features
+========
+
+* provides a default route that lets a CubicWeb instance handle the request.
+
+Usage
+=====
+
+To use it, install ``pyramid_cubicweb`` in your Python environment, and
+then include_ the package::
+
+    config.include('pyramid_cubicweb')
+
+    
+Configuration
+=============
+
+Requires the following `INI setting / environment variable`_:
+
+* `cubicweb.instance` / `CW_INSTANCE`: the cubicweb instance name
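+
+For instance, a minimal sketch (the instance name ``myinstance`` is
+hypothetical)::
+
+    # in the [main] section of the instance's pyramid.ini
+    cubicweb.instance = myinstance
+
+or, equivalently, through the environment::
+
+    export CW_INSTANCE=myinstance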
+
+Authentication cookies
+----------------------
+
+When using ``pyramid_cubicweb.auth`` (the CubicWeb AuthTkt
+authentication policy), which is the default in most cases, you may
+have to configure the behaviour of these authentication policies
+through standard Pyramid configuration. You may want to set the
+following in your ``pyramid.ini``:
+
+:Session Authentication:
+
+    This is an `AuthTktAuthenticationPolicy`_, so you may override the
+    default configuration values by adding configuration entries using
+    the prefix ``cubicweb.auth.authtkt.session``. Default values are:
+
+    ::
+
+        cubicweb.auth.authtkt.session.hashalg = sha512
+        cubicweb.auth.authtkt.session.cookie_name = auth_tkt
+        cubicweb.auth.authtkt.session.timeout = 1200
+        cubicweb.auth.authtkt.session.reissue_time = 120
+        cubicweb.auth.authtkt.session.http_only = True
+        cubicweb.auth.authtkt.session.secure = True
+
+
+:Persistent Authentication:
+
+    This is also an `AuthTktAuthenticationPolicy`_. It is used when
+    persistent sessions are activated (typically when using the
+    cubicweb-rememberme_ cube). You may override the default
+    configuration values by adding configuration entries using the
+    prefix ``cubicweb.auth.authtkt.persistent``. Default values are:
+
+    ::
+
+        cubicweb.auth.authtkt.persistent.hashalg = sha512
+        cubicweb.auth.authtkt.persistent.cookie_name = pauth_tkt
+        cubicweb.auth.authtkt.persistent.max_age = 3600*24*30
+        cubicweb.auth.authtkt.persistent.reissue_time = 3600*24
+        cubicweb.auth.authtkt.persistent.http_only = True
+        cubicweb.auth.authtkt.persistent.secure = True
+
+
+.. Warning:: Legacy timeout values from the instance's
+             ``all-in-one.conf`` (``http-session-time`` and
+             ``cleanup-session-time``) are **not** used at all.
+
+Please refer to the documentation_ for more details (available in the
+``docs`` directory of the source code).
+
+.. _pyramid_cubicweb: https://www.cubicweb.org/project/pyramid-cubicweb
+.. _CubicWeb: https://www.cubicweb.org/
+.. _`cubicweb-rememberme`: \
+    https://www.cubicweb.org/project/cubicweb-rememberme
+.. _Pyramid: http://pypi.python.org/pypi/pyramid
+.. _include: http://docs.pylonsproject.org/projects/pyramid/en/latest/api/config.html#pyramid.config.Configurator.include
+.. _`INI setting / environment variable`: http://docs.pylonsproject.org/projects/pyramid/en/latest/narr/environment.html#adding-a-custom-setting
+.. _documentation: http://pyramid-cubicweb.readthedocs.org/
+.. _AuthTktAuthenticationPolicy: \
+    http://docs.pylonsproject.org/projects/pyramid/en/latest/api/authentication.html#pyramid.authentication.AuthTktAuthenticationPolicy
+
+Command
+=======
+
+Summary
+-------
+
+Add the ``pyramid`` command to ``cubicweb-ctl``.
+
+This cube also adds a ``CWSession`` entity type so that sessions can be
+stored in the database, which makes it possible to run a CubicWeb
+instance without having to set up a dedicated session storage solution
+(such as Redis or memcached).
+
+However, for production systems, using such a dedicated session storage
+solution is strongly advised.
+
+Sessions themselves are handled by Pyramid (see the
+`pyramid's documentation on sessions`_ for more details).
+
+For example, to set up a Redis-based session storage, install the
+`pyramid-redis-session`_ package, then configure Pyramid to use this
+backend in the ``pyramid.ini`` file located in the instance's config
+directory (next to the ``all-in-one.conf`` file):
+
+
+.. code-block:: ini
+
+   [main]
+   cubicweb.defaults = no # we do not want to load the default cw session handling
+
+   cubicweb.auth.authtkt.session.secret = <secret1>
+   cubicweb.auth.authtkt.persistent.secret = <secret2>
+   cubicweb.auth.authtkt.session.secure = yes
+   cubicweb.auth.authtkt.persistent.secure = yes
+
+   redis.sessions.secret = <secret3>
+   redis.sessions.prefix = <my-app>:
+
+   redis.sessions.url = redis://localhost:6379/0
+
+   pyramid.includes =
+           pyramid_redis_sessions
+           pyramid_cubicweb.auth
+           pyramid_cubicweb.login
+
+
+See the documentation of `Pyramid Cubicweb`_ for more details.
+
+.. Warning:: If you want to be able to log in to a CubicWeb application
+             served by Pyramid over an unsecured connection (typically
+             when you start an instance in development mode using a
+             simple ``cubicweb-ctl pyramid -D -linfo myinstance``), you
+             **must** set ``cubicweb.auth.authtkt.session.secure`` to
+             ``no``.
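+
+For such a development setup, a minimal ``pyramid.ini`` fragment would
+be (a sketch for development only; keep the cookie secure in
+production):
+
+.. code-block:: ini
+
+   cubicweb.auth.authtkt.session.secure = no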
+
+Secrets
+~~~~~~~
+
+There are a number of secrets to configure in ``pyramid.ini``. They
+should all be different from one another, as explained in `Pyramid's
+documentation`_.
+
+For the record:
+
+:cubicweb.session.secret: This secret is used to encrypt the session's
+   data ID (the data themselves are stored in the backend, database or
+   Redis) when using the integrated (``CWSession`` based) session data
+   storage.
+
+:redis.sessions.secret: This secret is used to encrypt the session's
+   data ID (the data themselves are stored in the backend, database or
+   Redis) when using Redis as the backend.
+
+:cubicweb.auth.authtkt.session.secret: This secret is used to encrypt
+   the authentication cookie.
+
+:cubicweb.auth.authtkt.persistent.secret: This secret is used to
+   encrypt the persistent authentication cookie.
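+
+Gathering them, a ``pyramid.ini`` fragment might look like the
+following sketch (the ``<secretN>`` values are placeholders to generate
+yourself, numbered consistently with the example above):
+
+.. code-block:: ini
+
+   cubicweb.session.secret = <secret0>
+   cubicweb.auth.authtkt.session.secret = <secret1>
+   cubicweb.auth.authtkt.persistent.secret = <secret2>
+   redis.sessions.secret = <secret3>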
+
+
+.. _`Pyramid Cubicweb`: http://pyramid-cubicweb.readthedocs.org/
+.. _`pyramid's documentation on sessions`: http://docs.pylonsproject.org/projects/pyramid/en/latest/narr/sessions.html
+.. _`pyramid-redis-session`: http://pyramid-redis-sessions.readthedocs.org/en/latest/index.html
+.. _`Pyramid's documentation`: http://docs.pylonsproject.org/projects/pyramid/en/latest/narr/security.html#admonishment-against-secret-sharing
--- a/cubicweb.spec	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb.spec	Thu Oct 20 18:28:46 2016 +0200
@@ -8,7 +8,7 @@
 %{!?python_sitelib: %define python_sitelib %(%{__python} -c "from distutils.sysconfig import get_python_lib; print get_python_lib()")}
 
 Name:           cubicweb
-Version:        3.23.2
+Version:        3.23.1
 Release:        logilab.1%{?dist}
 Summary:        CubicWeb is a semantic web application framework
 Source0:        https://pypi.python.org/packages/source/c/cubicweb/cubicweb-%{version}.tar.gz
@@ -49,7 +49,7 @@
 %endif
 
 %install
-NO_SETUPTOOLS=1 %{__python} setup.py --quiet install --no-compile --prefix=%{_prefix} --root="$RPM_BUILD_ROOT"
+%{__python} setup.py --quiet install --no-compile --prefix=%{_prefix} --root="$RPM_BUILD_ROOT"
 mkdir -p $RPM_BUILD_ROOT/var/log/cubicweb
 
 %clean
--- a/cubicweb/__init__.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/__init__.py	Thu Oct 20 18:28:46 2016 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2016 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -20,9 +20,11 @@
 """
 __docformat__ = "restructuredtext en"
 
+import imp
 import logging
 import os
 import pickle
+import pkgutil
 import sys
 import warnings
 import zlib
@@ -59,14 +61,14 @@
 CW_SOFTWARE_ROOT = __path__[0]
 
 
-from cubicweb.__pkginfo__ import version as __version__
+from cubicweb.__pkginfo__ import version as __version__   # noqa
 
 
 set_log_methods(sys.modules[__name__], logging.getLogger('cubicweb'))
 
 # make all exceptions accessible from the package
-from cubicweb._exceptions import *
-from logilab.common.registry import ObjectNotFound, NoSelectableObject, RegistryNotFound
+from cubicweb._exceptions import *  # noqa
+from logilab.common.registry import ObjectNotFound, NoSelectableObject, RegistryNotFound  # noqa
 
 
 # '_' is available to mark internationalized string but should not be used to
@@ -81,10 +83,6 @@
 def typed_eid(eid):
     return int(eid)
 
-#def log_thread(f, w, a):
-#    print f.f_code.co_filename, f.f_code.co_name
-#import threading
-#threading.settrace(log_thread)
 
 class Binary(BytesIO):
     """class to hold binary data. Use BytesIO to prevent use of unicode data"""
@@ -92,13 +90,13 @@
 
     def __init__(self, buf=b''):
         assert isinstance(buf, self._allowed_types), \
-               "Binary objects must use bytes/buffer objects, not %s" % buf.__class__
+            "Binary objects must use bytes/buffer objects, not %s" % buf.__class__
         # don't call super, BytesIO may be an old-style class (on python < 2.7.4)
         BytesIO.__init__(self, buf)
 
     def write(self, data):
         assert isinstance(data, self._allowed_types), \
-               "Binary objects must use bytes/buffer objects, not %s" % data.__class__
+            "Binary objects must use bytes/buffer objects, not %s" % data.__class__
         # don't call super, BytesIO may be an old-style class (on python < 2.7.4)
         BytesIO.write(self, data)
 
@@ -114,7 +112,7 @@
             while True:
                 # the 16kB chunksize comes from the shutil module
                 # in stdlib
-                chunk = self.read(16*1024)
+                chunk = self.read(16 * 1024)
                 if not chunk:
                     break
                 fobj.write(chunk)
@@ -135,7 +133,7 @@
                 while True:
                     # the 16kB chunksize comes from the shutil module
                     # in stdlib
-                    chunk = fobj.read(16*1024)
+                    chunk = fobj.read(16 * 1024)
                     if not chunk:
                         break
                     binary.write(chunk)
@@ -149,7 +147,6 @@
             return False
         return self.getvalue() == other.getvalue()
 
-
     # Binary helpers to store/fetch python objects
 
     @classmethod
@@ -168,6 +165,7 @@
     return isinstance(value, (binary_type, Binary))
 BASE_CHECKERS['Password'] = check_password
 
+
 def str_or_binary(value):
     if isinstance(value, Binary):
         return value
@@ -182,17 +180,20 @@
 #     to help in cube renaming
 CW_MIGRATION_MAP = {}
 
+
 def neg_role(role):
     if role == 'subject':
         return 'object'
     return 'subject'
 
+
 def role(obj):
     try:
         return obj.role
     except AttributeError:
         return neg_role(obj.target)
 
+
 def target(obj):
     try:
         return obj.target
@@ -220,7 +221,7 @@
         self.callbacks = {}
 
     def bind(self, event, callback, *args, **kwargs):
-        self.callbacks.setdefault(event, []).append( (callback, args, kwargs) )
+        self.callbacks.setdefault(event, []).append((callback, args, kwargs))
 
     def emit(self, event, context=None):
         for callback, args, kwargs in self.callbacks.get(event, ()):
@@ -231,6 +232,7 @@
 
 CW_EVENT_MANAGER = CubicWebEventManager()
 
+
 def onevent(event, *args, **kwargs):
     """decorator to ease event / callback binding
 
@@ -249,6 +251,7 @@
 
 from yams.schema import role_name as rname
 
+
 def validation_error(entity, errors, substitutions=None, i18nvalues=None):
     """easy way to retrieve a :class:`cubicweb.ValidationError` for an entity or eid.
 
@@ -272,10 +275,34 @@
 
 # exceptions ##################################################################
 
-class ProgrammingError(Exception): #DatabaseError):
+class ProgrammingError(Exception):
     """Exception raised for errors that are related to the database's operation
     and not necessarily under the control of the programmer, e.g. an unexpected
     disconnect occurs, the data source name is not found, a transaction could
     not be processed, a memory allocation error occurred during processing,
     etc.
     """
+
+
+# Import hook for "legacy" cubes ##############################################
+
+class _CubesImporter(object):
+    """Module finder handling redirection of import of "cubes.<name>"
+    to "cubicweb_<name>".
+    """
+
+    @classmethod
+    def install(cls):
+        if not any(isinstance(x, cls) for x in sys.meta_path):
+            self = cls()
+            sys.meta_path.append(self)
+
+    def find_module(self, fullname, path=None):
+        if fullname.startswith('cubes.'):
+            modname = 'cubicweb_' + fullname.split('.', 1)[1]
+            try:
+                modinfo = imp.find_module(modname)
+            except ImportError:
+                return None
+            else:
+                return pkgutil.ImpLoader(fullname, *modinfo)
--- a/cubicweb/__pkginfo__.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/__pkginfo__.py	Thu Oct 20 18:28:46 2016 +0200
@@ -27,8 +27,8 @@
 
 modname = distname = "cubicweb"
 
-numversion = (3, 23, 2)
-version = '.'.join(str(num) for num in numversion)
+numversion = (3, 24, 0)
+version = '.'.join(str(num) for num in numversion) + '.dev0'
 
 description = "a repository of entities / relations for knowledge management"
 author = "Logilab"
@@ -43,37 +43,6 @@
     'Programming Language :: JavaScript',
 ]
 
-__depends__ = {
-    'six': '>= 1.4.0',
-    'logilab-common': '>= 1.2.2',
-    'logilab-mtconverter': '>= 0.8.0',
-    'rql': '>= 0.34.0',
-    'yams': '>= 0.44.0',
-    #gettext                    # for xgettext, msgcat, etc...
-    # web dependencies
-    'lxml': '',
-    # XXX graphviz
-    # server dependencies
-    'logilab-database': '>= 1.15.0',
-    'passlib': '',
-    'pytz': '',
-    'Markdown': '',
-    'unittest2': '>= 0.7.0',
-    }
-
-__recommends__ = {
-    'docutils': '>= 0.6',
-    'Pillow': '',               # for captcha
-    'pycrypto': '',             # for crypto extensions
-    'fyzz': '>= 0.1.0',         # for sparql
-    'vobject': '>= 0.6.0',      # for ical view
-    'rdflib': None,             #
-    'pyzmq': None,
-    'Twisted': '< 16.0.0',
-    #'Products.FCKeditor':'',
-    #'SimpleTAL':'>= 4.1.6',
-}
-
 scripts = [s for s in glob.glob(join('bin', 'cubicweb-*'))
            if not s.endswith('.bat')]
 include_dirs = [join('test', 'data'),
--- a/cubicweb/cwconfig.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/cwconfig.py	Thu Oct 20 18:28:46 2016 +0200
@@ -181,15 +181,19 @@
 
 __docformat__ = "restructuredtext en"
 
-import sys
-import os
-import stat
+import importlib
 import logging
 import logging.config
-from smtplib import SMTP
-from threading import Lock
+import os
 from os.path import (exists, join, expanduser, abspath, normpath,
                      basename, isdir, dirname, splitext)
+import pkgutil
+import pkg_resources
+import re
+from smtplib import SMTP
+import stat
+import sys
+from threading import Lock
 from warnings import warn, filterwarnings
 
 from six import text_type
@@ -202,7 +206,7 @@
 
 from cubicweb import (CW_SOFTWARE_ROOT, CW_MIGRATION_MAP,
                       ConfigurationError, Binary, _)
-from cubicweb.toolsutils import create_dir
+from cubicweb.toolsutils import create_dir, option_value_from_env
 
 CONFIGURATIONS = []
 
@@ -262,6 +266,13 @@
         prefix = dirname(prefix)
     return prefix
 
+
+def _cube_pkgname(cube):
+    if not cube.startswith('cubicweb_'):
+        return 'cubicweb_' + cube
+    return cube
+
+
 # persistent options definition
 PERSISTENT_OPTIONS = (
     ('encoding',
@@ -405,6 +416,12 @@
           'group': 'email', 'level': 3,
           }),
         )
+
+    def __getitem__(self, key):
+        """Get configuration option, by first looking at environmnent."""
+        file_value = super(CubicWebNoAppConfiguration, self).__getitem__(key)
+        return option_value_from_env(key, file_value)
+
     # static and class methods used to get instance independant resources ##
     @staticmethod
     def cubicweb_version():
@@ -444,8 +461,21 @@
 
     @classmethod
     def available_cubes(cls):
-        import re
         cubes = set()
+        for entry_point in pkg_resources.iter_entry_points(
+                group='cubicweb.cubes', name=None):
+            try:
+                module = entry_point.load()
+            except ImportError:
+                continue
+            else:
+                modname = module.__name__
+                if not modname.startswith('cubicweb_'):
+                    cls.warning('entry point %s does not appear to be a cube',
+                                entry_point)
+                    continue
+                cubes.add(modname)
+        # Legacy cubes.
         for directory in cls.cubes_search_path():
             if not exists(directory):
                 cls.error('unexistant directory in cubes search path: %s'
@@ -456,10 +486,24 @@
                     continue
                 if not re.match('[_A-Za-z][_A-Za-z0-9]*$', cube):
                     continue # skip invalid python package name
+                if cube == 'pyramid':
+                    cls._warn_pyramid_cube()
+                    continue
                 cubedir = join(directory, cube)
                 if isdir(cubedir) and exists(join(cubedir, '__init__.py')):
                     cubes.add(cube)
-        return sorted(cubes)
+
+        def sortkey(cube):
+            """Preserve sorting with "cubicweb_" prefix."""
+            prefix = 'cubicweb_'
+            if cube.startswith(prefix):
+                # add a suffix to have a deterministic sorting between
+                # 'cubicweb_<cube>' and '<cube>' (useful in tests with "hash
+                # randomization" turned on).
+                return cube[len(prefix):] + '~'
+            return cube
+
+        return sorted(cubes, key=sortkey)
 
     @classmethod
     def cubes_search_path(cls):
@@ -483,12 +527,19 @@
         """return the cube directory for the given cube id, raise
         `ConfigurationError` if it doesn't exist
         """
+        pkgname = _cube_pkgname(cube)
+        loader = pkgutil.find_loader(pkgname)
+        if loader:
+            return dirname(loader.get_filename())
+        # Legacy cubes.
         for directory in cls.cubes_search_path():
             cubedir = join(directory, cube)
             if exists(cubedir):
                 return cubedir
-        raise ConfigurationError('no cube %r in %s' % (
-            cube, cls.cubes_search_path()))
+        msg = 'no module %(pkg)s in search path nor cube %(cube)r in %(path)s'
+        raise ConfigurationError(msg % {'cube': cube,
+                                        'pkg': _cube_pkgname(cube),
+                                        'path': cls.cubes_search_path()})
 
     @classmethod
     def cube_migration_scripts_dir(cls, cube):
@@ -498,14 +549,18 @@
     @classmethod
     def cube_pkginfo(cls, cube):
         """return the information module for the given cube"""
-        cube = CW_MIGRATION_MAP.get(cube, cube)
+        pkgname = _cube_pkgname(cube)
         try:
-            parent = __import__('cubes.%s.__pkginfo__' % cube)
-            return getattr(parent, cube).__pkginfo__
-        except Exception as ex:
-            raise ConfigurationError(
-                'unable to find packaging information for cube %s (%s: %s)'
-                % (cube, ex.__class__.__name__, ex))
+            return importlib.import_module('%s.__pkginfo__' % pkgname)
+        except ImportError:
+            cube = CW_MIGRATION_MAP.get(cube, cube)
+            try:
+                parent = __import__('cubes.%s.__pkginfo__' % cube)
+                return getattr(parent, cube).__pkginfo__
+            except Exception as ex:
+                raise ConfigurationError(
+                    'unable to find packaging information for cube %s (%s: %s)'
+                    % (cube, ex.__class__.__name__, ex))
 
     @classmethod
     def cube_version(cls, cube):
@@ -605,6 +660,8 @@
     @classmethod
     def cls_adjust_sys_path(cls):
         """update python path if necessary"""
+        from cubicweb import _CubesImporter
+        _CubesImporter.install()
         cubes_parent_dir = normpath(join(cls.CUBES_DIR, '..'))
         if not cubes_parent_dir in sys.path:
             sys.path.insert(0, cubes_parent_dir)
@@ -627,24 +684,29 @@
     def load_cwctl_plugins(cls):
         cls.cls_adjust_sys_path()
         for ctlmod in ('web.webctl',  'etwist.twctl', 'server.serverctl',
-                       'devtools.devctl'):
+                       'devtools.devctl', 'pyramid.pyramidctl'):
             try:
                 __import__('cubicweb.%s' % ctlmod)
             except ImportError:
                 continue
             cls.info('loaded cubicweb-ctl plugin %s', ctlmod)
         for cube in cls.available_cubes():
-            pluginfile = join(cls.cube_dir(cube), 'ccplugin.py')
-            initfile = join(cls.cube_dir(cube), '__init__.py')
+            cubedir = cls.cube_dir(cube)
+            pluginfile = join(cubedir, 'ccplugin.py')
+            initfile = join(cubedir, '__init__.py')
+            if cube.startswith('cubicweb_'):
+                pkgname = cube
+            else:
+                pkgname = 'cubes.%s' % cube
             if exists(pluginfile):
                 try:
-                    __import__('cubes.%s.ccplugin' % cube)
+                    __import__(pkgname + '.ccplugin')
                     cls.info('loaded cubicweb-ctl plugin from %s', cube)
                 except Exception:
                     cls.exception('while loading plugin %s', pluginfile)
             elif exists(initfile):
                 try:
-                    __import__('cubes.%s' % cube)
+                    __import__(pkgname)
                 except Exception:
                     cls.exception('while loading cube %s', cube)
             else:
@@ -821,11 +883,24 @@
 
     _cubes = None
 
+    @classmethod
+    def _warn_pyramid_cube(cls):
+        cls.warning("cubicweb-pyramid got integrated into CubicWeb; "
+                    "remove it from your project's dependencies")
+
     def init_cubes(self, cubes):
+        cubes = list(cubes)
+        if 'pyramid' in cubes:
+            self._warn_pyramid_cube()
+            cubes.remove('pyramid')
         self._cubes = self.reorder_cubes(cubes)
         # load cubes'__init__.py file first
         for cube in cubes:
-            __import__('cubes.%s' % cube)
+            try:
+                importlib.import_module(_cube_pkgname(cube))
+            except ImportError:
+                # Legacy cube.
+                __import__('cubes.%s' % cube)
         self.load_site_cubicweb()
 
     def cubes(self):
--- a/cubicweb/dataimport/massive_store.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/dataimport/massive_store.py	Thu Oct 20 18:28:46 2016 +0200
@@ -20,7 +20,6 @@
 import logging
 from copy import copy
 from collections import defaultdict
-from io import StringIO
 from itertools import chain
 from base64 import b64encode
 
@@ -43,41 +42,20 @@
       the indexes.
 
     - This store can only insert relations that are not inlined (i.e.,
-      which do *not* have inlined=True in their definition in the schema), unless they are
-      specified as entity attributes.
+      which do *not* have inlined=True in their definition in the schema),
+      unless they are specified as entity attributes.
 
     It should be used as follows:
 
        store = MassiveObjectStore(cnx)
-       store.init_rtype_table('Person', 'lives_in', 'Location')
-       ...
-
-       store.prepare_insert_entity('Person', subj_iid_attribute=person_iid, ...)
-       store.prepare_insert_entity('Location', obj_iid_attribute=location_iid, ...)
-       ...
-
-       # subj_iid_attribute and obj_iid_attribute are argument names
-       # chosen by the user (e.g. "cwuri"). These names can be identical.
-       # person_iid and location_iid are unique IDs and depend on the data
-       # (e.g URI).
+       eid_p = store.prepare_insert_entity('Person',
+                                           cwuri='http://dbpedia.org/toto',
+                                           name='Toto')
+       eid_loc = store.prepare_insert_entity('Location',
+                                             cwuri='http://geonames.org/11111',
+                                             name='Somewhere')
+       store.prepare_insert_relation(eid_p, 'lives_in', eid_loc)
        store.flush()
-       store.relate_by_iid(person_iid, 'lives_in', location_iid)
-       # For example:
-       store.prepare_insert_entity('Person',
-                                   cwuri='http://dbpedia.org/toto',
-                                   name='Toto')
-       store.prepare_insert_entity('Location',
-                                   uri='http://geonames.org/11111',
-                                   name='Somewhere')
-       store.flush()
-       store.relate_by_iid('http://dbpedia.org/toto',
-                           'lives_in',
-                           'http://geonames.org/11111')
-       # Finally
-       store.convert_relations('Person', 'lives_in', 'Location',
-                               'subj_iid_attribute', 'obj_iid_attribute')
-       # For the previous example:
-       store.convert_relations('Person', 'lives_in', 'Location', 'cwuri', 'uri')
        ...
        store.commit()
        store.finish()
@@ -85,8 +63,6 @@
     Full-text indexation is not handled, you'll have to reindex the proper entity types by yourself
     if desired.
     """
-    # max size of the iid, used to create the iid_eid conversion table
-    iid_maxsize = 1024
 
     def __init__(self, cnx,
                  on_commit_callback=None, on_rollback_callback=None,
@@ -120,14 +96,6 @@
         self._data_entities = defaultdict(list)
         self._data_relations = defaultdict(list)
         self._initialized = set()
-        # uri handling
-        self._data_uri_relations = defaultdict(list)
-        # etypes for which we have a uri_eid_%(etype)s table
-        self._init_uri_eid = set()
-        # etypes for which we have a uri_eid_%(e)s_idx index
-        self._uri_eid_inserted = set()
-        # set of rtypes for which we have a %(rtype)s_relation_iid_tmp table
-        self._uri_rtypes = set()
 
         if not self.slave_mode:
             # drop constraint and metadata table, they will be recreated when self.finish() is
@@ -144,112 +112,6 @@
             for eid in range(last_eid - self.eids_seq_range + 1, last_eid + 1):
                 yield eid
 
-    # URI related things #######################################################
-
-    def init_rtype_table(self, etype_from, rtype, etype_to):
-        """ Build temporary table for standard rtype """
-        # Create an uri_eid table for each etype for a better control of which etype is concerned by
-        # a particular possibly multivalued relation.
-        for etype in (etype_from, etype_to):
-            if etype and etype not in self._init_uri_eid:
-                self._init_uri_eid.add(etype)
-                self.sql('CREATE TABLE IF NOT EXISTS uri_eid_%(e)s'
-                         '(uri character varying(%(size)s), eid integer)'
-                         % {'e': etype.lower(), 'size': self.iid_maxsize})
-        if rtype not in self._uri_rtypes:
-            # Create the temporary table
-            if not self.schema.rschema(rtype).inlined:
-                self.sql('CREATE TABLE IF NOT EXISTS %(r)s_relation_iid_tmp'
-                         '(uri_from character varying(%(s)s), uri_to character varying(%(s)s))'
-                         % {'r': rtype, 's': self.iid_maxsize})
-                self._uri_rtypes.add(rtype)
-            else:
-                self.logger.warning("inlined relation %s: cannot insert it", rtype)
-
-    def relate_by_iid(self, iid_from, rtype, iid_to):
-        """Add new relation based on the internal id (iid)
-        of the entities (not the eid)"""
-        # Push data
-        if isinstance(iid_from, unicode):
-            iid_from = iid_from.encode('utf-8')
-        if isinstance(iid_to, unicode):
-            iid_to = iid_to.encode('utf-8')
-        self._data_uri_relations[rtype].append({'uri_from': iid_from, 'uri_to': iid_to})
-
-    def flush_relations(self):
-        """ Flush the relations data
-        """
-        for rtype, data in self._data_uri_relations.items():
-            if not data:
-                self.logger.info('No data for rtype %s', rtype)
-            buf = StringIO('\n'.join(['%(uri_from)s\t%(uri_to)s' % d for d in data]))
-            if not buf:
-                self.logger.info('Empty Buffer for rtype %s', rtype)
-                continue
-            cursor = self._cnx.cnxset.cu
-            if not self.schema.rschema(rtype).inlined:
-                cursor.copy_from(buf, '%s_relation_iid_tmp' % rtype.lower(),
-                                 null='NULL', columns=('uri_from', 'uri_to'))
-            else:
-                self.logger.warning("inlined relation %s: cannot insert it", rtype)
-            buf.close()
-            # Clear data cache
-            self._data_uri_relations[rtype] = []
-
-    def fill_uri_eid_table(self, etype, uri_label):
-        """ Fill the uri_eid table
-        """
-        if etype not in self._uri_eid_inserted:
-            self._uri_eid_inserted.add(etype)
-            self.logger.info('Fill uri_eid for etype %s', etype)
-            self.sql('INSERT INTO uri_eid_%(e)s SELECT cw_%(l)s, cw_eid FROM cw_%(e)s'
-                     % {'l': uri_label, 'e': etype.lower()})
-            self.sql('CREATE INDEX uri_eid_%(e)s_idx ON uri_eid_%(e)s(uri)'
-                     % {'e': etype.lower()})
-
-    def convert_relations(self, etype_from, rtype, etype_to,
-                          uri_label_from='cwuri', uri_label_to='cwuri'):
-        """ Flush the converted relations
-        """
-        # Always flush relations to be sure
-        self.logger.info('Convert relations %s %s %s', etype_from, rtype, etype_to)
-        self.flush_relations()
-        if uri_label_from:
-            self.fill_uri_eid_table(etype_from, uri_label_from)
-        if uri_label_to:
-            self.fill_uri_eid_table(etype_to, uri_label_to)
-        if self.schema.rschema(rtype).inlined:
-            self.logger.warning("Can't insert inlined relation %s", rtype)
-            return
-        if uri_label_from and uri_label_to:
-            sql = '''INSERT INTO %(r)s_relation (eid_from, eid_to) SELECT DISTINCT O1.eid, O2.eid
-            FROM %(r)s_relation_iid_tmp AS T, uri_eid_%(ef)s as O1, uri_eid_%(et)s as O2
-            WHERE O1.uri=T.uri_from AND O2.uri=T.uri_to AND NOT EXISTS (
-            SELECT 1 FROM %(r)s_relation AS TT WHERE TT.eid_from=O1.eid AND TT.eid_to=O2.eid);
-            '''
-        elif uri_label_to:
-            sql = '''INSERT INTO %(r)s_relation (eid_from, eid_to) SELECT DISTINCT
-            CAST(T.uri_from AS INTEGER), O1.eid
-            FROM %(r)s_relation_iid_tmp AS T, uri_eid_%(et)s as O1
-            WHERE O1.uri=T.uri_to AND NOT EXISTS (
-            SELECT 1 FROM %(r)s_relation AS TT WHERE
-            TT.eid_from=CAST(T.uri_from AS INTEGER) AND TT.eid_to=O1.eid);
-            '''
-        elif uri_label_from:
-            sql = '''INSERT INTO %(r)s_relation (eid_from, eid_to) SELECT DISTINCT O1.eid, T.uri_to
-            O1.eid, CAST(T.uri_to AS INTEGER)
-            FROM %(r)s_relation_iid_tmp AS T, uri_eid_%(ef)s as O1
-            WHERE O1.uri=T.uri_from AND NOT EXISTS (
-            SELECT 1 FROM %(r)s_relation AS TT WHERE
-            TT.eid_from=O1.eid AND TT.eid_to=CAST(T.uri_to AS INTEGER));
-            '''
-        try:
-            self.sql(sql % {'r': rtype.lower(),
-                            'et': etype_to.lower() if etype_to else u'',
-                            'ef': etype_from.lower() if etype_from else u''})
-        except Exception as ex:
-            self.logger.error("Can't insert relation %s: %s", rtype, ex)
-
     # SQL utilities #########################################################
 
     def _drop_all_constraints(self):
@@ -324,7 +186,6 @@
     def flush(self):
         """Flush the data"""
         self.flush_entities()
-        self.flush_internal_relations()
         self.flush_relations()
 
     def commit(self):
@@ -334,16 +195,9 @@
 
     def finish(self):
         """Remove temporary tables and columns."""
-        self.logger.info("Start cleaning")
         if self.slave_mode:
             raise RuntimeError('Store cleanup is not allowed in slave mode')
         self.logger.info("Start cleaning")
-        # Cleanup relations tables
-        for etype in self._init_uri_eid:
-            self.sql('DROP TABLE uri_eid_%s' % etype.lower())
-        # Remove relations tables
-        for rtype in self._uri_rtypes:
-            self.sql('DROP TABLE %(r)s_relation_iid_tmp' % {'r': rtype})
         # Get all the initialized etypes/rtypes
         if self._dbh.table_exists('cwmassive_initialized'):
             cu = self.sql('SELECT retype, type FROM cwmassive_initialized')
@@ -374,9 +228,8 @@
         else:
             raise exc
 
-    def flush_internal_relations(self):
-        """ Flush the relations data
-        """
+    def flush_relations(self):
+        """Flush the relations data."""
         for rtype, data in self._data_relations.items():
             if not data:
                 # There is no data for these etype for this flush round.
@@ -393,8 +246,7 @@
             self._data_relations[rtype] = []
 
     def flush_entities(self):
-        """ Flush the entities data
-        """
+        """Flush the entities data."""
         for etype, data in self._data_entities.items():
             if not data:
                 # There is no data for these etype for this flush round.
@@ -534,7 +386,6 @@
     def restore_indexes_and_constraints(self):
         """Restore indexes and constraints."""
         if not self.table_exists('cwmassive_constraints'):
-            self.logger.info('The table cwmassive_constraints does not exist')
             return
         cu = self.sql('SELECT sql, insert_order FROM cwmassive_constraints '
                       'ORDER BY insert_order DESC')
--- a/cubicweb/dataimport/test/data-massimport/schema.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/dataimport/test/data-massimport/schema.py	Thu Oct 20 18:28:46 2016 +0200
@@ -18,17 +18,8 @@
 See geonames readme.txt for more details.
 """
 
-from yams.buildobjs import (EntityType, RelationDefinition, SubjectRelation,
-                            String, Int, BigInt, Float, Date)
-
-
-class TestLocation(EntityType):
-    """
-    Entity type for location of Geonames.
-    See cities1000.zip, cities5000.zip, cities15000.zip and allCountries.txt
-    """
-    name = String(maxsize=1024, indexed=True, fulltextindexed=True)
-    geonameid = Int(required=True, unique=True, indexed=True)
+from yams.buildobjs import (EntityType, SubjectRelation,
+                            String, Int, BigInt, Float)
 
 
 class Location(EntityType):
@@ -40,105 +31,16 @@
     geonameid = Int(indexed=True)
     asciiname = String(maxsize=200, fulltextindexed=True)
     alternatenames = String(fulltextindexed=True)
-    names = SubjectRelation('LocationName', cardinality='**')
     latitude = Float(indexed=True)
     longitude = Float(indexed=True)
     feature_class = String(maxsize=1, indexed=True)
-    feature_code = SubjectRelation('FeatureCode', cardinality='?*', inlined=True)
-    country = SubjectRelation('Country', cardinality='?*', inlined=True)
     alternate_country_code = String(maxsize=60)
-    main_administrative_region = SubjectRelation('AdministrativeRegion',
-                                                 cardinality='?*', inlined=True)
-    second_administrative_region = SubjectRelation('AdministrativeRegion',
-                                                   cardinality='?*', inlined=True)
-    admin_code_1 = String(maxsize=124)
-    admin_code_2 = String(maxsize=124)
     admin_code_3 = String(maxsize=20)
     admin_code_4 = String(maxsize=20)
     population = BigInt(indexed=True)
     elevation = Int(indexed=True)
     gtopo30 = Int(indexed=True)
     timezone = SubjectRelation('TimeZone', cardinality='?*', inlined=True)
-    geonames_date = Date()
-
-
-class LocationName(EntityType):
-    """
-    Name of a Location
-    """
-    name = String(maxsize=1024, indexed=True, fulltextindexed=True)
-    language = SubjectRelation('Language', cardinality='?*', inlined=True)
-    alternatenamesid = Int(indexed=True)
-
-
-class FeatureCode(EntityType):
-    """
-    Entity type for feature codes of Geonames.
-    See featureCodes_en.txt
-    """
-    name = String(maxsize=1024, indexed=True, fulltextindexed=True)
-    main_code = String(maxsize=1, indexed=True)
-    code = String(maxsize=12)
-    description = String(maxsize=1024, fulltextindexed=True)
-
-
-class AdministrativeRegion(EntityType):
-    """
-    Entity type for administrative regions of Geonames.
-    See admin1CodesASCII.txt and admin2Codes.txt
-    """
-    name = String(maxsize=1024, indexed=True, fulltextindexed=True)
-    code = String(maxsize=64, indexed=True)
-    country = SubjectRelation('Country', cardinality='?*', inlined=True)
-    geonameid = Int(indexed=True)
-    asciiname = String(maxsize=200, fulltextindexed=True)
-
-
-class Language(EntityType):
-    """
-    Entity type for languages of Geonames.
-    See admin1CodesASCII.txt and admin2Codes.txt
-    """
-    name = String(maxsize=1024, indexed=True, fulltextindexed=True)
-    iso_639_3 = String(maxsize=3, indexed=True)
-    iso_639_2 = String(maxsize=64, indexed=True)
-    iso_639_1 = String(maxsize=3, indexed=True)
-
-
-class Continent(EntityType):
-    """
-    Entity type for continents of geonames.
-    """
-    name = String(maxsize=1024, indexed=True, fulltextindexed=True)
-    code = String(maxsize=2, indexed=True)
-    geonameid = Int(indexed=True)
-
-
-class Country(EntityType):
-    """
-    Entity type for countries of geonames.
-    See countryInfo.txt
-    """
-    name = String(maxsize=1024, indexed=True, fulltextindexed=True)
-    code = String(maxsize=2, indexed=True)
-    code3 = String(maxsize=3, indexed=True)
-    codenum = Int(indexed=True)
-    fips = String(maxsize=2)
-    capital = String(maxsize=1024, fulltextindexed=True)
-    area = Float(indexed=True)
-    population = BigInt(indexed=True)
-    continent_code = String(maxsize=3)
-    continent = SubjectRelation('Continent', cardinality='?*', inlined=True)
-    tld = String(maxsize=64)
-    currency = String(maxsize=1024, fulltextindexed=True)
-    currency_code = String(maxsize=64)
-    geonameid = Int(indexed=True)
-    phone = String(maxsize=64)
-    postal_code = String(maxsize=200)
-    postal_code_regex = String(maxsize=200)
-    languages_code = String(maxsize=200)
-    neighbours_code = String(maxsize=200)
-    equivalent_fips = String(maxsize=2)
 
 
 class TimeZone(EntityType):
@@ -150,15 +52,3 @@
     gmt = Float()
     dst = Float()
     raw_offset = Float()
-
-
-class used_language(RelationDefinition):
-    subject = 'Country'
-    object = 'Language'
-    cardinality = '**'
-
-
-class neighbour_of(RelationDefinition):
-    subject = 'Country'
-    object = 'Country'
-    cardinality = '**'
--- a/cubicweb/dataimport/test/test_massive_store.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/dataimport/test/test_massive_store.py	Thu Oct 20 18:28:46 2016 +0200
@@ -81,12 +81,12 @@
                       'alternatenames': infos[3],
                       'latitude': latitude, 'longitude': longitude,
                       'feature_class': feature_class,
-                      'alternate_country_code':infos[9],
+                      'alternate_country_code': infos[9],
                       'admin_code_3': infos[12],
                       'admin_code_4': infos[13],
                       'population': population, 'elevation': elevation,
                       'gtopo30': gtopo, 'timezone': timezone_code.get(infos[17]),
-                      'cwuri':  u'http://sws.geonames.org/%s/' % int(infos[0]),
+                      'cwuri': u'http://sws.geonames.org/%s/' % int(infos[0]),
                       'geonameid': int(infos[0]),
                       }
             store.prepare_insert_entity('Location', **entity)
@@ -229,7 +229,6 @@
 
     def test_slave_mode_exception(self):
         with self.admin_access.repo_cnx() as cnx:
-            master_store = MassiveObjectStore(cnx, slave_mode=False)
             slave_store = MassiveObjectStore(cnx, slave_mode=True)
             self.assertRaises(RuntimeError, slave_store.finish)
 
@@ -284,15 +283,6 @@
             store.prepare_insert_entity('Location', name=u'toto')
             store.finish()
 
-    def test_multiple_insert_relation(self):
-        with self.admin_access.repo_cnx() as cnx:
-            store = MassiveObjectStore(cnx)
-            store.init_rtype_table('Country', 'used_language', 'Language')
-            store.finish()
-            store = MassiveObjectStore(cnx)
-            store.init_rtype_table('Country', 'used_language', 'Language')
-            store.finish()
-
 
 if __name__ == '__main__':
     import unittest
--- a/cubicweb/dataimport/test/test_stores.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/dataimport/test/test_stores.py	Thu Oct 20 18:28:46 2016 +0200
@@ -107,8 +107,7 @@
             md = DT.datetime.now(pytz.utc) - DT.timedelta(days=1)
             entity, rels = metagen.base_etype_dicts('CWUser')
             entity.cw_edited.update(dict(modification_date=md))
-            with cnx.ensure_cnx_set:
-                metagen.init_entity(entity)
+            metagen.init_entity(entity)
             self.assertEqual(entity.cw_edited['modification_date'], md)
 
 
@@ -140,8 +139,7 @@
             md = DT.datetime.now(pytz.utc) - DT.timedelta(days=1)
             attrs = metagen.base_etype_attrs('CWUser')
             attrs.update(dict(modification_date=md))
-            with cnx.ensure_cnx_set:
-                metagen.init_entity_attrs('CWUser', 1, attrs)
+            metagen.init_entity_attrs('CWUser', 1, attrs)
             self.assertEqual(attrs['modification_date'], md)
 
 
--- a/cubicweb/devtools/devctl.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/devtools/devctl.py	Thu Oct 20 18:28:46 2016 +0200
@@ -30,6 +30,7 @@
 from os import mkdir, chdir, path as osp
 from warnings import warn
 
+from pytz import UTC
 from six.moves import input
 
 from logilab.common import STD_BLACKLIST
@@ -115,6 +116,8 @@
     from cubicweb.cwvreg import CWRegistryStore
     if cubedir:
         cube = osp.split(cubedir)[-1]
+        if cube.startswith('cubicweb_'):
+            cube = cube[len('cubicweb_'):]
         config = DevConfiguration(cube)
         depcubes = list(config._cubes)
         depcubes.remove(cube)
@@ -510,20 +513,6 @@
     return toedit
 
 
-# XXX totally broken, fix it
-# class LiveServerCommand(Command):
-#     """Run a server from within a cube directory.
-#     """
-#     name = 'live-server'
-#     arguments = ''
-#     options = ()
-
-#     def run(self, args):
-#         """run the command with its specific arguments"""
-#         from cubicweb.devtools.livetest import runserver
-#         runserver()
-
-
 class NewCubeCommand(Command):
     """Create a new cube.
 
@@ -619,29 +608,25 @@
             raise BadCommandUsage(
                 'cube name must be a valid python module name')
         verbose = self.get('verbose')
-        cubesdir = self.get('directory')
-        if not cubesdir:
+        destdir = self.get('directory')
+        if not destdir:
             cubespath = ServerConfiguration.cubes_search_path()
             if len(cubespath) > 1:
                 raise BadCommandUsage(
                     "can't guess directory where to put the new cube."
                     " Please specify it using the --directory option")
-            cubesdir = cubespath[0]
-        if not osp.isdir(cubesdir):
-            print("-> creating cubes directory", cubesdir)
+            destdir = cubespath[0]
+        if not osp.isdir(destdir):
+            print("-> creating cubes directory", destdir)
             try:
-                mkdir(cubesdir)
+                mkdir(destdir)
             except OSError as err:
                 self.fail("failed to create directory %r\n(%s)"
-                          % (cubesdir, err))
-        cubedir = osp.join(cubesdir, cubename)
-        if osp.exists(cubedir):
-            self.fail("%s already exists!" % cubedir)
-        skeldir = osp.join(BASEDIR, 'skeleton')
+                          % (destdir, err))
         default_name = 'cubicweb-%s' % cubename.lower().replace('_', '-')
         if verbose:
             distname = input('Debian name for your cube ? [%s]): '
-                                 % default_name).strip()
+                             % default_name).strip()
             if not distname:
                 distname = default_name
             elif not distname.startswith('cubicweb-'):
@@ -652,28 +637,36 @@
         if not re.match('[a-z][-a-z0-9]*$', distname):
             raise BadCommandUsage(
                 'cube distname should be a valid debian package name')
+        cubedir = osp.join(destdir, distname)
+        if osp.exists(cubedir):
+            self.fail("%s already exists!" % cubedir)
+        skeldir = osp.join(BASEDIR, 'skeleton')
         longdesc = shortdesc = input(
             'Enter a short description for your cube: ')
         if verbose:
             longdesc = input(
                 'Enter a long description (leave empty to reuse the short one): ')
-        dependencies = {'cubicweb': '>= %s' % cubicwebversion,
-                        'six': '>= 1.4.0',}
+        dependencies = {
+            'six': '>= 1.4.0',
+            'cubicweb': '>= %s' % cubicwebversion,
+        }
         if verbose:
             dependencies.update(self._ask_for_dependencies())
-        context = {'cubename' : cubename,
-                   'distname' : distname,
-                   'shortdesc' : shortdesc,
-                   'longdesc' : longdesc or shortdesc,
-                   'dependencies' : dependencies,
-                   'version'  : cubicwebversion,
-                   'year'  : str(date.today().year),
-                   'author': self['author'],
-                   'author-email': self['author-email'],
-                   'author-web-site': self['author-web-site'],
-                   'license': self['license'],
-                   'long-license': self.LICENSES[self['license']],
-                   }
+        context = {
+            'cubename': cubename,
+            'distname': distname,
+            'shortdesc': shortdesc,
+            'longdesc': longdesc or shortdesc,
+            'dependencies': dependencies,
+            'version': cubicwebversion,
+            'year': str(date.today().year),
+            'author': self['author'],
+            'author-email': self['author-email'],
+            'rfc2822-date': datetime.now(tz=UTC).strftime('%a, %d %b %Y %T %z'),
+            'author-web-site': self['author-web-site'],
+            'license': self['license'],
+            'long-license': self.LICENSES[self['license']],
+        }
         exclude = SKEL_EXCLUDE
         if self['layout'] == 'simple':
             exclude += ('sobjects.py*', 'precreate.py*', 'realdb_test*',
@@ -848,7 +841,6 @@
 
 for cmdcls in (UpdateCubicWebCatalogCommand,
                UpdateCubeCatalogCommand,
-               #LiveServerCommand,
                NewCubeCommand,
                ExamineLogCommand,
                GenerateSchema,
--- a/cubicweb/devtools/repotest.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/devtools/repotest.py	Thu Oct 20 18:28:46 2016 +0200
@@ -252,10 +252,10 @@
         """lightweight session using the current user with hi-jacked groups"""
         # use self.session.user.eid to get correct owned_by relation, unless explicit eid
         with self.session.new_cnx() as cnx:
-            u = self.repo._build_user(cnx, self.session.user.eid)
-            u._groups = set(groups)
-            s = Session(u, self.repo)
-            return s
+            user_eid = self.session.user.eid
+            session = Session(self.repo._build_user(cnx, user_eid), self.repo)
+            session.data['%s-groups' % user_eid] = set(groups)
+            return session
 
     def qexecute(self, rql, args=None, build_descr=True):
         with self.session.new_cnx() as cnx:
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/devtools/test/data/cubes/i18ntestcube	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,1 @@
+../libpython/cubicweb_i18ntestcube/
\ No newline at end of file
--- a/cubicweb/devtools/test/data/cubes/i18ntestcube/__pkginfo__.py	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,18 +0,0 @@
-# pylint: disable=W0622
-"""cubicweb i18n test cube application packaging information"""
-
-modname = 'i18ntestcube'
-distname = 'cubicweb-i18ntestcube'
-
-numversion = (0, 1, 0)
-version = '.'.join(str(num) for num in numversion)
-
-license = 'LGPL'
-author = 'LOGILAB S.A. (Paris, FRANCE)'
-author_email = 'contact@logilab.fr'
-description = 'forum'
-web = 'http://www.cubicweb.org/project/%s' % distname
-
-__depends__ =  {'cubicweb': '>= 3.16.4',
-               }
-__recommends__ = {}
--- a/cubicweb/devtools/test/data/cubes/i18ntestcube/i18n/en.po.ref	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,182 +0,0 @@
-msgid ""
-msgstr ""
-"Project-Id-Version: cubicweb 3.16.5\n"
-"PO-Revision-Date: 2008-03-28 18:14+0100\n"
-"Last-Translator: Logilab Team <contact@logilab.fr>\n"
-"Language-Team: fr <contact@logilab.fr>\n"
-"MIME-Version: 1.0\n"
-"Content-Type: text/plain; charset=UTF-8\n"
-"Content-Transfer-Encoding: 8bit\n"
-"Generated-By: cubicweb-devtools\n"
-"Plural-Forms: nplurals=2; plural=(n > 1);\n"
-
-# schema pot file, generated on 2013-07-12 16:18:12
-#
-# singular and plural forms for each entity type
-# subject and object forms for each relation type
-# (no object form for final or symmetric relation types)
-msgid "Forum"
-msgstr ""
-
-msgid "Forum_plural"
-msgstr ""
-
-msgid "This Forum"
-msgstr ""
-
-msgid "This Forum:"
-msgstr ""
-
-msgid "New Forum"
-msgstr ""
-
-msgctxt "inlined:Forum.in_forum.object"
-msgid "add a ForumThread"
-msgstr ""
-
-msgctxt "inlined:Forum.in_forum.object"
-msgid "ForumThread"
-msgstr ""
-
-msgid "add ForumThread in_forum Forum object"
-msgstr ""
-
-msgid "add a Forum"
-msgstr ""
-
-msgid "add a ForumThread"
-msgstr ""
-
-msgid "creating ForumThread (ForumThread in_forum Forum %(linkto)s)"
-msgstr ""
-
-msgid "ForumThread"
-msgstr ""
-
-msgid "ForumThread_plural"
-msgstr ""
-
-msgid "This ForumThread"
-msgstr ""
-
-msgid "This ForumThread:"
-msgstr ""
-
-msgid "New ForumThread"
-msgstr ""
-
-msgid "content"
-msgstr ""
-
-msgctxt "ForumThread"
-msgid "content"
-msgstr ""
-
-msgid "content_format"
-msgstr ""
-
-msgctxt "ForumThread"
-msgid "content_format"
-msgstr ""
-
-msgctxt "Forum"
-msgid "description"
-msgstr ""
-
-msgctxt "Forum"
-msgid "description_format"
-msgstr ""
-
-msgid "in_forum"
-msgstr ""
-
-msgctxt "ForumThread"
-msgid "in_forum"
-msgstr ""
-
-msgctxt "Forum"
-msgid "in_forum_object"
-msgstr ""
-
-msgid "in_forum_object"
-msgstr ""
-
-msgid "interested_in"
-msgstr ""
-
-msgctxt "CWUser"
-msgid "interested_in"
-msgstr ""
-
-msgctxt "ForumThread"
-msgid "interested_in_object"
-msgstr ""
-
-msgctxt "Forum"
-msgid "interested_in_object"
-msgstr ""
-
-msgid "interested_in_object"
-msgstr ""
-
-msgid "nosy_list"
-msgstr ""
-
-msgctxt "ForumThread"
-msgid "nosy_list"
-msgstr ""
-
-msgctxt "Forum"
-msgid "nosy_list"
-msgstr ""
-
-msgctxt "CWUser"
-msgid "nosy_list_object"
-msgstr ""
-
-msgid "nosy_list_object"
-msgstr ""
-
-msgctxt "ForumThread"
-msgid "title"
-msgstr ""
-
-msgid "topic"
-msgstr ""
-
-msgctxt "Forum"
-msgid "topic"
-msgstr ""
-
-msgid "Topic"
-msgstr ""
-
-msgid "Description"
-msgstr ""
-
-msgid "Number of threads"
-msgstr ""
-
-msgid "Last activity"
-msgstr ""
-
-msgid ""
-"a long\n"
-"tranlated line\n"
-"hop."
-msgstr ""
-
-msgid "Subject"
-msgstr ""
-
-msgid "Created"
-msgstr ""
-
-msgid "Answers"
-msgstr ""
-
-msgid "Last answered"
-msgstr ""
-
-msgid "This forum does not have any thread yet."
-msgstr ""
--- a/cubicweb/devtools/test/data/cubes/i18ntestcube/schema.py	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,45 +0,0 @@
-# -*- coding: utf-8 -*-
-# copyright 2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr -- mailto:contact@logilab.fr
-#
-# This program is free software: you can redistribute it and/or modify it under
-# the terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option)
-# any later version.
-#
-# This program is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with this program. If not, see <http://www.gnu.org/licenses/>.
-
-"""cubicweb-forum schema"""
-
-from yams.buildobjs import (String, RichString, EntityType,
-                            RelationDefinition, SubjectRelation)
-from yams.reader import context
-
-class Forum(EntityType):
-    topic = String(maxsize=50, required=True, unique=True)
-    description = RichString()
-
-class ForumThread(EntityType):
-    __permissions__ = {
-        'read': ('managers', 'users'),
-        'add': ('managers', 'users'),
-        'update': ('managers', 'owners'),
-        'delete': ('managers', 'owners')
-        }
-    title = String(required=True, fulltextindexed=True, maxsize=256)
-    content = RichString(required=True, fulltextindexed=True)
-    in_forum = SubjectRelation('Forum', cardinality='1*', inlined=True,
-                               composite='object')
-class interested_in(RelationDefinition):
-    subject = 'CWUser'
-    object = ('ForumThread', 'Forum')
-
-class nosy_list(RelationDefinition):
-    subject = ('Forum', 'ForumThread')
-    object = 'CWUser'
--- a/cubicweb/devtools/test/data/cubes/i18ntestcube/views.py	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,61 +0,0 @@
-# copyright 2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr -- mailto:contact@logilab.fr
-#
-# This program is free software: you can redistribute it and/or modify it under
-# the terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option)
-# any later version.
-#
-# This program is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with this program. If not, see <http://www.gnu.org/licenses/>.
-
-"""cubicweb-forum views/forms/actions/components for web ui"""
-
-from cubicweb import view
-from cubicweb.predicates import is_instance
-from cubicweb.web.views import primary, baseviews, uicfg
-from cubicweb.web.views.uicfg import autoform_section as afs
-
-class MyAFS(uicfg.AutoformSectionRelationTags):
-    __select__ = is_instance('ForumThread')
-
-_myafs = MyAFS()
-
-_myafs.tag_object_of(('*', 'in_forum', 'Forum'), 'main', 'inlined')
-
-afs.tag_object_of(('*', 'in_forum', 'Forum'), 'main', 'inlined')
-
-
-class ForumSameETypeListView(baseviews.SameETypeListView):
-    __select__ = baseviews.SameETypeListView.__select__ & is_instance('Forum')
-
-    def call(self, **kwargs):
-        _ = self._cw._
-        _('Topic'), _('Description')
-        _('Number of threads'), _('Last activity')
-        _('''a long
-translated line
-hop.''')
-
-
-class ForumLastActivity(view.EntityView):
-    __regid__ = 'forum_last_activity'
-    __select__ = view.EntityView.__select__ & is_instance('Forum')
-
-
-class ForumPrimaryView(primary.PrimaryView):
-    __select__ = primary.PrimaryView.__select__ & is_instance('Forum')
-
-    def render_entity_attributes(self, entity):
-        _ = self._cw._
-        _('Subject'), _('Created'), _('Answers'),
-        _('Last answered')
-        _('This forum does not have any thread yet.')
-
-class ForumThreadPrimaryView(primary.PrimaryView):
-    __select__ = primary.PrimaryView.__select__ & is_instance('ForumThread')
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/devtools/test/data/libpython/cubicweb_i18ntestcube/__pkginfo__.py	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,18 @@
+# pylint: disable=W0622
+"""cubicweb i18n test cube application packaging information"""
+
+modname = 'i18ntestcube'
+distname = 'cubicweb-i18ntestcube'
+
+numversion = (0, 1, 0)
+version = '.'.join(str(num) for num in numversion)
+
+license = 'LGPL'
+author = 'LOGILAB S.A. (Paris, FRANCE)'
+author_email = 'contact@logilab.fr'
+description = 'forum'
+web = 'http://www.cubicweb.org/project/%s' % distname
+
+__depends__ =  {'cubicweb': '>= 3.16.4',
+               }
+__recommends__ = {}
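
Under the new layout the test cube is a regular Python distribution: the package directory above lives in devtools/test/data/libpython and only needs to be on sys.path (or PYTHONPATH) to become importable. A minimal sketch, assuming a checkout-relative path and that the package ships the usual __init__.py:

    import sys

    sys.path.insert(0, 'cubicweb/devtools/test/data/libpython')  # assumed checkout-relative path

    from cubicweb_i18ntestcube import __pkginfo__

    assert __pkginfo__.modname == 'i18ntestcube'
    assert __pkginfo__.version == '0.1.0'   # '.'.join over numversion (0, 1, 0)
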
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/devtools/test/data/libpython/cubicweb_i18ntestcube/i18n/en.po.ref	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,182 @@
+msgid ""
+msgstr ""
+"Project-Id-Version: cubicweb 3.16.5\n"
+"PO-Revision-Date: 2008-03-28 18:14+0100\n"
+"Last-Translator: Logilab Team <contact@logilab.fr>\n"
+"Language-Team: fr <contact@logilab.fr>\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: cubicweb-devtools\n"
+"Plural-Forms: nplurals=2; plural=(n > 1);\n"
+
+# schema pot file, generated on 2013-07-12 16:18:12
+#
+# singular and plural forms for each entity type
+# subject and object forms for each relation type
+# (no object form for final or symmetric relation types)
+msgid "Forum"
+msgstr ""
+
+msgid "Forum_plural"
+msgstr ""
+
+msgid "This Forum"
+msgstr ""
+
+msgid "This Forum:"
+msgstr ""
+
+msgid "New Forum"
+msgstr ""
+
+msgctxt "inlined:Forum.in_forum.object"
+msgid "add a ForumThread"
+msgstr ""
+
+msgctxt "inlined:Forum.in_forum.object"
+msgid "ForumThread"
+msgstr ""
+
+msgid "add ForumThread in_forum Forum object"
+msgstr ""
+
+msgid "add a Forum"
+msgstr ""
+
+msgid "add a ForumThread"
+msgstr ""
+
+msgid "creating ForumThread (ForumThread in_forum Forum %(linkto)s)"
+msgstr ""
+
+msgid "ForumThread"
+msgstr ""
+
+msgid "ForumThread_plural"
+msgstr ""
+
+msgid "This ForumThread"
+msgstr ""
+
+msgid "This ForumThread:"
+msgstr ""
+
+msgid "New ForumThread"
+msgstr ""
+
+msgid "content"
+msgstr ""
+
+msgctxt "ForumThread"
+msgid "content"
+msgstr ""
+
+msgid "content_format"
+msgstr ""
+
+msgctxt "ForumThread"
+msgid "content_format"
+msgstr ""
+
+msgctxt "Forum"
+msgid "description"
+msgstr ""
+
+msgctxt "Forum"
+msgid "description_format"
+msgstr ""
+
+msgid "in_forum"
+msgstr ""
+
+msgctxt "ForumThread"
+msgid "in_forum"
+msgstr ""
+
+msgctxt "Forum"
+msgid "in_forum_object"
+msgstr ""
+
+msgid "in_forum_object"
+msgstr ""
+
+msgid "interested_in"
+msgstr ""
+
+msgctxt "CWUser"
+msgid "interested_in"
+msgstr ""
+
+msgctxt "ForumThread"
+msgid "interested_in_object"
+msgstr ""
+
+msgctxt "Forum"
+msgid "interested_in_object"
+msgstr ""
+
+msgid "interested_in_object"
+msgstr ""
+
+msgid "nosy_list"
+msgstr ""
+
+msgctxt "ForumThread"
+msgid "nosy_list"
+msgstr ""
+
+msgctxt "Forum"
+msgid "nosy_list"
+msgstr ""
+
+msgctxt "CWUser"
+msgid "nosy_list_object"
+msgstr ""
+
+msgid "nosy_list_object"
+msgstr ""
+
+msgctxt "ForumThread"
+msgid "title"
+msgstr ""
+
+msgid "topic"
+msgstr ""
+
+msgctxt "Forum"
+msgid "topic"
+msgstr ""
+
+msgid "Topic"
+msgstr ""
+
+msgid "Description"
+msgstr ""
+
+msgid "Number of threads"
+msgstr ""
+
+msgid "Last activity"
+msgstr ""
+
+msgid ""
+"a long\n"
+"tranlated line\n"
+"hop."
+msgstr ""
+
+msgid "Subject"
+msgstr ""
+
+msgid "Created"
+msgstr ""
+
+msgid "Answers"
+msgstr ""
+
+msgid "Last answered"
+msgstr ""
+
+msgid "This forum does not have any thread yet."
+msgstr ""
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/devtools/test/data/libpython/cubicweb_i18ntestcube/schema.py	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,45 @@
+# -*- coding: utf-8 -*-
+# copyright 2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr -- mailto:contact@logilab.fr
+#
+# This program is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""cubicweb-forum schema"""
+
+from yams.buildobjs import (String, RichString, EntityType,
+                            RelationDefinition, SubjectRelation)
+from yams.reader import context
+
+class Forum(EntityType):
+    topic = String(maxsize=50, required=True, unique=True)
+    description = RichString()
+
+class ForumThread(EntityType):
+    __permissions__ = {
+        'read': ('managers', 'users'),
+        'add': ('managers', 'users'),
+        'update': ('managers', 'owners'),
+        'delete': ('managers', 'owners')
+        }
+    title = String(required=True, fulltextindexed=True, maxsize=256)
+    content = RichString(required=True, fulltextindexed=True)
+    in_forum = SubjectRelation('Forum', cardinality='1*', inlined=True,
+                               composite='object')
+class interested_in(RelationDefinition):
+    subject = 'CWUser'
+    object = ('ForumThread', 'Forum')
+
+class nosy_list(RelationDefinition):
+    subject = ('Forum', 'ForumThread')
+    object = 'CWUser'
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/devtools/test/data/libpython/cubicweb_i18ntestcube/views.py	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,61 @@
+# copyright 2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr -- mailto:contact@logilab.fr
+#
+# This program is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""cubicweb-forum views/forms/actions/components for web ui"""
+
+from cubicweb import view
+from cubicweb.predicates import is_instance
+from cubicweb.web.views import primary, baseviews, uicfg
+from cubicweb.web.views.uicfg import autoform_section as afs
+
+class MyAFS(uicfg.AutoformSectionRelationTags):
+    __select__ = is_instance('ForumThread')
+
+_myafs = MyAFS()
+
+_myafs.tag_object_of(('*', 'in_forum', 'Forum'), 'main', 'inlined')
+
+afs.tag_object_of(('*', 'in_forum', 'Forum'), 'main', 'inlined')
+
+
+class ForumSameETypeListView(baseviews.SameETypeListView):
+    __select__ = baseviews.SameETypeListView.__select__ & is_instance('Forum')
+
+    def call(self, **kwargs):
+        _ = self._cw._
+        _('Topic'), _('Description')
+        _('Number of threads'), _('Last activity')
+        _('''a long
+translated line
+hop.''')
+
+
+class ForumLastActivity(view.EntityView):
+    __regid__ = 'forum_last_activity'
+    __select__ = view.EntityView.__select__ & is_instance('Forum')
+
+
+class ForumPrimaryView(primary.PrimaryView):
+    __select__ = primary.PrimaryView.__select__ & is_instance('Forum')
+
+    def render_entity_attributes(self, entity):
+        _ = self._cw._
+        _('Subject'), _('Created'), _('Answers'),
+        _('Last answered')
+        _('This forum does not have any thread yet.')
+
+class ForumThreadPrimaryView(primary.PrimaryView):
+    __select__ = primary.PrimaryView.__select__ & is_instance('ForumThread')
--- a/cubicweb/devtools/test/requirements.txt	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,3 +0,0 @@
-Twisted < 16.0.0
-webtest
-flake8
--- a/cubicweb/devtools/test/unittest_devctl.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/devtools/test/unittest_devctl.py	Thu Oct 20 18:28:46 2016 +0200
@@ -22,7 +22,7 @@
 import sys
 import tempfile
 import shutil
-from subprocess import Popen, PIPE, STDOUT
+from subprocess import Popen, PIPE, STDOUT, check_output
 from unittest import TestCase
 
 
@@ -33,36 +33,95 @@
     return proc.returncode, stdout
 
 
-class CubicWebCtlTC(TestCase):
-    """test case for devtools commands"""
+def to_unicode(msg):
+    return msg.decode(sys.getdefaultencoding(), errors='replace')
+
+
+class DevCtlTC(TestCase):
+    """Test case for devtools commands"""
 
     if not hasattr(TestCase, 'assertItemsEqual'):
         assertItemsEqual = TestCase.assertCountEqual
 
     def test_newcube(self):
-        expected = ['i18n', 'hooks.py', 'setup.py', 'views.py', 'test',
-                    'migration', 'entities.py', 'MANIFEST.in', 'schema.py',
-                    'cubicweb-foo.spec', '__init__.py', 'debian', 'data',
-                    '__pkginfo__.py', 'README', 'tox.ini']
+        expected_project_content = ['setup.py', 'test', 'MANIFEST.in',
+                                    'cubicweb_foo',
+                                    'cubicweb-foo.spec', 'debian', 'README',
+                                    'tox.ini']
+        expected_package_content = ['i18n', 'hooks.py', 'views.py',
+                                    'migration', 'entities.py', 'schema.py',
+                                    '__init__.py', 'data', '__pkginfo__.py']
         tmpdir = tempfile.mkdtemp(prefix="temp-cwctl-newcube")
         try:
             retcode, stdout = newcube(tmpdir, 'foo')
-            self.assertItemsEqual(os.listdir(osp.join(tmpdir, 'foo')), expected)
+            self.assertEqual(retcode, 0, msg=to_unicode(stdout))
+            project_dir = osp.join(tmpdir, 'cubicweb-foo')
+            project_content = os.listdir(project_dir)
+            package_dir = osp.join(project_dir, 'cubicweb_foo')
+            package_content = os.listdir(package_dir)
+            self.assertItemsEqual(project_content, expected_project_content)
+            self.assertItemsEqual(package_content, expected_package_content)
         finally:
             shutil.rmtree(tmpdir, ignore_errors=True)
-        self.assertEqual(retcode, 0, msg=stdout)
 
     def test_flake8(self):
         """Ensure newcube built from skeleton is flake8-compliant"""
         tmpdir = tempfile.mkdtemp(prefix="temp-cwctl-newcube-flake8")
         try:
             newcube(tmpdir, 'foo')
-            cmd = [sys.executable, '-m', 'flake8', osp.join(tmpdir, 'foo')]
+            cmd = [sys.executable, '-m', 'flake8',
+                   osp.join(tmpdir, 'cubicweb-foo', 'cubicweb_foo')]
             proc = Popen(cmd, stdout=PIPE, stderr=STDOUT)
             retcode = proc.wait()
         finally:
             shutil.rmtree(tmpdir, ignore_errors=True)
-        self.assertEqual(retcode, 0, proc.stdout.read())
+        self.assertEqual(retcode, 0,
+                         msg=to_unicode(proc.stdout.read()))
+
+    def test_newcube_sdist(self):
+        """Ensure sdist can be built from a new cube"""
+        tmpdir = tempfile.mkdtemp(prefix="temp-cwctl-newcube-sdist")
+        try:
+            newcube(tmpdir, 'foo')
+            projectdir = osp.join(tmpdir, 'cubicweb-foo')
+            cmd = [sys.executable, 'setup.py', 'sdist']
+            proc = Popen(cmd, stdout=PIPE, stderr=STDOUT, cwd=projectdir)
+            retcode = proc.wait()
+            stdout = to_unicode(proc.stdout.read())
+            self.assertEqual(retcode, 0, stdout)
+            distfpath = osp.join(projectdir, 'dist', 'cubicweb-foo-0.1.0.tar.gz')
+            self.assertTrue(osp.isfile(distfpath))
+        finally:
+            shutil.rmtree(tmpdir, ignore_errors=True)
+
+    def test_newcube_install(self):
+        """Ensure a new cube can be installed"""
+        tmpdir = tempfile.mkdtemp(prefix="temp-cwctl-newcube-install")
+        try:
+            newcube(tmpdir, 'foo')
+            projectdir = osp.join(tmpdir, 'cubicweb-foo')
+            env = os.environ.copy()
+            env['HOME'] = tmpdir
+            cmd = [sys.executable, 'setup.py', 'install', '--user']
+            proc = Popen(cmd, stdout=PIPE, stderr=STDOUT,
+                         cwd=projectdir, env=env)
+            retcode = proc.wait()
+            stdout = to_unicode(proc.stdout.read())
+            self.assertEqual(retcode, 0, stdout)
+            targetdir = check_output([sys.executable, '-m', 'site', '--user-site'],
+                                     env=env, cwd=projectdir).strip()
+            target_egg = 'cubicweb_foo-0.1.0-py{0}.egg'.format(sys.version[:3]).encode()
+            self.assertTrue(osp.isdir(osp.join(targetdir, target_egg)),
+                            'target directory content: %s' % os.listdir(targetdir))
+            pkgdir = osp.join(targetdir, target_egg, b'cubicweb_foo')
+            self.assertTrue(osp.isdir(pkgdir),
+                            os.listdir(osp.join(targetdir, target_egg)))
+            pkgcontent = [f for f in os.listdir(pkgdir) if f.endswith(b'.py')]
+            self.assertItemsEqual(pkgcontent,
+                                  [b'schema.py', b'entities.py', b'hooks.py', b'__init__.py',
+                                   b'__pkginfo__.py', b'views.py'])
+        finally:
+            shutil.rmtree(tmpdir, ignore_errors=True)
 
 
 if __name__ == '__main__':
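
The rewritten tests above rely on the naming convention of the new cube layout: cube "foo" is shipped as project "cubicweb-foo", which contains the importable package "cubicweb_foo". A small sketch of that convention and of the artifact names checked by the sdist/install tests (plain string manipulation, no cubicweb API involved):

    import sys

    cube = 'foo'
    version = '0.1.0'                       # numversion of the generated skeleton
    project = 'cubicweb-%s' % cube          # setup.py, MANIFEST.in, debian/, tox.ini, ...
    package = 'cubicweb_%s' % cube          # schema.py, views.py, __pkginfo__.py, ...
    sdist = '%s-%s.tar.gz' % (project, version)
    egg = 'cubicweb_%s-%s-py%s.egg' % (cube, version, sys.version[:3])
    print(project, package, sdist, egg)
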
--- a/cubicweb/devtools/test/unittest_i18n.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/devtools/test/unittest_i18n.py	Thu Oct 20 18:28:46 2016 +0200
@@ -57,19 +57,33 @@
 
     def test_i18ncube(self):
         env = os.environ.copy()
+        if 'PYTHONPATH' in env:
+            env['PYTHONPATH'] += os.pathsep
+        else:
+            env['PYTHONPATH'] = ''
+        env['PYTHONPATH'] += osp.join(DATADIR, 'libpython')
+        cubedir = osp.join(DATADIR, 'libpython', 'cubicweb_i18ntestcube')
+        self._check(cubedir, env)
+
+    def test_i18ncube_legacy_layout(self):
+        env = os.environ.copy()
         env['CW_CUBES_PATH'] = osp.join(DATADIR, 'cubes')
         if 'PYTHONPATH' in env:
             env['PYTHONPATH'] += os.pathsep
         else:
             env['PYTHONPATH'] = ''
         env['PYTHONPATH'] += DATADIR
+        cubedir = osp.join(DATADIR, 'cubes', 'i18ntestcube')
+        self._check(cubedir, env)
+
+    def _check(self, cubedir, env):
         cmd = [sys.executable, '-m', 'cubicweb', 'i18ncube', 'i18ntestcube']
         proc = Popen(cmd, env=env, stdout=PIPE, stderr=STDOUT)
         stdout, _ = proc.communicate()
-        self.assertEqual(proc.returncode, 0, msg=stdout)
-        cube = osp.join(DATADIR, 'cubes', 'i18ntestcube')
-        msgs = load_po(osp.join(cube, 'i18n', 'en.po.ref'))
-        newmsgs = load_po(osp.join(cube, 'i18n', 'en.po'))
+        msg = stdout.decode(sys.getdefaultencoding(), errors='replace')
+        self.assertEqual(proc.returncode, 0, msg=msg)
+        msgs = load_po(osp.join(cubedir, 'i18n', 'en.po.ref'))
+        newmsgs = load_po(osp.join(cubedir, 'i18n', 'en.po'))
         self.assertEqual(msgs, newmsgs)
 
 
--- a/cubicweb/devtools/testlib.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/devtools/testlib.py	Thu Oct 20 18:28:46 2016 +0200
@@ -890,8 +890,9 @@
 
     def assertAuthSuccess(self, req, origsession, nbsessions=1):
         session = self.app.get_session(req)
-        cnx = repoapi.Connection(session)
-        req.set_cnx(cnx)
+        cnx = session.new_cnx()
+        with cnx:
+            req.set_cnx(cnx)
         self.assertEqual(len(self.open_sessions), nbsessions, self.open_sessions)
         self.assertEqual(session.login, origsession.login)
         self.assertEqual(session.anonymous_session, False)
@@ -942,10 +943,8 @@
                   encapsulation the generated HTML
         """
         if req is None:
-            if rset is None:
-                req = self.request()
-            else:
-                req = rset.req
+            assert rset is not None, 'you must supply at least one of rset or req'
+            req = rset.req
         req.form['vid'] = vid
         viewsreg = self.vreg['views']
         view = viewsreg.select(vid, req, rset=rset, **kwargs)
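
Two behavioural changes above: connections obtained in tests are context managers that must be entered, and self.view() now insists on an rset when no request is given. A hypothetical test sketch using the standard testlib access objects:

    from cubicweb.devtools.testlib import CubicWebTC


    class ViewSmokeTC(CubicWebTC):

        def test_cwuser_list(self):
            with self.admin_access.web_request() as req:
                rset = req.execute('Any U WHERE U is CWUser')
                # passing neither req nor rset would now fail the assertion above
                self.view('list', rset, req=req)
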
--- a/cubicweb/entities/adapters.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/entities/adapters.py	Thu Oct 20 18:28:46 2016 +0200
@@ -21,7 +21,6 @@
 from cubicweb import _
 
 from itertools import chain
-from hashlib import md5
 
 from logilab.mtconverter import TransformError
 from logilab.common.decorators import cached
@@ -413,9 +412,7 @@
         for rschema, attrschema in eschema.attribute_definitions():
             rdef = rschema.rdef(eschema, attrschema)
             for constraint in rdef.constraints:
-                if cstrname == 'cstr' + md5(
-                        (eschema.type + rschema.type + constraint.type() +
-                         (constraint.serialize() or '')).encode('ascii')).hexdigest():
+                if cstrname == constraint.name_for(rdef):
                     break
             else:
                 continue
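
The removed lines inlined the constraint-name computation that is now delegated to yams via constraint.name_for(rdef). For reference, a sketch of that legacy scheme, built from the same inputs as the deleted code:

    from hashlib import md5


    def legacy_constraint_name(eschema, rschema, constraint):
        """Name formerly computed inline: 'cstr' + md5 of entity type,
        relation type, constraint type and serialized constraint."""
        data = (eschema.type + rschema.type + constraint.type()
                + (constraint.serialize() or ''))
        return 'cstr' + md5(data.encode('ascii')).hexdigest()
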
--- a/cubicweb/entities/authobjs.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/entities/authobjs.py	Thu Oct 20 18:28:46 2016 +0200
@@ -26,6 +26,11 @@
 from cubicweb import Unauthorized
 from cubicweb.entities import AnyEntity, fetch_config
 
+
+def user_session_cache_key(user_eid, data_name):
+    return '{0}-{1}'.format(user_eid, data_name)
+
+
 class CWGroup(AnyEntity):
     __regid__ = 'CWGroup'
     fetch_attrs, cw_fetch_order = fetch_config(['name'])
@@ -54,34 +59,32 @@
     AUTHENTICABLE_STATES = ('activated',)
 
     # low level utilities #####################################################
-    def __init__(self, *args, **kwargs):
-        groups = kwargs.pop('groups', None)
-        properties = kwargs.pop('properties', None)
-        super(CWUser, self).__init__(*args, **kwargs)
-        if groups is not None:
-            self._groups = groups
-        if properties is not None:
-            self._properties = properties
 
     @property
     def groups(self):
+        key = user_session_cache_key(self.eid, 'groups')
         try:
-            return self._groups
-        except AttributeError:
-            self._groups = set(g.name for g in self.in_group)
-            return self._groups
+            return self._cw.data[key]
+        except KeyError:
+            with self._cw.security_enabled(read=False):
+                groups = set(group for group, in self._cw.execute(
+                    'Any GN WHERE U in_group G, G name GN, U eid %(userid)s',
+                    {'userid': self.eid}))
+            self._cw.data[key] = groups
+            return groups
 
     @property
     def properties(self):
+        key = user_session_cache_key(self.eid, 'properties')
         try:
-            return self._properties
-        except AttributeError:
-            self._properties = dict(
-                self._cw.execute(
+            return self._cw.data[key]
+        except KeyError:
+            with self._cw.security_enabled(read=False):
+                properties = dict(self._cw.execute(
                     'Any K, V WHERE P for_user U, U eid %(userid)s, '
-                    'P pkey K, P value V',
-                    {'userid': self.eid}))
-            return self._properties
+                    'P pkey K, P value V', {'userid': self.eid}))
+            self._cw.data[key] = properties
+            return properties
 
     def prefered_language(self, language=None):
         """return language used by this user, if explicitly defined (eg not
--- a/cubicweb/entities/test/requirements.txt	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,1 +0,0 @@
-docutils
--- a/cubicweb/etwist/test/requirements.txt	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,1 +0,0 @@
-Twisted < 16.0.0
--- a/cubicweb/ext/test/requirements.txt	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,1 +0,0 @@
-docutils
--- a/cubicweb/hooks/syncschema.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/hooks/syncschema.py	Thu Oct 20 18:28:46 2016 +0200
@@ -28,7 +28,6 @@
 
 import json
 from copy import copy
-from hashlib import md5
 
 from yams.schema import BASE_TYPES, BadSchemaDefinition, RelationDefinitionSchema
 from yams.constraints import UniqueConstraint
@@ -42,12 +41,11 @@
                              CONSTRAINTS, UNIQUE_CONSTRAINTS, ETYPE_NAME_MAP)
 from cubicweb.server import hook, schemaserial as ss, schema2sql as y2sql
 from cubicweb.server.sqlutils import SQL_PREFIX
-from cubicweb.server.schema2sql import unique_index_name
 from cubicweb.hooks.synccomputed import RecomputeAttributeOperation
 
 # core entity and relation types which can't be removed
 CORE_TYPES = BASE_TYPES | SCHEMA_TYPES | META_RTYPES | set(
-    ('CWUser', 'CWGroup','login', 'upassword', 'name', 'in_group'))
+    ('CWUser', 'CWGroup', 'login', 'upassword', 'name', 'in_group'))
 
 
 def get_constraints(cnx, entity):
@@ -78,7 +76,8 @@
     table = SQL_PREFIX + etype
     column = SQL_PREFIX + rtype
     try:
-        cnx.system_sql(str('ALTER TABLE %s ADD %s integer REFERENCES entities (eid)' % (table, column)),
+        cnx.system_sql(str('ALTER TABLE %s ADD %s integer REFERENCES entities (eid)'
+                           % (table, column)),
                        rollback_on_failure=False)
         cnx.info('added column %s to table %s', column, table)
     except Exception:
@@ -242,7 +241,7 @@
       CWAttribute entities
     * add <meta rtype> relation by creating the necessary CWRelation entity
     """
-    entity = None # make pylint happy
+    entity = None  # make pylint happy
 
     def precommit_event(self):
         cnx = self.cnx
@@ -252,11 +251,9 @@
                                description=entity.description)
         eschema = schema.add_entity_type(etype)
         # create the necessary table
-        tablesql = y2sql.eschema2sql(cnx.repo.system_source.dbhelper,
-                                     eschema, prefix=SQL_PREFIX)
-        for sql in tablesql.split(';'):
-            if sql.strip():
-                cnx.system_sql(sql)
+        for sql in y2sql.eschema2sql(cnx.repo.system_source.dbhelper,
+                                     eschema, prefix=SQL_PREFIX):
+            cnx.system_sql(sql)
         # add meta relations
         gmap = group_mapping(cnx)
         cmap = ss.cstrtype_mapping(cnx)
@@ -326,11 +323,11 @@
                 source.create_index(cnx, new_table, SQL_PREFIX + rschema.type, unique=True)
         for attrs in eschema._unique_together or ():
             columns = ['%s%s' % (SQL_PREFIX, attr) for attr in attrs]
-            old_index_name = unique_index_name(oldname, columns)
+            old_index_name = y2sql.unique_index_name(oldname, columns)
             for sql in dbhelper.sqls_drop_multicol_unique_index(
                     new_table, columns, old_index_name):
                 sqlexec(sql)
-            new_index_name = unique_index_name(newname, columns)
+            new_index_name = y2sql.unique_index_name(newname, columns)
             for sql in dbhelper.sqls_create_multicol_unique_index(
                     new_table, columns, new_index_name):
                 sqlexec(sql)
@@ -364,11 +361,11 @@
                     op.add_data(objtype)
                     op.add_data(subjtype)
         # update the in-memory schema first
-        self.oldvalues = dict( (attr, getattr(rschema, attr)) for attr in self.values)
+        self.oldvalues = dict((attr, getattr(rschema, attr)) for attr in self.values)
         self.rschema.__dict__.update(self.values)
         # then make necessary changes to the system source database
         if 'inlined' not in self.values:
-            return # nothing to do
+            return  # nothing to do
         inlined = self.values['inlined']
         # check in-lining is possible when inlined
         if inlined:
@@ -380,12 +377,10 @@
         if not inlined:
             # need to create the relation if it has not been already done by
             # another event of the same transaction
-            if not rschema.type in cnx.transaction_data.get('createdtables', ()):
-                tablesql = y2sql.rschema2sql(rschema)
+            if rschema.type not in cnx.transaction_data.get('createdtables', ()):
                 # create the necessary table
-                for sql in tablesql.split(';'):
-                    if sql.strip():
-                        sqlexec(sql)
+                for sql in y2sql.rschema2sql(rschema):
+                    sqlexec(sql)
                 cnx.transaction_data.setdefault('createdtables', []).append(
                     rschema.type)
             # copy existant data
@@ -395,7 +390,6 @@
                 sqlexec('INSERT INTO %s_relation SELECT %s, %s FROM %s WHERE NOT %s IS NULL'
                         % (rtype, eidcolumn, column, table, column))
             # drop existant columns
-            #if cnx.repo.system_source.dbhelper.alter_column_support:
             for etype in rschema.subjects():
                 DropColumn.get_instance(cnx).add_data((str(etype), rtype))
         else:
@@ -433,7 +427,7 @@
 
 class CWComputedRTypeUpdateOp(MemSchemaOperation):
     """actually update some properties of a computed relation definition"""
-    rschema = entity = rule = None # make pylint happy
+    rschema = entity = rule = None  # make pylint happy
     old_rule = None
 
     def precommit_event(self):
@@ -455,7 +449,7 @@
 
     constraints are handled by specific hooks
     """
-    entity = None # make pylint happy
+    entity = None  # make pylint happy
 
     def init_rdef(self, **kwargs):
         entity = self.entity
@@ -530,7 +524,7 @@
         try:
             eschema = schema.eschema(rdefdef.subject)
         except KeyError:
-            return # entity type currently being added
+            return  # entity type currently being added
         # propagate attribute to children classes
         rschema = schema.rschema(rdefdef.name)
         # if relation type has been inserted in the same transaction, its final
@@ -541,7 +535,7 @@
         if default is not None:
             default = convert_default_value(self.rdefdef, default)
             cnx.system_sql('UPDATE %s SET %s=%%(default)s' % (table, column),
-                               {'default': default})
+                           {'default': default})
         # if attribute is computed, compute it
         if getattr(entity, 'formula', None):
             # add rtype attribute for RelationDefinitionSchema api compat, this
@@ -569,7 +563,7 @@
 
     constraints are handled by specific hooks
     """
-    entity = None # make pylint happy
+    entity = None  # make pylint happy
 
     def precommit_event(self):
         cnx = self.cnx
@@ -603,9 +597,8 @@
                     rtype in cnx.transaction_data.get('createdtables', ())):
                 rschema = schema.rschema(rtype)
                 # create the necessary table
-                for sql in y2sql.rschema2sql(rschema).split(';'):
-                    if sql.strip():
-                        cnx.system_sql(sql)
+                for sql in y2sql.rschema2sql(rschema):
+                    cnx.system_sql(sql)
                 cnx.transaction_data.setdefault('createdtables', []).append(
                     rtype)
 
@@ -614,7 +607,7 @@
 
 class RDefDelOp(MemSchemaOperation):
     """an actual relation has been removed"""
-    rdef = None # make pylint happy
+    rdef = None  # make pylint happy
 
     def precommit_event(self):
         cnx = self.cnx
@@ -677,7 +670,7 @@
 
 class RDefUpdateOp(MemSchemaOperation):
     """actually update some properties of a relation definition"""
-    rschema = rdefkey = values = None # make pylint happy
+    rschema = rdefkey = values = None  # make pylint happy
     rdef = oldvalues = None
     indexed_changed = null_allowed_changed = False
 
@@ -685,15 +678,15 @@
         cnx = self.cnx
         rdef = self.rdef = self.rschema.rdefs[self.rdefkey]
         # update the in-memory schema first
-        self.oldvalues = dict( (attr, getattr(rdef, attr)) for attr in self.values)
+        self.oldvalues = dict((attr, getattr(rdef, attr)) for attr in self.values)
         rdef.update(self.values)
         # then make necessary changes to the system source database
         syssource = cnx.repo.system_source
         if 'indexed' in self.values:
             syssource.update_rdef_indexed(cnx, rdef)
             self.indexed_changed = True
-        if 'cardinality' in self.values and rdef.rtype.final \
-              and self.values['cardinality'][0] != self.oldvalues['cardinality'][0]:
+        if ('cardinality' in self.values and rdef.rtype.final
+                and self.values['cardinality'][0] != self.oldvalues['cardinality'][0]):
             syssource.update_rdef_null_allowed(self.cnx, rdef)
             self.null_allowed_changed = True
         if 'fulltextindexed' in self.values:
@@ -724,7 +717,7 @@
 
 class CWConstraintDelOp(MemSchemaOperation):
     """actually remove a constraint of a relation definition"""
-    rdef = oldcstr = newcstr = None # make pylint happy
+    rdef = oldcstr = newcstr = None  # make pylint happy
     size_cstr_changed = unique_changed = False
 
     def precommit_event(self):
@@ -760,10 +753,11 @@
         elif cstrtype == 'UniqueConstraint':
             syssource.update_rdef_unique(cnx, rdef)
             self.unique_changed = True
-        if cstrtype in ('BoundaryConstraint', 'IntervalBoundConstraint', 'StaticVocabularyConstraint'):
-            cstrname = 'cstr' + md5((rdef.subject.type + rdef.rtype.type + cstrtype +
-                                     (self.oldcstr.serialize() or '')).encode('utf-8')).hexdigest()
-            cnx.system_sql('ALTER TABLE %s%s DROP CONSTRAINT %s' % (SQL_PREFIX, rdef.subject.type, cstrname))
+        elif cstrtype in ('BoundaryConstraint',
+                          'IntervalBoundConstraint',
+                          'StaticVocabularyConstraint'):
+            cnx.system_sql('ALTER TABLE %s%s DROP CONSTRAINT %s'
+                           % (SQL_PREFIX, rdef.subject, self.oldcstr.name_for(rdef)))
 
     def revertprecommit_event(self):
         # revert changes on in memory schema
@@ -781,7 +775,7 @@
 
 class CWConstraintAddOp(CWConstraintDelOp):
     """actually update constraint of a relation definition"""
-    entity = None # make pylint happy
+    entity = None  # make pylint happy
 
     def precommit_event(self):
         cnx = self.cnx
@@ -812,22 +806,21 @@
         if cstrtype in ('BoundaryConstraint',
                         'IntervalBoundConstraint',
                         'StaticVocabularyConstraint'):
-            cstrname, check = y2sql.check_constraint(rdef.subject, rdef.object, rdef.rtype.type,
-                                                     newcstr, syssource.dbhelper, prefix=SQL_PREFIX)
+            cstrname, check = y2sql.check_constraint(rdef, newcstr, syssource.dbhelper,
+                                                     prefix=SQL_PREFIX)
             # oldcstr is the new constraint when the attribute is being added in the same
             # transaction or when constraint value is updated. So we've to take care...
             if oldcstr is not None:
-                oldcstrname = 'cstr' + md5((rdef.subject.type + rdef.rtype.type + cstrtype +
-                                            (self.oldcstr.serialize() or '')).encode('utf-8')).hexdigest()
+                oldcstrname = self.oldcstr.name_for(rdef)
                 if oldcstrname != cstrname:
                     cnx.system_sql('ALTER TABLE %s%s DROP CONSTRAINT %s'
-                                   % (SQL_PREFIX, rdef.subject.type, oldcstrname))
+                                   % (SQL_PREFIX, rdef.subject, oldcstrname))
             cnx.system_sql('ALTER TABLE %s%s ADD CONSTRAINT %s CHECK(%s)' %
-                           (SQL_PREFIX, rdef.subject.type, cstrname, check))
+                           (SQL_PREFIX, rdef.subject, cstrname, check))
 
 
 class CWUniqueTogetherConstraintAddOp(MemSchemaOperation):
-    entity = None # make pylint happy
+    entity = None  # make pylint happy
 
     def precommit_event(self):
         cnx = self.cnx
@@ -848,8 +841,8 @@
 
 
 class CWUniqueTogetherConstraintDelOp(MemSchemaOperation):
-    entity = cstrname = None # for pylint
-    cols = () # for pylint
+    entity = cstrname = None  # make pylint happy
+    cols = ()  # make pylint happy
 
     def insert_index(self):
         # We need to run before CWConstraintDelOp: if a size constraint is
@@ -880,7 +873,7 @@
 
 class MemSchemaCWETypeDel(MemSchemaOperation):
     """actually remove the entity type from the instance's schema"""
-    etype = None # make pylint happy
+    etype = None  # make pylint happy
 
     def postcommit_event(self):
         # del_entity_type also removes entity's relations
@@ -889,7 +882,7 @@
 
 class MemSchemaCWRTypeAdd(MemSchemaOperation):
     """actually add the relation type to the instance's schema"""
-    rtypedef = None # make pylint happy
+    rtypedef = None  # make pylint happy
 
     def precommit_event(self):
         self.cnx.vreg.schema.add_relation_type(self.rtypedef)
@@ -900,7 +893,7 @@
 
 class MemSchemaCWRTypeDel(MemSchemaOperation):
     """actually remove the relation type from the instance's schema"""
-    rtype = None # make pylint happy
+    rtype = None  # make pylint happy
 
     def postcommit_event(self):
         try:
@@ -913,7 +906,7 @@
 class MemSchemaPermissionAdd(MemSchemaOperation):
     """synchronize schema when a *_permission relation has been added on a group
     """
-    eid = action = group_eid = expr = None # make pylint happy
+    eid = action = group_eid = expr = None  # make pylint happy
 
     def precommit_event(self):
         """the observed connections.cnxset has been commited"""
@@ -968,7 +961,7 @@
 
 
 class MemSchemaSpecializesAdd(MemSchemaOperation):
-    etypeeid = parentetypeeid = None # make pylint happy
+    etypeeid = parentetypeeid = None  # make pylint happy
 
     def precommit_event(self):
         eschema = self.cnx.vreg.schema.schema_by_eid(self.etypeeid)
@@ -980,7 +973,7 @@
 
 
 class MemSchemaSpecializesDel(MemSchemaOperation):
-    etypeeid = parentetypeeid = None # make pylint happy
+    etypeeid = parentetypeeid = None  # make pylint happy
 
     def precommit_event(self):
         try:
@@ -1084,9 +1077,9 @@
             raise validation_error(self.entity, {None: _("can't be deleted")})
         # delete relation definitions using this relation type
         self._cw.execute('DELETE CWAttribute X WHERE X relation_type Y, Y eid %(x)s',
-                        {'x': self.entity.eid})
+                         {'x': self.entity.eid})
         self._cw.execute('DELETE CWRelation X WHERE X relation_type Y, Y eid %(x)s',
-                        {'x': self.entity.eid})
+                         {'x': self.entity.eid})
         MemSchemaCWRTypeDel(self._cw, rtype=name)
 
 
@@ -1192,10 +1185,8 @@
         pendingrdefs = cnx.transaction_data.setdefault('pendingrdefs', set())
         # first delete existing relation if necessary
         if rschema.final:
-            rdeftype = 'CWAttribute'
             pendingrdefs.add((subjschema, rschema))
         else:
-            rdeftype = 'CWRelation'
             pendingrdefs.add((subjschema, rschema, objschema))
         RDefDelOp(cnx, rdef=rdef)
 
@@ -1316,6 +1307,7 @@
         else:
             CWConstraintDelOp(self._cw, rdef=rdef, oldcstr=cstr)
 
+
 # unique_together constraints
 # XXX: use setoperations and before_add_relation here (on constraint_of and relations)
 class AfterAddCWUniqueTogetherConstraintHook(SyncSchemaHook):
@@ -1358,7 +1350,7 @@
         if self._cw.entity_metas(self.eidto)['type'] == 'CWGroup':
             MemSchemaPermissionAdd(self._cw, action=action, eid=self.eidfrom,
                                    group_eid=self.eidto)
-        else: # RQLExpression
+        else:  # RQLExpression
             expr = self._cw.entity_from_eid(self.eidto).expression
             MemSchemaPermissionAdd(self._cw, action=action, eid=self.eidfrom,
                                    expr=expr)
@@ -1379,13 +1371,12 @@
         if self._cw.entity_metas(self.eidto)['type'] == 'CWGroup':
             MemSchemaPermissionDel(self._cw, action=action, eid=self.eidfrom,
                                    group_eid=self.eidto)
-        else: # RQLExpression
+        else:  # RQLExpression
             expr = self._cw.entity_from_eid(self.eidto).expression
             MemSchemaPermissionDel(self._cw, action=action, eid=self.eidfrom,
                                    expr=expr)
 
 
-
 class UpdateFTIndexOp(hook.DataOperationMixIn, hook.SingleLastOperation):
     """operation to update full text indexation of entity whose schema change
 
@@ -1417,11 +1408,8 @@
             cnx.cnxset.commit()
 
 
-
-
 # specializes synchronization hooks ############################################
 
-
 class AfterAddSpecializesHook(SyncSchemaHook):
     __regid__ = 'syncaddspecializes'
     __select__ = SyncSchemaHook.__select__ & hook.match_rtype('specializes')
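
Several hunks above adapt to a schema2sql API change: eschema2sql()/rschema2sql() now yield individual SQL statements instead of returning a single ';'-separated script. The resulting calling pattern, as a standalone sketch (the helper name is hypothetical):

    from cubicweb.server import schema2sql as y2sql
    from cubicweb.server.sqlutils import SQL_PREFIX


    def create_entity_type_table(cnx, eschema):
        """Create the SQL table for a freshly added entity type."""
        dbhelper = cnx.repo.system_source.dbhelper
        for sql in y2sql.eschema2sql(dbhelper, eschema, prefix=SQL_PREFIX):
            cnx.system_sql(sql)
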
--- a/cubicweb/hooks/syncsession.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/hooks/syncsession.py	Thu Oct 20 18:28:46 2016 +0200
@@ -23,12 +23,35 @@
 from cubicweb import UnknownProperty, BadConnectionId, validation_error
 from cubicweb.predicates import is_instance
 from cubicweb.server import hook
+from cubicweb.entities.authobjs import user_session_cache_key
+
+
+# take cnx and not repo because it's needed for other session implementations (e.g. pyramid)
+def get_user_sessions(cnx, ueid):
+    for session in cnx.repo._sessions.values():
+        if ueid == session.user.eid:
+            yield session
 
 
-def get_user_sessions(repo, ueid):
-    for session in repo._sessions.values():
-        if ueid == session.user.eid:
-            yield session
+class CachedValueMixin(object):
+    """Mixin class providing methods to retrieve some value, specified through
+    `value_name` attribute, in session data.
+    """
+    value_name = None
+    session = None  # make pylint happy
+
+    @property
+    def cached_value(self):
+        """Return cached value for the user, or None"""
+        key = user_session_cache_key(self.session.user.eid, self.value_name)
+        return self.session.data.get(key, None)
+
+    def update_cached_value(self, value):
+        """Update cached value for the user (modifying the set returned by cached_value may not be
+        necessary depending on session data implementation, e.g. redis)
+        """
+        key = user_session_cache_key(self.session.user.eid, self.value_name)
+        self.session.data[key] = value
 
 
 class SyncSessionHook(hook.Hook):
@@ -38,18 +61,18 @@
 
 # user/groups synchronisation #################################################
 
-class _GroupOperation(hook.Operation):
-    """base class for group operation"""
-    cnxuser = None # make pylint happy
+class _GroupOperation(CachedValueMixin, hook.Operation):
+    """Base class for group operation"""
+    value_name = 'groups'
 
     def __init__(self, cnx, *args, **kwargs):
-        """override to get the group name before actual groups manipulation:
+        """Override to get the group name before actual groups manipulation
 
         we may temporarily lose right access during a commit event, so
         no query should be emitted while committing
         """
         rql = 'Any N WHERE G eid %(x)s, G name N'
-        result = cnx.execute(rql, {'x': kwargs['geid']}, build_descr=False)
+        result = cnx.execute(rql, {'x': kwargs['group_eid']}, build_descr=False)
         hook.Operation.__init__(self, cnx, *args, **kwargs)
         self.group = result[0][0]
 
@@ -58,25 +81,20 @@
     """Synchronize user when a in_group relation has been deleted"""
 
     def postcommit_event(self):
-        """the observed connections set has been commited"""
-        groups = self.cnxuser.groups
-        try:
-            groups.remove(self.group)
-        except KeyError:
-            self.error('user %s not in group %s',  self.cnxuser, self.group)
+        cached_groups = self.cached_value
+        if cached_groups is not None:
+            cached_groups.remove(self.group)
+            self.update_cached_value(cached_groups)
 
 
 class _AddGroupOp(_GroupOperation):
     """Synchronize user when a in_group relation has been added"""
 
     def postcommit_event(self):
-        """the observed connections set has been commited"""
-        groups = self.cnxuser.groups
-        if self.group in groups:
-            self.warning('user %s already in group %s', self.cnxuser,
-                         self.group)
-        else:
-            groups.add(self.group)
+        cached_groups = self.cached_value
+        if cached_groups is not None:
+            cached_groups.add(self.group)
+            self.update_cached_value(cached_groups)
 
 
 class SyncInGroupHook(SyncSessionHook):
@@ -90,67 +108,82 @@
             opcls = _DeleteGroupOp
         else:
             opcls = _AddGroupOp
-        for session in get_user_sessions(self._cw.repo, self.eidfrom):
-            opcls(self._cw, cnxuser=session.user, geid=self.eidto)
+        for session in get_user_sessions(self._cw, self.eidfrom):
+            opcls(self._cw, session=session, group_eid=self.eidto)
 
 
-class _DelUserOp(hook.Operation):
-    """close associated user's session when it is deleted"""
-    def __init__(self, cnx, sessionid):
-        self.sessionid = sessionid
-        hook.Operation.__init__(self, cnx)
+class _CloseSessionOp(hook.Operation):
+    """Close user's session when it has been deleted"""
 
     def postcommit_event(self):
         try:
-            self.cnx.repo.close(self.sessionid)
+            # remove cached groups for the user
+            key = user_session_cache_key(self.session.user.eid, 'groups')
+            self.session.data.pop(key, None)
+            self.session.repo.close(self.session.sessionid)
         except BadConnectionId:
             pass  # already closed
 
 
-class CloseDeletedUserSessionsHook(SyncSessionHook):
+class UserDeletedHook(SyncSessionHook):
+    """Watch deletion of user to close its opened session"""
     __regid__ = 'closession'
     __select__ = SyncSessionHook.__select__ & is_instance('CWUser')
     events = ('after_delete_entity',)
 
     def __call__(self):
-        for session in get_user_sessions(self._cw.repo, self.entity.eid):
-            _DelUserOp(self._cw, session.sessionid)
+        for session in get_user_sessions(self._cw, self.entity.eid):
+            _CloseSessionOp(self._cw, session=session)
 
 
 # CWProperty hooks #############################################################
 
-class _DelCWPropertyOp(hook.Operation):
-    """a user's custom properties has been deleted"""
-    cwpropdict = key = None # make pylint happy
 
-    def postcommit_event(self):
-        """the observed connections set has been commited"""
-        try:
-            del self.cwpropdict[self.key]
-        except KeyError:
-            self.error('%s has no associated value', self.key)
+class _UserPropertyOperation(CachedValueMixin, hook.Operation):
+    """Base class for property operation"""
+    value_name = 'properties'
+    key = None  # make pylint happy
 
 
-class _ChangeCWPropertyOp(hook.Operation):
-    """a user's custom properties has been added/changed"""
-    cwpropdict = key = value = None # make pylint happy
+class _ChangeUserCWPropertyOp(_UserPropertyOperation):
+    """Synchronize cached user's properties when one has been added/updated"""
+    value = None  # make pylint happy
 
     def postcommit_event(self):
-        """the observed connections set has been commited"""
-        self.cwpropdict[self.key] = self.value
+        cached_props = self.cached_value
+        if cached_props is not None:
+            cached_props[self.key] = self.value
+            self.update_cached_value(cached_props)
 
 
-class _AddCWPropertyOp(hook.Operation):
-    """a user's custom properties has been added/changed"""
-    cwprop = None # make pylint happy
+class _DelUserCWPropertyOp(_UserPropertyOperation):
+    """Synchronize cached user's properties when one has been deleted"""
 
     def postcommit_event(self):
-        """the observed connections set has been commited"""
+        cached_props = self.cached_value
+        if cached_props is not None:
+            cached_props.pop(self.key, None)
+            self.update_cached_value(cached_props)
+
+
+class _ChangeSiteWideCWPropertyOp(hook.Operation):
+    """Synchronize site wide properties when one has been added/updated"""
+    cwprop = None  # make pylint happy
+
+    def postcommit_event(self):
         cwprop = self.cwprop
         if not cwprop.for_user:
             self.cnx.vreg['propertyvalues'][cwprop.pkey] = \
                 self.cnx.vreg.typed_value(cwprop.pkey, cwprop.value)
-        # if for_user is set, update is handled by a ChangeCWPropertyOp operation
+        # if for_user is set, update is handled by a ChangeUserCWPropertyOp operation
+
+
+class _DelSiteWideCWPropertyOp(hook.Operation):
+    """Synchronize site wide properties when one has been deleted"""
+    key = None  # make pylint happy
+
+    def postcommit_event(self):
+        self.cnx.vreg['propertyvalues'].pop(self.key, None)
 
 
 class AddCWPropertyHook(SyncSessionHook):
@@ -169,12 +202,11 @@
             msg = _('unknown property key %s')
             raise validation_error(self.entity, {('pkey', 'subject'): msg}, (key,))
         except ValueError as ex:
-            raise validation_error(self.entity,
-                                  {('value', 'subject'): str(ex)})
-        if not cnx.user.matching_groups('managers'):
+            raise validation_error(self.entity, {('value', 'subject'): str(ex)})
+        if cnx.user.matching_groups('managers'):
+            _ChangeSiteWideCWPropertyOp(cnx, cwprop=self.entity)
+        else:
             cnx.add_relation(self.entity.eid, 'for_user', cnx.user.eid)
-        else:
-            _AddCWPropertyOp(cnx, cwprop=self.entity)
 
 
 class UpdateCWPropertyHook(AddCWPropertyHook):
@@ -197,13 +229,10 @@
         except ValueError as ex:
             raise validation_error(entity, {('value', 'subject'): str(ex)})
         if entity.for_user:
-            for session in get_user_sessions(cnx.repo, entity.for_user[0].eid):
-                _ChangeCWPropertyOp(cnx, cwpropdict=session.user.properties,
-                                    key=key, value=value)
+            for session in get_user_sessions(cnx, entity.for_user[0].eid):
+                _ChangeUserCWPropertyOp(cnx, session=session, key=key, value=value)
         else:
-            # site wide properties
-            _ChangeCWPropertyOp(cnx, cwpropdict=cnx.vreg['propertyvalues'],
-                              key=key, value=value)
+            _ChangeSiteWideCWPropertyOp(cnx, cwprop=self.entity)
 
 
 class DeleteCWPropertyHook(AddCWPropertyHook):
@@ -217,8 +246,7 @@
                 # if for_user was set, delete already handled by hook on for_user deletion
                 break
         else:
-            _DelCWPropertyOp(cnx, cwpropdict=cnx.vreg['propertyvalues'],
-                             key=self.entity.pkey)
+            _DelSiteWideCWPropertyOp(cnx, key=self.entity.pkey)
 
 
 class AddForUserRelationHook(SyncSessionHook):
@@ -236,9 +264,8 @@
         if cnx.vreg.property_info(key)['sitewide']:
             msg = _("site-wide property can't be set for user")
             raise validation_error(eidfrom, {('for_user', 'subject'): msg})
-        for session in get_user_sessions(cnx.repo, self.eidto):
-            _ChangeCWPropertyOp(cnx, cwpropdict=session.user.properties,
-                              key=key, value=value)
+        for session in get_user_sessions(cnx, self.eidto):
+            _ChangeUserCWPropertyOp(cnx, session=session, key=key, value=value)
 
 
 class DelForUserRelationHook(AddForUserRelationHook):
@@ -250,5 +277,5 @@
         key = cnx.execute('Any K WHERE P eid %(x)s, P pkey K', {'x': self.eidfrom})[0][0]
         cnx.transaction_data.setdefault('pendingrelations', []).append(
             (self.eidfrom, self.rtype, self.eidto))
-        for session in get_user_sessions(cnx.repo, self.eidto):
-            _DelCWPropertyOp(cnx, cwpropdict=session.user.properties, key=key)
+        for session in get_user_sessions(cnx, self.eidto):
+            _DelUserCWPropertyOp(cnx, session=session, key=key)
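
The CachedValueMixin introduced above gives per-user session-data caches a single update path. A hypothetical operation following the same shape as _AddGroupOp ('tags' is not a real cubicweb cache; it only illustrates the mixin):

    from cubicweb.server import hook
    from cubicweb.hooks.syncsession import CachedValueMixin, get_user_sessions


    class _AddTagOp(CachedValueMixin, hook.Operation):
        value_name = 'tags'
        tag = None  # make pylint happy

        def postcommit_event(self):
            cached_tags = self.cached_value      # None when nothing is cached yet
            if cached_tags is not None:
                cached_tags.add(self.tag)
                self.update_cached_value(cached_tags)

    # typically instantiated from a hook:
    #   for session in get_user_sessions(self._cw, user_eid):
    #       _AddTagOp(self._cw, session=session, tag=name)
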
--- a/cubicweb/hooks/test/requirements.txt	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,1 +0,0 @@
-psycopg2
--- a/cubicweb/hooks/test/unittest_notification.py	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,39 +0,0 @@
-# copyright 2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-#
-# This file is part of CubicWeb.
-#
-# CubicWeb is free software: you can redistribute it and/or modify it under the
-# terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option)
-# any later version.
-#
-# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""tests for notification hooks"""
-
-from cubicweb.devtools.testlib import CubicWebTC
-
-
-class NotificationHooksTC(CubicWebTC):
-
-    def test_entity_update(self):
-        """Check transaction_data['changes'] filled by "notifentityupdated" hook.
-        """
-        with self.admin_access.repo_cnx() as cnx:
-            root = cnx.create_entity('Folder', name=u'a')
-            cnx.commit()
-            root.cw_set(name=u'b')
-            self.assertIn('changes', cnx.transaction_data)
-            self.assertEqual(cnx.transaction_data['changes'],
-                             {root.eid: set([('name', u'a', u'b')])})
-
-
-if __name__ == '__main__':
-    from logilab.common.testlib import unittest_main
-    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/hooks/test/unittest_notificationhooks.py	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,39 @@
+# copyright 2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+"""tests for notification hooks"""
+
+from cubicweb.devtools.testlib import CubicWebTC
+
+
+class NotificationHooksTC(CubicWebTC):
+
+    def test_entity_update(self):
+        """Check transaction_data['changes'] filled by "notifentityupdated" hook.
+        """
+        with self.admin_access.repo_cnx() as cnx:
+            root = cnx.create_entity('Folder', name=u'a')
+            cnx.commit()
+            root.cw_set(name=u'b')
+            self.assertIn('changes', cnx.transaction_data)
+            self.assertEqual(cnx.transaction_data['changes'],
+                             {root.eid: set([('name', u'a', u'b')])})
+
+
+if __name__ == '__main__':
+    from logilab.common.testlib import unittest_main
+    unittest_main()
--- a/cubicweb/hooks/test/unittest_syncschema.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/hooks/test/unittest_syncschema.py	Thu Oct 20 18:28:46 2016 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2016 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -17,9 +17,8 @@
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """cubicweb.server.hooks.syncschema unit and functional tests"""
 
-from logilab.common.testlib import unittest_main
+from yams.constraints import BoundaryConstraint
 
-from yams.constraints import BoundaryConstraint
 from cubicweb import ValidationError, Binary
 from cubicweb.schema import META_RTYPES
 from cubicweb.devtools import startpgcluster, stoppgcluster, PostgresApptestConfiguration
@@ -87,7 +86,7 @@
             attreid = cnx.execute('INSERT CWAttribute X: X cardinality "11", '
                                   'X defaultval %(default)s, X indexed TRUE, '
                                   'X relation_type RT, X from_entity E, X to_entity F '
-                                   'WHERE RT name "name", E name "Societe2", '
+                                  'WHERE RT name "name", E name "Societe2", '
                                   'F name "String"',
                                    {'default': Binary.zpickle('noname')})[0][0]
             self._set_attr_perms(cnx, attreid)
@@ -111,8 +110,8 @@
             self.assertEqual(rset.rows, [[s2eid]])
             # check that when a relation definition is deleted, existing relations are deleted
             rdefeid = cnx.execute('INSERT CWRelation X: X cardinality "**", X relation_type RT, '
-                                   '   X from_entity E, X to_entity E '
-                                   'WHERE RT name "concerne2", E name "CWUser"')[0][0]
+                                  '   X from_entity E, X to_entity E '
+                                  'WHERE RT name "concerne2", E name "CWUser"')[0][0]
             self._set_perms(cnx, rdefeid)
             cnx.commit()
             cnx.execute('DELETE CWRelation X WHERE X eid %(x)s', {'x': concerne2_rdef_eid})
@@ -136,10 +135,10 @@
         with self.admin_access.repo_cnx() as cnx:
             META_RTYPES.add('custom_meta')
             cnx.execute('INSERT CWRType X: X name "custom_meta", X description "", '
-                         'X final FALSE, X symmetric FALSE')
+                        'X final FALSE, X symmetric FALSE')
             cnx.commit()
             eeid = cnx.execute('INSERT CWEType X: X name "NEWEtype", '
-                                'X description "", X final FALSE')[0][0]
+                               'X description "", X final FALSE')[0][0]
             self._set_perms(cnx, eeid)
             cnx.commit()
             META_RTYPES.remove('custom_meta')
@@ -148,15 +147,15 @@
         with self.admin_access.repo_cnx() as cnx:
             META_RTYPES.add('custom_meta')
             cnx.execute('INSERT CWRType X: X name "custom_meta", X description "", '
-                         'X final FALSE, X symmetric FALSE')
+                        'X final FALSE, X symmetric FALSE')
             cnx.commit()
             rdefeid = cnx.execute('INSERT CWRelation X: X cardinality "**", X relation_type RT, '
-                                   '   X from_entity E, X to_entity E '
-                                   'WHERE RT name "custom_meta", E name "CWUser"')[0][0]
+                                  '   X from_entity E, X to_entity E '
+                                  'WHERE RT name "custom_meta", E name "CWUser"')[0][0]
             self._set_perms(cnx, rdefeid)
             cnx.commit()
             eeid = cnx.execute('INSERT CWEType X: X name "NEWEtype", '
-                                'X description "", X final FALSE')[0][0]
+                               'X description "", X final FALSE')[0][0]
             self._set_perms(cnx, eeid)
             cnx.commit()
             META_RTYPES.remove('custom_meta')
@@ -178,14 +177,14 @@
                                                     'S name N')]
             self.assertIn('subdiv', snames)
 
-
     def test_perms_synchronization_1(self):
         with self.admin_access.repo_cnx() as cnx:
             schema = self.repo.schema
             self.assertEqual(schema['CWUser'].get_groups('read'), set(('managers', 'users')))
             self.assertTrue(cnx.execute('Any X, Y WHERE X is CWEType, X name "CWUser", '
                                         'Y is CWGroup, Y name "users"')[0])
-            cnx.execute('DELETE X read_permission Y WHERE X is CWEType, X name "CWUser", Y name "users"')
+            cnx.execute('DELETE X read_permission Y '
+                        'WHERE X is CWEType, X name "CWUser", Y name "users"')
             self.assertEqual(schema['CWUser'].get_groups('read'), set(('managers', 'users', )))
             cnx.commit()
             self.assertEqual(schema['CWUser'].get_groups('read'), set(('managers',)))
@@ -228,7 +227,7 @@
             cnx.execute('DELETE X read_permission Y WHERE X eid %s' % eeid)
             cnx.execute('SET X final FALSE WHERE X eid %s' % eeid)
             cnx.execute('SET X read_permission Y WHERE X eid %s, Y eid in (%s, %s)'
-                         % (eeid, groupeids[0], groupeids[1]))
+                        % (eeid, groupeids[0], groupeids[1]))
             cnx.commit()
             cnx.execute('Any X WHERE X is CWEType, X name "CWEType"')
 
@@ -244,7 +243,7 @@
                 self.assertFalse(self.schema['state_of'].inlined)
                 self.assertFalse(self.index_exists(cnx, 'State', 'state_of'))
                 rset = cnx.execute('Any X, Y WHERE X state_of Y')
-                self.assertEqual(len(rset), 2) # user states
+                self.assertEqual(len(rset), 2)  # user states
             finally:
                 cnx.execute('SET X inlined TRUE WHERE X name "state_of"')
                 self.assertFalse(self.schema['state_of'].inlined)
@@ -293,8 +292,8 @@
     def test_required_change_1(self):
         with self.admin_access.repo_cnx() as cnx:
             cnx.execute('SET DEF cardinality "?1" '
-                         'WHERE DEF relation_type RT, DEF from_entity E,'
-                         'RT name "title", E name "Bookmark"')
+                        'WHERE DEF relation_type RT, DEF from_entity E,'
+                        'RT name "title", E name "Bookmark"')
             cnx.commit()
             # should now be able to add bookmark without title
             cnx.execute('INSERT Bookmark X: X path "/view"')
@@ -303,24 +302,25 @@
     def test_required_change_2(self):
         with self.admin_access.repo_cnx() as cnx:
             cnx.execute('SET DEF cardinality "11" '
-                         'WHERE DEF relation_type RT, DEF from_entity E,'
-                         'RT name "surname", E name "CWUser"')
+                        'WHERE DEF relation_type RT, DEF from_entity E,'
+                        'RT name "surname", E name "CWUser"')
             cnx.execute('SET U surname "Doe" WHERE U surname NULL')
             cnx.commit()
             # should not be able anymore to add cwuser without surname
             self.assertRaises(ValidationError, self.create_user, cnx, "toto")
             cnx.rollback()
             cnx.execute('SET DEF cardinality "?1" '
-                         'WHERE DEF relation_type RT, DEF from_entity E,'
-                         'RT name "surname", E name "CWUser"')
+                        'WHERE DEF relation_type RT, DEF from_entity E,'
+                        'RT name "surname", E name "CWUser"')
             cnx.commit()
 
     def test_add_attribute_to_base_class(self):
         with self.admin_access.repo_cnx() as cnx:
-            attreid = cnx.execute('INSERT CWAttribute X: X cardinality "11", X defaultval %(default)s, '
-                                   'X indexed TRUE, X relation_type RT, X from_entity E, X to_entity F '
-                                   'WHERE RT name "messageid", E name "BaseTransition", F name "String"',
-                                   {'default': Binary.zpickle('noname')})[0][0]
+            attreid = cnx.execute(
+                'INSERT CWAttribute X: X cardinality "11", X defaultval %(default)s, '
+                'X indexed TRUE, X relation_type RT, X from_entity E, X to_entity F '
+                'WHERE RT name "messageid", E name "BaseTransition", F name "String"',
+                {'default': Binary.zpickle('noname')})[0][0]
             assert cnx.execute('SET X read_permission Y WHERE X eid %(x)s, Y name "managers"',
                                {'x': attreid})
             cnx.commit()
@@ -357,12 +357,12 @@
             rset = cnx.execute('Any X WHERE X has_text "rick.roll"')
             self.assertIn(cnx.user.eid, [item[0] for item in rset])
             assert cnx.execute('SET R fulltext_container NULL '
-                                'WHERE R name "use_email"')
+                               'WHERE R name "use_email"')
             cnx.commit()
             rset = cnx.execute('Any X WHERE X has_text "rick.roll"')
             self.assertIn(target.eid, [item[0] for item in rset])
             assert cnx.execute('SET R fulltext_container "subject" '
-                                'WHERE R name "use_email"')
+                               'WHERE R name "use_email"')
             cnx.commit()
             rset = cnx.execute('Any X WHERE X has_text "rick.roll"')
             self.assertIn(cnx.user.eid, [item[0] for item in rset])
@@ -371,14 +371,11 @@
         with self.admin_access.repo_cnx() as cnx:
             rdef = self.schema['Transition'].rdef('type')
             cstr = rdef.constraint_by_type('StaticVocabularyConstraint')
-            if not getattr(cstr, 'eid', None):
-                # bug in schema reloading, constraint's eid not restored
-                self.skipTest('start me alone')
             cnx.execute('SET X value %(v)s WHERE X eid %(x)s',
-                         {'x': cstr.eid, 'v': u"u'normal', u'auto', u'new'"})
+                        {'x': cstr.eid, 'v': u"u'normal', u'auto', u'new'"})
             cnx.execute('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, '
                         'EDEF constrained_by X WHERE CT name %(ct)s, EDEF eid %(x)s',
-                         {'ct': 'SizeConstraint', 'value': u'max=10', 'x': rdef.eid})
+                        {'ct': 'SizeConstraint', 'value': u'max=10', 'x': rdef.eid})
             cnx.commit()
             cstr = rdef.constraint_by_type('StaticVocabularyConstraint')
             self.assertEqual(cstr.values, (u'normal', u'auto', u'new'))
@@ -405,4 +402,5 @@
 
 
 if __name__ == '__main__':
-    unittest_main()
+    import unittest
+    unittest.main()
--- a/cubicweb/misc/migration/3.21.0_Any.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/misc/migration/3.21.0_Any.py	Thu Oct 20 18:28:46 2016 +0200
@@ -162,8 +162,7 @@
     cstr = rdef.constraint_by_eid(cwconstraint.eid)
     if cstr.type() not in ('BoundaryConstraint', 'IntervalBoundConstraint', 'StaticVocabularyConstraint'):
         continue
-    cstrname, check = check_constraint(rdef.subject, rdef.object, rdef.rtype.type,
-            cstr, helper, prefix='cw_')
+    cstrname, check = check_constraint(rdef, cstr, helper, prefix='cw_')
     args = {'e': rdef.subject.type, 'c': cstrname, 'v': check}
     if repo.system_source.dbdriver == 'postgres':
         sql('ALTER TABLE cw_%(e)s DROP CONSTRAINT IF EXISTS %(c)s' % args, ask_confirm=False)
--- a/cubicweb/misc/migration/3.23.0_Any.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/misc/migration/3.23.0_Any.py	Thu Oct 20 18:28:46 2016 +0200
@@ -70,8 +70,7 @@
                            'StaticVocabularyConstraint'):
         # These cannot be translate into backend CHECK.
         continue
-    cstrname, check = check_constraint(rdef.subject, rdef.object, rdef.rtype.type,
-                                       cstr, helper, prefix='cw_')
+    cstrname, check = check_constraint(rdef, cstr, helper, prefix='cw_')
     args = {'e': rdef.subject.type, 'c': cstrname, 'v': check}
     sql('ALTER TABLE cw_%(e)s ADD CONSTRAINT %(c)s CHECK(%(v)s)' % args)
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.24.0_Any.py	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,4 @@
+# Check the CW versions and add the entity only if needed?
+add_entity_type('CWSession')
+rql('DELETE CWProperty X WHERE X pkey "system.version.pyramid"',
+    ask_confirm=False)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/pyramid/__init__.py	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,194 @@
+import os
+from warnings import warn
+import wsgicors
+
+from cubicweb.cwconfig import CubicWebConfiguration as cwcfg
+from pyramid.config import Configurator
+from pyramid.settings import asbool, aslist
+
+try:
+    from configparser import SafeConfigParser
+except ImportError:
+    from ConfigParser import SafeConfigParser
+
+
+def make_cubicweb_application(cwconfig, settings=None):
+    """
+    Create a pyramid-based CubicWeb instance from a cubicweb configuration.
+
+    It is initially meant to be used by the 'pyramid' command of cubicweb-ctl.
+
+    :param cwconfig: A CubicWeb configuration
+    :returns: A Pyramid config object
+    """
+    settings = dict(settings) if settings else {}
+    settings.update(settings_from_cwconfig(cwconfig))
+    config = Configurator(settings=settings)
+    config.registry['cubicweb.config'] = cwconfig
+    config.include('cubicweb.pyramid')
+    return config
+
+def settings_from_cwconfig(cwconfig):
+    '''
+    Extract settings from pyramid.ini and pyramid-debug.ini (if in debug)
+
+    Can be used to configure WSGI middleware with settings from pyramid.ini files.
+
+    :param cwconfig: A CubicWeb configuration
+    :returns: A settings dictionary
+    '''
+    settings_filenames = [os.path.join(cwconfig.apphome, 'pyramid.ini')]
+    settings = {}
+    if cwconfig.debugmode:
+        settings_filenames.insert(
+            0, os.path.join(cwconfig.apphome, 'pyramid-debug.ini'))
+
+        settings.update({
+            'pyramid.debug_authorization': True,
+            'pyramid.debug_notfound': True,
+            'pyramid.debug_routematch': True,
+            'pyramid.reload_templates': True,
+        })
+
+    for fname in settings_filenames:
+        if os.path.exists(fname):
+            cp = SafeConfigParser()
+            cp.read(fname)
+            settings.update(cp.items('main'))
+            break
+
+    return settings
+
+
+def wsgi_application_from_cwconfig(
+        cwconfig,
+        profile=False, profile_output=None, profile_dump_every=None):
+    """ Build a WSGI application from a cubicweb configuration
+
+    :param cwconfig: A CubicWeb configuration
+    :param profile: Enable profiling. See :ref:`profiling`.
+    :param profile_output: Profiling output filename. See :ref:`profiling`.
+    :param profile_dump_every: Profiling number of requests before dumping the
+                               stats. See :ref:`profiling`.
+
+    :returns: A fully operational WSGI application
+    """
+    config = make_cubicweb_application(cwconfig)
+    profile = profile or asbool(config.registry.settings.get(
+        'cubicweb.profile.enable', False))
+    if profile:
+        config.add_route('profile_ping', '_profile/ping')
+        config.add_route('profile_cnx', '_profile/cnx')
+        config.scan('cubicweb.pyramid.profile')
+    app = config.make_wsgi_app()
+    # This replaces completely web/cors.py, which is not used by
+    # cubicweb.pyramid anymore
+    app = wsgicors.CORS(
+        app,
+        origin=' '.join(cwconfig['access-control-allow-origin']),
+        headers=', '.join(cwconfig['access-control-allow-headers']),
+        methods=', '.join(cwconfig['access-control-allow-methods']),
+        credentials='true')
+
+    if profile:
+        from cubicweb.pyramid.profile import wsgi_profile
+        filename = profile_output or config.registry.settings.get(
+            'cubicweb.profile.output', 'program.prof')
+        dump_every = profile_dump_every or config.registry.settings.get(
+            'cubicweb.profile.dump_every', 100)
+        app = wsgi_profile(app, filename=filename, dump_every=dump_every)
+    return app
+
+
+def wsgi_application(instance_name=None, debug=None):
+    """ Build a WSGI application from a cubicweb instance name
+
+    :param instance_name: Name of the cubicweb instance (optional). If not
+                          provided, :envvar:`CW_INSTANCE` must exist.
+    :param debug: Enable/disable the debug mode. If set to True or False,
+                  overrides :envvar:`CW_DEBUG`.
+
+    The following environment variables are used if they exist:
+
+    .. envvar:: CW_INSTANCE
+
+        A CubicWeb instance name.
+
+    .. envvar:: CW_DEBUG
+
+        If defined, the debug mode is enabled.
+
+    The function can be used as an entry point for third-party WSGI containers.
+    Below is a sample uWSGI configuration file:
+
+    .. code-block:: ini
+
+        [uwsgi]
+        http = 127.0.1.1:8080
+        env = CW_INSTANCE=myinstance
+        env = CW_DEBUG=1
+        module = cubicweb.pyramid:wsgi_application()
+        virtualenv = /home/user/.virtualenvs/myvirtualenv
+        processes = 1
+        threads = 8
+        stats = 127.0.0.1:9191
+        plugins = http,python
+
+    """
+    if instance_name is None:
+        instance_name = os.environ['CW_INSTANCE']
+    if debug is None:
+        debug = 'CW_DEBUG' in os.environ
+
+    cwconfig = cwcfg.config_for(instance_name, debugmode=debug)
+
+    return wsgi_application_from_cwconfig(cwconfig)
+
+
+def includeme(config):
+    """Set-up a CubicWeb instance.
+
+    The CubicWeb instance can be set in several ways:
+
+    -   Provide an already loaded CubicWeb config instance in the registry:
+
+        .. code-block:: python
+
+            config.registry['cubicweb.config'] = your_config_instance
+
+    -   Provide an instance name in the pyramid settings with
+        :confval:`cubicweb.instance`.
+
+    """
+    cwconfig = config.registry.get('cubicweb.config')
+
+    if cwconfig is None:
+        debugmode = asbool(
+            config.registry.settings.get('cubicweb.debug', False))
+        cwconfig = cwcfg.config_for(
+            config.registry.settings['cubicweb.instance'], debugmode=debugmode)
+        config.registry['cubicweb.config'] = cwconfig
+
+    if cwconfig.debugmode:
+        try:
+            config.include('pyramid_debugtoolbar')
+        except ImportError:
+            warn('pyramid_debugtoolbar package not available, install it to '
+                 'get UI debug features', RuntimeWarning)
+
+    config.registry['cubicweb.repository'] = repo = cwconfig.repository()
+    config.registry['cubicweb.registry'] = repo.vreg
+
+    if asbool(config.registry.settings.get('cubicweb.defaults', True)):
+        config.include('cubicweb.pyramid.defaults')
+
+    for name in aslist(config.registry.settings.get('cubicweb.includes', [])):
+        config.include(name)
+
+    config.include('cubicweb.pyramid.tools')
+    config.include('cubicweb.pyramid.predicates')
+    config.include('cubicweb.pyramid.core')
+    config.include('cubicweb.pyramid.syncsession')
+
+    if asbool(config.registry.settings.get('cubicweb.bwcompat', True)):
+        config.include('cubicweb.pyramid.bwcompat')
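
For reference, a minimal sketch of a wsgi.py module relying on the wsgi_application()
entry point above (the module name and the 'myinstance' instance name are placeholders,
not part of this changeset):

    # wsgi.py -- hypothetical module loaded by a WSGI container
    import os

    # the instance name may also be provided by the container environment
    os.environ.setdefault('CW_INSTANCE', 'myinstance')

    from cubicweb.pyramid import wsgi_application

    # build the WSGI callable once at import time
    application = wsgi_application()
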
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/pyramid/auth.py	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,180 @@
+import datetime
+import logging
+import warnings
+
+from zope.interface import implementer
+
+from pyramid.settings import asbool
+from pyramid.authorization import ACLAuthorizationPolicy
+from cubicweb.pyramid.core import get_principals
+from pyramid_multiauth import MultiAuthenticationPolicy
+
+from pyramid.authentication import AuthTktAuthenticationPolicy
+
+from pyramid.interfaces import IAuthenticationPolicy
+
+log = logging.getLogger(__name__)
+
+
+@implementer(IAuthenticationPolicy)
+class UpdateLoginTimeAuthenticationPolicy(object):
+    """An authentication policy that update the user last_login_time.
+
+    The update is done in the 'remember' method, which is called by the login
+    views login,
+
+    Usually used via :func:`includeme`.
+    """
+
+    def authenticated_userid(self, request):
+        pass
+
+    def effective_principals(self, request):
+        return ()
+
+    def remember(self, request, principal, **kw):
+        try:
+            repo = request.registry['cubicweb.repository']
+            with repo.internal_cnx() as cnx:
+                cnx.execute(
+                    "SET U last_login_time %(now)s WHERE U eid %(user)s", {
+                        'now': datetime.datetime.now(),
+                        'user': principal})
+                cnx.commit()
+        except Exception:
+            log.exception("Failed to update last_login_time")
+        return ()
+
+    def forget(self, request):
+        return ()
+
+
+class CWAuthTktAuthenticationPolicy(AuthTktAuthenticationPolicy):
+    """
+    An authentication policy that inhibits the call to 'remember' if a
+    'persistent' argument is passed to it and differs from the value that
+    was passed to the constructor.
+
+    This allows combining two policies with different settings and selecting
+    one of them by simply setting this argument.
+    """
+    def __init__(self, secret, persistent, defaults={}, prefix='', **settings):
+        self.persistent = persistent
+        unset = object()
+        kw = {}
+        # load string settings
+        for name in ('cookie_name', 'path', 'domain', 'hashalg'):
+            value = settings.get(prefix + name, defaults.get(name, unset))
+            if value is not unset:
+                kw[name] = value
+        # load boolean settings
+        for name in ('secure', 'include_ip', 'http_only', 'wild_domain',
+                     'parent_domain', 'debug'):
+            value = settings.get(prefix + name, defaults.get(name, unset))
+            if value is not unset:
+                kw[name] = asbool(value)
+        # load int settings
+        for name in ('timeout', 'reissue_time', 'max_age'):
+            value = settings.get(prefix + name, defaults.get(name, unset))
+            if value is not unset:
+                kw[name] = int(value)
+        super(CWAuthTktAuthenticationPolicy, self).__init__(secret, **kw)
+
+    def remember(self, request, principals, **kw):
+        if 'persistent' not in kw or kw.pop('persistent') == self.persistent:
+            return super(CWAuthTktAuthenticationPolicy, self).remember(
+                request, principals, **kw)
+        else:
+            return ()
+
+
+def includeme(config):
+    """ Activate the CubicWeb AuthTkt authentication policy.
+
+    Usually called via ``config.include('cubicweb.pyramid.auth')``.
+
+    See also :ref:`defaults_module`
+    """
+    settings = config.registry.settings
+
+    policies = []
+
+    if asbool(settings.get('cubicweb.auth.update_login_time', True)):
+        policies.append(UpdateLoginTimeAuthenticationPolicy())
+
+    if asbool(settings.get('cubicweb.auth.authtkt', True)):
+        session_prefix = 'cubicweb.auth.authtkt.session.'
+        persistent_prefix = 'cubicweb.auth.authtkt.persistent.'
+
+        try:
+            secret = config.registry['cubicweb.config']['pyramid-auth-secret']
+            warnings.warn(
+                "pyramid-auth-secret from all-in-one is now "
+                "cubicweb.auth.authtkt.[session|persistent].secret",
+                DeprecationWarning)
+        except Exception:
+            secret = 'notsosecret'
+
+        session_secret = settings.get(
+            session_prefix + 'secret', secret)
+        persistent_secret = settings.get(
+            persistent_prefix + 'secret', secret)
+
+        if 'notsosecret' in (session_secret, persistent_secret):
+            warnings.warn('''
+
+                !! SECURITY WARNING !!
+
+                The authentication cookies are signed with a static secret key.
+
+                Configure the following options in your pyramid.ini file:
+
+                - cubicweb.auth.authtkt.session.secret
+                - cubicweb.auth.authtkt.persistent.secret
+
+                YOU SHOULD STOP THIS INSTANCE unless you really know what you
+                are doing!!
+
+            ''')
+
+        policies.append(
+            CWAuthTktAuthenticationPolicy(
+                session_secret, False,
+                defaults={
+                    'hashalg': 'sha512',
+                    'cookie_name': 'auth_tkt',
+                    'timeout': 1200,
+                    'reissue_time': 120,
+                    'http_only': True,
+                    'secure': True
+                },
+                prefix=session_prefix,
+                **settings
+            )
+        )
+
+        policies.append(
+            CWAuthTktAuthenticationPolicy(
+                persistent_secret, True,
+                defaults={
+                    'hashalg': 'sha512',
+                    'cookie_name': 'pauth_tkt',
+                    'max_age': 3600*24*30,
+                    'reissue_time': 3600*24,
+                    'http_only': True,
+                    'secure': True
+                },
+                prefix=persistent_prefix,
+                **settings
+            )
+        )
+
+    kw = {}
+    if asbool(settings.get('cubicweb.auth.groups_principals', True)):
+        kw['callback'] = get_principals
+
+    authpolicy = MultiAuthenticationPolicy(policies, **kw)
+    config.registry['cubicweb.authpolicy'] = authpolicy
+
+    config.set_authentication_policy(authpolicy)
+    config.set_authorization_policy(ACLAuthorizationPolicy())
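
As a hedged illustration, the authtkt secrets can also be provided programmatically
through the settings passed to make_cubicweb_application (the instance name and secret
values below are placeholders; only the setting keys come from the code above):

    from cubicweb.cwconfig import CubicWebConfiguration as cwcfg
    from cubicweb.pyramid import make_cubicweb_application

    cwconfig = cwcfg.config_for('myinstance')  # hypothetical instance
    settings = {
        'cubicweb.auth.authtkt.session.secret': 'replace-me-session',
        'cubicweb.auth.authtkt.persistent.secret': 'replace-me-persistent',
    }
    config = make_cubicweb_application(cwconfig, settings=settings)
    app = config.make_wsgi_app()
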
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/pyramid/bwcompat.py	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,213 @@
+import sys
+import logging
+
+from pyramid import security
+from pyramid import tweens
+from pyramid.httpexceptions import HTTPSeeOther
+from pyramid import httpexceptions
+from pyramid.settings import asbool
+
+import cubicweb
+import cubicweb.web
+
+from cubicweb.web.application import CubicWebPublisher
+
+from cubicweb.web import LogOut, PublishException
+
+from cubicweb.pyramid.core import cw_to_pyramid
+
+
+log = logging.getLogger(__name__)
+
+
+class PyramidSessionHandler(object):
+    """A CW Session handler that rely on the pyramid API to fetch the needed
+    informations.
+
+    It implements the :class:`cubicweb.web.application.CookieSessionHandler`
+    API.
+    """
+
+    def __init__(self, appli):
+        self.appli = appli
+
+    def get_session(self, req):
+        return req._request.cw_session
+
+    def logout(self, req, goto_url):
+        raise LogOut(url=goto_url)
+
+
+class CubicWebPyramidHandler(object):
+    """ A Pyramid request handler that rely on a cubicweb instance to do the
+    whole job
+
+    :param appli: A CubicWeb 'Application' object.
+    """
+    def __init__(self, appli):
+        self.appli = appli
+
+    def __call__(self, request):
+        """
+        Handler that mimics what CubicWebPublisher.main_handle_request and
+        CubicWebPublisher.core_handle do
+        """
+
+        # XXX The main handler of CW forbids anonymous https connections
+        # I guess we can drop this "feature" but in doubt I leave this comment
+        # so we don't forget about it. (cdevienne)
+
+        req = request.cw_request
+        vreg = request.registry['cubicweb.registry']
+
+        try:
+            content = None
+            try:
+                with cw_to_pyramid(request):
+                    ctrlid, rset = self.appli.url_resolver.process(req,
+                                                                   req.path)
+
+                    try:
+                        controller = vreg['controllers'].select(
+                            ctrlid, req, appli=self.appli)
+                    except cubicweb.NoSelectableObject:
+                        raise httpexceptions.HTTPUnauthorized(
+                            req._('not authorized'))
+
+                    req.update_search_state()
+                    content = controller.publish(rset=rset)
+
+                    # XXX this auto-commit should be handled by the cw_request
+                    # cleanup or the pyramid transaction manager.
+                    # It is kept here to have the ValidationError handling bw
+                    # compatible
+                    if req.cnx:
+                        txuuid = req.cnx.commit()
+                        # commited = True
+                        if txuuid is not None:
+                            req.data['last_undoable_transaction'] = txuuid
+            except cubicweb.web.ValidationError as ex:
+                # XXX The validation_error_handler implementation is light, we
+                # should redo it better in cw_to_pyramid, so it can be properly
+                # handled when raised from a cubicweb view.
+                # BUT the real handling of validation errors should be done
+                # earlier in the controllers, not here. In the end, the
+                # ValidationError should never be handled here.
+                content = self.appli.validation_error_handler(req, ex)
+            except cubicweb.web.RemoteCallFailed as ex:
+                # XXX The default pyramid error handler (or one that we provide
+                # for this exception) should be enough
+                # content = self.appli.ajax_error_handler(req, ex)
+                raise
+
+            if content is not None:
+                request.response.body = content
+
+
+        except LogOut as ex:
+            # The actual 'logging out' logic should be in a separate function
+            # that is accessible by the pyramid views
+            headers = security.forget(request)
+            raise HTTPSeeOther(ex.url, headers=headers)
+        except cubicweb.AuthenticationError:
+            # Will occur upon access to req.cnx which is a
+            # cubicweb.dbapi._NeedAuthAccessMock.
+            if not content:
+                content = vreg['views'].main_template(req, 'login')
+                request.response.status_code = 403
+                request.response.body = content
+        finally:
+            # XXX CubicWebPyramidRequest.headers_out should
+            # access directly the pyramid response headers.
+            request.response.headers.clear()
+            for k, v in req.headers_out.getAllRawHeaders():
+                for item in v:
+                    request.response.headers.add(k, item)
+
+        return request.response
+
+    def error_handler(self, exc, request):
+        req = request.cw_request
+        if isinstance(exc, httpexceptions.HTTPException):
+            request.response = exc
+        elif isinstance(exc, PublishException) and exc.status is not None:
+            request.response = httpexceptions.exception_response(exc.status)
+        else:
+            request.response = httpexceptions.HTTPInternalServerError()
+        request.response.cache_control = 'no-cache'
+        vreg = request.registry['cubicweb.registry']
+        excinfo = sys.exc_info()
+        req.reset_message()
+        if req.ajax_request:
+            content = self.appli.ajax_error_handler(req, exc)
+        else:
+            try:
+                req.data['ex'] = exc
+                req.data['excinfo'] = excinfo
+                errview = vreg['views'].select('error', req)
+                template = self.appli.main_template_id(req)
+                content = vreg['views'].main_template(req, template, view=errview)
+            except Exception:
+                content = vreg['views'].main_template(req, 'error-template')
+        log.exception(exc)
+        request.response.body = content
+        return request.response
+
+
+class TweenHandler(object):
+    """ A Pyramid tween handler that submit unhandled requests to a Cubicweb
+    handler.
+
+    The CubicWeb handler to use is expected to be in the pyramid registry, at
+    key ``'cubicweb.handler'``.
+    """
+    def __init__(self, handler, registry):
+        self.handler = handler
+        self.cwhandler = registry['cubicweb.handler']
+
+    def __call__(self, request):
+        if request.path.startswith('/https/'):
+            request.environ['PATH_INFO'] = request.environ['PATH_INFO'][6:]
+            assert not request.path.startswith('/https/')
+            request.scheme = 'https'
+        try:
+            response = self.handler(request)
+        except httpexceptions.HTTPNotFound:
+            response = self.cwhandler(request)
+        return response
+
+
+def includeme(config):
+    """ Set up a tween app that will handle the request if the main application
+    raises an HTTPNotFound exception.
+
+    This is to keep legacy compatibility for cubes that make use of the
+    cubicweb urlresolvers.
+
+    It provides, for now, support for cubicweb controllers, but this feature
+    will be reimplemented separately in a less compatible way.
+
+    It is automatically included by the configuration system, but can be
+    disabled in the :ref:`pyramid_settings`:
+
+    .. code-block:: ini
+
+        cubicweb.bwcompat = no
+    """
+    cwconfig = config.registry['cubicweb.config']
+    repository = config.registry['cubicweb.repository']
+    cwappli = CubicWebPublisher(
+        repository, cwconfig,
+        session_handler_fact=PyramidSessionHandler)
+    cwhandler = CubicWebPyramidHandler(cwappli)
+
+    config.registry['cubicweb.appli'] = cwappli
+    config.registry['cubicweb.handler'] = cwhandler
+
+    config.add_tween(
+        'cubicweb.pyramid.bwcompat.TweenHandler', under=tweens.EXCVIEW)
+    if asbool(config.registry.settings.get(
+            'cubicweb.bwcompat.errorhandler', True)):
+        config.add_view(cwhandler.error_handler, context=Exception)
+        # XXX why do i need this?
+        config.add_view(cwhandler.error_handler, context=httpexceptions.HTTPForbidden)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/pyramid/core.py	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,395 @@
+import itertools
+
+from contextlib import contextmanager
+from warnings import warn
+from cgi import FieldStorage
+
+import rql
+
+from cubicweb.web.request import CubicWebRequestBase
+from cubicweb import repoapi
+
+import cubicweb
+import cubicweb.web
+from cubicweb.server import session as cwsession
+
+from pyramid import httpexceptions
+
+from cubicweb.pyramid import tools
+
+import logging
+
+log = logging.getLogger(__name__)
+
+
+class Connection(cwsession.Connection):
+    """ A specialised Connection that access the session data through a
+    property.
+
+    This behavior makes sure the actual session data is not loaded until
+    actually accessed.
+    """
+    def __init__(self, session, *args, **kw):
+        super(Connection, self).__init__(session, *args, **kw)
+        self._session = session
+
+    def _get_session_data(self):
+        return self._session.data
+
+    def _set_session_data(self, data):
+        pass
+
+    _session_data = property(_get_session_data, _set_session_data)
+
+
+class Session(cwsession.Session):
+    """ A Session that access the session data through a property.
+
+    Along with :class:`Connection`, it avoids any load of the pyramid session
+    data until it is actually accessed.
+    """
+    def __init__(self, pyramid_request, user, repo):
+        super(Session, self).__init__(user, repo)
+        self._pyramid_request = pyramid_request
+
+    def get_data(self):
+        if not getattr(self, '_protect_data_access', False):
+            self._data_accessed = True
+            return self._pyramid_request.session
+
+    def set_data(self, data):
+        if getattr(self, '_data_accessed', False):
+            self._pyramid_request.session.clear()
+            self._pyramid_request.session.update(data)
+
+    data = property(get_data, set_data)
+
+    def new_cnx(self):
+        self._protect_data_access = True
+        try:
+            return Connection(self)
+        finally:
+            self._protect_data_access = False
+
+
+def cw_headers(request):
+    return itertools.chain(
+        *[[(k, item) for item in v]
+          for k, v in request.cw_request.headers_out.getAllRawHeaders()])
+
+
+@contextmanager
+def cw_to_pyramid(request):
+    """ Context manager to wrap a call to the cubicweb API.
+
+    All CW exceptions will be transformed into their pyramid equivalent.
+    When needed, some CW response bits may be converted too (mainly headers)."""
+    try:
+        yield
+    except cubicweb.web.Redirect as ex:
+        assert 300 <= ex.status < 400
+        raise httpexceptions.status_map[ex.status](
+            ex.location, headers=cw_headers(request))
+    except cubicweb.web.StatusResponse as ex:
+        warn('[3.16] StatusResponse is deprecated use req.status_out',
+             DeprecationWarning, stacklevel=2)
+        request.body = ex.content
+        request.status_int = ex.status
+    except cubicweb.web.Unauthorized as ex:
+        raise httpexceptions.HTTPForbidden(
+            request.cw_request._(
+                'You\'re not authorized to access this page. '
+                'If you think you should, please contact the site '
+                'administrator.'),
+            headers=cw_headers(request))
+    except cubicweb.web.Forbidden:
+        raise httpexceptions.HTTPForbidden(
+            request.cw_request._(
+                'This action is forbidden. '
+                'If you think it should be allowed, please contact the site '
+                'administrator.'),
+            headers=cw_headers(request))
+    except (rql.BadRQLQuery, cubicweb.web.RequestError):
+        raise
+
+
+class CubicWebPyramidRequest(CubicWebRequestBase):
+    """ A CubicWeb request that only wraps a pyramid request.
+
+    :param request: A pyramid request
+
+    """
+    def __init__(self, request):
+        self._request = request
+
+        self.path = request.upath_info
+
+        vreg = request.registry['cubicweb.registry']
+        https = request.scheme == 'https'
+
+        post = request.params.mixed()
+        headers_in = request.headers
+
+        super(CubicWebPyramidRequest, self).__init__(vreg, https, post,
+                                                     headers=headers_in)
+
+        self.content = request.body_file_seekable
+
+    def setup_params(self, params):
+        self.form = {}
+        for param, val in params.items():
+            if param in self.no_script_form_params and val:
+                val = self.no_script_form_param(param, val)
+            if isinstance(val, FieldStorage) and val.file:
+                val = (val.filename, val.file)
+            if param == '_cwmsgid':
+                self.set_message_id(val)
+            elif param == '__message':
+                warn('[3.13] __message in request parameter is deprecated '
+                     '(may only be given to .build_url). Seeing this message '
+                     'usually means your application holds some <form> where '
+                     'you should replace the use of the __message hidden input '
+                     'with form.set_message, so the new _cwmsgid mechanism is '
+                     'properly used',
+                     DeprecationWarning)
+                self.set_message(val)
+            else:
+                self.form[param] = val
+
+    def is_secure(self):
+        return self._request.scheme == 'https'
+
+    def relative_path(self, includeparams=True):
+        path = self._request.path[1:]
+        if includeparams and self._request.query_string:
+            return '%s?%s' % (path, self._request.query_string)
+        return path
+
+    def instance_uri(self):
+        return self._request.application_url
+
+    def get_full_path(self):
+        path = self._request.path
+        if self._request.query_string:
+            return '%s?%s' % (path, self._request.query_string)
+        return path
+
+    def http_method(self):
+        return self._request.method
+
+    def _set_status_out(self, value):
+        self._request.response.status_int = value
+
+    def _get_status_out(self):
+        return self._request.response.status_int
+
+    status_out = property(_get_status_out, _set_status_out)
+
+    @property
+    def message(self):
+        """Returns a '<br>' joined list of the cubicweb current message and the
+        default pyramid flash queue messages.
+        """
+        return u'\n<br>\n'.join(
+            self._request.session.pop_flash()
+            + self._request.session.pop_flash('cubicweb'))
+
+    def set_message(self, msg):
+        self.reset_message()
+        self._request.session.flash(msg, 'cubicweb')
+
+    def set_message_id(self, msgid):
+        self.reset_message()
+        self.set_message(
+            self._request.session.pop(msgid, u''))
+
+    def reset_message(self):
+        self._request.session.pop_flash('cubicweb')
+
+
+def render_view(request, vid, **kwargs):
+    """ Helper function to render a CubicWeb view.
+
+    :param request: A pyramid request
+    :param vid: A CubicWeb view id
+    :param **kwargs: Keyword arguments to select and instantiate the view
+    :returns: The rendered view content
+    """
+    vreg = request.registry['cubicweb.registry']
+    # XXX The select() function could know how to handle a pyramid
+    # request, and feed it directly to the views that support it.
+    # On the other hand, we could refine the View concept and decide it works
+    # with a cnx, and never with a WebRequest
+
+    with cw_to_pyramid(request):
+        view = vreg['views'].select(vid, request.cw_request, **kwargs)
+        view.set_stream()
+        view.render()
+        return view._stream.getvalue()
+
+
+def _cw_cnx(request):
+    """ Obtains a cw session from a pyramid request
+
+    The connection will be commited or rolled-back in a request finish
+    callback (this is temporary, we should make use of the transaction manager
+    in a later version).
+
+    Not meant for direct use, use ``request.cw_cnx`` instead.
+
+    :param request: A pyramid request
+    :returns type: :class:`cubicweb.server.session.Connection`
+    """
+    session = request.cw_session
+    if session is None:
+        return None
+
+    cnx = session.new_cnx()
+
+    def commit_state(cnx):
+        return cnx.commit_state
+
+    def cleanup(request):
+        try:
+            if (request.exception is not None and not isinstance(
+                request.exception, (
+                    httpexceptions.HTTPSuccessful,
+                    httpexceptions.HTTPRedirection))):
+                cnx.rollback()
+            elif commit_state(cnx) == 'uncommitable':
+                cnx.rollback()
+            else:
+                cnx.commit()
+        finally:
+            cnx.__exit__(None, None, None)
+
+    request.add_finished_callback(cleanup)
+    cnx.__enter__()
+    return cnx
+
+
+def repo_connect(request, repo, eid):
+    """A lightweight version of
+    :meth:`cubicweb.server.repository.Repository.connect` that does not keep
+    track of opened sessions, removing the need to close them."""
+    user = tools.cached_build_user(repo, eid)
+    session = Session(request, user, repo)
+    tools.cnx_attach_entity(session, user)
+    # Calling the hooks should be done only once, disabling it completely for
+    # now
+    #with session.new_cnx() as cnx:
+        #repo.hm.call_hooks('session_open', cnx)
+        #cnx.commit()
+    # repo._sessions[session.sessionid] = session
+    return session
+
+
+def _cw_session(request):
+    """Obtains a cw session from a pyramid request
+
+    :param request: A pyramid request
+    :returns type: :class:`cubicweb.server.session.Session`
+
+    Not meant for direct use, use ``request.cw_session`` instead.
+    """
+    repo = request.registry['cubicweb.repository']
+
+    if not request.authenticated_userid:
+        eid = request.registry.get('cubicweb.anonymous_eid')
+        if eid is None:
+            return None
+        session = repo_connect(request, repo, eid=eid)
+    else:
+        session = request._cw_cached_session
+
+    return session
+
+
+def _cw_request(request):
+    """ Obtains a CubicWeb request wrapper for the pyramid request.
+
+    :param request: A pyramid request
+    :return: A CubicWeb request
+    :returns type: :class:`CubicWebPyramidRequest`
+
+    Not meant for direct use, use ``request.cw_request`` instead.
+
+    """
+    req = CubicWebPyramidRequest(request)
+    cnx = request.cw_cnx
+    if cnx is not None:
+        req.set_cnx(request.cw_cnx)
+    return req
+
+
+def get_principals(login, request):
+    """ Returns the group names of the authenticated user.
+
+    This function is meant to be used as an authentication policy callback.
+
+    It also pre-opens the cubicweb session and puts it in
+    request._cw_cached_session for later usage by :func:`_cw_session`.
+
+    .. note::
+
+        If the default authentication policy is not used, make sure this
+        function gets called by the active authentication policy.
+
+    :param login: A cubicweb user eid
+    :param request: A pyramid request
+    :returns: A list of group names
+    """
+    repo = request.registry['cubicweb.repository']
+
+    try:
+        session = repo_connect(request, repo, eid=login)
+        request._cw_cached_session = session
+    except Exception:
+        log.exception("Failed")
+        raise
+
+    with session.new_cnx() as cnx:
+        with cnx.security_enabled(read=False):
+            return set(group for group, in cnx.execute(
+                'Any GN WHERE U in_group G, G name GN, U eid %(userid)s',
+                {'userid': login}))
+
+
+def includeme(config):
+    """ Enables the core features of Pyramid CubicWeb.
+
+    Automatically called by the 'pyramid' command, or via
+    ``config.include('cubicweb.pyramid.core')``. In the latter case,
+    the following registry entries must be defined first:
+
+    'cubicweb.config'
+        A cubicweb 'config' instance.
+
+    'cubicweb.repository'
+        The corresponding cubicweb repository.
+
+    'cubicweb.registry'
+        The vreg.
+    """
+    repo = config.registry['cubicweb.repository']
+
+    with repo.internal_cnx() as cnx:
+        login = config.registry['cubicweb.config'].anonymous_user()[0]
+        if login is not None:
+            config.registry['cubicweb.anonymous_eid'] = cnx.find(
+                'CWUser', login=login).one().eid
+
+    config.add_request_method(
+        _cw_session, name='cw_session', property=True, reify=True)
+    config.add_request_method(
+        _cw_cnx, name='cw_cnx', property=True, reify=True)
+    config.add_request_method(
+        _cw_request, name='cw_request', property=True, reify=True)
+
+    cwcfg = config.registry['cubicweb.config']
+    for cube in cwcfg.cubes():
+        pkgname = 'cubes.' + cube
+        mod = __import__(pkgname)
+        mod = getattr(mod, cube)
+        if hasattr(mod, 'includeme'):
+            config.include('cubes.' + cube)
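
For illustration, a sketch of calling render_view() from a plain Pyramid view callable,
assuming cubicweb.pyramid.core is included (the 'home' route and the 'index' view id
are illustrative placeholders):

    from pyramid.view import view_config
    from cubicweb.pyramid.core import render_view

    @view_config(route_name='home')  # hypothetical route
    def home(request):
        # delegate the rendering to a CubicWeb view, as the login views do
        request.response.text = render_view(request, 'index')
        return request.response
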
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/pyramid/defaults.py	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,24 @@
+""" Defaults for a classical CubicWeb instance. """
+
+
+def includeme(config):
+    """ Enable the defaults that make the application behave like a classical
+    CubicWeb instance.
+
+    The following modules get included:
+
+    -   :func:`cubicweb.pyramid.session <cubicweb.pyramid.session.includeme>`
+    -   :func:`cubicweb.pyramid.auth <cubicweb.pyramid.auth.includeme>`
+    -   :func:`cubicweb.pyramid.login <cubicweb.pyramid.login.includeme>`
+
+    It is automatically included by the configuration system, unless the
+    following entry is added to the :ref:`pyramid_settings`:
+
+    .. code-block:: ini
+
+        cubicweb.defaults = no
+
+    """
+    config.include('cubicweb.pyramid.session')
+    config.include('cubicweb.pyramid.auth')
+    config.include('cubicweb.pyramid.login')
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/pyramid/init_instance.py	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,10 @@
+from cubicweb.cwconfig import CubicWebConfiguration
+
+
+def includeme(config):
+    appid = config.registry.settings['cubicweb.instance']
+    cwconfig = CubicWebConfiguration.config_for(appid)
+
+    config.registry['cubicweb.config'] = cwconfig
+    config.registry['cubicweb.repository'] = repo = cwconfig.repository()
+    config.registry['cubicweb.registry'] = repo.vreg
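
A minimal sketch of what including this module from a plain Pyramid Configurator looks
like (the instance name is a placeholder):

    from pyramid.config import Configurator

    config = Configurator(settings={'cubicweb.instance': 'myinstance'})
    config.include('cubicweb.pyramid.init_instance')
    # the registry now holds 'cubicweb.config', 'cubicweb.repository'
    # and 'cubicweb.registry'
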
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/pyramid/login.py	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,81 @@
+""" Provide login views that reproduce a classical CubicWeb behavior"""
+from pyramid import security
+from pyramid.httpexceptions import HTTPSeeOther
+from pyramid.view import view_config
+from pyramid.settings import asbool
+
+import cubicweb
+
+from cubicweb.pyramid.core import render_view
+
+
+@view_config(route_name='login')
+def login_form(request):
+    """ Default view for the 'login' route.
+
+    Display the 'login' CubicWeb view, which should be a login form."""
+    request.response.text = render_view(request, 'login')
+    return request.response
+
+
+@view_config(route_name='login', request_param=('__login', '__password'))
+def login_password_login(request):
+    """ Handle GET/POST of __login/__password on the 'login' route.
+
+    The authentication itself is delegated to the CubicWeb repository.
+
+    Request parameters:
+
+    :param __login: The user login (or email if :confval:`allow-email-login` is
+                    on).
+    :param __password: The user password
+    :param __setauthcookie: (optional) If defined and equal to '1', set the
+                            authentication cookie maxage to 1 week.
+
+                            If not, the authentication cookie is a session
+                            cookie.
+    """
+    repo = request.registry['cubicweb.repository']
+
+    user_eid = None
+
+    login = request.params['__login']
+    password = request.params['__password']
+
+    try:
+        with repo.internal_cnx() as cnx:
+            user = repo.authenticate_user(cnx, login, password=password)
+            user_eid = user.eid
+    except cubicweb.AuthenticationError:
+        request.cw_request.set_message(request.cw_request._(
+            "Authentication failed. Please check your credentials."))
+        request.cw_request.post = dict(request.params)
+        del request.cw_request.post['__password']
+        request.response.status_code = 403
+        return login_form(request)
+
+    headers = security.remember(
+        request, user_eid,
+        persistent=asbool(request.params.get('__setauthcookie', False)))
+
+    new_path = request.params.get('postlogin_path', '')
+
+    if new_path == 'login':
+        new_path = ''
+
+    url = request.cw_request.build_url(new_path)
+    raise HTTPSeeOther(url, headers=headers)
+
+
+@view_config(route_name='login', effective_principals=security.Authenticated)
+def login_already_loggedin(request):
+    """ 'login' route view for Authenticated users.
+
+    Simply redirect the user to '/'."""
+    raise HTTPSeeOther('/')
+
+
+def includeme(config):
+    """ Create the 'login' route ('/login') and load this module views"""
+    config.add_route('login', '/login')
+    config.scan('cubicweb.pyramid.login')
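
For illustration, the login exchange described above can be exercised with any HTTP
client; a sketch using the requests library (URL, port and credentials are placeholders):

    import requests

    resp = requests.post(
        'http://localhost:8080/login',
        data={'__login': 'admin', '__password': 'secret',
              '__setauthcookie': '1'},
        allow_redirects=False)
    # on success the view raises HTTPSeeOther, i.e. a 303 redirect
    assert resp.status_code == 303
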
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/pyramid/predicates.py	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,22 @@
+"""Contains predicates used in Pyramid views.
+"""
+
+
+class MatchIsETypePredicate(object):
+    """A predicate that match if a given etype exist in schema.
+    """
+    def __init__(self, matchname, config):
+        self.matchname = matchname
+
+    def text(self):
+        return 'match_is_etype = %s' % self.matchname
+
+    phash = text
+
+    def __call__(self, info, request):
+        return info['match'][self.matchname].lower() in \
+            request.registry['cubicweb.registry'].case_insensitive_etypes
+
+
+def includeme(config):
+    config.add_route_predicate('match_is_etype', MatchIsETypePredicate)
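
A hedged sketch of attaching this predicate to a route (the route name and pattern are
illustrative and assume the cubicweb registry is already set up, e.g. via cubicweb.pyramid):

    from pyramid.config import Configurator

    config = Configurator()
    config.include('cubicweb.pyramid.predicates')
    # the route only matches when the 'etype' segment names a known,
    # case-insensitive entity type
    config.add_route('etype_listing', '/{etype}', match_is_etype='etype')
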
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/pyramid/profile.py	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,63 @@
+""" Tools for profiling.
+
+See :ref:`profiling`."""
+from __future__ import print_function
+
+import cProfile
+import itertools
+
+from pyramid.view import view_config
+
+
+@view_config(route_name='profile_ping')
+def ping(request):
+    """ View that handle '/_profile/ping'
+
+    It simply reply 'ping', without requiring connection to the repository.
+    It is a useful as a comparison point to evaluate the actual overhead of
+    more costly views.
+    """
+    request.response.text = u'pong'
+    return request.response
+
+
+@view_config(route_name='profile_cnx')
+def cnx(request):
+    """ View that handle '/_profile/cnx'
+
+    Same as :func:`ping`, but it first ask for a connection to the repository.
+    Useful to evaluate the overhead of opening a connection.
+    """
+    request.cw_cnx
+    request.response.text = u'pong'
+    return request.response
+
+
+def wsgi_profile(app, filename='program.prof', dump_every=50):
+    """ A WSGI middleware for profiling
+
+    It enables the profiler before passing the request to the underlying
+    application, and disables it just after.
+
+    The stats will be dumped after ``dump_every`` requests
+
+    :param filename: The filename to dump the stats to.
+    :param dump_every: Number of requests after which to dump the stats.
+    """
+
+    profile = cProfile.Profile()
+
+    counter = itertools.count(1)
+
+    def application(environ, start_response):
+        profile.enable()
+        try:
+            return app(environ, start_response)
+        finally:
+            profile.disable()
+            if not next(counter) % dump_every:
+                print("Dump profile stats to %s" % filename)
+                profile.create_stats()
+                profile.dump_stats(filename)
+
+    return application
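
A minimal sketch of wrapping an existing WSGI callable with the middleware defined
above (the wrapped application is a placeholder):

    from cubicweb.pyramid.profile import wsgi_profile

    def add_profiling(app):
        # dump cProfile stats to 'program.prof' every 50 requests
        return wsgi_profile(app, filename='program.prof', dump_every=50)
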
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/pyramid/pyramidctl.py	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,460 @@
+"""
+Provides a 'pyramid' command as a replacement to the 'start' command.
+
+The reloading strategy is heavily inspired by (and partially copied from)
+the pyramid script 'pserve'.
+"""
+from __future__ import print_function
+
+import atexit
+import errno
+import os
+import signal
+import sys
+import tempfile
+import time
+import threading
+import subprocess
+
+from cubicweb import BadCommandUsage, ExecutionError
+from cubicweb.__pkginfo__ import numversion as cwversion
+from cubicweb.cwconfig import CubicWebConfiguration as cwcfg
+from cubicweb.cwctl import CWCTL, InstanceCommand, init_cmdline_log_threshold
+from cubicweb.pyramid import wsgi_application_from_cwconfig
+from cubicweb.server import set_debug
+
+import waitress
+
+MAXFD = 1024
+
+DBG_FLAGS = ('RQL', 'SQL', 'REPO', 'HOOKS', 'OPS', 'SEC', 'MORE')
+LOG_LEVELS = ('debug', 'info', 'warning', 'error')
+
+
+class PyramidStartHandler(InstanceCommand):
+    """Start an interactive pyramid server.
+
+    This command requires http://hg.logilab.org/review/pyramid_cubicweb/
+
+    <instance>
+      identifier of the instance to configure.
+    """
+    name = 'pyramid'
+
+    options = (
+        ('no-daemon',
+         {'action': 'store_true',
+          'help': 'Run the server in the foreground.'}),
+        ('debug-mode',
+         {'action': 'store_true',
+          'help': 'Activate the repository debug mode ('
+                  'logs in the console and the debug toolbar).'
+                  ' Implies --no-daemon'}),
+        ('debug',
+         {'short': 'D', 'action': 'store_true',
+          'help': 'Equals to "--debug-mode --no-daemon --reload"'}),
+        ('reload',
+         {'action': 'store_true',
+          'help': 'Restart the server if any source file is changed'}),
+        ('reload-interval',
+         {'type': 'int', 'default': 1,
+          'help': 'Interval, in seconds, between file modifications checks'}),
+        ('loglevel',
+         {'short': 'l', 'type': 'choice', 'metavar': '<log level>',
+          'default': None, 'choices': LOG_LEVELS,
+          'help': 'debug if -D is set, error otherwise; '
+                  'one of %s' % (LOG_LEVELS,),
+          }),
+        ('dbglevel',
+         {'type': 'multiple_choice', 'metavar': '<dbg level>',
+          'default': None,
+          'choices': DBG_FLAGS,
+          'help': ('Set the server debugging flags; you may choose several '
+                   'values in %s; implies "debug" loglevel' % (DBG_FLAGS,)),
+          }),
+        ('profile',
+         {'action': 'store_true',
+          'default': False,
+          'help': 'Enable profiling'}),
+        ('profile-output',
+         {'type': 'string',
+          'default': None,
+          'help': 'Profiling output file (default: "program.prof")'}),
+        ('profile-dump-every',
+         {'type': 'int',
+          'default': None,
+          'metavar': 'N',
+          'help': 'Dump profile stats to output every N requests '
+                  '(default: 100)'}),
+    )
+    if cwversion >= (3, 21, 0):
+        options = options + (
+            ('param',
+             {'short': 'p',
+              'type': 'named',
+              'metavar': 'key1:value1,key2:value2',
+              'default': {},
+              'help': 'override <key> configuration file option with <value>.',
+              }),
+        )
+
+    _reloader_environ_key = 'CW_RELOADER_SHOULD_RUN'
+    _reloader_filelist_environ_key = 'CW_RELOADER_FILELIST'
+
+    def debug(self, msg):
+        print('DEBUG - %s' % msg)
+
+    def info(self, msg):
+        print('INFO - %s' % msg)
+
+    def ordered_instances(self):
+        instances = super(PyramidStartHandler, self).ordered_instances()
+        if (self['debug-mode'] or self['debug'] or self['reload']) \
+                and len(instances) > 1:
+            raise BadCommandUsage(
+                '--debug-mode, --debug and --reload can be used on a single '
+                'instance only')
+        return instances
+
+    def quote_first_command_arg(self, arg):
+        """
+        Work around a bug on Windows when running an executable located
+        inside a path containing a space.  On non-Windows systems, or when
+        the executable path contains no space, the argument is returned
+        unchanged.
+        """
+        if (sys.platform != 'win32' or ' ' not in arg):
+            # Problem does not apply:
+            return arg
+        try:
+            import win32api
+        except ImportError:
+            raise ValueError(
+                "The executable %r contains a space, and in order to "
+                "handle this issue you must have the win32api module "
+                "installed" % arg)
+        arg = win32api.GetShortPathName(arg)
+        return arg
+
+    def _remove_pid_file(self, written_pid, filename):
+        current_pid = os.getpid()
+        if written_pid != current_pid:
+            # A forked process must be exiting, not the process that
+            # wrote the PID file
+            return
+        if not os.path.exists(filename):
+            return
+        with open(filename) as f:
+            content = f.read().strip()
+        try:
+            pid_in_file = int(content)
+        except ValueError:
+            pass
+        else:
+            if pid_in_file != current_pid:
+                msg = "PID file %s contains %s, not expected PID %s"
+                self.out(msg % (filename, pid_in_file, current_pid))
+                return
+        self.info("Removing PID file %s" % filename)
+        try:
+            os.unlink(filename)
+            return
+        except OSError as e:
+            # Record, but don't give traceback
+            self.out("Cannot remove PID file: (%s)" % e)
+        # well, at least let's not leave the invalid PID around...
+        try:
+            with open(filename, 'w') as f:
+                f.write('')
+        except OSError as e:
+            self.out('Stale PID left in file: %s (%s)' % (filename, e))
+        else:
+            self.out('Stale PID removed')
+
+    def record_pid(self, pid_file):
+        pid = os.getpid()
+        self.debug('Writing PID %s to %s' % (pid, pid_file))
+        with open(pid_file, 'w') as f:
+            f.write(str(pid))
+        atexit.register(
+            self._remove_pid_file, pid, pid_file)
+
+    def daemonize(self, pid_file):
+        pid = live_pidfile(pid_file)
+        if pid:
+            raise ExecutionError(
+                "Daemon is already running (PID: %s from PID file %s)"
+                % (pid, pid_file))
+
+        self.debug('Entering daemon mode')
+        pid = os.fork()
+        if pid:
+            # The forked child also holds a handle on resources, so we
+            # *don't* want a proper termination of this process; we just
+            # want to exit quickly (which os._exit() does)
+            os._exit(0)
+        # Make this the session leader
+        os.setsid()
+        # Fork a second time so the daemon cannot reacquire a controlling terminal
+        pid = os.fork()
+        if pid:
+            os._exit(0)
+
+        # @@: Should we set the umask and cwd now?
+
+        import resource  # Resource usage information.
+        maxfd = resource.getrlimit(resource.RLIMIT_NOFILE)[1]
+        if (maxfd == resource.RLIM_INFINITY):
+            maxfd = MAXFD
+        # Iterate through and close all file descriptors.
+        for fd in range(0, maxfd):
+            try:
+                os.close(fd)
+            except OSError:  # ERROR, fd wasn't open to begin with (ignored)
+                pass
+
+        if (hasattr(os, "devnull")):
+            REDIRECT_TO = os.devnull
+        else:
+            REDIRECT_TO = "/dev/null"
+        os.open(REDIRECT_TO, os.O_RDWR)  # standard input (0)
+        # Duplicate standard input to standard output and standard error.
+        os.dup2(0, 1)  # standard output (1)
+        os.dup2(0, 2)  # standard error (2)
+
+    def restart_with_reloader(self):
+        self.debug('Starting subprocess with file monitor')
+
+        with tempfile.NamedTemporaryFile(delete=False) as f:
+            filelist_path = f.name
+
+        while True:
+            args = [self.quote_first_command_arg(sys.executable)] + sys.argv
+            new_environ = os.environ.copy()
+            new_environ[self._reloader_environ_key] = 'true'
+            new_environ[self._reloader_filelist_environ_key] = filelist_path
+            proc = None
+            try:
+                try:
+                    proc = subprocess.Popen(args, env=new_environ)
+                    exit_code = proc.wait()
+                    proc = None
+                    print("Process exited with", exit_code)
+                except KeyboardInterrupt:
+                    self.info('^C caught in monitor process')
+                    return 1
+            finally:
+                if proc is not None:
+                    proc.terminate()
+                    self.info(
+                        'Waiting for the server to stop. Hit CTRL-C to exit')
+                    exit_code = proc.wait()
+
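+            # The server child exits with code 3 when it asks to be restarted
+            # (see pyramid_instance); any other exit code means it stopped on
+            # its own, so wait for a monitored file to change before
+            # restarting, or give up if there is nothing to monitor.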
+            if exit_code != 3:
+                with open(filelist_path) as f:
+                    filelist = [line.strip() for line in f]
+                if filelist:
+                    self.info("Reloading failed. Waiting for a file to change")
+                    mon = Monitor(extra_files=filelist, nomodules=True)
+                    while mon.check_reload():
+                        time.sleep(1)
+                else:
+                    return exit_code
+
+            self.info('%s %s %s' % ('-' * 20, 'Restarting', '-' * 20))
+
+    def set_needreload(self):
+        self._needreload = True
+
+    def install_reloader(self, poll_interval, extra_files, filelist_path):
+        mon = Monitor(
+            poll_interval=poll_interval, extra_files=extra_files,
+            atexit=self.set_needreload, filelist_path=filelist_path)
+        mon_thread = threading.Thread(target=mon.periodic_reload)
+        mon_thread.daemon = True
+        mon_thread.start()
+
+    def configfiles(self, cwconfig):
+        """Generate instance configuration filenames"""
+        yield cwconfig.main_config_file()
+        for f in (
+                'sources', 'logging.conf', 'pyramid.ini', 'pyramid-debug.ini'):
+            f = os.path.join(cwconfig.apphome, f)
+            if os.path.exists(f):
+                yield f
+
+    def i18nfiles(self, cwconfig):
+        """Generate instance i18n files"""
+        i18ndir = os.path.join(cwconfig.apphome, 'i18n')
+        if os.path.exists(i18ndir):
+            for lang in cwconfig.available_languages():
+                f = os.path.join(i18ndir, lang, 'LC_MESSAGES', 'cubicweb.mo')
+                if os.path.exists(f):
+                    yield f
+
+    def pyramid_instance(self, appid):
+        self._needreload = False
+
+        debugmode = self['debug-mode'] or self['debug']
+        autoreload = self['reload'] or self['debug']
+        daemonize = not (self['no-daemon'] or debugmode or autoreload)
+
+        if autoreload and not os.environ.get(self._reloader_environ_key):
+            return self.restart_with_reloader()
+
+        cwconfig = cwcfg.config_for(appid, debugmode=debugmode)
+        if cwversion >= (3, 21, 0):
+            cwconfig.cmdline_options = self.config.param
+        if autoreload:
+            _turn_sigterm_into_systemexit()
+            self.debug('Running reloading file monitor')
+            extra_files = [sys.argv[0]]
+            extra_files.extend(self.configfiles(cwconfig))
+            extra_files.extend(self.i18nfiles(cwconfig))
+            self.install_reloader(
+                self['reload-interval'], extra_files,
+                filelist_path=os.environ.get(
+                    self._reloader_filelist_environ_key))
+
+        if daemonize:
+            self.daemonize(cwconfig['pid-file'])
+            self.record_pid(cwconfig['pid-file'])
+
+        if self['dbglevel']:
+            self['loglevel'] = 'debug'
+            set_debug('|'.join('DBG_' + x.upper() for x in self['dbglevel']))
+        init_cmdline_log_threshold(cwconfig, self['loglevel'])
+
+        app = wsgi_application_from_cwconfig(
+            cwconfig, profile=self['profile'],
+            profile_output=self['profile-output'],
+            profile_dump_every=self['profile-dump-every']
+        )
+
+        host = cwconfig['interface']
+        port = cwconfig['port'] or 8080
+        repo = app.application.registry['cubicweb.repository']
+        try:
+            repo.start_looping_tasks()
+            waitress.serve(app, host=host, port=port)
+        finally:
+            repo.shutdown()
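+        # Returning 3 tells the monitor process (restart_with_reloader) to
+        # restart the server.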
+        if self._needreload:
+            return 3
+        return 0
+
+CWCTL.register(PyramidStartHandler)
+
+
+def live_pidfile(pidfile):  # pragma: no cover
+    """(pidfile:str) -> int | None
+    Returns an int found in the named file, if there is one,
+    and if there is a running process with that process id.
+    Return None if no such process exists.
+    """
+    pid = read_pidfile(pidfile)
+    if pid:
+        try:
+            os.kill(int(pid), 0)
+            return pid
+        except OSError as e:
+            if e.errno == errno.EPERM:
+                return pid
+    return None
+
+
+def read_pidfile(filename):
+    if os.path.exists(filename):
+        try:
+            with open(filename) as f:
+                content = f.read()
+            return int(content.strip())
+        except (ValueError, IOError):
+            return None
+    else:
+        return None
+
+
+def _turn_sigterm_into_systemexit():
+    """Attempts to turn a SIGTERM exception into a SystemExit exception."""
+    try:
+        import signal
+    except ImportError:
+        return
+
+    def handle_term(signo, frame):
+        raise SystemExit
+    signal.signal(signal.SIGTERM, handle_term)
+
+
+class Monitor(object):
+    """A file monitor and server stopper.
+
+    It is a simplified version of pyramid's pserve.Monitor, with a few changes:
+
+    -   The constructor takes extra_files, atexit, nomodules and filelist_path
+    -   The process stops itself by sending SIGTERM to its own PID
+    """
+
+    def __init__(self, poll_interval=1, extra_files=[], atexit=None,
+                 nomodules=False, filelist_path=None):
+        self.module_mtimes = {}
+        self.keep_running = True
+        self.poll_interval = poll_interval
+        self.extra_files = extra_files
+        self.atexit = atexit
+        self.nomodules = nomodules
+        self.filelist_path = filelist_path
+
+    def _exit(self):
+        if self.atexit:
+            self.atexit()
+        os.kill(os.getpid(), signal.SIGTERM)
+
+    def periodic_reload(self):
+        while True:
+            if not self.check_reload():
+                self._exit()
+                break
+            time.sleep(self.poll_interval)
+
+    def check_reload(self):
+        filenames = list(self.extra_files)
+
+        if not self.nomodules:
+            for module in list(sys.modules.values()):
+                try:
+                    filename = module.__file__
+                except (AttributeError, ImportError):
+                    continue
+                if filename is not None:
+                    filenames.append(filename)
+
+        for filename in filenames:
+            try:
+                stat = os.stat(filename)
+                if stat:
+                    mtime = stat.st_mtime
+                else:
+                    mtime = 0
+            except (OSError, IOError):
+                continue
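+            # a stale .pyc may be older than its source, so use the newer mtime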
+            if filename.endswith('.pyc') and os.path.exists(filename[:-1]):
+                mtime = max(os.stat(filename[:-1]).st_mtime, mtime)
+            if filename not in self.module_mtimes:
+                self.module_mtimes[filename] = mtime
+            elif self.module_mtimes[filename] < mtime:
+                print('%s changed; reloading...' % filename)
+                return False
+
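+        # Share the list of watched files with the monitor process
+        # (restart_with_reloader) so it can keep watching them after the
+        # server process exits.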
+        if self.filelist_path:
+            with open(self.filelist_path) as f:
+                filelist = set((line.strip() for line in f))
+
+            filelist.update(filenames)
+
+            with open(self.filelist_path, 'w') as f:
+                for filename in filelist:
+                    f.write('%s\n' % filename)
+
+        return True
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/pyramid/resources.py	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,74 @@
+"""Contains resources classes.
+"""
+from six import text_type
+
+from rql import TypeResolverException
+
+from pyramid.decorator import reify
+from pyramid.httpexceptions import HTTPNotFound
+
+
+class EntityResource(object):
+
+    """A resource class for an entity. It provide method to retrieve an entity
+    by eid.
+    """
+
+    @classmethod
+    def from_eid(cls):
+        def factory(request):
+            return cls(request, None, None, request.matchdict['eid'])
+        return factory
+
+    def __init__(self, request, cls, attrname, value):
+        self.request = request
+        self.cls = cls
+        self.attrname = attrname
+        self.value = value
+
+    @reify
+    def rset(self):
+        req = self.request.cw_request
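+        # Without a known entity class, look the entity up by eid only;
+        # otherwise restrict the class' fetch query on the rest attribute.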
+        if self.cls is None:
+            return req.execute('Any X WHERE X eid %(x)s',
+                               {'x': int(self.value)})
+        st = self.cls.fetch_rqlst(self.request.cw_cnx.user, ordermethod=None)
+        st.add_constant_restriction(st.get_variable('X'), self.attrname,
+                                    'x', 'Substitute')
+        if self.attrname == 'eid':
+            try:
+                rset = req.execute(st.as_string(), {'x': int(self.value)})
+            except (ValueError, TypeResolverException):
+                # conflicting eid/type
+                raise HTTPNotFound()
+        else:
+            rset = req.execute(st.as_string(), {'x': text_type(self.value)})
+        return rset
+
+
+class ETypeResource(object):
+
+    """A resource for etype.
+    """
+    @classmethod
+    def from_match(cls, matchname):
+        def factory(request):
+            return cls(request, request.matchdict[matchname])
+        return factory
+
+    def __init__(self, request, etype):
+        vreg = request.registry['cubicweb.registry']
+
+        self.request = request
+        self.etype = vreg.case_insensitive_etypes[etype.lower()]
+        self.cls = vreg['etypes'].etype_class(self.etype)
+
+    def __getitem__(self, value):
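+        # the traversed segment is matched against the etype's rest attribute
+        # (eid or a unique attribute, e.g. 'name' for CWGroup)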
+        attrname = self.cls.cw_rest_attr_info()[0]
+        return EntityResource(self.request, self.cls, attrname, value)
+
+    @reify
+    def rset(self):
+        rql = self.cls.fetch_rql(self.request.cw_cnx.user)
+        rset = self.request.cw_request.execute(rql)
+        return rset
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/pyramid/rest_api.py	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,24 @@
+from __future__ import absolute_import
+
+
+from pyramid.httpexceptions import HTTPNotFound
+from pyramid.view import view_config
+from cubicweb.pyramid.resources import EntityResource, ETypeResource
+from cubicweb.pyramid.predicates import MatchIsETypePredicate
+
+
+@view_config(
+    route_name='cwentities',
+    context=EntityResource,
+    request_method='DELETE')
+def delete_entity(context, request):
+    context.rset.one().cw_delete()
+    request.response.status_int = 204
+    return request.response
+
+
+def includeme(config):
+    config.add_route(
+        'cwentities', '/{etype}/*traverse',
+        factory=ETypeResource.from_match('etype'), match_is_etype='etype')
+    config.scan(__name__)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/pyramid/session.py	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,189 @@
+import warnings
+import logging
+from contextlib import contextmanager
+
+from pyramid.compat import pickle
+from pyramid.session import SignedCookieSessionFactory
+
+from cubicweb import Binary
+
+
+log = logging.getLogger(__name__)
+
+
+def logerrors(logger):
+    def wrap(fn):
+        def newfn(*args, **kw):
+            try:
+                return fn(*args, **kw)
+            except Exception:
+                logger.exception("Error in %s" % fn.__name__)
+        return newfn
+    return wrap
+
+
+@contextmanager
+def unsafe_cnx_context_manager(request):
+    """Return a connection for use as a context manager, with security disabled
+
+    If the request has an attached connection, its security is deactivated in the
+    context manager's scope; otherwise a new internal connection is returned.
+    """
+    cnx = request.cw_cnx
+    if cnx is None:
+        with request.registry['cubicweb.repository'].internal_cnx() as cnx:
+            yield cnx
+    else:
+        with cnx.security_enabled(read=False, write=False):
+            yield cnx
+
+
+def CWSessionFactory(
+        secret,
+        cookie_name='session',
+        max_age=None,
+        path='/',
+        domain=None,
+        secure=False,
+        httponly=True,
+        set_on_exception=True,
+        timeout=1200,
+        reissue_time=120,
+        hashalg='sha512',
+        salt='pyramid.session.',
+        serializer=None):
+    """ A pyramid session factory that store session data in the CubicWeb
+    database.
+
+    Storage is done with the 'CWSession' entity, which is provided by the
+    'pyramid' cube.
+
+    .. warning::
+
+        Although it provides a sane default behavior, this session storage has
+        a serious overhead because it uses RQL to access the database.
+
+        Using pure SQL would help a little (it is roughly twice as fast), but
+        it is still pretty slow and thus not an immediate priority.
+
+        It is recommended to use a faster session factory
+        (pyramid_redis_sessions_ for example) if you need speed.
+
+    .. _pyramid_redis_sessions: http://pyramid-redis-sessions.readthedocs.org/
+                                en/latest/index.html
+    """
+
+    SignedCookieSession = SignedCookieSessionFactory(
+        secret,
+        cookie_name=cookie_name,
+        max_age=max_age,
+        path=path,
+        domain=domain,
+        secure=secure,
+        httponly=httponly,
+        set_on_exception=set_on_exception,
+        timeout=timeout,
+        reissue_time=reissue_time,
+        hashalg=hashalg,
+        salt=salt,
+        serializer=serializer)
+
+    class CWSession(SignedCookieSession):
+        def __init__(self, request):
+            # _set_accessed will be called by the super __init__.
+            # Setting _loaded to True inhibates it.
+            self._loaded = True
+
+            # the super __init__ will load a single value in the dictionary,
+            # the session id.
+            super(CWSession, self).__init__(request)
+
+            # Remove the session id from the dict
+            self.sessioneid = self.pop('sessioneid', None)
+            self.repo = request.registry['cubicweb.repository']
+
+            # We need to lazy-load only for existing sessions
+            self._loaded = self.sessioneid is None
+
+        @logerrors(log)
+        def _set_accessed(self, value):
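+            # pyramid updates 'accessed' on every session access, so use this
+            # setter as a hook to lazily load the stored data from the
+            # CWSession entity the first time the session is actually used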
+            self._accessed = value
+
+            if self._loaded:
+                return
+
+            with unsafe_cnx_context_manager(self.request) as cnx:
+                value_rset = cnx.execute('Any D WHERE X eid %(x)s, X cwsessiondata D',
+                                         {'x': self.sessioneid})
+                value = value_rset[0][0]
+                if value:
+                    # Use dict.update directly to avoid _set_accessed being
+                    # called recursively
+                    dict.update(self, pickle.load(value))
+
+            self._loaded = True
+
+        def _get_accessed(self):
+            return self._accessed
+
+        accessed = property(_get_accessed, _set_accessed)
+
+        @logerrors(log)
+        def _set_cookie(self, response):
+            # Save the value in the database
+            data = Binary(pickle.dumps(dict(self)))
+            sessioneid = self.sessioneid
+
+            with unsafe_cnx_context_manager(self.request) as cnx:
+                if not sessioneid:
+                    session = cnx.create_entity(
+                        'CWSession', cwsessiondata=data)
+                    sessioneid = session.eid
+                else:
+                    session = cnx.entity_from_eid(sessioneid)
+                    session.cw_set(cwsessiondata=data)
+                cnx.commit()
+
+            # Only set the cookie if actually needed, i.e. for a new session
+            # or once the reissue delay has elapsed
+            if self.new or self.accessed - self.renewed > self._reissue_time:
+                dict.clear(self)
+                dict.__setitem__(self, 'sessioneid', sessioneid)
+                return super(CWSession, self)._set_cookie(response)
+
+            return True
+
+    return CWSession
+
+
+def includeme(config):
+    """ Activate the CubicWeb session factory.
+
+    Usually called via ``config.include('cubicweb.pyramid.session')``.
+
+    See also :ref:`defaults_module`
+    """
+    settings = config.registry.settings
+    secret = settings.get('cubicweb.session.secret', '')
+    if not secret:
+        secret = config.registry['cubicweb.config'].get('pyramid-session-secret')
+        warnings.warn('''
+        Please migrate pyramid-session-secret from
+        all-in-one.conf to the cubicweb.session.secret config entry in
+        your pyramid.ini file.
+        ''')
+    if not secret:
+        secret = 'notsosecret'
+        warnings.warn('''
+
+            !! WARNING !! !! WARNING !!
+
+            The session cookies are signed with a static secret key.
+            To set your own secret key, edit your pyramid.ini file
+            and set the 'cubicweb.session.secret' key.
+
+            YOU SHOULD STOP THIS INSTANCE unless you really know what you
+            are doing !!
+
+        ''')
+    session_factory = CWSessionFactory(secret)
+    config.set_session_factory(session_factory)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/pyramid/syncsession.py	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,29 @@
+# copyright 2016 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+"""Override cubicweb's syncsession hooks to handle them in the pyramid's way"""
+
+from logilab.common.decorators import monkeypatch
+from cubicweb.hooks import syncsession
+
+
+def includeme(config):
+
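+    # the only connection available here is the current one, so consider it
+    # the user's "session" when it belongs to the given user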
+    @monkeypatch(syncsession)
+    def get_user_sessions(cnx, user_eid):
+        if cnx.user.eid == user_eid:
+            yield cnx
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/pyramid/test/__init__.py	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,30 @@
+import webtest
+
+from cubicweb.devtools.webtest import CubicWebTestTC
+
+from cubicweb.pyramid import make_cubicweb_application
+
+
+class PyramidCWTest(CubicWebTestTC):
+    settings = {}
+
+    @classmethod
+    def init_config(cls, config):
+        super(PyramidCWTest, cls).init_config(config)
+        config.global_set_option('https-url', 'https://localhost.local/')
+        config.global_set_option('anonymous-user', 'anon')
+        config.https_uiprops = None
+        config.https_datadir_url = None
+
+    def setUp(self):
+        # Skip CubicWebTestTC setUp
+        super(CubicWebTestTC, self).setUp()
+        config = make_cubicweb_application(self.config, self.settings)
+        self.includeme(config)
+        self.pyr_registry = config.registry
+        self.webapp = webtest.TestApp(
+            config.make_wsgi_app(),
+            extra_environ={'wsgi.url_scheme': 'https'})
+
+    def includeme(self, config):
+        pass
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/pyramid/test/test_bw_request.py	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,103 @@
+# -*- coding: utf-8 -*-
+from io import BytesIO
+
+import webtest
+
+import pyramid.request
+
+from cubicweb.pyramid.core import CubicWebPyramidRequest
+from cubicweb.pyramid.test import PyramidCWTest
+
+
+class WSGIAppTest(PyramidCWTest):
+    def make_request(self, path, environ=None, **kw):
+        r = webtest.app.TestRequest.blank(path, environ, **kw)
+
+        request = pyramid.request.Request(r.environ)
+        request.registry = self.pyr_registry
+
+        return request
+
+    def test_content_type(self):
+        req = CubicWebPyramidRequest(
+            self.make_request('/', {'CONTENT_TYPE': 'text/plain'}))
+
+        self.assertEqual('text/plain', req.get_header('Content-Type'))
+
+    def test_content_body(self):
+        req = CubicWebPyramidRequest(
+            self.make_request('/', {
+                'CONTENT_LENGTH': 12,
+                'CONTENT_TYPE': 'text/plain',
+                'wsgi.input': BytesIO(b'some content')}))
+
+        self.assertEqual(b'some content', req.content.read())
+
+    def test_http_scheme(self):
+        req = CubicWebPyramidRequest(
+            self.make_request('/', {
+                'wsgi.url_scheme': 'http'}))
+
+        self.assertFalse(req.https)
+
+    def test_https_scheme(self):
+        req = CubicWebPyramidRequest(
+            self.make_request('/', {
+                'wsgi.url_scheme': 'https'}))
+
+        self.assertTrue(req.https)
+
+    def test_https_prefix(self):
+        r = self.webapp.get('/https/')
+        self.assertIn('https://', r.text)
+
+    def test_big_content(self):
+        content = b'x'*100001
+
+        req = CubicWebPyramidRequest(
+            self.make_request('/', {
+                'CONTENT_LENGTH': len(content),
+                'CONTENT_TYPE': 'text/plain',
+                'wsgi.input': BytesIO(content)}))
+
+        self.assertEqual(content, req.content.read())
+
+    def test_post(self):
+        self.webapp.post(
+            '/',
+            params={'__login': self.admlogin, '__password': self.admpassword})
+
+    def test_get_multiple_variables(self):
+        req = CubicWebPyramidRequest(
+            self.make_request('/?arg=1&arg=2'))
+
+        self.assertEqual([u'1', u'2'], req.form['arg'])
+
+    def test_post_multiple_variables(self):
+        req = CubicWebPyramidRequest(
+            self.make_request('/', POST='arg=1&arg=2'))
+
+        self.assertEqual([u'1', u'2'], req.form['arg'])
+
+    def test_post_files(self):
+        content_type, params = self.webapp.encode_multipart(
+            (), (('filefield', 'aname', b'acontent'),))
+        req = CubicWebPyramidRequest(
+            self.make_request('/', POST=params, content_type=content_type))
+        self.assertIn('filefield', req.form)
+        fieldvalue = req.form['filefield']
+        self.assertEqual(u'aname', fieldvalue[0])
+        self.assertEqual(b'acontent', fieldvalue[1].read())
+
+    def test_post_unicode_urlencoded(self):
+        params = 'arg=%C3%A9'
+        req = CubicWebPyramidRequest(
+            self.make_request(
+                '/', POST=params,
+                content_type='application/x-www-form-urlencoded'))
+        self.assertEqual(u"é", req.form['arg'])
+
+
+if __name__ == '__main__':
+    from unittest import main
+    main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/pyramid/test/test_core.py	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,49 @@
+from cubicweb.pyramid.test import PyramidCWTest
+
+from cubicweb.view import View
+from cubicweb.web import Redirect
+from cubicweb import ValidationError
+
+
+class Redirector(View):
+    __regid__ = 'redirector'
+
+    def call(self, rset=None):
+        self._cw.set_header('Cache-Control', 'no-cache')
+        raise Redirect('http://example.org')
+
+
+def put_in_uncommitable_state(request):
+    try:
+        request.cw_cnx.execute('SET U login NULL WHERE U login "anon"')
+    except ValidationError:
+        pass
+    request.response.body = b'OK'
+    return request.response
+
+
+class CoreTest(PyramidCWTest):
+    anonymous_allowed = True
+
+    def includeme(self, config):
+        config.add_route('uncommitable', '/uncommitable')
+        config.add_view(put_in_uncommitable_state, route_name='uncommitable')
+
+    def test_cw_to_pyramid_copy_headers_on_redirect(self):
+        self.vreg.register(Redirector)
+        try:
+            res = self.webapp.get('/?vid=redirector', expect_errors=True)
+            self.assertEqual(res.status_int, 303)
+            self.assertEqual(res.headers['Cache-Control'], 'no-cache')
+        finally:
+            self.vreg.unregister(Redirector)
+
+    def test_uncommitable_cnx(self):
+        res = self.webapp.get('/uncommitable')
+        self.assertEqual(res.text, 'OK')
+        self.assertEqual(res.status_int, 200)
+
+
+if __name__ == '__main__':
+    from unittest import main
+    main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/pyramid/test/test_hooks.py	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,77 @@
+from six import text_type
+
+from cubicweb.pyramid.test import PyramidCWTest
+from cubicweb.pyramid import tools
+
+
+def set_language(request):
+    lang = request.POST.get('lang', None)
+    cnx = request.cw_cnx
+    if lang is None:
+        cnx.execute('DELETE CWProperty X WHERE X for_user U, U eid %(u)s',
+                    {'u': cnx.user.eid})
+    else:
+        cnx.user.set_property(u'ui.language', text_type(lang))
+    cnx.commit()
+
+    request.response.text = text_type(cnx.user.properties.get('ui.language', ''))
+    return request.response
+
+
+def add_remove_group(request):
+    add_remove = request.POST['add_remove']
+    cnx = request.cw_cnx
+    if add_remove == 'add':
+        cnx.execute('SET U in_group G WHERE G name "users", U eid %(u)s',
+                    {'u': cnx.user.eid})
+    else:
+        cnx.execute('DELETE U in_group G WHERE G name "users", U eid %(u)s',
+                    {'u': cnx.user.eid})
+    cnx.commit()
+
+    request.response.text = text_type(','.join(sorted(cnx.user.groups)))
+    return request.response
+
+
+class SessionSyncHooksTC(PyramidCWTest):
+
+    def includeme(self, config):
+        for view in (set_language, add_remove_group):
+            config.add_route(view.__name__, '/' + view.__name__)
+            config.add_view(view, route_name=view.__name__)
+
+    def setUp(self):
+        super(SessionSyncHooksTC, self).setUp()
+        with self.admin_access.repo_cnx() as cnx:
+            self.admin_eid = cnx.user.eid
+
+    def test_sync_props(self):
+        # initialize a pyramid session using admin credentials
+        res = self.webapp.post('/login', {
+            '__login': self.admlogin, '__password': self.admpassword})
+        self.assertEqual(res.status_int, 303)
+        # new property
+        res = self.webapp.post('/set_language', {'lang': 'fr'})
+        self.assertEqual(res.text, 'fr')
+        # updated property
+        res = self.webapp.post('/set_language', {'lang': 'en'})
+        self.assertEqual(res.text, 'en')
+        # removed property
+        res = self.webapp.post('/set_language')
+        self.assertEqual(res.text, '')
+
+    def test_sync_groups(self):
+        # initialize a pyramid session using admin credentials
+        res = self.webapp.post('/login', {
+            '__login': self.admlogin, '__password': self.admpassword})
+        self.assertEqual(res.status_int, 303)
+        # XXX how to get pyramid request using this session?
+        res = self.webapp.post('/add_remove_group', {'add_remove': 'add'})
+        self.assertEqual(res.text, 'managers,users')
+        res = self.webapp.post('/add_remove_group', {'add_remove': 'remove'})
+        self.assertEqual(res.text, 'managers')
+
+
+if __name__ == '__main__':
+    from unittest import main
+    main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/pyramid/test/test_login.py	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,51 @@
+from cubicweb.pyramid.test import PyramidCWTest
+
+
+class LoginTest(PyramidCWTest):
+    def test_login_form(self):
+        res = self.webapp.get('/login')
+        self.assertIn('__login', res.text)
+
+    def test_login_password_login(self):
+        res = self.webapp.post('/login', {
+            '__login': self.admlogin, '__password': self.admpassword})
+        self.assertEqual(res.status_int, 303)
+
+        res = self.webapp.get('/login')
+        self.assertEqual(res.status_int, 303)
+
+    def test_login_password_login_cookie_expires(self):
+        res = self.webapp.post('/login', {
+            '__login': self.admlogin, '__password': self.admpassword})
+        self.assertEqual(res.status_int, 303)
+
+        cookies = self.webapp.cookiejar._cookies['localhost.local']['/']
+        self.assertNotIn('pauth_tkt', cookies)
+        self.assertIn('auth_tkt', cookies)
+        self.assertIsNone(cookies['auth_tkt'].expires)
+
+        res = self.webapp.get('/logout')
+        self.assertEqual(res.status_int, 303)
+
+        self.assertNotIn('auth_tkt', cookies)
+        self.assertNotIn('pauth_tkt', cookies)
+
+        res = self.webapp.post('/login', {
+            '__login': self.admlogin, '__password': self.admpassword,
+            '__setauthcookie': 1})
+        self.assertEqual(res.status_int, 303)
+
+        cookies = self.webapp.cookiejar._cookies['localhost.local']['/']
+        self.assertNotIn('auth_tkt', cookies)
+        self.assertIn('pauth_tkt', cookies)
+        self.assertIsNotNone(cookies['pauth_tkt'].expires)
+
+    def test_login_bad_password(self):
+        res = self.webapp.post('/login', {
+            '__login': self.admlogin, '__password': 'empty'}, status=403)
+        self.assertIn('Authentication failed', res.text)
+
+
+if __name__ == '__main__':
+    from unittest import main
+    main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/pyramid/test/test_rest_api.py	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,59 @@
+from __future__ import absolute_import
+
+from cubicweb.pyramid.rest_api import EntityResource
+from cubicweb.pyramid.core import CubicWebPyramidRequest
+from pyramid.view import view_config
+
+from cubicweb.pyramid.test import PyramidCWTest
+
+
+class RestApiTest(PyramidCWTest):
+    def includeme(self, config):
+        config.include('cubicweb.pyramid.rest_api')
+        config.include('cubicweb.pyramid.test.test_rest_api')
+
+    def test_delete(self):
+        with self.admin_access.repo_cnx() as cnx:
+            cnx.create_entity('CWGroup', name=u'tmp')
+            cnx.commit()
+
+        self.login()
+        res = self.webapp.delete('/cwgroup/tmp')
+        self.assertEqual(res.status_int, 204)
+
+        with self.admin_access.repo_cnx() as cnx:
+            self.assertEqual(cnx.find('CWGroup', name=u'tmp').rowcount, 0)
+
+    def test_rql_execute(self):
+        with self.admin_access.repo_cnx() as cnx:
+            cnx.create_entity('CWGroup', name=u'tmp')
+            cnx.commit()
+        self.login()
+        params = {'test_rql_execute': 'test'}
+        self.webapp.get('/cwgroup/tmp', params=params)
+
+
+@view_config(
+    route_name='cwentities',
+    context=EntityResource,
+    request_method='GET',
+    request_param=('test_rql_execute',)
+)
+def rql_execute_view(context, request):
+    """Return 500 response if rset.req is not a CubicWeb request.
+    """
+    if isinstance(context.rset.req, CubicWebPyramidRequest):
+        request.response.status_int = 204
+    else:
+        request.response.status_int = 500
+        request.response.text = 'rset.req is not a CubicWeb request'
+    return request.response
+
+
+def includeme(config):
+    config.scan(__name__)
+
+
+if __name__ == '__main__':
+    from unittest import main
+    main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/pyramid/test/test_tools.py	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,31 @@
+from cubicweb.pyramid.test import PyramidCWTest
+from cubicweb.pyramid import tools
+
+
+class ToolsTest(PyramidCWTest):
+    anonymous_allowed = True
+
+    def test_clone_user(self):
+        with self.admin_access.repo_cnx() as cnx:
+            user = cnx.find('CWUser', login='anon').one()
+            user.login  # fill the cache
+            clone = tools.clone_user(self.repo, user)
+
+            self.assertEqual(clone.eid, user.eid)
+            self.assertEqual(clone.login, user.login)
+
+            self.assertEqual(clone.cw_rset.rows, user.cw_rset.rows)
+            self.assertEqual(clone.cw_rset.rql, user.cw_rset.rql)
+
+    def test_cnx_attach_entity(self):
+        with self.admin_access.repo_cnx() as cnx:
+            user = cnx.find('CWUser', login='anon').one()
+
+        with self.admin_access.repo_cnx() as cnx:
+            tools.cnx_attach_entity(cnx, user)
+            self.assertEqual(user.login, 'anon')
+
+
+if __name__ == '__main__':
+    from unittest import main
+    main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/pyramid/tools.py	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,76 @@
+"""Various tools.
+
+.. warning::
+
+    This module should be considered an internal implementation detail. Use
+    with caution, as the API may change without notice.
+"""
+
+#: A short-term cache for user clones.
+#: used by cached_build_user to speed up repetitive calls to build_user
+#: The expiration is handled in a dumb and brutal way: the whole cache is
+#: cleared every 5 minutes.
+_user_cache = {}
+
+
+def clone_user(repo, user):
+    """Clone a CWUser instance.
+
+    .. warning::
+
+        The returned clone is detached from any cnx.
+        Before using it in any way, it should be attached to a cnx that does
+        not already have this user loaded.
+    """
+    CWUser = repo.vreg['etypes'].etype_class('CWUser')
+    clone = CWUser(
+        None,
+        rset=user.cw_rset.copy(),
+        row=user.cw_row,
+        col=user.cw_col)
+    clone.cw_attr_cache = dict(user.cw_attr_cache)
+    return clone
+
+
+def cnx_attach_entity(cnx, entity):
+    """Attach an entity to a cnx."""
+    entity._cw = cnx
+    if entity.cw_rset:
+        entity.cw_rset.req = cnx
+
+
+def cached_build_user(repo, eid):
+    """Cached version of
+    :meth:`cubicweb.server.repository.Repository._build_user`
+    """
+    with repo.internal_cnx() as cnx:
+        if eid in _user_cache:
+            entity = clone_user(repo, _user_cache[eid])
+            # XXX the cnx is needed here so that the CWUser instance has
+            # access to the vreg, which it needs when its 'prefered_language'
+            # property is accessed.
+            # If this property did not need a cnx to access a vreg, we could
+            # avoid the internal_cnx() and save more time.
+            cnx_attach_entity(cnx, entity)
+            return entity
+
+        user = repo._build_user(cnx, eid)
+        user.cw_clear_relation_cache()
+        _user_cache[eid] = clone_user(repo, user)
+        return user
+
+
+def clear_cache():
+    """Clear the user cache"""
+    _user_cache.clear()
+
+
+def includeme(config):
+    """Start the cache maintenance loop task.
+
+    Automatically included by :func:`cubicweb.pyramid.make_cubicweb_application`.
+    """
+    repo = config.registry['cubicweb.repository']
+    interval = int(config.registry.settings.get(
+        'cubicweb.usercache.expiration_time', 60*5))
+    repo.looping_task(interval, clear_cache)
--- a/cubicweb/req.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/req.py	Thu Oct 20 18:28:46 2016 +0200
@@ -86,14 +86,10 @@
         connection too.
         """
         rset = self.eid_rset(orig_user.eid, 'CWUser')
-        user_cls = self.vreg['etypes'].etype_class('CWUser')
-        user = user_cls(self, rset, row=0, groups=orig_user.groups,
-                        properties=orig_user.properties)
-        user.cw_attr_cache['login'] = orig_user.login # cache login
+        user = self.vreg['etypes'].etype_class('CWUser')(self, rset, row=0)
+        user.cw_attr_cache['login'] = orig_user.login  # cache login
         self.user = user
         self.set_entity_cache(user)
-        self.set_language(user.prefered_language())
-
 
     def set_language(self, lang):
         """install i18n configuration for `lang` translation.
--- a/cubicweb/schema.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/schema.py	Thu Oct 20 18:28:46 2016 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2016 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -22,6 +22,7 @@
 
 import re
 from os.path import join, basename
+from hashlib import md5
 from logging import getLogger
 from warnings import warn
 
@@ -31,16 +32,15 @@
 from logilab.common import tempattr
 from logilab.common.decorators import cached, clear_cache, monkeypatch, cachedproperty
 from logilab.common.logging_ext import set_log_methods
-from logilab.common.deprecation import deprecated, class_moved, moved
+from logilab.common.deprecation import deprecated
 from logilab.common.textutils import splitstrip
 from logilab.common.graph import get_cycles
 
 import yams
 from yams import BadSchemaDefinition, buildobjs as ybo
 from yams.schema import Schema, ERSchema, EntitySchema, RelationSchema, \
-     RelationDefinitionSchema, PermissionMixIn, role_name
-from yams.constraints import (BaseConstraint, FormatConstraint, BoundaryConstraint,
-                              IntervalBoundConstraint, StaticVocabularyConstraint,
+    RelationDefinitionSchema, PermissionMixIn, role_name
+from yams.constraints import (BaseConstraint, FormatConstraint,
                               cstr_json_dumps, cstr_json_loads)
 from yams.reader import (CONSTRAINTS, PyFileReader, SchemaLoader,
                          cleanup_sys_modules, fill_schema_from_namespace)
@@ -51,14 +51,7 @@
 import cubicweb
 from cubicweb import ETYPE_NAME_MAP, ValidationError, Unauthorized, _
 
-try:
-    from cubicweb import server
-except ImportError:
-    # We need to lookup DEBUG from there,
-    # however a pure dbapi client may not have it.
-    class server(object): pass
-    server.DEBUG = False
-
+from cubicweb import server
 
 PURE_VIRTUAL_RTYPES = set(('identity', 'has_text',))
 VIRTUAL_RTYPES = set(('eid', 'identity', 'has_text',))
@@ -67,7 +60,7 @@
 META_RTYPES = set((
     'owned_by', 'created_by', 'is', 'is_instance_of', 'identity',
     'eid', 'creation_date', 'cw_source', 'modification_date', 'has_text', 'cwuri',
-    ))
+))
 WORKFLOW_RTYPES = set(('custom_workflow', 'in_state', 'wf_info_for'))
 WORKFLOW_DEF_RTYPES = set(('workflow_of', 'state_of', 'transition_of',
                            'initial_state', 'default_workflow',
@@ -97,14 +90,15 @@
     'constraint_of', 'relations',
     'read_permission', 'add_permission',
     'delete_permission', 'update_permission',
-    ))
+))
 
 WORKFLOW_TYPES = set(('Transition', 'State', 'TrInfo', 'Workflow',
                       'WorkflowTransition', 'BaseTransition',
                       'SubWorkflowExitPoint'))
 
 INTERNAL_TYPES = set(('CWProperty', 'CWCache', 'ExternalUri', 'CWDataImport',
-                      'CWSource', 'CWSourceHostConfig', 'CWSourceSchemaConfig'))
+                      'CWSource', 'CWSourceHostConfig', 'CWSourceSchemaConfig',
+                      'CWSession'))
 
 UNIQUE_CONSTRAINTS = ('SizeConstraint', 'FormatConstraint',
                       'StaticVocabularyConstraint',
@@ -116,11 +110,13 @@
 ybo.ETYPE_PROPERTIES += ('eid',)
 ybo.RTYPE_PROPERTIES += ('eid',)
 
+
 def build_schema_from_namespace(items):
     schema = CubicWebSchema('noname')
     fill_schema_from_namespace(schema, items, register_base_types=False)
     return schema
 
+
 # Bases for manipulating RQL in schema #########################################
 
 def guess_rrqlexpr_mainvars(expression):
@@ -137,6 +133,7 @@
                                   % expression)
     return mainvars
 
+
 def split_expression(rqlstring):
     for expr in rqlstring.split(','):
         for noparen1 in expr.split('('):
@@ -144,6 +141,7 @@
                 for word in noparen2.split():
                     yield word
 
+
 def normalize_expression(rqlstring):
     """normalize an rql expression to ease schema synchronization (avoid
     suppressing and reinserting an expression if only a space has been
@@ -162,35 +160,35 @@
     if len(formula_rqlst.children) != 1:
         raise BadSchemaDefinition('computed attribute %(attr)s on %(etype)s: '
                                   'can not use UNION in formula %(form)r' %
-                                  {'attr' : rdef.rtype,
-                                   'etype' : rdef.subject.type,
-                                   'form' : rdef.formula})
+                                  {'attr': rdef.rtype,
+                                   'etype': rdef.subject.type,
+                                   'form': rdef.formula})
     select = formula_rqlst.children[0]
     if len(select.selection) != 1:
         raise BadSchemaDefinition('computed attribute %(attr)s on %(etype)s: '
                                   'can only select one term in formula %(form)r' %
-                                  {'attr' : rdef.rtype,
-                                   'etype' : rdef.subject.type,
-                                   'form' : rdef.formula})
+                                  {'attr': rdef.rtype,
+                                   'etype': rdef.subject.type,
+                                   'form': rdef.formula})
     term = select.selection[0]
     types = set(term.get_type(sol) for sol in select.solutions)
     if len(types) != 1:
         raise BadSchemaDefinition('computed attribute %(attr)s on %(etype)s: '
                                   'multiple possible types (%(types)s) for formula %(form)r' %
-                                  {'attr' : rdef.rtype,
-                                   'etype' : rdef.subject.type,
-                                   'types' : list(types),
-                                   'form' : rdef.formula})
+                                  {'attr': rdef.rtype,
+                                   'etype': rdef.subject.type,
+                                   'types': list(types),
+                                   'form': rdef.formula})
     computed_type = types.pop()
     expected_type = rdef.object.type
     if computed_type != expected_type:
         raise BadSchemaDefinition('computed attribute %(attr)s on %(etype)s: '
                                   'computed attribute type (%(comp_type)s) mismatch with '
                                   'specified type (%(attr_type)s)' %
-                                  {'attr' : rdef.rtype,
-                                   'etype' : rdef.subject.type,
-                                   'comp_type' : computed_type,
-                                   'attr_type' : expected_type})
+                                  {'attr': rdef.rtype,
+                                   'etype': rdef.subject.type,
+                                   'comp_type': computed_type,
+                                   'attr_type': expected_type})
 
 
 class RQLExpression(object):
@@ -199,7 +197,7 @@
     """
     # these are overridden by set_log_methods below
     # only defining here to prevent pylint from complaining
-    info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None
+    info = warning = error = critical = exception = debug = lambda msg, *a, **kw: None
     # to be defined in concrete classes
     predefined_variables = None
 
@@ -221,7 +219,7 @@
         :param mainvars: names of the variables being selected.
 
         """
-        self.eid = eid # eid of the entity representing this rql expression
+        self.eid = eid  # eid of the entity representing this rql expression
         assert mainvars, 'bad mainvars %s' % mainvars
         if isinstance(mainvars, string_types):
             mainvars = set(splitstrip(mainvars))
@@ -267,8 +265,10 @@
 
     def __deepcopy__(self, memo):
         return self.__class__(self.expression, self.mainvars)
+
     def __getstate__(self):
         return (self.expression, self.mainvars)
+
     def __setstate__(self, state):
         self.__init__(*state)
 
@@ -279,7 +279,8 @@
         defined = set(split_expression(self.expression))
         for varname in self.predefined_variables:
             if varname in defined:
-                select.add_eid_restriction(select.get_variable(varname), varname.lower(), 'Substitute')
+                select.add_eid_restriction(select.get_variable(varname), varname.lower(),
+                                           'Substitute')
         return select
 
     # permission rql expression specific stuff #################################
@@ -297,8 +298,8 @@
                     prefix, action, suffix = rel.r_type.split('_')
                 except ValueError:
                     continue
-                if prefix != 'has' or suffix != 'permission' or \
-                       not action in ('add', 'delete', 'update', 'read'):
+                if (prefix != 'has' or suffix != 'permission' or
+                        action not in ('add', 'delete', 'update', 'read')):
                     continue
                 if found is None:
                     found = []
@@ -398,7 +399,6 @@
                                     self.expression)
 
 
-
 # rql expressions for use in permission definition #############################
 
 class ERQLExpression(RQLExpression):
@@ -413,7 +413,7 @@
                 if creating:
                     return self._check(_cw, creating=True, **kwargs)
                 return False
-            assert creating == False
+            assert not creating
             return self._check(_cw, x=eid, **kwargs)
         return self._check(_cw, **kwargs)
 
@@ -433,11 +433,9 @@
 
     def check_permission_definitions(self):
         super(CubicWebRelationDefinitionSchema, self).check_permission_definitions()
-        schema = self.subject.schema
         for action, groups in self.permissions.items():
             for group_or_rqlexpr in groups:
-                if action == 'read' and \
-                       isinstance(group_or_rqlexpr, RQLExpression):
+                if action == 'read' and isinstance(group_or_rqlexpr, RQLExpression):
                     msg = "can't use rql expression for read permission of %s"
                     raise BadSchemaDefinition(msg % self)
                 if self.final and isinstance(group_or_rqlexpr, RRQLExpression):
@@ -447,6 +445,7 @@
                     msg = "can't use ERQLExpression on %s, use a RRQLExpression"
                     raise BadSchemaDefinition(msg % self)
 
+
 def vargraph(rqlst):
     """ builds an adjacency graph of variables from the rql syntax tree, e.g:
     Any O,S WHERE T subworkflow_exit S, T subworkflow WF, O state_of WF
@@ -462,7 +461,6 @@
         else:
             vargraph.setdefault(lhsvarname, []).append(rhsvarname)
             vargraph.setdefault(rhsvarname, []).append(lhsvarname)
-            #vargraph[(lhsvarname, rhsvarname)] = relation.r_type
     return vargraph
 
 
@@ -511,31 +509,32 @@
 
 
 PUB_SYSTEM_ENTITY_PERMS = {
-    'read':   ('managers', 'users', 'guests',),
-    'add':    ('managers',),
+    'read': ('managers', 'users', 'guests',),
+    'add': ('managers',),
     'delete': ('managers',),
     'update': ('managers',),
-    }
+}
 PUB_SYSTEM_REL_PERMS = {
-    'read':   ('managers', 'users', 'guests',),
-    'add':    ('managers',),
+    'read': ('managers', 'users', 'guests',),
+    'add': ('managers',),
     'delete': ('managers',),
-    }
+}
 PUB_SYSTEM_ATTR_PERMS = {
-    'read':   ('managers', 'users', 'guests',),
+    'read': ('managers', 'users', 'guests',),
     'add': ('managers',),
     'update': ('managers',),
-    }
+}
 RO_REL_PERMS = {
-    'read':   ('managers', 'users', 'guests',),
-    'add':    (),
+    'read': ('managers', 'users', 'guests',),
+    'add': (),
     'delete': (),
-    }
+}
 RO_ATTR_PERMS = {
-    'read':   ('managers', 'users', 'guests',),
+    'read': ('managers', 'users', 'guests',),
     'add': ybo.DEFAULT_ATTRPERMS['add'],
     'update': (),
-    }
+}
+
 
 # XXX same algorithm as in reorder_cubes and probably other place,
 # may probably extract a generic function
@@ -568,6 +567,7 @@
                         continue
     return eschemas
 
+
 def bw_normalize_etype(etype):
     if etype in ETYPE_NAME_MAP:
         msg = '%s has been renamed to %s, please update your code' % (
@@ -576,6 +576,7 @@
         etype = ETYPE_NAME_MAP[etype]
     return etype
 
+
 def display_name(req, key, form='', context=None):
     """return a internationalized string for the key (schema entity or relation
     name) in a given form
@@ -601,6 +602,7 @@
     return display_name(req, self.type, form, context)
 ERSchema.display_name = ERSchema_display_name
 
+
 @cached
 def get_groups(self, action):
     """return the groups authorized to perform <action> on entities of
@@ -613,13 +615,13 @@
     :return: names of the groups with the given permission
     """
     assert action in self.ACTIONS, action
-    #assert action in self._groups, '%s %s' % (self, action)
     try:
         return frozenset(g for g in self.permissions[action] if isinstance(g, string_types))
     except KeyError:
         return ()
 PermissionMixIn.get_groups = get_groups
 
+
 @cached
 def get_rqlexprs(self, action):
     """return the rql expressions representing queries to check the user is allowed
@@ -632,14 +634,13 @@
     :return: the rql expressions with the given permission
     """
     assert action in self.ACTIONS, action
-    #assert action in self._rqlexprs, '%s %s' % (self, action)
     try:
         return tuple(g for g in self.permissions[action] if not isinstance(g, string_types))
     except KeyError:
         return ()
 PermissionMixIn.get_rqlexprs = get_rqlexprs
 
-orig_set_action_permissions = PermissionMixIn.set_action_permissions
+
 def set_action_permissions(self, action, permissions):
     """set the groups and rql expressions allowing to perform <action> on
     entities of this type
@@ -653,8 +654,10 @@
     orig_set_action_permissions(self, action, tuple(permissions))
     clear_cache(self, 'get_rqlexprs')
     clear_cache(self, 'get_groups')
+orig_set_action_permissions = PermissionMixIn.set_action_permissions
 PermissionMixIn.set_action_permissions = set_action_permissions
 
+
 def has_local_role(self, action):
     """return true if the action *may* be granted locally (i.e. either rql
     expressions or the owners group are used in security definition)
@@ -670,6 +673,7 @@
     return False
 PermissionMixIn.has_local_role = has_local_role
 
+
 def may_have_permission(self, action, req):
     if action != 'read' and not (self.has_local_role('read') or
                                  self.has_perm(req, 'read')):
@@ -677,6 +681,7 @@
     return self.has_local_role(action) or self.has_perm(req, action)
 PermissionMixIn.may_have_permission = may_have_permission
 
+
 def has_perm(self, _cw, action, **kwargs):
     """return true if the action is granted globally or locally"""
     try:
@@ -712,8 +717,8 @@
     # NB: give _cw to user.owns since user is not be bound to a transaction on
     # the repository side
     if 'owners' in groups and (
-          kwargs.get('creating')
-          or ('eid' in kwargs and _cw.user.owns(kwargs['eid']))):
+            kwargs.get('creating')
+            or ('eid' in kwargs and _cw.user.owns(kwargs['eid']))):
         if DBG:
             print('check_perm: %r %r: user is owner or creation time' %
                   (action, _self_str))
@@ -872,7 +877,7 @@
             # avoid deleting the relation type accidentally...
             self.schema['has_text'].del_relation_def(self, self.schema['String'])
 
-    def schema_entity(self): # XXX @property for consistency with meta
+    def schema_entity(self):  # XXX @property for consistency with meta
         """return True if this entity type is used to build the schema"""
         return self.type in SCHEMA_TYPES
 
@@ -910,7 +915,7 @@
     def meta(self):
         return self.type in META_RTYPES
 
-    def schema_relation(self): # XXX @property for consistency with meta
+    def schema_relation(self):  # XXX @property for consistency with meta
         """return True if this relation type is used to build the schema"""
         return self.type in SCHEMA_TYPES
 
@@ -936,7 +941,7 @@
             else:
                 subjtype = objtype = None
         else:
-            assert not 'eid' in kwargs, kwargs
+            assert 'eid' not in kwargs, kwargs
             assert action in ('read', 'add', 'delete')
             if 'fromeid' in kwargs:
                 subjtype = _cw.entity_metas(kwargs['fromeid'])['type']
@@ -1000,6 +1005,7 @@
         rschema.final = False
 
     etype_name_re = r'[A-Z][A-Za-z0-9]*[a-z]+[A-Za-z0-9]*$'
+
     def add_entity_type(self, edef):
         edef.name = str(edef.name)
         edef.name = bw_normalize_etype(edef.name)
@@ -1055,7 +1061,7 @@
             try:
                 self._eid_index[rdef.eid] = rdefs
             except AttributeError:
-                pass # not a serialized schema
+                pass  # not a serialized schema
         return rdefs
 
     def del_relation_type(self, rtype):
@@ -1111,8 +1117,7 @@
             select.add_type_restriction(select.defined_vars['X'], str(rdef.subject))
             analyzer.visit(select)
             _check_valid_formula(rdef, rqlst)
-            rdef.formula_select = select # avoid later recomputation
-
+            rdef.formula_select = select  # avoid later recomputation
 
     def finalize_computed_relations(self):
         """Build relation definitions for computed relations
@@ -1145,6 +1150,16 @@
 
 # additional cw specific constraints ###########################################
 
+@monkeypatch(BaseConstraint)
+def name_for(self, rdef):
+    """Return a unique, size controlled, name for this constraint applied to given `rdef`.
+
+    This name may be used as name for the constraint in the database.
+    """
+    return 'cstr' + md5((rdef.subject.type + rdef.rtype.type + self.type() +
+                         (self.serialize() or '')).encode('ascii')).hexdigest()
+
+
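# Illustrative sketch, not part of the patch: for a hypothetical relation definition
# rdef of attribute CWUser.login carrying a constraint `cstr`, name_for() produces a
# stable, length-bounded identifier usable as the database constraint name, equivalent to:
#
#     'cstr' + md5(('CWUser' + 'login' + cstr.type()
#                   + (cstr.serialize() or '')).encode('ascii')).hexdigest()
#
# check_constraint() in cubicweb/server/schema2sql.py relies on it (see below).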
 class BaseRQLConstraint(RRQLExpression, BaseConstraint):
     """base class for rql constraints"""
     distinct_query = None
@@ -1198,7 +1213,7 @@
     def repo_check(self, session, eidfrom, rtype, eidto):
         """raise ValidationError if the relation doesn't satisfy the constraint
         """
-        pass # this is a vocabulary constraint, not enforced
+        pass  # this is a vocabulary constraint, not enforced
 
 
 class RepoEnforcedRQLConstraintMixIn(object):
@@ -1293,6 +1308,7 @@
 
 from yams.buildobjs import _add_relation as yams_add_relation
 
+
 class workflowable_definition(ybo.metadefinition):
     """extends default EntityType's metaclass to add workflow relations
     (i.e. in_state, wf_info_for and custom_workflow). This is the default
@@ -1341,7 +1357,8 @@
 CONSTRAINTS['RQLConstraint'] = RQLConstraint
 CONSTRAINTS['RQLUniqueConstraint'] = RQLUniqueConstraint
 CONSTRAINTS['RQLVocabularyConstraint'] = RQLVocabularyConstraint
-CONSTRAINTS.pop('MultipleStaticVocabularyConstraint', None) # don't want this in cw yams schema
+# don't want MultipleStaticVocabularyConstraint in cw yams schema
+CONSTRAINTS.pop('MultipleStaticVocabularyConstraint', None)
 PyFileReader.context.update(CONSTRAINTS)
 
 
@@ -1362,7 +1379,7 @@
         # bootstraping, ignore cubes
         filepath = join(cubicweb.CW_SOFTWARE_ROOT, 'schemas', 'bootstrap.py')
         self.info('loading %s', filepath)
-        with tempattr(ybo, 'PACKAGE', 'cubicweb'): # though we don't care here
+        with tempattr(ybo, 'PACKAGE', 'cubicweb'):  # though we don't care here
             self.handle_file(filepath)
 
     def unhandled_file(self, filepath):
@@ -1371,7 +1388,8 @@
 
     # these are overridden by set_log_methods below
     # only defining here to prevent pylint from complaining
-    info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None
+    info = warning = error = critical = exception = debug = lambda msg, *a, **kw: None
+
 
 class CubicWebSchemaLoader(BootstrapSchemaLoader):
     """cubicweb specific schema loader, automatically adding metadata to the
@@ -1383,10 +1401,7 @@
         from <directory>
         """
         self.info('loading %s schemas', ', '.join(config.cubes()))
-        self.extrapath = {}
-        for cubesdir in config.cubes_search_path():
-            if cubesdir != config.CUBES_DIR:
-                self.extrapath[cubesdir] = 'cubes'
+        self.extrapath = config.extrapath
         if config.apphome:
             path = tuple(reversed([config.apphome] + config.cubes_path()))
         else:
@@ -1412,7 +1427,7 @@
 
     # these are overridden by set_log_methods below
     # only defining here to prevent pylint from complaining
-    info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None
+    info = warning = error = critical = exception = debug = lambda msg, *a, **kw: None
 
 
 set_log_methods(CubicWebSchemaLoader, getLogger('cubicweb.schemaloader'))
@@ -1424,6 +1439,7 @@
 MAY_USE_TEMPLATE_FORMAT = set(('managers',))
 NEED_PERM_FORMATS = [_('text/cubicweb-page-template')]
 
+
 @monkeypatch(FormatConstraint)
 def vocabulary(self, entity=None, form=None):
     cw = None
@@ -1432,11 +1448,11 @@
     elif form is not None:
         cw = form._cw
     if cw is not None:
-        if hasattr(cw, 'write_security'): # test it's a session and not a request
+        if hasattr(cw, 'write_security'):  # test it's a session and not a request
             # cw is a server session
-            hasperm = not cw.write_security or \
-                      not cw.is_hook_category_activated('integrity') or \
-                      cw.user.matching_groups(MAY_USE_TEMPLATE_FORMAT)
+            hasperm = (not cw.write_security or
+                       not cw.is_hook_category_activated('integrity') or
+                       cw.user.matching_groups(MAY_USE_TEMPLATE_FORMAT))
         else:
             hasperm = cw.user.matching_groups(MAY_USE_TEMPLATE_FORMAT)
         if hasperm:
@@ -1445,22 +1461,27 @@
 
 # XXX itou for some Statement methods
 from rql import stmts
-orig_get_etype = stmts.ScopeNode.get_etype
+
+
 def bw_get_etype(self, name):
     return orig_get_etype(self, bw_normalize_etype(name))
+orig_get_etype = stmts.ScopeNode.get_etype
 stmts.ScopeNode.get_etype = bw_get_etype
 
-orig_add_main_variable_delete = stmts.Delete.add_main_variable
+
 def bw_add_main_variable_delete(self, etype, vref):
     return orig_add_main_variable_delete(self, bw_normalize_etype(etype), vref)
+orig_add_main_variable_delete = stmts.Delete.add_main_variable
 stmts.Delete.add_main_variable = bw_add_main_variable_delete
 
-orig_add_main_variable_insert = stmts.Insert.add_main_variable
+
 def bw_add_main_variable_insert(self, etype, vref):
     return orig_add_main_variable_insert(self, bw_normalize_etype(etype), vref)
+orig_add_main_variable_insert = stmts.Insert.add_main_variable
 stmts.Insert.add_main_variable = bw_add_main_variable_insert
 
-orig_set_statement_type = stmts.Select.set_statement_type
+
 def bw_set_statement_type(self, etype):
     return orig_set_statement_type(self, bw_normalize_etype(etype))
+orig_set_statement_type = stmts.Select.set_statement_type
 stmts.Select.set_statement_type = bw_set_statement_type
--- a/cubicweb/schemas/base.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/schemas/base.py	Thu Oct 20 18:28:46 2016 +0200
@@ -23,7 +23,7 @@
 from yams.buildobjs import (EntityType, RelationType, RelationDefinition,
                             SubjectRelation,
                             String, TZDatetime, Datetime, Password, Interval,
-                            Boolean, UniqueConstraint)
+                            Boolean, Bytes, UniqueConstraint)
 from cubicweb.schema import (
     RQLConstraint, WorkflowableEntityType, ERQLExpression, RRQLExpression,
     PUB_SYSTEM_ENTITY_PERMS, PUB_SYSTEM_REL_PERMS, PUB_SYSTEM_ATTR_PERMS,
@@ -381,3 +381,17 @@
         'add':    ('managers', RRQLExpression('U has_update_permission S'),),
         'delete': ('managers', RRQLExpression('U has_update_permission S'),),
         }
+
+
+class CWSession(EntityType):
+    """Persistent session.
+
+    Used by cubicweb.pyramid to store the session data.
+    """
+    __permissions__ = {
+        'read':   ('managers',),
+        'add':    (),
+        'update': (),
+        'delete': (),
+    }
+    cwsessiondata = Bytes()
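# Editorial note, not part of the patch: since no group is granted 'add'/'update'/'delete',
# CWSession entities are presumably written by cubicweb.pyramid itself through an internal
# connection; a hypothetical storage path could look like
# (Binary being cubicweb's wrapper for Bytes values, `data` an assumed session dict):
#
#     with repo.internal_cnx() as cnx:
#         cnx.create_entity('CWSession', cwsessiondata=Binary(pickle.dumps(data)))
#         cnx.commit()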
--- a/cubicweb/server/__init__.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/server/__init__.py	Thu Oct 20 18:28:46 2016 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2016 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -24,9 +24,6 @@
 
 __docformat__ = "restructuredtext en"
 
-import sys
-from os.path import join, exists
-from glob import glob
 from contextlib import contextmanager
 
 from six import text_type, string_types
@@ -39,9 +36,9 @@
 
 from yams import BASE_GROUPS
 
-from cubicweb import CW_SOFTWARE_ROOT
 from cubicweb.appobject import AppObject
 
+
 class ShuttingDown(BaseException):
     """raised when trying to access some resources while the repository is
     shutting down. Inherit from BaseException so that `except Exception` won't
@@ -90,7 +87,7 @@
 #: more verbosity
 DBG_MORE = 128
 #: all level enabled
-DBG_ALL  = DBG_RQL + DBG_SQL + DBG_REPO + DBG_MS + DBG_HOOKS + DBG_OPS + DBG_SEC + DBG_MORE
+DBG_ALL = DBG_RQL + DBG_SQL + DBG_REPO + DBG_MS + DBG_HOOKS + DBG_OPS + DBG_SEC + DBG_MORE
 
 _SECURITY_ITEMS = []
 _SECURITY_CAPS = ['read', 'add', 'update', 'delete', 'transition']
@@ -98,6 +95,7 @@
 #: current debug mode
 DEBUG = 0
 
+
 @contextmanager
 def tunesecurity(items=(), capabilities=()):
     """Context manager to use in conjunction with DBG_SEC.
@@ -136,6 +134,7 @@
     _SECURITY_ITEMS[:] = olditems
     _SECURITY_CAPS[:] = oldactions
 
+
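# Illustrative sketch, not part of the patch: tunesecurity() narrows the DBG_SEC output to
# the listed items and actions; a hypothetical usage would be
#
#     set_debug(DBG_SEC)
#     with tunesecurity(items=('CWUser',), capabilities=('update',)):
#         ...  # only security checks about updating CWUser are traced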
 def set_debug(debugmode):
     """change the repository debugging mode"""
     global DEBUG
@@ -148,6 +147,7 @@
     else:
         DEBUG |= debugmode
 
+
 class debugged(object):
     """Context manager and decorator to help debug the repository.
 
@@ -184,7 +184,6 @@
     def __call__(self, func):
         """decorate function"""
         def wrapped(*args, **kwargs):
-            _clevel = DEBUG
             set_debug(self.debugmode)
             try:
                 return func(*args, **kwargs)
@@ -192,6 +191,7 @@
                 set_debug(self._clevel)
         return wrapped
 
+
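# Illustrative sketch, not part of the patch: `debugged` can be used either as a context
# manager or as a decorator to temporarily raise the debug level, e.g.
#
#     with debugged(DBG_RQL | DBG_SQL):
#         cnx.execute('Any X WHERE X is CWUser')   # hypothetical query, RQL and SQL traced
#
#     @debugged(DBG_HOOKS)
#     def test_hooks(self):
#         ...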
 # database initialization ######################################################
 
 def create_user(session, login, pwd, *groups):
@@ -203,6 +203,7 @@
                         {'u': user.eid, 'group': text_type(group)})
     return user
 
+
 def init_repository(config, interactive=True, drop=False, vreg=None,
                     init_config=None):
     """initialise a repository database by creating tables add filling them
@@ -261,9 +262,7 @@
         # they are used sometimes by generated sql. Keeping them empty is much
         # simpler than fixing this...
         schemasql = sqlschema(schema, driver)
-        #skip_entities=[str(e) for e in schema.entities()
-        #               if not repo.system_source.support_entity(str(e))])
-        failed = sqlexec(schemasql, execute, pbtitle=_title, delimiter=';;')
+        failed = sqlexec(schemasql, execute, pbtitle=_title)
         if failed:
             print('The following SQL statements failed. You should check your schema.')
             print(failed)
@@ -291,18 +290,18 @@
             cnx.create_entity('CWGroup', name=text_type(group))
         admin = create_user(cnx, login, pwd, u'managers')
         cnx.execute('SET X owned_by U WHERE X is IN (CWGroup,CWSource), U eid %(u)s',
-                        {'u': admin.eid})
+                    {'u': admin.eid})
         cnx.commit()
     repo.shutdown()
     # re-login using the admin user
-    config._cubes = None # avoid assertion error
+    config._cubes = None  # avoid assertion error
     repo = get_repository(config=config)
     # replace previous schema by the new repo's one. This is necessary so that we give the proper
     # schema to `initialize_schema` above since it will initialize .eid attribute of schema elements
     schema = repo.schema
     with connect(repo, login, password=pwd) as cnx:
         with cnx.security_enabled(False, False):
-            repo.system_source.eid = ssource.eid # redo this manually
+            repo.system_source.eid = ssource.eid  # redo this manually
             handler = config.migration_handler(schema, interactive=False,
                                                cnx=cnx, repo=repo)
             # serialize the schema
@@ -350,7 +349,7 @@
 
 
 # sqlite'stored procedures have to be registered at connection opening time
-from logilab.database import SQL_CONNECT_HOOKS
+from logilab.database import SQL_CONNECT_HOOKS  # noqa
 
 # add to this set relations which should have their add security checking done
 # *BEFORE* adding the actual relation (done after by default)
--- a/cubicweb/server/migractions.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/server/migractions.py	Thu Oct 20 18:28:46 2016 +0200
@@ -138,10 +138,12 @@
         while True:
             try:
                 self.cnx = repoapi.connect(self.repo, login, password=pwd)
-                if not 'managers' in self.cnx.user.groups:
-                    print('migration need an account in the managers group')
-                else:
-                    break
+                with self.cnx:  # needed to retrieve user's groups
+                    if 'managers' not in self.cnx.user.groups:
+                        print('migration needs an account in the managers group')
+                    else:
+                        break
+                self.cnx._open = None  # XXX needed to reuse it later
             except AuthenticationError:
                 print('wrong user/password')
             except (KeyboardInterrupt, EOFError):
@@ -1531,11 +1533,9 @@
         and a sql database
         """
         dbhelper = self.repo.system_source.dbhelper
-        tablesql = eschema2sql(dbhelper, self.repo.schema.eschema(etype),
-                               prefix=SQL_PREFIX)
-        for sql in tablesql.split(';'):
-            if sql.strip():
-                self.sqlexec(sql)
+        for sql in eschema2sql(dbhelper, self.repo.schema.eschema(etype),
+                               prefix=SQL_PREFIX):
+            self.sqlexec(sql)
         if commit:
             self.commit()
 
@@ -1544,10 +1544,8 @@
         This may be useful on accidental desync between the repository schema
         and a sql database
         """
-        tablesql = rschema2sql(self.repo.schema.rschema(rtype))
-        for sql in tablesql.split(';'):
-            if sql.strip():
-                self.sqlexec(sql)
+        for sql in rschema2sql(self.repo.schema.rschema(rtype)):
+            self.sqlexec(sql)
         if commit:
             self.commit()
 
--- a/cubicweb/server/repository.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/server/repository.py	Thu Oct 20 18:28:46 2016 +0200
@@ -234,7 +234,12 @@
                 # set eids on entities schema
                 with self.internal_cnx() as cnx:
                     for etype, eid in cnx.execute('Any XN,X WHERE X is CWEType, X name XN'):
-                        self.schema.eschema(etype).eid = eid
+                        try:
+                            self.schema.eschema(etype).eid = eid
+                        except KeyError:
+                            # the etype exists in the database but not in the file system
+                            # schema; this may happen during development and shouldn't crash
+                            self.warning('No %s entity type in the file system schema', etype)
         else:
             # normal start: load the instance schema from the database
             self.info('loading schema from the repository')
@@ -475,13 +480,7 @@
         st.add_eid_restriction(st.get_variable('X'), 'x', 'Substitute')
         rset = cnx.execute(st.as_string(), {'x': eid})
         assert len(rset) == 1, rset
-        cwuser = rset.get_entity(0, 0)
-        # pylint: disable=W0104
-        # prefetch / cache cwuser's groups and properties. This is especially
-        # useful for internal sessions to avoid security insertions
-        cwuser.groups
-        cwuser.properties
-        return cwuser
+        return rset.get_entity(0, 0)
 
     # public (dbapi) interface ################################################
 
--- a/cubicweb/server/rqlannotation.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/server/rqlannotation.py	Thu Oct 20 18:28:46 2016 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2016 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -18,21 +18,19 @@
 """Functions to add additional annotations on a rql syntax tree to ease later
 code generation.
 """
+
 from __future__ import print_function
 
-__docformat__ = "restructuredtext en"
+from rql import BadRQLQuery
+from rql.nodes import Relation, VariableRef, Constant, Variable, Or
+from rql.utils import common_parent
 
-from rql import BadRQLQuery
-from rql.nodes import Relation, VariableRef, Constant, Variable, Or, Exists
-from rql.utils import common_parent
 
 def _annotate_select(annotator, rqlst):
     has_text_query = False
     for subquery in rqlst.with_:
         if annotator._annotate_union(subquery.query):
             has_text_query = True
-    #if server.DEBUG:
-    #    print '-------- sql annotate', repr(rqlst)
     getrschema = annotator.schema.rschema
     for var in rqlst.defined_vars.values():
         stinfo = var.stinfo
@@ -49,15 +47,16 @@
             stinfo['invariant'] = True
             stinfo['principal'] = None
             continue
-        if any(rel for rel in stinfo['relations'] if rel.r_type == 'eid' and rel.operator() != '=') and \
-               not any(r for r in var.stinfo['relations'] - var.stinfo['rhsrelations']
-                       if r.r_type != 'eid' and (getrschema(r.r_type).inlined or getrschema(r.r_type).final)):
+        if (any(rel for rel in stinfo['relations'] if rel.r_type == 'eid' and rel.operator() != '=')
+                and not any(r for r in var.stinfo['relations'] - var.stinfo['rhsrelations']
+                            if r.r_type != 'eid'
+                            and (getrschema(r.r_type).inlined or getrschema(r.r_type).final))):
             # Any X WHERE X eid > 2
             # those particular queries should be executed using the system entities table
             stinfo['invariant'] = True
             stinfo['principal'] = None
             continue
-        if stinfo['selected'] and var.valuable_references() == 1+bool(stinfo['constnode']):
+        if stinfo['selected'] and var.valuable_references() == 1 + bool(stinfo['constnode']):
             # "Any X", "Any X, Y WHERE X attr Y"
             stinfo['invariant'] = False
             continue
@@ -74,21 +73,21 @@
                 if not (onlhs and len(stinfo['relations']) > 1):
                     break
                 if not stinfo['constnode']:
-                    joins.add( (rel, role) )
+                    joins.add((rel, role))
                 continue
             elif rel.r_type == 'identity':
                 # identity can't be used as principal, so check other relation are used
                 # XXX explain rhs.operator == '='
-                if rhs.operator != '=' or len(stinfo['relations']) <= 1: #(stinfo['constnode'] and rhs.operator == '='):
+                if rhs.operator != '=' or len(stinfo['relations']) <= 1:
                     break
-                joins.add( (rel, role) )
+                joins.add((rel, role))
                 continue
             rschema = getrschema(rel.r_type)
             if rel.optional:
                 if rel in stinfo.get('optrelations', ()):
                     # optional variable can't be invariant if this is the lhs
                     # variable of an inlined relation
-                    if not rel in stinfo['rhsrelations'] and rschema.inlined:
+                    if rel not in stinfo['rhsrelations'] and rschema.inlined:
                         break
                 # variable used as main variable of an optional relation can't
                 # be invariant, unless we can use some other relation as
@@ -109,7 +108,7 @@
                     # need join anyway if the variable appears in a final or
                     # inlined relation
                     break
-                joins.add( (rel, role) )
+                joins.add((rel, role))
                 continue
             if not stinfo['constnode']:
                 if rschema.inlined and rel.neged(strict=True):
@@ -120,7 +119,7 @@
                     # can use N.ecrit_par as principal
                     if (stinfo['selected'] or len(stinfo['relations']) > 1):
                         break
-            joins.add( (rel, role) )
+            joins.add((rel, role))
         else:
             # if there is at least one ambigous relation and no other to
             # restrict types, can't be invariant since we need to filter out
@@ -151,11 +150,11 @@
     return has_text_query
 
 
-
 class CantSelectPrincipal(Exception):
     """raised when no 'principal' variable can be found"""
 
-def _select_principal(scope, relations, _sort=lambda x:x):
+
+def _select_principal(scope, relations, _sort=lambda x: x):
     """given a list of rqlst relations, select one which will be used to
     represent an invariant variable (e.g. using on extremity of the relation
     instead of the variable's type table
@@ -200,6 +199,7 @@
     # duplicates, so we should have to check cardinality
     raise CantSelectPrincipal()
 
+
 def _select_main_var(relations):
     """given a list of rqlst relations, select one which will be used as main
     relation for the rhs variable
@@ -209,8 +209,9 @@
     # sort for test predictability
     for rel in sorted(relations, key=lambda x: (x.children[0].name, x.r_type)):
         # only equality relation with a variable as rhs may be principal
-        if rel.operator() not in ('=', 'IS') \
-               or not isinstance(rel.children[1].children[0], VariableRef) or rel.neged(strict=True):
+        if (rel.operator() not in ('=', 'IS')
+                or not isinstance(rel.children[1].children[0], VariableRef)
+                or rel.neged(strict=True)):
             continue
         if rel.optional:
             others.append(rel)
@@ -259,7 +260,7 @@
           syntax tree or because a solution for this variable has been removed
           due to security filtering)
         """
-        #assert rqlst.TYPE == 'select', rqlst
+        # assert rqlst.TYPE == 'select', rqlst
         rqlst.has_text_query = self._annotate_union(rqlst)
 
     def _annotate_union(self, union):
@@ -276,7 +277,7 @@
         # interesting in multi-sources cases, as it may avoid a costly query
         # on sources to get all entities of a given type to achieve this, while
         # we have all the necessary information.
-        root = var.stmt.root # Union node
+        root = var.stmt.root  # Union node
         # rel.scope -> Select or Exists node, so add .parent to get Union from
         # Select node
         rels = [rel for rel in var.stinfo['relations'] if rel.scope.parent is root]
@@ -319,8 +320,8 @@
     def compute(self, rqlst):
         # set domains for each variable
         for varname, var in rqlst.defined_vars.items():
-            if var.stinfo['uidrel'] is not None or \
-                   self.eschema(rqlst.solutions[0][varname]).final:
+            if (var.stinfo['uidrel'] is not None
+                    or self.eschema(rqlst.solutions[0][varname]).final):
                 ptypes = var.stinfo['possibletypes']
             else:
                 ptypes = set(self.nfdomain)
@@ -356,14 +357,15 @@
 
     def _debug_print(self):
         print('varsols', dict((x, sorted(str(v) for v in values))
-                               for x, values in self.varsols.items()))
+                              for x, values in self.varsols.items()))
         print('ambiguous vars', sorted(self.ambiguousvars))
 
     def set_rel_constraint(self, term, rel, etypes_func):
         if isinstance(term, VariableRef) and self.is_ambiguous(term.variable):
             var = term.variable
-            if len(var.stinfo['relations']) == 1 \
-                   or rel.scope is var.scope or rel.r_type == 'identity':
+            if (len(var.stinfo['relations']) == 1
+                    or rel.scope is var.scope
+                    or rel.r_type == 'identity'):
                 self.restrict(var, frozenset(etypes_func()))
                 try:
                     self.maydeambrels[var].add(rel)
@@ -378,7 +380,7 @@
         # XXX isinstance(other.variable, Variable) to skip column alias
         if isinstance(other, VariableRef) and isinstance(other.variable, Variable):
             deambiguifier = other.variable
-            if not var is self.deambification_map.get(deambiguifier):
+            if var is not self.deambification_map.get(deambiguifier):
                 if var.stinfo['typerel'] is None:
                     otheretypes = deambiguifier.stinfo['possibletypes']
                 elif not self.is_ambiguous(deambiguifier):
--- a/cubicweb/server/schema2sql.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/server/schema2sql.py	Thu Oct 20 18:28:46 2016 +0200
@@ -39,9 +39,9 @@
 def sql_create_index(self, table, column, unique=False):
     idx = self._index_name(table, column, unique)
     if unique:
-        return 'ALTER TABLE %s ADD CONSTRAINT %s UNIQUE(%s);' % (table, idx, column)
+        return 'ALTER TABLE %s ADD CONSTRAINT %s UNIQUE(%s)' % (table, idx, column)
     else:
-        return 'CREATE INDEX %s ON %s(%s);' % (idx, table, column)
+        return 'CREATE INDEX %s ON %s(%s)' % (idx, table, column)
 
 
 @monkeypatch(database._GenericAdvFuncHelper)
@@ -53,104 +53,71 @@
 
 
 def build_index_name(table, columns, prefix='idx_'):
+    """Return a predictable-but-size-constrained name for an index on `table(*columns)`, using an
+    md5 hash.
+    """
     return '%s%s' % (prefix, md5((table +
                                   ',' +
                                   ','.join(sorted(columns))).encode('ascii')).hexdigest())
 
 
 def rschema_has_table(rschema, skip_relations):
-    """Return True if the given schema should have a table in the database"""
+    """Return True if the given schema should have a table in the database."""
     return not (rschema.final or rschema.inlined or rschema.rule or rschema.type in skip_relations)
 
 
 def schema2sql(dbhelper, schema, skip_entities=(), skip_relations=(), prefix=''):
-    """write to the output stream a SQL schema to store the objects
-    corresponding to the given schema
+    """Yield SQL statements to create a database schema for the given Yams schema.
+
+    `prefix` may be a string that will be prepended to all table / column names (usually, 'cw_').
     """
-    output = []
-    w = output.append
     for etype in sorted(schema.entities()):
         eschema = schema.eschema(etype)
         if eschema.final or eschema.type in skip_entities:
             continue
-        w(eschema2sql(dbhelper, eschema, skip_relations, prefix=prefix))
+        for sql in eschema2sql(dbhelper, eschema, skip_relations, prefix):
+            yield sql
     for rtype in sorted(schema.relations()):
         rschema = schema.rschema(rtype)
         if rschema_has_table(rschema, skip_relations):
-            w(rschema2sql(rschema))
-    return '\n'.join(output)
+            for sql in rschema2sql(rschema):
+                yield sql
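# Illustrative sketch, not part of the patch: schema2sql() is now a generator yielding
# individual statements without trailing ';', so callers simply loop and execute
# (`cursor` being an assumed DB-API cursor):
#
#     for statement in schema2sql(dbhelper, schema, prefix='cw_'):
#         cursor.execute(statement)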
 
 
-def dropschema2sql(dbhelper, schema, skip_entities=(), skip_relations=(), prefix=''):
-    """write to the output stream a SQL schema to store the objects
-    corresponding to the given schema
+def unique_index_name(eschema, attrs):
+    """Return a predictable-but-size-constrained name for a multi-columns unique index on
+    given attributes of the entity schema (actually, the later may be a schema or a string).
     """
+    # keep giving eschema instead of table name for bw compat
+    table = text_type(eschema)
+    # unique_index_name is used as name of CWUniqueConstraint, hence it should be unicode
+    return text_type(build_index_name(table, attrs, 'unique_'))
+
+
+def iter_unique_index_names(eschema):
+    """Yield (attrs, index name) where attrs is a list of entity type's attribute names that should
+    be unique together, and index name the unique index name.
+    """
+    for attrs in eschema._unique_together or ():
+        yield attrs, unique_index_name(eschema, attrs)
+
+
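# Illustrative sketch, not part of the patch: for a hypothetical entity type Person
# declaring __unique_together__ = [('firstname', 'surname')], iter_unique_index_names()
# yields a single pair along the lines of
#
#     (('firstname', 'surname'), 'unique_' + md5(b'Person,firstname,surname').hexdigest())
#
# since build_index_name() joins the table name with the sorted column names.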
+def eschema2sql(dbhelper, eschema, skip_relations=(), prefix=''):
+    """Yield SQL statements to initialize database from an entity schema."""
+    table = prefix + eschema.type
     output = []
     w = output.append
-    for etype in sorted(schema.entities()):
-        eschema = schema.eschema(etype)
-        if eschema.final or eschema.type in skip_entities:
-            continue
-        stmts = dropeschema2sql(dbhelper, eschema, skip_relations, prefix=prefix)
-        for stmt in stmts:
-            w(stmt)
-    for rtype in sorted(schema.relations()):
-        rschema = schema.rschema(rtype)
-        if rschema_has_table(rschema, skip_relations):
-            w(droprschema2sql(rschema))
-    return '\n'.join(output)
-
-
-def eschema_attrs(eschema, skip_relations):
+    w('CREATE TABLE %s(' % (table))
     attrs = [attrdef for attrdef in eschema.attribute_definitions()
              if not attrdef[0].type in skip_relations]
     attrs += [(rschema, None)
               for rschema in eschema.subject_relations()
               if not rschema.final and rschema.inlined]
-    return attrs
-
-
-def unique_index_name(eschema, columns):
-    # keep giving eschema instead of table name for bw compat
-    table = text_type(eschema)
-    # unique_index_name is used as name of CWUniqueConstraint, hence it should be unicode
-    return text_type(build_index_name(table, columns, 'unique_'))
-
-
-def iter_unique_index_names(eschema):
-    for columns in eschema._unique_together or ():
-        yield columns, unique_index_name(eschema, columns)
-
-
-def dropeschema2sql(dbhelper, eschema, skip_relations=(), prefix=''):
-    """return sql to drop an entity type's table"""
-    # not necessary to drop indexes, that's implictly done when
-    # dropping the table, but we need to drop SQLServer views used to
-    # create multicol unique indices
-    statements = []
-    tablename = prefix + eschema.type
-    if eschema._unique_together is not None:
-        for columns, index_name in iter_unique_index_names(eschema):
-            cols = ['%s%s' % (prefix, col) for col in columns]
-            sqls = dbhelper.sqls_drop_multicol_unique_index(tablename, cols, index_name)
-            statements += sqls
-    statements += ['DROP TABLE %s;' % (tablename)]
-    return statements
-
-
-def eschema2sql(dbhelper, eschema, skip_relations=(), prefix=''):
-    """write an entity schema as SQL statements to stdout"""
-    output = []
-    w = output.append
-    table = prefix + eschema.type
-    w('CREATE TABLE %s(' % (table))
-    attrs = eschema_attrs(eschema, skip_relations)
     # XXX handle objectinline physical mode
     for i in range(len(attrs)):
         rschema, attrschema = attrs[i]
         if attrschema is not None:
-            sqltype = aschema2sql(dbhelper, eschema, rschema, attrschema,
-                                  indent=' ')
+            sqltype = aschema2sql(dbhelper, eschema, rschema, attrschema)
         else:  # inline relation
             sqltype = 'integer REFERENCES entities (eid)'
         if i == len(attrs) - 1:
@@ -160,32 +127,32 @@
     for rschema, aschema in attrs:
         if aschema is None:  # inline relation
             continue
-        attr = rschema.type
         rdef = rschema.rdef(eschema.type, aschema.type)
         for constraint in rdef.constraints:
-            cstrname, check = check_constraint(eschema, aschema, attr, constraint, dbhelper,
-                                               prefix=prefix)
+            cstrname, check = check_constraint(rdef, constraint, dbhelper, prefix=prefix)
             if cstrname is not None:
                 w(', CONSTRAINT %s CHECK(%s)' % (cstrname, check))
-    w(');')
+    w(')')
+    yield '\n'.join(output)
     # create indexes
     for i in range(len(attrs)):
         rschema, attrschema = attrs[i]
         if attrschema is None or eschema.rdef(rschema).indexed:
-            w(dbhelper.sql_create_index(table, prefix + rschema.type))
+            yield dbhelper.sql_create_index(table, prefix + rschema.type)
         if attrschema and any(isinstance(cstr, UniqueConstraint)
                               for cstr in eschema.rdef(rschema).constraints):
-            w(dbhelper.sql_create_index(table, prefix + rschema.type, unique=True))
-    for columns, index_name in iter_unique_index_names(eschema):
-        cols = ['%s%s' % (prefix, col) for col in columns]
-        sqls = dbhelper.sqls_create_multicol_unique_index(table, cols, index_name)
+            yield dbhelper.sql_create_index(table, prefix + rschema.type, unique=True)
+    for attrs, index_name in iter_unique_index_names(eschema):
+        columns = ['%s%s' % (prefix, attr) for attr in attrs]
+        sqls = dbhelper.sqls_create_multicol_unique_index(table, columns, index_name)
         for sql in sqls:
-            w(sql)
-    w('')
-    return '\n'.join(output)
+            yield sql.rstrip(';')  # remove trailing ';' for consistency
 
 
-def as_sql(value, dbhelper, prefix):
+def constraint_value_as_sql(value, dbhelper, prefix):
+    """Return the SQL value from a Yams constraint's value, handling special cases where it's a
+    `Attribute`, `TODAY` or `NOW` instance instead of a literal value.
+    """
     if isinstance(value, Attribute):
         return prefix + value.attr
     elif isinstance(value, TODAY):
@@ -197,20 +164,22 @@
         return value
 
 
-def check_constraint(eschema, aschema, attr, constraint, dbhelper, prefix=''):
-    # XXX should find a better name
-    cstrname = 'cstr' + md5((eschema.type + attr + constraint.type() +
-                             (constraint.serialize() or '')).encode('ascii')).hexdigest()
+def check_constraint(rdef, constraint, dbhelper, prefix=''):
+    """Return (constraint name, constraint SQL definition) for the given relation definition's
+    constraint. May be (None, None) if the constraint is not handled in the backend.
+    """
+    attr = rdef.rtype.type
+    cstrname = constraint.name_for(rdef)
     if constraint.type() == 'BoundaryConstraint':
-        value = as_sql(constraint.boundary, dbhelper, prefix)
+        value = constraint_value_as_sql(constraint.boundary, dbhelper, prefix)
         return cstrname, '%s%s %s %s' % (prefix, attr, constraint.operator, value)
     elif constraint.type() == 'IntervalBoundConstraint':
         condition = []
         if constraint.minvalue is not None:
-            value = as_sql(constraint.minvalue, dbhelper, prefix)
+            value = constraint_value_as_sql(constraint.minvalue, dbhelper, prefix)
             condition.append('%s%s >= %s' % (prefix, attr, value))
         if constraint.maxvalue is not None:
-            value = as_sql(constraint.maxvalue, dbhelper, prefix)
+            value = constraint_value_as_sql(constraint.maxvalue, dbhelper, prefix)
             condition.append('%s%s <= %s' % (prefix, attr, value))
         return cstrname, ' AND '.join(condition)
     elif constraint.type() == 'StaticVocabularyConstraint':
@@ -224,8 +193,8 @@
     return None, None
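# Illustrative sketch, not part of the patch: for a hypothetical attribute definition
# Person.age constrained by BoundaryConstraint('>=', 0), check_constraint(rdef, cstr,
# dbhelper, prefix='cw_') would return roughly
#
#     (cstr.name_for(rdef), 'cw_age >= 0')
#
# which eschema2sql() renders as ', CONSTRAINT <name> CHECK(cw_age >= 0)'.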
 
 
-def aschema2sql(dbhelper, eschema, rschema, aschema, creating=True, indent=''):
-    """write an attribute schema as SQL statements to stdout"""
+def aschema2sql(dbhelper, eschema, rschema, aschema, creating=True):
+    """Return string containing a SQL table's column definition from attribute schema."""
     attr = rschema.type
     rdef = rschema.rdef(eschema.type, aschema.type)
     sqltype = type_from_rdef(dbhelper, rdef)
@@ -253,7 +222,7 @@
 
 
 def type_from_rdef(dbhelper, rdef):
-    """return a sql type string corresponding to the relation definition"""
+    """Return a string containing SQL type name for the given relation definition."""
     constraints = list(rdef.constraints)
     sqltype = None
     if rdef.object.type == 'String':
@@ -269,6 +238,8 @@
 
 
 def sql_type(dbhelper, rdef):
+    """Return a string containing SQL type to use to store values of the given relation definition.
+    """
     sqltype = dbhelper.TYPE_MAPPING[rdef.object]
     if callable(sqltype):
         sqltype = sqltype(rdef)
@@ -283,56 +254,54 @@
 );
 
 CREATE INDEX %(from_idx)s ON %(table)s(eid_from);
-CREATE INDEX %(to_idx)s ON %(table)s(eid_to);"""
+CREATE INDEX %(to_idx)s ON %(table)s(eid_to)"""
 
 
 def rschema2sql(rschema):
+    """Yield SQL statements to create database table and indexes for a Yams relation schema."""
     assert not rschema.rule
     table = '%s_relation' % rschema.type
-    return _SQL_SCHEMA % {'table': table,
+    sqls = _SQL_SCHEMA % {'table': table,
                           'pkey_idx': build_index_name(table, ['eid_from', 'eid_to'], 'key_'),
                           'from_idx': build_index_name(table, ['eid_from'], 'idx_'),
                           'to_idx': build_index_name(table, ['eid_to'], 'idx_')}
-
-
-def droprschema2sql(rschema):
-    """return sql to drop a relation type's table"""
-    # not necessary to drop indexes, that's implictly done when dropping
-    # the table
-    return 'DROP TABLE %s_relation;' % rschema.type
+    for sql in sqls.split(';'):
+        yield sql.strip()
 
 
 def grant_schema(schema, user, set_owner=True, skip_entities=(), prefix=''):
-    """write to the output stream a SQL schema to store the objects
-    corresponding to the given schema
+    """Yield SQL statements to give all access (and ownership if `set_owner` is True) on the
+    database tables for the given Yams schema to `user`.
+
+    `prefix` may be a string that will be prepended to all table / column names (usually, 'cw_').
     """
-    output = []
-    w = output.append
     for etype in sorted(schema.entities()):
         eschema = schema.eschema(etype)
         if eschema.final or etype in skip_entities:
             continue
-        w(grant_eschema(eschema, user, set_owner, prefix=prefix))
+        for sql in grant_eschema(eschema, user, set_owner, prefix=prefix):
+            yield sql
     for rtype in sorted(schema.relations()):
         rschema = schema.rschema(rtype)
         if rschema_has_table(rschema, skip_relations=()):  # XXX skip_relations should be specified
-            w(grant_rschema(rschema, user, set_owner))
-    return '\n'.join(output)
+            for sql in grant_rschema(rschema, user, set_owner):
+                yield sql
 
 
 def grant_eschema(eschema, user, set_owner=True, prefix=''):
-    output = []
-    w = output.append
+    """Yield SQL statements to give all access (and ownership if `set_owner` is True) on the
+    database tables for the given Yams entity schema to `user`.
+    """
     etype = eschema.type
     if set_owner:
-        w('ALTER TABLE %s%s OWNER TO %s;' % (prefix, etype, user))
-    w('GRANT ALL ON %s%s TO %s;' % (prefix, etype, user))
-    return '\n'.join(output)
+        yield 'ALTER TABLE %s%s OWNER TO %s' % (prefix, etype, user)
+    yield 'GRANT ALL ON %s%s TO %s' % (prefix, etype, user)
 
 
 def grant_rschema(rschema, user, set_owner=True):
-    output = []
+    """Yield SQL statements to give all access (and ownership if `set_owner` is True) on the
+    database tables for the given Yams relation schema to `user`.
+    """
     if set_owner:
-        output.append('ALTER TABLE %s_relation OWNER TO %s;' % (rschema.type, user))
-    output.append('GRANT ALL ON %s_relation TO %s;' % (rschema.type, user))
-    return '\n'.join(output)
+        yield 'ALTER TABLE %s_relation OWNER TO %s' % (rschema.type, user)
+    yield 'GRANT ALL ON %s_relation TO %s' % (rschema.type, user)
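# Illustrative sketch, not part of the patch: the grant_* helpers are generators as well,
# so granting rights after creating the schema is another plain loop
# (`cursor` being an assumed DB-API cursor):
#
#     for statement in grant_schema(schema, 'cubicweb', set_owner=True, prefix='cw_'):
#         cursor.execute(statement)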
--- a/cubicweb/server/schemaserial.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/server/schemaserial.py	Thu Oct 20 18:28:46 2016 +0200
@@ -93,9 +93,8 @@
     dbhelper = repo.system_source.dbhelper
 
     # Computed Rtype
-    with cnx.ensure_cnx_set:
-        tables = set(t.lower() for t in dbhelper.list_tables(cnx.cnxset.cu))
-        has_computed_relations = 'cw_cwcomputedrtype' in tables
+    tables = set(t.lower() for t in dbhelper.list_tables(cnx.cnxset.cu))
+    has_computed_relations = 'cw_cwcomputedrtype' in tables
     # computed attribute
     try:
         cnx.system_sql("SELECT cw_formula FROM cw_CWAttribute")
--- a/cubicweb/server/session.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/server/session.py	Thu Oct 20 18:28:46 2016 +0200
@@ -305,7 +305,6 @@
         # other session utility
         if session.user.login == '__internal_manager__':
             self.user = session.user
-            self.set_language(self.user.prefered_language())
         else:
             self._set_user(session.user)
 
--- a/cubicweb/server/sources/native.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/server/sources/native.py	Thu Oct 20 18:28:46 2016 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2016 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -70,6 +70,7 @@
 NONSYSTEM_ETYPES = set()
 NONSYSTEM_RELATIONS = set()
 
+
 class LogCursor(object):
     def __init__(self, cursor):
         self.cu = cursor
@@ -142,12 +143,13 @@
     """check linked entity has not been redirected for this relation"""
     card = rdef.role_cardinality(role)
     if card in '?1' and tentity.related(rdef.rtype, role):
-        raise _UndoException(tentity._cw._(
+        msg = tentity._cw._(
             "Can't restore %(role)s relation %(rtype)s to entity %(eid)s which "
             "is already linked using this relation.")
-                            % {'role': neg_role(role),
-                               'rtype': rdef.rtype,
-                               'eid': tentity.eid})
+        raise _UndoException(msg % {'role': neg_role(role),
+                                    'rtype': rdef.rtype,
+                                    'eid': tentity.eid})
+
 
 def _undo_rel_info(cnx, subj, rtype, obj):
     entities = []
@@ -155,26 +157,27 @@
         try:
             entities.append(cnx.entity_from_eid(eid))
         except UnknownEid:
-            raise _UndoException(cnx._(
+            msg = cnx._(
                 "Can't restore relation %(rtype)s, %(role)s entity %(eid)s"
                 " doesn't exist anymore.")
-                                % {'role': cnx._(role),
-                                   'rtype': cnx._(rtype),
-                                   'eid': eid})
+            raise _UndoException(msg % {'role': cnx._(role),
+                                        'rtype': cnx._(rtype),
+                                        'eid': eid})
     sentity, oentity = entities
     try:
         rschema = cnx.vreg.schema.rschema(rtype)
         rdef = rschema.rdefs[(sentity.cw_etype, oentity.cw_etype)]
     except KeyError:
-        raise _UndoException(cnx._(
+        msg = cnx._(
             "Can't restore relation %(rtype)s between %(subj)s and "
             "%(obj)s, that relation does not exists anymore in the "
             "schema.")
-                            % {'rtype': cnx._(rtype),
-                               'subj': subj,
-                               'obj': obj})
+        raise _UndoException(msg % {'rtype': cnx._(rtype),
+                                    'subj': subj,
+                                    'obj': obj})
     return sentity, oentity, rdef
 
+
 def _undo_has_later_transaction(cnx, eid):
     return cnx.system_sql('''\
 SELECT T.tx_uuid FROM transactions AS TREF, transactions AS T
@@ -270,56 +273,56 @@
     sqlgen_class = SQLGenerator
     options = (
         ('db-driver',
-         {'type' : 'string',
+         {'type': 'string',
           'default': 'postgres',
           # XXX use choice type
           'help': 'database driver (postgres, sqlite, sqlserver2005)',
           'group': 'native-source', 'level': 0,
           }),
         ('db-host',
-         {'type' : 'string',
+         {'type': 'string',
           'default': '',
           'help': 'database host',
           'group': 'native-source', 'level': 1,
           }),
         ('db-port',
-         {'type' : 'string',
+         {'type': 'string',
           'default': '',
           'help': 'database port',
           'group': 'native-source', 'level': 1,
           }),
         ('db-name',
-         {'type' : 'string',
+         {'type': 'string',
           'default': Method('default_instance_id'),
           'help': 'database name',
           'group': 'native-source', 'level': 0,
           }),
         ('db-namespace',
-         {'type' : 'string',
+         {'type': 'string',
           'default': '',
           'help': 'database namespace (schema) name',
           'group': 'native-source', 'level': 1,
           }),
         ('db-user',
-         {'type' : 'string',
+         {'type': 'string',
           'default': CubicWebNoAppConfiguration.mode == 'user' and getlogin() or 'cubicweb',
           'help': 'database user',
           'group': 'native-source', 'level': 0,
           }),
         ('db-password',
-         {'type' : 'password',
+         {'type': 'password',
           'default': '',
           'help': 'database password',
           'group': 'native-source', 'level': 0,
           }),
         ('db-encoding',
-         {'type' : 'string',
+         {'type': 'string',
           'default': 'utf8',
           'help': 'database encoding',
           'group': 'native-source', 'level': 1,
           }),
         ('db-extra-arguments',
-         {'type' : 'string',
+         {'type': 'string',
           'default': '',
           'help': 'set to "Trusted_Connection" if you are using SQLServer and '
                   'want trusted authentication for the database connection',
@@ -421,7 +424,6 @@
         else:
             raise ValueError('Unknown format %r' % format)
 
-
     def restore(self, backupfile, confirm, drop, format='native'):
         """method called to restore a backup of source's data"""
         if self.repo.config.init_cnxset_pool:
@@ -438,13 +440,12 @@
             if self.repo.config.init_cnxset_pool:
                 self.open_source_connections()
 
-
     def init(self, activated, source_entity):
         try:
             # test if 'asource' column exists
             query = self.dbhelper.sql_add_limit_offset('SELECT asource FROM entities', 1)
             source_entity._cw.system_sql(query)
-        except Exception as ex:
+        except Exception:
             self.eid_type_source = self.eid_type_source_pre_131
         super(NativeSQLSource, self).init(activated, source_entity)
         self.init_creating(source_entity._cw.cnxset)
@@ -499,7 +500,7 @@
         try:
             self._rql_sqlgen.schema = schema
         except AttributeError:
-            pass # __init__
+            pass  # __init__
         for authentifier in self.authentifiers:
             authentifier.set_schema(self.schema)
         clear_cache(self, 'need_fti_indexation')
@@ -508,17 +509,17 @@
         """return true if the given entity's type is handled by this adapter
         if write is true, return true only if it's a RW support
         """
-        return not etype in NONSYSTEM_ETYPES
+        return etype not in NONSYSTEM_ETYPES
 
     def support_relation(self, rtype, write=False):
         """return true if the given relation's type is handled by this adapter
         if write is true, return true only if it's a RW support
         """
         if write:
-            return not rtype in NONSYSTEM_RELATIONS
+            return rtype not in NONSYSTEM_RELATIONS
         # due to current multi-sources implementation, the system source
         # can't claim not supporting a relation
-        return True #not rtype == 'content_for'
+        return True  # not rtype == 'content_for'
 
     @statsd_timeit
     def authenticate(self, cnx, login, **kwargs):
@@ -556,7 +557,7 @@
                 self._cache[cachekey] = sql, qargs, cbs
         args = self.merge_args(args, qargs)
         assert isinstance(sql, string_types), repr(sql)
-        cursor = self.doexec(cnx, sql, args)
+        cursor = cnx.system_sql(sql, args)
         results = self.process_result(cursor, cnx, cbs)
         assert dbg_results(results)
         return results
@@ -596,7 +597,7 @@
                             to_restore = handler(entity, attr)
                             restore_values.append((entity, attr, to_restore))
         try:
-            yield # 2/ execute the source's instructions
+            yield  # 2/ execute the source's instructions
         finally:
             # 3/ restore original values
             for entity, attr, value in restore_values:
@@ -631,7 +632,7 @@
             if cnx.ertype_supports_undo(entity.cw_etype):
                 attrs = [SQL_PREFIX + r.type
                          for r in entity.e_schema.subject_relations()
-                         if (r.final or r.inlined) and not r in VIRTUAL_RTYPES]
+                         if (r.final or r.inlined) and r not in VIRTUAL_RTYPES]
                 changes = self._save_attrs(cnx, entity, attrs)
                 self._record_tx_action(cnx, 'tx_entity_actions', u'D',
                                        etype=text_type(entity.cw_etype), eid=entity.eid,
@@ -642,12 +643,12 @@
 
     def add_relation(self, cnx, subject, rtype, object, inlined=False):
         """add a relation to the source"""
-        self._add_relations(cnx,  rtype, [(subject, object)], inlined)
+        self._add_relations(cnx, rtype, [(subject, object)], inlined)
         if cnx.ertype_supports_undo(rtype):
             self._record_tx_action(cnx, 'tx_relation_actions', u'A',
                                    eid_from=subject, rtype=text_type(rtype), eid_to=object)
 
-    def add_relations(self, cnx,  rtype, subj_obj_list, inlined=False):
+    def add_relations(self, cnx, rtype, subj_obj_list, inlined=False):
         """add a relations to the source"""
         self._add_relations(cnx, rtype, subj_obj_list, inlined)
         if cnx.ertype_supports_undo(rtype):
@@ -662,7 +663,7 @@
             attrs = [{'eid_from': subject, 'eid_to': object}
                      for subject, object in subj_obj_list]
             sql.append((self.sqlgen.insert('%s_relation' % rtype, attrs[0]), attrs))
-        else: # used by data import
+        else:  # used by data import
             etypes = {}
             for subject, object in subj_obj_list:
                 etype = cnx.entity_metas(subject)['type']
@@ -674,7 +675,7 @@
                 attrs = [{'cw_eid': subject, SQL_PREFIX + rtype: object}
                          for subject, object in subj_obj_list]
                 sql.append((self.sqlgen.update(SQL_PREFIX + etype, attrs[0],
-                                     ['cw_eid']),
+                                               ['cw_eid']),
                             attrs))
         for statement, attrs in sql:
             self.doexecmany(cnx, statement, attrs)
@@ -694,7 +695,7 @@
             column = SQL_PREFIX + rtype
             sql = 'UPDATE %s SET %s=NULL WHERE %seid=%%(eid)s' % (table, column,
                                                                   SQL_PREFIX)
-            attrs = {'eid' : subject}
+            attrs = {'eid': subject}
         else:
             attrs = {'eid_from': subject, 'eid_to': object}
             sql = self.sqlgen.delete('%s_relation' % rtype, attrs)
@@ -716,7 +717,7 @@
                 # during test we get those message when trying to alter sqlite
                 # db schema
                 self.info("sql: %r\n args: %s\ndbms message: %r",
-                              query, args, ex.args[0])
+                          query, args, ex.args[0])
             if rollback:
                 try:
                     cnx.cnxset.rollback()
@@ -847,7 +848,7 @@
             self.exception('failed to query entities table for eid %s', eid)
         raise UnknownEid(eid)
 
-    def eid_type_source(self, cnx, eid): # pylint: disable=E0202
+    def eid_type_source(self, cnx, eid):  # pylint: disable=E0202
         """return a tuple (type, extid, source) for the entity with id <eid>"""
         sql = 'SELECT type, extid, asource FROM entities WHERE eid=%s' % eid
         res = self._eid_type_source(cnx, eid, sql)
@@ -916,15 +917,18 @@
         # insert core relations: is, is_instance_of and cw_source
 
         if entity.e_schema.eid is not None:  # else schema has not yet been serialized
-            self._handle_is_relation_sql(cnx, 'INSERT INTO is_relation(eid_from,eid_to) VALUES (%s,%s)',
-                                         (entity.eid, entity.e_schema.eid))
+            self._handle_is_relation_sql(
+                cnx, 'INSERT INTO is_relation(eid_from,eid_to) VALUES (%s,%s)',
+                (entity.eid, entity.e_schema.eid))
             for eschema in entity.e_schema.ancestors() + [entity.e_schema]:
-                self._handle_is_relation_sql(cnx,
-                                             'INSERT INTO is_instance_of_relation(eid_from,eid_to) VALUES (%s,%s)',
-                                             (entity.eid, eschema.eid))
+                self._handle_is_relation_sql(
+                    cnx,
+                    'INSERT INTO is_instance_of_relation(eid_from,eid_to) VALUES (%s,%s)',
+                    (entity.eid, eschema.eid))
         if source.eid is not None:  # else the source has not yet been inserted
-            self._handle_is_relation_sql(cnx, 'INSERT INTO cw_source_relation(eid_from,eid_to) VALUES (%s,%s)',
-                                         (entity.eid, source.eid))
+            self._handle_is_relation_sql(
+                cnx, 'INSERT INTO cw_source_relation(eid_from,eid_to) VALUES (%s,%s)',
+                (entity.eid, source.eid))
         # now we can update the full text index
         if self.need_fti_indexation(entity.cw_etype):
             self.index_entity(cnx, entity=entity)
@@ -969,9 +973,9 @@
         if actionfilters:
             # we will need subqueries to filter transactions according to
             # actions done
-            tearestr = {} # filters on the tx_entity_actions table
-            trarestr = {} # filters on the tx_relation_actions table
-            genrestr = {} # generic filters, appliyable to both table
+            tearestr = {}  # filters on the tx_entity_actions table
+            trarestr = {}  # filters on the tx_relation_actions table
+            genrestr = {}  # generic filters, applicable to both tables
             # unless public explicitly set to false, we only consider public
             # actions
             if actionfilters.pop('public', True):
@@ -982,7 +986,7 @@
                     # filtering on etype implies filtering on entity actions
                     # only, and with no eid specified
                     assert actionfilters.get('action', 'C') in 'CUD'
-                    assert not 'eid' in actionfilters
+                    assert 'eid' not in actionfilters
                     tearestr['etype'] = text_type(val)
                 elif key == 'eid':
                     # eid filter may apply to 'eid' of tx_entity_actions or to
@@ -1044,16 +1048,14 @@
         sql = self.sqlgen.select('tx_entity_actions', restr,
                                  ('txa_action', 'txa_public', 'txa_order',
                                   'etype', 'eid', 'changes'))
-        with cnx.ensure_cnx_set:
-            cu = self.doexec(cnx, sql, restr)
-            actions = [tx.EntityAction(a,p,o,et,e,c and pickle.loads(self.binary_to_str(c)))
-                       for a,p,o,et,e,c in cu.fetchall()]
+        cu = self.doexec(cnx, sql, restr)
+        actions = [tx.EntityAction(a, p, o, et, e, c and pickle.loads(self.binary_to_str(c)))
+                   for a, p, o, et, e, c in cu.fetchall()]
         sql = self.sqlgen.select('tx_relation_actions', restr,
                                  ('txa_action', 'txa_public', 'txa_order',
                                   'rtype', 'eid_from', 'eid_to'))
-        with cnx.ensure_cnx_set:
-            cu = self.doexec(cnx, sql, restr)
-            actions += [tx.RelationAction(*args) for args in cu.fetchall()]
+        cu = self.doexec(cnx, sql, restr)
+        actions += [tx.RelationAction(*args) for args in cu.fetchall()]
         return sorted(actions, key=lambda x: x.order)
 
     def undo_transaction(self, cnx, txuuid):
@@ -1146,12 +1148,12 @@
         for column, value in changes.items():
             rtype = column[len(SQL_PREFIX):]
             if rtype == "eid":
-                continue # XXX should even `eid` be stored in action changes?
+                continue  # XXX should even `eid` be stored in action changes?
             try:
                 rschema = getrschema[rtype]
             except KeyError:
                 err(cnx._("can't restore relation %(rtype)s of entity %(eid)s, "
-                              "this relation does not exist in the schema anymore.")
+                          "this relation does not exist in the schema anymore.")
                     % {'rtype': rtype, 'eid': eid})
             if not rschema.final:
                 if not rschema.inlined:
@@ -1160,11 +1162,11 @@
                 elif value is not None:
                     # not a deletion: we must put something in edited
                     try:
-                        entity._cw.entity_from_eid(value) # check target exists
+                        entity._cw.entity_from_eid(value)  # check target exists
                         edited[rtype] = value
                     except UnknownEid:
                         err(cnx._("can't restore entity %(eid)s of type %(eschema)s, "
-                                      "target of %(rtype)s (eid %(value)s) does not exist any longer")
+                                  "target of %(rtype)s (eid %(value)s) does not exist any longer")
                             % locals())
                         changes[column] = None
             elif eschema.destination(rtype) in ('Bytes', 'Password'):
@@ -1183,7 +1185,6 @@
         err = errors.append
         eid = action.eid
         etype = action.etype
-        _ = cnx._
         # get an entity instance
         try:
             entity = self.repo.vreg['etypes'].etype_class(etype)(cnx)
@@ -1239,8 +1240,7 @@
         # we should find an efficient way to do this (keeping current veolidf
         # massive deletion performance)
         if _undo_has_later_transaction(cnx, eid):
-            msg = cnx._('some later transaction(s) touch entity, undo them '
-                            'first')
+            msg = cnx._('some later transaction(s) touch entity, undo them first')
             raise ValidationError(eid, {None: msg})
         etype = action.etype
         # get an entity instance
@@ -1277,7 +1277,7 @@
             entity = cnx.entity_from_eid(action.eid)
         except UnknownEid:
             err(cnx._("can't restore state of entity %s, it has been "
-                          "deleted inbetween") % action.eid)
+                      "deleted inbetween") % action.eid)
             return errors
         self._reedit_entity(entity, action.changes, err)
         entity.cw_edited.check()
@@ -1346,10 +1346,9 @@
         try:
             for entity in entities:
                 cursor_unindex_object(entity.eid, cursor)
-        except Exception: # let KeyboardInterrupt / SystemExit propagate
+        except Exception:  # let KeyboardInterrupt / SystemExit propagate
             self.exception('error while unindexing %s', entity)
 
-
     def fti_index_entities(self, cnx, entities):
         """add text content of created/modified entities to the full text index
         """
@@ -1362,7 +1361,7 @@
                 cursor_index_object(entity.eid,
                                     entity.cw_adapt_to('IFTIndexable'),
                                     cursor)
-        except Exception: # let KeyboardInterrupt / SystemExit propagate
+        except Exception:  # let KeyboardInterrupt / SystemExit propagate
             self.exception('error while indexing %s', entity)
 
 
@@ -1391,14 +1390,15 @@
         source.fti_unindex_entities(cnx, to_reindex)
         source.fti_index_entities(cnx, to_reindex)
 
+
 def sql_schema(driver):
+    """Yield SQL statements to create system tables in the database."""
     helper = get_db_helper(driver)
     typemap = helper.TYPE_MAPPING
-    schema = """
-/* Create the repository's system database */
-
-%s
-
+    # XXX should return a list of sql statements rather than ';' joined statements
+    for sql in helper.sql_create_numrange('entities_id_seq').split(';'):
+        yield sql
+    for sql in ("""
 CREATE TABLE entities (
   eid INTEGER PRIMARY KEY NOT NULL,
   type VARCHAR(64) NOT NULL,
@@ -1447,48 +1447,36 @@
 CREATE INDEX tx_relation_actions_txa_public_idx ON tx_relation_actions(txa_public);;
 CREATE INDEX tx_relation_actions_eid_from_idx ON tx_relation_actions(eid_from);;
 CREATE INDEX tx_relation_actions_eid_to_idx ON tx_relation_actions(eid_to);;
-CREATE INDEX tx_relation_actions_tx_uuid_idx ON tx_relation_actions(tx_uuid);;
-""" % (helper.sql_create_numrange('entities_id_seq').replace(';', ';;'),
-       typemap['Datetime'],
-       typemap['Boolean'], typemap['Bytes'], typemap['Boolean'])
+CREATE INDEX tx_relation_actions_tx_uuid_idx ON tx_relation_actions(tx_uuid)
+""" % (typemap['Datetime'],
+       typemap['Boolean'], typemap['Bytes'], typemap['Boolean'])).split(';'):
+        yield sql
     if helper.backend_name == 'sqlite':
         # sqlite support the ON DELETE CASCADE syntax but do nothing
-        schema += '''
+        yield '''
 CREATE TRIGGER fkd_transactions
 BEFORE DELETE ON transactions
 FOR EACH ROW BEGIN
     DELETE FROM tx_entity_actions WHERE tx_uuid=OLD.tx_uuid;
     DELETE FROM tx_relation_actions WHERE tx_uuid=OLD.tx_uuid;
-END;;
+END;
 '''
     # define a multi-columns index on a single index to please sqlserver, which doesn't like several
     # null entries in a UNIQUE column
-    schema += ';;'.join(helper.sqls_create_multicol_unique_index('entities', ['extid'], 'entities_extid_idx'))
-    schema += ';;\n'
-    return schema
-
-
-def sql_drop_schema(driver):
-    helper = get_db_helper(driver)
-    return """
-%s;
-%s
-DROP TABLE entities;
-DROP TABLE tx_entity_actions;
-DROP TABLE tx_relation_actions;
-DROP TABLE transactions;
-""" % (';'.join(helper.sqls_drop_multicol_unique_index('entities', ['extid'])),
-       helper.sql_drop_numrange('entities_id_seq'))
+    for sql in helper.sqls_create_multicol_unique_index('entities', ['extid'],
+                                                        'entities_extid_idx'):
+        yield sql
 
 
 def grant_schema(user, set_owner=True):
-    result = ''
+    """Yield SQL statements to give all access (and ownership if `set_owner` is True) on the
+    database system tables to `user`.
+    """
     for table in ('entities', 'entities_id_seq',
                   'transactions', 'tx_entity_actions', 'tx_relation_actions'):
         if set_owner:
-            result = 'ALTER TABLE %s OWNER TO %s;\n' % (table, user)
-        result += 'GRANT ALL ON %s TO %s;\n' % (table, user)
-    return result
+            yield 'ALTER TABLE %s OWNER TO %s;' % (table, user)
+        yield 'GRANT ALL ON %s TO %s;' % (table, user)
 
 
 class BaseAuthentifier(object):
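For illustration only, not part of this changeset: since `sql_schema()` and `grant_schema()` above now yield individual SQL statements instead of returning one ';;'-joined string, a caller is expected to run each statement on its own. A minimal sketch, assuming a plain DB-API cursor and hypothetical `driver`/`user` values:

    from cubicweb.server.sources.native import sql_schema, grant_schema

    def create_system_tables(cursor, driver='postgres', user=None):
        # execute each yielded statement separately; empty fragments may be
        # left over from the ';'-splitting flagged by the XXX comment above
        for stmt in sql_schema(driver):
            if stmt.strip():
                cursor.execute(stmt)
        if user is not None:
            for stmt in grant_schema(user, set_owner=True):
                cursor.execute(stmt)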
@@ -1500,6 +1488,7 @@
         """set the instance'schema"""
         pass
 
+
 class LoginPasswordAuthentifier(BaseAuthentifier):
     passwd_rql = 'Any P WHERE X is CWUser, X login %(login)s, X upassword P'
     auth_rql = (u'Any X WHERE X is CWUser, X login %(login)s, X upassword %(pwd)s, '
@@ -1508,7 +1497,7 @@
 
     def set_schema(self, schema):
         """set the instance'schema"""
-        if 'CWUser' in schema: # probably an empty schema if not true...
+        if 'CWUser' in schema:  # probably an empty schema if not true...
             # rql syntax trees used to authenticate users
             self._passwd_rqlst = self.source.compile_rql(self.passwd_rql, self._sols)
             self._auth_rqlst = self.source.compile_rql(self.auth_rql, self._sols)
@@ -1520,7 +1509,7 @@
         two queries are needed since passwords are stored crypted, so we have
         to fetch the salt first
         """
-        args = {'login': login, 'pwd' : None}
+        args = {'login': login, 'pwd': None}
         if password is not None:
             rset = self.source.syntax_tree_search(cnx, self._passwd_rqlst, args)
             try:
@@ -1541,15 +1530,15 @@
             # before 3.14.7), update with a fresh one
             if pwd is not None and pwd.getvalue():
                 verify, newhash = verify_and_update(password, pwd.getvalue())
-                if not verify: # should not happen, but...
+                if not verify:  # should not happen, but...
                     raise AuthenticationError('bad password')
                 if newhash:
-                    cnx.system_sql("UPDATE %s SET %s=%%(newhash)s WHERE %s=%%(login)s" % (
-                                        SQL_PREFIX + 'CWUser',
-                                        SQL_PREFIX + 'upassword',
-                                        SQL_PREFIX + 'login'),
-                                       {'newhash': self.source._binary(newhash.encode('ascii')),
-                                        'login': login})
+                    cnx.system_sql("UPDATE %s SET %s=%%(newhash)s WHERE %s=%%(login)s"
+                                   % (SQL_PREFIX + 'CWUser',
+                                      SQL_PREFIX + 'upassword',
+                                      SQL_PREFIX + 'login'),
+                                   {'newhash': self.source._binary(newhash.encode('ascii')),
+                                    'login': login})
                     cnx.commit()
             return user
         except IndexError:
@@ -1560,11 +1549,11 @@
     def authenticate(self, cnx, login, **authinfo):
         # email_auth flag prevent from infinite recursion (call to
         # repo.check_auth_info at the end of this method may lead us here again)
-        if not '@' in login or authinfo.pop('email_auth', None):
+        if '@' not in login or authinfo.pop('email_auth', None):
             raise AuthenticationError('not an email')
         rset = cnx.execute('Any L WHERE U login L, U primary_email M, '
-                               'M address %(login)s', {'login': login},
-                               build_descr=False)
+                           'M address %(login)s', {'login': login},
+                           build_descr=False)
         if rset.rowcount != 1:
             raise AuthenticationError('unexisting email')
         login = rset.rows[0][0]
@@ -1649,7 +1638,7 @@
             eschema = self.schema.eschema(etype)
             if eschema.final:
                 continue
-            etype_tables.append('%s%s'%(prefix, etype))
+            etype_tables.append('%s%s' % (prefix, etype))
         for rtype in self.schema.relations():
             rschema = self.schema.rschema(rtype)
             if rschema.final or rschema.inlined or rschema in VIRTUAL_RTYPES:
@@ -1701,7 +1690,7 @@
                 serialized = self._serialize(table, columns, rows)
                 archive.writestr('tables/%s.%04d' % (table, i), serialized)
                 self.logger.debug('wrote rows %d to %d (out of %d) to %s.%04d',
-                                  start, start+len(rows)-1,
+                                  start, start + len(rows) - 1,
                                   rowcount,
                                   table, i)
         else:
@@ -1807,7 +1796,6 @@
             self.cnx.commit()
         self.logger.info('inserted %d rows', row_count)
 
-
     def _parse_versions(self, version_str):
         versions = set()
         for line in version_str.splitlines():
--- a/cubicweb/server/sqlutils.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/server/sqlutils.py	Thu Oct 20 18:28:46 2016 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2016 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -115,69 +115,45 @@
 def sqlgrants(schema, driver, user,
               text_index=True, set_owner=True,
               skip_relations=(), skip_entities=()):
-    """return sql to give all access privileges to the given user on the system
-    schema
+    """Return a list of SQL statements to give all access privileges to the given user on the
+    database.
     """
     from cubicweb.server.schema2sql import grant_schema
     from cubicweb.server.sources import native
-    output = []
-    w = output.append
-    w(native.grant_schema(user, set_owner))
-    w('')
+    stmts = list(native.grant_schema(user, set_owner))
     if text_index:
         dbhelper = db.get_db_helper(driver)
-        w(dbhelper.sql_grant_user_on_fti(user))
-        w('')
-    w(grant_schema(schema, user, set_owner, skip_entities=skip_entities, prefix=SQL_PREFIX))
-    return '\n'.join(output)
+        # XXX should return a list of sql statements rather than ';' joined statements
+        stmts += dbhelper.sql_grant_user_on_fti(user).split(';')
+    stmts += grant_schema(schema, user, set_owner, skip_entities=skip_entities, prefix=SQL_PREFIX)
+    return stmts
 
 
 def sqlschema(schema, driver, text_index=True,
               user=None, set_owner=False,
               skip_relations=PURE_VIRTUAL_RTYPES, skip_entities=()):
-    """return the system sql schema, according to the given parameters"""
+    """Return the database SQL schema as a list of SQL statements, according to the given parameters.
+    """
     from cubicweb.server.schema2sql import schema2sql
     from cubicweb.server.sources import native
     if set_owner:
         assert user, 'user is argument required when set_owner is true'
-    output = []
-    w = output.append
-    w(native.sql_schema(driver))
-    w('')
+    stmts = list(native.sql_schema(driver))
     dbhelper = db.get_db_helper(driver)
     if text_index:
-        w(dbhelper.sql_init_fti().replace(';', ';;'))
-        w('')
-    w(schema2sql(dbhelper, schema, prefix=SQL_PREFIX,
-                 skip_entities=skip_entities,
-                 skip_relations=skip_relations).replace(';', ';;'))
+        stmts += dbhelper.sql_init_fti().split(';')  # XXX
+    stmts += schema2sql(dbhelper, schema, prefix=SQL_PREFIX,
+                        skip_entities=skip_entities,
+                        skip_relations=skip_relations)
     if dbhelper.users_support and user:
-        w('')
-        w(sqlgrants(schema, driver, user, text_index, set_owner,
-                    skip_relations, skip_entities).replace(';', ';;'))
-    return '\n'.join(output)
-
-
-def sqldropschema(schema, driver, text_index=True,
-                  skip_relations=PURE_VIRTUAL_RTYPES, skip_entities=()):
-    """return the sql to drop the schema, according to the given parameters"""
-    from cubicweb.server.schema2sql import dropschema2sql
-    from cubicweb.server.sources import native
-    output = []
-    w = output.append
-    if text_index:
-        dbhelper = db.get_db_helper(driver)
-        w(dbhelper.sql_drop_fti())
-        w('')
-    w(dropschema2sql(dbhelper, schema, prefix=SQL_PREFIX,
-                     skip_entities=skip_entities,
-                     skip_relations=skip_relations))
-    w('')
-    w(native.sql_drop_schema(driver))
-    return '\n'.join(output)
+        stmts += sqlgrants(schema, driver, user, text_index, set_owner,
+                           skip_relations, skip_entities)
+    return stmts
 
 
 _SQL_DROP_ALL_USER_TABLES_FILTER_FUNCTION = re.compile('^(?!(sql|pg)_)').match
+
+
 def sql_drop_all_user_tables(driver_or_helper, sqlcursor):
     """Return ths sql to drop all tables found in the database system."""
     if not getattr(driver_or_helper, 'list_tables', None):
@@ -185,14 +161,16 @@
     else:
         dbhelper = driver_or_helper
 
-    cmds = [dbhelper.sql_drop_sequence('entities_id_seq')]
+    stmts = [dbhelper.sql_drop_sequence('entities_id_seq')]
     # for mssql, we need to drop views before tables
     if hasattr(dbhelper, 'list_views'):
-        cmds += ['DROP VIEW %s;' % name
-                 for name in filter(_SQL_DROP_ALL_USER_TABLES_FILTER_FUNCTION, dbhelper.list_views(sqlcursor))]
-    cmds += ['DROP TABLE %s;' % name
-             for name in filter(_SQL_DROP_ALL_USER_TABLES_FILTER_FUNCTION, dbhelper.list_tables(sqlcursor))]
-    return '\n'.join(cmds)
+        stmts += ['DROP VIEW %s;' % name
+                  for name in filter(_SQL_DROP_ALL_USER_TABLES_FILTER_FUNCTION,
+                                     dbhelper.list_views(sqlcursor))]
+    stmts += ['DROP TABLE %s;' % name
+              for name in filter(_SQL_DROP_ALL_USER_TABLES_FILTER_FUNCTION,
+                                 dbhelper.list_tables(sqlcursor))]
+    return stmts
 
 
 class ConnectionWrapper(object):
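Illustrative sketch, not part of the patch: `sqlschema()`, `sqlgrants()` and `sql_drop_all_user_tables()` now return lists of SQL statements rather than pre-joined strings, so callers can simply loop over them. The `cursor`, `schema` and `dbhelper` names below are assumptions:

    from cubicweb.server.sqlutils import sqlschema, sql_drop_all_user_tables

    def reset_database(cursor, schema, dbhelper, driver='postgres'):
        # drop everything currently in the database, then recreate the schema
        for stmt in sql_drop_all_user_tables(dbhelper, cursor):
            cursor.execute(stmt)
        for stmt in sqlschema(schema, driver):
            if stmt.strip():
                cursor.execute(stmt)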
@@ -225,7 +203,7 @@
 
     def close(self, i_know_what_i_do=False):
         """close all connections in the set"""
-        if i_know_what_i_do is not True: # unexpected closing safety belt
+        if i_know_what_i_do is not True:  # unexpected closing safety belt
             raise RuntimeError('connections set shouldn\'t be closed')
         try:
             self.cu.close()
@@ -242,7 +220,7 @@
 
     def cnxset_freed(self):
         """connections set is being freed from a session"""
-        pass # no nothing by default
+        pass  # do nothing by default
 
     def reconnect(self):
         """reopen a connection for this source or all sources if none specified
@@ -293,6 +271,7 @@
             self._cnx = self._source.get_connection()
             self._cu = self._cnx.cursor()
         return self._cnx
+
     @cnx.setter
     def cnx(self, value):
         self._cnx = value
@@ -303,6 +282,7 @@
             self._cnx = self._source.get_connection()
             self._cu = self._cnx.cursor()
         return self._cu
+
     @cu.setter
     def cu(self, value):
         self._cu = value
@@ -460,7 +440,7 @@
                     # than add_entity (native) as this behavior
                     # may also be used for update.
                     value = converters[atype](value)
-                elif atype == 'Password': # XXX could be done using a TYPE_CONVERTERS callback
+                elif atype == 'Password':  # XXX could be done using a TYPE_CONVERTERS callback
                     # if value is a Binary instance, this mean we got it
                     # from a query result and so it is already encrypted
                     if isinstance(value, Binary):
@@ -470,13 +450,13 @@
                     value = self._binary(value)
                 elif isinstance(value, Binary):
                     value = self._binary(value.getvalue())
-            attrs[SQL_PREFIX+str(attr)] = value
-        attrs[SQL_PREFIX+'eid'] = entity.eid
+            attrs[SQL_PREFIX + str(attr)] = value
+        attrs[SQL_PREFIX + 'eid'] = entity.eid
         return attrs
 
     # these are overridden by set_log_methods below
     # only defining here to prevent pylint from complaining
-    info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None
+    info = warning = error = critical = exception = debug = lambda msg, *a, **kw: None
 
 set_log_methods(SQLAdapterMixIn, getLogger('cubicweb.sqladapter'))
 
@@ -536,9 +516,11 @@
     class group_concat(object):
         def __init__(self):
             self.values = set()
+
         def step(self, value):
             if value is not None:
                 self.values.add(value)
+
         def finalize(self):
             return ', '.join(text_type(v) for v in self.values)
 
@@ -562,11 +544,12 @@
     cnx.create_function("TEXT_LIMIT_SIZE", 2, limit_size2)
 
     from logilab.common.date import strptime
+
     def weekday(ustr):
         try:
             dt = strptime(ustr, '%Y-%m-%d %H:%M:%S')
         except:
-            dt =  strptime(ustr, '%Y-%m-%d')
+            dt = strptime(ustr, '%Y-%m-%d')
         # expect sunday to be 1, saturday 7 while weekday method return 0 for
         # monday
         return (dt.weekday() + 1) % 7
--- a/cubicweb/server/test/data-migractions/migratedapp/schema.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/server/test/data-migractions/migratedapp/schema.py	Thu Oct 20 18:28:46 2016 +0200
@@ -119,7 +119,7 @@
     nom    = String(fulltextindexed=True, required=True, maxsize=64)
     prenom = String(fulltextindexed=True, maxsize=64)
     civility   = String(maxsize=1, default='M', fulltextindexed=True)
-    promo  = String(vocabulary=('bon','pasbon'))
+    promo  = String(vocabulary=('bon','pasbon', 'pasbondutout'))
     titre  = String(fulltextindexed=True, maxsize=128)
     adel   = String(maxsize=128)
     ass    = String(maxsize=128)
--- a/cubicweb/server/test/data-schema2sql/schema/schema.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/server/test/data-schema2sql/schema/schema.py	Thu Oct 20 18:28:46 2016 +0200
@@ -1,4 +1,4 @@
-# copyright 2004-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2004-2016 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of yams.
@@ -38,7 +38,7 @@
     nom    = String(maxsize=64, fulltextindexed=True, required=True)
     prenom = String(maxsize=64, fulltextindexed=True)
     sexe   = String(maxsize=1, default='M')
-    promo  = String(vocabulary=('bon','pasbon'))
+    promo  = String(vocabulary=('bon','pasbon','pas;bon;;du;;;tout;;;;'))
     titre  = String(maxsize=128, fulltextindexed=True)
     adel   = String(maxsize=128)
     ass    = String(maxsize=128)
--- a/cubicweb/server/test/requirements.txt	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,8 +0,0 @@
-psycopg2
-ldap3
-cubicweb-basket
-cubicweb-card
-cubicweb-comment
-cubicweb-file
-cubicweb-localperms
-cubicweb-tag
--- a/cubicweb/server/test/unittest_migractions.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/server/test/unittest_migractions.py	Thu Oct 20 18:28:46 2016 +0200
@@ -22,6 +22,7 @@
 from datetime import date
 from contextlib import contextmanager
 import tempfile
+from hashlib import md5
 
 from logilab.common.testlib import unittest_main, Tags, tag, with_tempdir
 from logilab.common import tempattr
@@ -596,6 +597,13 @@
             relations = [r.name for r in rset.get_entity(0, 0).relations]
             self.assertCountEqual(relations, ('nom', 'prenom', 'datenaiss'))
 
+            # serialized constraint changed
+            constraints = self.table_constraints(mh, 'cw_Personne')
+            self.assertEqual(len(constraints), 1, constraints)
+            rdef = migrschema['promo'].rdefs['Personne', 'String']
+            cstr = rdef.constraint_by_type('StaticVocabularyConstraint')
+            self.assertIn(cstr.name_for(rdef), constraints)
+
     def _erqlexpr_rset(self, cnx, action, ertype):
         rql = 'RQLExpression X WHERE ET is CWEType, ET %s_permission X, ET name %%(name)s' % action
         return cnx.execute(rql, {'name': ertype})
--- a/cubicweb/server/test/unittest_querier.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/server/test/unittest_querier.py	Thu Oct 20 18:28:46 2016 +0200
@@ -608,15 +608,15 @@
                               [[u'description_format', 13],
                                [u'description', 14],
                                [u'name', 19],
-                               [u'created_by', 45],
-                               [u'creation_date', 45],
-                               [u'cw_source', 45],
-                               [u'cwuri', 45],
-                               [u'in_basket', 45],
-                               [u'is', 45],
-                               [u'is_instance_of', 45],
-                               [u'modification_date', 45],
-                               [u'owned_by', 45]])
+                               [u'created_by', 46],
+                               [u'creation_date', 46],
+                               [u'cw_source', 46],
+                               [u'cwuri', 46],
+                               [u'in_basket', 46],
+                               [u'is', 46],
+                               [u'is_instance_of', 46],
+                               [u'modification_date', 46],
+                               [u'owned_by', 46]])
 
     def test_select_aggregat_having_dumb(self):
         # dumb but should not raise an error
--- a/cubicweb/server/test/unittest_schema2sql.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/server/test/unittest_schema2sql.py	Thu Oct 20 18:28:46 2016 +0200
@@ -1,4 +1,4 @@
-# copyright 2004-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2004-2016 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -24,6 +24,7 @@
 from logilab.database import get_db_helper
 
 from yams.reader import SchemaLoader
+
 from cubicweb.server import schema2sql
 
 schema2sql.SET_DEFAULT = True
@@ -33,19 +34,17 @@
 schema = SchemaLoader().load([DATADIR])
 
 
-EXPECTED_DATA_NO_DROP = """
-CREATE TABLE Affaire(
+EXPECTED_DATA_NO_DROP = [
+    """CREATE TABLE Affaire(
  sujet varchar(128),
  ref varchar(12),
  inline_rel integer REFERENCES entities (eid)
-);
-CREATE INDEX idx_444e29ba3bd1f6c7ea89008613345d7b ON Affaire(inline_rel);
-
-CREATE TABLE Company(
+)""",
+    "CREATE INDEX idx_444e29ba3bd1f6c7ea89008613345d7b ON Affaire(inline_rel)",
+    """CREATE TABLE Company(
  name text
-);
-
-CREATE TABLE Datetest(
+)""",
+    """CREATE TABLE Datetest(
  dt1 timestamp,
  dt2 timestamp,
  d1 date,
@@ -53,42 +52,36 @@
  t1 time,
  t2 time
 , CONSTRAINT cstrf6a3dad792ba13c2cddcf61a2b737c00 CHECK(d1 <= CAST(clock_timestamp() AS DATE))
-);
-
-CREATE TABLE Division(
+)""",
+    """CREATE TABLE Division(
  name text
-);
-
-CREATE TABLE EPermission(
+)""",
+    """CREATE TABLE EPermission(
  name varchar(100) NOT NULL
-);
-CREATE INDEX idx_86fb596553c6f1ebc159422169f76c32 ON EPermission(name);
-
-CREATE TABLE Eetype(
+)""",
+    "CREATE INDEX idx_86fb596553c6f1ebc159422169f76c32 ON EPermission(name)",
+    """CREATE TABLE Eetype(
  name varchar(64) NOT NULL,
  description text,
  meta boolean,
  final boolean,
  initial_state integer REFERENCES entities (eid)
-);
-CREATE INDEX idx_f1f29b77c85f57921df19d2c29044d2d ON Eetype(name);
-ALTER TABLE Eetype ADD CONSTRAINT key_f1f29b77c85f57921df19d2c29044d2d UNIQUE(name);
-CREATE INDEX idx_27be7c0b18181bbdc76f3a54296dd81f ON Eetype(initial_state);
-
-CREATE TABLE Employee(
-);
-
-CREATE TABLE Note(
+)""",
+    "CREATE INDEX idx_f1f29b77c85f57921df19d2c29044d2d ON Eetype(name)",
+    "ALTER TABLE Eetype ADD CONSTRAINT key_f1f29b77c85f57921df19d2c29044d2d UNIQUE(name)",
+    "CREATE INDEX idx_27be7c0b18181bbdc76f3a54296dd81f ON Eetype(initial_state)",
+    """CREATE TABLE Employee(
+)""",
+    """CREATE TABLE Note(
  date varchar(10),
  type varchar(1),
  para varchar(512)
-);
-
-CREATE TABLE Person(
+)""",
+    """CREATE TABLE Person(
  nom varchar(64) NOT NULL,
  prenom varchar(64),
  sexe varchar(1) DEFAULT 'M',
- promo varchar(6),
+ promo varchar(22),
  titre varchar(128),
  adel varchar(128),
  ass varchar(128),
@@ -98,15 +91,14 @@
  datenaiss date,
  test boolean,
  salary float
-, CONSTRAINT cstr151c2116c0c09de13fded0619d5b4aac CHECK(promo IN ('bon', 'pasbon'))
-);
-CREATE UNIQUE INDEX unique_e6c2d219772dbf1715597f7d9a6b3892 ON Person(nom,prenom);
-
-CREATE TABLE Salaried(
+, CONSTRAINT cstrf5ac746b90a5fdd00fbe037ec9cf18eb CHECK(promo IN ('bon', 'pasbon', 'pas;bon;;du;;;tout;;;;'))
+)""",
+    "CREATE UNIQUE INDEX unique_e6c2d219772dbf1715597f7d9a6b3892 ON Person(nom,prenom)",
+    """CREATE TABLE Salaried(
  nom varchar(64) NOT NULL,
  prenom varchar(64),
  sexe varchar(1) DEFAULT 'M',
- promo varchar(6),
+ promo varchar(22),
  titre varchar(128),
  adel varchar(128),
  ass varchar(128),
@@ -116,11 +108,10 @@
  datenaiss date,
  test boolean,
  salary float
-, CONSTRAINT cstr069569cf1791dba1a2726197c53aeb44 CHECK(promo IN ('bon', 'pasbon'))
-);
-CREATE UNIQUE INDEX unique_98da0f9de8588baa8966f0b1a6f850a3 ON Salaried(nom,prenom);
-
-CREATE TABLE Societe(
+, CONSTRAINT cstrb73206eeba9fe96a05105a9db62a1509 CHECK(promo IN ('bon', 'pasbon', 'pas;bon;;du;;;tout;;;;'))
+)""",
+    "CREATE UNIQUE INDEX unique_98da0f9de8588baa8966f0b1a6f850a3 ON Salaried(nom,prenom)",
+    """CREATE TABLE Societe(
  nom varchar(64),
  web varchar(128),
  tel integer,
@@ -132,25 +123,21 @@
  cp varchar(12),
  ville varchar(32)
 , CONSTRAINT cstra0a1deaa997dcd5f9b83a77654d7c287 CHECK(fax <= tel)
-);
-ALTER TABLE Societe ADD CONSTRAINT key_abace82c402eba4a37ac54a7872607af UNIQUE(tel);
-
-CREATE TABLE State(
+)""",
+    "ALTER TABLE Societe ADD CONSTRAINT key_abace82c402eba4a37ac54a7872607af UNIQUE(tel)",
+    """CREATE TABLE State(
  eid integer PRIMARY KEY REFERENCES entities (eid),
  name varchar(256) NOT NULL,
  description text
-);
-CREATE INDEX idx_fba3802ef9056558bb9c06b5c6ba9aab ON State(name);
-
-CREATE TABLE Subcompany(
+)""",
+    "CREATE INDEX idx_fba3802ef9056558bb9c06b5c6ba9aab ON State(name)",
+    """CREATE TABLE Subcompany(
  name text
-);
-
-CREATE TABLE Subdivision(
+)""",
+    """CREATE TABLE Subdivision(
  name text
-);
-
-CREATE TABLE pkginfo(
+)""",
+    """CREATE TABLE pkginfo(
  modname varchar(30) NOT NULL,
  version varchar(10) DEFAULT '0.1' NOT NULL,
  copyright text NOT NULL,
@@ -163,125 +150,100 @@
  debian_handler varchar(6)
 , CONSTRAINT cstrbffed5ce7306d65a0db51182febd4a7b CHECK(license IN ('GPL', 'ZPL'))
 , CONSTRAINT cstr2238b33d09bf7c441e0888be354c2444 CHECK(debian_handler IN ('machin', 'bidule'))
-);
-
-
-CREATE TABLE concerne_relation (
+)""",
+    """CREATE TABLE concerne_relation (
   eid_from INTEGER NOT NULL REFERENCES entities (eid),
   eid_to INTEGER NOT NULL REFERENCES entities (eid),
   CONSTRAINT key_19e70eabae35becb48c247bc4a688170 PRIMARY KEY(eid_from, eid_to)
-);
-
-CREATE INDEX idx_5ee7db9477832d6e0e847d9d9cd39f5f ON concerne_relation(eid_from);
-CREATE INDEX idx_07f609872b384bb1e598cc355686a53c ON concerne_relation(eid_to);
-
-CREATE TABLE division_of_relation (
+)""",
+    "CREATE INDEX idx_5ee7db9477832d6e0e847d9d9cd39f5f ON concerne_relation(eid_from)",
+    "CREATE INDEX idx_07f609872b384bb1e598cc355686a53c ON concerne_relation(eid_to)",
+    """CREATE TABLE division_of_relation (
   eid_from INTEGER NOT NULL REFERENCES entities (eid),
   eid_to INTEGER NOT NULL REFERENCES entities (eid),
   CONSTRAINT key_ca129a4cfa4c185c7783654e9e97da5a PRIMARY KEY(eid_from, eid_to)
-);
-
-CREATE INDEX idx_78da9d594180fecb68ef1eba0c17a975 ON division_of_relation(eid_from);
-CREATE INDEX idx_0e6bd09d8d25129781928848e2f6d8d5 ON division_of_relation(eid_to);
-
-CREATE TABLE evaluee_relation (
+)""",
+    "CREATE INDEX idx_78da9d594180fecb68ef1eba0c17a975 ON division_of_relation(eid_from)",
+    "CREATE INDEX idx_0e6bd09d8d25129781928848e2f6d8d5 ON division_of_relation(eid_to)",
+    """CREATE TABLE evaluee_relation (
   eid_from INTEGER NOT NULL REFERENCES entities (eid),
   eid_to INTEGER NOT NULL REFERENCES entities (eid),
   CONSTRAINT key_61aa7ea90ed7e43818c9865a3a7eb046 PRIMARY KEY(eid_from, eid_to)
-);
-
-CREATE INDEX idx_69358dbe47990b4f8cf22af55b064dc5 ON evaluee_relation(eid_from);
-CREATE INDEX idx_634663371244297334ff655a26d6cce3 ON evaluee_relation(eid_to);
-
-CREATE TABLE next_state_relation (
+)""",
+    "CREATE INDEX idx_69358dbe47990b4f8cf22af55b064dc5 ON evaluee_relation(eid_from)",
+    "CREATE INDEX idx_634663371244297334ff655a26d6cce3 ON evaluee_relation(eid_to)",
+    """CREATE TABLE next_state_relation (
   eid_from INTEGER NOT NULL REFERENCES entities (eid),
   eid_to INTEGER NOT NULL REFERENCES entities (eid),
   CONSTRAINT key_24a1275472da1ccc1031f6c463cdaa95 PRIMARY KEY(eid_from, eid_to)
-);
-
-CREATE INDEX idx_e5c1a2ddc41a057eaaf6bdf9f5c6b587 ON next_state_relation(eid_from);
-CREATE INDEX idx_a3cf3cb065213186cf825e13037df826 ON next_state_relation(eid_to);
-
-CREATE TABLE obj_wildcard_relation (
+)""",
+    "CREATE INDEX idx_e5c1a2ddc41a057eaaf6bdf9f5c6b587 ON next_state_relation(eid_from)",
+    "CREATE INDEX idx_a3cf3cb065213186cf825e13037df826 ON next_state_relation(eid_to)",
+    """CREATE TABLE obj_wildcard_relation (
   eid_from INTEGER NOT NULL REFERENCES entities (eid),
   eid_to INTEGER NOT NULL REFERENCES entities (eid),
   CONSTRAINT key_d252c56177735139c85aee463cd65703 PRIMARY KEY(eid_from, eid_to)
-);
-
-CREATE INDEX idx_efbd9bd98c44bdfe2add479ab6704017 ON obj_wildcard_relation(eid_from);
-CREATE INDEX idx_e8c168c66f9d6057ce14e644b8436808 ON obj_wildcard_relation(eid_to);
-
-CREATE TABLE require_permission_relation (
+)""",
+    "CREATE INDEX idx_efbd9bd98c44bdfe2add479ab6704017 ON obj_wildcard_relation(eid_from)",
+    "CREATE INDEX idx_e8c168c66f9d6057ce14e644b8436808 ON obj_wildcard_relation(eid_to)",
+    """CREATE TABLE require_permission_relation (
   eid_from INTEGER NOT NULL REFERENCES entities (eid),
   eid_to INTEGER NOT NULL REFERENCES entities (eid),
   CONSTRAINT key_24f38c4edaf84fdcc0f0d093fec3d5c7 PRIMARY KEY(eid_from, eid_to)
-);
-
-CREATE INDEX idx_193987ddfd7c66bf43ded029ea363605 ON require_permission_relation(eid_from);
-CREATE INDEX idx_f6dd784ff5161c4461a753591fe1de94 ON require_permission_relation(eid_to);
-
-CREATE TABLE state_of_relation (
+)""",
+    "CREATE INDEX idx_193987ddfd7c66bf43ded029ea363605 ON require_permission_relation(eid_from)",
+    "CREATE INDEX idx_f6dd784ff5161c4461a753591fe1de94 ON require_permission_relation(eid_to)",
+    """CREATE TABLE state_of_relation (
   eid_from INTEGER NOT NULL REFERENCES entities (eid),
   eid_to INTEGER NOT NULL REFERENCES entities (eid),
   CONSTRAINT key_be6983bc3072230d2e22f7631a0c9e25 PRIMARY KEY(eid_from, eid_to)
-);
-
-CREATE INDEX idx_5f17c14443de03bd1ef79750c89c2390 ON state_of_relation(eid_from);
-CREATE INDEX idx_0ee453927e090f6eec01c412278dea9b ON state_of_relation(eid_to);
-
-CREATE TABLE subcompany_of_relation (
+)""",
+    "CREATE INDEX idx_5f17c14443de03bd1ef79750c89c2390 ON state_of_relation(eid_from)",
+    "CREATE INDEX idx_0ee453927e090f6eec01c412278dea9b ON state_of_relation(eid_to)",
+    """CREATE TABLE subcompany_of_relation (
   eid_from INTEGER NOT NULL REFERENCES entities (eid),
   eid_to INTEGER NOT NULL REFERENCES entities (eid),
   CONSTRAINT key_25bee50df3b495a40a02aa39f832377f PRIMARY KEY(eid_from, eid_to)
-);
-
-CREATE INDEX idx_1e6ee813030fec8d4439fc186ce752b0 ON subcompany_of_relation(eid_from);
-CREATE INDEX idx_259f9ba242f4cb80b9b2f2f9a754fca7 ON subcompany_of_relation(eid_to);
-
-CREATE TABLE subdivision_of_relation (
+)""",
+    "CREATE INDEX idx_1e6ee813030fec8d4439fc186ce752b0 ON subcompany_of_relation(eid_from)",
+    "CREATE INDEX idx_259f9ba242f4cb80b9b2f2f9a754fca7 ON subcompany_of_relation(eid_to)",
+    """CREATE TABLE subdivision_of_relation (
   eid_from INTEGER NOT NULL REFERENCES entities (eid),
   eid_to INTEGER NOT NULL REFERENCES entities (eid),
   CONSTRAINT key_4d6f7368345676ebb66758ab71f60aef PRIMARY KEY(eid_from, eid_to)
-);
-
-CREATE INDEX idx_a90a958166c767b50a7294e93858c1a8 ON subdivision_of_relation(eid_from);
-CREATE INDEX idx_0360028629649b26da96044a12735ad4 ON subdivision_of_relation(eid_to);
-
-CREATE TABLE subj_wildcard_relation (
+)""",
+    "CREATE INDEX idx_a90a958166c767b50a7294e93858c1a8 ON subdivision_of_relation(eid_from)",
+    "CREATE INDEX idx_0360028629649b26da96044a12735ad4 ON subdivision_of_relation(eid_to)",
+    """CREATE TABLE subj_wildcard_relation (
   eid_from INTEGER NOT NULL REFERENCES entities (eid),
   eid_to INTEGER NOT NULL REFERENCES entities (eid),
   CONSTRAINT key_712ea3ec0bc1976bddc93ceba0acff06 PRIMARY KEY(eid_from, eid_to)
-);
-
-CREATE INDEX idx_4dbfa4a0d44aaa0f0816560fa8b81c22 ON subj_wildcard_relation(eid_from);
-CREATE INDEX idx_09aa23f8a8b63189d05a63f8d49c7bc0 ON subj_wildcard_relation(eid_to);
-
-CREATE TABLE sym_rel_relation (
+)""",
+    "CREATE INDEX idx_4dbfa4a0d44aaa0f0816560fa8b81c22 ON subj_wildcard_relation(eid_from)",
+    "CREATE INDEX idx_09aa23f8a8b63189d05a63f8d49c7bc0 ON subj_wildcard_relation(eid_to)",
+    """CREATE TABLE sym_rel_relation (
   eid_from INTEGER NOT NULL REFERENCES entities (eid),
   eid_to INTEGER NOT NULL REFERENCES entities (eid),
   CONSTRAINT key_c787b80522205c42402530580b0d307b PRIMARY KEY(eid_from, eid_to)
-);
-
-CREATE INDEX idx_a46ed54f98cc4d91f0df5375d3ef73cb ON sym_rel_relation(eid_from);
-CREATE INDEX idx_0faa43abe25fc83e9400a3b96daed2b2 ON sym_rel_relation(eid_to);
-
-CREATE TABLE travaille_relation (
+)""",
+    "CREATE INDEX idx_a46ed54f98cc4d91f0df5375d3ef73cb ON sym_rel_relation(eid_from)",
+    "CREATE INDEX idx_0faa43abe25fc83e9400a3b96daed2b2 ON sym_rel_relation(eid_to)",
+    """CREATE TABLE travaille_relation (
   eid_from INTEGER NOT NULL REFERENCES entities (eid),
   eid_to INTEGER NOT NULL REFERENCES entities (eid),
   CONSTRAINT key_d7b209a1f84d9cae74a98626ef0aba0b PRIMARY KEY(eid_from, eid_to)
-);
-
-CREATE INDEX idx_b00e86c772e6577ad7a7901dd0b257b2 ON travaille_relation(eid_from);
-CREATE INDEX idx_970c052363294a9871a4824c9588e220 ON travaille_relation(eid_to);
-"""
+)""",
+    "CREATE INDEX idx_b00e86c772e6577ad7a7901dd0b257b2 ON travaille_relation(eid_from)",
+    "CREATE INDEX idx_970c052363294a9871a4824c9588e220 ON travaille_relation(eid_to)",
+]
 
 
 class SQLSchemaTC(TestCase):
 
     def test_known_values(self):
         dbhelper = get_db_helper('postgres')
-        output = schema2sql.schema2sql(dbhelper, schema, skip_relations=('works_for',))
-        self.assertMultiLineEqual(EXPECTED_DATA_NO_DROP.strip(), output.strip())
+        output = list(schema2sql.schema2sql(dbhelper, schema, skip_relations=('works_for',)))
+        self.assertEqual(output, EXPECTED_DATA_NO_DROP)
 
 
 if __name__ == '__main__':
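Not part of the changeset, only an illustration of the new contract checked by this test: `schema2sql.schema2sql()` now yields one statement per item, without trailing ';'. Assuming `schema` is a loaded yams schema as in the module above, a printable script can be rebuilt like this:

    from logilab.database import get_db_helper
    from cubicweb.server import schema2sql

    dbhelper = get_db_helper('postgres')
    statements = list(schema2sql.schema2sql(dbhelper, schema,
                                            skip_relations=('works_for',)))
    script = ';\n'.join(statements) + ';'  # human-readable SQL script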
--- a/cubicweb/server/test/unittest_schemaserial.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/server/test/unittest_schemaserial.py	Thu Oct 20 18:28:46 2016 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2016 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -58,6 +58,7 @@
                'FormatConstraint': 'FormatConstraint_eid',
                }
 
+
 class Schema2RQLTC(TestCase):
 
     def test_eschema2rql1(self):
@@ -71,8 +72,8 @@
 
     def test_eschema2rql2(self):
         self.assertListEqual([
-                ('INSERT CWEType X: X description %(description)s,X final %(final)s,X name %(name)s',
-                 {'description': u'', 'final': True, 'name': u'String'})],
+            ('INSERT CWEType X: X description %(description)s,X final %(final)s,X name %(name)s',
+             {'description': u'', 'final': True, 'name': u'String'})],
                              list(eschema2rql(schema.eschema('String'))))
 
     def test_eschema2rql_specialization(self):
@@ -87,7 +88,7 @@
         expected = [('INSERT CWEType X: X description %(description)s,X final %(final)s,'
                      'X name %(name)s',
                      {'description': u'',
-                     'name': u'BabarTestType', 'final': True},)]
+                      'name': u'BabarTestType', 'final': True},)]
         got = list(eschema2rql(schema.eschema('BabarTestType')))
         self.assertListEqual(expected, got)
 
@@ -99,7 +100,7 @@
              {'description': u'link a relation definition to its relation type',
               'symmetric': False,
               'name': u'relation_type',
-              'final' : False,
+              'final': False,
               'fulltext_container': None,
               'inlined': True}),
 
@@ -124,7 +125,8 @@
               'ordernum': 1, 'cardinality': u'1*'}),
             ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X '
              'WHERE CT eid %(ct)s, EDEF eid %(x)s',
-             {'x': None, 'ct': u'RQLConstraint_eid', 'value': u'{"expression": "O final FALSE", "mainvars": ["O"], "msg": null}'}),
+             {'x': None, 'ct': u'RQLConstraint_eid',
+              'value': u'{"expression": "O final FALSE", "mainvars": ["O"], "msg": null}'}),
         ],
                              list(rschema2rql(schema.rschema('relation_type'), cstrtypemap)))
 
@@ -184,13 +186,13 @@
             ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,'
              'X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,'
              'X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s',
-            {'cardinality': u'**',
-             'composite': None,
-             'description': u'groups allowed to add entities/relations of this type',
-             'oe': None,
-             'ordernum': 9999,
-             'rt': None,
-             'se': None}),
+             {'cardinality': u'**',
+              'composite': None,
+              'description': u'groups allowed to add entities/relations of this type',
+              'oe': None,
+              'ordernum': 9999,
+              'rt': None,
+              'se': None}),
             ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,'
              'X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,'
              'X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s',
@@ -387,10 +389,10 @@
 class Perms2RQLTC(TestCase):
     GROUP_MAPPING = {
         'managers': 0,
-        'users':  1,
+        'users': 1,
         'guests': 2,
         'owners': 3,
-        }
+    }
 
     def test_eperms2rql1(self):
         self.assertListEqual([('SET X read_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 0}),
@@ -422,9 +424,6 @@
                               for rql, kwargs in erperms2rql(schema.rschema('name').rdef('CWEType', 'String'),
                                                              self.GROUP_MAPPING)])
 
-    #def test_perms2rql(self):
-    #    self.assertListEqual(perms2rql(schema, self.GROUP_MAPPING),
-    #                         ['INSERT CWEType X: X name 'Societe', X final FALSE'])
 
 class ComputedAttributeAndRelationTC(CubicWebTC):
     appid = 'data-cwep002'
@@ -442,6 +441,7 @@
         self.assertEqual('Any SUM(SA) GROUPBY X WHERE P works_for X, P salary SA',
                          schema['total_salary'].rdefs['Company', 'Int'].formula)
 
+
 if __name__ == '__main__':
     from unittest import main
     main()
--- a/cubicweb/server/test/unittest_security.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/server/test/unittest_security.py	Thu Oct 20 18:28:46 2016 +0200
@@ -113,6 +113,7 @@
 
     def test_not_relation_read_security(self):
         with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx:
+            cnx.user.groups  # fill the cache before screwing syntax_tree_search
             self.hijack_source_execute()
             cnx.execute('Any U WHERE NOT A todo_by U, A is Affaire')
             self.assertEqual(self.query[0][1].as_string(),
--- a/cubicweb/skeleton/DISTNAME.spec.tmpl	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/skeleton/DISTNAME.spec.tmpl	Thu Oct 20 18:28:46 2016 +0200
@@ -34,7 +34,7 @@
 %%endif
 
 %%install
-NO_SETUPTOOLS=1 %%{__python} setup.py --quiet install --no-compile --prefix=%%{_prefix} --root="$RPM_BUILD_ROOT"
+%%{__python} setup.py --quiet install --no-compile --prefix=%%{_prefix} --root="$RPM_BUILD_ROOT"
 # remove generated .egg-info file
 rm -rf $RPM_BUILD_ROOT/usr/lib/python*
 
--- a/cubicweb/skeleton/MANIFEST.in	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,5 +0,0 @@
-include *.py
-include */*.py
-recursive-include data *.gif *.png *.ico *.css *.js
-recursive-include i18n *.po
-recursive-include wdoc *
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/skeleton/MANIFEST.in.tmpl	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,10 @@
+include *.py
+include */*.py
+recursive-include cubicweb_%(cubename)s *.py
+recursive-include cubicweb_%(cubename)s/data *.gif *.png *.ico *.css *.js
+recursive-include cubicweb_%(cubename)s/i18n *.po
+recursive-include cubicweb_%(cubename)s/wdoc *
+recursive-include test/data bootstrap_cubes *.py
+include tox.ini
+recursive-include debian changelog compat control copyright rules
+include cubicweb-%(cubename)s.spec
--- a/cubicweb/skeleton/__init__.py.tmpl	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,4 +0,0 @@
-"""cubicweb-%(cubename)s application package
-
-%(longdesc)s
-"""
--- a/cubicweb/skeleton/__pkginfo__.py.tmpl	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,50 +0,0 @@
-# pylint: disable=W0622
-"""%(distname)s application packaging information"""
-
-from os import listdir as _listdir
-from os.path import join, isdir
-from glob import glob
-
-
-modname = '%(cubename)s'
-distname = '%(distname)s'
-
-numversion = (0, 1, 0)
-version = '.'.join(str(num) for num in numversion)
-
-license = '%(license)s'
-author = '%(author)s'
-author_email = '%(author-email)s'
-description = '%(shortdesc)s'
-web = 'http://www.cubicweb.org/project/%%s' %% distname
-
-__depends__ = %(dependencies)s
-__recommends__ = {}
-
-classifiers = [
-    'Environment :: Web Environment',
-    'Framework :: CubicWeb',
-    'Programming Language :: Python',
-    'Programming Language :: JavaScript',
-]
-
-THIS_CUBE_DIR = join('share', 'cubicweb', 'cubes', modname)
-
-
-def listdir(dirpath):
-    return [join(dirpath, fname) for fname in _listdir(dirpath)
-            if fname[0] != '.' and not fname.endswith('.pyc') and
-            not fname.endswith('~') and
-            not isdir(join(dirpath, fname))]
-
-data_files = [
-    # common files
-    [THIS_CUBE_DIR, [fname for fname in glob('*.py') if fname != 'setup.py']],
-]
-# check for possible extended cube layout
-for dname in ('entities', 'views', 'sobjects', 'hooks', 'schema', 'data',
-              'wdoc', 'i18n', 'migration'):
-    if isdir(dname):
-        data_files.append([join(THIS_CUBE_DIR, dname), listdir(dname)])
-# Note: here, you'll need to add subdirectories if you want
-# them to be included in the debian package
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/skeleton/cubicweb_CUBENAME/__init__.py.tmpl	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,4 @@
+"""cubicweb-%(cubename)s application package
+
+%(longdesc)s
+"""
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/skeleton/cubicweb_CUBENAME/__pkginfo__.py.tmpl	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,25 @@
+# pylint: disable=W0622
+"""%(distname)s application packaging information"""
+
+
+modname = 'cubicweb_%(cubename)s'
+distname = '%(distname)s'
+
+numversion = (0, 1, 0)
+version = '.'.join(str(num) for num in numversion)
+
+license = '%(license)s'
+author = '%(author)s'
+author_email = '%(author-email)s'
+description = '%(shortdesc)s'
+web = 'http://www.cubicweb.org/project/%%s' %% distname
+
+__depends__ = %(dependencies)s
+__recommends__ = {}
+
+classifiers = [
+    'Environment :: Web Environment',
+    'Framework :: CubicWeb',
+    'Programming Language :: Python',
+    'Programming Language :: JavaScript',
+]
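As context, not part of the patch: with `modname = 'cubicweb_%(cubename)s'`, a generated cube is now a regular importable Python package instead of modules installed under share/cubicweb/cubes. For a hypothetical cube named `mycube`:

    # new-style layout: the cube is imported like any other package
    from cubicweb_mycube import schema, views   # hypothetical cube name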
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/skeleton/cubicweb_CUBENAME/data/cubes.CUBENAME.css	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,1 @@
+/* cube-specific CSS */
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/skeleton/cubicweb_CUBENAME/data/cubes.CUBENAME.js	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,1 @@
+// This contains cube-specific javascript
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/skeleton/cubicweb_CUBENAME/entities.py.tmpl	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,6 @@
+# -*- coding: utf-8 -*-
+# copyright %(year)s %(author)s, all rights reserved.
+# contact %(author-web-site)s -- mailto:%(author-email)s
+#
+%(long-license)s
+"""%(distname)s entity's classes"""
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/skeleton/cubicweb_CUBENAME/hooks.py.tmpl	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,6 @@
+# -*- coding: utf-8 -*-
+# copyright %(year)s %(author)s, all rights reserved.
+# contact %(author-web-site)s -- mailto:%(author-email)s
+#
+%(long-license)s
+"""%(distname)s specific hooks and operations"""
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/skeleton/cubicweb_CUBENAME/i18n/en.po	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,9 @@
+msgid ""
+msgstr ""
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: pygettext.py 1.5\n"
+"Plural-Forms: nplurals=2; plural=(n > 1);\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI +ZONE\n"
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/skeleton/cubicweb_CUBENAME/i18n/es.po	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,9 @@
+msgid ""
+msgstr ""
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: pygettext.py 1.5\n"
+"Plural-Forms: nplurals=2; plural=(n > 1);\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI +ZONE\n"
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/skeleton/cubicweb_CUBENAME/i18n/fr.po	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,9 @@
+msgid ""
+msgstr ""
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: pygettext.py 1.5\n"
+"Plural-Forms: nplurals=2; plural=(n > 1);\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI +ZONE\n"
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/skeleton/cubicweb_CUBENAME/migration/postcreate.py.tmpl	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,13 @@
+# -*- coding: utf-8 -*-
+# copyright %(year)s %(author)s, all rights reserved.
+# contact %(author-web-site)s -- mailto:%(author-email)s
+#
+%(long-license)s
+"""%(distname)s postcreate script, executed at instance creation time or when
+the cube is added to an existing instance.
+
+You could set up site properties or a workflow here, for example.
+"""
+
+# Example of site property change
+#set_property('ui.site-title', "<sitename>")
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/skeleton/cubicweb_CUBENAME/migration/precreate.py.tmpl	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,10 @@
+# -*- coding: utf-8 -*-
+# copyright %(year)s %(author)s, all rights reserved.
+# contact %(author-web-site)s -- mailto:%(author-email)s
+#
+%(long-license)s
+"""%(distname)s precreate script, executed at instance creation time or when
+the cube is added to an existing instance, before the schema is serialized.
+
+This is typically the place to create groups referenced by the cube's schema.
+"""
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/skeleton/cubicweb_CUBENAME/schema.py.tmpl	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,6 @@
+# -*- coding: utf-8 -*-
+# copyright %(year)s %(author)s, all rights reserved.
+# contact %(author-web-site)s -- mailto:%(author-email)s
+#
+%(long-license)s
+"""%(distname)s schema"""
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/skeleton/cubicweb_CUBENAME/sobjects.py.tmpl	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,6 @@
+# -*- coding: utf-8 -*-
+# copyright %(year)s %(author)s, all rights reserved.
+# contact %(author-web-site)s -- mailto:%(author-email)s
+#
+%(long-license)s
+"""%(distname)s repository side views, usually for notification"""
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/skeleton/cubicweb_CUBENAME/uiprops.py.tmpl	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,16 @@
+# -*- coding: utf-8 -*-
+###############################################################################
+#
+# Put here information about external resources / styles used by your cube,
+# or to override existing UI properties.
+#
+# Existing properties are available through the `sheet` dictionary available
+# in the global namespace. You also have access to a `data` function which
+# will return proper url for resources in the 'data' directory.
+#
+# /!\ this file should not be imported /!\
+###############################################################################
+
+# CSS stylesheets to include in HTML headers
+# uncomment the line below to use template specific stylesheet
+# STYLESHEETS = sheet['STYLESHEETS'] + [data('cubes.%(cubename)s.css')]
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/skeleton/cubicweb_CUBENAME/views.py.tmpl	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,6 @@
+# -*- coding: utf-8 -*-
+# copyright %(year)s %(author)s, all rights reserved.
+# contact %(author-web-site)s -- mailto:%(author-email)s
+#
+%(long-license)s
+"""%(distname)s views/forms/actions/components for web ui"""
--- a/cubicweb/skeleton/data/cubes.CUBENAME.css	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,1 +0,0 @@
-/* cube-specific CSS */
--- a/cubicweb/skeleton/data/cubes.CUBENAME.js	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,1 +0,0 @@
-// This contains cube-specific javascript
\ No newline at end of file
--- a/cubicweb/skeleton/debian/changelog.tmpl	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/skeleton/debian/changelog.tmpl	Thu Oct 20 18:28:46 2016 +0200
@@ -1,6 +1,6 @@
-%(distname)s (0.1.0-1) unstable; urgency=low
+%(distname)s (0.1.0-1) UNRELEASED; urgency=low
 
   * initial release
 
- -- 
+ -- %(author)s <%(author-email)s>  %(rfc2822-date)s
 
--- a/cubicweb/skeleton/debian/control.tmpl	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/skeleton/debian/control.tmpl	Thu Oct 20 18:28:46 2016 +0200
@@ -4,7 +4,9 @@
 Maintainer: %(author)s <%(author-email)s>
 Build-Depends:
  debhelper (>= 7),
+ dh-python,
  python (>= 2.6.5),
+ python-setuptools,
 Standards-Version: 3.9.3
 X-Python-Version: >= 2.6
 
--- a/cubicweb/skeleton/debian/rules	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/skeleton/debian/rules	Thu Oct 20 18:28:46 2016 +0200
@@ -1,9 +1,4 @@
 #!/usr/bin/make -f
 
-export NO_SETUPTOOLS=1
-
 %:
 	dh $@ --with python2
-
-override_dh_python2:
-	dh_python2 -i /usr/share/cubicweb
--- a/cubicweb/skeleton/entities.py.tmpl	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,6 +0,0 @@
-# -*- coding: utf-8 -*-
-# copyright %(year)s %(author)s, all rights reserved.
-# contact %(author-web-site)s -- mailto:%(author-email)s
-#
-%(long-license)s
-"""%(distname)s entity's classes"""
--- a/cubicweb/skeleton/hooks.py.tmpl	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,6 +0,0 @@
-# -*- coding: utf-8 -*-
-# copyright %(year)s %(author)s, all rights reserved.
-# contact %(author-web-site)s -- mailto:%(author-email)s
-#
-%(long-license)s
-"""%(distname)s specific hooks and operations"""
--- a/cubicweb/skeleton/i18n/en.po	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,9 +0,0 @@
-msgid ""
-msgstr ""
-"MIME-Version: 1.0\n"
-"Content-Type: text/plain; charset=UTF-8\n"
-"Content-Transfer-Encoding: 8bit\n"
-"Generated-By: pygettext.py 1.5\n"
-"Plural-Forms: nplurals=2; plural=(n > 1);\n"
-"PO-Revision-Date: YEAR-MO-DA HO:MI +ZONE\n"
-
--- a/cubicweb/skeleton/i18n/es.po	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,9 +0,0 @@
-msgid ""
-msgstr ""
-"MIME-Version: 1.0\n"
-"Content-Type: text/plain; charset=UTF-8\n"
-"Content-Transfer-Encoding: 8bit\n"
-"Generated-By: pygettext.py 1.5\n"
-"Plural-Forms: nplurals=2; plural=(n > 1);\n"
-"PO-Revision-Date: YEAR-MO-DA HO:MI +ZONE\n"
-
--- a/cubicweb/skeleton/i18n/fr.po	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,9 +0,0 @@
-msgid ""
-msgstr ""
-"MIME-Version: 1.0\n"
-"Content-Type: text/plain; charset=UTF-8\n"
-"Content-Transfer-Encoding: 8bit\n"
-"Generated-By: pygettext.py 1.5\n"
-"Plural-Forms: nplurals=2; plural=(n > 1);\n"
-"PO-Revision-Date: YEAR-MO-DA HO:MI +ZONE\n"
-
--- a/cubicweb/skeleton/migration/postcreate.py.tmpl	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,13 +0,0 @@
-# -*- coding: utf-8 -*-
-# copyright %(year)s %(author)s, all rights reserved.
-# contact %(author-web-site)s -- mailto:%(author-email)s
-#
-%(long-license)s
-"""%(distname)s postcreate script, executed at instance creation time or when
-the cube is added to an existing instance.
-
-You could setup site properties or a workflow here for example.
-"""
-
-# Example of site property change
-#set_property('ui.site-title', "<sitename>")
--- a/cubicweb/skeleton/migration/precreate.py.tmpl	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,10 +0,0 @@
-# -*- coding: utf-8 -*-
-# copyright %(year)s %(author)s, all rights reserved.
-# contact %(author-web-site)s -- mailto:%(author-email)s
-#
-%(long-license)s
-"""%(distname)s precreate script, executed at instance creation time or when
-the cube is added to an existing instance, before the schema is serialized.
-
-This is typically to create groups referenced by the cube'schema.
-"""
--- a/cubicweb/skeleton/schema.py.tmpl	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,6 +0,0 @@
-# -*- coding: utf-8 -*-
-# copyright %(year)s %(author)s, all rights reserved.
-# contact %(author-web-site)s -- mailto:%(author-email)s
-#
-%(long-license)s
-"""%(distname)s schema"""
--- a/cubicweb/skeleton/setup.py	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,213 +0,0 @@
-#!/usr/bin/env python
-# pylint: disable=W0142,W0403,W0404,W0613,W0622,W0622,W0704,R0904,C0103,E0611
-#
-# copyright 2003-2016 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-#
-# This file is part of a CubicWeb cube.
-#
-# CubicWeb is free software: you can redistribute it and/or modify it under the
-# terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option)
-# any later version.
-#
-# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""Generic Setup script, takes package info from __pkginfo__.py file
-"""
-__docformat__ = "restructuredtext en"
-
-import os
-import sys
-import shutil
-from os.path import exists, join, dirname
-
-try:
-    if os.environ.get('NO_SETUPTOOLS'):
-        raise ImportError()  # do as there is no setuptools
-    from setuptools import setup
-    from setuptools.command import install_lib
-    USE_SETUPTOOLS = True
-except ImportError:
-    from distutils.core import setup
-    from distutils.command import install_lib
-    USE_SETUPTOOLS = False
-from distutils.command import install_data
-
-
-# load metadata from the __pkginfo__.py file so there is no risk of conflict
-# see https://packaging.python.org/en/latest/single_source_version.html
-base_dir = dirname(__file__)
-pkginfo = {}
-with open(join(base_dir, "__pkginfo__.py")) as f:
-    exec(f.read(), pkginfo)
-
-# get required metadatas
-modname = pkginfo['modname']
-version = pkginfo['version']
-license = pkginfo['license']
-description = pkginfo['description']
-web = pkginfo['web']
-author = pkginfo['author']
-author_email = pkginfo['author_email']
-classifiers = pkginfo['classifiers']
-
-with open(join(base_dir, 'README')) as f:
-    long_description = f.read()
-
-# get optional metadatas
-distname = pkginfo.get('distname', modname)
-scripts = pkginfo.get('scripts', ())
-include_dirs = pkginfo.get('include_dirs', ())
-data_files = pkginfo.get('data_files', None)
-ext_modules = pkginfo.get('ext_modules', None)
-dependency_links = pkginfo.get('dependency_links', ())
-
-if USE_SETUPTOOLS:
-    requires = {}
-    for entry in ("__depends__",):  # "__recommends__"):
-        requires.update(pkginfo.get(entry, {}))
-    install_requires = [("%s %s" % (d, v and v or "")).strip()
-                        for d, v in requires.items()]
-else:
-    install_requires = []
-
-BASE_BLACKLIST = ('CVS', '.svn', '.hg', '.git', 'debian', 'dist', 'build')
-IGNORED_EXTENSIONS = ('.pyc', '.pyo', '.elc', '~')
-
-
-def ensure_scripts(linux_scripts):
-    """
-    Creates the proper script names required for each platform
-    (taken from 4Suite)
-    """
-    from distutils import util
-    if util.get_platform()[:3] == 'win':
-        scripts_ = [script + '.bat' for script in linux_scripts]
-    else:
-        scripts_ = linux_scripts
-    return scripts_
-
-
-def export(from_dir, to_dir,
-           blacklist=BASE_BLACKLIST,
-           ignore_ext=IGNORED_EXTENSIONS,
-           verbose=True):
-    try:
-        os.mkdir(to_dir)
-    except OSError as ex:
-        # file exists ?
-        import errno
-        if ex.errno != errno.EEXIST:
-            raise
-    for dirpath, dirnames, filenames in os.walk(from_dir):
-        for norecurs in blacklist:
-            try:
-                dirnames.remove(norecurs)
-            except ValueError:
-                pass
-        for dir_name in dirnames:
-            dest = join(to_dir, dir_name)
-            if not exists(dest):
-                os.mkdir(dest)
-        for filename in filenames:
-            # don't include binary files
-            src = join(dirpath, filename)
-            dest = to_dir + src[len(from_dir):]
-            if filename[-4:] in ignore_ext:
-                continue
-            if filename[-1] == '~':
-                continue
-            if exists(dest):
-                os.remove(dest)
-            if verbose:
-                sys.stderr.write('%s -> %s\n' % (src, dest))
-            shutil.copy2(src, dest)
-
-
-class MyInstallLib(install_lib.install_lib):
-    """extend install_lib command to handle  package __init__.py and
-    include_dirs variable if necessary
-    """
-    def run(self):
-        """overridden from install_lib class"""
-        install_lib.install_lib.run(self)
-        # manually install included directories if any
-        if include_dirs:
-            base = modname
-            for directory in include_dirs:
-                dest = join(self.install_dir, base, directory)
-                export(directory, dest, verbose=False)
-
-
-# re-enable copying data files in sys.prefix
-old_install_data = install_data.install_data
-if USE_SETUPTOOLS:
-    # overwrite InstallData to use sys.prefix instead of the egg directory
-    class MyInstallData(old_install_data):
-        """A class that manages data files installation"""
-        def run(self):
-            _old_install_dir = self.install_dir
-            if self.install_dir.endswith('egg'):
-                self.install_dir = sys.prefix
-            old_install_data.run(self)
-            self.install_dir = _old_install_dir
-    try:
-        # only if easy_install available
-        import setuptools.command.easy_install  # noqa
-        # monkey patch: Crack SandboxViolation verification
-        from setuptools.sandbox import DirectorySandbox as DS
-        old_ok = DS._ok
-
-        def _ok(self, path):
-            """Return True if ``path`` can be written during installation."""
-            out = old_ok(self, path)  # here for side effect from setuptools
-            realpath = os.path.normcase(os.path.realpath(path))
-            allowed_path = os.path.normcase(sys.prefix)
-            if realpath.startswith(allowed_path):
-                out = True
-            return out
-        DS._ok = _ok
-    except ImportError:
-        pass
-
-
-def install(**kwargs):
-    """setup entry point"""
-    if USE_SETUPTOOLS:
-        if '--force-manifest' in sys.argv:
-            sys.argv.remove('--force-manifest')
-    # install-layout option was introduced in 2.5.3-1~exp1
-    elif sys.version_info < (2, 5, 4) and '--install-layout=deb' in sys.argv:
-        sys.argv.remove('--install-layout=deb')
-    cmdclass = {'install_lib': MyInstallLib}
-    if USE_SETUPTOOLS:
-        kwargs['install_requires'] = install_requires
-        kwargs['dependency_links'] = dependency_links
-        kwargs['zip_safe'] = False
-        cmdclass['install_data'] = MyInstallData
-
-    return setup(name=distname,
-                 version=version,
-                 license=license,
-                 description=description,
-                 long_description=long_description,
-                 author=author,
-                 author_email=author_email,
-                 url=web,
-                 scripts=ensure_scripts(scripts),
-                 data_files=data_files,
-                 ext_modules=ext_modules,
-                 cmdclass=cmdclass,
-                 classifiers=classifiers,
-                 **kwargs
-                 )
-
-
-if __name__ == '__main__':
-    install()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/skeleton/setup.py.tmpl	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,82 @@
+#!/usr/bin/env python
+# pylint: disable=W0142,W0403,W0404,W0613,W0622,W0622,W0704,R0904,C0103,E0611
+#
+# copyright 2003-2016 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of %(distname)s.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+"""cubicweb_%(cubename)s setup module using data from
+cubicweb_%(cubename)s/__pkginfo__.py file
+"""
+
+from os.path import join, dirname
+
+from setuptools import find_packages, setup
+
+
+here = dirname(__file__)
+
+# load metadata from the __pkginfo__.py file so there is no risk of conflict
+# see https://packaging.python.org/en/latest/single_source_version.html
+pkginfo = join(here, 'cubicweb_%(cubename)s', '__pkginfo__.py')
+__pkginfo__ = {}
+with open(pkginfo) as f:
+    exec(f.read(), __pkginfo__)
+
+# get required metadata
+distname = __pkginfo__['distname']
+version = __pkginfo__['version']
+license = __pkginfo__['license']
+description = __pkginfo__['description']
+web = __pkginfo__['web']
+author = __pkginfo__['author']
+author_email = __pkginfo__['author_email']
+classifiers = __pkginfo__['classifiers']
+
+with open(join(here, 'README')) as f:
+    long_description = f.read()
+
+# get optional metadata
+data_files = __pkginfo__.get('data_files', None)
+dependency_links = __pkginfo__.get('dependency_links', ())
+
+requires = {}
+for entry in ("__depends__",):  # "__recommends__"):
+    requires.update(__pkginfo__.get(entry, {}))
+install_requires = ["{0} {1}".format(d, v or "").strip()
+                    for d, v in requires.items()]
+
+
+setup(
+    name=distname,
+    version=version,
+    license=license,
+    description=description,
+    long_description=long_description,
+    author=author,
+    author_email=author_email,
+    url=web,
+    classifiers=classifiers,
+    packages=find_packages(exclude=['test']),
+    install_requires=install_requires,
+    include_package_data=True,
+    entry_points={
+        'cubicweb.cubes': [
+            '%(cubename)s=cubicweb_%(cubename)s',
+        ],
+    },
+    zip_safe=False,
+)
--- a/cubicweb/skeleton/sobjects.py.tmpl	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,6 +0,0 @@
-# -*- coding: utf-8 -*-
-# copyright %(year)s %(author)s, all rights reserved.
-# contact %(author-web-site)s -- mailto:%(author-email)s
-#
-%(long-license)s
-"""%(distname)s repository side views, usually for notification"""
--- a/cubicweb/skeleton/test/pytestconf.py	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,61 +0,0 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-#
-# This file is part of CubicWeb.
-#
-# CubicWeb is free software: you can redistribute it and/or modify it under the
-# terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option)
-# any later version.
-#
-# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""
-
-"""
-import os
-import sys
-
-from logilab.common.pytest import PyTester
-
-
-def getlogin():
-    """avoid usinng os.getlogin() because of strange tty / stdin problems
-    (man 3 getlogin)
-    Another solution would be to use $LOGNAME, $USER or $USERNAME
-    """
-    if sys.platform == 'win32':
-        return os.environ.get('USERNAME') or 'cubicweb'
-    import pwd
-    return pwd.getpwuid(os.getuid())[0]
-
-
-def update_parser(parser):
-    login = getlogin()
-    parser.add_option('-r', '--rebuild-database', dest='rebuild_db',
-                      default=False, action="store_true",
-                      help="remove tmpdb and rebuilds the test database")
-    parser.add_option('-u', '--dbuser', dest='dbuser', action='store',
-                      default=login, help="database user")
-    parser.add_option('-w', '--dbpassword', dest='dbpassword', action='store',
-                      default=login, help="database user's password")
-    parser.add_option('-n', '--dbname', dest='dbname', action='store',
-                      default=None, help="database name")
-    parser.add_option('--euser', dest='euser', action='store',
-                      default=login, help="euser name")
-    parser.add_option('--epassword', dest='epassword', action='store',
-                      default=login, help="euser's password' name")
-    return parser
-
-
-class CustomPyTester(PyTester):
-    def __init__(self, cvg, options):
-        super(CustomPyTester, self).__init__(cvg, options)
-        if options.rebuild_db:
-            os.unlink('tmpdb')
-            os.unlink('tmpdb-template')
--- a/cubicweb/skeleton/tox.ini	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,20 +0,0 @@
-[tox]
-envlist = py27,py34,flake8
-
-[testenv]
-sitepackages = true
-deps =
-  pytest
-commands =
-  {envpython} -m pytest {posargs:test}
-
-[testenv:flake8]
-skip_install = true
-whitelist_externals =
-  flake8
-deps =
-  flake8
-commands = flake8
-
-[flake8]
-exclude = migration/*,test/data/*,setup.py,.tox/*
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/skeleton/tox.ini.tmpl	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,20 @@
+[tox]
+envlist = py27,py34,flake8
+
+[testenv]
+sitepackages = true
+deps =
+  pytest
+commands =
+  {envpython} -m pytest {posargs:test}
+
+[testenv:flake8]
+skip_install = true
+whitelist_externals =
+  flake8
+deps =
+  flake8
+commands = flake8
+
+[flake8]
+exclude = cubicweb_%(cubename)s/migration/*,test/data/*,.tox/*
--- a/cubicweb/skeleton/uiprops.py.tmpl	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,16 +0,0 @@
-# -*- coding: utf-8 -*-
-###############################################################################
-#
-# Put here information about external resources / styles used by your cube,
-# or to overides existing UI properties.
-#
-# Existing properties are available through the `sheet` dictionary available
-# in the global namespace. You also have access to a `data` function which
-# will return proper url for resources in the 'data' directory.
-#
-# /!\ this file should not be imported /!\
-###############################################################################
-
-# CSS stylesheets to include in HTML headers
-# uncomment the line below to use template specific stylesheet
-# STYLESHEETS = sheet['STYLESHEETS'] + [data('cubes.%(cubename)s.css')]
--- a/cubicweb/skeleton/views.py.tmpl	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,6 +0,0 @@
-# -*- coding: utf-8 -*-
-# copyright %(year)s %(author)s, all rights reserved.
-# contact %(author-web-site)s -- mailto:%(author-email)s
-#
-%(long-license)s
-"""%(distname)s views/forms/actions/components for web ui"""
--- a/cubicweb/sobjects/test/requirements.txt	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,2 +0,0 @@
-cubicweb-card
-cubicweb-comment
--- a/cubicweb/test/data/cubes/comment/__init__.py	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,17 +0,0 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-#
-# This file is part of CubicWeb.
-#
-# CubicWeb is free software: you can redistribute it and/or modify it under the
-# terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option)
-# any later version.
-#
-# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
--- a/cubicweb/test/data/cubes/comment/__pkginfo__.py	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,25 +0,0 @@
-# pylint: disable=W0622
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-#
-# This file is part of CubicWeb.
-#
-# CubicWeb is free software: you can redistribute it and/or modify it under the
-# terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option)
-# any later version.
-#
-# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""cubicweb-comment packaging information"""
-
-distname = "cubicweb-comment"
-modname = distname.split('-', 1)[1]
-
-numversion = (1, 4, 3)
-version = '.'.join(str(num) for num in numversion)
--- a/cubicweb/test/data/cubes/email/__init__.py	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,17 +0,0 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-#
-# This file is part of CubicWeb.
-#
-# CubicWeb is free software: you can redistribute it and/or modify it under the
-# terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option)
-# any later version.
-#
-# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
--- a/cubicweb/test/data/cubes/email/__pkginfo__.py	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,30 +0,0 @@
-# pylint: disable=W0622
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-#
-# This file is part of CubicWeb.
-#
-# CubicWeb is free software: you can redistribute it and/or modify it under the
-# terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option)
-# any later version.
-#
-# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""cubicweb-email packaging information"""
-
-distname = "cubicweb-email"
-modname = distname.split('-', 1)[1]
-
-numversion = (1, 4, 3)
-version = '.'.join(str(num) for num in numversion)
-
-
-__depends__ = {'cubicweb': None,
-               'cubicweb-file': None}
-__recommends__ = {'cubicweb-comment': None}
--- a/cubicweb/test/data/cubes/email/entities.py	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,1 +0,0 @@
-"test"
--- a/cubicweb/test/data/cubes/email/hooks.py	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,1 +0,0 @@
-"test"
--- a/cubicweb/test/data/cubes/email/views/__init__.py	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,1 +0,0 @@
-"test"
--- a/cubicweb/test/data/cubes/file/__init__.py	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,17 +0,0 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-#
-# This file is part of CubicWeb.
-#
-# CubicWeb is free software: you can redistribute it and/or modify it under the
-# terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option)
-# any later version.
-#
-# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
--- a/cubicweb/test/data/cubes/file/__pkginfo__.py	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,25 +0,0 @@
-# pylint: disable=W0622
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-#
-# This file is part of CubicWeb.
-#
-# CubicWeb is free software: you can redistribute it and/or modify it under the
-# terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option)
-# any later version.
-#
-# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""cubicweb-file packaging information"""
-
-distname = "cubicweb-file"
-modname = distname.split('-', 1)[1]
-
-numversion = (1, 4, 3)
-version = '.'.join(str(num) for num in numversion)
--- a/cubicweb/test/data/cubes/file/entities/__init__.py	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,1 +0,0 @@
-"test"
--- a/cubicweb/test/data/cubes/file/hooks/__init__.py	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,1 +0,0 @@
-"test"
--- a/cubicweb/test/data/cubes/file/views.py	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,1 +0,0 @@
-"test"
--- a/cubicweb/test/data/cubes/forge/__init__.py	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,17 +0,0 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-#
-# This file is part of CubicWeb.
-#
-# CubicWeb is free software: you can redistribute it and/or modify it under the
-# terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option)
-# any later version.
-#
-# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
--- a/cubicweb/test/data/cubes/forge/__pkginfo__.py	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,32 +0,0 @@
-# pylint: disable=W0622
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-#
-# This file is part of CubicWeb.
-#
-# CubicWeb is free software: you can redistribute it and/or modify it under the
-# terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option)
-# any later version.
-#
-# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""cubicweb-forge packaging information"""
-
-distname = "cubicweb-forge"
-modname = distname.split('-', 1)[1]
-
-numversion = (1, 4, 3)
-version = '.'.join(str(num) for num in numversion)
-
-
-__depends__ = {'cubicweb': None,
-               'cubicweb-file': None,
-               'cubicweb-email': None,
-               'cubicweb-comment': None,
-               }
--- a/cubicweb/test/data/cubes/mycube/__init__.py	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,20 +0,0 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-#
-# This file is part of CubicWeb.
-#
-# CubicWeb is free software: you can redistribute it and/or modify it under the
-# terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option)
-# any later version.
-#
-# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""mycube's __init__
-
-"""
--- a/cubicweb/test/data/cubes/mycube/__pkginfo__.py	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,21 +0,0 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-#
-# This file is part of CubicWeb.
-#
-# CubicWeb is free software: you can redistribute it and/or modify it under the
-# terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option)
-# any later version.
-#
-# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""
-
-"""
-distname = 'cubicweb-mycube'
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/test/data/legacy_cubes/comment	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,1 @@
+../libpython/cubicweb_comment
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/test/data/legacy_cubes/email	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,1 @@
+../libpython/cubicweb_email/
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/test/data/legacy_cubes/file	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,1 @@
+../libpython/cubicweb_file
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/test/data/legacy_cubes/forge	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,1 @@
+../libpython/cubicweb_forge
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/test/data/legacy_cubes/mycube	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,1 @@
+../libpython/cubicweb_mycube
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/test/data/libpython/cubicweb_comment/__init__.py	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,17 @@
+# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/test/data/libpython/cubicweb_comment/__pkginfo__.py	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,25 @@
+# pylint: disable=W0622
+# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+"""cubicweb-comment packaging information"""
+
+distname = "cubicweb-comment"
+modname = distname.split('-', 1)[1]
+
+numversion = (1, 4, 3)
+version = '.'.join(str(num) for num in numversion)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/test/data/libpython/cubicweb_email/__init__.py	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,17 @@
+# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/test/data/libpython/cubicweb_email/__pkginfo__.py	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,30 @@
+# pylint: disable=W0622
+# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+"""cubicweb-email packaging information"""
+
+distname = "cubicweb-email"
+modname = distname.split('-', 1)[1]
+
+numversion = (1, 4, 3)
+version = '.'.join(str(num) for num in numversion)
+
+
+__depends__ = {'cubicweb': None,
+               'cubicweb-file': None}
+__recommends__ = {'cubicweb-comment': None}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/test/data/libpython/cubicweb_email/entities.py	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,1 @@
+"test"
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/test/data/libpython/cubicweb_email/hooks.py	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,1 @@
+"test"
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/test/data/libpython/cubicweb_email/views/__init__.py	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,1 @@
+"test"
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/test/data/libpython/cubicweb_file/__init__.py	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,17 @@
+# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/test/data/libpython/cubicweb_file/__pkginfo__.py	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,25 @@
+# pylint: disable=W0622
+# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+"""cubicweb-file packaging information"""
+
+distname = "cubicweb-file"
+modname = distname.split('-', 1)[1]
+
+numversion = (1, 4, 3)
+version = '.'.join(str(num) for num in numversion)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/test/data/libpython/cubicweb_file/entities/__init__.py	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,1 @@
+"test"
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/test/data/libpython/cubicweb_file/hooks/__init__.py	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,1 @@
+"test"
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/test/data/libpython/cubicweb_file/views.py	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,1 @@
+"test"
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/test/data/libpython/cubicweb_forge/__init__.py	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,17 @@
+# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/test/data/libpython/cubicweb_forge/__pkginfo__.py	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,32 @@
+# pylint: disable=W0622
+# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+"""cubicweb-forge packaging information"""
+
+distname = "cubicweb-forge"
+modname = distname.split('-', 1)[1]
+
+numversion = (1, 4, 3)
+version = '.'.join(str(num) for num in numversion)
+
+
+__depends__ = {'cubicweb': None,
+               'cubicweb-file': None,
+               'cubicweb-email': None,
+               'cubicweb-comment': None,
+               }
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/test/data/libpython/cubicweb_mycube/__init__.py	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,20 @@
+# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+"""mycube's __init__
+
+"""
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/test/data/libpython/cubicweb_mycube/__pkginfo__.py	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,21 @@
+# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+"""
+
+"""
+distname = 'cubicweb-mycube'
--- a/cubicweb/test/requirements.txt	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,6 +0,0 @@
-Pygments
-#fyzz XXX pip install fails
-cubicweb-card
-cubicweb-file
-cubicweb-localperms
-cubicweb-tag
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/test/unittest_cubes.py	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,123 @@
+# copyright 2016 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+"""Unit tests for "cubes" importer."""
+
+from contextlib import contextmanager
+import os
+from os import path
+import shutil
+import sys
+import tempfile
+import unittest
+
+from six import PY2
+
+from cubicweb import _CubesImporter
+from cubicweb.cwconfig import CubicWebConfiguration
+
+
+@contextmanager
+def temp_cube():
+    tempdir = tempfile.mkdtemp()
+    try:
+        libdir = path.join(tempdir, 'libpython')
+        cubedir = path.join(libdir, 'cubicweb_foo')
+        os.makedirs(cubedir)
+        with open(path.join(cubedir, '__init__.py'), 'w') as f:
+            f.write('"""cubicweb_foo application package"""')
+        with open(path.join(cubedir, 'bar.py'), 'w') as f:
+            f.write('baz = 1')
+        sys.path.append(libdir)
+        yield cubedir
+    finally:
+        shutil.rmtree(tempdir)
+        sys.path.remove(libdir)
+
+
+class CubesImporterTC(unittest.TestCase):
+
+    def setUp(self):
+        # During discovery, CubicWebConfiguration.cls_adjust_sys_path may be
+        # called (probably because of cubicweb.devtools's __init__.py), so
+        # uninstall _CubesImporter.
+        for x in sys.meta_path:
+            if isinstance(x, _CubesImporter):
+                sys.meta_path.remove(x)
+        # Keep track of initial sys.path and sys.meta_path.
+        self.orig_sys_path = sys.path[:]
+        self.orig_sys_meta_path = sys.meta_path[:]
+
+    def tearDown(self):
+        # Cleanup any imported "cubes".
+        for name in list(sys.modules):
+            if name.startswith('cubes') or name.startswith('cubicweb_'):
+                del sys.modules[name]
+        # Restore sys.{meta_,}path
+        sys.path[:] = self.orig_sys_path
+        sys.meta_path[:] = self.orig_sys_meta_path
+
+    def test_importer_install(self):
+        _CubesImporter.install()
+        self.assertIsInstance(sys.meta_path[-1], _CubesImporter)
+
+    def test_config_installs_importer(self):
+        CubicWebConfiguration.cls_adjust_sys_path()
+        self.assertIsInstance(sys.meta_path[-1], _CubesImporter)
+
+    def test_import_cube_as_package_legacy_name(self):
+        """Check for import of an actual package-cube using legacy name"""
+        with temp_cube() as cubedir:
+            import cubicweb_foo  # noqa
+            del sys.modules['cubicweb_foo']
+            with self.assertRaises(ImportError):
+                import cubes.foo
+            CubicWebConfiguration.cls_adjust_sys_path()
+            import cubes.foo  # noqa
+            self.assertEqual(cubes.foo.__path__, [cubedir])
+            self.assertEqual(cubes.foo.__doc__,
+                             'cubicweb_foo application package')
+            # Import a submodule.
+            from cubes.foo import bar
+            self.assertEqual(bar.baz, 1)
+
+    def test_import_legacy_cube(self):
+        """Check that importing a legacy cube works when sys.path got adjusted.
+        """
+        CubicWebConfiguration.cls_adjust_sys_path()
+        import cubes.card  # noqa
+
+    def test_import_cube_as_package_after_legacy_cube(self):
+        """Check import of a "cube as package" after a legacy cube."""
+        CubicWebConfiguration.cls_adjust_sys_path()
+        with temp_cube() as cubedir:
+            import cubes.card
+            import cubes.foo
+        self.assertEqual(cubes.foo.__path__, [cubedir])
+
+    def test_cube_inexistant(self):
+        """Check for import of a nonexistent cube"""
+        CubicWebConfiguration.cls_adjust_sys_path()
+        with self.assertRaises(ImportError) as cm:
+            import cubes.doesnotexists  # noqa
+        msg = "No module named " + ("doesnotexists" if PY2 else "'cubes.doesnotexists'")
+        self.assertEqual(str(cm.exception), msg)
+
+
+if __name__ == '__main__':
+    import unittest
+    unittest.main()
--- a/cubicweb/test/unittest_cwconfig.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/test/unittest_cwconfig.py	Thu Oct 20 18:28:46 2016 +0200
@@ -21,37 +21,76 @@
 import os
 import tempfile
 from os.path import dirname, join, abspath
+from pkg_resources import EntryPoint, Distribution
+import unittest
+
+from mock import patch
+from six import PY3
 
 from logilab.common.modutils import cleanup_sys_modules
-from logilab.common.testlib import (TestCase, unittest_main,
-                                    with_tempdir)
+from logilab.common.testlib import with_tempdir
 from logilab.common.changelog import Version
 
-from cubicweb.devtools import ApptestConfiguration
+from cubicweb.devtools import ApptestConfiguration, testlib
 from cubicweb.cwconfig import _find_prefix
 
+
 def unabsolutize(path):
     parts = path.split(os.sep)
     for i, part in reversed(tuple(enumerate(parts))):
-        if part.startswith('cubicweb') or part == 'cubes':
-            return '/'.join(parts[i+1:])
+        if part.startswith('cubicweb_'):
+            return os.sep.join([part[len('cubicweb_'):]] + parts[i+1:])
+        if part.startswith('cubicweb') or part == 'legacy_cubes':
+            return os.sep.join(parts[i+1:])
     raise Exception('duh? %s' % path)
 
-CUSTOM_CUBES_DIR = abspath(join(dirname(__file__), 'data', 'cubes'))
 
+class CubicWebConfigurationTC(testlib.BaseTestCase):
 
-class CubicWebConfigurationTC(TestCase):
+    @classmethod
+    def setUpClass(cls):
+        sys.path.append(cls.datapath('libpython'))
+
+    @classmethod
+    def tearDownClass(cls):
+        sys.path.remove(cls.datapath('libpython'))
+
     def setUp(self):
-        cleanup_sys_modules([CUSTOM_CUBES_DIR, ApptestConfiguration.CUBES_DIR])
         self.config = ApptestConfiguration('data', __file__)
         self.config._cubes = ('email', 'file')
 
     def tearDown(self):
         ApptestConfiguration.CUBES_PATH = []
 
+    def iter_entry_points(group, name):
+        """Mock pkg_resources.iter_entry_points to yield EntryPoint from
+        packages found in test/data/libpython even though these are not
+        installed.
+        """
+        libpython = CubicWebConfigurationTC.datapath('libpython')
+        prefix = 'cubicweb_'
+        for pkgname in os.listdir(libpython):
+            if not pkgname.startswith(prefix):
+                continue
+            location = join(libpython, pkgname)
+            yield EntryPoint(pkgname[len(prefix):], pkgname,
+                             dist=Distribution(location))
+
+    @patch('pkg_resources.iter_entry_points', side_effect=iter_entry_points)
+    def test_available_cubes(self, mock_iter_entry_points):
+        expected_cubes = [
+            'card', 'comment', 'cubicweb_comment', 'cubicweb_email', 'file',
+            'cubicweb_file', 'cubicweb_forge', 'localperms',
+            'cubicweb_mycube', 'tag',
+        ]
+        self._test_available_cubes(expected_cubes)
+        mock_iter_entry_points.assert_called_once_with(
+            group='cubicweb.cubes', name=None)
+
+    def _test_available_cubes(self, expected_cubes):
+        self.assertEqual(self.config.available_cubes(), expected_cubes)
+
     def test_reorder_cubes(self):
-        self.config.__class__.CUBES_PATH = [CUSTOM_CUBES_DIR]
-        self.config.adjust_sys_path()
         # forge depends on email and file and comment
         # email depends on file
         self.assertEqual(self.config.reorder_cubes(['file', 'email', 'forge']),
@@ -68,9 +107,10 @@
                           ('forge', 'email', 'file'))
 
     def test_reorder_cubes_recommends(self):
-        self.config.__class__.CUBES_PATH = [CUSTOM_CUBES_DIR]
-        self.config.adjust_sys_path()
-        from cubes.comment import __pkginfo__ as comment_pkginfo
+        from cubicweb_comment import __pkginfo__ as comment_pkginfo
+        self._test_reorder_cubes_recommends(comment_pkginfo)
+
+    def _test_reorder_cubes_recommends(self, comment_pkginfo):
         comment_pkginfo.__recommends_cubes__ = {'file': None}
         try:
             # email recommends comment
@@ -87,35 +127,73 @@
             comment_pkginfo.__recommends_cubes__ = {}
 
     def test_expand_cubes(self):
-        self.config.__class__.CUBES_PATH = [CUSTOM_CUBES_DIR]
-        self.config.adjust_sys_path()
         self.assertEqual(self.config.expand_cubes(('email', 'comment')),
                           ['email', 'comment', 'file'])
 
     def test_appobjects_path(self):
-        self.config.__class__.CUBES_PATH = [CUSTOM_CUBES_DIR]
-        self.config.adjust_sys_path()
         path = [unabsolutize(p) for p in self.config.appobjects_path()]
         self.assertEqual(path[0], 'entities')
         self.assertCountEqual(path[1:4], ['web/views', 'sobjects', 'hooks'])
         self.assertEqual(path[4], 'file/entities')
-        self.assertCountEqual(path[5:7], ['file/views.py', 'file/hooks'])
+        self.assertCountEqual(path[5:7],
+                              ['file/views.py', 'file/hooks'])
         self.assertEqual(path[7], 'email/entities.py')
-        self.assertCountEqual(path[8:10], ['email/views', 'email/hooks.py'])
+        self.assertCountEqual(path[8:10],
+                              ['email/views', 'email/hooks.py'])
         self.assertEqual(path[10:], ['test/data/entities.py', 'test/data/views.py'])
 
+    def test_init_cubes_ignore_pyramid_cube(self):
+        warning_msg = 'cubicweb-pyramid got integrated into CubicWeb'
+        with self.assertLogs('cubicweb.configuration', level='WARNING') as cm:
+            self.config.init_cubes(['pyramid', 'card'])
+        self.assertIn(warning_msg, cm.output[0])
+        self.assertNotIn('pyramid', self.config._cubes)
+
+class CubicWebConfigurationWithLegacyCubesTC(CubicWebConfigurationTC):
+
+    @classmethod
+    def setUpClass(cls):
+        pass
+
+    @classmethod
+    def tearDownClass(cls):
+        pass
+
+    def setUp(self):
+        self.custom_cubes_dir = self.datapath('legacy_cubes')
+        cleanup_sys_modules([self.custom_cubes_dir, ApptestConfiguration.CUBES_DIR])
+        super(CubicWebConfigurationWithLegacyCubesTC, self).setUp()
+        self.config.__class__.CUBES_PATH = [self.custom_cubes_dir]
+        self.config.adjust_sys_path()
+
+    def tearDown(self):
+        ApptestConfiguration.CUBES_PATH = []
+
+    def test_available_cubes(self):
+        expected_cubes = sorted(set([
+            # local cubes
+            'comment', 'email', 'file', 'forge', 'mycube',
+            # test dependencies
+            'card', 'file', 'localperms', 'tag',
+        ]))
+        self._test_available_cubes(expected_cubes)
+
+    def test_reorder_cubes_recommends(self):
+        from cubes.comment import __pkginfo__ as comment_pkginfo
+        self._test_reorder_cubes_recommends(comment_pkginfo)
+
     def test_cubes_path(self):
         # make sure we don't import the email cube, but the stdlib email package
         import email
         self.assertNotEqual(dirname(email.__file__), self.config.CUBES_DIR)
-        self.config.__class__.CUBES_PATH = [CUSTOM_CUBES_DIR]
+        self.config.__class__.CUBES_PATH = [self.custom_cubes_dir]
         self.assertEqual(self.config.cubes_search_path(),
-                          [CUSTOM_CUBES_DIR, self.config.CUBES_DIR])
-        self.config.__class__.CUBES_PATH = [CUSTOM_CUBES_DIR,
+                          [self.custom_cubes_dir, self.config.CUBES_DIR])
+        self.config.__class__.CUBES_PATH = [self.custom_cubes_dir,
                                             self.config.CUBES_DIR, 'unexistant']
         # filter out unexistant and duplicates
         self.assertEqual(self.config.cubes_search_path(),
-                          [CUSTOM_CUBES_DIR,
+                          [self.custom_cubes_dir,
                            self.config.CUBES_DIR])
         self.assertIn('mycube', self.config.available_cubes())
         # test cubes python path
@@ -124,15 +202,24 @@
         self.assertEqual(cubes.__path__, self.config.cubes_search_path())
         # this import should succeed once path is adjusted
         from cubes import mycube
-        self.assertEqual(mycube.__path__, [join(CUSTOM_CUBES_DIR, 'mycube')])
+        self.assertEqual(mycube.__path__, [join(self.custom_cubes_dir, 'mycube')])
         # file cube should be overriden by the one found in data/cubes
-        sys.modules.pop('cubes.file', None)
-        del cubes.file
+        if sys.modules.pop('cubes.file', None) and PY3:
+            del cubes.file
         from cubes import file
-        self.assertEqual(file.__path__, [join(CUSTOM_CUBES_DIR, 'file')])
+        self.assertEqual(file.__path__, [join(self.custom_cubes_dir, 'file')])
+
+    def test_config_value_from_environment(self):
+        self.assertIsNone(self.config['base-url'])
+        os.environ['CW_BASE_URL'] = 'https://www.cubicweb.org'
+        try:
+            self.assertEqual(self.config['base-url'],
+                             'https://www.cubicweb.org')
+        finally:
+            del os.environ['CW_BASE_URL']
 
 
-class FindPrefixTC(TestCase):
+class FindPrefixTC(unittest.TestCase):
     def make_dirs(self, *args):
         path = join(tempfile.tempdir, *args)
         if not os.path.exists(path):
@@ -231,4 +318,4 @@
 
 
 if __name__ == '__main__':
-    unittest_main()
+    unittest.main()
--- a/cubicweb/test/unittest_cwctl.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/test/unittest_cwctl.py	Thu Oct 20 18:28:46 2016 +0200
@@ -19,11 +19,10 @@
 import os
 from os.path import join
 from io import StringIO, BytesIO
+import unittest
 
 from six import PY2
 
-from logilab.common.testlib import TestCase, unittest_main
-
 from cubicweb.cwconfig import CubicWebConfiguration
 from cubicweb.devtools.testlib import CubicWebTC
 from cubicweb.server.migractions import ServerMigrationHelper
@@ -31,10 +30,12 @@
 CubicWebConfiguration.load_cwctl_plugins() # XXX necessary?
 
 
-class CubicWebCtlTC(TestCase):
+class CubicWebCtlTC(unittest.TestCase):
+
     def setUp(self):
         self.stream = BytesIO() if PY2 else StringIO()
         sys.stdout = self.stream
+
     def tearDown(self):
         sys.stdout = sys.__stdout__
 
@@ -65,4 +66,4 @@
 
 
 if __name__ == '__main__':
-    unittest_main()
+    unittest.main()
--- a/cubicweb/test/unittest_schema.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/test/unittest_schema.py	Thu Oct 20 18:28:46 2016 +0200
@@ -174,7 +174,7 @@
             'CWCache', 'CWComputedRType', 'CWConstraint',
             'CWConstraintType', 'CWDataImport', 'CWEType',
             'CWAttribute', 'CWGroup', 'EmailAddress',
-            'CWRelation', 'CWPermission', 'CWProperty', 'CWRType',
+            'CWRelation', 'CWPermission', 'CWProperty', 'CWRType', 'CWSession',
             'CWSource', 'CWSourceHostConfig', 'CWSourceSchemaConfig',
             'CWUniqueTogetherConstraint', 'CWUser',
             'ExternalUri', 'FakeFile', 'Float', 'Int', 'Interval', 'Note',
@@ -196,7 +196,8 @@
             'constrained_by', 'constraint_of',
             'content', 'content_format', 'contrat_exclusif',
             'created_by', 'creation_date', 'cstrtype', 'custom_workflow',
-            'cwuri', 'cw_for_source', 'cw_import_of', 'cw_host_config_of', 'cw_schema', 'cw_source',
+            'cwuri', 'cwsessiondata', 'cw_for_source', 'cw_import_of', 'cw_host_config_of',
+            'cw_schema', 'cw_source',
 
             'data', 'data_encoding', 'data_format', 'data_name', 'default_workflow', 'defaultval',
             'delete_permission', 'description', 'description_format', 'destination_state',
@@ -526,6 +527,7 @@
                      ('cw_source', 'CWProperty', 'CWSource', 'object'),
                      ('cw_source', 'CWRType', 'CWSource', 'object'),
                      ('cw_source', 'CWRelation', 'CWSource', 'object'),
+                     ('cw_source', 'CWSession', 'CWSource', 'object'),
                      ('cw_source', 'CWSource', 'CWSource', 'object'),
                      ('cw_source', 'CWSourceHostConfig', 'CWSource', 'object'),
                      ('cw_source', 'CWSourceSchemaConfig', 'CWSource', 'object'),
--- a/cubicweb/test/unittest_spa2rql.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/test/unittest_spa2rql.py	Thu Oct 20 18:28:46 2016 +0200
@@ -33,15 +33,20 @@
 xy.add_equivalence('Project name', 'doap:Project dc:title')
 
 
-config = TestServerConfiguration('data', __file__)
-config.bootstrap_cubes()
-schema = config.load_schema()
-
 
 @unittest.skipIf(SKIPCAUSE, SKIPCAUSE)
 class XYTC(TestCase):
+
+    schema = None
+
+    @classmethod
+    def setUpClass(cls):
+        config = TestServerConfiguration('data', __file__)
+        config.bootstrap_cubes()
+        cls.schema = config.load_schema()
+
     def setUp(self):
-        self.tr = Sparql2rqlTranslator(schema)
+        self.tr = Sparql2rqlTranslator(self.schema)
 
     def _test(self, sparql, rql, args={}):
         qi = self.tr.translate(sparql)
--- a/cubicweb/test/unittest_toolsutils.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/test/unittest_toolsutils.py	Thu Oct 20 18:28:46 2016 +0200
@@ -17,12 +17,15 @@
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 
 
-from logilab.common.testlib import TestCase, unittest_main
+import os
+import tempfile
+import unittest
 
-from cubicweb.toolsutils import RQLExecuteMatcher
+from cubicweb.toolsutils import (RQLExecuteMatcher, option_value_from_env,
+                                 read_config)
 
 
-class RQLExecuteMatcherTests(TestCase):
+class RQLExecuteMatcherTests(unittest.TestCase):
     def matched_query(self, text):
         match = RQLExecuteMatcher.match(text)
         if match is None:
@@ -53,5 +56,68 @@
             self.assertEqual(query, 'Any X WHERE X is ')
 
 
+SOURCES_CONTENT = b"""
+[admin]
+
+# cubicweb manager account's login (this user will be created)
+login=admin
+
+# cubicweb manager account's password
+password=admin
+
+[system]
+
+# database driver (postgres, sqlite, sqlserver2005)
+db-driver=postgres
+
+# database host
+db-host=
+
+# database port
+db-port=
+"""
+
+
+class ToolsUtilsTC(unittest.TestCase):
+
+    def test_option_value_from_env(self):
+        os.environ['CW_DB_HOST'] = 'here'
+        try:
+            self.assertEqual(option_value_from_env('db-host'), 'here')
+            self.assertEqual(option_value_from_env('db-host', 'nothere'), 'here')
+            self.assertEqual(option_value_from_env('db-hots', 'nothere'), 'nothere')
+        finally:
+            del os.environ['CW_DB_HOST']
+
+    def test_read_config(self):
+        with tempfile.NamedTemporaryFile() as f:
+            f.write(SOURCES_CONTENT)
+            f.seek(0)
+            config = read_config(f.name)
+        expected = {
+            'admin': {
+                'password': 'admin',
+                'login': 'admin',
+            },
+            'system': {
+                'db-port': None,
+                'db-driver': 'postgres',
+                'db-host': None,
+            },
+        }
+        self.assertEqual(config, expected)
+
+    def test_read_config_env(self):
+        os.environ['CW_DB_HOST'] = 'here'
+        try:
+            with tempfile.NamedTemporaryFile() as f:
+                f.write(SOURCES_CONTENT)
+                f.seek(0)
+                config = read_config(f.name)
+        finally:
+            del os.environ['CW_DB_HOST']
+        self.assertEqual(config['system']['db-host'], 'here')
+
+
 if __name__ == '__main__':
-    unittest_main()
+    unittest.main()
--- a/cubicweb/test/unittest_utils.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/test/unittest_utils.py	Thu Oct 20 18:28:46 2016 +0200
@@ -17,15 +17,19 @@
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """unit tests for module cubicweb.utils"""
 
+import base64
 import datetime
 import decimal
 import doctest
 import re
+try:
+    from unittest2 import TestCase
+except ImportError:  # Python3
+    from unittest import TestCase
 
 from six.moves import range
 
-from logilab.common.testlib import TestCase, unittest_main
-
+from cubicweb import Binary
 from cubicweb.devtools.testlib import CubicWebTC
 from cubicweb.utils import (make_uid, UStringIO, RepeatList, HTMLHead,
                             QueryCache, parse_repo_uri)
@@ -200,9 +204,16 @@
         self.assertEqual(json.loads(self.encode([e])),
                           [{'pouet': 'hop', 'eid': 2}])
 
+    def test_encoding_binary(self):
+        for content in (b'he he', b'h\xe9 h\xe9'):
+            with self.subTest(content=content):
+                encoded = self.encode(Binary(content))
+                self.assertEqual(base64.b64decode(encoded), content)
+
     def test_encoding_unknown_stuff(self):
         self.assertEqual(self.encode(TestCase), 'null')
 
+
 class HTMLHeadTC(CubicWebTC):
 
     def htmlhead(self, datadir_url):
@@ -321,4 +332,5 @@
 
 
 if __name__ == '__main__':
-    unittest_main()
+    import unittest
+    unittest.main()
--- a/cubicweb/toolsutils.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/toolsutils.py	Thu Oct 20 18:28:46 2016 +0200
@@ -131,6 +131,9 @@
     targetdir = normpath(targetdir)
     for dirpath, dirnames, filenames in walk(skeldir):
         tdirpath = dirpath.replace(skeldir, targetdir)
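+        # rename the skeleton's 'cubicweb_CUBENAME' package directory after the cube being created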
+        if 'cubicweb_CUBENAME' in tdirpath:
+            tdirpath = tdirpath.replace('cubicweb_CUBENAME',
+                                        'cubicweb_' + context['cubename'])
         create_dir(tdirpath)
         for fname in filenames:
             if any(fnmatch(fname, pat) for pat in exclude):
@@ -152,6 +155,7 @@
                 show_diffs(tfpath, fpath, askconfirm)
             else:
                 shutil.copyfile(fpath, tfpath)
+                shutil.copymode(fpath, tfpath)
 
 def fill_templated_file(fpath, tfpath, context):
     with io.open(fpath, encoding='ascii') as fobj:
@@ -167,6 +171,15 @@
         print('-> set permissions to 0600 for %s' % filepath)
     chmod(filepath, 0o600)
 
+
+def option_value_from_env(option, default=None):
+    """Return the value of configuration `option` from cannonical environment
+    variable.
+    """
+    envvar = ('CW_' + '_'.join(option.split('-'))).upper()
+    return os.environ.get(envvar, default)
+
+
 def read_config(config_file, raise_if_unreadable=False):
     """read some simple configuration from `config_file` and return it as a
     dictionary. If `raise_if_unreadable` is false (the default), an empty
@@ -191,7 +204,7 @@
                 sys.stderr.write('ignoring malformed line\n%r\n' % line)
                 continue
             option = option.strip().replace(' ', '_')
-            value = value.strip()
+            value = option_value_from_env(option, value.strip())
             current[option] = value or None
     except IOError as ex:
         if raise_if_unreadable:
--- a/cubicweb/utils.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/utils.py	Thu Oct 20 18:28:46 2016 +0200
@@ -21,6 +21,7 @@
 
 __docformat__ = "restructuredtext en"
 
+import base64
 import decimal
 import datetime
 import random
@@ -42,6 +43,9 @@
 from logilab.common.deprecation import deprecated
 from logilab.common.date import ustrftime
 
+from cubicweb import Binary
+
+
 _MARKER = object()
 
 # initialize random seed from current time
@@ -507,6 +511,8 @@
             return (obj.days * 24 * 60 * 60) + obj.seconds
         elif isinstance(obj, decimal.Decimal):
             return float(obj)
+        elif isinstance(obj, Binary):
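+            # raw bytes are not JSON-serializable; emit them as an ASCII base64 string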
+            return base64.b64encode(obj.getvalue()).decode('ascii')
         try:
             return json.JSONEncoder.default(self, obj)
         except TypeError:
--- a/cubicweb/web/application.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/web/application.py	Thu Oct 20 18:28:46 2016 +0200
@@ -51,9 +51,9 @@
 def anonymized_request(req):
     orig_cnx = req.cnx
     anon_cnx = anonymous_cnx(orig_cnx.session.repo)
-    req.set_cnx(anon_cnx)
     try:
         with anon_cnx:
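+            # switch the request to the anonymous connection only once it is open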
+            req.set_cnx(anon_cnx)
             yield req
     finally:
         req.set_cnx(orig_cnx)
@@ -262,9 +262,10 @@
         try:
             try:
                 session = self.get_session(req)
-                from  cubicweb import repoapi
-                cnx = repoapi.Connection(session)
-                req.set_cnx(cnx)
+                cnx = session.new_cnx()
+                with cnx:  # may need an open connection to access to e.g. properties
+                    req.set_cnx(cnx)
+                cnx._open = None  # XXX needed to reuse it a few lines later :'(
             except AuthenticationError:
                 # Keep the dummy session set at initialisation.  such session will work to some
                 # extend but raise an AuthenticationError on any database access.
--- a/cubicweb/web/test/requirements.txt	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,6 +0,0 @@
-requests
-webtest
-Twisted < 16.0.0
-cubicweb-blog
-cubicweb-file
-cubicweb-tag
--- a/cubicweb/web/test/test_views.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/web/test/test_views.py	Thu Oct 20 18:28:46 2016 +0200
@@ -31,7 +31,7 @@
         # some EntityType. The two Blog types below require the sioc cube that
         # we do not want to add as a dependency.
         etypes = super(AutomaticWebTest, self).to_test_etypes()
-        etypes -= set(('Blog', 'BlogEntry'))
+        etypes -= set(('Blog', 'BlogEntry', 'CWSession'))
         return etypes
 
 
--- a/cubicweb/web/test/unittest_application.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/web/test/unittest_application.py	Thu Oct 20 18:28:46 2016 +0200
@@ -610,7 +610,9 @@
     def _test_auth_anon(self, req):
         asession = self.app.get_session(req)
         # important otherwise _reset_cookie will not use the right session
-        req.set_cnx(repoapi.Connection(asession))
+        cnx = asession.new_cnx()
+        with cnx:
+            req.set_cnx(cnx)
         self.assertEqual(len(self.open_sessions), 1)
         self.assertEqual(asession.login, 'anon')
         self.assertTrue(asession.anonymous_session)
@@ -619,8 +621,10 @@
     def _test_anon_auth_fail(self, req):
         self.assertEqual(1, len(self.open_sessions))
         session = self.app.get_session(req)
-        # important otherwise _reset_cookie will not use the right session
-        req.set_cnx(repoapi.Connection(session))
+        cnx = session.new_cnx()
+        with cnx:
+            # important otherwise _reset_cookie will not use the right session
+            req.set_cnx(cnx)
         self.assertEqual(req.message, 'authentication failure')
         self.assertEqual(req.session.anonymous_session, True)
         self.assertEqual(1, len(self.open_sessions))
--- a/cubicweb/web/test/unittest_form.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/web/test/unittest_form.py	Thu Oct 20 18:28:46 2016 +0200
@@ -48,6 +48,8 @@
             self.assertEqual(StringField().format(form), 'text/plain')
             req.cnx.execute('INSERT CWProperty X: X pkey "ui.default-text-format", X value "text/rest", X for_user U WHERE U login "admin"')
             req.cnx.commit()
+        with self.admin_access.web_request() as req:
+            form = FieldsForm(req, None)
             self.assertEqual(StringField().format(form), 'text/rest')
 
 
--- a/cubicweb/web/test/unittest_formfields.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/web/test/unittest_formfields.py	Thu Oct 20 18:28:46 2016 +0200
@@ -136,6 +136,8 @@
             req.cnx.create_entity('CWProperty', pkey=u"ui.default-text-format", value=u"text/rest",
                                   for_user=req.user.eid)
             req.cnx.commit()
+        with self.admin_access.web_request() as req:
+            form = EntityFieldsForm(req, entity=e)
             self.assertEqual(description_format_field.value(form), 'text/rest')
 
     def test_property_key_field(self):
--- a/cubicweb/web/views/cwsources.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/web/views/cwsources.py	Thu Oct 20 18:28:46 2016 +0200
@@ -343,7 +343,7 @@
     def render_table(self, w, actions, paginate):
         default_level = self.view.cw_extra_kwargs['default_level']
         if default_level != 'Debug':
-            self._cw.add_onload('$("select.logFilter").val("%s").change();'
+            self._cw.add_onload('$("select.log_filter").val("%s").change();'
                            % self._cw.form.get('logLevel', default_level))
         w(u'\n<form action="#"><fieldset>')
         w(u'<label>%s</label>' % self._cw._(u'Message threshold'))
--- a/cubicweb/web/views/editcontroller.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/cubicweb/web/views/editcontroller.py	Thu Oct 20 18:28:46 2016 +0200
@@ -202,7 +202,7 @@
         except (RequestError, NothingToEdit) as ex:
             if '__linkto' in req.form and 'eid' in req.form:
                 self.execute_linkto()
-            elif not ('__delete' in req.form or '__insert' in req.form):
+            elif '__delete' not in req.form:
                 raise ValidationError(None, {None: text_type(ex)})
         # all pending inlined relations to newly created entities have been
         # treated now (pop to ensure there are no attempt to add new ones)
--- a/cubicweb/wsgi/test/requirements.txt	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,1 +0,0 @@
-webtest
--- a/debian/changelog	Wed Oct 19 22:31:32 2016 +0200
+++ b/debian/changelog	Thu Oct 20 18:28:46 2016 +0200
@@ -1,6 +1,6 @@
 cubicweb (3.23.2-1) unstable; urgency=medium
 
-  * new upstream release 
+  * new upstream release
 
  -- Sylvain Thenault <sylvain.thenault@logilab.fr>  Wed, 19 Oct 2016 19:21:09 +0200
 
@@ -18,7 +18,7 @@
 
 cubicweb (3.22.4-1) unstable; urgency=medium
 
-  * new upstream release 
+  * new upstream release
 
  -- David Douard <david.douard@logilab.fr>  Thu, 21 Jul 2016 21:02:14 +0200
 
@@ -96,7 +96,7 @@
 
 cubicweb (3.20.16-1) unstable; urgency=medium
 
-  * new upstream release 
+  * new upstream release
 
  -- David Douard <david.douard@logilab.fr>  Tue, 19 Jul 2016 12:48:45 +0200
 
--- a/debian/control	Wed Oct 19 22:31:32 2016 +0200
+++ b/debian/control	Thu Oct 20 18:28:46 2016 +0200
@@ -18,6 +18,9 @@
  python-rql (>= 0.34.0),
  python-yams (>= 0.44.0),
  python-lxml,
+ python-setuptools,
+ python-pyramid,
+ python-waitress,
 Standards-Version: 3.9.1
 Homepage: https://www.cubicweb.org
 X-Python-Version: >= 2.6
@@ -120,6 +123,30 @@
  This package provides only the twisted server part of the library.
 
 
+Package: cubicweb-pyramid
+Architecture: all
+Depends:
+ ${misc:Depends},
+ ${python:Depends},
+ cubicweb-web (= ${source:Version}),
+ cubicweb-ctl (= ${source:Version}),
+ python-pyramid (>= 1.5.0),
+ python-pyramid-multiauth,
+ python-waitress (>= 0.8.9),
+ python-wsgicors,
+Recommends:
+ python-pyramid-debugtoolbar
+Conflicts:
+ pyramid-cubicweb
+Replaces:
+ pyramid-cubicweb
+Description: Integrate CubicWeb with a Pyramid application
+ Provides pyramid extensions to load a CubicWeb instance and serve it through
+ the pyramid stack.
+ .
+ It prefigures what CubicWeb 4.0 will be.
+
+
 Package: cubicweb-web
 Architecture: all
 Depends:
--- a/debian/copyright	Wed Oct 19 22:31:32 2016 +0200
+++ b/debian/copyright	Thu Oct 20 18:28:46 2016 +0200
@@ -5,11 +5,13 @@
 Upstream Author:
 
     Logilab <contact@logilab.fr>
+    Christophe de Vienne
 
 Copyright:
 
     Copyright (c) 2003-2014 LOGILAB S.A. (Paris, FRANCE).
     http://www.logilab.fr/ -- mailto:contact@logilab.fr
+    Copyright (c) 2014 Unlish
 
 License:
 
@@ -43,4 +45,3 @@
 
     The rights to each pictogram in the social extension are either
     trademarked or copyrighted by the respective company.
-
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/pydist-overrides	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,1 @@
+cubicweb cubicweb-common
--- a/debian/rules	Wed Oct 19 22:31:32 2016 +0200
+++ b/debian/rules	Thu Oct 20 18:28:46 2016 +0200
@@ -8,7 +8,7 @@
 build: build-stamp
 build-stamp:
 	dh_testdir
-	NO_SETUPTOOLS=1 python setup.py build
+	python setup.py build
 	# cubicweb.foo needs to be importable by sphinx, so create a cubicweb symlink to the source dir
 	mkdir -p debian/pythonpath
 	ln -sf $(CURDIR)/cubicweb debian/pythonpath
@@ -34,7 +34,7 @@
 	dh_clean
 	dh_installdirs
 
-	NO_SETUPTOOLS=1 python setup.py -q install --no-compile --prefix=debian/tmp/usr
+	python setup.py -q install --no-compile --prefix=debian/tmp/usr
 
 	# Put all the python library and data in cubicweb-common
 	# and scripts in cubicweb-server
@@ -55,6 +55,7 @@
 	rm -rf debian/cubicweb-twisted/usr/lib/python2*/*-packages/cubicweb/etwist/test
 	rm -rf debian/cubicweb-common/usr/lib/python2*/*-packages/cubicweb/ext/test
 	rm -rf debian/cubicweb-common/usr/lib/python2*/*-packages/cubicweb/entities/test
+	rm -rf debian/cubicweb-pyramid/usr/lib/python2*/*-packages/cubicweb/pyramid/tests
 
 
 # Build architecture-independent files here.
--- a/dev-requirements.txt	Wed Oct 19 22:31:32 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,1 +0,0 @@
-pytest
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/api/pyramid.rst	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,17 @@
+:mod:`cubicweb.pyramid`
+=======================
+
+.. automodule:: cubicweb.pyramid
+
+    .. autofunction:: make_cubicweb_application
+
+    .. autofunction:: wsgi_application_from_cwconfig
+
+    .. autofunction:: wsgi_application
+
+.. toctree::
+    :maxdepth: 1
+    :glob:
+
+    pyramid/*
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/api/pyramid/auth.rst	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,12 @@
+.. _auth_module:
+
+:mod:`cubicweb.pyramid.auth`
+----------------------------
+
+.. automodule:: cubicweb.pyramid.auth
+
+    .. autofunction:: includeme
+
+    .. autoclass:: UpdateLoginTimeAuthenticationPolicy
+        :show-inheritance:
+        :members:
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/api/pyramid/authplugin.rst	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,10 @@
+.. _authplugin_module:
+
+:mod:`cubicweb.pyramid.authplugin`
+----------------------------------
+
+.. automodule:: cubicweb.pyramid.authplugin
+
+    .. autoclass:: DirectAuthentifier
+        :show-inheritance:
+        :members:
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/api/pyramid/bwcompat.rst	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,19 @@
+.. _bwcompat_module:
+
+:mod:`cubicweb.pyramid.bwcompat`
+--------------------------------
+
+.. automodule:: cubicweb.pyramid.bwcompat
+
+    .. autofunction:: includeme
+
+    .. autoclass:: PyramidSessionHandler
+        :members:
+
+    .. autoclass:: CubicWebPyramidHandler
+        :members:
+
+        .. automethod:: __call__
+
+    .. autoclass:: TweenHandler
+        :members:
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/api/pyramid/core.rst	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,23 @@
+.. _core_module:
+
+:mod:`cubicweb.pyramid.core`
+----------------------------
+
+.. automodule:: cubicweb.pyramid.core
+
+    .. autofunction:: includeme
+
+    .. autofunction:: cw_to_pyramid
+
+    .. autofunction:: render_view
+
+    .. autofunction:: repo_connect
+    .. autofunction:: get_principals
+
+    .. autoclass:: CubicWebPyramidRequest
+        :show-inheritance:
+        :members:
+
+    .. autofunction:: _cw_session
+    .. autofunction:: _cw_cnx
+    .. autofunction:: _cw_request
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/api/pyramid/defaults.rst	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,8 @@
+.. _defaults_module:
+
+:mod:`cubicweb.pyramid.defaults`
+--------------------------------
+
+.. automodule:: cubicweb.pyramid.defaults
+
+    .. autofunction:: includeme
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/api/pyramid/login.rst	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,16 @@
+.. _login_module:
+
+:mod:`cubicweb.pyramid.login`
+-----------------------------
+
+.. automodule:: cubicweb.pyramid.login
+
+    .. autofunction:: includeme
+
+
+    Views
+    -----
+
+    .. autofunction:: login_form
+    .. autofunction:: login_password_login
+    .. autofunction:: login_already_loggedin
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/api/pyramid/profile.rst	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,19 @@
+.. _profile_module:
+
+:mod:`cubicweb.pyramid.profile`
+===============================
+
+.. automodule:: cubicweb.pyramid.profile
+
+    Views
+    -----
+
+    .. autofunction:: ping
+
+    .. autofunction:: cnx
+
+    WSGI
+    ----
+
+    .. autofunction:: wsgi_profile
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/api/pyramid/session.rst	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,10 @@
+.. _session_module:
+
+:mod:`cubicweb.pyramid.session`
+-------------------------------
+
+.. automodule:: cubicweb.pyramid.session
+
+    .. autofunction:: includeme
+
+    .. autofunction:: CWSessionFactory
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/api/pyramid/tools.rst	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,13 @@
+.. _tools_module:
+
+:mod:`cubicweb.pyramid.tools`
+----------------------------
+
+.. automodule:: cubicweb.pyramid.tools
+
+    .. autofunction:: includeme
+
+    .. autofunction:: clone_user
+    .. autofunction:: cnx_attach_entity
+    .. autofunction:: cached_build_user
+    .. autofunction:: clear_cache
--- a/doc/book/admin/cubicweb-ctl.rst	Wed Oct 19 22:31:32 2016 +0200
+++ b/doc/book/admin/cubicweb-ctl.rst	Thu Oct 20 18:28:46 2016 +0200
@@ -34,12 +34,9 @@
 
 Create your new cube cube ::
 
-   cubicweb-ctl newcube
+   cubicweb-ctl newcube -d <target directory>
 
-This will create a new cube in
-``/path/to/grshell-cubicweb/cubes/<mycube>`` for a Mercurial
-installation, or in ``/usr/share/cubicweb/cubes`` for a debian
-packages installation.
+This will create a new cube in ``<target directory>``.
 
 Create an instance
 -------------------
--- a/doc/book/admin/instance-config.rst	Wed Oct 19 22:31:32 2016 +0200
+++ b/doc/book/admin/instance-config.rst	Thu Oct 20 18:28:46 2016 +0200
@@ -17,6 +17,11 @@
 each option name is prefixed with its own section and followed by its
 default value if necessary, e.g. "`<section>.<option>` [value]."
 
+.. note:: At runtime, configuration options can be overridden by environment
+    variables whose name follows the option name, with ``-`` replaced by ``_``
+    and a ``CW_`` prefix. For instance ``CW_BASE_URL=https://www.example.com``
+    would override the ``base-url`` configuration option.
+
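+For illustration, a minimal sketch of the name mangling rule, using the
+``option_value_from_env`` helper from ``cubicweb.toolsutils``:
+
+.. code-block:: python
+
+    import os
+
+    from cubicweb.toolsutils import option_value_from_env
+
+    # 'base-url' -> 'CW_BASE_URL': uppercased, dashes replaced by underscores
+    os.environ['CW_BASE_URL'] = 'https://www.example.com'
+    assert option_value_from_env('base-url') == 'https://www.example.com'
+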
 .. _`WebServerConfig`:
 
 Configuring the Web server
--- a/doc/book/devrepo/cubes/cc-newcube.rst	Wed Oct 19 22:31:32 2016 +0200
+++ b/doc/book/devrepo/cubes/cc-newcube.rst	Thu Oct 20 18:28:46 2016 +0200
@@ -3,12 +3,12 @@
 
 Let's start by creating the cube environment in which we will develop ::
 
-  cd ~/cubes
+  cd ~/myproject
   # use cubicweb-ctl to generate a template for the cube
   # will ask some questions, most with nice default
   cubicweb-ctl newcube mycube
   # makes the cube source code managed by mercurial
-  cd mycube
+  cd cubicweb-mycube
   hg init
   hg add .
   hg ci
@@ -22,36 +22,3 @@
 This variable is used for the instance packaging (dependencies handled
 by system utility tools such as APT) and to find used cubes when the
 database for the instance is created.
-
-On a Unix system, the available cubes are usually stored in the
-directory :file:`/usr/share/cubicweb/cubes`. If you are using the
-cubicweb mercurial repository (:ref:`SourceInstallation`), the cubes
-are searched in the directory
-:file:`/path/to/cubicweb_toplevel/cubes`. In this configuration
-cubicweb itself ought to be located at
-:file:`/path/to/cubicweb_toplevel/cubicweb`.
-
-.. note::
-
-    Please note that if you do not wish to use default directory for your cubes
-    library, you should set the :envvar:`CW_CUBES_PATH` environment variable to
-    add extra directories where cubes will be search, and you'll then have to use
-    the option `--directory` to specify where you would like to place the source
-    code of your cube:
-
-    ``cubicweb-ctl newcube --directory=/path/to/cubes/library mycube``
-
-
-.. XXX resurrect once live-server is back
-.. Usage of :command:`cubicweb-ctl liveserver`
-.. -------------------------------------------
-
-.. To quickly test a new cube, you can also use the `liveserver` command for cubicweb-ctl
-.. which allows to create an instance in memory (using an SQLite database by
-.. default) and make it accessible through a web server ::
-
-..   cubicweb-ctl live-server mycube
-
-.. or by using an existing database (SQLite or Postgres)::
-
-..   cubicweb-ctl live-server -s myfile_sources mycube
--- a/doc/book/devrepo/cubes/layout.rst	Wed Oct 19 22:31:32 2016 +0200
+++ b/doc/book/devrepo/cubes/layout.rst	Thu Oct 20 18:28:46 2016 +0200
@@ -6,17 +6,46 @@
 Standard structure for a cube
 -----------------------------
 
-A cube is structured as follows:
+A cube named "mycube" is Python package "cubicweb-mycube" structured as
+follows:
 
 ::
 
-  mycube/
+  cubicweb-mycube/
   |
-  |-- data/
-  |   |-- cubes.mycube.css
-  |   |-- cubes.mycube.js
-  |   `-- external_resources
-  |
+  |-- cubicweb_mycube/
+  |   |
+  |   |-- data/
+  |   |   |-- cubes.mycube.css
+  |   |   |-- cubes.mycube.js
+  |   |   `-- external_resources
+  |   |
+  |   |
+  |   |-- entities.py
+  |   |
+  |   |-- i18n/
+  |   |   |-- en.po
+  |   |   |-- es.po
+  |   |   `-- fr.po
+  |   |
+  |   |-- __init__.py
+  |   |
+  |   |
+  |   |-- migration/
+  |   |   |-- postcreate.py
+  |   |   `-- precreate.py
+  |   |
+  |   |-- __pkginfo__.py
+  |   |
+  |   |-- schema.py
+  |   |
+  |   |
+  |   |-- site_cubicweb.py
+  |   |
+  |   |-- hooks.py
+  |   |
+  |   |
+  |   `-- views.py
   |-- debian/
   |   |-- changelog
   |   |-- compat
@@ -24,56 +53,32 @@
   |   |-- copyright
   |   |-- cubicweb-mycube.prerm
   |   `-- rules
-  |
-  |-- entities.py
-  |
-  |-- i18n/
-  |   |-- en.po
-  |   |-- es.po
-  |   `-- fr.po
-  |
-  |-- __init__.py
-  |
   |-- MANIFEST.in
-  |
-  |-- migration/
-  |   |-- postcreate.py
-  |   `-- precreate.py
-  |
-  |-- __pkginfo__.py
-  |
-  |-- schema.py
-  |
   |-- setup.py
-  |
-  |-- site_cubicweb.py
-  |
-  |-- hooks.py
-  |
-  |-- test/
-  |   |-- data/
-  |   |   `-- bootstrap_cubes
-  |   |-- pytestconf.py
-  |   |-- realdb_test_mycube.py
-  |   `-- test_mycube.py
-  |
-  `-- views.py
+  `-- test/
+      |-- data/
+      |   `-- bootstrap_cubes
+      |-- pytestconf.py
+      |-- realdb_test_mycube.py
+      `-- test_mycube.py
 
 
-We can use subpackages instead of python modules for ``views.py``, ``entities.py``,
+We can use subpackages instead of Python modules for ``views.py``, ``entities.py``,
 ``schema.py`` or ``hooks.py``. For example, we could have:
 
 ::
 
-  mycube/
+  cubicweb-mycube/
   |
-  |-- entities.py
-  |-- hooks.py
-  `-- views/
-      |-- __init__.py
-      |-- forms.py
-      |-- primary.py
-      `-- widgets.py
+  |-- cubicweb_mycube/
+  |   |
+  |   |-- entities.py
+  |   |-- hooks.py
+  |   `-- views/
+  |       |-- __init__.py
+  |       |-- forms.py
+  |       |-- primary.py
+  |       `-- widgets.py
 
 
 where :
@@ -127,6 +132,27 @@
 something defined in the other's schema; on database creation, on something
 created by the other's postcreate, and so on.
 
+The :file:`setup.py` file
+-------------------------
+
+This is a standard setuptools-based setup module which reads most of its data
+from :file:`__pkginfo__.py`. In the ``setup`` function call, it should also
+include an entry point definition under the ``cubicweb.cubes`` group so that
+CubicWeb can discover cubes (in particular their custom ``cubicweb-ctl``
+commands):
+
+::
+
+    setup(
+      # ...
+      entry_points={
+          'cubicweb.cubes': [
+              'mycube=cubicweb_mycube',
+          ],
+      },
+      # ...
+    )
+
 
 :file:`migration/precreate.py` and :file:`migration/postcreate.py`
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/book/pyramid/auth.rst	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,40 @@
+Authentication
+==============
+
+Overview
+--------
+
+A default authentication stack is provided by the :mod:`cubicweb.pyramid.auth`
+module, which is included by :mod:`cubicweb.pyramid.defaults`.
+
+The authentication stack is built around `pyramid_multiauth`_, and provides a
+few default policies that reproduce the default cubicweb behavior.
+
+.. note::
+
+    Note that this module only provides an authentication policy, not the views
+    that handle the login form. See :ref:`login_module`
+
+Customize
+---------
+
+The default policies can be individually deactivated, as well as the default
+authentication callback that returns the current user groups as :term:`principals`.
+
+The following settings can be set to `False` (see the sketch after the list):
+
+-   :confval:`cubicweb.auth.update_login_time`. Activates the policy that updates
+    the user's `login_time` when `remember` is called.
+-   :confval:`cubicweb.auth.authtkt` and all its subvalues.
+-   :confval:`cubicweb.auth.groups_principals`
+
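+For illustration, a minimal sketch of deactivating one of these policies from
+the :ref:`pyramid_settings`, assuming the application is assembled manually
+with a pyramid ``Configurator`` (the instance name is hypothetical):
+
+.. code-block:: python
+
+    from pyramid.config import Configurator
+
+    settings = {
+        'cubicweb.instance': 'myinstance',        # hypothetical instance name
+        'cubicweb.auth.update_login_time': 'no',  # deactivate this policy
+    }
+    config = Configurator(settings=settings)
+    config.include('cubicweb.pyramid')
+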
+Additional policies can be added by accessing the MultiAuthenticationPolicy
+instance in the registry:
+
+.. code-block:: python
+
+    mypolicy = SomePolicy()
+    authpolicy = config.registry['cubicweb.authpolicy']
+    authpolicy._policies.append(mypolicy)
+
+.. _pyramid_multiauth: https://github.com/mozilla-services/pyramid_multiauth
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/book/pyramid/ctl.rst	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,63 @@
+.. _cubicweb-ctl_pyramid:
+
+The 'pyramid' command
+=====================
+
+.. program:: cubicweb-ctl pyramid
+
+The 'pyramid' command is a replacement for the 'start' command of :ref:`cubicweb-ctl`.
+It provides the same options and a few other ones.
+
+.. note::
+
+    The 'pyramid' command is provided by the ``pyramid`` cube.
+
+Options
+-------
+
+
+.. option:: --no-daemon
+
+    Run the server in the foreground.
+
+.. option:: --debug-mode
+
+    Activate the repository debug mode (logs in the console and the debug
+    toolbar). Implies :option:`--no-daemon`.
+
+    Also force the following pyramid options:
+
+    .. code-block:: ini
+    
+        pyramid.debug_authorization = yes
+        pyramid.debug_notfound = yes
+        pyramid.debug_routematch = yes
+        pyramid.reload_templates = yes
+
+.. option:: -D, --debug
+
+    Equivalent to :option:`--debug-mode` :option:`--no-daemon` :option:`--reload`
+
+.. option:: --reload
+
+    Restart the server if any source file is changed
+
+.. option:: --reload-interval=RELOAD_INTERVAL
+
+    Interval, in seconds, between file modification checks [current: 1]
+
+.. option:: -l <log level>, --loglevel=<log level>
+
+    Set the log level: debug if -D is set, error otherwise
+
+.. option:: -p, --profile
+
+    Enable profiling. See :ref:`profiling`.
+
+.. option:: --profile-output=PROFILE_OUTPUT
+
+    Profiling output file (default: "program.prof")
+
+.. option:: --profile-dump-every=N
+
+    Dump profile stats to the output file every N requests (default: 100)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/book/pyramid/index.rst	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,36 @@
+================
+Pyramid Cubicweb
+================
+
+Pyramid Cubicweb is an attempt to rebase the CubicWeb framework on pyramid.
+
+It can be used in two different ways:
+
+-   Within CubicWeb, through the 'pyramid' cube and the
+    :ref:`pyramid command <cubicweb-ctl_pyramid>`.
+    In this mode, Pyramid CubicWeb replaces some parts of
+    CubicWeb and makes the pyramid API available to the cubes.
+
+-   Within a pyramid application, it provides easy access to a CubicWeb
+    instance and registry.
+
+Narrative Documentation
+=======================
+
+.. toctree::
+    :maxdepth: 2
+    
+    quickstart
+    ctl
+    settings
+    auth
+    profiling
+
+Api Documentation
+=================
+
+.. toctree::
+    :maxdepth: 2
+    :glob:
+
+    ../../api/pyramid
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/book/pyramid/profiling.rst	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,52 @@
+.. _profiling:
+
+Profiling
+=========
+
+Profiling requests through the pyramid debug toolbar can be a little
+restrictive when a specific URL needs profiling that includes the whole
+pyramid dispatch.
+
+Pyramid CubicWeb provides facilities to profile requests as a
+:func:`wsgi middleware <cubicweb.pyramid.profile.wsgi_profile>`, and a few
+views that facilitate profiling of basic features.
+
+The views and the wsgi middleware are activated when the 'profile' option is
+given. This can be done on the command line
+(:option:`cubicweb-ctl pyramid --profile`) or in the :ref:`pyramid_settings`.
+
+Views
+-----
+
+The following routes and corresponding views are provided when profiling is on:
+
+-   ``/_profile/ping``: Reply 'ping' without doing anything else. See also
+    :func:`cubicweb.pyramid.profile.ping`.
+
+-   ``/_profile/cnx``: Reply 'ping' after getting a cnx. See also
+    :func:`cubicweb.pyramid.profile.cnx`.
+
+Typical Usage
+-------------
+
+Let's say we want to measure the cost of having a ``cnx``.
+
+-   Start the application with profile enabled:
+
+    .. code-block:: console
+
+        $ cubicweb-ctl pyramid --no-daemon --profile --profile-dump-every 100
+
+-   Use 'ab' or any other http benchmark tool to throw a lot of requests:
+
+    .. code-block:: console
+
+        $ ab -c 1 -n 100 http://localhost:8080/_profile/cnx
+
+-   Analyse the results. I personally fancy SnakeViz_:
+
+    .. code-block:: console
+
+        $ snakeviz program.prof
+
+.. _SnakeViz: http://jiffyclub.github.io/snakeviz/
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/book/pyramid/quickstart.rst	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,59 @@
+Quick start
+===========
+
+.. highlight:: bash
+
+Prerequisites
+-------------
+
+-   Install everything (here with pip, possibly in a virtualenv)::
+
+        pip install pyramid-cubicweb cubicweb-pyramid pyramid_debugtoolbar
+
+-   Have a working Cubicweb instance, for example:
+
+
+    -   Make sure CubicWeb is in user mode::
+
+            export CW_MODE=user
+
+    -   Create a CubicWeb instance, and install the 'pyramid' cube on it (see
+        :ref:`configenv` for more details on this step)::
+
+            cubicweb-ctl create pyramid myinstance
+
+-   Edit your ``~/etc/cubicweb.d/myinstance/all-in-one.conf`` and set values for
+    :confval:`pyramid-auth-secret` and :confval:`pyramid-session-secret`.
+    *required if cubicweb.pyramid.auth and cubicweb.pyramid.session get
+    included, which is the default*
+
+From CubicWeb
+-------------
+
+-   Start the instance with the :ref:`'pyramid' command <cubicweb-ctl_pyramid>`
+    instead of 'start'::
+
+        cubicweb-ctl pyramid --debug myinstance
+
+In a pyramid application
+------------------------
+
+-   Create a pyramid application
+
+-   Include cubicweb.pyramid:
+
+    .. code-block:: python
+
+        def includeme(config):
+            # ...
+            config.include('cubicweb.pyramid')
+            # ...
+
+-   Configure the instance name (in the .ini file):
+
+    .. code-block:: ini
+
+        cubicweb.instance = myinstance
+
+-   Configure the base-url and https-url in all-in-one.conf to match the ones
+    of the pyramid configuration (this is a temporary limitation).
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/book/pyramid/settings.rst	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,146 @@
+Settings
+========
+
+.. _cubicweb_settings:
+
+Cubicweb Settings
+-----------------
+
+Pyramid CubicWeb will **not** make use of the configuration entries
+found in the cubicweb configuration (a.k.a. `all-in-one.conf`) for any
+pyramid related configuration value.
+
+
+.. _pyramid_settings:
+
+Pyramid Settings
+----------------
+
+If a ``pyramid.ini`` file is found in the instance home directory (where the
+``all-in-one.conf`` file is), its ``[main]`` section will be read and used as the
+``settings`` of the pyramid Configurator.
+
+This configuration file is almost the same as the one read by ``pserve``, which
+makes it easy to add and configure any pyramid extension.
+
+A typical ``pyramid.ini`` file is:
+
+.. code-block:: ini
+
+    [main]
+    pyramid.includes =
+        pyramid_redis_sessions
+
+    cubicweb.defaults = no
+    cubicweb.includes =
+        cubicweb.pyramid.auth
+        cubicweb.pyramid.login
+
+    cubicweb.profile = no
+
+    redis.sessions.secret = your_cookie_signing_secret
+    redis.sessions.timeout = 1200
+
+    redis.sessions.host = mywheezy
+
+The Pyramid CubicWeb specific configuration entries are:
+
+.. confval:: cubicweb.instance (string)
+
+    A CubicWeb instance name. Useful when the application is not run by
+    :ref:`cubicweb-ctl_pyramid`.
+
+.. confval:: cubicweb.debug (bool)
+
+    Enables the cubicweb debugmode. Works only if the instance is set up by
+    :confval:`cubicweb.instance`.
+
+    Unlike when the debugmode is set by the :option:`cubicweb-ctl pyramid --debug-mode`
+    command, the pyramid debug options are untouched.
+
+.. confval:: cubicweb.includes (list)
+
+    Same as ``pyramid.includes``, but the includes are done after the cubicweb
+    specific registry entries are initialized.
+
+    Useful to include extensions that require these entries.
+
+.. confval:: cubicweb.bwcompat (bool)
+
+    (True) Enable/disable backward compatibility. See :ref:`bwcompat_module`.
+
+.. confval:: cubicweb.bwcompat.errorhandler (bool)
+
+    (True) Enable/disable the backward compatibility error handler.
+    Set to 'no' if you need to define your own error handlers.
+
+.. confval:: cubicweb.defaults (bool)
+
+    (True) Enable/disable defaults. See :ref:`defaults_module`.
+
+.. confval:: cubicweb.profile (bool)
+
+    (False) Enable/disable profiling. See :ref:`profiling`.
+
+.. confval:: cubicweb.auth.update_login_time (bool)
+
+    (True) Adds a :class:`cubicweb.pyramid.auth.UpdateLoginTimeAuthenticationPolicy`
+    policy that updates the CWUser.login_time attribute when a user logs in.
+    
+.. confval:: cubicweb.auth.authtkt (bool)
+
+    (True) Enables the two cookie-based auth policies, which activate/deactivate
+    depending on the `persistent` argument passed to `remember`.
+
+    The default login views set persistent to True if a `__setauthcookie`
+    parameter is passed to them and evaluates to True in
+    :func:`pyramid.settings.asbool`.
+
+    The configuration values of the policies are arguments for
+    :class:`pyramid.authentication.AuthTktAuthenticationPolicy`.
+
+    The first policy handles session authentication. It doesn't get
+    activated if `remember()` is called with `persistent=True`:
+
+    .. confval:: cubicweb.auth.authtkt.session.cookie_name (str)
+
+        ('auth_tkt') The cookie name. Must be different from the persistent
+        authentication cookie name.
+
+    .. confval:: cubicweb.auth.authtkt.session.timeout (int)
+
+        (1200) Cookie timeout.
+
+    .. confval:: cubicweb.auth.authtkt.session.reissue_time (int)
+
+        (120) Reissue time.
+
+    The second policy handles persistent authentication. It doesn't get
+    activated if `remember()` is called with `persistent=False`:
+
+    .. confval:: cubicweb.auth.authtkt.persistent.cookie_name (str)
+
+        ('auth_tkt') The cookie name. Must be different from the session
+        authentication cookie name.
+
+    .. confval:: cubicweb.auth.authtkt.persistent.max_age (int)
+
+        (30 days) Max age in seconds.
+
+    .. confval:: cubicweb.auth.authtkt.persistent.reissue_time (int)
+
+        (1 day) Reissue time in seconds.
+
+    Both policies set the ``secure`` flag to ``True`` by default, meaning that
+    cookies will only be sent back over a secure connection (see
+    `Authentication Policies documentation`_ for details). This can be
+    configured through :confval:`cubicweb.auth.authtkt.persistent.secure` and
+    :confval:`cubicweb.auth.authtkt.session.secure` configuration options.
+
+    .. _`Authentication Policies documentation`: \
+        http://docs.pylonsproject.org/projects/pyramid/en/latest/api/authentication.html
+
+.. confval:: cubicweb.auth.groups_principals (bool)
+
+    (True) Sets up a callback on the authentication stack that injects the user
+    groups into the principals.
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/changes/3.24.rst	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,15 @@
+3.24 (UNRELEASED)
+=================
+
+New features
+------------
+
+* Various bits of a CubicWeb application configuration can now be
+  overridden through environment variables matching configuration option
+  names prefixed by ``CW_`` (for instance ``CW_BASE_URL``).
+
+* Cubes are now standard Python packages named as ``cubicweb_<cubename>``.
+  They are no longer installed in ``<prefix>/share/cubicweb/cubes``. Their
+  discovery by CubicWeb is handled by a new setuptools entry point
+  ``cubicweb.cubes``. A backward compatibility layer is kept for "legacy"
+  cubes.
--- a/doc/conf.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/doc/conf.py	Thu Oct 20 18:28:46 2016 +0200
@@ -224,3 +224,8 @@
 .. |yams| replace:: *Yams*
 .. |rql| replace:: *RQL*
 """
+
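+# register the 'confval' directive and role used by the pyramid settings documentation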
+def setup(app):
+    app.add_object_type('confval', 'confval',
+                        objname='configuration value',
+                        indextemplate='pair: %s; configuration value')
--- a/doc/index.rst	Wed Oct 19 22:31:32 2016 +0200
+++ b/doc/index.rst	Thu Oct 20 18:28:46 2016 +0200
@@ -73,6 +73,7 @@
 
    book/devrepo/index
    book/devweb/index
+   book/pyramid/index
 
 .. toctree::
    :maxdepth: 2
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/requirements/dev.txt	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,1 @@
+pytest
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/requirements/test-misc.txt	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,30 @@
+### Requirements for tests in various cubicweb/**/test directories. ###
+
+## shared by several test folders
+cubicweb-card
+docutils
+Twisted < 16.0.0
+webtest
+
+## cubicweb/test
+Pygments
+mock
+#fyzz XXX pip install fails
+cubicweb-file
+cubicweb-localperms
+cubicweb-tag
+
+## cubicweb/devtools/test
+flake8
+
+## cubicweb/hooks/test
+psycopg2
+
+## cubicweb/pyramid/test
+pyramid >= 1.5.0
+waitress >= 0.8.9
+wsgicors >= 0.3
+pyramid_multiauth
+
+## cubicweb/sobject/test
+cubicweb-comment
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/requirements/test-server.txt	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,8 @@
+psycopg2
+ldap3
+cubicweb-basket
+cubicweb-card
+cubicweb-comment
+cubicweb-file
+cubicweb-localperms
+cubicweb-tag
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/requirements/test-web.txt	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,6 @@
+Twisted < 16.0.0
+requests
+webtest
+cubicweb-blog
+cubicweb-file
+cubicweb-tag
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/setup.cfg	Thu Oct 20 18:28:46 2016 +0200
@@ -0,0 +1,6 @@
+[check-manifest]
+ignore =
+  debian
+  debian/*
+  cubicweb.spec
+  __pkginfo__.py
--- a/setup.py	Wed Oct 19 22:31:32 2016 +0200
+++ b/setup.py	Thu Oct 20 18:28:46 2016 +0200
@@ -27,16 +27,8 @@
 import shutil
 from os.path import dirname, exists, isdir, join
 
-try:
-    if os.environ.get('NO_SETUPTOOLS'):
-        raise ImportError() # do as there is no setuptools
-    from setuptools import setup
-    from setuptools.command import install_lib
-    USE_SETUPTOOLS = True
-except ImportError:
-    from distutils.core import setup
-    from distutils.command import install_lib
-    USE_SETUPTOOLS = False
+from setuptools import setup
+from setuptools.command import install_lib
 from distutils.command import install_data
 
 here = dirname(__file__)
@@ -58,21 +50,11 @@
     long_description = f.read()
 
 # import optional features
-if USE_SETUPTOOLS:
-    requires = {}
-    for entry in ("__depends__",): # "__recommends__"):
-        requires.update(__pkginfo__.get(entry, {}))
-    install_requires = [("%s %s" % (d, v and v or "")).strip()
-                       for d, v in requires.items()]
-else:
-    install_requires = []
-
-distname = __pkginfo__.get('distname', modname)
-scripts = __pkginfo__.get('scripts', ())
-include_dirs = __pkginfo__.get('include_dirs', ())
-data_files = __pkginfo__.get('data_files', None)
-ext_modules = __pkginfo__.get('ext_modules', None)
-package_data = __pkginfo__.get('package_data', {})
+distname = __pkginfo__['distname']
+scripts = __pkginfo__['scripts']
+include_dirs = __pkginfo__['include_dirs']
+data_files = __pkginfo__['data_files']
+package_data = __pkginfo__['package_data']
 
 BASE_BLACKLIST = ('CVS', 'dist', 'build', '__buildlog')
 IGNORED_EXTENSIONS = ('.pyc', '.pyo', '.elc')
@@ -179,57 +161,93 @@
         ini.close()
 
 # re-enable copying data files in sys.prefix
-if USE_SETUPTOOLS:
-    # overwrite MyInstallData to use sys.prefix instead of the egg directory
-    MyInstallMoreData = MyInstallData
-    class MyInstallData(MyInstallMoreData): # pylint: disable=E0102
-        """A class that manages data files installation"""
-        def run(self):
-            _old_install_dir = self.install_dir
-            if self.install_dir.endswith('egg'):
-                self.install_dir = sys.prefix
-            MyInstallMoreData.run(self)
-            self.install_dir = _old_install_dir
-    try:
-        import setuptools.command.easy_install # only if easy_install available
-        # monkey patch: Crack SandboxViolation verification
-        from setuptools.sandbox import DirectorySandbox as DS
-        old_ok = DS._ok
-        def _ok(self, path):
-            """Return True if ``path`` can be written during installation."""
-            out = old_ok(self, path) # here for side effect from setuptools
-            realpath = os.path.normcase(os.path.realpath(path))
-            allowed_path = os.path.normcase(sys.prefix)
-            if realpath.startswith(allowed_path):
-                out = True
-            return out
-        DS._ok = _ok
-    except ImportError:
-        pass
+# overwrite MyInstallData to use sys.prefix instead of the egg directory
+MyInstallMoreData = MyInstallData
+class MyInstallData(MyInstallMoreData): # pylint: disable=E0102
+    """A class that manages data files installation"""
+    def run(self):
+        _old_install_dir = self.install_dir
+        if self.install_dir.endswith('egg'):
+            self.install_dir = sys.prefix
+        MyInstallMoreData.run(self)
+        self.install_dir = _old_install_dir
+try:
+    import setuptools.command.easy_install # only if easy_install available
+    # monkey patch: Crack SandboxViolation verification
+    from setuptools.sandbox import DirectorySandbox as DS
+    old_ok = DS._ok
+    def _ok(self, path):
+        """Return True if ``path`` can be written during installation."""
+        out = old_ok(self, path) # here for side effect from setuptools
+        realpath = os.path.normcase(os.path.realpath(path))
+        allowed_path = os.path.normcase(sys.prefix)
+        if realpath.startswith(allowed_path):
+            out = True
+        return out
+    DS._ok = _ok
+except ImportError:
+    pass
+
 
-def install(**kwargs):
-    """setup entry point"""
-    if USE_SETUPTOOLS:
-        if '--force-manifest' in sys.argv:
-            sys.argv.remove('--force-manifest')
-    # install-layout option was introduced in 2.5.3-1~exp1
-    elif sys.version_info < (2, 5, 4) and '--install-layout=deb' in sys.argv:
-        sys.argv.remove('--install-layout=deb')
-    packages = [modname] + get_packages(join(here, modname), modname)
-    if USE_SETUPTOOLS:
-        kwargs['install_requires'] = install_requires
-        kwargs['zip_safe'] = False
-    kwargs['packages'] = packages
-    kwargs['package_data'] = package_data
-    return setup(name=distname, version=version, license=license, url=web,
-                 description=description, long_description=long_description,
-                 author=author, author_email=author_email,
-                 scripts=ensure_scripts(scripts), data_files=data_files,
-                 ext_modules=ext_modules,
-                 cmdclass={'install_lib': MyInstallLib,
-                           'install_data': MyInstallData},
-                 **kwargs
-                 )
-
-if __name__ == '__main__' :
-    install()
+setup(
+    name=distname,
+    version=version,
+    license=license,
+    url=web,
+    description=description,
+    long_description=long_description,
+    author=author,
+    author_email=author_email,
+    packages=[modname] + get_packages(join(here, modname), modname),
+    package_data=package_data,
+    scripts=ensure_scripts(scripts),
+    data_files=data_files,
+    install_requires=[
+        'six >= 1.4.0',
+        'logilab-common >= 1.2.2',
+        'logilab-mtconverter >= 0.8.0',
+        'rql >= 0.34.0',
+        'yams >= 0.44.0',
+        'lxml',
+        'logilab-database >= 1.15.0',
+        'passlib',
+        'pytz',
+        'Markdown',
+        'unittest2 >= 0.7.0',
+    ],
+    extra_requires={
+        'captcha': [
+            'Pillow',
+        ],
+        'crypto': [
+            'pycrypto',
+        ],
+        'etwist': [
+            'Twisted < 16.0.0',
+        ],
+        'ext': [
+            'docutils >= 0.6',
+        ],
+        'ical': [
+            'vobject >= 0.6.0',
+        ],
+        'pyramid': [
+            'pyramid >= 1.5.0',
+            'waitress >= 0.8.9',
+            'wsgicors >= 0.3',
+            'pyramid_multiauth',
+        ],
+        'rdf': [
+            'rdflib',
+        ],
+        'sparql': [
+            'fyzz >= 0.1.0',
+        ],
+        'zmq': [
+            'pyzmq',
+        ],
+    },
+    cmdclass={'install_lib': MyInstallLib,
+              'install_data': MyInstallData},
+    zip_safe=False,
+)
--- a/tox.ini	Wed Oct 19 22:31:32 2016 +0200
+++ b/tox.ini	Thu Oct 20 18:28:46 2016 +0200
@@ -1,44 +1,31 @@
 [tox]
 envlist =
-  py27-{cubicweb,dataimport,devtools,entities,etwist,ext,hooks,server,migractions,sobjects,web,wsgi},
-  py34-{cubicweb,dataimport,devtools,entities,ext,hooks,server,migractions,sobjects,web,wsgi}
+  check-manifest,flake8,
+  py{27,34}-{server,web,misc}
 
 [testenv]
 sitepackages = True
 whitelist_externals =
   /usr/bin/touch
 deps =
-  -rdev-requirements.txt
-  cubicweb: -r{toxinidir}/cubicweb/test/requirements.txt
-  devtools: -r{toxinidir}/cubicweb/devtools/test/requirements.txt
-  entities: -r{toxinidir}/cubicweb/entities/test/requirements.txt
-  etwist: -r{toxinidir}/cubicweb/etwist/test/requirements.txt
-  ext: -r{toxinidir}/cubicweb/ext/test/requirements.txt
-  hooks: -r{toxinidir}/cubicweb/hooks/test/requirements.txt
-  server: -r{toxinidir}/cubicweb/server/test/requirements.txt
-  migractions: -r{toxinidir}/cubicweb/server/test/requirements.txt
-  sobjects: -r{toxinidir}/cubicweb/sobjects/test/requirements.txt
-  web: -r{toxinidir}/cubicweb/web/test/requirements.txt
-  wsgi: -r{toxinidir}/cubicweb/wsgi/test/requirements.txt
+  -r{toxinidir}/requirements/dev.txt
+  misc: -r{toxinidir}/requirements/test-misc.txt
+  server: -r{toxinidir}/requirements/test-server.txt
+  web: -r{toxinidir}/requirements/test-web.txt
 commands =
-  py34-cubicweb: touch {envdir}/share/cubicweb/cubes/__init__.py
-  py34-server: touch {envdir}/share/cubicweb/cubes/__init__.py
-  py34-migractions: touch {envdir}/share/cubicweb/cubes/__init__.py
-  py34-sobjects: touch {envdir}/share/cubicweb/cubes/__init__.py
-  py34-web: touch {envdir}/share/cubicweb/cubes/__init__.py
-  cubicweb: {envpython} -m pip install --upgrade --no-deps --quiet git+git://github.com/logilab/yapps@master#egg=yapps
-  cubicweb: {envpython} -m pytest {toxinidir}/cubicweb/test {posargs}
-  dataimport: {envpython} -m pytest {toxinidir}/cubicweb/dataimport/test {posargs}
-  devtools: {envpython} -m pytest {toxinidir}/cubicweb/devtools/test {posargs}
-  entities: {envpython} -m pytest {toxinidir}/cubicweb/entities/test {posargs}
-  etwist: {envpython} -m pytest {toxinidir}/cubicweb/etwist/test {posargs}
-  ext: {envpython} -m pytest {toxinidir}/cubicweb/ext/test {posargs}
-  hooks: {envpython} -m pytest {toxinidir}/cubicweb/hooks/test {posargs}
-  server: {envpython} -m pytest {toxinidir}/cubicweb/server/test {posargs} --ignore={toxinidir}/cubicweb/server/test/unittest_migractions.py
-  migractions: {envpython} -m pytest {toxinidir}/cubicweb/server/test/unittest_migractions.py {posargs}
-  sobjects: {envpython} -m pytest {toxinidir}/cubicweb/sobjects/test {posargs}
-  web: {envpython} -m pytest {toxinidir}/cubicweb/web/test {posargs}
-  wsgi: {envpython} -m pytest {toxinidir}/cubicweb/wsgi/test {posargs}
+  py34: touch {envdir}/share/cubicweb/cubes/__init__.py
+  misc: {envpython} -m pip install --upgrade --no-deps --quiet git+git://github.com/logilab/yapps@master#egg=yapps
+  misc: {envpython} -m pytest {posargs} {toxinidir}/cubicweb/test {toxinidir}/cubicweb/dataimport/test {toxinidir}/cubicweb/devtools/test {toxinidir}/cubicweb/entities/test {toxinidir}/cubicweb/ext/test {toxinidir}/cubicweb/hooks/test {toxinidir}/cubicweb/sobjects/test {toxinidir}/cubicweb/wsgi/test {toxinidir}/cubicweb/pyramid/test
+  py27-misc: {envpython} -m pytest {posargs} {toxinidir}/cubicweb/etwist/test
+  server: {envpython} -m pytest {posargs} {toxinidir}/cubicweb/server/test
+  web: {envpython} -m pytest {posargs} {toxinidir}/cubicweb/web/test
+
+[testenv:flake8]
+skip_install = true
+deps =
+  flake8 >= 3
+commands =
+  flake8 {toxinidir}
 
 [testenv:doc]
 changedir = doc
@@ -47,5 +34,130 @@
 commands =
   {envpython} -c 'import sphinx; sphinx.main()' -b html -d {envtmpdir}/doctrees .  {envtmpdir}/html
 
+[testenv:check-manifest]
+skip_install = true
+deps =
+  check-manifest
+commands =
+  check-manifest {toxinidir} \
+# ignore symlinks that are not recognized by check-manifest, see
+# https://github.com/mgedmin/check-manifest/issues/69
+    --ignore cubicweb/devtools/test/data/cubes/i18ntestcube*,cubicweb/test/data/legacy_cubes*
+
 [pytest]
 python_files = *test_*.py
+
+[flake8]
+format = pylint
+ignore = W503
+max-line-length = 100
+exclude = setup.py,doc/*,cubicweb/misc/*,cubicweb/test/*,cubicweb/*/test/*,.tox/*
+filename=
+  cubicweb/dataimport/csv.py,
+  cubicweb/dataimport/importer.py,
+  cubicweb/dataimport/massive_store.py,
+  cubicweb/dataimport/stores.py,
+  cubicweb/dataimport/test/data-massimport/schema.py,
+  cubicweb/dataimport/test/data/schema.py,
+  cubicweb/dataimport/test/test_csv.py,
+  cubicweb/dataimport/test/test_pgstore.py,
+  cubicweb/dataimport/test/test_massive_store.py,
+  cubicweb/dataimport/test/test_stores.py,
+  cubicweb/dataimport/test/unittest_importer.py,
+  cubicweb/devtools/test/data/cubes/i18ntestcube/__init__.py,
+  cubicweb/devtools/test/data/cubes/__init__.py,
+  cubicweb/devtools/test/data/schema.py,
+  cubicweb/devtools/testlib.py,
+  cubicweb/devtools/test/unittest_devctl.py,
+  cubicweb/devtools/test/unittest_i18n.py,
+  cubicweb/devtools/test/unittest_webtest.py,
+  cubicweb/devtools/webtest.py,
+  cubicweb/entities/adapters.py,
+  cubicweb/entities/test/unittest_base.py,
+  cubicweb/etwist/__init__.py,
+  cubicweb/ext/__init__.py,
+  cubicweb/hooks/test/data/hooks.py,
+  cubicweb/hooks/test/unittest_notification.py,
+  cubicweb/hooks/test/unittest_security.py,
+  cubicweb/hooks/test/unittest_syncsession.py,
+  cubicweb/__init__.py,
+  cubicweb/__main__.py,
+  cubicweb/pylintext.py,
+  cubicweb/server/repository.py,
+  cubicweb/server/rqlannotation.py,
+  cubicweb/server/schema2sql.py,
+  cubicweb/server/session.py,
+  cubicweb/server/sqlutils.py,
+  cubicweb/server/test/datacomputed/migratedapp/schema.py,
+  cubicweb/server/test/datacomputed/schema.py,
+  cubicweb/server/test/data/entities.py,
+  cubicweb/server/test/data-migractions/cubes/fakecustomtype/__init__.py,
+  cubicweb/server/test/data-migractions/cubes/fakeemail/__init__.py,
+  cubicweb/server/test/data-migractions/cubes/__init__.py,
+  cubicweb/server/test/data-migractions/migratedapp/__init__.py,
+  cubicweb/server/test/data-schema2sql/__init__.py,
+  cubicweb/server/test/unittest_checkintegrity.py,
+  cubicweb/server/test/unittest_ldapsource.py,
+  cubicweb/skeleton/test/pytestconf.py,
+  cubicweb/sobjects/test/unittest_notification.py,
+  cubicweb/sobjects/test/unittest_register_user.py,
+  cubicweb/sobjects/textparsers.py,
+  cubicweb/test/data/cubes/comment/__init__.py,
+  cubicweb/test/data/cubes/comment/__pkginfo__.py,
+  cubicweb/test/data/cubes/email/entities.py,
+  cubicweb/test/data/cubes/email/hooks.py,
+  cubicweb/test/data/cubes/email/__init__.py,
+  cubicweb/test/data/cubes/email/__pkginfo__.py,
+  cubicweb/test/data/cubes/email/views/__init__.py,
+  cubicweb/test/data/cubes/file/entities/__init__.py,
+  cubicweb/test/data/cubes/file/hooks/__init__.py,
+  cubicweb/test/data/cubes/file/__init__.py,
+  cubicweb/test/data/cubes/file/__pkginfo__.py,
+  cubicweb/test/data/cubes/file/views.py,
+  cubicweb/test/data/cubes/forge/__init__.py,
+  cubicweb/test/data/cubes/forge/__pkginfo__.py,
+  cubicweb/test/data/cubes/mycube/__init__.py,
+  cubicweb/test/data/cubes/mycube/__pkginfo__.py,
+  cubicweb/test/data/migration/0.1.0_common.py,
+  cubicweb/test/data/migration/0.1.0_repository.py,
+  cubicweb/test/data_schemareader/schema.py,
+  cubicweb/test/data/server_migration/bootstrapmigration_repository.py,
+  cubicweb/test/data/views.py,
+  cubicweb/test/unittest_binary.py,
+  cubicweb/test/unittest_mail.py,
+  cubicweb/test/unittest_repoapi.py,
+  cubicweb/test/unittest_schema.py,
+  cubicweb/test/unittest_toolsutils.py,
+  cubicweb/test/unittest_utils.py,
+  cubicweb/web/formwidgets.py,
+  cubicweb/web/test/data/entities.py,
+  cubicweb/web/test/unittest_http_headers.py,
+  cubicweb/web/test/unittest_views_basetemplates.py,
+  cubicweb/web/test/unittest_views_cwsources.py,
+  cubicweb/web/test/unittest_views_json.py,
+  cubicweb/web/views/json.py,
+  cubicweb/web/views/searchrestriction.py,
+  cubicweb/xy.py,
+  cubicweb/pyramid/auth.py,
+  cubicweb/pyramid/bwcompat.py,
+  cubicweb/pyramid/core.py,
+  cubicweb/pyramid/defaults.py,
+  cubicweb/pyramid/init_instance.py,
+  cubicweb/pyramid/__init__.py,
+  cubicweb/pyramid/login.py,
+  cubicweb/pyramid/predicates.py,
+  cubicweb/pyramid/profile.py,
+  cubicweb/pyramid/resources.py,
+  cubicweb/pyramid/rest_api.py,
+  cubicweb/pyramid/session.py,
+  cubicweb/pyramid/tools.py,
+  cubicweb/pyramid/test/__init__.py,
+  cubicweb/pyramid/test/test_bw_request.py,
+  cubicweb/pyramid/test/test_core.py,
+  cubicweb/pyramid/test/test_login.py,
+  cubicweb/pyramid/test/test_rest_api.py,
+  cubicweb/pyramid/test/test_tools.py,
+  cubicweb/pyramid/pyramidctl.py,
+
+# vim: wrap sts=2 sw=2