merge with 3.21.2

changeset   10651:9ca33768473c
parent      10622:3cc6154b94a3
parent      10649:4be98ec89fef (current diff)
child       10652:e3e4a8c45695
author      Rémi Cardona <remi.cardona@logilab.fr>
date        Mon, 12 Oct 2015 10:53:35 +0200

files:
__pkginfo__.py
cubicweb.spec
devtools/__init__.py
devtools/test/unittest_testlib.py
devtools/testlib.py
misc/migration/3.21.0_Any.py
predicates.py
server/checkintegrity.py
server/migractions.py
server/sources/__init__.py
server/sources/native.py
server/sources/rql2sql.py
server/sqlutils.py
server/test/unittest_ldapsource.py
server/test/unittest_querier.py
web/views/autoform.py
--- a/__init__.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/__init__.py	Mon Oct 12 10:53:35 2015 +0200
@@ -22,7 +22,6 @@
 
 # ignore the pygments UserWarnings
 import warnings
-import cPickle
 import zlib
 warnings.filterwarnings('ignore', category=UserWarning,
                         message='.*was already imported',
@@ -30,6 +29,7 @@
 
 
 import __builtin__
+from six import PY2, binary_type
 # '_' is available in builtins to mark internationalized string but should
 # not be used to do the actual translation
 if not hasattr(__builtin__, '_'):
@@ -38,7 +38,9 @@
 CW_SOFTWARE_ROOT = __path__[0]
 
 import sys, os, logging
-from StringIO import StringIO
+from io import BytesIO
+
+from six.moves import cPickle as pickle
 
 from logilab.common.deprecation import deprecated
 from logilab.common.logging_ext import set_log_methods
@@ -66,17 +68,19 @@
 #import threading
 #threading.settrace(log_thread)
 
-class Binary(StringIO):
-    """customize StringIO to make sure we don't use unicode"""
-    def __init__(self, buf=''):
-        assert isinstance(buf, (str, buffer, bytearray)), \
-               "Binary objects must use raw strings, not %s" % buf.__class__
-        StringIO.__init__(self, buf)
+class Binary(BytesIO):
+    """class to hold binary data. Use BytesIO to prevent use of unicode data"""
+    _allowed_types = (binary_type, bytearray, buffer if PY2 else memoryview)
+
+    def __init__(self, buf=b''):
+        assert isinstance(buf, self._allowed_types), \
+               "Binary objects must use bytes/buffer objects, not %s" % buf.__class__
+        super(Binary, self).__init__(buf)
 
     def write(self, data):
-        assert isinstance(data, (str, buffer, bytearray)), \
-               "Binary objects must use raw strings, not %s" % data.__class__
-        StringIO.write(self, data)
+        assert isinstance(data, self._allowed_types), \
+               "Binary objects must use bytes/buffer objects, not %s" % data.__class__
+        super(Binary, self).write(data)
 
     def to_file(self, fobj):
         """write a binary to disk
@@ -132,12 +136,12 @@
     def zpickle(cls, obj):
         """ return a Binary containing a gzipped pickle of obj """
         retval = cls()
-        retval.write(zlib.compress(cPickle.dumps(obj, protocol=2)))
+        retval.write(zlib.compress(pickle.dumps(obj, protocol=2)))
         return retval
 
     def unzpickle(self):
         """ decompress and loads the stream before returning it """
-        return cPickle.loads(zlib.decompress(self.getvalue()))
+        return pickle.loads(zlib.decompress(self.getvalue()))
 
 
 def check_password(eschema, value):
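
The Binary class above now derives from io.BytesIO and only accepts bytes-like input on both Python 2 and 3. A minimal usage sketch (assuming cubicweb is importable; all names come from the hunk above, nothing new is introduced):

    from cubicweb import Binary

    data = Binary(b'raw bytes only')          # unicode input fails the isinstance assert
    data.write(b' -- more bytes')
    payload = data.getvalue()                 # plain bytes on Python 2 and 3

    packed = Binary.zpickle({'answer': 42})   # zlib-compressed pickle, protocol 2
    assert packed.unzpickle() == {'answer': 42}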
--- a/__pkginfo__.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/__pkginfo__.py	Mon Oct 12 10:53:35 2015 +0200
@@ -39,6 +39,7 @@
 ]
 
 __depends__ = {
+    'six': '>= 1.4.0',
     'logilab-common': '>= 0.63.1',
     'logilab-mtconverter': '>= 0.8.0',
     'rql': '>= 0.31.2',
--- a/_gcdebug.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/_gcdebug.py	Mon Oct 12 10:53:35 2015 +0200
@@ -15,6 +15,7 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+from __future__ import print_function
 
 import gc, types, weakref
 
@@ -68,7 +69,7 @@
             except KeyError:
                 ocounters[key] = 1
         if isinstance(obj, viewreferrersclasses):
-            print '   ', obj, referrers(obj, showobjs, maxlevel)
+            print('   ', obj, referrers(obj, showobjs, maxlevel))
     garbage = [repr(obj) for obj in gc.garbage]
     return counters, ocounters, garbage
 
--- a/crypto.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/crypto.py	Mon Oct 12 10:53:35 2015 +0200
@@ -18,9 +18,10 @@
 """Simple cryptographic routines, based on python-crypto."""
 __docformat__ = "restructuredtext en"
 
-from pickle import dumps, loads
 from base64 import b64encode, b64decode
 
+from six.moves import cPickle as pickle
+
 from Crypto.Cipher import Blowfish
 
 
@@ -34,7 +35,7 @@
 
 
 def encrypt(data, seed):
-    string = dumps(data)
+    string = pickle.dumps(data)
     string = string + '*' * (8 - len(string) % 8)
     string = b64encode(_cypherer(seed).encrypt(string))
     return unicode(string)
@@ -43,4 +44,4 @@
 def decrypt(string, seed):
     # pickle ignores trailing characters so we do not need to strip them off
     string = _cypherer(seed).decrypt(b64decode(string))
-    return loads(string)
+    return pickle.loads(string)
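
crypto.py keeps its encrypt(data, seed) / decrypt(string, seed) helpers but now routes pickling through six.moves.cPickle. A hedged round-trip sketch (assumes python-crypto is installed; the seed value is a placeholder key):

    from cubicweb.crypto import encrypt, decrypt

    seed = 'sixteen byte key'                 # placeholder Blowfish key material
    token = encrypt({'eid': 42}, seed)        # pickle -> pad -> Blowfish -> base64
    assert decrypt(token, seed) == {'eid': 42}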
--- a/cubicweb.spec	Mon Oct 12 09:19:07 2015 +0200
+++ b/cubicweb.spec	Mon Oct 12 10:53:35 2015 +0200
@@ -20,6 +20,7 @@
 BuildArch:      noarch
 
 Requires:       %{python}
+Requires:       %{python}-six >= 1.4.0
 Requires:       %{python}-logilab-common >= 0.63.1
 Requires:       %{python}-logilab-mtconverter >= 0.8.0
 Requires:       %{python}-rql >= 0.31.2
--- a/cwconfig.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/cwconfig.py	Mon Oct 12 10:53:35 2015 +0200
@@ -164,6 +164,7 @@
 
    Directory where pid files will be written
 """
+from __future__ import print_function
 
 __docformat__ = "restructuredtext en"
 _ = unicode
@@ -179,6 +180,8 @@
                      basename, isdir, dirname, splitext)
 from warnings import warn, filterwarnings
 
+from six import text_type
+
 from logilab.common.decorators import cached, classproperty
 from logilab.common.deprecation import deprecated
 from logilab.common.logging_ext import set_log_methods, init_log
@@ -350,7 +353,7 @@
           }),
         ('umask',
          {'type' : 'int',
-          'default': 077,
+          'default': 0o077,
           'help': 'permission umask for files created by the server',
           'group': 'main', 'level': 2,
           }),
@@ -650,7 +653,7 @@
         self.adjust_sys_path()
         self.load_defaults()
         # will be properly initialized later by _gettext_init
-        self.translations = {'en': (unicode, lambda ctx, msgid: unicode(msgid) )}
+        self.translations = {'en': (text_type, lambda ctx, msgid: text_type(msgid) )}
         self._site_loaded = set()
         # don't register ReStructured Text directives by simple import, avoid pb
         # with eg sphinx.
@@ -960,7 +963,7 @@
             i = 1
             while exists(path) and i < 100: # arbitrary limit to avoid infinite loop
                 try:
-                    file(path, 'a')
+                    open(path, 'a')
                     break
                 except IOError:
                     path = '%s-%s.log' % (basepath, i)
@@ -994,6 +997,13 @@
         rtdir = abspath(os.environ.get('CW_RUNTIME_DIR', default))
         return join(rtdir, '%s-%s.pid' % (self.appid, self.name))
 
+    # config -> repository
+
+    def repository(self, vreg=None):
+        from cubicweb.server.repository import Repository
+        from cubicweb.server.utils import TasksManager
+        return Repository(self, TasksManager(), vreg=vreg)
+
     # instance methods used to get instance specific resources #############
 
     def __init__(self, appid, debugmode=False, creating=False):
@@ -1001,7 +1011,7 @@
         # set to true while creating an instance
         self.creating = creating
         super(CubicWebConfiguration, self).__init__(debugmode)
-        fake_gettext = (unicode, lambda ctx, msgid: unicode(msgid))
+        fake_gettext = (text_type, lambda ctx, msgid: text_type(msgid))
         for lang in self.available_languages():
             self.translations[lang] = fake_gettext
         self._cubes = None
@@ -1181,13 +1191,8 @@
 
     def set_sources_mode(self, sources):
         if not 'all' in sources:
-            print 'warning: ignoring specified sources, requires a repository '\
-                  'configuration'
-
-    def migration_handler(self):
-        """return a migration handler instance"""
-        from cubicweb.migration import MigrationHelper
-        return MigrationHelper(self, verbosity=self.verbosity)
+            print('warning: ignoring specified sources, requires a repository '
+                  'configuration')
 
     def i18ncompile(self, langs=None):
         from cubicweb import i18n
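
The repository() shortcut added to cwconfig.py above replaces going through repoapi's in-memory repo helper in several callers later in this changeset (devtools, cwctl). A sketch of how it is meant to be used ('myinstance' is a placeholder instance id):

    from cubicweb.cwconfig import CubicWebConfiguration

    config = CubicWebConfiguration.config_for('myinstance')
    repo = config.repository()                # Repository(config, TasksManager())
    print(repo.get_versions())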
--- a/cwctl.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/cwctl.py	Mon Oct 12 10:53:35 2015 +0200
@@ -18,6 +18,7 @@
 """the cubicweb-ctl tool, based on logilab.common.clcommands to
 provide a pluggable commands system.
 """
+from __future__ import print_function
 
 __docformat__ = "restructuredtext en"
 
@@ -28,7 +29,6 @@
 from warnings import warn, filterwarnings
 from os import remove, listdir, system, pathsep
 from os.path import exists, join, isfile, isdir, dirname, abspath
-from urlparse import urlparse
 
 try:
     from os import kill, getpgid
@@ -38,6 +38,8 @@
     def getpgid():
         """win32 getpgid implementation"""
 
+from six.moves.urllib.parse import urlparse
+
 from logilab.common.clcommands import CommandLine
 from logilab.common.shellutils import ASK
 from logilab.common.configuration import merge_options
@@ -113,15 +115,15 @@
         _allinstances = list_instances(regdir)
         if isfile(join(regdir, 'startorder')):
             allinstances = []
-            for line in file(join(regdir, 'startorder')):
+            for line in open(join(regdir, 'startorder')):
                 line = line.strip()
                 if line and not line.startswith('#'):
                     try:
                         _allinstances.remove(line)
                         allinstances.append(line)
                     except ValueError:
-                        print ('ERROR: startorder file contains unexistant '
-                               'instance %s' % line)
+                        print('ERROR: startorder file contains unexistant '
+                              'instance %s' % line)
             allinstances += _allinstances
         else:
             allinstances = _allinstances
@@ -146,7 +148,7 @@
         status = 0
         for appid in args:
             if askconfirm:
-                print '*'*72
+                print('*'*72)
                 if not ASK.confirm('%s instance %r ?' % (self.name, appid)):
                     continue
             try:
@@ -184,13 +186,13 @@
             forkcmd = None
         for appid in args:
             if askconfirm:
-                print '*'*72
+                print('*'*72)
                 if not ASK.confirm('%s instance %r ?' % (self.name, appid)):
                     continue
             if forkcmd:
                 status = system('%s %s' % (forkcmd, appid))
                 if status:
-                    print '%s exited with status %s' % (forkcmd, status)
+                    print('%s exited with status %s' % (forkcmd, status))
             else:
                 self.run_arg(appid)
 
@@ -224,19 +226,19 @@
         from cubicweb.migration import ConfigurationProblem
 
         if mode == 'all':
-            print 'CubicWeb %s (%s mode)' % (cwcfg.cubicweb_version(), cwcfg.mode)
-            print
+            print('CubicWeb %s (%s mode)' % (cwcfg.cubicweb_version(), cwcfg.mode))
+            print()
 
         if mode in ('all', 'config', 'configurations'):
-            print 'Available configurations:'
+            print('Available configurations:')
             for config in CONFIGURATIONS:
-                print '*', config.name
+                print('*', config.name)
                 for line in config.__doc__.splitlines():
                     line = line.strip()
                     if not line:
                         continue
-                    print '   ', line
-            print
+                    print('   ', line)
+            print()
 
         if mode in ('all', 'cubes'):
             cfgpb = ConfigurationProblem(cwcfg)
@@ -244,11 +246,11 @@
                 cubesdir = pathsep.join(cwcfg.cubes_search_path())
                 namesize = max(len(x) for x in cwcfg.available_cubes())
             except ConfigurationError as ex:
-                print 'No cubes available:', ex
+                print('No cubes available:', ex)
             except ValueError:
-                print 'No cubes available in %s' % cubesdir
+                print('No cubes available in %s' % cubesdir)
             else:
-                print 'Available cubes (%s):' % cubesdir
+                print('Available cubes (%s):' % cubesdir)
                 for cube in cwcfg.available_cubes():
                     try:
                         tinfo = cwcfg.cube_pkginfo(cube)
@@ -257,59 +259,59 @@
                     except (ConfigurationError, AttributeError) as ex:
                         tinfo = None
                         tversion = '[missing cube information: %s]' % ex
-                    print '* %s %s' % (cube.ljust(namesize), tversion)
+                    print('* %s %s' % (cube.ljust(namesize), tversion))
                     if self.config.verbose:
                         if tinfo:
                             descr = getattr(tinfo, 'description', '')
                             if not descr:
                                 descr = tinfo.__doc__
                             if descr:
-                                print '    '+ '    \n'.join(descr.splitlines())
+                                print('    '+ '    \n'.join(descr.splitlines()))
                         modes = detect_available_modes(cwcfg.cube_dir(cube))
-                        print '    available modes: %s' % ', '.join(modes)
-            print
+                        print('    available modes: %s' % ', '.join(modes))
+            print()
 
         if mode in ('all', 'instances'):
             try:
                 regdir = cwcfg.instances_dir()
             except ConfigurationError as ex:
-                print 'No instance available:', ex
-                print
+                print('No instance available:', ex)
+                print()
                 return
             instances = list_instances(regdir)
             if instances:
-                print 'Available instances (%s):' % regdir
+                print('Available instances (%s):' % regdir)
                 for appid in instances:
                     modes = cwcfg.possible_configurations(appid)
                     if not modes:
-                        print '* %s (BROKEN instance, no configuration found)' % appid
+                        print('* %s (BROKEN instance, no configuration found)' % appid)
                         continue
-                    print '* %s (%s)' % (appid, ', '.join(modes))
+                    print('* %s (%s)' % (appid, ', '.join(modes)))
                     try:
                         config = cwcfg.config_for(appid, modes[0])
                     except Exception as exc:
-                        print '    (BROKEN instance, %s)' % exc
+                        print('    (BROKEN instance, %s)' % exc)
                         continue
             else:
-                print 'No instance available in %s' % regdir
-            print
+                print('No instance available in %s' % regdir)
+            print()
 
         if mode == 'all':
             # configuration management problem solving
             cfgpb.solve()
             if cfgpb.warnings:
-                print 'Warnings:\n', '\n'.join('* '+txt for txt in cfgpb.warnings)
+                print('Warnings:\n', '\n'.join('* '+txt for txt in cfgpb.warnings))
             if cfgpb.errors:
-                print 'Errors:'
+                print('Errors:')
                 for op, cube, version, src in cfgpb.errors:
                     if op == 'add':
-                        print '* cube', cube,
+                        print('* cube', cube, end=' ')
                         if version:
-                            print ' version', version,
-                        print 'is not installed, but required by %s' % src
+                            print(' version', version, end=' ')
+                        print('is not installed, but required by %s' % src)
                     else:
-                        print '* cube %s version %s is installed, but version %s is required by %s' % (
-                            cube, cfgpb.cubes[cube], version, src)
+                        print('* cube %s version %s is installed, but version %s is required by %s' % (
+                            cube, cfgpb.cubes[cube], version, src))
 
 def check_options_consistency(config):
     if config.automatic and config.config_level > 0:
@@ -380,20 +382,20 @@
             templdirs = [cwcfg.cube_dir(cube)
                          for cube in cubes]
         except ConfigurationError as ex:
-            print ex
-            print '\navailable cubes:',
-            print ', '.join(cwcfg.available_cubes())
+            print(ex)
+            print('\navailable cubes:', end=' ')
+            print(', '.join(cwcfg.available_cubes()))
             return
         # create the registry directory for this instance
-        print '\n'+underline_title('Creating the instance %s' % appid)
+        print('\n'+underline_title('Creating the instance %s' % appid))
         create_dir(config.apphome)
         # cubicweb-ctl configuration
         if not self.config.automatic:
-            print '\n'+underline_title('Configuring the instance (%s.conf)'
-                                       % configname)
+            print('\n'+underline_title('Configuring the instance (%s.conf)'
+                                       % configname))
             config.input_config('main', self.config.config_level)
         # configuration'specific stuff
-        print
+        print()
         helper.bootstrap(cubes, self.config.automatic, self.config.config_level)
         # input for cubes specific options
         if not self.config.automatic:
@@ -402,23 +404,23 @@
                            and odict.get('level') <= self.config.config_level)
             for section in sections:
                 if section not in ('main', 'email', 'web'):
-                    print '\n' + underline_title('%s options' % section)
+                    print('\n' + underline_title('%s options' % section))
                     config.input_config(section, self.config.config_level)
         # write down configuration
         config.save()
         self._handle_win32(config, appid)
-        print '-> generated config %s' % config.main_config_file()
+        print('-> generated config %s' % config.main_config_file())
         # handle i18n files structure
         # in the first cube given
         from cubicweb import i18n
         langs = [lang for lang, _ in i18n.available_catalogs(join(templdirs[0], 'i18n'))]
         errors = config.i18ncompile(langs)
         if errors:
-            print '\n'.join(errors)
+            print('\n'.join(errors))
             if self.config.automatic \
                    or not ASK.confirm('error while compiling message catalogs, '
                                       'continue anyway ?'):
-                print 'creation not completed'
+                print('creation not completed')
                 return
         # create the additional data directory for this instance
         if config.appdatahome != config.apphome: # true in dev mode
@@ -427,9 +429,9 @@
         if config['uid']:
             from logilab.common.shellutils import chown
             # this directory should be owned by the uid of the server process
-            print 'set %s as owner of the data directory' % config['uid']
+            print('set %s as owner of the data directory' % config['uid'])
             chown(config.appdatahome, config['uid'])
-        print '\n-> creation done for %s\n' % repr(config.apphome)[1:-1]
+        print('\n-> creation done for %s\n' % repr(config.apphome)[1:-1])
         if not self.config.no_db_create:
             helper.postcreate(self.config.automatic, self.config.config_level)
 
@@ -487,7 +489,7 @@
             if ex.errno != errno.ENOENT:
                 raise
         confignames = ', '.join([config.name for config in configs])
-        print '-> instance %s (%s) deleted.' % (appid, confignames)
+        print('-> instance %s (%s) deleted.' % (appid, confignames))
 
 
 # instance commands ########################################################
@@ -551,7 +553,7 @@
 the --force option."
             raise ExecutionError(msg % (appid, pidf))
         if helper.start_server(config) == 1:
-            print 'instance %s started' % appid
+            print('instance %s started' % appid)
 
 
 def init_cmdline_log_threshold(config, loglevel):
@@ -606,7 +608,7 @@
         except OSError:
             # already removed by twistd
             pass
-        print 'instance %s stopped' % appid
+        print('instance %s stopped' % appid)
 
 
 class RestartInstanceCommand(StartInstanceCommand):
@@ -630,7 +632,7 @@
         # get instances in startorder
         for appid in args:
             if askconfirm:
-                print '*'*72
+                print('*'*72)
                 if not ASK.confirm('%s instance %r ?' % (self.name, appid)):
                     continue
             StopInstanceCommand(self.logger).stop_instance(appid)
@@ -677,14 +679,14 @@
         status = 0
         for mode in cwcfg.possible_configurations(appid):
             config = cwcfg.config_for(appid, mode)
-            print '[%s-%s]' % (appid, mode),
+            print('[%s-%s]' % (appid, mode), end=' ')
             try:
                 pidf = config['pid-file']
             except KeyError:
-                print 'buggy instance, pid file not specified'
+                print('buggy instance, pid file not specified')
                 continue
             if not exists(pidf):
-                print "doesn't seem to be running"
+                print("doesn't seem to be running")
                 status = 1
                 continue
             pid = int(open(pidf).read().strip())
@@ -692,10 +694,10 @@
             try:
                 getpgid(pid)
             except OSError:
-                print "should be running with pid %s but the process can not be found" % pid
+                print("should be running with pid %s but the process can not be found" % pid)
                 status = 1
                 continue
-            print "running with pid %s" % (pid)
+            print("running with pid %s" % (pid))
         return status
 
 class UpgradeInstanceCommand(InstanceCommandFork):
@@ -756,7 +758,7 @@
         )
 
     def upgrade_instance(self, appid):
-        print '\n' + underline_title('Upgrading the instance %s' % appid)
+        print('\n' + underline_title('Upgrading the instance %s' % appid))
         from logilab.common.changelog import Version
         config = cwcfg.config_for(appid)
         instance_running = exists(config['pid-file'])
@@ -767,7 +769,7 @@
             set_sources_mode(self.config.ext_sources or ('migration',))
         # get instance and installed versions for the server and the componants
         mih = config.migration_handler()
-        repo = mih.repo_connect()
+        repo = mih.repo
         vcconf = repo.get_versions()
         helper = self.config_helper(config, required=False)
         if self.config.force_cube_version:
@@ -797,30 +799,30 @@
         # run cubicweb/componants migration scripts
         if self.config.fs_only or toupgrade:
             for cube, fromversion, toversion in toupgrade:
-                print '-> migration needed from %s to %s for %s' % (fromversion, toversion, cube)
+                print('-> migration needed from %s to %s for %s' % (fromversion, toversion, cube))
             with mih.cnx:
                 with mih.cnx.security_enabled(False, False):
                     mih.migrate(vcconf, reversed(toupgrade), self.config)
         else:
-            print '-> no data migration needed for instance %s.' % appid
+            print('-> no data migration needed for instance %s.' % appid)
         # rewrite main configuration file
         mih.rewrite_configuration()
         mih.shutdown()
         # handle i18n upgrade
         if not self.i18nupgrade(config):
             return
-        print
+        print()
         if helper:
             helper.postupgrade(repo)
-        print '-> instance migrated.'
+        print('-> instance migrated.')
         if instance_running and not (CWDEV or self.config.nostartstop):
             # restart instance through fork to get a proper environment, avoid
             # uicfg pb (and probably gettext catalogs, to check...)
             forkcmd = '%s start %s' % (sys.argv[0], appid)
             status = system(forkcmd)
             if status:
-                print '%s exited with status %s' % (forkcmd, status)
-        print
+                print('%s exited with status %s' % (forkcmd, status))
+        print()
 
     def i18nupgrade(self, config):
         # handle i18n upgrade:
@@ -832,10 +834,10 @@
         langs = [lang for lang, _ in i18n.available_catalogs(join(templdir, 'i18n'))]
         errors = config.i18ncompile(langs)
         if errors:
-            print '\n'.join(errors)
+            print('\n'.join(errors))
             if not ASK.confirm('Error while compiling message catalogs, '
                                'continue anyway?'):
-                print '-> migration not completed.'
+                print('-> migration not completed.')
                 return False
         return True
 
@@ -856,10 +858,9 @@
         config.quick_start = True
         if hasattr(config, 'set_sources_mode'):
             config.set_sources_mode(('migration',))
-        repo = config.migration_handler().repo_connect()
-        vcconf = repo.get_versions()
+        vcconf = config.repository().get_versions()
         for key in sorted(vcconf):
-            print key+': %s.%s.%s' % vcconf[key]
+            print(key+': %s.%s.%s' % vcconf[key])
 
 class ShellCommand(Command):
     """Run an interactive migration shell on an instance. This is a python shell
@@ -940,9 +941,9 @@
                 repo = get_repository(appuri)
                 cnx = connect(repo, login=login, password=pwd, mulcnx=False)
             except AuthenticationError as ex:
-                print ex
+                print(ex)
             except (KeyboardInterrupt, EOFError):
-                print
+                print()
                 sys.exit(0)
             else:
                 break
@@ -1003,7 +1004,7 @@
             config.init_cubes(repo.get_cubes())
         errors = config.i18ncompile()
         if errors:
-            print '\n'.join(errors)
+            print('\n'.join(errors))
 
 
 class ListInstancesCommand(Command):
@@ -1015,7 +1016,7 @@
         """run the command with its specific arguments"""
         regdir = cwcfg.instances_dir()
         for appid in sorted(listdir(regdir)):
-            print appid
+            print(appid)
 
 
 class ListCubesCommand(Command):
@@ -1026,7 +1027,7 @@
     def run(self, args):
         """run the command with its specific arguments"""
         for cube in cwcfg.available_cubes():
-            print cube
+            print(cube)
 
 class ConfigureInstanceCommand(InstanceCommand):
     """Configure instance.
@@ -1145,10 +1146,10 @@
     try:
         CWCTL.run(args)
     except ConfigurationError as err:
-        print 'ERROR: ', err
+        print('ERROR: ', err)
         sys.exit(1)
     except ExecutionError as err:
-        print err
+        print(err)
         sys.exit(2)
 
 if __name__ == '__main__':
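
cwctl.py shows the porting idiom applied throughout this merge: print_function at the top of each module, and six.moves for stdlib modules renamed in Python 3. A condensed sketch of that idiom (not project code):

    from __future__ import print_function

    from six import PY2, text_type
    from six.moves import cPickle as pickle, range
    from six.moves.urllib.parse import urlparse

    print('host:', urlparse('http://localhost:8080/login').hostname)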
--- a/dataimport/csv.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/dataimport/csv.py	Mon Oct 12 10:53:35 2015 +0200
@@ -16,18 +16,19 @@
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """Functions to help importing CSV data"""
-
-from __future__ import absolute_import
+from __future__ import absolute_import, print_function
 
 import csv as csvmod
 import warnings
 import os.path as osp
 
+from six import string_types
+
 from logilab.common import shellutils
 
 
 def count_lines(stream_or_filename):
-    if isinstance(stream_or_filename, basestring):
+    if isinstance(stream_or_filename, string_types):
         f = open(stream_or_filename)
     else:
         f = stream_or_filename
@@ -48,7 +49,7 @@
     if quote is not None:
         quotechar = quote
         warnings.warn("[3.20] 'quote' kwarg is deprecated, use 'quotechar' instead")
-    if isinstance(stream_or_path, basestring):
+    if isinstance(stream_or_path, string_types):
         if not osp.exists(stream_or_path):
             raise Exception("file doesn't exists: %s" % stream_or_path)
         stream = open(stream_or_path)
@@ -64,7 +65,7 @@
         yield urow
         if withpb:
             pb.update()
-    print ' %s rows imported' % rowcount
+    print(' %s rows imported' % rowcount)
 
 
 def ucsvreader(stream, encoding='utf-8', delimiter=',', quotechar='"',
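
count_lines() and ucsvreader() now accept either a filename or an open binary stream, with six.string_types standing in for basestring. Usage sketch, mirroring dataimport/test/test_csv.py further down in this changeset:

    from io import BytesIO
    from cubicweb.dataimport import csv

    stream = BytesIO(b'a,b,c,d,\n1,2,3,4,\n')
    rows = list(csv.ucsvreader(stream, skipfirst=True, ignore_errors=True))
    assert rows == [[u'1', u'2', u'3', u'4', u'']]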
--- a/dataimport/deprecated.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/dataimport/deprecated.py	Mon Oct 12 10:53:35 2015 +0200
@@ -58,6 +58,7 @@
 .. BUG file with one column are not parsable
 .. TODO rollback() invocation is not possible yet
 """
+from __future__ import print_function
 
 import sys
 import traceback
@@ -103,7 +104,7 @@
 
 @deprecated('[3.21] deprecated')
 def tell(msg):
-    print msg
+    print(msg)
 
 
 @deprecated('[3.21] deprecated')
@@ -166,7 +167,9 @@
                 if res[dest] is None:
                     break
         except ValueError as err:
-            raise ValueError('error with %r field: %s' % (src, err)), None, sys.exc_info()[-1]
+            exc = ValueError('error with %r field: %s' % (src, err))
+            exc.__traceback__ = sys.exc_info()[-1]
+            raise exc
     return res
 
 
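The hunk above rewrites the Python-2-only "raise Exc, None, tb" form by attaching __traceback__ by hand. For reference, six ships a traceback-preserving helper that covers the same need; a sketch of the pattern with it (an alternative, not what the changeset uses):

    import sys
    from six import reraise

    try:
        int('not a number')
    except ValueError as err:
        exc = ValueError('error with %r field: %s' % ('size', err))
        reraise(type(exc), exc, sys.exc_info()[2])
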
--- a/dataimport/pgstore.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/dataimport/pgstore.py	Mon Oct 12 10:53:35 2015 +0200
@@ -16,10 +16,10 @@
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """Postgres specific store"""
+from __future__ import print_function
 
 import threading
 import warnings
-import cPickle
 import os.path as osp
 from StringIO import StringIO
 from time import asctime
@@ -27,6 +27,9 @@
 from collections import defaultdict
 from base64 import b64encode
 
+from six import string_types, integer_types
+from six.moves import cPickle as pickle, range
+
 from cubicweb.utils import make_uid
 from cubicweb.server.sqlutils import SQL_PREFIX
 from cubicweb.dataimport.stores import NoHookRQLObjectStore
@@ -40,7 +43,7 @@
     try:
         chunksize = (len(statements) / nb_threads) + 1
         threads = []
-        for i in xrange(nb_threads):
+        for i in range(nb_threads):
             chunks = statements[i*chunksize:(i+1)*chunksize]
             thread = threading.Thread(target=_execmany_thread,
                                       args=(sql_connect, chunks,
@@ -52,7 +55,7 @@
         for t in threads:
             t.join()
     except Exception:
-        print 'Error in import statements'
+        print('Error in import statements')
 
 def _execmany_thread_not_copy_from(cu, statement, data, table=None,
                                    columns=None, encoding='utf-8'):
@@ -100,7 +103,7 @@
                     columns = list(data[0])
                 execmany_func(cu, statement, data, table, columns, encoding)
             except Exception:
-                print 'unable to copy data into table %s' % table
+                print('unable to copy data into table %s' % table)
                 # Error in import statement, save data in dump_output_dir
                 if dump_output_dir is not None:
                     pdata = {'data': data, 'statement': statement,
@@ -108,11 +111,10 @@
                     filename = make_uid()
                     try:
                         with open(osp.join(dump_output_dir,
-                                           '%s.pickle' % filename), 'w') as fobj:
-                            fobj.write(cPickle.dumps(pdata))
+                                           '%s.pickle' % filename), 'wb') as fobj:
+                            pickle.dump(pdata, fobj)
                     except IOError:
-                        print 'ERROR while pickling in', dump_output_dir, filename+'.pickle'
-                        pass
+                        print('ERROR while pickling in', dump_output_dir, filename+'.pickle')
                 cnx.rollback()
                 raise
     finally:
@@ -135,7 +137,7 @@
     :encoding: resulting string encoding (default: utf-8)
     '''
     encoding = opts.get('encoding','utf-8')
-    escape_chars = ((u'\\', ur'\\'), (u'\t', u'\\t'), (u'\r', u'\\r'),
+    escape_chars = ((u'\\', u'\\\\'), (u'\t', u'\\t'), (u'\r', u'\\r'),
                     (u'\n', u'\\n'))
     for char, replace in escape_chars:
         value = value.replace(char, replace)
@@ -164,8 +166,8 @@
 # (types, converter) list.
 _COPYFROM_BUFFER_CONVERTERS = [
     (type(None), _copyfrom_buffer_convert_None),
-    ((long, int, float), _copyfrom_buffer_convert_number),
-    (basestring, _copyfrom_buffer_convert_string),
+    (integer_types + (float,), _copyfrom_buffer_convert_number),
+    (string_types, _copyfrom_buffer_convert_string),
     (datetime, _copyfrom_buffer_convert_datetime),
     (date, _copyfrom_buffer_convert_date),
     (time, _copyfrom_buffer_convert_time),
@@ -185,7 +187,7 @@
     rows = []
     if columns is None:
         if isinstance(data[0], (tuple, list)):
-            columns = range(len(data[0]))
+            columns = list(range(len(data[0])))
         elif isinstance(data[0], dict):
             columns = data[0].keys()
         else:
@@ -335,7 +337,7 @@
         self._sql.eid_insertdicts = {}
 
     def flush(self):
-        print 'starting flush'
+        print('starting flush')
         _entities_sql = self._sql.entities
         _relations_sql = self._sql.relations
         _inlined_relations_sql = self._sql.inlined_relations
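
In the pgstore hunks above, the ur'\\' literal had to go because u-prefixed raw strings are a syntax error on Python 3; u'\\\\' denotes the same two-character string. A self-contained illustration of the COPY FROM escape table shown there:

    escape_chars = ((u'\\', u'\\\\'), (u'\t', u'\\t'), (u'\r', u'\\r'),
                    (u'\n', u'\\n'))
    value = u'a\tb\nc\\d'
    for char, replace in escape_chars:
        value = value.replace(char, replace)
    assert value == u'a\\tb\\nc\\\\d'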
--- a/dataimport/test/test_csv.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/dataimport/test/test_csv.py	Mon Oct 12 10:53:35 2015 +0200
@@ -17,7 +17,7 @@
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """unittest for cubicweb.dataimport.csv"""
 
-from StringIO import StringIO
+from io import BytesIO
 
 from logilab.common.testlib import TestCase, unittest_main
 
@@ -27,7 +27,7 @@
 class UcsvreaderTC(TestCase):
 
     def test_empty_lines_skipped(self):
-        stream = StringIO('''a,b,c,d,
+        stream = BytesIO(b'''a,b,c,d,
 1,2,3,4,
 ,,,,
 ,,,,
@@ -45,7 +45,7 @@
                          list(csv.ucsvreader(stream, skip_empty=False)))
 
     def test_skip_first(self):
-        stream = StringIO('a,b,c,d,\n1,2,3,4,\n')
+        stream = BytesIO(b'a,b,c,d,\n1,2,3,4,\n')
         reader = csv.ucsvreader(stream, skipfirst=True, ignore_errors=True)
         self.assertEqual(list(reader),
                          [[u'1', u'2', u'3', u'4', u'']])
--- a/dataimport/test/test_pgstore.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/dataimport/test/test_pgstore.py	Mon Oct 12 10:53:35 2015 +0200
@@ -20,6 +20,7 @@
 
 import datetime as DT
 
+from six import PY2
 from logilab.common.testlib import TestCase, unittest_main
 
 from cubicweb.dataimport import pgstore
@@ -36,7 +37,8 @@
     def test_convert_number(self):
         cnvt = pgstore._copyfrom_buffer_convert_number
         self.assertEqual('42', cnvt(42))
-        self.assertEqual('42', cnvt(42L))
+        if PY2:
+            self.assertEqual('42', cnvt(long(42)))
         self.assertEqual('42.42', cnvt(42.42))
 
     def test_convert_string(self):
@@ -64,9 +66,10 @@
 
     # test buffer
     def test_create_copyfrom_buffer_tuple(self):
-        data = ((42, 42L, 42.42, u'éléphant', DT.date(666, 1, 13), DT.time(6, 6, 6),
+        l = long if PY2 else int
+        data = ((42, l(42), 42.42, u'éléphant', DT.date(666, 1, 13), DT.time(6, 6, 6),
                  DT.datetime(666, 6, 13, 6, 6, 6)),
-                (6, 6L, 6.6, u'babar', DT.date(2014, 1, 14), DT.time(4, 2, 1),
+                (6, l(6), 6.6, u'babar', DT.date(2014, 1, 14), DT.time(4, 2, 1),
                  DT.datetime(2014, 1, 1, 0, 0, 0)))
         results = pgstore._create_copyfrom_buffer(data)
         # all columns
--- a/debian/control	Mon Oct 12 09:19:07 2015 +0200
+++ b/debian/control	Mon Oct 12 10:53:35 2015 +0200
@@ -10,6 +10,7 @@
 Build-Depends:
  debhelper (>= 7),
  python (>= 2.6),
+ python-six (>= 1.4.0),
  python-sphinx,
  python-logilab-common,
  python-unittest2 | python (>= 2.7),
@@ -155,6 +156,7 @@
  ${python:Depends},
  graphviz,
  gettext,
+ python-six (>= 1.4.0),
  python-logilab-mtconverter (>= 0.8.0),
  python-logilab-common (>= 0.63.1),
  python-markdown,
--- a/devtools/__init__.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/devtools/__init__.py	Mon Oct 12 10:53:35 2015 +0200
@@ -16,6 +16,7 @@
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """Test tools for cubicweb"""
+from __future__ import print_function
 
 __docformat__ = "restructuredtext en"
 
@@ -24,7 +25,6 @@
 import errno
 import logging
 import shutil
-import pickle
 import glob
 import subprocess
 import warnings
@@ -35,6 +35,9 @@
 from os.path import (abspath, realpath, join, exists, split, isabs, isdir)
 from functools import partial
 
+from six import text_type
+from six.moves import cPickle as pickle
+
 from logilab.common.date import strptime
 from logilab.common.decorators import cached, clear_cache
 
@@ -92,7 +95,7 @@
 DEFAULT_PSQL_SOURCES = DEFAULT_SOURCES.copy()
 DEFAULT_PSQL_SOURCES['system'] = DEFAULT_SOURCES['system'].copy()
 DEFAULT_PSQL_SOURCES['system']['db-driver'] = 'postgres'
-DEFAULT_PSQL_SOURCES['system']['db-user'] = unicode(getpass.getuser())
+DEFAULT_PSQL_SOURCES['system']['db-user'] = text_type(getpass.getuser())
 DEFAULT_PSQL_SOURCES['system']['db-password'] = None
 
 def turn_repo_off(repo):
@@ -109,7 +112,7 @@
             try:
                 repo.close(sessionid)
             except BadConnectionId: #this is strange ? thread issue ?
-                print 'XXX unknown session', sessionid
+                print('XXX unknown session', sessionid)
         for cnxset in repo.cnxsets:
             cnxset.close(True)
         repo.system_source.shutdown()
@@ -193,7 +196,7 @@
     def sources_file(self):
         """define in subclasses self.sourcefile if necessary"""
         if self.sourcefile:
-            print 'Reading sources from', self.sourcefile
+            print('Reading sources from', self.sourcefile)
             sourcefile = self.sourcefile
             if not isabs(sourcefile):
                 sourcefile = join(self.apphome, sourcefile)
@@ -399,9 +402,8 @@
 
     def _new_repo(self, config):
         """Factory method to create a new Repository Instance"""
-        from cubicweb.repoapi import _get_inmemory_repo
         config._cubes = None
-        repo = _get_inmemory_repo(config)
+        repo = config.repository()
         # extending Repository class
         repo._has_started = False
         repo._needs_refresh = False
@@ -414,7 +416,7 @@
         from cubicweb.repoapi import connect
         repo = self.get_repo()
         sources = self.config.read_sources_file()
-        login  = unicode(sources['admin']['login'])
+        login  = text_type(sources['admin']['login'])
         password = sources['admin']['password'] or 'xxx'
         cnx = connect(repo, login, password=password)
         return cnx
@@ -493,7 +495,7 @@
         if test_db_id is DEFAULT_EMPTY_DB_ID:
             self.init_test_database()
         else:
-            print 'Building %s for database %s' % (test_db_id, self.dbname)
+            print('Building %s for database %s' % (test_db_id, self.dbname))
             self.build_db_cache(DEFAULT_EMPTY_DB_ID)
             self.restore_database(DEFAULT_EMPTY_DB_ID)
             repo = self.get_repo(startup=True)
@@ -542,7 +544,7 @@
         try:
             subprocess.check_call(['initdb', '-D', datadir, '-E', 'utf-8', '--locale=C'])
 
-        except OSError, err:
+        except OSError as err:
             if err.errno == errno.ENOENT:
                 raise OSError('"initdb" could not be found. '
                               'You should add the postgresql bin folder to your PATH '
@@ -561,7 +563,7 @@
         subprocess.check_call(['pg_ctl', 'start', '-w', '-D', datadir,
                                '-o', options],
                               env=env)
-    except OSError, err:
+    except OSError as err:
         if err.errno == errno.ENOENT:
             raise OSError('"pg_ctl" could not be found. '
                           'You should add the postgresql bin folder to your PATH '
@@ -833,21 +835,21 @@
                 found_date = False
                 for row, rowdesc in zip(rset, rset.description):
                     for cellindex, (value, vtype) in enumerate(zip(row, rowdesc)):
-                        if vtype in ('Date', 'Datetime') and type(value) is unicode:
+                        if vtype in ('Date', 'Datetime') and isinstance(value, text_type):
                             found_date = True
                             value = value.rsplit('.', 1)[0]
                             try:
                                 row[cellindex] = strptime(value, '%Y-%m-%d %H:%M:%S')
                             except Exception:
                                 row[cellindex] = strptime(value, '%Y-%m-%d')
-                        if vtype == 'Time' and type(value) is unicode:
+                        if vtype == 'Time' and isinstance(value, text_type):
                             found_date = True
                             try:
                                 row[cellindex] = strptime(value, '%H:%M:%S')
                             except Exception:
                                 # DateTime used as Time?
                                 row[cellindex] = strptime(value, '%Y-%m-%d %H:%M:%S')
-                        if vtype == 'Interval' and type(value) is int:
+                        if vtype == 'Interval' and isinstance(value, int):
                             found_date = True
                             row[cellindex] = timedelta(0, value, 0) # XXX value is in number of seconds?
                     if not found_date:
--- a/devtools/devctl.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/devtools/devctl.py	Mon Oct 12 10:53:35 2015 +0200
@@ -18,6 +18,7 @@
 """additional cubicweb-ctl commands and command handlers for cubicweb and
 cubicweb's cubes development
 """
+from __future__ import print_function
 
 __docformat__ = "restructuredtext en"
 
@@ -314,21 +315,21 @@
         from cubicweb.i18n import extract_from_tal, execute2
         tempdir = tempfile.mkdtemp(prefix='cw-')
         cwi18ndir = WebConfiguration.i18n_lib_dir()
-        print '-> extract messages:',
-        print 'schema',
+        print('-> extract messages:', end=' ')
+        print('schema', end=' ')
         schemapot = osp.join(tempdir, 'schema.pot')
         potfiles = [schemapot]
         potfiles.append(schemapot)
         # explicit close necessary else the file may not be yet flushed when
         # we'll using it below
-        schemapotstream = file(schemapot, 'w')
+        schemapotstream = open(schemapot, 'w')
         generate_schema_pot(schemapotstream.write, cubedir=None)
         schemapotstream.close()
-        print 'TAL',
+        print('TAL', end=' ')
         tali18nfile = osp.join(tempdir, 'tali18n.py')
         extract_from_tal(find(osp.join(BASEDIR, 'web'), ('.py', '.pt')),
                          tali18nfile)
-        print '-> generate .pot files.'
+        print('-> generate .pot files.')
         pyfiles = get_module_files(BASEDIR)
         pyfiles += globfind(osp.join(BASEDIR, 'misc', 'migration'), '*.py')
         schemafiles = globfind(osp.join(BASEDIR, 'schemas'), '*.py')
@@ -349,12 +350,12 @@
             if osp.exists(potfile):
                 potfiles.append(potfile)
             else:
-                print '-> WARNING: %s file was not generated' % potfile
-        print '-> merging %i .pot files' % len(potfiles)
+                print('-> WARNING: %s file was not generated' % potfile)
+        print('-> merging %i .pot files' % len(potfiles))
         cubicwebpot = osp.join(tempdir, 'cubicweb.pot')
         cmd = ['msgcat', '-o', cubicwebpot] + potfiles
         execute2(cmd)
-        print '-> merging main pot file with existing translations.'
+        print('-> merging main pot file with existing translations.')
         chdir(cwi18ndir)
         toedit = []
         for lang in CubicWebNoAppConfiguration.cw_languages():
@@ -368,10 +369,10 @@
         # cleanup
         rm(tempdir)
         # instructions pour la suite
-        print '-> regenerated CubicWeb\'s .po catalogs.'
-        print '\nYou can now edit the following files:'
-        print '* ' + '\n* '.join(toedit)
-        print 'when you are done, run "cubicweb-ctl i18ncube yourcube".'
+        print('-> regenerated CubicWeb\'s .po catalogs.')
+        print('\nYou can now edit the following files:')
+        print('* ' + '\n* '.join(toedit))
+        print('when you are done, run "cubicweb-ctl i18ncube yourcube".')
 
 
 class UpdateCubeCatalogCommand(Command):
@@ -398,25 +399,25 @@
     from subprocess import CalledProcessError
     for cubedir in cubes:
         if not osp.isdir(cubedir):
-            print '-> ignoring %s that is not a directory.' % cubedir
+            print('-> ignoring %s that is not a directory.' % cubedir)
             continue
         try:
             toedit = update_cube_catalogs(cubedir)
         except CalledProcessError as exc:
-            print '\n*** error while updating catalogs for cube', cubedir
-            print 'cmd:\n%s' % exc.cmd
-            print 'stdout:\n%s\nstderr:\n%s' % exc.data
+            print('\n*** error while updating catalogs for cube', cubedir)
+            print('cmd:\n%s' % exc.cmd)
+            print('stdout:\n%s\nstderr:\n%s' % exc.data)
         except Exception:
             import traceback
             traceback.print_exc()
-            print '*** error while updating catalogs for cube', cubedir
+            print('*** error while updating catalogs for cube', cubedir)
             return False
         else:
             # instructions pour la suite
             if toedit:
-                print '-> regenerated .po catalogs for cube %s.' % cubedir
-                print '\nYou can now edit the following files:'
-                print '* ' + '\n* '.join(toedit)
+                print('-> regenerated .po catalogs for cube %s.' % cubedir)
+                print('\nYou can now edit the following files:')
+                print('* ' + '\n* '.join(toedit))
                 print ('When you are done, run "cubicweb-ctl i18ninstance '
                        '<yourinstance>" to see changes in your instances.')
             return True
@@ -429,7 +430,7 @@
     from cubicweb.i18n import extract_from_tal, execute2
     cube = osp.basename(osp.normpath(cubedir))
     tempdir = tempfile.mkdtemp()
-    print underline_title('Updating i18n catalogs for cube %s' % cube)
+    print(underline_title('Updating i18n catalogs for cube %s' % cube))
     chdir(cubedir)
     if osp.exists(osp.join('i18n', 'entities.pot')):
         warn('entities.pot is deprecated, rename file to static-messages.pot (%s)'
@@ -439,20 +440,20 @@
         potfiles = [osp.join('i18n', 'static-messages.pot')]
     else:
         potfiles = []
-    print '-> extracting messages:',
-    print 'schema',
+    print('-> extracting messages:', end=' ')
+    print('schema', end=' ')
     schemapot = osp.join(tempdir, 'schema.pot')
     potfiles.append(schemapot)
     # explicit close necessary else the file may not be yet flushed when
     # we'll using it below
-    schemapotstream = file(schemapot, 'w')
+    schemapotstream = open(schemapot, 'w')
     generate_schema_pot(schemapotstream.write, cubedir)
     schemapotstream.close()
-    print 'TAL',
+    print('TAL', end=' ')
     tali18nfile = osp.join(tempdir, 'tali18n.py')
     ptfiles = find('.', ('.py', '.pt'), blacklist=STD_BLACKLIST+('test',))
     extract_from_tal(ptfiles, tali18nfile)
-    print 'Javascript'
+    print('Javascript')
     jsfiles =  [jsfile for jsfile in find('.', '.js')
                 if osp.basename(jsfile).startswith('cub')]
     if jsfiles:
@@ -463,7 +464,7 @@
         # no pot file created if there are no string to translate
         if osp.exists(tmppotfile):
             potfiles.append(tmppotfile)
-    print '-> creating cube-specific catalog'
+    print('-> creating cube-specific catalog')
     tmppotfile = osp.join(tempdir, 'generated.pot')
     cubefiles = find('.', '.py', blacklist=STD_BLACKLIST+('test',))
     cubefiles.append(tali18nfile)
@@ -473,20 +474,20 @@
     if osp.exists(tmppotfile): # doesn't exists of no translation string found
         potfiles.append(tmppotfile)
     potfile = osp.join(tempdir, 'cube.pot')
-    print '-> merging %i .pot files' % len(potfiles)
+    print('-> merging %i .pot files' % len(potfiles))
     cmd = ['msgcat', '-o', potfile]
     cmd.extend(potfiles)
     execute2(cmd)
     if not osp.exists(potfile):
-        print 'no message catalog for cube', cube, 'nothing to translate'
+        print('no message catalog for cube', cube, 'nothing to translate')
         # cleanup
         rm(tempdir)
         return ()
-    print '-> merging main pot file with existing translations:',
+    print('-> merging main pot file with existing translations:', end=' ')
     chdir('i18n')
     toedit = []
     for lang in CubicWebNoAppConfiguration.cw_languages():
-        print lang,
+        print(lang, end=' ')
         cubepo = '%s.po' % lang
         if not osp.exists(cubepo):
             shutil.copy(potfile, cubepo)
@@ -496,7 +497,7 @@
             ensure_fs_mode(cubepo)
             shutil.move('%snew' % cubepo, cubepo)
         toedit.append(osp.abspath(cubepo))
-    print
+    print()
     # cleanup
     rm(tempdir)
     return toedit
@@ -620,7 +621,7 @@
                     " Please specify it using the --directory option")
             cubesdir = cubespath[0]
         if not osp.isdir(cubesdir):
-            print "-> creating cubes directory", cubesdir
+            print("-> creating cubes directory", cubesdir)
             try:
                 mkdir(cubesdir)
             except OSError as err:
@@ -649,7 +650,8 @@
         if verbose:
             longdesc = raw_input(
                 'Enter a long description (leave empty to reuse the short one): ')
-        dependencies = {'cubicweb': '>= %s' % cubicwebversion}
+        dependencies = {'cubicweb': '>= %s' % cubicwebversion,
+                        'six': '>= 1.4.0',}
         if verbose:
             dependencies.update(self._ask_for_dependencies())
         context = {'cubename' : cubename,
@@ -710,7 +712,7 @@
         requests = {}
         for filepath in args:
             try:
-                stream = file(filepath)
+                stream = open(filepath)
             except OSError as ex:
                 raise BadCommandUsage("can't open rql log file %s: %s"
                                       % (filepath, ex))
@@ -738,10 +740,10 @@
         stat.sort()
         stat.reverse()
         total_time = sum(clocktime for clocktime, cputime, occ, rql in stat) * 0.01
-        print 'Percentage;Cumulative Time (clock);Cumulative Time (CPU);Occurences;Query'
+        print('Percentage;Cumulative Time (clock);Cumulative Time (CPU);Occurences;Query')
         for clocktime, cputime, occ, rql in stat:
-            print '%.2f;%.2f;%.2f;%s;%s' % (clocktime/total_time, clocktime,
-                                            cputime, occ, rql)
+            print('%.2f;%.2f;%.2f;%s;%s' % (clocktime/total_time, clocktime,
+                                            cputime, occ, rql))
 
 
 class GenerateSchema(Command):
--- a/devtools/fake.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/devtools/fake.py	Mon Oct 12 10:53:35 2015 +0200
@@ -22,6 +22,8 @@
 
 from contextlib import contextmanager
 
+from six import string_types
+
 from logilab.database import get_db_helper
 
 from cubicweb.req import RequestSessionBase
@@ -91,7 +93,7 @@
 
     def set_request_header(self, header, value, raw=False):
         """set an incoming HTTP header (for test purpose only)"""
-        if isinstance(value, basestring):
+        if isinstance(value, string_types):
             value = [value]
         if raw:
             # adding encoded header is important, else page content
--- a/devtools/fill.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/devtools/fill.py	Mon Oct 12 10:53:35 2015 +0200
@@ -17,6 +17,7 @@
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """This modules defines func / methods for creating test repositories"""
+from __future__ import print_function
 
 __docformat__ = "restructuredtext en"
 
@@ -26,6 +27,8 @@
 from datetime import datetime, date, time, timedelta
 from decimal import Decimal
 
+from six.moves import range
+
 from logilab.common import attrdict
 from logilab.mtconverter import xml_escape
 from yams.constraints import (SizeConstraint, StaticVocabularyConstraint,
@@ -286,7 +289,7 @@
                         returns acceptable values for this attribute
     """
     queries = []
-    for index in xrange(entity_num):
+    for index in range(entity_num):
         restrictions = []
         args = {}
         for attrname, value in make_entity(etype, schema, vreg, index, choice_func).items():
@@ -363,7 +366,7 @@
             rql += ', %s is %s' % (selectvar, objtype)
         rset = cnx.execute(rql)
     except Exception:
-        print "could restrict eid_list with given constraints (%r)" % constraints
+        print("could restrict eid_list with given constraints (%r)" % constraints)
         return []
     return set(eid for eid, in rset.rows)
 
@@ -508,8 +511,8 @@
                     break
         else:
             # FIXME: 20 should be read from config
-            subjeidsiter = [choice(tuple(subjeids)) for i in xrange(min(len(subjeids), 20))]
-            objeidsiter = [choice(tuple(objeids)) for i in xrange(min(len(objeids), 20))]
+            subjeidsiter = [choice(tuple(subjeids)) for i in range(min(len(subjeids), 20))]
+            objeidsiter = [choice(tuple(objeids)) for i in range(min(len(objeids), 20))]
             for subjeid, objeid in zip(subjeidsiter, objeidsiter):
                 if subjeid != objeid and not (subjeid, objeid) in used:
                     used.add( (subjeid, objeid) )
--- a/devtools/htmlparser.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/devtools/htmlparser.py	Mon Oct 12 10:53:35 2015 +0200
@@ -20,7 +20,7 @@
 import re
 import sys
 from xml import sax
-from cStringIO import StringIO
+from io import BytesIO
 
 from lxml import etree
 
@@ -164,10 +164,10 @@
 
     def _parse(self, data):
         inpsrc = sax.InputSource()
-        inpsrc.setByteStream(StringIO(data))
+        inpsrc.setByteStream(BytesIO(data))
         try:
             self._parser.parse(inpsrc)
-        except sax.SAXParseException, exc:
+        except sax.SAXParseException as exc:
             new_exc = AssertionError(u'invalid document: %s' % exc)
             new_exc.position = (exc._linenum, exc._colnum)
             raise new_exc
--- a/devtools/httptest.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/devtools/httptest.py	Mon Oct 12 10:53:35 2015 +0200
@@ -18,13 +18,16 @@
 """this module contains base classes and utilities for integration with running
 http server
 """
+from __future__ import print_function
+
 __docformat__ = "restructuredtext en"
 
 import random
 import threading
 import socket
-import httplib
-from urlparse import urlparse
+
+from six.moves import range, http_client
+from six.moves.urllib.parse import urlparse
 
 from twisted.internet import reactor, error
 
@@ -110,8 +113,8 @@
         #pre init utils connection
         parseurl = urlparse(self.config['base-url'])
         assert parseurl.port == self.config['port'], (self.config['base-url'], self.config['port'])
-        self._web_test_cnx = httplib.HTTPConnection(parseurl.hostname,
-                                                    parseurl.port)
+        self._web_test_cnx = http_client.HTTPConnection(parseurl.hostname,
+                                                        parseurl.port)
         self._ident_cookie = None
 
     def stop_server(self, timeout=15):
@@ -139,7 +142,7 @@
             passwd = user
         response = self.web_get("login?__login=%s&__password=%s" %
                                 (user, passwd))
-        assert response.status == httplib.SEE_OTHER, response.status
+        assert response.status == http_client.SEE_OTHER, response.status
         self._ident_cookie = response.getheader('Set-Cookie')
         assert self._ident_cookie
         return True
@@ -151,7 +154,7 @@
         self._ident_cookie = None
 
     def web_request(self, path='', method='GET', body=None, headers=None):
-        """Return an httplib.HTTPResponse object for the specified path
+        """Return an http_client.HTTPResponse object for the specified path
 
         Use stored credentials if available.
         """
@@ -178,5 +181,5 @@
             self.stop_server()
         except error.ReactorNotRunning as err:
             # Server could be launched manually
-            print err
+            print(err)
         super(CubicWebServerTC, self).tearDown()
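
httplib and urlparse were renamed in Python 3; six.moves.http_client and six.moves.urllib.parse paper over the rename, as the httptest.py hunks above do. A hedged standalone sketch (check_anonymous and base_url are illustrative):

    from six.moves import http_client
    from six.moves.urllib.parse import urlparse

    def check_anonymous(base_url):
        parsed = urlparse(base_url)
        cnx = http_client.HTTPConnection(parsed.hostname, parsed.port)
        cnx.request('GET', parsed.path or '/')
        response = cnx.getresponse()
        # same status constants as the old httplib module
        assert response.status == http_client.OK, response.status
        return response.read()
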
--- a/devtools/instrument.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/devtools/instrument.py	Mon Oct 12 10:53:35 2015 +0200
@@ -14,6 +14,7 @@
 # You should have received a copy of the GNU Lesser General Public License along
 # with this program. If not, see <http://www.gnu.org/licenses/>.
 """Instrumentation utilities"""
+from __future__ import print_function
 
 import os
 
@@ -45,10 +46,10 @@
         return _COLORS[key]
 
 def warn(msg, *args):
-    print 'WARNING: %s' % (msg % args)
+    print('WARNING: %s' % (msg % args))
 
 def info(msg):
-    print 'INFO: ' + msg
+    print('INFO: ' + msg)
 
 
 class PropagationAnalyzer(object):
--- a/devtools/qunit.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/devtools/qunit.py	Mon Oct 12 10:53:35 2015 +0200
@@ -19,11 +19,12 @@
 import os, os.path as osp
 from tempfile import mkdtemp, NamedTemporaryFile, TemporaryFile
 import tempfile
-from Queue import Queue, Empty
 from subprocess import Popen, check_call, CalledProcessError
 from shutil import rmtree, copy as copyfile
 from uuid import uuid4
 
+from six.moves.queue import Queue, Empty
+
 # imported by default to simplify further import statements
 from logilab.common.testlib import unittest_main, with_tempdir, InnerTest, Tags
 from logilab.common.shellutils import getlogin
@@ -122,25 +123,20 @@
 
     def test_javascripts(self):
         for args in self.all_js_tests:
+            self.assertIn(len(args), (1, 2))
             test_file = self.abspath(args[0])
             if len(args) > 1:
                 depends   = [self.abspath(dep) for dep in args[1]]
             else:
                 depends = ()
-            if len(args) > 2:
-                data   = [self.abspath(data) for data in args[2]]
-            else:
-                data = ()
-            for js_test in self._test_qunit(test_file, depends, data):
+            for js_test in self._test_qunit(test_file, depends):
                 yield js_test
 
     @with_tempdir
-    def _test_qunit(self, test_file, depends=(), data_files=(), timeout=10):
+    def _test_qunit(self, test_file, depends=(), timeout=10):
         assert osp.exists(test_file), test_file
         for dep in depends:
             assert osp.exists(dep), dep
-        for data in data_files:
-            assert osp.exists(data), data
 
         QUnitView.test_file = test_file
         QUnitView.depends = depends
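
Queue is another stdlib module renamed in Python 3 (queue); six.moves.queue gives a single import line for both versions, as used above. A small sketch around the same import (drain_results is an illustrative name):

    from six.moves.queue import Queue, Empty

    def drain_results(q, timeout=5):
        # collect produced results; stop once the queue stays empty for timeout seconds
        results = []
        while True:
            try:
                results.append(q.get(timeout=timeout))
            except Empty:
                return results
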
--- a/devtools/repotest.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/devtools/repotest.py	Mon Oct 12 10:53:35 2015 +0200
@@ -19,6 +19,7 @@
 
 This module contains functions to initialize a new repository.
 """
+from __future__ import print_function
 
 __docformat__ = "restructuredtext en"
 
@@ -66,7 +67,7 @@
         self.assertEqual(len(step[-1]), len(expected[-1]),
                           'got %s child steps, expected %s' % (len(step[-1]), len(expected[-1])))
     except AssertionError:
-        print 'error on step ',
+        print('error on step ', end=' ')
         pprint(step[:-1])
         raise
     children = step[-1]
--- a/devtools/stresstester.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/devtools/stresstester.py	Mon Oct 12 10:53:35 2015 +0200
@@ -41,6 +41,7 @@
 Copyright (c) 2003-2011 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
 http://www.logilab.fr/ -- mailto:contact@logilab.fr
 """
+from __future__ import print_function
 
 import os
 import sys
@@ -84,7 +85,7 @@
 
 def usage(status=0):
     """print usage string and exit"""
-    print __doc__ % basename(sys.argv[0])
+    print(__doc__ % basename(sys.argv[0]))
     sys.exit(status)
 
 
@@ -133,7 +134,7 @@
                                                            'nb-times=', 'nb-threads=',
                                                            'profile', 'report-output=',])
     except Exception as ex:
-        print ex
+        print(ex)
         usage(1)
     repeat = 100
     threads = 1
@@ -155,7 +156,7 @@
         elif opt in ('-P', '--profile'):
             prof_file = val
         elif opt in ('-o', '--report-output'):
-            report_output = file(val, 'w')
+            report_output = open(val, 'w')
     if len(args) != 2:
         usage(1)
     queries =  [query for query in lines(args[1]) if not query.startswith('#')]
@@ -166,7 +167,7 @@
     from cubicweb.cwconfig import instance_configuration
     config = instance_configuration(args[0])
     # get local access to the repository
-    print "Creating repo", prof_file
+    print("Creating repo", prof_file)
     repo = Repository(config, prof_file)
     cnxid = repo.connect(user, password=password)
     # connection to the CubicWeb repository
--- a/devtools/test/unittest_dbfill.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/devtools/test/unittest_dbfill.py	Mon Oct 12 10:53:35 2015 +0200
@@ -22,6 +22,8 @@
 import re
 import datetime
 
+from six.moves import range
+
 from logilab.common.testlib import TestCase, unittest_main
 
 from cubicweb.devtools.fill import ValueGenerator, make_tel
@@ -50,7 +52,7 @@
             return None
 
     def _available_Person_firstname(self, etype, attrname):
-        return [f.strip() for f in file(osp.join(DATADIR, 'firstnames.txt'))]
+        return [f.strip() for f in open(osp.join(DATADIR, 'firstnames.txt'))]
 
     def setUp(self):
         config = ApptestConfiguration('data', apphome=DATADIR)
@@ -86,7 +88,7 @@
         # Test for random index
         for index in range(5):
             cost_value = self.bug_valgen.generate_attribute_value({}, 'cost', index)
-            self.assertIn(cost_value, range(index+1))
+            self.assertIn(cost_value, list(range(index+1)))
 
     def test_date(self):
         """test date generation"""
--- a/devtools/test/unittest_httptest.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/devtools/test/unittest_httptest.py	Mon Oct 12 10:53:35 2015 +0200
@@ -17,7 +17,7 @@
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """unittest for cubicweb.devtools.httptest module"""
 
-import httplib
+from six.moves import http_client
 
 from logilab.common.testlib import Tags
 from cubicweb.devtools.httptest import CubicWebServerTC
@@ -28,12 +28,12 @@
     def test_response(self):
         try:
             response = self.web_get()
-        except httplib.NotConnected as ex:
+        except http_client.NotConnected as ex:
             self.fail("Can't connection to test server: %s" % ex)
 
     def test_response_anon(self):
         response = self.web_get()
-        self.assertEqual(response.status, httplib.OK)
+        self.assertEqual(response.status, http_client.OK)
 
     def test_base_url(self):
         if self.config['base-url'] not in self.web_get().read():
@@ -47,20 +47,20 @@
 
     def test_response_denied(self):
         response = self.web_get()
-        self.assertEqual(response.status, httplib.FORBIDDEN)
+        self.assertEqual(response.status, http_client.FORBIDDEN)
 
     def test_login(self):
         response = self.web_get()
-        if response.status != httplib.FORBIDDEN:
+        if response.status != http_client.FORBIDDEN:
             self.skipTest('Already authenticated, "test_response_denied" must have failed')
         # login
         self.web_login(self.admlogin, self.admpassword)
         response = self.web_get()
-        self.assertEqual(response.status, httplib.OK, response.body)
+        self.assertEqual(response.status, http_client.OK, response.body)
         # logout
         self.web_logout()
         response = self.web_get()
-        self.assertEqual(response.status, httplib.FORBIDDEN, response.body)
+        self.assertEqual(response.status, http_client.FORBIDDEN, response.body)
 
 
 
--- a/devtools/test/unittest_testlib.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/devtools/test/unittest_testlib.py	Mon Oct 12 10:53:35 2015 +0200
@@ -17,9 +17,10 @@
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """unittests for cw.devtools.testlib module"""
 
-from cStringIO import StringIO
+from io import BytesIO, StringIO
+from unittest import TextTestRunner
 
-from unittest import TextTestRunner
+from six import PY2
 
 from logilab.common.testlib import TestSuite, TestCase, unittest_main
 from logilab.common.registry import yes
@@ -33,7 +34,7 @@
         class entity:
             cw_etype = 'Entity'
             eid = 0
-        sio = StringIO('hop\n')
+        sio = BytesIO(b'hop\n')
         form = CubicWebTC.fake_form('import',
                                     {'file': ('filename.txt', sio),
                                      'encoding': u'utf-8',
@@ -51,7 +52,7 @@
 class WebTestTC(TestCase):
 
     def setUp(self):
-        output = StringIO()
+        output = BytesIO() if PY2 else StringIO()
         self.runner = TextTestRunner(stream=output)
 
     def test_error_raised(self):
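
unittest's TextTestRunner writes native str, so the capture stream must be BytesIO on Python 2 and StringIO on Python 3, which is what the six.PY2 test above selects. A sketch of the same setup (run_silently is illustrative):

    from io import BytesIO, StringIO
    from unittest import TestLoader, TextTestRunner
    from six import PY2

    def run_silently(test_case_class):
        # a stream of the native 'str' type for whichever Python is running
        output = BytesIO() if PY2 else StringIO()
        suite = TestLoader().loadTestsFromTestCase(test_case_class)
        result = TextTestRunner(stream=output).run(suite)
        return result, output.getvalue()
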
--- a/devtools/test/unittest_webtest.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/devtools/test/unittest_webtest.py	Mon Oct 12 10:53:35 2015 +0200
@@ -1,4 +1,4 @@
-import httplib
+from six.moves import http_client
 
 from logilab.common.testlib import Tags
 from cubicweb.devtools.webtest import CubicWebTestTC
@@ -21,19 +21,19 @@
 
     def test_reponse_denied(self):
         res = self.webapp.get('/', expect_errors=True)
-        self.assertEqual(httplib.FORBIDDEN, res.status_int)
+        self.assertEqual(http_client.FORBIDDEN, res.status_int)
 
     def test_login(self):
         res = self.webapp.get('/', expect_errors=True)
-        self.assertEqual(httplib.FORBIDDEN, res.status_int)
+        self.assertEqual(http_client.FORBIDDEN, res.status_int)
 
         self.login(self.admlogin, self.admpassword)
         res = self.webapp.get('/')
-        self.assertEqual(httplib.OK, res.status_int)
+        self.assertEqual(http_client.OK, res.status_int)
 
         self.logout()
         res = self.webapp.get('/', expect_errors=True)
-        self.assertEqual(httplib.FORBIDDEN, res.status_int)
+        self.assertEqual(http_client.FORBIDDEN, res.status_int)
 
 
 if __name__ == '__main__':
--- a/devtools/testlib.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/devtools/testlib.py	Mon Oct 12 10:53:35 2015 +0200
@@ -16,19 +16,22 @@
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """this module contains base classes and utilities for cubicweb tests"""
+from __future__ import print_function
+
 __docformat__ = "restructuredtext en"
 
 import sys
 import re
-import urlparse
 from os.path import dirname, join, abspath
-from urllib import unquote
 from math import log
 from contextlib import contextmanager
 from warnings import warn
-from types import NoneType
 from itertools import chain
 
+from six import string_types
+from six.moves import range
+from six.moves.urllib.parse import urlparse, parse_qs, unquote as urlunquote
+
 import yams.schema
 
 from logilab.common.testlib import TestCase, InnerTest, Tags
@@ -60,7 +63,7 @@
     def do_view(self, arg):
         import webbrowser
         data = self._getval(arg)
-        with file('/tmp/toto.html', 'w') as toto:
+        with open('/tmp/toto.html', 'w') as toto:
             toto.write(data)
         webbrowser.open('file:///tmp/toto.html')
 
@@ -313,7 +316,6 @@
         login = unicode(db_handler.config.default_admin_config['login'])
         self.admin_access = self.new_access(login)
         self._admin_session = self.admin_access._session
-        self.config.repository = lambda x=None: self.repo
 
 
     # config management ########################################################
@@ -520,7 +522,7 @@
         """
         torestore = []
         for erschema, etypeperms in chain(perm_overrides, perm_kwoverrides.iteritems()):
-            if isinstance(erschema, basestring):
+            if isinstance(erschema, string_types):
                 erschema = self.schema[erschema]
             for action, actionperms in etypeperms.iteritems():
                 origperms = erschema.permissions[action]
@@ -738,8 +740,8 @@
         req = self.request(url=url)
         if isinstance(url, unicode):
             url = url.encode(req.encoding) # req.setup_params() expects encoded strings
-        querystring = urlparse.urlparse(url)[-2]
-        params = urlparse.parse_qs(querystring)
+        querystring = urlparse(url)[-2]
+        params = parse_qs(querystring)
         req.setup_params(params)
         return req
 
@@ -752,8 +754,8 @@
         with self.admin_access.web_request(url=url) as req:
             if isinstance(url, unicode):
                 url = url.encode(req.encoding) # req.setup_params() expects encoded strings
-            querystring = urlparse.urlparse(url)[-2]
-            params = urlparse.parse_qs(querystring)
+            querystring = urlparse(url)[-2]
+            params = parse_qs(querystring)
             req.setup_params(params)
             yield req
 
@@ -792,7 +794,7 @@
             path = location
             params = {}
         else:
-            cleanup = lambda p: (p[0], unquote(p[1]))
+            cleanup = lambda p: (p[0], urlunquote(p[1]))
             params = dict(cleanup(p.split('=', 1)) for p in params.split('&') if p)
         if path.startswith(req.base_url()): # may be relative
             path = path[len(req.base_url()):]
@@ -908,8 +910,11 @@
         view = viewsreg.select(vid, req, rset=rset, **kwargs)
         # set explicit test description
         if rset is not None:
+            # coerce to "bytes" on py2 because the description will be sent to
+            # sys.stdout/stderr which takes "bytes" on py2 and "unicode" on py3
+            rql = str(rset.printable_rql())
             self.set_description("testing vid=%s defined in %s with (%s)" % (
-                vid, view.__module__, rset.printable_rql()))
+                vid, view.__module__, rql))
         else:
             self.set_description("testing vid=%s defined in %s without rset" % (
                 vid, view.__module__))
@@ -941,7 +946,9 @@
                 msg = '[%s in %s] %s' % (klass, view.__regid__, exc)
             except Exception:
                 msg = '[%s in %s] undisplayable exception' % (klass, view.__regid__)
-            raise AssertionError, msg, tcbk
+            exc = AssertionError(msg)
+            exc.__traceback__ = tcbk
+            raise exc
         return self._check_html(output, view, template)
 
     def get_validator(self, view=None, content_type=None, output=None):
@@ -1016,7 +1023,9 @@
                                          for idx, line in enumerate(content)
                                          if line_context_filter(idx+1, position))
                     msg += u'\nfor content:\n%s' % content
-            raise AssertionError, msg, tcbk
+            exc = AssertionError(msg)
+            exc.__traceback__ = tcbk
+            raise exc
 
     def assertDocTestFile(self, testfile):
         # doctest returns tuple (failure_count, test_count)
@@ -1167,7 +1176,7 @@
                 cnx.execute(rql, args)
             except ValidationError as ex:
                 # failed to satisfy some constraint
-                print 'error in automatic db population', ex
+                print('error in automatic db population', ex)
                 cnx.commit_state = None # reset uncommitable flag
         self.post_populate(cnx)
 
@@ -1180,7 +1189,7 @@
                 else:
                     rql = 'Any X WHERE X is %s' % etype
                 rset = req.execute(rql)
-                for row in xrange(len(rset)):
+                for row in range(len(rset)):
                     if limit and row > limit:
                         break
                     # XXX iirk
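
The "exc.__traceback__ = tcbk; raise exc" idiom above replaces the py2-only "raise exc, msg, tcbk" statement. Python 2 does not use a manually attached __traceback__ when re-raising, so if the original traceback must survive on both versions, six.reraise is the usual helper. A hedged sketch (wrap_current_exception is illustrative):

    import sys
    from six import reraise

    def wrap_current_exception(msg):
        # re-raise the currently handled exception as an AssertionError,
        # keeping its traceback on py2 and py3
        tcbk = sys.exc_info()[2]
        reraise(AssertionError, AssertionError(msg), tcbk)
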
--- a/doc/tools/mode_plan.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/doc/tools/mode_plan.py	Mon Oct 12 10:53:35 2015 +0200
@@ -23,17 +23,19 @@
 rename A010-joe.en.txt to A030-joe.en.txt
 accept [y/N]?
 """
+from __future__ import print_function
+
 
 def ren(a,b):
     names = glob.glob('%s*'%a)
     for name in names :
-        print 'rename %s to %s' % (name, name.replace(a,b))
+        print('rename %s to %s' % (name, name.replace(a,b)))
     if raw_input('accept [y/N]?').lower() =='y':
         for name in names:
             os.system('hg mv %s %s' % (name, name.replace(a,b)))
 
 
-def ls(): print '\n'.join(sorted(os.listdir('.')))
+def ls(): print('\n'.join(sorted(os.listdir('.'))))
 
 def move():
     filenames = []
@@ -47,4 +49,4 @@
 
     for num, name in filenames:
         if num >= start:
-            print 'hg mv %s %2i%s' %(name,num+1,name[2:])
+            print('hg mv %s %2i%s' %(name,num+1,name[2:]))
--- a/entities/__init__.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/entities/__init__.py	Mon Oct 12 10:53:35 2015 +0200
@@ -19,6 +19,7 @@
 
 __docformat__ = "restructuredtext en"
 
+from six import string_types
 
 from logilab.common.decorators import classproperty
 
@@ -134,7 +135,7 @@
             return self.dc_title().lower()
         value = self.cw_attr_value(rtype)
         # do not restrict to `unicode` because Bytes will return a `str` value
-        if isinstance(value, basestring):
+        if isinstance(value, string_types):
             return self.printable_value(rtype, format='text/plain').lower()
         return value
 
--- a/entities/authobjs.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/entities/authobjs.py	Mon Oct 12 10:53:35 2015 +0200
@@ -19,6 +19,8 @@
 
 __docformat__ = "restructuredtext en"
 
+from six import string_types
+
 from logilab.common.decorators import cached
 
 from cubicweb import Unauthorized
@@ -126,7 +128,7 @@
         :type groups: str or iterable(str)
         :param groups: a group name or an iterable on group names
         """
-        if isinstance(groups, basestring):
+        if isinstance(groups, string_types):
             groups = frozenset((groups,))
         elif isinstance(groups, (tuple, list)):
             groups = frozenset(groups)
--- a/entities/lib.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/entities/lib.py	Mon Oct 12 10:53:35 2015 +0200
@@ -19,9 +19,10 @@
 
 __docformat__ = "restructuredtext en"
 from warnings import warn
+from datetime import datetime
 
-from urlparse import urlsplit, urlunsplit
-from datetime import datetime
+from six.moves import range
+from six.moves.urllib.parse import urlsplit, urlunsplit
 
 from logilab.mtconverter import xml_escape
 
@@ -67,7 +68,7 @@
                                 {'y': self.eid})
         if skipeids is None:
             skipeids = set()
-        for i in xrange(len(rset)):
+        for i in range(len(rset)):
             eid = rset[i][0]
             if eid in skipeids:
                 continue
--- a/entities/wfobjs.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/entities/wfobjs.py	Mon Oct 12 10:53:35 2015 +0200
@@ -21,9 +21,11 @@
 * workflow history (TrInfo)
 * adapter for workflowable entities (IWorkflowableAdapter)
 """
+from __future__ import print_function
 
 __docformat__ = "restructuredtext en"
 
+from six import string_types
 
 from logilab.common.decorators import cached, clear_cache
 from logilab.common.deprecation import deprecated
@@ -224,19 +226,19 @@
             matches = user.matching_groups(groups)
             if matches:
                 if DBG:
-                    print 'may_be_fired: %r may fire: user matches %s' % (self.name, groups)
+                    print('may_be_fired: %r may fire: user matches %s' % (self.name, groups))
                 return matches
             if 'owners' in groups and user.owns(eid):
                 if DBG:
-                    print 'may_be_fired: %r may fire: user is owner' % self.name
+                    print('may_be_fired: %r may fire: user is owner' % self.name)
                 return True
         # check one of the rql expression conditions matches if any
         if self.condition:
             if DBG:
-                print ('my_be_fired: %r: %s' %
-                       (self.name, [(rqlexpr.expression,
+                print('may_be_fired: %r: %s' %
+                      (self.name, [(rqlexpr.expression,
                                     rqlexpr.check_expression(self._cw, eid))
-                                   for rqlexpr in self.condition]))
+                                    for rqlexpr in self.condition]))
             for rqlexpr in self.condition:
                 if rqlexpr.check_expression(self._cw, eid):
                     return True
@@ -258,10 +260,10 @@
                                     'WHERE T eid %(x)s, G name %(gn)s',
                                     {'x': self.eid, 'gn': unicode(gname)})
             assert rset, '%s is not a known group' % gname
-        if isinstance(conditions, basestring):
+        if isinstance(conditions, string_types):
             conditions = (conditions,)
         for expr in conditions:
-            if isinstance(expr, basestring):
+            if isinstance(expr, string_types):
                 kwargs = {'expr': unicode(expr)}
             else:
                 assert isinstance(expr, dict)
@@ -528,7 +530,7 @@
 
     def _get_transition(self, tr):
         assert self.current_workflow
-        if isinstance(tr, basestring):
+        if isinstance(tr, string_types):
             _tr = self.current_workflow.transition_by_name(tr)
             assert _tr is not None, 'not a %s transition: %s' % (
                 self.__regid__, tr)
--- a/entity.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/entity.py	Mon Oct 12 10:53:35 2015 +0200
@@ -22,6 +22,9 @@
 from warnings import warn
 from functools import partial
 
+from six import string_types, integer_types
+from six.moves import range
+
 from logilab.common.decorators import cached
 from logilab.common.deprecation import deprecated
 from logilab.common.registry import yes
@@ -279,7 +282,7 @@
             select = Select()
             mainvar = select.get_variable(mainvar)
             select.add_selected(mainvar)
-        elif isinstance(mainvar, basestring):
+        elif isinstance(mainvar, string_types):
             assert mainvar in select.defined_vars
             mainvar = select.get_variable(mainvar)
         # eases string -> syntax tree test transition: please remove once stable
@@ -545,12 +548,12 @@
         raise NotImplementedError('comparison not implemented for %s' % self.__class__)
 
     def __eq__(self, other):
-        if isinstance(self.eid, (int, long)):
+        if isinstance(self.eid, integer_types):
             return self.eid == other.eid
         return self is other
 
     def __hash__(self):
-        if isinstance(self.eid, (int, long)):
+        if isinstance(self.eid, integer_types):
             return self.eid
         return super(Entity, self).__hash__()
 
@@ -695,7 +698,7 @@
         attr = str(attr)
         if value is _marker:
             value = getattr(self, attr)
-        if isinstance(value, basestring):
+        if isinstance(value, string_types):
             value = value.strip()
         if value is None or value == '': # don't use "not", 0 is an acceptable value
             return u''
@@ -892,10 +895,10 @@
                 raise Exception('unable to fetch attributes for entity with eid %s'
                                 % self.eid)
             # handle attributes
-            for i in xrange(1, lastattr):
+            for i in range(1, lastattr):
                 self.cw_attr_cache[str(selected[i-1][0])] = rset[i]
             # handle relations
-            for i in xrange(lastattr, len(rset)):
+            for i in range(lastattr, len(rset)):
                 rtype, role = selected[i-1][0]
                 value = rset[i]
                 if value is None:
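
Python 3 merges int and long, so the eid checks above move to six.integer_types. A short sketch with a hypothetical helper name:

    from six import integer_types   # (int, long) on py2, (int,) on py3

    def has_integer_eid(entity):
        # matches both int and long eids on Python 2, plain int on Python 3
        return isinstance(entity.eid, integer_types)
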
--- a/etwist/server.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/etwist/server.py	Mon Oct 12 10:53:35 2015 +0200
@@ -22,8 +22,10 @@
 import select
 import traceback
 import threading
-from urlparse import urlsplit, urlunsplit
 from cgi import FieldStorage, parse_header
+
+from six.moves.urllib.parse import urlsplit, urlunsplit
+
 from cubicweb.statsd_logger import statsd_timeit
 
 from twisted.internet import reactor, task, threads
--- a/etwist/service.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/etwist/service.py	Mon Oct 12 10:53:35 2015 +0200
@@ -15,6 +15,8 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+from __future__ import print_function
+
 import os
 import sys
 
@@ -22,7 +24,7 @@
     import win32serviceutil
     import win32service
 except ImportError:
-    print 'Win32 extensions for Python are likely not installed.'
+    print('Win32 extensions for Python are likely not installed.')
     sys.exit(3)
 
 from os.path import join
--- a/ext/rest.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/ext/rest.py	Mon Oct 12 10:53:35 2015 +0200
@@ -37,7 +37,8 @@
 from itertools import chain
 from logging import getLogger
 from os.path import join
-from urlparse import urlsplit
+
+from six.moves.urllib.parse import urlsplit
 
 from docutils import statemachine, nodes, utils, io
 from docutils.core import Publisher
@@ -168,7 +169,7 @@
         rql = params['rql']
         if vid is None:
             vid = params.get('vid')
-    except (ValueError, KeyError), exc:
+    except (ValueError, KeyError) as exc:
         msg = inliner.reporter.error('Could not parse bookmark path %s [%s].'
                                      % (bookmark.path, exc), line=lineno)
         prb = inliner.problematic(rawtext, rawtext, msg)
@@ -182,7 +183,7 @@
             vid = 'noresult'
         view = _cw.vreg['views'].select(vid, _cw, rset=rset)
         content = view.render()
-    except Exception, exc:
+    except Exception as exc:
         content = 'An error occurred while interpreting directive bookmark: %r' % exc
     set_classes(options)
     return [nodes.raw('', content, format='html')], []
--- a/ext/tal.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/ext/tal.py	Mon Oct 12 10:53:35 2015 +0200
@@ -184,7 +184,10 @@
             interpreter.execute(self)
         except UnicodeError as unierror:
             LOGGER.exception(str(unierror))
-            raise simpleTALES.ContextContentException("found non-unicode %r string in Context!" % unierror.args[1]), None, sys.exc_info()[-1]
+            exc = simpleTALES.ContextContentException(
+                "found non-unicode %r string in Context!" % unierror.args[1])
+            exc.__traceback__ = sys.exc_info()[-1]
+            raise exc
 
 
 def compile_template(template):
@@ -203,7 +206,7 @@
     :type filepath: str
     :param template: path of the file to compile
     """
-    fp = file(filepath)
+    fp = open(filepath)
     file_content = unicode(fp.read()) # template file should be pure ASCII
     fp.close()
     return compile_template(file_content)
@@ -232,7 +235,8 @@
         result = eval(expr, globals, locals)
     except Exception as ex:
         ex = ex.__class__('in %r: %s' % (expr, ex))
-        raise ex, None, sys.exc_info()[-1]
+        ex.__traceback__ = sys.exc_info()[-1]
+        raise ex
     if (isinstance (result, simpleTALES.ContextVariable)):
         return result.value()
     return result
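
ext/tal.py now opens the template with open() and decodes via unicode(); an equivalent that is explicit about the encoding on both versions is io.open. A hedged sketch, assuming ASCII template files as the original comment states (read_template is illustrative):

    import io

    def read_template(filepath):
        # io.open returns unicode text on py2 and py3 alike
        with io.open(filepath, encoding='ascii') as fobj:
            return fobj.read()
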
--- a/gettext.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/gettext.py	Mon Oct 12 10:53:35 2015 +0200
@@ -103,11 +103,10 @@
     try:
         danger = [x for x in tokens if x[0] == token.NAME and x[1] != 'n']
     except tokenize.TokenError:
-        raise ValueError, \
-              'plural forms expression error, maybe unbalanced parenthesis'
+        raise ValueError('plural forms expression error, maybe unbalanced parenthesis')
     else:
         if danger:
-            raise ValueError, 'plural forms expression could be dangerous'
+            raise ValueError('plural forms expression could be dangerous')
 
     # Replace some C operators by their Python equivalents
     plural = plural.replace('&&', ' and ')
@@ -133,7 +132,7 @@
                 # Actually, we never reach this code, because unbalanced
                 # parentheses get caught in the security check at the
                 # beginning.
-                raise ValueError, 'unbalanced parenthesis in plural form'
+                raise ValueError('unbalanced parenthesis in plural form')
             s = expr.sub(repl, stack.pop())
             stack[-1] += '(%s)' % s
         else:
--- a/i18n.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/i18n.py	Mon Oct 12 10:53:35 2015 +0200
@@ -16,6 +16,7 @@
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """Some i18n/gettext utilities."""
+from __future__ import print_function
 
 __docformat__ = "restructuredtext en"
 
@@ -80,7 +81,7 @@
     """
     from subprocess import CalledProcessError
     from logilab.common.fileutils import ensure_fs_mode
-    print '-> compiling message catalogs to %s' % destdir
+    print('-> compiling message catalogs to %s' % destdir)
     errors = []
     for lang in langs:
         langdir = join(destdir, lang, 'LC_MESSAGES')
--- a/md5crypt.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/md5crypt.py	Mon Oct 12 10:53:35 2015 +0200
@@ -43,6 +43,9 @@
 
 from hashlib import md5 # pylint: disable=E0611
 
+from six.moves import range
+
+
 def to64 (v, n):
     ret = ''
     while (n - 1 >= 0):
@@ -62,7 +65,7 @@
     salt = salt[:8]
     ctx = pw + MAGIC + salt
     final = md5(pw + salt + pw).digest()
-    for pl in xrange(len(pw), 0, -16):
+    for pl in range(len(pw), 0, -16):
         if pl > 16:
             ctx = ctx + final[:16]
         else:
@@ -79,7 +82,7 @@
     # The following is supposed to make
     # things run slower.
     # my question: WTF???
-    for i in xrange(1000):
+    for i in range(1000):
         ctx1 = ''
         if i & 1:
             ctx1 = ctx1 + pw
--- a/migration.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/migration.py	Mon Oct 12 10:53:35 2015 +0200
@@ -16,6 +16,7 @@
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """utilities for instances migration"""
+from __future__ import print_function
 
 __docformat__ = "restructuredtext en"
 
@@ -26,6 +27,8 @@
 from os.path import exists, join, basename, splitext
 from itertools import chain
 
+from six import string_types
+
 from logilab.common import IGNORED_EXTENSIONS
 from logilab.common.decorators import cached
 from logilab.common.configuration import REQUIRED, read_old_config
@@ -49,7 +52,7 @@
     assert fromversion <= toversion, (fromversion, toversion)
     if not exists(directory):
         if not quiet:
-            print directory, "doesn't exists, no migration path"
+            print(directory, "doesn't exist, no migration path")
         return []
     if fromversion == toversion:
         return []
@@ -93,9 +96,9 @@
             stream = open(scriptpath)
             scriptcontent = stream.read()
             stream.close()
-            print
-            print scriptcontent
-            print
+            print()
+            print(scriptcontent)
+            print()
         else:
             return True
 
@@ -139,9 +142,6 @@
             raise
         raise AttributeError(name)
 
-    def repo_connect(self):
-        return self.config.repository()
-
     def migrate(self, vcconf, toupgrade, options):
         """upgrade the given set of cubes
 
@@ -243,7 +243,7 @@
         # avoid '_' to be added to builtins by sys.display_hook
         def do_not_add___to_builtins(obj):
             if obj is not None:
-                print repr(obj)
+                print(repr(obj))
         sys.displayhook = do_not_add___to_builtins
         local_ctx = self._create_context()
         try:
@@ -349,7 +349,9 @@
             else:
                 pyname = splitext(basename(migrscript))[0]
             scriptlocals['__name__'] = pyname
-            execfile(migrscript, scriptlocals)
+            with open(migrscript, 'rb') as fobj:
+                code = compile(fobj.read(), migrscript, 'exec')
+            exec(code, scriptlocals)
             if funcname is not None:
                 try:
                     func = scriptlocals[funcname]
@@ -399,7 +401,7 @@
         """modify the list of used cubes in the in-memory config
         returns newly inserted cubes, including dependencies
         """
-        if isinstance(cubes, basestring):
+        if isinstance(cubes, string_types):
             cubes = (cubes,)
         origcubes = self.config.cubes()
         newcubes = [p for p in self.config.expand_cubes(cubes)
@@ -522,9 +524,9 @@
                     elif op == None:
                         continue
                     else:
-                        print ('unable to handle %s in %s, set to `%s %s` '
-                               'but currently up to `%s %s`' %
-                               (cube, source, oper, version, op, ver))
+                        print('unable to handle %s in %s, set to `%s %s` '
+                              'but currently up to `%s %s`' %
+                              (cube, source, oper, version, op, ver))
             # "solve" constraint satisfaction problem
             if cube not in self.cubes:
                 self.errors.append( ('add', cube, version, source) )
@@ -536,4 +538,4 @@
                 elif oper is None:
                     pass # no constraint on version
                 else:
-                    print 'unknown operator', oper
+                    print('unknown operator', oper)
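
execfile() is gone in Python 3; the migration handler above compiles the script source itself and exec()s it against the script's namespace, which also keeps the file name in tracebacks. A standalone sketch of that pattern (run_script is an illustrative name):

    def run_script(path, namespace=None):
        namespace = {} if namespace is None else namespace
        with open(path, 'rb') as fobj:
            code = compile(fobj.read(), path, 'exec')   # file name preserved for tracebacks
        exec(code, namespace)    # the two-argument form works on py2 and py3
        return namespace
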
--- a/misc/cwfs/cwfs_test.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/misc/cwfs/cwfs_test.py	Mon Oct 12 10:53:35 2015 +0200
@@ -30,7 +30,7 @@
     sections = []
     buffer = ""
     in_section = False
-    for line in file(filename) :
+    for line in open(filename) :
         if line.startswith('Test::'):
             in_section = True
             buffer = ""
--- a/misc/migration/3.14.0_Any.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/misc/migration/3.14.0_Any.py	Mon Oct 12 10:53:35 2015 +0200
@@ -1,3 +1,5 @@
+from __future__ import print_function
+
 config['rql-cache-size'] = config['rql-cache-size'] * 10
 
 add_entity_type('CWDataImport')
@@ -10,4 +12,4 @@
     mainvars = guess_rrqlexpr_mainvars(expression)
     yamscstr = CONSTRAINTS[rqlcstr.type](expression, mainvars)
     rqlcstr.cw_set(value=yamscstr.serialize())
-    print 'updated', rqlcstr.type, rqlcstr.value.strip()
+    print('updated', rqlcstr.type, rqlcstr.value.strip())
--- a/misc/migration/3.15.4_Any.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/misc/migration/3.15.4_Any.py	Mon Oct 12 10:53:35 2015 +0200
@@ -1,3 +1,5 @@
+from __future__ import print_function
+
 from logilab.common.shellutils import generate_password
 from cubicweb.server.utils import crypt_password
 
@@ -5,7 +7,7 @@
     salt = user.upassword.getvalue()
     if crypt_password('', salt) == salt:
         passwd = generate_password()
-        print 'setting random password for user %s' % user.login
+        print('setting random password for user %s' % user.login)
         user.set_attributes(upassword=passwd)
 
 commit()
--- a/misc/migration/3.21.0_Any.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/misc/migration/3.21.0_Any.py	Mon Oct 12 10:53:35 2015 +0200
@@ -1,3 +1,5 @@
+from __future__ import print_function
+
 from cubicweb.schema import PURE_VIRTUAL_RTYPES
 from cubicweb.server.schema2sql import rschema_has_table
 
@@ -27,7 +29,7 @@
                 '    SELECT eid FROM entities) AS eids' % args,
                 ask_confirm=False)[0][0]
     if count:
-        print '%s references %d unknown entities, deleting' % (rschema, count)
+        print('%s references %d unknown entities, deleting' % (rschema, count))
         sql('DELETE FROM %(r)s_relation '
             'WHERE eid_from IN (SELECT eid_from FROM %(r)s_relation EXCEPT SELECT eid FROM entities)' % args)
         sql('DELETE FROM %(r)s_relation '
@@ -65,14 +67,14 @@
             broken_eids = sql('SELECT cw_eid FROM cw_%(e)s WHERE cw_%(r)s IS NULL' % args,
                               ask_confirm=False)
             if broken_eids:
-                print 'Required relation %(e)s.%(r)s missing' % args
+                print('Required relation %(e)s.%(r)s missing' % args)
                 args['eids'] = ', '.join(str(eid) for eid, in broken_eids)
                 rql('DELETE %(e)s X WHERE X eid IN (%(eids)s)' % args)
             broken_eids = sql('SELECT cw_eid FROM cw_%(e)s WHERE cw_%(r)s IN (SELECT cw_%(r)s FROM cw_%(e)s '
                               'EXCEPT SELECT eid FROM entities)' % args,
                               ask_confirm=False)
             if broken_eids:
-                print 'Required relation %(e)s.%(r)s references unknown objects, deleting subject entities' % args
+                print('Required relation %(e)s.%(r)s references unknown objects, deleting subject entities' % args)
                 args['eids'] = ', '.join(str(eid) for eid, in broken_eids)
                 rql('DELETE %(e)s X WHERE X eid IN (%(eids)s)' % args)
         else:
@@ -81,7 +83,7 @@
                    '  EXCEPT'
                    '    SELECT eid FROM entities) AS eids' % args,
                    ask_confirm=False)[0][0]:
-                print '%(e)s.%(r)s references unknown entities, deleting relation' % args
+                print('%(e)s.%(r)s references unknown entities, deleting relation' % args)
                 sql('UPDATE cw_%(e)s SET cw_%(r)s = NULL WHERE cw_%(r)s IS NOT NULL AND cw_%(r)s IN '
                     '(SELECT cw_%(r)s FROM cw_%(e)s EXCEPT SELECT eid FROM entities)' % args)
 
@@ -104,7 +106,7 @@
            '  EXCEPT'
            '    SELECT eid FROM entities) AS eids' % args,
            ask_confirm=False)[0][0]:
-        print '%(e)s has nonexistent entities, deleting' % args
+        print('%(e)s has nonexistent entities, deleting' % args)
         sql('DELETE FROM cw_%(e)s WHERE cw_eid IN '
             '(SELECT cw_eid FROM cw_%(e)s EXCEPT SELECT eid FROM entities)' % args)
     args['c'] = 'cw_%(e)s_cw_eid_fkey' % args
--- a/misc/migration/3.8.5_Any.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/misc/migration/3.8.5_Any.py	Mon Oct 12 10:53:35 2015 +0200
@@ -1,3 +1,5 @@
+from __future__ import print_function
+
 def migrate_varchar_to_nvarchar():
     dbdriver  = config.system_source_config['db-driver']
     if dbdriver != "sqlserver2005":
@@ -52,7 +54,7 @@
 
 
     for statement in generated_statements:
-        print statement
+        print(statement)
         sql(statement, ask_confirm=False)
     commit()
 
--- a/misc/migration/bootstrapmigration_repository.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/misc/migration/bootstrapmigration_repository.py	Mon Oct 12 10:53:35 2015 +0200
@@ -19,6 +19,7 @@
 
 it should only include low level schema changes
 """
+from __future__ import print_function
 
 from cubicweb import ConfigurationError
 from cubicweb.server.session import hooks_control
@@ -77,8 +78,8 @@
         sql('ALTER TABLE "entities" DROP COLUMN "mtime"')
         sql('ALTER TABLE "entities" DROP COLUMN "source"')
     except: # programming error, already migrated
-        print "Failed to drop mtime or source database columns"
-        print "'entities' table of the database has probably been already updated"
+        print("Failed to drop mtime or source database columns")
+        print("'entities' table of the database has probably been already updated")
 
     commit()
 
@@ -101,7 +102,7 @@
     driver = config.system_source_config['db-driver']
     if not (driver == 'postgres' or driver.startswith('sqlserver')):
         import sys
-        print >>sys.stderr, 'This migration is not supported for backends other than sqlserver or postgres (yet).'
+        print('This migration is not supported for backends other than sqlserver or postgres (yet).', file=sys.stderr)
         sys.exit(1)
 
     add_relation_definition('CWAttribute', 'add_permission', 'CWGroup')
@@ -196,7 +197,7 @@
                                                 (rschema.type, ','.join(subjects))))
             if martians:
                 martians = ','.join(martians)
-                print 'deleting broken relations %s for eids %s' % (rschema.type, martians)
+                print('deleting broken relations %s for eids %s' % (rschema.type, martians))
                 sql('DELETE FROM %s_relation WHERE eid_from IN (%s) OR eid_to IN (%s)' % (rschema.type, martians, martians))
             with session.deny_all_hooks_but():
                 rql('SET X %(r)s Y WHERE Y %(r)s X, NOT X %(r)s Y' % {'r': rschema.type})
@@ -219,20 +220,20 @@
     if driver == 'postgres':
         for indexname, in sql('select indexname from pg_indexes'):
             if indexname.startswith('unique_'):
-                print 'dropping index', indexname
+                print('dropping index', indexname)
                 sql('DROP INDEX %s' % indexname)
         commit()
     elif driver.startswith('sqlserver'):
         for viewname, in sql('select name from sys.views'):
             if viewname.startswith('utv_'):
-                print 'dropping view (index should be cascade-deleted)', viewname
+                print('dropping view (index should be cascade-deleted)', viewname)
                 sql('DROP VIEW %s' % viewname)
         commit()
 
     # recreate the constraints, hook will lead to low-level recreation
     for eschema in sorted(schema.entities()):
         if eschema._unique_together:
-            print 'recreate unique indexes for', eschema
+            print('recreate unique indexes for', eschema)
             rql_args = schemaserial.uniquetogether2rqls(eschema)
             for rql, args in rql_args:
                 args['x'] = eschema.eid
@@ -243,10 +244,10 @@
     for rschema in sorted(schema.relations()):
         if rschema.final:
             if rschema.type in fsschema:
-                print 'sync perms for', rschema.type
+                print('sync perms for', rschema.type)
                 sync_schema_props_perms(rschema.type, syncprops=False, ask_confirm=False, commit=False)
             else:
-                print 'WARNING: attribute %s missing from fs schema' % rschema.type
+                print('WARNING: attribute %s missing from fs schema' % rschema.type)
     commit()
 
 if applcubicwebversion < (3, 17, 0) and cubicwebversion >= (3, 17, 0):
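
With print_function in effect, writing to stderr becomes a keyword argument instead of the "print >> sys.stderr" syntax removed above. A tiny sketch (abort is illustrative):

    from __future__ import print_function
    import sys

    def abort(msg, status=1):
        print(msg, file=sys.stderr)   # replaces: print >> sys.stderr, msg
        sys.exit(status)
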
--- a/misc/migration/postcreate.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/misc/migration/postcreate.py	Mon Oct 12 10:53:35 2015 +0200
@@ -16,6 +16,7 @@
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """cubicweb post creation script, set user's workflow"""
+from __future__ import print_function
 
 # insert versions
 create_entity('CWProperty', pkey=u'system.version.cubicweb',
@@ -46,8 +47,8 @@
 if hasattr(config, 'anonymous_user'):
     anonlogin, anonpwd = config.anonymous_user()
     if anonlogin == session.user.login:
-        print 'you are using a manager account as anonymous user.'
-        print 'Hopefully this is not a production instance...'
+        print('you are using a manager account as anonymous user.')
+        print('Hopefully this is not a production instance...')
     elif anonlogin:
         from cubicweb.server import create_user
         create_user(session, unicode(anonlogin), anonpwd, u'guests')
--- a/misc/scripts/cwuser_ldap2system.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/misc/scripts/cwuser_ldap2system.py	Mon Oct 12 10:53:35 2015 +0200
@@ -1,3 +1,5 @@
+from __future__ import print_function
+
 import base64
 from cubicweb.server.utils import crypt_password
 
@@ -20,10 +22,10 @@
 rset = sql("SELECT eid,type,source,extid,mtime FROM entities WHERE source!='system'", ask_confirm=False)
 for eid, type, source, extid, mtime in rset:
     if type != 'CWUser':
-        print "don't know what to do with entity type", type
+        print("don't know what to do with entity type", type)
         continue
     if not source.lower().startswith('ldap'):
-        print "don't know what to do with source type", source
+        print("don't know what to do with source type", source)
         continue
     extid = base64.decodestring(extid)
     ldapinfos = [x.strip().split('=') for x in extid.split(',')]
@@ -33,7 +35,7 @@
     args = dict(eid=eid, type=type, source=source, login=login,
                 firstname=firstname, surname=surname, mtime=mtime,
                 pwd=dbhelper.binary_value(crypt_password('toto')))
-    print args
+    print(args)
     sql(insert, args)
     sql(update, args)
 
--- a/misc/scripts/detect_cycle.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/misc/scripts/detect_cycle.py	Mon Oct 12 10:53:35 2015 +0200
@@ -1,9 +1,10 @@
+from __future__ import print_function
 
 try:
     rtype, = __args__
 except ValueError:
-    print 'USAGE: cubicweb-ctl shell <instance> detect_cycle.py -- <relation type>'
-    print
+    print('USAGE: cubicweb-ctl shell <instance> detect_cycle.py -- <relation type>')
+    print()
 
 graph = {}
 for fromeid, toeid in rql('Any X,Y WHERE X %s Y' % rtype):
@@ -12,4 +13,4 @@
 from logilab.common.graph import get_cycles
 
 for cycle in get_cycles(graph):
-    print 'cycle', '->'.join(str(n) for n in cycle)
+    print('cycle', '->'.join(str(n) for n in cycle))
--- a/misc/scripts/ldap_change_base_dn.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/misc/scripts/ldap_change_base_dn.py	Mon Oct 12 10:53:35 2015 +0200
@@ -1,10 +1,12 @@
+from __future__ import print_function
+
 from base64 import b64decode, b64encode
 try:
     uri, newdn = __args__
 except ValueError:
-    print 'USAGE: cubicweb-ctl shell <instance> ldap_change_base_dn.py -- <ldap source uri> <new dn>'
-    print
-    print 'you should not have updated your sources file yet'
+    print('USAGE: cubicweb-ctl shell <instance> ldap_change_base_dn.py -- <ldap source uri> <new dn>')
+    print()
+    print('you should not have updated your sources file yet')
 
 olddn = repo.sources_by_uri[uri].config['user-base-dn']
 
@@ -16,9 +18,9 @@
     olduserdn = b64decode(extid)
     newuserdn = olduserdn.replace(olddn, newdn)
     if newuserdn != olduserdn:
-        print olduserdn, '->', newuserdn
+        print(olduserdn, '->', newuserdn)
         sql("UPDATE entities SET extid='%s' WHERE eid=%s" % (b64encode(newuserdn), eid))
 
 commit()
 
-print 'you can now update the sources file to the new dn and restart the instance'
+print('you can now update the sources file to the new dn and restart the instance')
--- a/misc/scripts/ldapuser2ldapfeed.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/misc/scripts/ldapuser2ldapfeed.py	Mon Oct 12 10:53:35 2015 +0200
@@ -2,6 +2,8 @@
 
 Once this script is run, execute c-c db-check to cleanup relation tables.
 """
+from __future__ import print_function
+
 import sys
 from collections import defaultdict
 from logilab.common.shellutils import generate_password
@@ -14,12 +16,12 @@
           ' on the command line)')
     sys.exit(1)
 except KeyError:
-    print '%s is not an active source' % source_name
+    print('%s is not an active source' % source_name)
     sys.exit(1)
 
 # check source is reachable before doing anything
 if not source.get_connection().cnx:
-    print '%s is not reachable. Fix this before running this script' % source_name
+    print('%s is not reachable. Fix this before running this script' % source_name)
     sys.exit(1)
 
 raw_input('Ensure you have shut down all instances of this application before continuing.'
@@ -31,7 +33,7 @@
 from cubicweb.server.edition import EditedEntity
 
 
-print '******************** backport entity content ***************************'
+print('******************** backport entity content ***************************')
 
 todelete = defaultdict(list)
 extids = set()
@@ -39,17 +41,17 @@
 for entity in rql('Any X WHERE X cw_source S, S eid %(s)s', {'s': source.eid}).entities():
     etype = entity.cw_etype
     if not source.support_entity(etype):
-        print "source doesn't support %s, delete %s" % (etype, entity.eid)
+        print("source doesn't support %s, delete %s" % (etype, entity.eid))
         todelete[etype].append(entity)
         continue
     try:
         entity.complete()
     except Exception:
-        print '%s %s much probably deleted, delete it (extid %s)' % (
-            etype, entity.eid, entity.cw_metainformation()['extid'])
+        print('%s %s most probably deleted, delete it (extid %s)' % (
+            etype, entity.eid, entity.cw_metainformation()['extid']))
         todelete[etype].append(entity)
         continue
-    print 'get back', etype, entity.eid
+    print('get back', etype, entity.eid)
     entity.cw_edited = EditedEntity(entity, **entity.cw_attr_cache)
     if not entity.creation_date:
         entity.cw_edited['creation_date'] = datetime.now()
@@ -61,7 +63,7 @@
     if not entity.cwuri:
         entity.cw_edited['cwuri'] = '%s/?dn=%s' % (
             source.urls[0], extid.decode('utf-8', 'ignore'))
-    print entity.cw_edited
+    print(entity.cw_edited)
     if extid in extids:
         duplicates.append(extid)
         continue
@@ -73,13 +75,13 @@
 # only cleanup entities table, remaining stuff should be cleaned by a c-c
 # db-check to be run after this script
 if duplicates:
-    print 'found %s duplicate entries' % len(duplicates)
+    print('found %s duplicate entries' % len(duplicates))
     from pprint import pprint
     pprint(duplicates)
 
-print len(todelete), 'entities will be deleted'
+print(len(todelete), 'entities will be deleted')
 for etype, entities in todelete.iteritems():
-    print 'deleting', etype, [e.login for e in entities]
+    print('deleting', etype, [e.login for e in entities])
     system_source.delete_info_multi(session, entities, source_name)
 
 
@@ -89,9 +91,9 @@
 
 
 if raw_input('Commit?') in 'yY':
-    print 'committing'
+    print('committing')
     commit()
 else:
     rollback()
-    print 'rolled back'
+    print('rolled back')
 
--- a/misc/scripts/pyroforge2datafeed.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/misc/scripts/pyroforge2datafeed.py	Mon Oct 12 10:53:35 2015 +0200
@@ -2,6 +2,8 @@
 
 Once this script is run, execute c-c db-check to cleanup relation tables.
 """
+from __future__ import print_function
+
 import sys
 
 try:
@@ -12,14 +14,14 @@
           ' on the command line)')
     sys.exit(1)
 except KeyError:
-    print '%s is not an active source' % source_name
+    print('%s is not an active source' % source_name)
     sys.exit(1)
 
 # check source is reachable before doing anything
 try:
     source.get_connection()._repo
 except AttributeError:
-    print '%s is not reachable. Fix this before running this script' % source_name
+    print('%s is not reachable. Fix this before running this script' % source_name)
     sys.exit(1)
 
 raw_input('Ensure you have shut down all instances of this application before continuing.'
@@ -39,7 +41,7 @@
         ))
 
 
-print '******************** backport entity content ***************************'
+print('******************** backport entity content ***************************')
 
 from cubicweb.server import debugged
 todelete = {}
@@ -47,20 +49,20 @@
 for entity in rql('Any X WHERE X cw_source S, S eid %(s)s', {'s': source.eid}).entities():
         etype = entity.cw_etype
         if not source.support_entity(etype):
-            print "source doesn't support %s, delete %s" % (etype, entity.eid)
+            print("source doesn't support %s, delete %s" % (etype, entity.eid))
         elif etype in DONT_GET_BACK_ETYPES:
-            print 'ignore %s, delete %s' % (etype, entity.eid)
+            print('ignore %s, delete %s' % (etype, entity.eid))
         else:
             try:
                 entity.complete()
                 if not host in entity.cwuri:
-                    print 'SKIP foreign entity', entity.cwuri, source.config['base-url']
+                    print('SKIP foreign entity', entity.cwuri, source.config['base-url'])
                     continue
             except Exception:
-                print '%s %s much probably deleted, delete it (extid %s)' % (
-                    etype, entity.eid, entity.cw_metainformation()['extid'])
+                print('%s %s most probably deleted, delete it (extid %s)' % (
+                    etype, entity.eid, entity.cw_metainformation()['extid']))
             else:
-                print 'get back', etype, entity.eid
+                print('get back', etype, entity.eid)
                 entity.cw_edited = EditedEntity(entity, **entity.cw_attr_cache)
                 system_source.add_entity(session, entity)
                 sql("UPDATE entities SET asource=%(asource)s, source='system', extid=%(extid)s "
@@ -76,7 +78,7 @@
     system_source.delete_info_multi(session, entities, source_name)
 
 
-print '******************** backport mapping **********************************'
+print('******************** backport mapping **********************************')
 session.disable_hook_categories('cw.sources')
 mapping = []
 for mappart in rql('Any X,SCH WHERE X cw_schema SCH, X cw_for_source S, S eid %(s)s',
@@ -91,7 +93,7 @@
                 continue
             if rdef.subject in DONT_GET_BACK_ETYPES \
                     and rdef.object in DONT_GET_BACK_ETYPES:
-                print 'dont map', rdef
+                print('dont map', rdef)
                 continue
             if rdef.subject in DONT_GET_BACK_ETYPES:
                 options = u'action=link\nlinkattr=name'
@@ -105,7 +107,7 @@
                     roles = 'object',
                 else:
                     roles = 'subject',
-            print 'map', rdef, options, roles
+            print('map', rdef, options, roles)
             for role in roles:
                 mapping.append( (
                         (str(rdef.subject), str(rdef.rtype), str(rdef.object)),
--- a/misc/scripts/repair_file_1-9_migration.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/misc/scripts/repair_file_1-9_migration.py	Mon Oct 12 10:53:35 2015 +0200
@@ -4,13 +4,14 @@
 * on our intranet on July 07 2010
 * on our extranet on July 16 2010
 """
+from __future__ import print_function
 
 try:
     backupinstance, = __args__
 except ValueError:
-    print 'USAGE: cubicweb-ctl shell <instance> repair_file_1-9_migration.py -- <backup instance id>'
-    print
-    print 'you should restored the backup on a new instance, accessible through pyro'
+    print('USAGE: cubicweb-ctl shell <instance> repair_file_1-9_migration.py -- <backup instance id>')
+    print()
+    print('you should have restored the backup on a new instance, accessible through pyro')
 
 from cubicweb import cwconfig, dbapi
 from cubicweb.server.session import hooks_control
@@ -32,20 +33,20 @@
                                    'XX from_entity YY, YY name "File")'):
         if rtype in ('is', 'is_instance_of'):
             continue
-        print rtype
+        print(rtype)
         for feid, xeid in backupcu.execute('Any F,X WHERE F %s X, F is IN (File,Image)' % rtype):
-            print 'restoring relation %s between file %s and %s' % (rtype, feid, xeid),
-            print rql('SET F %s X WHERE F eid %%(f)s, X eid %%(x)s, NOT F %s X' % (rtype, rtype),
-                      {'f': feid, 'x': xeid})
+            print('restoring relation %s between file %s and %s' % (rtype, feid, xeid), end=' ')
+            print(rql('SET F %s X WHERE F eid %%(f)s, X eid %%(x)s, NOT F %s X' % (rtype, rtype),
+                      {'f': feid, 'x': xeid}))
 
     for rtype, in backupcu.execute('DISTINCT Any RTN WHERE X relation_type RT, RT name RTN,'
                                    'X to_entity Y, Y name "Image", X is CWRelation, '
                                    'EXISTS(XX is CWRelation, XX relation_type RT, '
                                    'XX to_entity YY, YY name "File")'):
-        print rtype
+        print(rtype)
         for feid, xeid in backupcu.execute('Any F,X WHERE X %s F, F is IN (File,Image)' % rtype):
-            print 'restoring relation %s between %s and file %s' % (rtype, xeid, feid),
-            print rql('SET X %s F WHERE F eid %%(f)s, X eid %%(x)s, NOT X %s F' % (rtype, rtype),
-                      {'f': feid, 'x': xeid})
+            print('restoring relation %s between %s and file %s' % (rtype, xeid, feid), end=' ')
+            print(rql('SET X %s F WHERE F eid %%(f)s, X eid %%(x)s, NOT X %s F' % (rtype, rtype),
+                      {'f': feid, 'x': xeid}))
 
 commit()
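
A note on the pattern above: the Python 2 print statement with a trailing comma
(which keeps the cursor on the same line) maps to the end=' ' keyword of the
print() function once `from __future__ import print_function` is in effect. A
minimal standalone sketch of the equivalence (the relation name and eids are
placeholders, not taken from this script):

    from __future__ import print_function

    rtype, feid, xeid = 'attachment', 1234, 5678   # made-up values
    # Python 2 statement form:  print 'restoring relation %s ...' % rtype,
    # Portable function form, same output (no newline until the next print):
    print('restoring relation %s between file %s and %s' % (rtype, feid, xeid), end=' ')
    print('done')
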
--- a/misc/scripts/repair_splitbrain_ldapuser_source.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/misc/scripts/repair_splitbrain_ldapuser_source.py	Mon Oct 12 10:53:35 2015 +0200
@@ -14,6 +14,7 @@
 deciding to apply it for you. And then ADAPT it to your needs.
 
 """
+from __future__ import print_function
 
 import base64
 from collections import defaultdict
@@ -28,12 +29,12 @@
           ' on the command line)')
     sys.exit(1)
 except KeyError:
-    print '%s is not an active source' % source_name
+    print('%s is not an active source' % source_name)
     sys.exit(1)
 
 # check source is reachable before doing anything
 if not source.get_connection().cnx:
-    print '%s is not reachable. Fix this before running this script' % source_name
+    print('%s is not reachable. Fix this before running this script' % source_name)
     sys.exit(1)
 
 def find_dupes():
@@ -52,11 +53,11 @@
     CWUser = schema['CWUser']
     for extid, eids in dupes.items():
         newest = eids.pop() # we merge everything on the newest
-        print 'merging ghosts of', extid, 'into', newest
+        print('merging ghosts of', extid, 'into', newest)
         # now we merge pairwise into the newest
         for old in eids:
             subst = {'old': old, 'new': newest}
-            print '  merging', old
+            print('  merging', old)
             gone_eids.append(old)
             for rschema in CWUser.subject_relations():
                 if rschema.final or rschema == 'identity':
@@ -83,24 +84,24 @@
         rollback()
         return
     commit() # XXX flushing operations is wanted rather than really committing
-    print 'clean up entities table'
+    print('clean up entities table')
     sql('DELETE FROM entities WHERE eid IN (%s)' % (', '.join(str(x) for x in gone_eids)))
     commit()
 
 def main():
     dupes = find_dupes()
     if not dupes:
-        print 'No duplicate user'
+        print('No duplicate user')
         return
 
-    print 'Found %s duplicate user instances' % len(dupes)
+    print('Found %s duplicate user instances' % len(dupes))
 
     while True:
-        print 'Fix or dry-run? (f/d)  ... or Ctrl-C to break out'
+        print('Fix or dry-run? (f/d)  ... or Ctrl-C to break out')
         answer = raw_input('> ')
         if answer.lower() not in 'fd':
             continue
-        print 'Please STOP THE APPLICATION INSTANCES (service or interactive), and press Return when done.'
+        print('Please STOP THE APPLICATION INSTANCES (service or interactive), and press Return when done.')
         raw_input('<I swear all running instances and workers of the application are stopped>')
         with hooks_control(session, session.HOOKS_DENY_ALL):
             merge_dupes(dupes, docommit=answer=='f')
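
This script still prompts with raw_input(), which only exists on Python 2; a
fully portable variant would typically go through six.moves.input instead. A
hedged sketch of what the prompt loop could look like (not part of this
changeset):

    from six.moves import input   # raw_input on Python 2, input on Python 3

    def ask_mode():
        """Loop until the operator answers 'f' (fix) or 'd' (dry-run)."""
        while True:
            answer = input('Fix or dry-run? (f/d) > ').strip().lower()
            if answer in ('f', 'd'):
                return answer
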
--- a/multipart.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/multipart.py	Mon Oct 12 10:53:35 2015 +0200
@@ -41,14 +41,12 @@
 from wsgiref.headers import Headers
 import re, sys
 try:
-    from urlparse import parse_qs
-except ImportError: # pragma: no cover (fallback for Python 2.5)
-    from cgi import parse_qs
-try:
     from io import BytesIO
 except ImportError: # pragma: no cover (fallback for Python 2.5)
     from StringIO import StringIO as BytesIO
 
+from six.moves.urllib.parse import parse_qs
+
 ##############################################################################
 ################################ Helper & Misc ################################
 ##############################################################################
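
six.moves.urllib.parse re-exports the Python 3 urllib.parse names and falls
back to urlparse/urllib on Python 2, which is why both the urlparse and the
old cgi.parse_qs fallbacks can be dropped here. Quick illustration with a
made-up query string:

    from six.moves.urllib.parse import parse_qs

    # parse_qs maps each parameter name to a list of values and decodes '+'
    params = parse_qs('vid=index&rql=Any+X')
    assert params['vid'] == ['index']
    assert params['rql'] == ['Any X']
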
--- a/predicates.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/predicates.py	Mon Oct 12 10:53:35 2015 +0200
@@ -24,6 +24,9 @@
 from warnings import warn
 from operator import eq
 
+from six import string_types, integer_types
+from six.moves import range
+
 from logilab.common.deprecation import deprecated
 from logilab.common.registry import Predicate, objectify_predicate, yes
 
@@ -106,7 +109,7 @@
             if accept_none is None:
                 accept_none = self.accept_none
             if not accept_none and \
-                   any(rset[i][col] is None for i in xrange(len(rset))):
+                   any(row[col] is None for row in rset):
                 return 0
             etypes = rset.column_types(col)
         else:
@@ -332,7 +335,7 @@
             # on rset containing several entity types, each row may be
             # individually adaptable, while the whole rset won't be if the
             # same adapter can't be used for each type
-            for row in xrange(len(kwargs['rset'])):
+            for row in range(len(kwargs['rset'])):
                 kwargs.setdefault('col', 0)
                 _score = super(adaptable, self).__call__(cls, req, row=row, **kwargs)
                 if not _score:
@@ -611,7 +614,7 @@
         super(is_instance, self).__init__(**kwargs)
         self.expected_etypes = expected_etypes
         for etype in self.expected_etypes:
-            assert isinstance(etype, basestring), etype
+            assert isinstance(etype, string_types), etype
 
     def __str__(self):
         return '%s(%s)' % (self.__class__.__name__,
@@ -671,7 +674,7 @@
             score = scorefunc(*args, **kwargs)
             if not score:
                 return 0
-            if isinstance(score, (int, long)):
+            if isinstance(score, integer_types):
                 return score
             return 1
         self.score_entity = intscore
@@ -1091,7 +1094,7 @@
     """
     if from_state_name is not None:
         warn("on_fire_transition's from_state_name argument is unused", DeprecationWarning)
-    if isinstance(tr_names, basestring):
+    if isinstance(tr_names, string_types):
         tr_names = set((tr_names,))
     def match_etype_and_transition(trinfo):
         # take care trinfo.transition is None when calling change_state
@@ -1291,7 +1294,7 @@
             raise ValueError("match_form_params() can't be called with both "
                              "positional and named arguments")
         if expected:
-            if len(expected) == 1 and not isinstance(expected[0], basestring):
+            if len(expected) == 1 and not isinstance(expected[0], string_types):
                 raise ValueError("match_form_params() positional arguments "
                                  "must be strings")
             super(match_form_params, self).__init__(*expected)
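
For reference, six.string_types is (str, unicode) on Python 2 and (str,) on
Python 3, while six.integer_types covers (int, long) and (int,) respectively.
A small sketch of the isinstance pattern used throughout this changeset, with
helper names invented for the example:

    from six import string_types, integer_types

    def normalize_score(score):
        """Return an int score: ints pass through, other truthy values count as 1."""
        if not score:
            return 0
        if isinstance(score, integer_types):
            return score
        return 1

    def check_etype_name(etype):
        assert isinstance(etype, string_types), etype
        return etype

    assert normalize_score(3) == 3 and normalize_score(object()) == 1
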
--- a/repoapi.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/repoapi.py	Mon Oct 12 10:53:35 2015 +0200
@@ -17,21 +17,14 @@
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """Official API to access the content of a repository
 """
+from warnings import warn
 from logilab.common.deprecation import class_deprecated
 
 from cubicweb.utils import parse_repo_uri
-from cubicweb import ConnectionError, AuthenticationError
+from cubicweb import AuthenticationError
 from cubicweb.server.session import Connection
 
 
-### private function for specific method ############################
-
-def _get_inmemory_repo(config, vreg=None):
-    from cubicweb.server.repository import Repository
-    from cubicweb.server.utils import TasksManager
-    return Repository(config, TasksManager(), vreg=vreg)
-
-
 ### public API ######################################################
 
 def get_repository(uri=None, config=None, vreg=None):
@@ -41,16 +34,11 @@
     The returned repository may be an in-memory repository or a proxy object
     using a specific RPC method, depending on the given URI.
     """
-    if uri is None:
-        return _get_inmemory_repo(config, vreg)
-
-    protocol, hostport, appid = parse_repo_uri(uri)
+    if uri is not None:
+        warn('[3.22] get_repository only wants a config')
 
-    if protocol == 'inmemory':
-        # me may have been called with a dummy 'inmemory://' uri ...
-        return _get_inmemory_repo(config, vreg)
-
-    raise ConnectionError('unknown protocol: `%s`' % protocol)
+    assert config is not None, 'get_repository(config=config)'
+    return config.repository(vreg)
 
 def connect(repo, login, **kwargs):
     """Take credential and return associated Connection.
--- a/req.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/req.py	Mon Oct 12 10:53:35 2015 +0200
@@ -20,10 +20,9 @@
 __docformat__ = "restructuredtext en"
 
 from warnings import warn
-from urlparse import urlsplit, urlunsplit
-from urllib import quote as urlquote, unquote as urlunquote
 from datetime import time, datetime, timedelta
-from cgi import parse_qs, parse_qsl
+
+from six.moves.urllib.parse import parse_qs, parse_qsl, quote as urlquote, unquote as urlunquote, urlsplit, urlunsplit
 
 from logilab.common.decorators import cached
 from logilab.common.deprecation import deprecated
--- a/rqlrewrite.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/rqlrewrite.py	Mon Oct 12 10:53:35 2015 +0200
@@ -22,6 +22,8 @@
 """
 __docformat__ = "restructuredtext en"
 
+from six import string_types
+
 from rql import nodes as n, stmts, TypeResolverException
 from rql.utils import common_parent
 
@@ -427,7 +429,7 @@
     def insert_varmap_snippets(self, varmap, rqlexprs, varexistsmap):
         try:
             self.init_from_varmap(varmap, varexistsmap)
-        except VariableFromSubQuery, ex:
+        except VariableFromSubQuery as ex:
             # variable may have been moved to a newly inserted subquery
             # we should insert snippet in that subquery
             subquery = self.select.aliases[ex.variable].query
@@ -883,7 +885,7 @@
                 return n.Constant(vi['const'], 'Int')
             return n.VariableRef(stmt.get_variable(selectvar))
         vname_or_term = self._get_varname_or_term(node.name)
-        if isinstance(vname_or_term, basestring):
+        if isinstance(vname_or_term, string_types):
             return n.VariableRef(stmt.get_variable(vname_or_term))
         # shared term
         return vname_or_term.copy(stmt)
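
The `except SomeError, ex` spelling being removed above is a syntax error on
Python 3; the `as` form works from Python 2.6 onwards as well as on Python 3.
Trivial standalone illustration:

    def to_int(value):
        """Return int(value), or None when the value cannot be converted."""
        try:
            return int(value)
        except (TypeError, ValueError) as exc:   # portable exception binding
            print('cannot convert %r (%s)' % (value, exc))
            return None

    assert to_int('12') == 12 and to_int('abc') is None
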
--- a/rset.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/rset.py	Mon Oct 12 10:53:35 2015 +0200
@@ -21,6 +21,8 @@
 
 from warnings import warn
 
+from six.moves import range
+
 from logilab.common import nullobject
 from logilab.common.decorators import cached, clear_cache, copy_cache
 from rql import nodes, stmts
@@ -186,7 +188,7 @@
         """
         rows, descr = [], []
         rset = self.copy(rows, descr)
-        for i in xrange(len(self)):
+        for i in range(len(self)):
             if not filtercb(self.get_entity(i, col)):
                 continue
             rows.append(self.rows[i])
@@ -215,10 +217,10 @@
         rset = self.copy(rows, descr)
         if col >= 0:
             entities = sorted(enumerate(self.entities(col)),
-                              key=lambda (i, e): keyfunc(e), reverse=reverse)
+                              key=lambda t: keyfunc(t[1]), reverse=reverse)
         else:
             entities = sorted(enumerate(self),
-                              key=lambda (i, e): keyfunc(e), reverse=reverse)
+                              key=lambda t: keyfunc(t[1]), reverse=reverse)
         for index, _ in entities:
             rows.append(self.rows[index])
             descr.append(self.description[index])
@@ -311,7 +313,7 @@
             newselect.limit = limit
             newselect.offset = offset
             aliases = [nodes.VariableRef(newselect.get_variable(chr(65+i), i))
-                       for i in xrange(len(rqlst.children[0].selection))]
+                       for i in range(len(rqlst.children[0].selection))]
             for vref in aliases:
                 newselect.append_selected(nodes.VariableRef(vref.variable))
             newselect.set_with([nodes.SubQuery(aliases, rqlst)], check=False)
@@ -387,7 +389,7 @@
 
     def entities(self, col=0):
         """iter on entities with eid in the `col` column of the result set"""
-        for i in xrange(len(self)):
+        for i in range(len(self)):
             # may have None values in case of outer join (or aggregat on eid
             # hacks)
             if self.rows[i][col] is not None:
@@ -606,7 +608,7 @@
                 except AttributeError:
                     # not a variable
                     continue
-                for i in xrange(len(select.selection)):
+                for i in range(len(select.selection)):
                     if i == col:
                         continue
                     coletype = self.description[row][i]
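
Tuple parameter unpacking in lambdas (`lambda (i, e): ...`) was removed in
Python 3, hence the `lambda t: keyfunc(t[1])` rewrite above. Standalone sketch
with made-up data and key function:

    names = ['bb', 'a', 'cccc']
    keyfunc = len

    # Python 2 only:  sorted(enumerate(names), key=lambda (i, e): keyfunc(e))
    # Portable form used in the patch:
    ordered = sorted(enumerate(names), key=lambda t: keyfunc(t[1]), reverse=True)
    assert [name for _, name in ordered] == ['cccc', 'bb', 'a']
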
--- a/rtags.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/rtags.py	Mon Oct 12 10:53:35 2015 +0200
@@ -40,6 +40,8 @@
 import logging
 from warnings import warn
 
+from six import string_types
+
 from logilab.common.logging_ext import set_log_methods
 from logilab.common.registry import RegistrableInstance, yes
 
@@ -145,7 +147,7 @@
         return tag
 
     def _tag_etype_attr(self, etype, attr, desttype='*', *args, **kwargs):
-        if isinstance(attr, basestring):
+        if isinstance(attr, string_types):
             attr, role = attr, 'subject'
         else:
             attr, role = attr
--- a/schema.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/schema.py	Mon Oct 12 10:53:35 2015 +0200
@@ -16,6 +16,7 @@
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """classes to define schemas for CubicWeb"""
+from __future__ import print_function
 
 __docformat__ = "restructuredtext en"
 _ = unicode
@@ -25,6 +26,9 @@
 from logging import getLogger
 from warnings import warn
 
+from six import string_types
+from six.moves import range
+
 from logilab.common import tempattr
 from logilab.common.decorators import cached, clear_cache, monkeypatch, cachedproperty
 from logilab.common.logging_ext import set_log_methods
@@ -204,7 +208,7 @@
         """
         self.eid = eid # eid of the entity representing this rql expression
         assert mainvars, 'bad mainvars %s' % mainvars
-        if isinstance(mainvars, basestring):
+        if isinstance(mainvars, string_types):
             mainvars = set(splitstrip(mainvars))
         elif not isinstance(mainvars, set):
             mainvars = set(mainvars)
@@ -363,7 +367,7 @@
             get_eschema = _cw.vreg.schema.eschema
             try:
                 for eaction, col in has_perm_defs:
-                    for i in xrange(len(rset)):
+                    for i in range(len(rset)):
                         eschema = get_eschema(rset.description[i][col])
                         eschema.check_perm(_cw, eaction, eid=rset[i][col])
                 if self.eid is not None:
@@ -576,7 +580,7 @@
     assert action in self.ACTIONS, action
     #assert action in self._groups, '%s %s' % (self, action)
     try:
-        return frozenset(g for g in self.permissions[action] if isinstance(g, basestring))
+        return frozenset(g for g in self.permissions[action] if isinstance(g, string_types))
     except KeyError:
         return ()
 PermissionMixIn.get_groups = get_groups
@@ -595,7 +599,7 @@
     assert action in self.ACTIONS, action
     #assert action in self._rqlexprs, '%s %s' % (self, action)
     try:
-        return tuple(g for g in self.permissions[action] if not isinstance(g, basestring))
+        return tuple(g for g in self.permissions[action] if not isinstance(g, string_types))
     except KeyError:
         return ()
 PermissionMixIn.get_rqlexprs = get_rqlexprs
@@ -665,7 +669,7 @@
     groups = self.get_groups(action)
     if _cw.user.matching_groups(groups):
         if DBG:
-            print ('check_perm: %r %r: user matches %s' % (action, _self_str, groups))
+            print('check_perm: %r %r: user matches %s' % (action, _self_str, groups))
         return
     # if 'owners' in allowed groups, check if the user actually owns this
     # object, if so that's enough
@@ -676,14 +680,14 @@
           kwargs.get('creating')
           or ('eid' in kwargs and _cw.user.owns(kwargs['eid']))):
         if DBG:
-            print ('check_perm: %r %r: user is owner or creation time' %
-                   (action, _self_str))
+            print('check_perm: %r %r: user is owner or creation time' %
+                  (action, _self_str))
         return
     # else if there is some rql expressions, check them
     if DBG:
-        print ('check_perm: %r %r %s' %
-               (action, _self_str, [(rqlexpr, kwargs, rqlexpr.check(_cw, **kwargs))
-                                    for rqlexpr in self.get_rqlexprs(action)]))
+        print('check_perm: %r %r %s' %
+              (action, _self_str, [(rqlexpr, kwargs, rqlexpr.check(_cw, **kwargs))
+                                   for rqlexpr in self.get_rqlexprs(action)]))
     if any(rqlexpr.check(_cw, **kwargs)
            for rqlexpr in self.get_rqlexprs(action)):
         return
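
get_groups() and get_rqlexprs() both filter the same permissions tuple: plain
strings are group names, anything else is an RQL expression object. A toy
sketch of that split, with a dummy class standing in for a real RQLExpression:

    from six import string_types

    class FakeRQLExpr(object):
        """Stand-in for an RQL expression object (illustration only)."""
        def __init__(self, expression):
            self.expression = expression

    permissions = {'read': ('managers', 'users', FakeRQLExpr('X owned_by U'))}

    groups = frozenset(g for g in permissions['read'] if isinstance(g, string_types))
    rqlexprs = tuple(g for g in permissions['read'] if not isinstance(g, string_types))

    assert groups == frozenset(['managers', 'users'])
    assert len(rqlexprs) == 1 and rqlexprs[0].expression == 'X owned_by U'
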
--- a/selectors.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/selectors.py	Mon Oct 12 10:53:35 2015 +0200
@@ -18,6 +18,8 @@
 
 from warnings import warn
 
+from six import string_types
+
 from logilab.common.deprecation import deprecated, class_renamed
 
 from cubicweb.predicates import *
@@ -84,7 +86,7 @@
 
     See `EntityPredicate` documentation for behaviour when row is not specified.
 
-    :param *etypes: entity types (`basestring`) which should be refused
+    :param *etypes: entity types (`string_types`) which should be refused
     """
     def __init__(self, *etypes):
         super(_but_etype, self).__init__()
--- a/server/__init__.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/server/__init__.py	Mon Oct 12 10:53:35 2015 +0200
@@ -20,6 +20,7 @@
 
 The server module contains functions to initialize a new repository.
 """
+from __future__ import print_function
 
 __docformat__ = "restructuredtext en"
 
@@ -28,6 +29,8 @@
 from glob import glob
 from contextlib import contextmanager
 
+from six import string_types
+
 from logilab.common.modutils import LazyObject
 from logilab.common.textutils import splitstrip
 from logilab.common.registry import yes
@@ -138,7 +141,7 @@
     if not debugmode:
         DEBUG = 0
         return
-    if isinstance(debugmode, basestring):
+    if isinstance(debugmode, string_types):
         for mode in splitstrip(debugmode, sep='|'):
             DEBUG |= globals()[mode]
     else:
@@ -245,7 +248,7 @@
             remainings = filter(drop_filter, helper.list_tables(sqlcursor))
             assert not remainings, 'Remaining tables: %s' % ', '.join(remainings)
     _title = '-> creating tables '
-    print _title,
+    print(_title, end=' ')
     # schema entities and relations tables
     # can't skip entities table even if system source doesn't support them,
     # they are used sometimes by generated sql. Keeping them empty is much
@@ -255,8 +258,8 @@
     #               if not repo.system_source.support_entity(str(e))])
     failed = sqlexec(schemasql, execute, pbtitle=_title, delimiter=';;')
     if failed:
-        print 'The following SQL statements failed. You should check your schema.'
-        print failed
+        print('The following SQL statements failed. You should check your schema.')
+        print(failed)
         raise Exception('execution of the sql schema failed, you should check your schema')
     sqlcursor.close()
     sqlcnx.commit()
@@ -267,7 +270,7 @@
         repo.system_source.eid = ssource.eid
         cnx.execute('SET X cw_source X WHERE X eid %(x)s', {'x': ssource.eid})
         # insert base groups and default admin
-        print '-> inserting default user and default groups.'
+        print('-> inserting default user and default groups.')
         try:
             login = unicode(sourcescfg['admin']['login'])
             pwd = sourcescfg['admin']['password']
@@ -310,7 +313,7 @@
     # (drop instance attribute to get back to class attribute)
     del config.cubicweb_appobject_path
     del config.cube_appobject_path
-    print '-> database for instance %s initialized.' % config.appid
+    print('-> database for instance %s initialized.' % config.appid)
 
 
 def initialize_schema(config, schema, mhandler, event='create'):
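
The debug-mode handling above accepts either an integer bitmask or a
pipe-separated string of DBG_* names that are OR-ed together. A reduced sketch
of that parsing, with arbitrary flag values standing in for the real
cubicweb.server constants:

    from six import string_types
    from logilab.common.textutils import splitstrip

    DBG_RQL, DBG_SQL, DBG_REPO = 1, 2, 4            # made-up values
    FLAGS = {'DBG_RQL': DBG_RQL, 'DBG_SQL': DBG_SQL, 'DBG_REPO': DBG_REPO}

    def parse_debugmode(debugmode):
        """Accept an int, or a 'DBG_RQL|DBG_SQL' style string, return a bitmask."""
        if isinstance(debugmode, string_types):
            mask = 0
            for mode in splitstrip(debugmode, sep='|'):
                mask |= FLAGS[mode]
            return mask
        return debugmode

    assert parse_debugmode('DBG_RQL|DBG_SQL') == DBG_RQL | DBG_SQL
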
--- a/server/checkintegrity.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/server/checkintegrity.py	Mon Oct 12 10:53:35 2015 +0200
@@ -20,6 +20,8 @@
 * integrity of a CubicWeb repository. Hum actually only the system database is
   checked.
 """
+from __future__ import print_function
+
 __docformat__ = "restructuredtext en"
 
 import sys
@@ -90,11 +92,11 @@
     dbhelper = repo.system_source.dbhelper
     cursor = cnx.cnxset.cu
     if not dbhelper.has_fti_table(cursor):
-        print 'no text index table'
+        print('no text index table')
         dbhelper.init_fti(cursor)
     repo.system_source.do_fti = True  # ensure full-text indexation is activated
     if etypes is None:
-        print 'Reindexing entities'
+        print('Reindexing entities')
         etypes = set()
         for eschema in schema.entities():
             if eschema.final:
@@ -107,8 +109,8 @@
         # clear fti table first
         cnx.system_sql('DELETE FROM %s' % dbhelper.fti_table)
     else:
-        print 'Reindexing entities of type %s' % \
-              ', '.join(sorted(str(e) for e in etypes))
+        print('Reindexing entities of type %s' %
+              ', '.join(sorted(str(e) for e in etypes)))
         # clear fti table first. Use subquery for sql compatibility
         cnx.system_sql("DELETE FROM %s WHERE EXISTS(SELECT 1 FROM ENTITIES "
                        "WHERE eid=%s AND type IN (%s))" % (
@@ -135,7 +137,7 @@
 
 def check_schema(schema, cnx, eids, fix=1):
     """check serialized schema"""
-    print 'Checking serialized schema'
+    print('Checking serialized schema')
     unique_constraints = ('SizeConstraint', 'FormatConstraint',
                           'VocabularyConstraint',
                           'RQLVocabularyConstraint')
@@ -147,16 +149,16 @@
         if count == 1:
             continue
         if cstrname in unique_constraints:
-            print "ERROR: got %s %r constraints on relation %s.%s.%s" % (
-                count, cstrname, sn, rn, on)
+            print("ERROR: got %s %r constraints on relation %s.%s.%s" % (
+                count, cstrname, sn, rn, on))
             if fix:
-                print 'dunno how to fix, do it yourself'
+                print('dunno how to fix, do it yourself')
 
 
 
 def check_text_index(schema, cnx, eids, fix=1):
     """check all entities registered in the text index"""
-    print 'Checking text index'
+    print('Checking text index')
     msg = '  Entity with eid %s exists in the text index but in no source (autofix will remove from text index)'
     cursor = cnx.system_sql('SELECT uid FROM appears;')
     for row in cursor.fetchall():
@@ -170,7 +172,7 @@
 
 def check_entities(schema, cnx, eids, fix=1):
     """check all entities registered in the repo system table"""
-    print 'Checking entities system table'
+    print('Checking entities system table')
     # system table but no source
     msg = '  Entity %s with eid %s exists in the system table but in no source (autofix will delete the entity)'
     cursor = cnx.system_sql('SELECT eid,type FROM entities;')
@@ -228,7 +230,7 @@
                            'WHERE s.cw_name=e.type AND NOT EXISTS(SELECT 1 FROM is_instance_of_relation as cs '
                            '  WHERE cs.eid_from=e.eid AND cs.eid_to=s.cw_eid)')
         notify_fixed(True)
-    print 'Checking entities tables'
+    print('Checking entities tables')
     msg = '  Entity with eid %s exists in the %s table but not in the system table (autofix will delete the entity)'
     for eschema in schema.entities():
         if eschema.final:
@@ -263,7 +265,7 @@
     """check that eids referenced by relations are registered in the repo system
     table
     """
-    print 'Checking relations'
+    print('Checking relations')
     for rschema in schema.relations():
         if rschema.final or rschema.type in PURE_VIRTUAL_RTYPES:
             continue
@@ -287,7 +289,7 @@
             cursor = cnx.system_sql('SELECT eid_from FROM %s_relation;' % rschema)
         except Exception as ex:
             # usually because table doesn't exist
-            print 'ERROR', ex
+            print('ERROR', ex)
             continue
         for row in cursor.fetchall():
             eid = row[0]
@@ -310,7 +312,7 @@
 
 def check_mandatory_relations(schema, cnx, eids, fix=1):
     """check entities missing some mandatory relation"""
-    print 'Checking mandatory relations'
+    print('Checking mandatory relations')
     msg = '%s #%s is missing mandatory %s relation %s (autofix will delete the entity)'
     for rschema in schema.relations():
         if rschema.final or rschema in PURE_VIRTUAL_RTYPES or rschema in ('is', 'is_instance_of'):
@@ -340,7 +342,7 @@
     """check for entities stored in the system source missing some mandatory
     attribute
     """
-    print 'Checking mandatory attributes'
+    print('Checking mandatory attributes')
     msg = '%s #%s is missing mandatory attribute %s (autofix will delete the entity)'
     for rschema in schema.relations():
         if not rschema.final or rschema in VIRTUAL_RTYPES:
@@ -361,7 +363,7 @@
 
     FIXME: rewrite using RQL queries ?
     """
-    print 'Checking metadata'
+    print('Checking metadata')
     cursor = cnx.system_sql("SELECT DISTINCT type FROM entities;")
     eidcolumn = SQL_PREFIX + 'eid'
     msg = '  %s with eid %s has no %s (autofix will set it to now)'
@@ -403,9 +405,9 @@
         if fix:
             cnx.commit()
         else:
-            print
+            print()
         if not fix:
-            print 'WARNING: Diagnostic run, nothing has been corrected'
+            print('WARNING: Diagnostic run, nothing has been corrected')
     if reindex:
         cnx.rollback()
         reindex_entities(repo.schema, cnx, withpb=withpb)
--- a/server/hook.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/server/hook.py	Mon Oct 12 10:53:35 2015 +0200
@@ -248,6 +248,8 @@
 .. autoclass:: cubicweb.server.hook.LateOperation
 .. autoclass:: cubicweb.server.hook.DataOperationMixIn
 """
+from __future__ import print_function
+
 __docformat__ = "restructuredtext en"
 
 from warnings import warn
@@ -331,7 +333,7 @@
                         with cnx.running_hooks_ops():
                             for hook in hooks:
                                 if debug:
-                                    print event, _kwargs, hook
+                                    print(event, _kwargs, hook)
                                 hook()
 
     def get_pruned_hooks(self, cnx, event, entities, eids_from_to, kwargs):
--- a/server/hooksmanager.py	Mon Oct 12 09:19:07 2015 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,22 +0,0 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-#
-# This file is part of CubicWeb.
-#
-# CubicWeb is free software: you can redistribute it and/or modify it under the
-# terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option)
-# any later version.
-#
-# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-from logilab.common.deprecation import class_renamed, class_moved
-from cubicweb.server import hook
-
-SystemHook = class_renamed('SystemHook', hook.Hook)
-Hook = class_moved(hook.Hook)
--- a/server/migractions.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/server/migractions.py	Mon Oct 12 10:53:35 2015 +0200
@@ -26,6 +26,8 @@
 * add an entity
 * execute raw RQL queries
 """
+from __future__ import print_function
+
 __docformat__ = "restructuredtext en"
 
 import sys
@@ -93,7 +95,7 @@
             self.repo = repo
             self.session = cnx.session
         elif connect:
-            self.repo_connect()
+            self.repo = config.repository()
             self.set_cnx()
         else:
             self.session = None
@@ -134,27 +136,21 @@
             try:
                 self.cnx = repoapi.connect(self.repo, login, password=pwd)
                 if not 'managers' in self.cnx.user.groups:
-                    print 'migration need an account in the managers group'
+                    print('migration needs an account in the managers group')
                 else:
                     break
             except AuthenticationError:
-                print 'wrong user/password'
+                print('wrong user/password')
             except (KeyboardInterrupt, EOFError):
-                print 'aborting...'
+                print('aborting...')
                 sys.exit(0)
             try:
                 login, pwd = manager_userpasswd()
             except (KeyboardInterrupt, EOFError):
-                print 'aborting...'
+                print('aborting...')
                 sys.exit(0)
         self.session = self.repo._get_session(self.cnx.sessionid)
 
-
-    @cached
-    def repo_connect(self):
-        self.repo = repoapi.get_repository(config=self.config)
-        return self.repo
-
     def cube_upgraded(self, cube, version):
         self.cmd_set_property('system.version.%s' % cube.lower(),
                               unicode(version))
@@ -191,7 +187,7 @@
 
     def backup_database(self, backupfile=None, askconfirm=True, format='native'):
         config = self.config
-        repo = self.repo_connect()
+        repo = self.repo
         # paths
         timestamp = datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
         instbkdir = osp.join(config.appdatahome, 'backup')
@@ -202,13 +198,13 @@
         # check backup has to be done
         if osp.exists(backupfile) and not \
                 self.confirm('Backup file %s exists, overwrite it?' % backupfile):
-            print '-> no backup done.'
+            print('-> no backup done.')
             return
         elif askconfirm and not self.confirm('Backup %s database?' % config.appid):
-            print '-> no backup done.'
+            print('-> no backup done.')
             return
         open(backupfile,'w').close() # kinda lock
-        os.chmod(backupfile, 0600)
+        os.chmod(backupfile, 0o600)
         # backup
         source = repo.system_source
         tmpdir = tempfile.mkdtemp()
@@ -217,7 +213,7 @@
             try:
                 source.backup(osp.join(tmpdir, source.uri), self.confirm, format=format)
             except Exception as ex:
-                print '-> error trying to backup %s [%s]' % (source.uri, ex)
+                print('-> error trying to backup %s [%s]' % (source.uri, ex))
                 if not self.confirm('Continue anyway?', default='n'):
                     raise SystemExit(1)
                 else:
@@ -236,7 +232,7 @@
                 # call hooks
                 repo.hm.call_hooks('server_backup', repo=repo, timestamp=timestamp)
                 # done
-                print '-> backup file',  backupfile
+                print('-> backup file',  backupfile)
         finally:
             shutil.rmtree(tmpdir)
 
@@ -268,19 +264,19 @@
                 if written_format in ('portable', 'native'):
                     format = written_format
         self.config.init_cnxset_pool = False
-        repo = self.repo_connect()
+        repo = self.repo
         source = repo.system_source
         try:
             source.restore(osp.join(tmpdir, source.uri), self.confirm, drop, format)
         except Exception as exc:
-            print '-> error trying to restore %s [%s]' % (source.uri, exc)
+            print('-> error trying to restore %s [%s]' % (source.uri, exc))
             if not self.confirm('Continue anyway?', default='n'):
                 raise SystemExit(1)
         shutil.rmtree(tmpdir)
         # call hooks
         repo.init_cnxset_pool()
         repo.hm.call_hooks('server_restore', repo=repo, timestamp=backupfile)
-        print '-> database restored.'
+        print('-> database restored.')
 
     def commit(self):
         self.cnx.commit()
@@ -362,11 +358,11 @@
             directory = osp.join(self.config.cube_dir(cube), 'schema')
         sql_scripts = glob(osp.join(directory, '*.%s.sql' % driver))
         for fpath in sql_scripts:
-            print '-> installing', fpath
+            print('-> installing', fpath)
             failed = sqlexec(open(fpath).read(), self.cnx.system_sql, False,
                              delimiter=';;')
             if failed:
-                print '-> ERROR, skipping', fpath
+                print('-> ERROR, skipping', fpath)
 
     # schema synchronization internals ########################################
 
@@ -552,12 +548,12 @@
                 for name in cols:
                     rschema = repoeschema.subjrels.get(name)
                     if rschema is None:
-                        print 'dont add %s unique constraint on %s, missing %s' % (
-                            ','.join(cols), eschema, name)
+                        print('dont add %s unique constraint on %s, missing %s' % (
+                            ','.join(cols), eschema, name))
                         return False
                     if not (rschema.final or rschema.inlined):
-                        print 'dont add %s unique constraint on %s, %s is neither final nor inlined' % (
-                            ','.join(cols), eschema, name)
+                        print('dont add %s unique constraint on %s, %s is neither final nor inlined' % (
+                            ','.join(cols), eschema, name))
                         return False
                 return True
 
@@ -743,8 +739,8 @@
             rschema = self.repo.schema.rschema(attrname)
             attrtype = rschema.objects(etype)[0]
         except KeyError:
-            print 'warning: attribute %s %s is not known, skip deletion' % (
-                etype, attrname)
+            print('warning: attribute %s %s is not known, skip deletion' % (
+                etype, attrname))
         else:
             self.cmd_drop_relation_definition(etype, attrname, attrtype,
                                               commit=commit)
@@ -781,7 +777,7 @@
         instschema = self.repo.schema
         eschema = self.fs_schema.eschema(etype)
         if etype in instschema and not (eschema.final and eschema.eid is None):
-            print 'warning: %s already known, skip addition' % etype
+            print('warning: %s already known, skip addition' % etype)
             return
         confirm = self.verbosity >= 2
         groupmap = self.group_mapping()
@@ -918,7 +914,7 @@
         """
         schema = self.repo.schema
         if oldname not in schema:
-            print 'warning: entity type %s is unknown, skip renaming' % oldname
+            print('warning: entity type %s is unknown, skip renaming' % oldname)
             return
         # if merging two existing entity types
         if newname in schema:
@@ -1017,8 +1013,8 @@
         rschema = self.fs_schema.rschema(rtype)
         execute = self.cnx.execute
         if rtype in reposchema:
-            print 'warning: relation type %s is already known, skip addition' % (
-                rtype)
+            print('warning: relation type %s is already known, skip addition' % (
+                rtype))
         elif rschema.rule:
             gmap = self.group_mapping()
             ss.execschemarql(execute, rschema, ss.crschema2rql(rschema, gmap))
@@ -1098,8 +1094,8 @@
         if not rtype in self.repo.schema:
             self.cmd_add_relation_type(rtype, addrdef=False, commit=True)
         if (subjtype, objtype) in self.repo.schema.rschema(rtype).rdefs:
-            print 'warning: relation %s %s %s is already known, skip addition' % (
-                subjtype, rtype, objtype)
+            print('warning: relation %s %s %s is already known, skip addition' % (
+                subjtype, rtype, objtype))
             return
         rdef = self._get_rdef(rschema, subjtype, objtype)
         ss.execschemarql(self.cnx.execute, rdef,
@@ -1351,7 +1347,7 @@
             # remove from entity cache to avoid memory exhaustion
             del entity.cw_attr_cache[attribute]
             pb.update()
-        print
+        print()
         source.set_storage(etype, attribute, storage)
 
     def cmd_create_entity(self, etype, commit=False, **kwargs):
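
Two portability details in this file beyond the print conversions: the octal
literal 0600 must be written 0o600 (accepted by Python 2.6+ and 3 alike), and
os.chmod takes that integer mode unchanged. Quick self-contained check using a
temporary file:

    import os
    import stat
    import tempfile

    fd, backupfile = tempfile.mkstemp(suffix='.bck')
    os.close(fd)
    os.chmod(backupfile, 0o600)   # 0600 would be a SyntaxError on Python 3
    print(oct(stat.S_IMODE(os.stat(backupfile).st_mode)))   # 0600 / 0o600 on POSIX
    os.unlink(backupfile)
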
--- a/server/querier.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/server/querier.py	Mon Oct 12 10:53:35 2015 +0200
@@ -18,10 +18,15 @@
 """Helper classes to execute RQL queries on a set of sources, performing
 security checking and data aggregation.
 """
+from __future__ import print_function
+
 __docformat__ = "restructuredtext en"
 
 from itertools import repeat
 
+from six import string_types, integer_types
+from six.moves import range
+
 from rql import RQLSyntaxError, CoercionError
 from rql.stmts import Union
 from rql.nodes import ETYPE_PYOBJ_MAP, etype_from_pyobj, Relation, Exists, Not
@@ -103,13 +108,13 @@
                                                    solution, args))
                 if not user.matching_groups(rdef.get_groups('read')):
                     if DBG:
-                        print ('check_read_access: %s %s does not match %s' %
-                               (rdef, user.groups, rdef.get_groups('read')))
+                        print('check_read_access: %s %s does not match %s' %
+                              (rdef, user.groups, rdef.get_groups('read')))
                     # XXX rqlexpr not allowed
                     raise Unauthorized('read', rel.r_type)
                 if DBG:
-                    print ('check_read_access: %s %s matches %s' %
-                           (rdef, user.groups, rdef.get_groups('read')))
+                    print('check_read_access: %s %s matches %s' %
+                          (rdef, user.groups, rdef.get_groups('read')))
 
 def get_local_checks(cnx, rqlst, solution):
     """Check that the given user has credentials to access data read by the
@@ -138,8 +143,8 @@
                 ex = Unauthorized('read', solution[varname])
                 ex.var = varname
                 if DBG:
-                    print ('check_read_access: %s %s %s %s' %
-                           (varname, eschema, user.groups, eschema.get_groups('read')))
+                    print('check_read_access: %s %s %s %s' %
+                          (varname, eschema, user.groups, eschema.get_groups('read')))
                 raise ex
             # don't insert security on variable only referenced by 'NOT X relation Y' or
             # 'NOT EXISTS(X relation Y)'
@@ -446,13 +451,13 @@
         relations = {}
         for subj, rtype, obj in self.relation_defs():
             # if a string is given into args instead of an int, we get it here
-            if isinstance(subj, basestring):
+            if isinstance(subj, string_types):
                 subj = int(subj)
-            elif not isinstance(subj, (int, long)):
+            elif not isinstance(subj, integer_types):
                 subj = subj.entity.eid
-            if isinstance(obj, basestring):
+            if isinstance(obj, string_types):
                 obj = int(obj)
-            elif not isinstance(obj, (int, long)):
+            elif not isinstance(obj, integer_types):
                 obj = obj.entity.eid
             if repo.schema.rschema(rtype).inlined:
                 if subj not in edited_entities:
@@ -539,8 +544,8 @@
         """
         if server.DEBUG & (server.DBG_RQL | server.DBG_SQL):
             if server.DEBUG & (server.DBG_MORE | server.DBG_SQL):
-                print '*'*80
-            print 'querier input', repr(rql), repr(args)
+                print('*'*80)
+            print('querier input', repr(rql), repr(args))
         # parse the query and binds variables
         cachekey = (rql,)
         try:
@@ -641,7 +646,7 @@
                 # so compute description manually even if there is only
                 # one solution
                 basedescr = [None] * len(plan.selected)
-                todetermine = zip(xrange(len(plan.selected)), repeat(False))
+                todetermine = zip(range(len(plan.selected)), repeat(False))
                 descr = _build_descr(cnx, results, basedescr, todetermine)
             # FIXME: get number of affected entities / relations on non
             # selection queries ?
@@ -668,7 +673,7 @@
     unstables = rqlst.get_variable_indices()
     basedescr = []
     todetermine = []
-    for i in xrange(len(rqlst.children[0].selection)):
+    for i in range(len(rqlst.children[0].selection)):
         ttype = _selection_idx_type(i, rqlst, args)
         if ttype is None or ttype == 'Any':
             ttype = None
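
One behavioural point worth keeping in mind with these substitutions: range()
under six.moves and zip() on Python 3 return single-pass iterables, so any
value that gets re-iterated, indexed or len()-ed needs an explicit list().
Generic illustration, not tied to _build_descr:

    from itertools import repeat
    from six.moves import range, zip

    selected = ['X', 'Y', 'Z']

    # lazy: fine as long as it is consumed exactly once
    todetermine = zip(range(len(selected)), repeat(False))
    assert [idx for idx, _ in todetermine] == [0, 1, 2]
    assert list(todetermine) == []   # already exhausted (izip/zip are lazy)

    # materialize when several passes or indexing are needed
    todetermine = list(zip(range(len(selected)), repeat(False)))
    assert todetermine[0] == (0, False) and len(todetermine) == 3
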
--- a/server/repository.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/server/repository.py	Mon Oct 12 10:53:35 2015 +0200
@@ -25,15 +25,18 @@
   point to a cubicweb instance.
 * handles session management
 """
+from __future__ import print_function
+
 __docformat__ = "restructuredtext en"
 
 import threading
-import Queue
 from warnings import warn
 from itertools import chain
 from time import time, localtime, strftime
 from contextlib import contextmanager
 
+from six.moves import range, queue
+
 from logilab.common.decorators import cached, clear_cache
 from logilab.common.deprecation import deprecated
 
@@ -186,8 +189,8 @@
         # registry hook to fix user class on registry reload
         @onevent('after-registry-reload', self)
         def fix_user_classes(self):
-            # After registery reload the 'CWUser' class used for CWEtype
-            # changed.  To any existing user object have a different class than
+            # After registry reload the 'CWUser' class used for CWEtype
+            # changed.  So any existing user object has a different class than
             # the new loaded one. We are hot fixing this.
             usercls = self.vreg['etypes'].etype_class('CWUser')
             for session in self._sessions.itervalues():
@@ -197,7 +200,7 @@
     def init_cnxset_pool(self):
         """should be called bootstrap_repository, as this is what it does"""
         config = self.config
-        self._cnxsets_pool = Queue.Queue()
+        self._cnxsets_pool = queue.Queue()
         # 0. init a cnxset that will be used to fetch bootstrap information from
         #    the database
         self._cnxsets_pool.put_nowait(self.system_source.wrapped_connection())
@@ -240,7 +243,7 @@
         #    proper initialization
         self._get_cnxset().close(True)
         self.cnxsets = [] # list of available cnxsets (can't iterate on a Queue)
-        for i in xrange(config['connections-pool-size']):
+        for i in range(config['connections-pool-size']):
             self.cnxsets.append(self.system_source.wrapped_connection())
             self._cnxsets_pool.put_nowait(self.cnxsets[-1])
 
@@ -377,7 +380,7 @@
     def _get_cnxset(self):
         try:
             return self._cnxsets_pool.get(True, timeout=5)
-        except Queue.Empty:
+        except queue.Empty:
             raise Exception('no connections set available after 5 secs, probably either a '
                             'bug in code (too many uncommited/rolled back '
                             'connections) or too much load on the server (in '
@@ -387,13 +390,6 @@
     def _free_cnxset(self, cnxset):
         self._cnxsets_pool.put_nowait(cnxset)
 
-    def pinfo(self):
-        # XXX: session.cnxset is accessed from a local storage, would be interesting
-        #      to see if there is a cnxset set in any thread specific data)
-        return '%s: %s (%s)' % (self._cnxsets_pool.qsize(),
-                                ','.join(session.user.login for session in self._sessions.itervalues()
-                                         if session.cnxset),
-                                threading.currentThread())
     def shutdown(self):
         """called on server stop event to properly close opened sessions and
         connections
@@ -919,7 +915,7 @@
         # set caches asap
         extid = self.init_entity_caches(cnx, entity, source)
         if server.DEBUG & server.DBG_REPO:
-            print 'ADD entity', self, entity.cw_etype, entity.eid, edited
+            print('ADD entity', self, entity.cw_etype, entity.eid, edited)
         prefill_entity_caches(entity)
         self.hm.call_hooks('before_add_entity', cnx, entity=entity)
         relations = preprocess_inlined_relations(cnx, entity)
@@ -950,8 +946,8 @@
         """
         entity = edited.entity
         if server.DEBUG & server.DBG_REPO:
-            print 'UPDATE entity', entity.cw_etype, entity.eid, \
-                  entity.cw_attr_cache, edited
+            print('UPDATE entity', entity.cw_etype, entity.eid,
+                  entity.cw_attr_cache, edited)
         hm = self.hm
         eschema = entity.e_schema
         cnx.set_entity_cache(entity)
@@ -1045,7 +1041,7 @@
         source = self.system_source
         for etype, entities in data_by_etype.iteritems():
             if server.DEBUG & server.DBG_REPO:
-                print 'DELETE entities', etype, [entity.eid for entity in entities]
+                print('DELETE entities', etype, [entity.eid for entity in entities])
             self.hm.call_hooks('before_delete_entity', cnx, entities=entities)
             self._delete_cascade_multi(cnx, entities)
             source.delete_entities(cnx, entities)
@@ -1070,7 +1066,7 @@
         for rtype, eids_subj_obj in relations.iteritems():
             if server.DEBUG & server.DBG_REPO:
                 for subjeid, objeid in eids_subj_obj:
-                    print 'ADD relation', subjeid, rtype, objeid
+                    print('ADD relation', subjeid, rtype, objeid)
             for subjeid, objeid in eids_subj_obj:
                 if rtype in relations_by_rtype:
                     relations_by_rtype[rtype].append((subjeid, objeid))
@@ -1120,7 +1116,7 @@
     def glob_delete_relation(self, cnx, subject, rtype, object):
         """delete a relation from the repository"""
         if server.DEBUG & server.DBG_REPO:
-            print 'DELETE relation', subject, rtype, object
+            print('DELETE relation', subject, rtype, object)
         source = self.system_source
         self.hm.call_hooks('before_delete_relation', cnx,
                            eidfrom=subject, rtype=rtype, eidto=object)
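
six.moves.queue resolves to the Queue module on Python 2 and queue on
Python 3, including the Empty exception used for the pool timeout above. A
minimal sketch of the connections-pool idiom (pool size, timeout and the
string placeholders are illustrative only):

    from six.moves import queue, range

    pool = queue.Queue()
    for i in range(4):
        pool.put_nowait('cnxset-%d' % i)   # stand-ins for wrapped connections

    def get_cnxset(timeout=5):
        try:
            return pool.get(True, timeout=timeout)
        except queue.Empty:
            raise Exception('no connections set available after %s secs' % timeout)

    cnxset = get_cnxset()
    pool.put_nowait(cnxset)                # equivalent of _free_cnxset
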
--- a/server/rqlannotation.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/server/rqlannotation.py	Mon Oct 12 10:53:35 2015 +0200
@@ -18,6 +18,7 @@
 """Functions to add additional annotations on a rql syntax tree to ease later
 code generation.
 """
+from __future__ import print_function
 
 __docformat__ = "restructuredtext en"
 
@@ -354,9 +355,9 @@
                     continue
 
     def _debug_print(self):
-        print 'varsols', dict((x, sorted(str(v) for v in values))
-                               for x, values in self.varsols.iteritems())
-        print 'ambiguous vars', sorted(self.ambiguousvars)
+        print('varsols', dict((x, sorted(str(v) for v in values))
+                               for x, values in self.varsols.iteritems()))
+        print('ambiguous vars', sorted(self.ambiguousvars))
 
     def set_rel_constraint(self, term, rel, etypes_func):
         if isinstance(term, VariableRef) and self.is_ambiguous(term.variable):
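
Note that the rewritten debug helper still calls .iteritems(), which does not
exist on Python 3 dictionaries; a fully portable spelling would go through
six.iteritems (or plain .items()). A hedged sketch, not part of this
changeset:

    from __future__ import print_function
    from six import iteritems

    varsols = {'X': set(['CWUser', 'CWGroup']), 'Y': set(['State'])}
    pretty = dict((var, sorted(str(v) for v in values))
                  for var, values in iteritems(varsols))
    print('varsols', pretty)
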
--- a/server/schemaserial.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/server/schemaserial.py	Mon Oct 12 10:53:35 2015 +0200
@@ -16,6 +16,7 @@
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """functions for schema / permissions (de)serialization using RQL"""
+from __future__ import print_function
 
 __docformat__ = "restructuredtext en"
 
@@ -23,6 +24,8 @@
 import json
 import sys
 
+from six import string_types
+
 from logilab.common.shellutils import ProgressBar, DummyProgressBar
 
 from yams import BadSchemaDefinition, schema as schemamod, buildobjs as ybo
@@ -49,11 +52,11 @@
         return res
     missing = [g for g in ('owners', 'managers', 'users', 'guests') if not g in res]
     if missing:
-        print 'some native groups are missing but the following groups have been found:'
-        print '\n'.join('* %s (%s)' % (n, eid) for n, eid in res.items())
-        print
-        print 'enter the eid of a to group to map to each missing native group'
-        print 'or just type enter to skip permissions granted to a group'
+        print('some native groups are missing but the following groups have been found:')
+        print('\n'.join('* %s (%s)' % (n, eid) for n, eid in res.items()))
+        print()
+        print('enter the eid of a group to map to each missing native group')
+        print('or just type enter to skip permissions granted to a group')
         for group in missing:
             while True:
                 value = raw_input('eid for group %s: ' % group).strip()
@@ -62,13 +65,13 @@
                 try:
                     eid = int(value)
                 except ValueError:
-                    print 'eid should be an integer'
+                    print('eid should be an integer')
                     continue
                 for eid_ in res.values():
                     if eid == eid_:
                         break
                 else:
-                    print 'eid is not a group eid'
+                    print('eid is not a group eid')
                     continue
                 res[name] = eid
                 break
@@ -344,7 +347,7 @@
     current schema
     """
     _title = '-> storing the schema in the database '
-    print _title,
+    print(_title, end=' ')
     execute = cnx.execute
     eschemas = schema.entities()
     pb_size = (len(eschemas + schema.relations())
@@ -397,7 +400,7 @@
     for rql, kwargs in specialize2rql(schema):
         execute(rql, kwargs, build_descr=False)
         pb.update()
-    print
+    print()
 
 
 # high level serialization functions
@@ -455,8 +458,8 @@
     columnset = set()
     for columns in eschema._unique_together:
         if columns in columnset:
-            print ('schemaserial: skipping duplicate unique together %r %r' %
-                   (eschema.type, columns))
+            print('schemaserial: skipping duplicate unique together %r %r' %
+                  (eschema.type, columns))
             continue
         columnset.add(columns)
         rql, args = _uniquetogether2rql(eschema, columns)
@@ -613,14 +616,14 @@
             # may occurs when modifying persistent schema
             continue
         for group_or_rqlexpr in grantedto:
-            if isinstance(group_or_rqlexpr, basestring):
+            if isinstance(group_or_rqlexpr, string_types):
                 # group
                 try:
                     yield ('SET X %s_permission Y WHERE Y eid %%(g)s, X eid %%(x)s' % action,
                            {'g': groupmap[group_or_rqlexpr]})
                 except KeyError:
-                    print ("WARNING: group %s used in permissions for %s was ignored because it doesn't exist."
-                           " You may want to add it into a precreate.py file" % (group_or_rqlexpr, erschema))
+                    print("WARNING: group %s used in permissions for %s was ignored because it doesn't exist."
+                          " You may want to add it into a precreate.py file" % (group_or_rqlexpr, erschema))
                     continue
             else:
                 # rqlexpr
--- a/server/serverconfig.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/server/serverconfig.py	Mon Oct 12 10:53:35 2015 +0200
@@ -16,12 +16,13 @@
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """server.serverconfig definition"""
+from __future__ import print_function
 
 __docformat__ = "restructuredtext en"
 
 import sys
 from os.path import join, exists
-from StringIO import StringIO
+from io import StringIO
 
 import logilab.common.configuration as lgconfig
 from logilab.common.decorators import cached
@@ -234,7 +235,7 @@
 
     def bootstrap_cubes(self):
         from logilab.common.textutils import splitstrip
-        for line in file(join(self.apphome, 'bootstrap_cubes')):
+        for line in open(join(self.apphome, 'bootstrap_cubes')):
             line = line.strip()
             if not line or line.startswith('#'):
                 continue
@@ -245,7 +246,7 @@
             self.init_cubes(())
 
     def write_bootstrap_cubes_file(self, cubes):
-        stream = file(join(self.apphome, 'bootstrap_cubes'), 'w')
+        stream = open(join(self.apphome, 'bootstrap_cubes'), 'w')
         stream.write('# this is a generated file only used for bootstraping\n')
         stream.write('# you should not have to edit this\n')
         stream.write('%s\n' % ','.join(cubes))
@@ -276,7 +277,7 @@
                 assert len(self.sources_mode) == 1
                 if source.connect_for_migration:
                     return True
-                print 'not connecting to source', source.uri, 'during migration'
+                print('not connecting to source', source.uri, 'during migration')
                 return False
             if 'all' in self.sources_mode:
                 assert len(self.sources_mode) == 1
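
Two details of this hunk beyond the print conversions: the file() builtin is
gone on Python 3 (open() works everywhere), and io.StringIO accepts only
unicode text, even on Python 2, unlike the old StringIO.StringIO. Small
sketch (file path and content are illustrative):

    from io import StringIO

    stream = StringIO()
    stream.write(u'# generated file, do not edit\n')   # must be text, not bytes
    stream.write(u','.join([u'cube1', u'cube2']) + u'\n')
    print(stream.getvalue())

    # file(path) is Python 2 only; the portable spelling is open(path):
    # with open('bootstrap_cubes') as f:
    #     cubes = [l.strip() for l in f if l.strip() and not l.startswith('#')]
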
--- a/server/serverctl.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/server/serverctl.py	Mon Oct 12 10:53:35 2015 +0200
@@ -16,6 +16,7 @@
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """cubicweb-ctl commands and command handlers specific to the repository"""
+from __future__ import print_function
 
 __docformat__ = 'restructuredtext en'
 
@@ -28,6 +29,8 @@
 import logging
 import subprocess
 
+from six import string_types
+
 from logilab.common import nullobject
 from logilab.common.configuration import Configuration, merge_options
 from logilab.common.shellutils import ASK, generate_password
@@ -55,25 +58,25 @@
     driver = source['db-driver']
     dbhelper = get_db_helper(driver)
     if interactive:
-        print '-> connecting to %s database' % driver,
+        print('-> connecting to %s database' % driver, end=' ')
         if dbhost:
-            print '%s@%s' % (dbname, dbhost),
+            print('%s@%s' % (dbname, dbhost), end=' ')
         else:
-            print dbname,
+            print(dbname, end=' ')
     if dbhelper.users_support:
         if not interactive or (not special_privs and source.get('db-user')):
             user = source.get('db-user', os.environ.get('USER', ''))
             if interactive:
-                print 'as', user
+                print('as', user)
             password = source.get('db-password')
         else:
-            print
+            print()
             if special_privs:
-                print 'WARNING'
+                print('WARNING')
                 print ('the user will need the following special access rights '
                        'on the database:')
-                print special_privs
-                print
+                print(special_privs)
+                print()
             default_user = source.get('db-user', os.environ.get('USER', ''))
             user = raw_input('Connect as user ? [%r]: ' % default_user)
             user = user.strip() or default_user
@@ -146,7 +149,7 @@
             cnx = repoapi.connect(repo, login, password=pwd)
             return repo, cnx
         except AuthenticationError:
-            print '-> Error: wrong user/password.'
+            print('-> Error: wrong user/password.')
             # reset cubes else we'll have an assertion error on next retry
             config._cubes = None
         login, pwd = manager_userpasswd()
@@ -164,9 +167,9 @@
         """
         config = self.config
         if not automatic:
-            print underline_title('Configuring the repository')
+            print(underline_title('Configuring the repository'))
             config.input_config('email', inputlevel)
-            print '\n'+underline_title('Configuring the sources')
+            print('\n'+underline_title('Configuring the sources'))
         sourcesfile = config.sources_file()
         # hack to make Method('default_instance_id') usable in db option defs
         # (in native.py)
@@ -174,12 +177,12 @@
                                       options=SOURCE_TYPES['native'].options)
         if not automatic:
             sconfig.input_config(inputlevel=inputlevel)
-            print
+            print()
         sourcescfg = {'system': sconfig}
         if automatic:
             # XXX modify a copy
             password = generate_password()
-            print '-> set administrator account to admin / %s' % password
+            print('-> set administrator account to admin / %s' % password)
             USER_OPTIONS[1][1]['default'] = password
             sconfig = Configuration(options=USER_OPTIONS)
         else:
@@ -197,8 +200,8 @@
             CWCTL.run(['db-create', '--config-level', str(inputlevel),
                        self.config.appid])
         else:
-            print ('-> nevermind, you can do it later with '
-                   '"cubicweb-ctl db-create %s".' % self.config.appid)
+            print('-> nevermind, you can do it later with '
+                  '"cubicweb-ctl db-create %s".' % self.config.appid)
 
 
 @contextmanager
@@ -242,26 +245,26 @@
         with db_transaction(source, privilege='DROP SCHEMA') as cursor:
             helper = get_db_helper(source['db-driver'])
             helper.drop_schema(cursor, db_namespace)
-            print '-> database schema %s dropped' % db_namespace
+            print('-> database schema %s dropped' % db_namespace)
 
     def _drop_database(self, source):
         dbname = source['db-name']
         if source['db-driver'] == 'sqlite':
-            print 'deleting database file %(db-name)s' % source
+            print('deleting database file %(db-name)s' % source)
             os.unlink(source['db-name'])
-            print '-> database %(db-name)s dropped.' % source
+            print('-> database %(db-name)s dropped.' % source)
         else:
             helper = get_db_helper(source['db-driver'])
             with db_sys_transaction(source, privilege='DROP DATABASE') as cursor:
-                print 'dropping database %(db-name)s' % source
+                print('dropping database %(db-name)s' % source)
                 cursor.execute('DROP DATABASE "%(db-name)s"' % source)
-                print '-> database %(db-name)s dropped.' % source
+                print('-> database %(db-name)s dropped.' % source)
 
     def _drop_user(self, source):
         user = source['db-user'] or None
         if user is not None:
             with db_sys_transaction(source, privilege='DROP USER') as cursor:
-                print 'dropping user %s' % user
+                print('dropping user %s' % user)
                 cursor.execute('DROP USER %s' % user)
 
     def _cleanup_steps(self, source):
@@ -288,7 +291,7 @@
                 try:
                     step(source)
                 except Exception as exc:
-                    print 'ERROR', exc
+                    print('ERROR', exc)
                     if ASK.confirm('An error occurred. Continue anyway?',
                                    default_is_yes=False):
                         continue
@@ -357,7 +360,7 @@
                 ASK.confirm('Database %s already exists. Drop it?' % dbname)):
                 os.unlink(dbname)
         elif self.config.create_db:
-            print '\n'+underline_title('Creating the system database')
+            print('\n'+underline_title('Creating the system database'))
             # connect on the dbms system base to create our base
             dbcnx = _db_sys_cnx(source, 'CREATE/DROP DATABASE and / or USER',
                                 interactive=not automatic)
@@ -368,17 +371,17 @@
                     if not helper.user_exists(cursor, user) and (automatic or \
                            ASK.confirm('Create db user %s ?' % user, default_is_yes=False)):
                         helper.create_user(source['db-user'], source.get('db-password'))
-                        print '-> user %s created.' % user
+                        print('-> user %s created.' % user)
                 if dbname in helper.list_databases(cursor):
                     if automatic or ASK.confirm('Database %s already exists -- do you want to drop it ?' % dbname):
                         cursor.execute('DROP DATABASE "%s"' % dbname)
                     else:
-                        print ('you may want to run "cubicweb-ctl db-init '
-                               '--drop %s" manually to continue.' % config.appid)
+                        print('you may want to run "cubicweb-ctl db-init '
+                              '--drop %s" manually to continue.' % config.appid)
                         return
                 createdb(helper, source, dbcnx, cursor)
                 dbcnx.commit()
-                print '-> database %s created.' % dbname
+                print('-> database %s created.' % dbname)
             except BaseException:
                 dbcnx.rollback()
                 raise
@@ -400,13 +403,13 @@
                     try:
                         helper.create_language(cursor, extlang)
                     except Exception as exc:
-                        print '-> ERROR:', exc
-                        print '-> could not create language %s, some stored procedures might be unusable' % extlang
+                        print('-> ERROR:', exc)
+                        print('-> could not create language %s, some stored procedures might be unusable' % extlang)
                         cnx.rollback()
                     else:
                         cnx.commit()
-        print '-> database for instance %s created and necessary extensions installed.' % appid
-        print
+        print('-> database for instance %s created and necessary extensions installed.' % appid)
+        print()
         if automatic:
             CWCTL.run(['db-init', '--automatic', '--config-level', '0',
                        config.appid])
@@ -414,8 +417,8 @@
             CWCTL.run(['db-init', '--config-level',
                        str(self.config.config_level), config.appid])
         else:
-            print ('-> nevermind, you can do it later with '
-                   '"cubicweb-ctl db-init %s".' % config.appid)
+            print('-> nevermind, you can do it later with '
+                  '"cubicweb-ctl db-init %s".' % config.appid)
 
 
 class InitInstanceCommand(Command):
@@ -452,7 +455,7 @@
 
     def run(self, args):
         check_options_consistency(self.config)
-        print '\n'+underline_title('Initializing the system database')
+        print('\n'+underline_title('Initializing the system database'))
         from cubicweb.server import init_repository
         appid = args[0]
         config = ServerConfiguration.config_for(appid)
@@ -506,7 +509,7 @@
                     type = raw_input('source type (%s): '
                                         % ', '.join(sorted(SOURCE_TYPES)))
                     if type not in SOURCE_TYPES:
-                        print '-> unknown source type, use one of the available types.'
+                        print('-> unknown source type, use one of the available types.')
                         continue
                     sourcemodule = SOURCE_TYPES[type].module
                     if not sourcemodule.startswith('cubicweb.'):
@@ -524,16 +527,16 @@
                                         % ', '.join(sorted(repo.vreg['parsers'])))
                     if parser in repo.vreg['parsers']:
                         break
-                    print '-> unknown parser identifier, use one of the available types.'
+                    print('-> unknown parser identifier, use one of the available types.')
                 while True:
                     sourceuri = raw_input('source identifier (a unique name used to '
                                           'tell sources apart): ').strip()
                     if not sourceuri:
-                        print '-> mandatory.'
+                        print('-> mandatory.')
                     else:
                         sourceuri = unicode(sourceuri, sys.stdin.encoding)
                         if sourceuri in used:
-                            print '-> uri already used, choose another one.'
+                            print('-> uri already used, choose another one.')
                         else:
                             break
                 url = raw_input('source URL (leave empty for none): ').strip()
@@ -583,10 +586,10 @@
             cnx.rollback()
             import traceback
             traceback.print_exc()
-            print '-> an error occurred:', ex
+            print('-> an error occurred:', ex)
         else:
             cnx.commit()
-            print '-> rights granted to %s on instance %s.' % (appid, user)
+            print('-> rights granted to %s on instance %s.' % (appid, user))
 
 
 class ResetAdminPasswordCommand(Command):
@@ -617,7 +620,7 @@
         try:
             adminlogin = sourcescfg['admin']['login']
         except KeyError:
-            print '-> Error: could not get cubicweb administrator login.'
+            print('-> Error: could not get cubicweb administrator login.')
             sys.exit(1)
         cnx = source_cnx(sourcescfg['system'])
         driver = sourcescfg['system']['db-driver']
@@ -627,9 +630,9 @@
         cursor.execute("SELECT * FROM cw_CWUser WHERE cw_login=%(l)s",
                        {'l': adminlogin})
         if not cursor.fetchall():
-            print ("-> error: admin user %r specified in sources doesn't exist "
-                   "in the database" % adminlogin)
-            print "   fix your sources file before running this command"
+            print("-> error: admin user %r specified in sources doesn't exist "
+                  "in the database" % adminlogin)
+            print("   fix your sources file before running this command")
             cnx.close()
             sys.exit(1)
         if self.config.password is None:
@@ -650,10 +653,10 @@
             cnx.rollback()
             import traceback
             traceback.print_exc()
-            print '-> an error occurred:', ex
+            print('-> an error occurred:', ex)
         else:
             cnx.commit()
-            print '-> password reset, sources file regenerated.'
+            print('-> password reset, sources file regenerated.')
         cnx.close()
 
 
@@ -666,17 +669,17 @@
     if sudo:
         dmpcmd = 'sudo %s' % (dmpcmd)
     dmpcmd = 'ssh -t %s "%s"' % (host, dmpcmd)
-    print dmpcmd
+    print(dmpcmd)
     if os.system(dmpcmd):
         raise ExecutionError('Error while dumping the database')
     if output is None:
         output = filename
     cmd = 'scp %s:/tmp/%s %s' % (host, filename, output)
-    print cmd
+    print(cmd)
     if os.system(cmd):
         raise ExecutionError('Error while retrieving the dump at /tmp/%s' % filename)
     rmcmd = 'ssh -t %s "rm -f /tmp/%s"' % (host, filename)
-    print rmcmd
+    print(rmcmd)
     if os.system(rmcmd) and not ASK.confirm(
         'An error occurred while deleting remote dump at /tmp/%s. '
         'Continue anyway?' % filename):
@@ -696,28 +699,28 @@
     config.quick_start = True
     mih = config.migration_handler(connect=False, verbosity=1)
     mih.restore_database(backupfile, drop, askconfirm=False, format=format)
-    repo = mih.repo_connect()
+    repo = mih.repo
     # version of the database
     dbversions = repo.get_versions()
     mih.shutdown()
     if not dbversions:
-        print "bad or missing version information in the database, don't upgrade file system"
+        print("bad or missing version information in the database, don't upgrade file system")
         return
     # version of installed software
     eversion = dbversions['cubicweb']
     status = instance_status(config, eversion, dbversions)
     # * database version > installed software
     if status == 'needsoftupgrade':
-        print "** The database of %s is more recent than the installed software!" % config.appid
-        print "** Upgrade your software, then migrate the database by running the command"
-        print "** 'cubicweb-ctl upgrade %s'" % config.appid
+        print("** The database of %s is more recent than the installed software!" % config.appid)
+        print("** Upgrade your software, then migrate the database by running the command")
+        print("** 'cubicweb-ctl upgrade %s'" % config.appid)
         return
     # * database version < installed software, an upgrade will be necessary
     #   anyway, just rewrite vc.conf and warn user he has to upgrade
     elif status == 'needapplupgrade':
-        print "** The database of %s is older than the installed software." % config.appid
-        print "** Migrate the database by running the command"
-        print "** 'cubicweb-ctl upgrade %s'" % config.appid
+        print("** The database of %s is older than the installed software." % config.appid)
+        print("** Migrate the database by running the command")
+        print("** 'cubicweb-ctl upgrade %s'" % config.appid)
         return
     # * database version = installed software, database version = instance fs version
     #   ok!
@@ -732,12 +735,12 @@
         try:
             softversion = config.cube_version(cube)
         except ConfigurationError:
-            print '-> Error: no cube version information for %s, please check that the cube is installed.' % cube
+            print('-> Error: no cube version information for %s, please check that the cube is installed.' % cube)
             continue
         try:
             applversion = vcconf[cube]
         except KeyError:
-            print '-> Error: no cube version information for %s in version configuration.' % cube
+            print('-> Error: no cube version information for %s in version configuration.' % cube)
             continue
         if softversion == applversion:
             continue
@@ -883,7 +886,7 @@
         _local_restore(destappid, output, not self.config.no_drop,
                        self.config.format)
         if self.config.keep_dump:
-            print '-> you can get the dump file at', output
+            print('-> you can get the dump file at', output)
         else:
             os.remove(output)
 
@@ -1003,7 +1006,7 @@
             repo.shutdown()
         for key, val in stats.iteritems():
             if val:
-                print key, ':', val
+                print(key, ':', val)
 
 
 
@@ -1019,7 +1022,7 @@
     for p in ('read', 'add', 'update', 'delete'):
         rule = perms.get(p)
         if rule:
-            perms[p] = tuple(str(x) if isinstance(x, basestring) else x
+            perms[p] = tuple(str(x) if isinstance(x, string_types) else x
                              for x in rule)
     return perms, perms in defaultrelperms or perms in defaulteperms
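
The serverctl.py conversions above all follow the same pattern; here is a standalone sketch (illustrative names only, not part of the patch) of the print_function and six.string_types idioms they rely on:

from __future__ import print_function

from six import string_types

def describe(value):
    # str/unicode on Python 2, str on Python 3 -- both match six.string_types
    if isinstance(value, string_types):
        print('-> text:', value, end=' ')   # trailing-comma print becomes end=' '
        print('(%d chars)' % len(value))
    else:
        print('-> other:', repr(value))

describe(u'admin')
describe(42)
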
 
--- a/server/session.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/server/session.py	Mon Oct 12 10:53:35 2015 +0200
@@ -16,6 +16,8 @@
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """Repository users' and internal' sessions."""
+from __future__ import print_function
+
 __docformat__ = "restructuredtext en"
 
 import sys
@@ -874,7 +876,7 @@
                 processed = []
                 self.commit_state = 'precommit'
                 if debug:
-                    print self.commit_state, '*' * 20
+                    print(self.commit_state, '*' * 20)
                 try:
                     with self.running_hooks_ops():
                         while self.pending_operations:
@@ -882,7 +884,7 @@
                             operation.processed = 'precommit'
                             processed.append(operation)
                             if debug:
-                                print operation
+                                print(operation)
                             operation.handle_event('precommit_event')
                     self.pending_operations[:] = processed
                     self.debug('precommit transaction %s done', self.connectionid)
@@ -899,11 +901,11 @@
                     # and revertcommit, that will be enough in most cases.
                     operation.failed = True
                     if debug:
-                        print self.commit_state, '*' * 20
+                        print(self.commit_state, '*' * 20)
                     with self.running_hooks_ops():
                         for operation in reversed(processed):
                             if debug:
-                                print operation
+                                print(operation)
                             try:
                                 operation.handle_event('revertprecommit_event')
                             except BaseException:
@@ -917,12 +919,12 @@
                 self.cnxset.commit()
                 self.commit_state = 'postcommit'
                 if debug:
-                    print self.commit_state, '*' * 20
+                    print(self.commit_state, '*' * 20)
                 with self.running_hooks_ops():
                     while self.pending_operations:
                         operation = self.pending_operations.pop(0)
                         if debug:
-                            print operation
+                            print(operation)
                         operation.processed = 'postcommit'
                         try:
                             operation.handle_event('postcommit_event')
--- a/server/sources/__init__.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/server/sources/__init__.py	Mon Oct 12 10:53:35 2015 +0200
@@ -16,11 +16,15 @@
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """cubicweb server sources support"""
+from __future__ import print_function
 
 __docformat__ = "restructuredtext en"
 
 from time import time
 from logging import getLogger
+from base64 import b64decode
+
+from six import text_type
 
 from logilab.common import configuration
 from logilab.common.deprecation import deprecated
@@ -35,25 +39,25 @@
 def dbg_st_search(uri, union, varmap, args, cachekey=None, prefix='rql for'):
     if server.DEBUG & server.DBG_RQL:
         global t
-        print '  %s %s source: %s' % (prefix, uri, repr(union.as_string()))
+        print('  %s %s source: %s' % (prefix, uri, repr(union.as_string())))
         t = time()
         if varmap:
-            print '    using varmap', varmap
+            print('    using varmap', varmap)
         if server.DEBUG & server.DBG_MORE:
-            print '    args', repr(args)
-            print '    cache key', cachekey
-            print '    solutions', ','.join(str(s.solutions)
-                                            for s in union.children)
+            print('    args', repr(args))
+            print('    cache key', cachekey)
+            print('    solutions', ','.join(str(s.solutions)
+                                            for s in union.children))
     # return true so it can be used as assertion (and so be killed by python -O)
     return True
 
 def dbg_results(results):
     if server.DEBUG & server.DBG_RQL:
         if len(results) > 10:
-            print '  -->', results[:10], '...', len(results),
+            print('  -->', results[:10], '...', len(results), end=' ')
         else:
-            print '  -->', results,
-        print 'time: ', time() - t
+            print('  -->', results, end=' ')
+        print('time: ', time() - t)
     # return true so it can be used as assertion (and so be killed by python -O)
     return True
 
@@ -140,7 +144,7 @@
         pass
 
     @classmethod
-    def check_conf_dict(cls, eid, confdict, _=unicode, fail_if_unknown=True):
+    def check_conf_dict(cls, eid, confdict, _=text_type, fail_if_unknown=True):
         """check configuration of source entity. Return config dict properly
         typed with defaults set.
         """
@@ -157,7 +161,7 @@
                 try:
                     value = configuration._validate(value, optdict, optname)
                 except Exception as ex:
-                    msg = unicode(ex) # XXX internationalization
+                    msg = text_type(ex) # XXX internationalization
                     raise ValidationError(eid, {role_name('config', 'subject'): msg})
             processed[optname] = value
         # cw < 3.10 bw compat
@@ -199,6 +203,12 @@
         else:
             self.urls = []
 
+    @staticmethod
+    def decode_extid(extid):
+        if extid is None:
+            return extid
+        return b64decode(extid)
+
     # source initialization / finalization #####################################
 
     def set_schema(self, schema):
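
The decode_extid() helper added above pairs with the base64-encoded extids stored in the entities table; a minimal round-trip sketch (the encode side here is assumed for illustration, not taken from this changeset):

from base64 import b64decode, b64encode

def encode_extid(extid):
    # assumed counterpart used on the storage side; None means "no external id"
    return b64encode(extid) if extid is not None else None

def decode_extid(extid):
    if extid is None:
        return extid
    return b64decode(extid)

assert decode_extid(encode_extid(b'http://example.org/item/1')) == b'http://example.org/item/1'
assert decode_extid(None) is None
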
--- a/server/sources/datafeed.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/server/sources/datafeed.py	Mon Oct 12 10:53:35 2015 +0200
@@ -19,13 +19,15 @@
 database
 """
 
-import urllib2
 import StringIO
 from os.path import exists
 from datetime import datetime, timedelta
-from base64 import b64decode
-from cookielib import CookieJar
-import urlparse
+
+from six.moves.urllib.parse import urlparse
+from six.moves.urllib.request import Request, build_opener, HTTPCookieProcessor
+from six.moves.urllib.error import HTTPError
+from six.moves.http_cookiejar import CookieJar
+
 from lxml import etree
 
 from cubicweb import RegistryNotFound, ObjectNotFound, ValidationError, UnknownEid
@@ -282,7 +284,7 @@
         sql = ('SELECT extid, eid, type FROM entities, cw_source_relation '
                'WHERE entities.eid=cw_source_relation.eid_from '
                'AND cw_source_relation.eid_to=%s' % self.eid)
-        return dict((b64decode(uri), (eid, type))
+        return dict((self.decode_extid(uri), (eid, type))
                     for uri, eid, type in cnx.system_sql(sql).fetchall())
 
     def init_import_log(self, cnx, **kwargs):
@@ -328,7 +330,7 @@
         For http URLs, it will try to find a cwclientlib config entry
         (if available) and use it as requester.
         """
-        purl = urlparse.urlparse(url)
+        purl = urlparse(url)
         if purl.scheme == 'file':
             return URLLibResponseAdapter(open(url[7:]), url)
 
@@ -354,7 +356,7 @@
         # no chance with cwclientlib, fall back to former implementation
         if purl.scheme in ('http', 'https'):
             self.source.info('GET %s', url)
-            req = urllib2.Request(url)
+            req = Request(url)
             return _OPENER.open(req, timeout=self.source.http_timeout)
 
         # url is probably plain content
@@ -530,10 +532,10 @@
             self.source.debug(str(exc))
 
         # no chance with cwclientlib, fall back to former implementation
-        if urlparse.urlparse(url).scheme in ('http', 'https'):
+        if urlparse(url).scheme in ('http', 'https'):
             try:
                 _OPENER.open(url, timeout=self.source.http_timeout)
-            except urllib2.HTTPError as ex:
+            except HTTPError as ex:
                 if ex.code == 404:
                     return True
         return False
@@ -560,10 +562,10 @@
         return Message(StringIO.StringIO())
 
 # use a cookie enabled opener to use session cookie if any
-_OPENER = urllib2.build_opener()
+_OPENER = build_opener()
 try:
     from logilab.common import urllib2ext
     _OPENER.add_handler(urllib2ext.HTTPGssapiAuthHandler())
 except ImportError: # python-kerberos not available
     pass
-_OPENER.add_handler(urllib2.HTTPCookieProcessor(CookieJar()))
+_OPENER.add_handler(HTTPCookieProcessor(CookieJar()))
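
A condensed sketch of the six.moves URL handling that datafeed.py now relies on; fetch() and its 404 handling below are illustrative, not the parser's actual API:

from six.moves.urllib.parse import urlparse
from six.moves.urllib.request import Request, build_opener, HTTPCookieProcessor
from six.moves.urllib.error import HTTPError
from six.moves.http_cookiejar import CookieJar

# cookie-enabled opener, as with the module-level _OPENER above
opener = build_opener(HTTPCookieProcessor(CookieJar()))

def fetch(url, timeout=10):
    if urlparse(url).scheme not in ('http', 'https'):
        raise ValueError('unsupported scheme in %r' % url)
    try:
        return opener.open(Request(url), timeout=timeout)
    except HTTPError as exc:
        if exc.code == 404:
            return None
        raise
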
--- a/server/sources/ldapfeed.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/server/sources/ldapfeed.py	Mon Oct 12 10:53:35 2015 +0200
@@ -21,6 +21,8 @@
 
 from datetime import datetime
 
+from six import string_types
+
 import ldap
 from ldap.ldapobject import ReconnectLDAPObject
 from ldap.filter import filter_format
@@ -376,7 +378,7 @@
                     itemdict[key] = value
         # we expect memberUid to be a list of user ids, make sure of it
         member = self.group_rev_attrs['member']
-        if isinstance(itemdict.get(member), basestring):
+        if isinstance(itemdict.get(member), string_types):
             itemdict[member] = [itemdict[member]]
         return itemdict
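
The six.string_types check above normalizes LDAP attribute values that may arrive either as a single string or as a list; a tiny sketch of that idiom (names illustrative):

from six import string_types

def as_list(value):
    # LDAP may hand back either a scalar or a list of values
    if isinstance(value, string_types):
        return [value]
    return value

assert as_list('uid=admin') == ['uid=admin']
assert as_list(['uid=a', 'uid=b']) == ['uid=a', 'uid=b']
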
 
--- a/server/sources/native.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/server/sources/native.py	Mon Oct 12 10:53:35 2015 +0200
@@ -23,13 +23,13 @@
   string. This is because it should actually be Bytes but we want an index on
   it for fast querying.
 """
+from __future__ import print_function
+
 __docformat__ = "restructuredtext en"
 
-from cPickle import loads, dumps
-import cPickle as pickle
 from threading import Lock
 from datetime import datetime
-from base64 import b64decode, b64encode
+from base64 import b64encode
 from contextlib import contextmanager
 from os.path import basename
 import re
@@ -38,6 +38,9 @@
 import logging
 import sys
 
+from six import string_types
+from six.moves import range, cPickle as pickle
+
 from logilab.common.decorators import cached, clear_cache
 from logilab.common.configuration import Method
 from logilab.common.shellutils import getlogin
@@ -76,12 +79,12 @@
         it's a function just so that it shows up in profiling
         """
         if server.DEBUG & server.DBG_SQL:
-            print 'exec', query, args
+            print('exec', query, args)
         try:
             self.cu.execute(str(query), args)
         except Exception as ex:
-            print "sql: %r\n args: %s\ndbms message: %r" % (
-                query, args, ex.args[0])
+            print("sql: %r\n args: %s\ndbms message: %r" % (
+                query, args, ex.args[0]))
             raise
 
     def fetchall(self):
@@ -556,7 +559,7 @@
                 sql, qargs, cbs = self._rql_sqlgen.generate(union, args, varmap)
                 self._cache[cachekey] = sql, qargs, cbs
         args = self.merge_args(args, qargs)
-        assert isinstance(sql, basestring), repr(sql)
+        assert isinstance(sql, string_types), repr(sql)
         cursor = self.doexec(cnx, sql, args)
         results = self.process_result(cursor, cnx, cbs)
         assert dbg_results(results)
@@ -621,7 +624,7 @@
                 changes = self._save_attrs(cnx, entity, attrs)
                 self._record_tx_action(cnx, 'tx_entity_actions', u'U',
                                        etype=unicode(entity.cw_etype), eid=entity.eid,
-                                       changes=self._binary(dumps(changes)))
+                                       changes=self._binary(pickle.dumps(changes)))
             sql = self.sqlgen.update(SQL_PREFIX + entity.cw_etype, attrs,
                                      ['cw_eid'])
             self.doexec(cnx, sql, attrs)
@@ -636,7 +639,7 @@
                 changes = self._save_attrs(cnx, entity, attrs)
                 self._record_tx_action(cnx, 'tx_entity_actions', u'D',
                                        etype=unicode(entity.cw_etype), eid=entity.eid,
-                                       changes=self._binary(dumps(changes)))
+                                       changes=self._binary(pickle.dumps(changes)))
             attrs = {'cw_eid': entity.eid}
             sql = self.sqlgen.delete(SQL_PREFIX + entity.cw_etype, attrs)
             self.doexec(cnx, sql, attrs)
@@ -708,7 +711,7 @@
         """
         cursor = cnx.cnxset.cu
         if server.DEBUG & server.DBG_SQL:
-            print 'exec', query, args, cnx.cnxset.cnx
+            print('exec', query, args, cnx.cnxset.cnx)
         try:
             # str(query) to avoid error if it's a unicode string
             cursor.execute(str(query), args)
@@ -767,7 +770,7 @@
         it's a function just so that it shows up in profiling
         """
         if server.DEBUG & server.DBG_SQL:
-            print 'execmany', query, 'with', len(args), 'arguments', cnx.cnxset.cnx
+            print('execmany', query, 'with', len(args), 'arguments', cnx.cnxset.cnx)
         cursor = cnx.cnxset.cu
         try:
             # str(query) to avoid error if it's a unicode string
@@ -852,10 +855,9 @@
         """return a tuple (type, extid, source) for the entity with id <eid>"""
         sql = 'SELECT type, extid, asource FROM entities WHERE eid=%s' % eid
         res = self._eid_type_source(cnx, eid, sql)
-        if res[-2] is not None:
-            if not isinstance(res, list):
-                res = list(res)
-            res[-2] = b64decode(res[-2])
+        if not isinstance(res, list):
+            res = list(res)
+        res[-2] = self.decode_extid(res[-2])
         return res
 
     def eid_type_source_pre_131(self, cnx, eid):
@@ -864,8 +866,7 @@
         res = self._eid_type_source(cnx, eid, sql)
         if not isinstance(res, list):
             res = list(res)
-        if res[-1] is not None:
-            res[-1] = b64decode(res[-1])
+        res[-1] = self.decode_extid(res[-1])
         res.append("system")
         return res
 
@@ -1044,7 +1045,7 @@
                                   'etype', 'eid', 'changes'))
         with cnx.ensure_cnx_set:
             cu = self.doexec(cnx, sql, restr)
-            actions = [tx.EntityAction(a,p,o,et,e,c and loads(self.binary_to_str(c)))
+            actions = [tx.EntityAction(a,p,o,et,e,c and pickle.loads(self.binary_to_str(c)))
                        for a,p,o,et,e,c in cu.fetchall()]
         sql = self.sqlgen.select('tx_relation_actions', restr,
                                  ('txa_action', 'txa_public', 'txa_order',
@@ -1692,7 +1693,7 @@
         self.logger.info('number of rows: %d', rowcount)
         blocksize = self.blocksize
         if rowcount > 0:
-            for i, start in enumerate(xrange(0, rowcount, blocksize)):
+            for i, start in enumerate(range(0, rowcount, blocksize)):
                 rows = list(itertools.islice(rows_iterator, blocksize))
                 serialized = self._serialize(table, columns, rows)
                 archive.writestr('tables/%s.%04d' % (table, i), serialized)
@@ -1713,7 +1714,7 @@
         return tuple(columns), rows
 
     def _serialize(self, name, columns, rows):
-        return dumps((name, columns, rows), pickle.HIGHEST_PROTOCOL)
+        return pickle.dumps((name, columns, rows), pickle.HIGHEST_PROTOCOL)
 
     def restore(self, backupfile):
         archive = zipfile.ZipFile(backupfile, 'r', allowZip64=True)
@@ -1761,7 +1762,7 @@
         return sequences, numranges, tables, table_chunks
 
     def read_sequence(self, archive, seq):
-        seqname, columns, rows = loads(archive.read('sequences/%s' % seq))
+        seqname, columns, rows = pickle.loads(archive.read('sequences/%s' % seq))
         assert seqname == seq
         assert len(rows) == 1
         assert len(rows[0]) == 1
@@ -1771,7 +1772,7 @@
         self.cnx.commit()
 
     def read_numrange(self, archive, numrange):
-        rangename, columns, rows = loads(archive.read('numrange/%s' % numrange))
+        rangename, columns, rows = pickle.loads(archive.read('numrange/%s' % numrange))
         assert rangename == numrange
         assert len(rows) == 1
         assert len(rows[0]) == 1
@@ -1786,7 +1787,7 @@
         self.cnx.commit()
         row_count = 0
         for filename in filenames:
-            tablename, columns, rows = loads(archive.read(filename))
+            tablename, columns, rows = pickle.loads(archive.read(filename))
             assert tablename == table
             if not rows:
                 continue
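
The native source now goes through six.moves.cPickle everywhere; a minimal round-trip sketch of the serialization pattern used for backup chunks (table name and rows are made up for the example):

from six.moves import cPickle as pickle

def serialize(name, columns, rows):
    return pickle.dumps((name, columns, rows), pickle.HIGHEST_PROTOCOL)

def deserialize(blob):
    return pickle.loads(blob)

name, columns, rows = deserialize(serialize('entities', ('eid', 'type'), [(1, 'CWUser')]))
assert name == 'entities' and rows == [(1, 'CWUser')]
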
--- a/server/sources/rql2sql.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/server/sources/rql2sql.py	Mon Oct 12 10:53:35 2015 +0200
@@ -51,6 +51,8 @@
 
 import threading
 
+from six.moves import range
+
 from logilab.database import FunctionDescr, SQL_FUNCTIONS_REGISTRY
 
 from rql import BadRQLQuery, CoercionError
@@ -187,13 +189,13 @@
                 thisexistssols = [newsols[0]]
                 thisexistsvars = set()
                 existssols[var.scope] = thisexistssols, thisexistsvars
-            for i in xrange(len(newsols)-1, 0, -1):
+            for i in range(len(newsols)-1, 0, -1):
                 if vtype != newsols[i][vname]:
                     thisexistssols.append(newsols.pop(i))
                     thisexistsvars.add(vname)
         else:
             # remember unstable variables
-            for i in xrange(1, len(newsols)):
+            for i in range(1, len(newsols)):
                 if vtype != newsols[i][vname]:
                     unstable.add(vname)
     if invariants:
--- a/server/sources/storages.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/server/sources/storages.py	Mon Oct 12 10:53:35 2015 +0200
@@ -44,7 +44,7 @@
       query result process of fetched attribute's value and should have the
       following prototype::
 
-        callback(self, source, session, value)
+        callback(self, source, cnx, value)
 
       where `value` is the value actually stored in the backend. None values
       will be skipped (eg callback won't be called).
@@ -92,20 +92,20 @@
     return tempfile.mkstemp(prefix=base, suffix=ext, dir=dirpath)
 
 @contextmanager
-def fsimport(session):
-    present = 'fs_importing' in session.transaction_data
-    old_value = session.transaction_data.get('fs_importing')
-    session.transaction_data['fs_importing'] = True
+def fsimport(cnx):
+    present = 'fs_importing' in cnx.transaction_data
+    old_value = cnx.transaction_data.get('fs_importing')
+    cnx.transaction_data['fs_importing'] = True
     yield
     if present:
-        session.transaction_data['fs_importing'] = old_value
+        cnx.transaction_data['fs_importing'] = old_value
     else:
-        del session.transaction_data['fs_importing']
+        del cnx.transaction_data['fs_importing']
 
 
 class BytesFileSystemStorage(Storage):
     """store Bytes attribute value on the file system"""
-    def __init__(self, defaultdir, fsencoding='utf-8', wmode=0444):
+    def __init__(self, defaultdir, fsencoding='utf-8', wmode=0o444):
         if type(defaultdir) is unicode:
             defaultdir = defaultdir.encode(fsencoding)
         self.default_directory = defaultdir
@@ -126,7 +126,7 @@
         fileobj.close()
 
 
-    def callback(self, source, session, value):
+    def callback(self, source, cnx, value):
         """sql generator callback when some attribute with a custom storage is
         accessed
         """
@@ -236,12 +236,12 @@
         """migrate an entity attribute to the storage"""
         entity.cw_edited = EditedEntity(entity, **entity.cw_attr_cache)
         self.entity_added(entity, attribute)
-        session = entity._cw
-        source = session.repo.system_source
+        cnx = entity._cw
+        source = cnx.repo.system_source
         attrs = source.preprocess_entity(entity)
         sql = source.sqlgen.update('cw_' + entity.cw_etype, attrs,
                                    ['cw_eid'])
-        source.doexec(session, sql, attrs)
+        source.doexec(cnx, sql, attrs)
         entity.cw_edited = None
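
Usage sketch for the fsimport() context manager above, assuming an open repository connection cnx; while the 'fs_importing' flag is set, a Bytes attribute receives the path of an existing file instead of its content (the entity type and attribute names are illustrative):

from cubicweb import Binary
from cubicweb.server.sources.storages import fsimport

def import_existing_file(cnx, filepath):
    # filepath is expected to be a bytes path already present on the file system
    with fsimport(cnx):
        return cnx.create_entity('File', data=Binary(filepath),
                                 data_format=u'text/plain', data_name=u'imported')
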
 
 
--- a/server/sqlutils.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/server/sqlutils.py	Mon Oct 12 10:53:35 2015 +0200
@@ -16,6 +16,7 @@
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """SQL utilities functions and classes."""
+from __future__ import print_function
 
 __docformat__ = "restructuredtext en"
 
@@ -23,10 +24,12 @@
 import re
 import subprocess
 from os.path import abspath
-from itertools import ifilter
 from logging import getLogger
 from datetime import time, datetime
 
+from six import string_types
+from six.moves import filter
+
 from logilab import database as db, common as lgc
 from logilab.common.shellutils import ProgressBar, DummyProgressBar
 from logilab.common.deprecation import deprecated
@@ -44,7 +47,7 @@
 SQL_PREFIX = 'cw_'
 
 def _run_command(cmd):
-    print ' '.join(cmd)
+    print(' '.join(cmd))
     return subprocess.call(cmd)
 
 
@@ -69,7 +72,7 @@
     else:
         execute = cursor_or_execute
     sqlstmts_as_string = False
-    if isinstance(sqlstmts, basestring):
+    if isinstance(sqlstmts, string_types):
         sqlstmts_as_string = True
         sqlstmts = sqlstmts.split(delimiter)
     if withpb:
@@ -87,7 +90,7 @@
         try:
             # some dbapi modules doesn't accept unicode for sql string
             execute(str(sql))
-        except Exception, err:
+        except Exception as err:
             if cnx:
                 cnx.rollback()
             failed.append(sql)
@@ -95,7 +98,7 @@
             if cnx:
                 cnx.commit()
     if withpb:
-        print
+        print()
     if sqlstmts_as_string:
         failed = delimiter.join(failed)
     return failed
@@ -178,9 +181,9 @@
     # for mssql, we need to drop views before tables
     if hasattr(dbhelper, 'list_views'):
         cmds += ['DROP VIEW %s;' % name
-                 for name in ifilter(_SQL_DROP_ALL_USER_TABLES_FILTER_FUNCTION, dbhelper.list_views(sqlcursor))]
+                 for name in filter(_SQL_DROP_ALL_USER_TABLES_FILTER_FUNCTION, dbhelper.list_views(sqlcursor))]
     cmds += ['DROP TABLE %s;' % name
-             for name in ifilter(_SQL_DROP_ALL_USER_TABLES_FILTER_FUNCTION, dbhelper.list_tables(sqlcursor))]
+             for name in filter(_SQL_DROP_ALL_USER_TABLES_FILTER_FUNCTION, dbhelper.list_tables(sqlcursor))]
     return '\n'.join(cmds)
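
The ifilter-to-filter switch above works because six.moves.filter is lazy on both Python versions; a self-contained sketch of the same DROP-statement generation (table names are invented):

from six.moves import filter

def drop_statements(names, keep=lambda name: not name.startswith('sqlite_')):
    return '\n'.join('DROP TABLE %s;' % name for name in filter(keep, names))

stmts = drop_statements(['cw_cwuser', 'sqlite_sequence', 'entities'])
assert 'sqlite_sequence' not in stmts
assert stmts.splitlines() == ['DROP TABLE cw_cwuser;', 'DROP TABLE entities;']
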
 
 
--- a/server/test/unittest_ldapsource.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/server/test/unittest_ldapsource.py	Mon Oct 12 10:53:35 2015 +0200
@@ -16,6 +16,7 @@
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """cubicweb.server.sources.ldapusers unit and functional tests"""
+from __future__ import print_function
 
 import os
 import sys
@@ -25,6 +26,9 @@
 import subprocess
 import tempfile
 
+from six import string_types
+from six.moves import range
+
 from cubicweb import AuthenticationError
 from cubicweb.devtools.testlib import CubicWebTC
 from cubicweb.devtools.repotest import RQLGeneratorTC
@@ -49,8 +53,8 @@
     slapddir = tempfile.mkdtemp('cw-unittest-ldap')
     config = cls.config
     slapdconf = join(config.apphome, "slapd.conf")
-    confin = file(join(config.apphome, "slapd.conf.in")).read()
-    confstream = file(slapdconf, 'w')
+    confin = open(join(config.apphome, "slapd.conf.in")).read()
+    confstream = open(slapdconf, 'w')
     confstream.write(confin % {'apphome': config.apphome, 'testdir': slapddir})
     confstream.close()
     # fill ldap server with some data
@@ -61,13 +65,13 @@
     slapproc = subprocess.Popen(cmdline, stdout=PIPE, stderr=PIPE)
     stdout, stderr = slapproc.communicate()
     if slapproc.returncode:
-        print >> sys.stderr, ('slapadd returned with status: %s'
-                              % slapproc.returncode)
+        print('slapadd returned with status: %s'
+              % slapproc.returncode, file=sys.stderr)
         sys.stdout.write(stdout)
         sys.stderr.write(stderr)
 
     #ldapuri = 'ldapi://' + join(basedir, "ldapi").replace('/', '%2f')
-    port = get_available_port(xrange(9000, 9100))
+    port = get_available_port(range(9000, 9100))
     host = 'localhost:%s' % port
     ldapuri = 'ldap://%s' % host
     cmdline = ["/usr/sbin/slapd", "-f",  slapdconf,  "-h",  ldapuri, "-d", "0"]
@@ -94,8 +98,8 @@
             os.kill(cls.slapd_process.pid, signal.SIGTERM)
         stdout, stderr = cls.slapd_process.communicate()
         if cls.slapd_process.returncode:
-            print >> sys.stderr, ('slapd returned with status: %s'
-                                  % cls.slapd_process.returncode)
+            print('slapd returned with status: %s'
+                  % cls.slapd_process.returncode, file=sys.stderr)
             sys.stdout.write(stdout)
             sys.stderr.write(stderr)
         config.info('DONE')
@@ -150,7 +154,7 @@
         """
         modcmd = ['dn: %s'%dn, 'changetype: add']
         for key, values in mods.iteritems():
-            if isinstance(values, basestring):
+            if isinstance(values, string_types):
                 values = [values]
             for value in values:
                 modcmd.append('%s: %s'%(key, value))
@@ -170,7 +174,7 @@
         modcmd = ['dn: %s'%dn, 'changetype: modify']
         for (kind, key), values in mods.iteritems():
             modcmd.append('%s: %s' % (kind, key))
-            if isinstance(values, basestring):
+            if isinstance(values, string_types):
                 values = [values]
             for value in values:
                 modcmd.append('%s: %s'%(key, value))
--- a/server/test/unittest_postgres.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/server/test/unittest_postgres.py	Mon Oct 12 10:53:35 2015 +0200
@@ -19,6 +19,8 @@
 from datetime import datetime
 from threading import Thread
 
+from six.moves import range
+
 from logilab.common.testlib import SkipTest
 
 from cubicweb import ValidationError
@@ -55,7 +57,7 @@
         range1 = []
         range2 = []
         def allocate_eid_ranges(session, target):
-            for x in xrange(1, 10):
+            for x in range(1, 10):
                 eid = source.create_eid(session, count=x)
                 target.extend(range(eid-x, eid))
 
--- a/server/test/unittest_querier.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/server/test/unittest_querier.py	Mon Oct 12 10:53:35 2015 +0200
@@ -21,6 +21,7 @@
 
 from datetime import date, datetime, timedelta, tzinfo
 
+from six import PY2, integer_types
 from logilab.common.testlib import TestCase, unittest_main
 from rql import BadRQLQuery, RQLSyntaxError
 
@@ -263,8 +264,9 @@
         self.assertEqual(rset.description[0][0], 'Datetime')
         rset = self.qexecute('Any %(x)s', {'x': 1})
         self.assertEqual(rset.description[0][0], 'Int')
-        rset = self.qexecute('Any %(x)s', {'x': 1L})
-        self.assertEqual(rset.description[0][0], 'Int')
+        if PY2:
+            rset = self.qexecute('Any %(x)s', {'x': long(1)})
+            self.assertEqual(rset.description[0][0], 'Int')
         rset = self.qexecute('Any %(x)s', {'x': True})
         self.assertEqual(rset.description[0][0], 'Boolean')
         rset = self.qexecute('Any %(x)s', {'x': 1.0})
@@ -318,7 +320,7 @@
     def test_typed_eid(self):
         # should return an empty result set
         rset = self.qexecute('Any X WHERE X eid %(x)s', {'x': '1'})
-        self.assertIsInstance(rset[0][0], (int, long))
+        self.assertIsInstance(rset[0][0], integer_types)
 
     def test_bytes_storage(self):
         feid = self.qexecute('INSERT File X: X data_name "foo.pdf", '
@@ -1015,7 +1017,7 @@
         self.assertEqual(len(rset.rows), 1)
         self.assertEqual(rset.description, [('Personne',)])
         rset = self.qexecute('Personne X WHERE X nom "bidule"')
-        self.assert_(rset.rows)
+        self.assertTrue(rset.rows)
         self.assertEqual(rset.description, [('Personne',)])
 
     def test_insert_1_multiple(self):
@@ -1029,20 +1031,20 @@
         rset = self.qexecute("INSERT Personne X, Personne Y: X nom 'bidule', Y nom 'tutu'")
         self.assertEqual(rset.description, [('Personne', 'Personne')])
         rset = self.qexecute('Personne X WHERE X nom "bidule" or X nom "tutu"')
-        self.assert_(rset.rows)
+        self.assertTrue(rset.rows)
         self.assertEqual(rset.description, [('Personne',), ('Personne',)])
 
     def test_insert_3(self):
         self.qexecute("INSERT Personne X: X nom Y WHERE U login 'admin', U login Y")
         rset = self.qexecute('Personne X WHERE X nom "admin"')
-        self.assert_(rset.rows)
+        self.assertTrue(rset.rows)
         self.assertEqual(rset.description, [('Personne',)])
 
     def test_insert_4(self):
         self.qexecute("INSERT Societe Y: Y nom 'toto'")
         self.qexecute("INSERT Personne X: X nom 'bidule', X travaille Y WHERE Y nom 'toto'")
         rset = self.qexecute('Any X, Y WHERE X nom "bidule", Y nom "toto", X travaille Y')
-        self.assert_(rset.rows)
+        self.assertTrue(rset.rows)
         self.assertEqual(rset.description, [('Personne', 'Societe',)])
 
     def test_insert_4bis(self):
@@ -1067,7 +1069,7 @@
         self.qexecute("INSERT Personne X: X nom 'bidule'")
         self.qexecute("INSERT Societe Y: Y nom 'toto', X travaille Y WHERE X nom 'bidule'")
         rset = self.qexecute('Any X, Y WHERE X nom "bidule", Y nom "toto", X travaille Y')
-        self.assert_(rset.rows)
+        self.assertTrue(rset.rows)
         self.assertEqual(rset.description, [('Personne', 'Societe',)])
 
     def test_insert_5bis(self):
@@ -1075,20 +1077,20 @@
         self.qexecute("INSERT Societe Y: Y nom 'toto', X travaille Y WHERE X eid %(x)s",
                      {'x': peid})
         rset = self.qexecute('Any X, Y WHERE X nom "bidule", Y nom "toto", X travaille Y')
-        self.assert_(rset.rows)
+        self.assertTrue(rset.rows)
         self.assertEqual(rset.description, [('Personne', 'Societe',)])
 
     def test_insert_6(self):
         self.qexecute("INSERT Personne X, Societe Y: X nom 'bidule', Y nom 'toto', X travaille Y")
         rset = self.qexecute('Any X, Y WHERE X nom "bidule", Y nom "toto", X travaille Y')
-        self.assert_(rset.rows)
+        self.assertTrue(rset.rows)
         self.assertEqual(rset.description, [('Personne', 'Societe',)])
 
     def test_insert_7(self):
         self.qexecute("INSERT Personne X, Societe Y: X nom N, Y nom 'toto', "
                       "X travaille Y WHERE U login 'admin', U login N")
         rset = self.qexecute('Any X, Y WHERE X nom "admin", Y nom "toto", X travaille Y')
-        self.assert_(rset.rows)
+        self.assertTrue(rset.rows)
         self.assertEqual(rset.description, [('Personne', 'Societe',)])
 
     def test_insert_7_2(self):
@@ -1103,7 +1105,7 @@
         self.qexecute("INSERT Societe Y, Personne X: Y nom N, X nom 'toto', X travaille Y "
                       "WHERE U login 'admin', U login N")
         rset = self.qexecute('Any X, Y WHERE X nom "toto", Y nom "admin", X travaille Y')
-        self.assert_(rset.rows)
+        self.assertTrue(rset.rows)
         self.assertEqual(rset.description, [('Personne', 'Societe',)])
 
     def test_insert_9(self):
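
The test changes above guard Python-2-only literals with six.PY2 and compare against six.integer_types; a minimal sketch of that pattern:

from six import PY2, integer_types

def is_int_like(value):
    # int on Python 3, int or long on Python 2
    return isinstance(value, integer_types)

assert is_int_like(1)
if PY2:
    assert is_int_like(long(1))  # noqa: F821 -- long exists only on Python 2
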
--- a/server/test/unittest_repository.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/server/test/unittest_repository.py	Mon Oct 12 10:53:35 2015 +0200
@@ -22,6 +22,8 @@
 import time
 import logging
 
+from six.moves import range
+
 from yams.constraints import UniqueConstraint
 from yams import register_base_type, unregister_base_type
 
@@ -77,7 +79,7 @@
 
     def test_connect(self):
         cnxid = self.repo.connect(self.admlogin, password=self.admpassword)
-        self.assert_(cnxid)
+        self.assertTrue(cnxid)
         self.repo.close(cnxid)
         self.assertRaises(AuthenticationError,
                           self.repo.connect, self.admlogin, password='nimportnawak')
@@ -100,7 +102,7 @@
             cnx.commit()
         repo = self.repo
         cnxid = repo.connect(u"barnabé", password=u"héhéhé".encode('UTF8'))
-        self.assert_(cnxid)
+        self.assertTrue(cnxid)
         repo.close(cnxid)
 
     def test_rollback_on_execute_validation_error(self):
@@ -145,7 +147,7 @@
     def test_close(self):
         repo = self.repo
         cnxid = repo.connect(self.admlogin, password=self.admpassword)
-        self.assert_(cnxid)
+        self.assertTrue(cnxid)
         repo.close(cnxid)
 
     def test_check_session(self):
@@ -192,7 +194,7 @@
         constraints = schema.rschema('relation_type').rdef('CWAttribute', 'CWRType').constraints
         self.assertEqual(len(constraints), 1)
         cstr = constraints[0]
-        self.assert_(isinstance(cstr, RQLConstraint))
+        self.assertIsInstance(cstr, RQLConstraint)
         self.assertEqual(cstr.expression, 'O final TRUE')
 
         ownedby = schema.rschema('owned_by')
@@ -589,11 +591,11 @@
         with self.admin_access.repo_cnx() as cnx:
             personnes = []
             t0 = time.time()
-            for i in xrange(2000):
+            for i in range(2000):
                 p = cnx.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M')
                 personnes.append(p)
             abraham = cnx.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M')
-            for j in xrange(0, 2000, 100):
+            for j in range(0, 2000, 100):
                 abraham.cw_set(personne_composite=personnes[j:j+100])
             t1 = time.time()
             self.info('creation: %.2gs', (t1 - t0))
@@ -610,7 +612,7 @@
     def test_add_relation_non_inlined(self):
         with self.admin_access.repo_cnx() as cnx:
             personnes = []
-            for i in xrange(2000):
+            for i in range(2000):
                 p = cnx.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M')
                 personnes.append(p)
             cnx.commit()
@@ -619,7 +621,7 @@
                                         personne_composite=personnes[:100])
             t1 = time.time()
             self.info('creation: %.2gs', (t1 - t0))
-            for j in xrange(100, 2000, 100):
+            for j in range(100, 2000, 100):
                 abraham.cw_set(personne_composite=personnes[j:j+100])
             t2 = time.time()
             self.info('more relations: %.2gs', (t2-t1))
@@ -630,7 +632,7 @@
     def test_add_relation_inlined(self):
         with self.admin_access.repo_cnx() as cnx:
             personnes = []
-            for i in xrange(2000):
+            for i in range(2000):
                 p = cnx.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M')
                 personnes.append(p)
             cnx.commit()
@@ -639,7 +641,7 @@
                                         personne_inlined=personnes[:100])
             t1 = time.time()
             self.info('creation: %.2gs', (t1 - t0))
-            for j in xrange(100, 2000, 100):
+            for j in range(100, 2000, 100):
                 abraham.cw_set(personne_inlined=personnes[j:j+100])
             t2 = time.time()
             self.info('more relations: %.2gs', (t2-t1))
@@ -652,7 +654,7 @@
         """ to be compared with test_session_add_relations"""
         with self.admin_access.repo_cnx() as cnx:
             personnes = []
-            for i in xrange(2000):
+            for i in range(2000):
                 p = cnx.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M')
                 personnes.append(p)
             abraham = cnx.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M')
@@ -669,7 +671,7 @@
         """ to be compared with test_session_add_relation"""
         with self.admin_access.repo_cnx() as cnx:
             personnes = []
-            for i in xrange(2000):
+            for i in range(2000):
                 p = cnx.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M')
                 personnes.append(p)
             abraham = cnx.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M')
@@ -686,7 +688,7 @@
         """ to be compared with test_session_add_relations"""
         with self.admin_access.repo_cnx() as cnx:
             personnes = []
-            for i in xrange(2000):
+            for i in range(2000):
                 p = cnx.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M')
                 personnes.append(p)
             abraham = cnx.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M')
@@ -703,7 +705,7 @@
         """ to be compared with test_session_add_relation"""
         with self.admin_access.repo_cnx() as cnx:
             personnes = []
-            for i in xrange(2000):
+            for i in range(2000):
                 p = cnx.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M')
                 personnes.append(p)
             abraham = cnx.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M')
--- a/server/test/unittest_rql2sql.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/server/test/unittest_rql2sql.py	Mon Oct 12 10:53:35 2015 +0200
@@ -16,6 +16,7 @@
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """unit tests for module cubicweb.server.sources.rql2sql"""
+from __future__ import print_function
 
 import sys
 import os
@@ -1246,13 +1247,13 @@
         except Exception as ex:
             if 'r' in locals():
                 try:
-                    print (r%args).strip()
+                    print((r%args).strip())
                 except KeyError:
-                    print 'strange, missing substitution'
-                    print r, nargs
-                print '!='
-                print sql.strip()
-            print 'RQL:', rql
+                    print('strange, missing substitution')
+                    print(r, nargs)
+                print('!=')
+                print(sql.strip())
+            print('RQL:', rql)
             raise
 
     def _parse(self, rqls):
@@ -1269,11 +1270,11 @@
             r, args, cbs = self.o.generate(rqlst, args)
             self.assertEqual((r.strip(), args), sql)
         except Exception as ex:
-            print rql
+            print(rql)
             if 'r' in locals():
-                print r.strip()
-                print '!='
-                print sql[0].strip()
+                print(r.strip())
+                print('!=')
+                print(sql[0].strip())
             raise
         return
 
--- a/server/test/unittest_rqlannotation.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/server/test/unittest_rqlannotation.py	Mon Oct 12 10:53:35 2015 +0200
@@ -64,7 +64,7 @@
             rqlst = self._prepare(cnx, 'Any A,B,C WHERE A eid 12,A comment B, '
                                   'A ?wf_info_for C')
             self.assertEqual(rqlst.defined_vars['A']._q_invariant, False)
-            self.assert_(rqlst.defined_vars['B'].stinfo['attrvar'])
+            self.assertTrue(rqlst.defined_vars['B'].stinfo['attrvar'])
             self.assertEqual(rqlst.defined_vars['C']._q_invariant, False)
             self.assertEqual(rqlst.solutions, [{'A': 'TrInfo', 'B': 'String', 'C': 'Affaire'},
                                           {'A': 'TrInfo', 'B': 'String', 'C': 'CWUser'},
@@ -87,7 +87,7 @@
                                   'Y nom NX, X eid XE, not Y eid XE')
             self.assertEqual(rqlst.defined_vars['X']._q_invariant, False)
             self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False)
-            self.assert_(rqlst.defined_vars['XE'].stinfo['attrvar'])
+            self.assertTrue(rqlst.defined_vars['XE'].stinfo['attrvar'])
 
     def test_0_8(self):
         with self.session.new_cnx() as cnx:
--- a/server/test/unittest_security.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/server/test/unittest_security.py	Mon Oct 12 10:53:35 2015 +0200
@@ -17,6 +17,8 @@
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """functional tests for server'security"""
 
+from six.moves import range
+
 from logilab.common.testlib import unittest_main
 
 from cubicweb.devtools.testlib import CubicWebTC
@@ -559,7 +561,7 @@
             rset = cnx.execute('CWUser X')
             self.assertEqual([[anon.eid]], rset.rows)
             # anonymous user can read groups (necessary to check allowed transitions for instance)
-            self.assert_(cnx.execute('CWGroup X'))
+            self.assertTrue(cnx.execute('CWGroup X'))
             # should only be able to read the anonymous user, not another one
             self.assertRaises(Unauthorized,
                               cnx.execute, 'CWUser X WHERE X eid %(x)s', {'x': admineid})
@@ -666,7 +668,7 @@
                 rset = cnx.execute('Any X, U WHERE X is EmailAddress, U use_email X')
                 msg = ['Preexisting email readable by anon found!']
                 tmpl = '  - "%s" used by user "%s"'
-                for i in xrange(len(rset)):
+                for i in range(len(rset)):
                     email, user = rset.get_entity(i, 0), rset.get_entity(i, 1)
                     msg.append(tmpl % (email.dc_title(), user.dc_title()))
                 raise RuntimeError('\n'.join(msg))
--- a/server/test/unittest_storage.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/server/test/unittest_storage.py	Mon Oct 12 10:53:35 2015 +0200
@@ -215,7 +215,7 @@
             f1 = cnx.create_entity('File', data=Binary(filepath),
                                    data_format=u'text/plain', data_name=u'foo')
             cw_value = f1.data.getvalue()
-            fs_value = file(filepath).read()
+            fs_value = open(filepath).read()
             if cw_value != fs_value:
                 self.fail('cw value %r is different from file content' % cw_value)
 
@@ -304,7 +304,7 @@
             old_fspath = self.fspath(cnx, f1)
             cnx.transaction_data['fs_importing'] = True
             new_fspath = osp.join(self.tempdir, 'newfile.txt')
-            file(new_fspath, 'w').write('the new data')
+            open(new_fspath, 'w').write('the new data')
             cnx.execute('SET F data %(d)s WHERE F eid %(f)s',
                          {'d': Binary(new_fspath), 'f': f1.eid})
             cnx.commit()
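
These hunks replace the Python 2-only file() builtin with open(), which exists and behaves the same on both interpreters. A hedged sketch of the substitution, here wrapped in a context manager so the handle is also closed (the path is illustrative):

    # Python 2 only:  fs_value = file(filepath).read()
    filepath = '/tmp/newfile.txt'
    with open(filepath, 'w') as fobj:
        fobj.write('the new data')
    with open(filepath) as fobj:
        fs_value = fobj.read()
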
--- a/server/utils.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/server/utils.py	Mon Oct 12 10:53:35 2015 +0200
@@ -16,6 +16,7 @@
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """Some utilities for the CubicWeb server."""
+from __future__ import print_function
 
 __docformat__ = "restructuredtext en"
 
@@ -92,7 +93,7 @@
                        passwdmsg='password'):
     if not user:
         if msg:
-            print msg
+            print(msg)
         while not user:
             user = raw_input('login: ')
         user = unicode(user, sys.stdin.encoding)
@@ -102,7 +103,7 @@
             passwd2 = getpass('confirm password: ')
             if passwd == passwd2:
                 break
-            print 'password doesn\'t match'
+            print('password doesn\'t match')
             passwd = getpass('password: ')
     # XXX decode password using stdin encoding then encode it using appl'encoding
     return user, passwd
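
The hunks above convert print statements to the print() function and add the __future__ import so the same syntax parses under both Python 2 and Python 3. A minimal sketch of the pattern (the function and message are illustrative, not part of the changeset):

    from __future__ import print_function

    def warn(msg):
        # with the future import, print is a function on Python 2 as well
        print('warning:', msg)
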
--- a/setup.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/setup.py	Mon Oct 12 10:53:35 2015 +0200
@@ -42,7 +42,7 @@
 from __pkginfo__ import modname, version, license, description, web, \
      author, author_email
 
-long_description = file('README').read()
+long_description = open('README').read()
 
 # import optional features
 import __pkginfo__
--- a/skeleton/DISTNAME.spec.tmpl	Mon Oct 12 09:19:07 2015 +0200
+++ b/skeleton/DISTNAME.spec.tmpl	Mon Oct 12 10:53:35 2015 +0200
@@ -21,6 +21,7 @@
 
 BuildRequires:  %%{python} %%{python}-setuptools
 Requires:       cubicweb >= %(version)s
+Requires:       %{python}-six >= 1.4.0
 
 %%description
 %(longdesc)s
--- a/skeleton/debian/control.tmpl	Mon Oct 12 09:19:07 2015 +0200
+++ b/skeleton/debian/control.tmpl	Mon Oct 12 10:53:35 2015 +0200
@@ -12,6 +12,7 @@
 Architecture: all
 Depends:
  cubicweb-common (>= %(version)s),
+ python-six (>= 1.4.0),
  ${python:Depends},
  ${misc:Depends},
 Description: %(shortdesc)s
--- a/skeleton/setup.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/skeleton/setup.py	Mon Oct 12 10:53:35 2015 +0200
@@ -44,7 +44,7 @@
     author, author_email, classifiers
 
 if exists('README'):
-    long_description = file('README').read()
+    long_description = open('README').read()
 else:
     long_description = ''
 
--- a/sobjects/__init__.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/sobjects/__init__.py	Mon Oct 12 10:53:35 2015 +0200
@@ -26,5 +26,5 @@
     if vreg.config.apphome:
         url_mapping_file = osp.join(vreg.config.apphome, 'urlmapping.py')
         if osp.exists(url_mapping_file):
-            URL_MAPPING = eval(file(url_mapping_file).read())
+            URL_MAPPING = eval(open(url_mapping_file).read())
             vreg.info('using url mapping %s from %s', URL_MAPPING, url_mapping_file)
--- a/sobjects/cwxmlparser.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/sobjects/cwxmlparser.py	Mon Oct 12 10:53:35 2015 +0200
@@ -32,9 +32,10 @@
 """
 
 from datetime import datetime, time
-import urlparse
 import urllib
 
+from six.moves.urllib.parse import urlparse, urlunparse, parse_qs
+
 from logilab.common.date import todate, totime
 from logilab.common.textutils import splitstrip, text_to_dict
 from logilab.common.decorators import classproperty
@@ -242,9 +243,9 @@
     def normalize_url(self, url):
         """overridden to add vid=xml if vid is not set in the qs"""
         url = super(CWEntityXMLParser, self).normalize_url(url)
-        purl = urlparse.urlparse(url)
+        purl = urlparse(url)
         if purl.scheme in ('http', 'https'):
-            params = urlparse.parse_qs(purl.query)
+            params = parse_qs(purl.query)
             if 'vid' not in params:
                 params['vid'] = ['xml']
                 purl = list(purl)
@@ -263,8 +264,8 @@
         If `known_relations` is given, it should be a dictionary of already
         known relations, so they don't get queried again.
         """
-        purl = urlparse.urlparse(url)
-        params = urlparse.parse_qs(purl.query)
+        purl = urlparse(url)
+        params = parse_qs(purl.query)
         if etype is None:
             etype = purl.path.split('/')[-1]
         try:
@@ -278,7 +279,7 @@
             relations.add('%s-%s' % (rtype, role))
         purl = list(purl)
         purl[4] = urllib.urlencode(params, doseq=True)
-        return urlparse.urlunparse(purl)
+        return urlunparse(purl)
 
     def complete_item(self, item, rels):
         try:
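
urlparse was merged into urllib.parse in Python 3; six.moves.urllib.parse resolves to whichever module exists, so urlparse, parse_qs and urlunparse can be imported by name as above. A sketch of the query-string rewriting idiom used in normalize_url (the URL is illustrative; the changeset itself still calls urllib.urlencode, while this sketch uses the six.moves equivalent to stay self-contained):

    from six.moves.urllib.parse import urlparse, urlunparse, parse_qs, urlencode

    url = 'http://example.org/view?name=foo'
    purl = urlparse(url)
    params = parse_qs(purl.query)
    if 'vid' not in params:
        params['vid'] = ['xml']                # force an explicit vid parameter
    parts = list(purl)
    parts[4] = urlencode(params, doseq=True)   # index 4 is the query component
    print(urlunparse(parts))                   # e.g. http://example.org/view?name=foo&vid=xml
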
--- a/sobjects/supervising.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/sobjects/supervising.py	Mon Oct 12 10:53:35 2015 +0200
@@ -128,13 +128,15 @@
         # XXX print changes
         self.w(u'  %s' % changedescr.entity.absolute_url())
 
-    def delete_entity(self, (eid, etype, title)):
+    def delete_entity(self, args):
+        eid, etype, title = args
         msg = self._cw._('deleted %(etype)s #%(eid)s (%(title)s)')
         etype = display_name(self._cw, etype).lower()
         self.w(msg % locals())
 
-    def change_state(self, (entity, fromstate, tostate)):
+    def change_state(self, args):
         _ = self._cw._
+        entity, fromstate, tostate = args
         msg = _('changed state of %(etype)s #%(eid)s (%(title)s)')
         self.w(u'%s\n' % (msg % self._entity_context(entity)))
         self.w(_('  from state %(fromstate)s to state %(tostate)s\n' %
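
Tuple parameters in function signatures were removed by PEP 3113, so def change_state(self, (entity, fromstate, tostate)) is a syntax error on Python 3; the hunk moves the unpacking into the body. A minimal before/after sketch with illustrative names:

    # Python 2 only:
    #   def delete_entity(self, (eid, etype, title)):
    #       ...

    # accepted by both interpreters:
    def delete_entity(self, args):
        eid, etype, title = args
        return 'deleted %s #%s (%s)' % (etype, eid, title)
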
--- a/sobjects/test/unittest_cwxmlparser.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/sobjects/test/unittest_cwxmlparser.py	Mon Oct 12 10:53:35 2015 +0200
@@ -17,7 +17,8 @@
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 
 from datetime import datetime
-from urlparse import urlsplit, parse_qsl
+
+from six.moves.urllib.parse import urlsplit, parse_qsl
 
 from cubicweb.devtools.testlib import CubicWebTC
 from cubicweb.sobjects.cwxmlparser import CWEntityXMLParser
@@ -214,8 +215,8 @@
 
         with self.admin_access.web_request() as req:
             user = req.execute('CWUser X WHERE X login "sthenault"').get_entity(0, 0)
-            self.assertEqual(user.creation_date, datetime(2010, 01, 22, 10, 27, 59))
-            self.assertEqual(user.modification_date, datetime(2011, 01, 25, 14, 14, 06))
+            self.assertEqual(user.creation_date, datetime(2010, 1, 22, 10, 27, 59))
+            self.assertEqual(user.modification_date, datetime(2011, 1, 25, 14, 14, 6))
             self.assertEqual(user.cwuri, 'http://pouet.org/5')
             self.assertEqual(user.cw_source[0].name, 'myfeed')
             self.assertEqual(user.absolute_url(), 'http://pouet.org/5')
@@ -299,8 +300,8 @@
         with self.repo.internal_cnx() as cnx:
             stats = dfsource.pull_data(cnx, force=True, raise_on_error=True)
             user = cnx.execute('CWUser X WHERE X login "sthenault"').get_entity(0, 0)
-            self.assertEqual(user.creation_date, datetime(2010, 01, 22, 10, 27, 59))
-            self.assertEqual(user.modification_date, datetime(2011, 01, 25, 14, 14, 06))
+            self.assertEqual(user.creation_date, datetime(2010, 1, 22, 10, 27, 59))
+            self.assertEqual(user.modification_date, datetime(2011, 1, 25, 14, 14, 6))
             self.assertEqual(user.cwuri, 'http://pouet.org/5')
             self.assertEqual(user.cw_source[0].name, 'myfeed')
 
--- a/sobjects/test/unittest_supervising.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/sobjects/test/unittest_supervising.py	Mon Oct 12 10:53:35 2015 +0200
@@ -77,7 +77,7 @@
             # check prepared email
             op._prepare_email()
             self.assertEqual(len(op.to_send), 1)
-            self.assert_(op.to_send[0][0])
+            self.assertTrue(op.to_send[0][0])
             self.assertEqual(op.to_send[0][1], ['test@logilab.fr'])
             cnx.commit()
             # some other changes #######
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/unittest_binary.py	Mon Oct 12 10:53:35 2015 +0200
@@ -0,0 +1,54 @@
+from six import PY2
+
+from unittest import TestCase
+from tempfile import NamedTemporaryFile
+import os.path as osp
+
+from logilab.common.shellutils import tempdir
+from cubicweb import Binary
+
+
+class BinaryTC(TestCase):
+    def test_init(self):
+        Binary()
+        Binary(b'toto')
+        Binary(bytearray(b'toto'))
+        if PY2:
+            Binary(buffer('toto'))
+        else:
+            Binary(memoryview(b'toto'))
+        with self.assertRaises((AssertionError, TypeError)):
+            # TypeError is raised by BytesIO if python runs with -O
+            Binary(u'toto')
+
+    def test_write(self):
+        b = Binary()
+        b.write(b'toto')
+        b.write(bytearray(b'toto'))
+        if PY2:
+            b.write(buffer('toto'))
+        else:
+            b.write(memoryview(b'toto'))
+        with self.assertRaises((AssertionError, TypeError)):
+            # TypeError is raised by BytesIO if python runs with -O
+            b.write(u'toto')
+
+    def test_gzpickle_roundtrip(self):
+        old = (u'foo', b'bar', 42, {})
+        new = Binary.zpickle(old).unzpickle()
+        self.assertEqual(old, new)
+        self.assertIsNot(old, new)
+
+    def test_from_file_to_file(self):
+        with tempdir() as dpath:
+            fpath = osp.join(dpath, 'binary.bin')
+            with open(fpath, 'wb') as fobj:
+                Binary(b'binaryblob').to_file(fobj)
+
+            bobj = Binary.from_file(fpath)
+            self.assertEqual(bobj.getvalue(), b'binaryblob')
+
+
+if __name__ == '__main__':
+    from unittest import main
+    main()
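
The new test module exercises the bytes-only Binary wrapper: construction and write() accept bytes-like objects, zpickle()/unzpickle() round-trip picklable values through zlib, and to_file()/from_file() handle disk I/O. A hedged usage sketch mirroring those tests (payloads are illustrative):

    from cubicweb import Binary

    b = Binary()
    b.write(b'binary')
    b.write(b'blob')
    assert b.getvalue() == b'binaryblob'

    # gzipped pickle round trip, as in test_gzpickle_roundtrip
    packed = Binary.zpickle({'answer': 42})
    assert packed.unzpickle() == {'answer': 42}
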
--- a/test/unittest_cwctl.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/test/unittest_cwctl.py	Mon Oct 12 10:53:35 2015 +0200
@@ -57,7 +57,7 @@
                                    funcname=None)
             for script, args in scripts.items():
                 scriptname = os.path.join(self.datadir, 'scripts', script)
-                self.assert_(os.path.exists(scriptname))
+                self.assertTrue(os.path.exists(scriptname))
                 mih.cmd_process_script(scriptname, None, scriptargs=args)
 
 
--- a/test/unittest_migration.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/test/unittest_migration.py	Mon Oct 12 10:53:35 2015 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -76,8 +76,6 @@
     def test_filter_scripts_for_mode(self):
         config = CubicWebConfiguration('data')
         config.verbosity = 0
-        self.assertNotIsInstance(config.migration_handler(), ServerMigrationHelper)
-        self.assertIsInstance(config.migration_handler(), MigrationHelper)
         config = self.config
         config.__class__.name = 'repository'
         self.assertListEqual(filter_scripts(config, TMIGRDIR, (0,0,4), (0,1,0)),
--- a/test/unittest_rqlrewrite.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/test/unittest_rqlrewrite.py	Mon Oct 12 10:53:35 2015 +0200
@@ -16,6 +16,8 @@
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 
+from six import string_types
+
 from logilab.common.testlib import unittest_main, TestCase
 from logilab.common.testlib import mock_object
 from yams import BadSchemaDefinition
@@ -67,7 +69,7 @@
     rewriter = _prepare_rewriter(rqlrewrite.RQLRewriter, kwargs)
     snippets = []
     for v, exprs in sorted(snippets_map.items()):
-        rqlexprs = [isinstance(snippet, basestring)
+        rqlexprs = [isinstance(snippet, string_types)
                     and mock_object(snippet_rqlst=parse('Any X WHERE '+snippet).children[0],
                                     expression='Any X WHERE '+snippet)
                     or snippet
--- a/test/unittest_rset.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/test/unittest_rset.py	Mon Oct 12 10:53:35 2015 +0200
@@ -18,8 +18,9 @@
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """unit tests for module cubicweb.utils"""
 
-from urlparse import urlsplit
-import pickle
+from six import string_types
+from six.moves import cPickle as pickle
+from six.moves.urllib.parse import urlsplit
 
 from rql import parse
 
@@ -100,7 +101,9 @@
 
     def test_pickle(self):
         del self.rset.req
-        self.assertEqual(len(pickle.dumps(self.rset)), 376)
+        # 373 for python 2.7's cPickle
+        # 376 for the old python pickle implementation
+        self.assertIn(len(pickle.dumps(self.rset)), (373, 376))
 
     def test_build_url(self):
         with self.admin_access.web_request() as req:
@@ -274,7 +277,7 @@
         """make sure syntax tree is cached"""
         rqlst1 = self.rset.syntax_tree()
         rqlst2 = self.rset.syntax_tree()
-        self.assert_(rqlst1 is rqlst2)
+        self.assertIs(rqlst1, rqlst2)
 
     def test_get_entity_simple(self):
         with self.admin_access.web_request() as req:
@@ -550,17 +553,17 @@
     def test_str(self):
         with self.admin_access.web_request() as req:
             rset = req.execute('(Any X,N WHERE X is CWGroup, X name N)')
-            self.assertIsInstance(str(rset), basestring)
+            self.assertIsInstance(str(rset), string_types)
             self.assertEqual(len(str(rset).splitlines()), 1)
 
     def test_repr(self):
         with self.admin_access.web_request() as req:
             rset = req.execute('(Any X,N WHERE X is CWGroup, X name N)')
-            self.assertIsInstance(repr(rset), basestring)
+            self.assertIsInstance(repr(rset), string_types)
             self.assertTrue(len(repr(rset).splitlines()) > 1)
 
             rset = req.execute('(Any X WHERE X is CWGroup, X name "managers")')
-            self.assertIsInstance(str(rset), basestring)
+            self.assertIsInstance(str(rset), string_types)
             self.assertEqual(len(str(rset).splitlines()), 1)
 
     def test_nonregr_symmetric_relation(self):
--- a/test/unittest_schema.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/test/unittest_schema.py	Mon Oct 12 10:53:35 2015 +0200
@@ -158,7 +158,7 @@
 
     def test_knownValues_load_schema(self):
         schema = loader.load(config)
-        self.assert_(isinstance(schema, CubicWebSchema))
+        self.assertIsInstance(schema, CubicWebSchema)
         self.assertEqual(schema.name, 'data')
         entities = sorted([str(e) for e in schema.entities()])
         expected_entities = ['Ami', 'BaseTransition', 'BigInt', 'Bookmark', 'Boolean', 'Bytes', 'Card',
--- a/test/unittest_utils.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/test/unittest_utils.py	Mon Oct 12 10:53:35 2015 +0200
@@ -21,6 +21,7 @@
 import decimal
 import datetime
 
+from six.moves import range
 
 from logilab.common.testlib import TestCase, DocTest, unittest_main
 
@@ -67,7 +68,7 @@
     def test_querycache(self):
         c = QueryCache(ceiling=20)
         # write only
-        for x in xrange(10):
+        for x in range(10):
             c[x] = x
         self.assertEqual(c._usage_report(),
                          {'transientcount': 0,
@@ -75,7 +76,7 @@
                           'permanentcount': 0})
         c = QueryCache(ceiling=10)
         # we should also get a warning
-        for x in xrange(20):
+        for x in range(20):
             c[x] = x
         self.assertEqual(c._usage_report(),
                          {'transientcount': 0,
@@ -83,8 +84,8 @@
                           'permanentcount': 0})
         # write + reads
         c = QueryCache(ceiling=20)
-        for n in xrange(4):
-            for x in xrange(10):
+        for n in range(4):
+            for x in range(10):
                 c[x] = x
                 c[x]
         self.assertEqual(c._usage_report(),
@@ -92,8 +93,8 @@
                           'itemcount': 10,
                           'permanentcount': 0})
         c = QueryCache(ceiling=20)
-        for n in xrange(17):
-            for x in xrange(10):
+        for n in range(17):
+            for x in range(10):
                 c[x] = x
                 c[x]
         self.assertEqual(c._usage_report(),
@@ -101,8 +102,8 @@
                           'itemcount': 10,
                           'permanentcount': 10})
         c = QueryCache(ceiling=20)
-        for n in xrange(17):
-            for x in xrange(10):
+        for n in range(17):
+            for x in range(10):
                 c[x] = x
                 if n % 2:
                     c[x]
@@ -115,7 +116,7 @@
 
 class UStringIOTC(TestCase):
     def test_boolean_value(self):
-        self.assert_(UStringIO())
+        self.assertTrue(UStringIO())
 
 
 class RepeatListTC(TestCase):
@@ -169,14 +170,14 @@
 
     def test_append(self):
         l = SizeConstrainedList(10)
-        for i in xrange(12):
+        for i in range(12):
             l.append(i)
-        self.assertEqual(l, range(2, 12))
+        self.assertEqual(l, list(range(2, 12)))
 
     def test_extend(self):
-        testdata = [(range(5), range(5)),
-                    (range(10), range(10)),
-                    (range(12), range(2, 12)),
+        testdata = [(list(range(5)), list(range(5))),
+                    (list(range(10)), list(range(10))),
+                    (list(range(12)), list(range(2, 12))),
                     ]
         for extension, expected in testdata:
             l = SizeConstrainedList(10)
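
On Python 3, range() returns a lazy sequence that never compares equal to a list, so the expected values above are wrapped in list(range(...)); six.moves.range keeps the memory-friendly xrange behaviour on Python 2. A small sketch:

    from six.moves import range   # xrange on Python 2, builtin range on Python 3

    values = []
    for i in range(12):
        values.append(i)
    assert values[2:] == list(range(2, 12))   # explicit list() for the comparison
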
--- a/toolsutils.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/toolsutils.py	Mon Oct 12 10:53:35 2015 +0200
@@ -16,6 +16,7 @@
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """some utilities for cubicweb command line tools"""
+from __future__ import print_function
 
 __docformat__ = "restructuredtext en"
 
@@ -62,29 +63,29 @@
     """create a directory if it doesn't exist yet"""
     try:
         makedirs(directory)
-        print '-> created directory %s' % directory
+        print('-> created directory %s' % directory)
     except OSError as ex:
         import errno
         if ex.errno != errno.EEXIST:
             raise
-        print '-> no need to create existing directory %s' % directory
+        print('-> no need to create existing directory %s' % directory)
 
 def create_symlink(source, target):
     """create a symbolic link"""
     if exists(target):
         remove(target)
     symlink(source, target)
-    print '[symlink] %s <-- %s' % (target, source)
+    print('[symlink] %s <-- %s' % (target, source))
 
 def create_copy(source, target):
     import shutil
-    print '[copy] %s <-- %s' % (target, source)
+    print('[copy] %s <-- %s' % (target, source))
     shutil.copy2(source, target)
 
 def rm(whatever):
     import shutil
     shutil.rmtree(whatever)
-    print '-> removed %s' % whatever
+    print('-> removed %s' % whatever)
 
 def show_diffs(appl_file, ref_file, askconfirm=True):
     """interactivly replace the old file with the new file according to
@@ -95,8 +96,8 @@
     diffs = pipe.stdout.read()
     if diffs:
         if askconfirm:
-            print
-            print diffs
+            print()
+            print(diffs)
             action = ASK.ask('Replace ?', ('Y', 'n', 'q'), 'Y').lower()
         else:
             action = 'y'
@@ -106,17 +107,17 @@
             except IOError:
                 os.system('chmod a+w %s' % appl_file)
                 shutil.copyfile(ref_file, appl_file)
-            print 'replaced'
+            print('replaced')
         elif action == 'q':
             sys.exit(0)
         else:
             copy_file = appl_file + '.default'
-            copy = file(copy_file, 'w')
+            copy = open(copy_file, 'w')
             copy.write(open(ref_file).read())
             copy.close()
-            print 'keep current version, the new file has been written to', copy_file
+            print('keep current version, the new file has been written to', copy_file)
     else:
-        print 'no diff between %s and %s' % (appl_file, ref_file)
+        print('no diff between %s and %s' % (appl_file, ref_file))
 
 SKEL_EXCLUDE = ('*.py[co]', '*.orig', '*~', '*_flymake.py')
 def copy_skeleton(skeldir, targetdir, context,
@@ -143,15 +144,15 @@
                 if not askconfirm or not exists(tfpath) or \
                        ASK.confirm('%s exists, overwrite?' % tfpath):
                     fill_templated_file(fpath, tfpath, context)
-                    print '[generate] %s <-- %s' % (tfpath, fpath)
+                    print('[generate] %s <-- %s' % (tfpath, fpath))
             elif exists(tfpath):
                 show_diffs(tfpath, fpath, askconfirm)
             else:
                 shutil.copyfile(fpath, tfpath)
 
 def fill_templated_file(fpath, tfpath, context):
-    fobj = file(tfpath, 'w')
-    templated = file(fpath).read()
+    fobj = open(tfpath, 'w')
+    templated = open(fpath).read()
     fobj.write(templated % context)
     fobj.close()
 
@@ -160,8 +161,8 @@
     if log:
         log('set permissions to 0600 for %s', filepath)
     else:
-        print '-> set permissions to 0600 for %s' % filepath
-    chmod(filepath, 0600)
+        print('-> set permissions to 0600 for %s' % filepath)
+    chmod(filepath, 0o600)
 
 def read_config(config_file, raise_if_unreadable=False):
     """read some simple configuration from `config_file` and return it as a
@@ -234,7 +235,7 @@
             raise ConfigurationError(msg)
 
     def fail(self, reason):
-        print "command failed:", reason
+        print("command failed:", reason)
         sys.exit(1)
 
 
--- a/uilib.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/uilib.py	Mon Oct 12 10:53:35 2015 +0200
@@ -26,7 +26,9 @@
 
 import csv
 import re
-from StringIO import StringIO
+from io import StringIO
+
+from six import string_types, integer_types
 
 from logilab.mtconverter import xml_escape, html_unescape
 from logilab.common.date import ustrftime
@@ -92,7 +94,7 @@
 _('%d seconds')
 
 def print_timedelta(value, req, props, displaytime=True):
-    if isinstance(value, (int, long)):
+    if isinstance(value, integer_types):
         # `date - date`, unlike `datetime - datetime` gives an int
         # (number of days), not a timedelta
         # XXX should rql be fixed to return Int instead of Interval in
@@ -559,7 +561,7 @@
     def __call__(self, function):
         def newfunc(*args, **kwargs):
             ret = function(*args, **kwargs)
-            if isinstance(ret, basestring):
+            if isinstance(ret, string_types):
                 return ret[:self.maxsize]
             return ret
         return newfunc
@@ -568,6 +570,6 @@
 def htmlescape(function):
     def newfunc(*args, **kwargs):
         ret = function(*args, **kwargs)
-        assert isinstance(ret, basestring)
+        assert isinstance(ret, string_types)
         return xml_escape(ret)
     return newfunc
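
basestring and long do not exist on Python 3; six.string_types and six.integer_types are tuples usable directly in isinstance() checks, as in the hunks above. A minimal sketch:

    from six import string_types, integer_types

    def kind(value):
        if isinstance(value, string_types):    # str on py3; str or unicode on py2
            return 'text'
        if isinstance(value, integer_types):   # int on py3; int or long on py2
            return 'number'
        return 'other'

    assert kind(u'abc') == 'text'
    assert kind(42) == 'number'
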
--- a/utils.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/utils.py	Mon Oct 12 10:53:35 2015 +0200
@@ -33,9 +33,9 @@
 from uuid import uuid4
 from warnings import warn
 from threading import Lock
-from urlparse import urlparse
+from logging import getLogger
 
-from logging import getLogger
+from six.moves.urllib.parse import urlparse
 
 from logilab.mtconverter import xml_escape
 from logilab.common.deprecation import deprecated
--- a/view.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/view.py	Mon Oct 12 10:53:35 2015 +0200
@@ -24,6 +24,8 @@
 from warnings import warn
 from functools import partial
 
+from six.moves import range
+
 from logilab.common.deprecation import deprecated
 from logilab.common.registry import yes
 from logilab.mtconverter import xml_escape
@@ -173,7 +175,7 @@
         # specific view
         if rset.rowcount != 1:
             kwargs.setdefault('initargs', self.cw_extra_kwargs)
-            for i in xrange(len(rset)):
+            for i in range(len(rset)):
                 if wrap:
                     self.w(u'<div class="section">')
                 self.wview(self.__regid__, rset, row=i, **kwargs)
@@ -394,7 +396,7 @@
         if rset is None:
             rset = self.cw_rset = self._cw.execute(self.startup_rql())
         if rset:
-            for i in xrange(len(rset)):
+            for i in range(len(rset)):
                 self.wview(self.__regid__, rset, row=i, **kwargs)
         else:
             self.no_entities(**kwargs)
--- a/web/__init__.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/web/__init__.py	Mon Oct 12 10:53:35 2015 +0200
@@ -22,8 +22,7 @@
 __docformat__ = "restructuredtext en"
 _ = unicode
 
-from urllib import quote as urlquote
-
+from six.moves.urllib.parse import quote as urlquote
 from logilab.common.deprecation import deprecated
 
 from cubicweb.web._exceptions import *
--- a/web/_exceptions.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/web/_exceptions.py	Mon Oct 12 10:53:35 2015 +0200
@@ -20,7 +20,7 @@
 
 __docformat__ = "restructuredtext en"
 
-import httplib
+from six.moves import http_client
 
 from cubicweb._exceptions import *
 from cubicweb.utils import json_dumps
@@ -41,7 +41,7 @@
     """base class for publishing related exception"""
 
     def __init__(self, *args, **kwargs):
-        self.status = kwargs.pop('status', httplib.OK)
+        self.status = kwargs.pop('status', http_client.OK)
         super(PublishException, self).__init__(*args, **kwargs)
 
 class LogOut(PublishException):
@@ -52,7 +52,7 @@
 
 class Redirect(PublishException):
     """raised to redirect the http request"""
-    def __init__(self, location, status=httplib.SEE_OTHER):
+    def __init__(self, location, status=http_client.SEE_OTHER):
         super(Redirect, self).__init__(status=status)
         self.location = location
 
@@ -71,7 +71,7 @@
     """raised when a request can't be served because of a bad input"""
 
     def __init__(self, *args, **kwargs):
-        kwargs.setdefault('status', httplib.BAD_REQUEST)
+        kwargs.setdefault('status', http_client.BAD_REQUEST)
         super(RequestError, self).__init__(*args, **kwargs)
 
 
@@ -79,14 +79,14 @@
     """raised when an edit request doesn't specify any eid to edit"""
 
     def __init__(self, *args, **kwargs):
-        kwargs.setdefault('status', httplib.BAD_REQUEST)
+        kwargs.setdefault('status', http_client.BAD_REQUEST)
         super(NothingToEdit, self).__init__(*args, **kwargs)
 
 class ProcessFormError(RequestError):
     """raised when posted data can't be processed by the corresponding field
     """
     def __init__(self, *args, **kwargs):
-        kwargs.setdefault('status', httplib.BAD_REQUEST)
+        kwargs.setdefault('status', http_client.BAD_REQUEST)
         super(ProcessFormError, self).__init__(*args, **kwargs)
 
 class NotFound(RequestError):
@@ -94,13 +94,13 @@
        a 404 error should be returned"""
 
     def __init__(self, *args, **kwargs):
-        kwargs.setdefault('status', httplib.NOT_FOUND)
+        kwargs.setdefault('status', http_client.NOT_FOUND)
         super(NotFound, self).__init__(*args, **kwargs)
 
 class RemoteCallFailed(RequestError):
     """raised when a json remote call fails
     """
-    def __init__(self, reason='', status=httplib.INTERNAL_SERVER_ERROR):
+    def __init__(self, reason='', status=http_client.INTERNAL_SERVER_ERROR):
         super(RemoteCallFailed, self).__init__(reason, status=status)
         self.reason = reason
 
--- a/web/application.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/web/application.py	Mon Oct 12 10:53:35 2015 +0200
@@ -25,7 +25,7 @@
 from warnings import warn
 import json
 
-import httplib
+from six.moves import http_client
 
 from logilab.common.deprecation import deprecated
 
@@ -68,8 +68,8 @@
     def __init__(self, appli):
         self.repo = appli.repo
         self.vreg = appli.vreg
-        self.session_manager = self.vreg['components'].select('sessionmanager',
-                                                              repo=self.repo)
+        self.session_manager = self.vreg['sessions'].select('sessionmanager',
+                                                            repo=self.repo)
         global SESSION_MANAGER
         SESSION_MANAGER = self.session_manager
         if self.vreg.config.mode != 'test':
@@ -290,7 +290,7 @@
                 if self.vreg.config['auth-mode'] == 'cookie' and ex.url:
                     req.headers_out.setHeader('location', str(ex.url))
                 if ex.status is not None:
-                    req.status_out = httplib.SEE_OTHER
+                    req.status_out = http_client.SEE_OTHER
                 # When the authentification is handled by http we must
                 # explicitly ask for authentification to flush current http
                 # authentification information
@@ -310,18 +310,18 @@
             # the request does not use https, redirect to login form
             https_url = self.vreg.config['https-url']
             if https_url and req.base_url() != https_url:
-                req.status_out = httplib.SEE_OTHER
+                req.status_out = http_client.SEE_OTHER
                 req.headers_out.setHeader('location', https_url + 'login')
             else:
                 # We assume here that in http auth mode the user *May* provide
                 # Authentification Credential if asked kindly.
                 if self.vreg.config['auth-mode'] == 'http':
-                    req.status_out = httplib.UNAUTHORIZED
+                    req.status_out = http_client.UNAUTHORIZED
                 # In the other case (coky auth) we assume that there is no way
                 # for the user to provide them...
                 # XXX But WHY ?
                 else:
-                    req.status_out = httplib.FORBIDDEN
+                    req.status_out = http_client.FORBIDDEN
                 # If previous error handling already generated a custom content
                 # do not overwrite it. This is used by LogOut Except
                 # XXX ensure we don't actually serve content
@@ -394,12 +394,12 @@
         except Unauthorized as ex:
             req.data['errmsg'] = req._('You\'re not authorized to access this page. '
                                        'If you think you should, please contact the site administrator.')
-            req.status_out = httplib.FORBIDDEN
+            req.status_out = http_client.FORBIDDEN
             result = self.error_handler(req, ex, tb=False)
         except Forbidden as ex:
             req.data['errmsg'] = req._('This action is forbidden. '
                                        'If you think it should be allowed, please contact the site administrator.')
-            req.status_out = httplib.FORBIDDEN
+            req.status_out = http_client.FORBIDDEN
             result = self.error_handler(req, ex, tb=False)
         except (BadRQLQuery, RequestError) as ex:
             result = self.error_handler(req, ex, tb=False)
@@ -413,7 +413,7 @@
             raise
         ### Last defense line
         except BaseException as ex:
-            req.status_out = httplib.INTERNAL_SERVER_ERROR
+            req.status_out = http_client.INTERNAL_SERVER_ERROR
             result = self.error_handler(req, ex, tb=True)
         finally:
             if req.cnx and not commited:
@@ -453,9 +453,9 @@
             # messages.
             location = req.form['__errorurl'].rsplit('#', 1)[0]
             req.headers_out.setHeader('location', str(location))
-            req.status_out = httplib.SEE_OTHER
+            req.status_out = http_client.SEE_OTHER
             return ''
-        req.status_out = httplib.CONFLICT
+        req.status_out = http_client.CONFLICT
         return self.error_handler(req, ex, tb=False)
 
     def error_handler(self, req, ex, tb=False):
@@ -491,7 +491,7 @@
 
     def ajax_error_handler(self, req, ex):
         req.set_header('content-type', 'application/json')
-        status = httplib.INTERNAL_SERVER_ERROR
+        status = http_client.INTERNAL_SERVER_ERROR
         if isinstance(ex, PublishException) and ex.status is not None:
             status = ex.status
         if req.status_out < 400:
--- a/web/captcha.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/web/captcha.py	Mon Oct 12 10:53:35 2015 +0200
@@ -22,7 +22,9 @@
 __docformat__ = "restructuredtext en"
 
 from random import randint, choice
-from cStringIO import StringIO
+from io import BytesIO
+
+from six.moves import range
 
 from PIL import Image, ImageFont, ImageDraw, ImageFilter
 
@@ -51,7 +53,7 @@
     draw = ImageDraw.Draw(img)
     # draw 100 random colored boxes on the background
     x, y = img.size
-    for num in xrange(100):
+    for num in range(100):
         draw.rectangle((randint(0, x), randint(0, y),
                         randint(0, x), randint(0, y)),
                        fill=randint(0, 0xffffff))
@@ -67,7 +69,7 @@
     """
     text = u''.join(choice('QWERTYUOPASDFGHJKLZXCVBNM') for i in range(size))
     img = pil_captcha(text, fontfile, fontsize)
-    out = StringIO()
+    out = BytesIO()
     img.save(out, format)
     out.seek(0)
     return text, out
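
Encoded image data is binary, so the in-memory buffer switches from cStringIO to io.BytesIO. A hedged sketch of saving a PIL image into such a buffer, assuming Pillow is available (size, colour and format are illustrative):

    from io import BytesIO
    from PIL import Image

    img = Image.new('RGB', (80, 30), color='white')
    out = BytesIO()
    img.save(out, 'JPEG')     # encoded image bytes go into the buffer
    out.seek(0)               # rewind so callers can read from the start
    jpeg_bytes = out.read()
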
--- a/web/cors.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/web/cors.py	Mon Oct 12 10:53:35 2015 +0200
@@ -14,7 +14,7 @@
 
 """
 
-import urlparse
+from six.moves.urllib.parse import urlsplit
 
 from cubicweb.web import LOGGER
 info = LOGGER.info
@@ -37,7 +37,7 @@
 
     In case of non-compliance, no CORS-related header is set.
     """
-    base_url = urlparse.urlsplit(req.base_url())
+    base_url = urlsplit(req.base_url())
     expected_host = '://'.join((base_url.scheme, base_url.netloc))
     if not req.get_header('Origin') or req.get_header('Origin') == expected_host:
         # not a CORS request, nothing to do
@@ -50,7 +50,7 @@
                 process_preflight(req, config)
         else: # Simple CORS or actual request
             process_simple(req, config)
-    except CORSFailed, exc:
+    except CORSFailed as exc:
         info('Cross origin resource sharing failed: %s' % exc)
     except CORSPreflight:
         info('Cross origin resource sharing: valid Preflight request %s')
@@ -101,7 +101,7 @@
     if '*' not in allowed_origins and origin not in allowed_origins:
         raise CORSFailed('Origin is not allowed')
     # bit of sanity check; see "6.3 Security"
-    myhost = urlparse.urlsplit(req.base_url()).netloc
+    myhost = urlsplit(req.base_url()).netloc
     host = req.get_header('Host')
     if host != myhost:
         info('cross origin resource sharing detected possible '
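
The comma form of except (except CORSFailed, exc) is Python 2-only syntax; except ... as exc is understood by both interpreters. A minimal sketch with a stand-in exception class:

    class CORSFailed(Exception):
        # stand-in for the cubicweb.web.cors exception of the same name
        pass

    try:
        raise CORSFailed('Origin is not allowed')
    except CORSFailed as exc:       # Python 2 spelling was: except CORSFailed, exc:
        print('Cross origin resource sharing failed: %s' % exc)
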
--- a/web/facet.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/web/facet.py	Mon Oct 12 10:53:35 2015 +0200
@@ -57,6 +57,8 @@
 from copy import deepcopy
 from datetime import datetime, timedelta
 
+from six import string_types
+
 from logilab.mtconverter import xml_escape
 from logilab.common.graph import has_path
 from logilab.common.decorators import cached, cachedproperty
@@ -754,7 +756,7 @@
         # XXX handle rel is None case in RQLPathFacet?
         if self.restr_attr != 'eid':
             self.select.set_distinct(True)
-        if isinstance(value, basestring):
+        if isinstance(value, string_types):
             # only one value selected
             if value:
                 self.select.add_constant_restriction(
@@ -922,9 +924,7 @@
         if self.rql_sort:
             return [(_(value), value) for value, in rset]
         values = [(_(value), value) for value, in rset]
-        if self.sortasc:
-            return sorted(values)
-        return reversed(sorted(values))
+        return sorted(values, reverse=not self.sortasc)
 
 
 class AttributeFacet(RelationAttributeFacet):
@@ -1073,7 +1073,7 @@
         assert self.path and isinstance(self.path, (list, tuple)), \
             'path should be a list of 3-uples, not %s' % self.path
         for part in self.path:
-            if isinstance(part, basestring):
+            if isinstance(part, string_types):
                 part = part.split()
             assert len(part) == 3, \
                    'path should be a list of 3-uples, not %s' % part
@@ -1149,7 +1149,7 @@
         varmap = {'X': self.filtered_variable}
         actual_filter_variable = None
         for part in self.path:
-            if isinstance(part, basestring):
+            if isinstance(part, string_types):
                 part = part.split()
             subject, rtype, object = part
             if skiplabel and object == self.label_variable:
@@ -1392,7 +1392,7 @@
             skiplabel=True, skipattrfilter=True)
         restrel = None
         for part in self.path:
-            if isinstance(part, basestring):
+            if isinstance(part, string_types):
                 part = part.split()
             subject, rtype, object = part
             if object == self.filter_variable:
--- a/web/formfields.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/web/formfields.py	Mon Oct 12 10:53:35 2015 +0200
@@ -66,6 +66,8 @@
 from warnings import warn
 from datetime import datetime, timedelta
 
+from six import string_types
+
 from logilab.mtconverter import xml_escape
 from logilab.common import nullobject
 from logilab.common.date import ustrftime
@@ -842,7 +844,7 @@
             self.widget.attrs.setdefault('size', self.default_text_input_size)
 
     def _ensure_correctly_typed(self, form, value):
-        if isinstance(value, basestring):
+        if isinstance(value, string_types):
             value = value.strip()
             if not value:
                 return None
@@ -924,7 +926,7 @@
         return self.format_single_value(req, 1.234)
 
     def _ensure_correctly_typed(self, form, value):
-        if isinstance(value, basestring):
+        if isinstance(value, string_types):
             value = value.strip()
             if not value:
                 return None
@@ -956,7 +958,7 @@
         return u'20s, 10min, 24h, 4d'
 
     def _ensure_correctly_typed(self, form, value):
-        if isinstance(value, basestring):
+        if isinstance(value, string_types):
             value = value.strip()
             if not value:
                 return None
@@ -986,7 +988,7 @@
         return self.format_single_value(req, datetime.now())
 
     def _ensure_correctly_typed(self, form, value):
-        if isinstance(value, basestring):
+        if isinstance(value, string_types):
             value = value.strip()
             if not value:
                 return None
--- a/web/formwidgets.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/web/formwidgets.py	Mon Oct 12 10:53:35 2015 +0200
@@ -99,6 +99,8 @@
 from datetime import date
 from warnings import warn
 
+from six import string_types
+
 from logilab.mtconverter import xml_escape
 from logilab.common.deprecation import deprecated
 from logilab.common.date import todatetime
@@ -282,7 +284,7 @@
         """
         posted = form._cw.form
         val = posted.get(field.input_name(form, self.suffix))
-        if isinstance(val, basestring):
+        if isinstance(val, string_types):
             val = val.strip()
         return val
 
@@ -993,12 +995,12 @@
         req = form._cw
         values = {}
         path = req.form.get(field.input_name(form, 'path'))
-        if isinstance(path, basestring):
+        if isinstance(path, string_types):
             path = path.strip()
         if path is None:
             path = u''
         fqs = req.form.get(field.input_name(form, 'fqs'))
-        if isinstance(fqs, basestring):
+        if isinstance(fqs, string_types):
             fqs = fqs.strip() or None
             if fqs:
                 for i, line in enumerate(fqs.split('\n')):
--- a/web/htmlwidgets.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/web/htmlwidgets.py	Mon Oct 12 10:53:35 2015 +0200
@@ -24,6 +24,8 @@
 import random
 from math import floor
 
+from six.moves import range
+
 from logilab.mtconverter import xml_escape
 from logilab.common.deprecation import class_deprecated
 
@@ -343,7 +345,7 @@
             self.w(u'<th %s>%s</th>' % (' '.join(attrs), column.name or u''))
         self.w(u'</tr>')
         self.w(u'</thead><tbody>')
-        for rowindex in xrange(len(self.model.get_rows())):
+        for rowindex in range(len(self.model.get_rows())):
             klass = (rowindex%2==1) and 'odd' or 'even'
             self.w(u'<tr class="%s" %s>' % (klass, self.highlight))
             for column, sortvalue in self.itercols(rowindex):
--- a/web/http_headers.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/web/http_headers.py	Mon Oct 12 10:53:35 2015 +0200
@@ -2,11 +2,14 @@
 # http://twistedmatrix.com/trac/wiki/TwistedWeb2
 
 
-import types, time
+import time
 from calendar import timegm
 import base64
 import re
-import urlparse
+
+from six import string_types
+from six.moves.urllib.parse import urlparse
+
 
 def dashCapitalize(s):
     ''' Capitalize a string, making sure to treat - as a word seperator '''
@@ -295,9 +298,9 @@
         cur = cur+1
 
     if qpair:
-        raise ValueError, "Missing character after '\\'"
+        raise ValueError("Missing character after '\\'")
     if quoted:
-        raise ValueError, "Missing end quote"
+        raise ValueError("Missing end quote")
 
     if start != cur:
         if foldCase:
@@ -347,7 +350,7 @@
 ##### parser utilities:
 def checkSingleToken(tokens):
     if len(tokens) != 1:
-        raise ValueError, "Expected single token, not %s." % (tokens,)
+        raise ValueError("Expected single token, not %s." % (tokens,))
     return tokens[0]
 
 def parseKeyValue(val):
@@ -355,7 +358,7 @@
         return val[0], None
     elif len(val) == 3 and val[1] == Token('='):
         return val[0], val[2]
-    raise ValueError, "Expected key or key=value, but got %s." % (val,)
+    raise ValueError("Expected key or key=value, but got %s." % (val,))
 
 def parseArgs(field):
     args = split(field, Token(';'))
@@ -380,7 +383,7 @@
 
 def unique(seq):
     '''if seq is not a string, check it's a sequence of one element and return it'''
-    if isinstance(seq, basestring):
+    if isinstance(seq, string_types):
         return seq
     if len(seq) != 1:
         raise ValueError('single value required, not %s' % seq)
@@ -398,7 +401,7 @@
     """Ensure origin is a valid URL-base stuff, or null"""
     if origin == 'null':
         return origin
-    p = urlparse.urlparse(origin)
+    p = urlparse(origin)
     if p.params or p.query or p.username or p.path not in ('', '/'):
         raise ValueError('Incorrect Accept-Control-Allow-Origin value %s' % origin)
     if p.scheme not in ('http', 'https'):
@@ -452,10 +455,10 @@
 
     """
     if (value in (True, 1) or
-            isinstance(value, basestring) and value.lower() == 'true'):
+            isinstance(value, string_types) and value.lower() == 'true'):
         return 'true'
     if (value in (False, 0) or
-            isinstance(value, basestring) and value.lower() == 'false'):
+            isinstance(value, string_types) and value.lower() == 'false'):
         return 'false'
     raise ValueError("Invalid true/false header value: %s" % value)
 
@@ -506,7 +509,7 @@
     type, args = parseArgs(field)
 
     if len(type) != 3 or type[1] != Token('/'):
-        raise ValueError, "MIME Type "+str(type)+" invalid."
+        raise ValueError("MIME Type "+str(type)+" invalid.")
 
     # okay, this spec is screwy. A 'q' parameter is used as the separator
     # between MIME parameters and (as yet undefined) additional HTTP
@@ -569,7 +572,7 @@
     type, args = parseArgs(header)
 
     if len(type) != 3 or type[1] != Token('/'):
-        raise ValueError, "MIME Type "+str(type)+" invalid."
+        raise ValueError("MIME Type "+str(type)+" invalid.")
 
     args = [(kv[0].lower(), kv[1]) for kv in args]
 
@@ -766,7 +769,8 @@
             v = [field.strip().lower() for field in v.split(',')]
     return k, v
 
-def generateCacheControl((k, v)):
+def generateCacheControl(args):
+    k, v = args
     if v is None:
         return str(k)
     else:
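
This file also drops the old raise ValueError, "message" statement form, which Python 3 rejects; the parenthesised call form is valid on both. A one-function sketch:

    def check_end_quote(quoted):
        if quoted:
            # Python 2 only: raise ValueError, "Missing end quote"
            raise ValueError("Missing end quote")   # portable form
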
--- a/web/propertysheet.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/web/propertysheet.py	Mon Oct 12 10:53:35 2015 +0200
@@ -57,7 +57,9 @@
     def load(self, fpath):
         scriptglobals = self.context.copy()
         scriptglobals['__file__'] = fpath
-        execfile(fpath, scriptglobals, self)
+        with open(fpath, 'rb') as fobj:
+            code = compile(fobj.read(), fpath, 'exec')
+        exec(code, scriptglobals, self)
         for name, type in TYPE_CHECKS:
             if name in self:
                 if not isinstance(self[name], type):
@@ -96,7 +98,7 @@
             if not osp.exists(rcachedir):
                 os.makedirs(rcachedir)
             sourcefile = osp.join(rdirectory, rid)
-            content = file(sourcefile).read()
+            content = open(sourcefile).read()
             # XXX replace % not followed by a paren by %% to avoid having to do
             # this in the source css file ?
             try:
@@ -105,7 +107,7 @@
                 self.error("can't process %s/%s: %s", rdirectory, rid, ex)
                 adirectory = rdirectory
             else:
-                stream = file(cachefile, 'w')
+                stream = open(cachefile, 'w')
                 stream.write(content)
                 stream.close()
                 adirectory = self._cache_directory
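
execfile() was removed in Python 3; the hunk reads the file, compiles it with its real path (so tracebacks keep pointing at the source file) and execs the code object against explicit globals and locals. A minimal sketch of that replacement, with illustrative names:

    def run_script(fpath, scriptglobals, namespace):
        # portable stand-in for: execfile(fpath, scriptglobals, namespace)
        with open(fpath, 'rb') as fobj:
            code = compile(fobj.read(), fpath, 'exec')
        exec(code, scriptglobals, namespace)
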
--- a/web/request.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/web/request.py	Mon Oct 12 10:53:35 2015 +0200
@@ -22,15 +22,16 @@
 import time
 import random
 import base64
-import urllib
-from StringIO import StringIO
 from hashlib import sha1 # pylint: disable=E0611
-from Cookie import SimpleCookie
 from calendar import timegm
 from datetime import date, datetime
-from urlparse import urlsplit
-import httplib
 from warnings import warn
+from io import BytesIO
+
+from six import string_types
+from six.moves import http_client
+from six.moves.urllib.parse import urlsplit, quote as urlquote
+from six.moves.http_cookies import SimpleCookie
 
 from rql.utils import rqlvar_maker
 
@@ -142,7 +143,7 @@
         #: form parameters
         self.setup_params(form)
         #: received body
-        self.content = StringIO()
+        self.content = BytesIO()
         # prepare output header
         #: Header used for the final response
         self.headers_out = Headers()
@@ -437,7 +438,7 @@
             eids = form['eid']
         except KeyError:
             raise NothingToEdit(self._('no selected entities'))
-        if isinstance(eids, basestring):
+        if isinstance(eids, string_types):
             eids = (eids,)
         for peid in eids:
             if withtype:
@@ -580,7 +581,7 @@
             header.append('filename="%s"' % ascii_filename)
             if unicode_filename is not None:
                 # encoded filename according RFC5987
-                urlquoted_filename = urllib.quote(unicode_filename.encode('utf-8'), '')
+                urlquoted_filename = urlquote(unicode_filename.encode('utf-8'), '')
                 header.append("filename*=utf-8''" + urlquoted_filename)
             self.set_header('content-disposition', ';'.join(header))
 
@@ -596,7 +597,7 @@
         :param localfile: if True, the default data dir prefix is added to the
                           JS filename
         """
-        if isinstance(jsfiles, basestring):
+        if isinstance(jsfiles, string_types):
             jsfiles = (jsfiles,)
         for jsfile in jsfiles:
             if localfile:
@@ -616,7 +617,7 @@
                        the css inclusion. cf:
                        http://msdn.microsoft.com/en-us/library/ms537512(VS.85).aspx
         """
-        if isinstance(cssfiles, basestring):
+        if isinstance(cssfiles, string_types):
             cssfiles = (cssfiles,)
         if ieonly:
             if self.ie_browser():
@@ -738,9 +739,9 @@
             # overwrite headers_out to forge a brand new not-modified response
             self.headers_out = self._forge_cached_headers()
             if self.http_method() in ('HEAD', 'GET'):
-                self.status_out = httplib.NOT_MODIFIED
+                self.status_out = http_client.NOT_MODIFIED
             else:
-                self.status_out = httplib.PRECONDITION_FAILED
+                self.status_out = http_client.PRECONDITION_FAILED
             # XXX replace by True once validate_cache bw compat method is dropped
             return self.status_out
         # XXX replace by False once validate_cache bw compat method is dropped
--- a/web/schemaviewer.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/web/schemaviewer.py	Mon Oct 12 10:53:35 2015 +0200
@@ -20,6 +20,8 @@
 __docformat__ = "restructuredtext en"
 _ = unicode
 
+from six import string_types
+
 from logilab.common.ureports import Section, Title, Table, Link, Span, Text
 
 from yams.schema2dot import CARD_MAP
@@ -226,7 +228,7 @@
                     elif isinstance(val, (list, tuple)):
                         val = sorted(val)
                         val = ', '.join(str(v) for v in val)
-                    elif val and isinstance(val, basestring):
+                    elif val and isinstance(val, string_types):
                         val = _(val)
                     else:
                         val = str(val)
--- a/web/test/data/views.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/web/test/data/views.py	Mon Oct 12 10:53:35 2015 +0200
@@ -45,7 +45,7 @@
         for key, value in self._cw.form.iteritems():
             result_dict[key] = _recursive_replace_stream_by_content(value)
         return result_dict
-    except Exception, ex:
+    except Exception as ex:
         import traceback as tb
         tb.print_exc(ex)
 
--- a/web/test/jstests/ajax_url2.html	Mon Oct 12 09:19:07 2015 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,7 +0,0 @@
-<div id="ajaxroot">
-  <div class="ajaxHtmlHead">
-    <script src="http://foo.js" type="text/javascript"> </script>
-    <link rel="stylesheet" type="text/css" media="all" href="qunit.css" />
-  </div>
-  <h1>Hello</h1>
-</div>
--- a/web/test/test_jscript.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/web/test/test_jscript.py	Mon Oct 12 10:53:35 2015 +0200
@@ -28,11 +28,6 @@
             "../../web/data/cubicweb.compat.js",
             "../../web/data/cubicweb.htmlhelpers.js",
             "../../web/data/cubicweb.ajax.js",
-            ), (
-            "jstests/ajax_url0.html",
-            "jstests/ajax_url1.html",
-            "jstests/ajax_url2.html",
-            "jstests/ajaxresult.json",
             ),
          ),
     )
--- a/web/test/unittest_application.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/web/test/unittest_application.py	Mon Oct 12 10:53:35 2015 +0200
@@ -17,8 +17,10 @@
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """unit tests for cubicweb.web.application"""
 
-import base64, Cookie
-import httplib
+import base64
+
+from six.moves import http_client
+from six.moves.http_cookies import SimpleCookie
 
 from logilab.common.testlib import TestCase, unittest_main
 from logilab.common.decorators import clear_cache, classproperty
@@ -267,7 +269,7 @@
                 with self.admin_access.web_request(vid='test.ajax.error') as req:
                     req.ajax_request = True
                     page = app.handle_request(req, '')
-        self.assertEqual(httplib.INTERNAL_SERVER_ERROR,
+        self.assertEqual(http_client.INTERNAL_SERVER_ERROR,
                          req.status_out)
 
     def _test_cleaned(self, kwargs, injected, cleaned):
@@ -361,7 +363,7 @@
     def _reset_cookie(self, req):
         # preparing the suite of the test
         # set session id in cookie
-        cookie = Cookie.SimpleCookie()
+        cookie = SimpleCookie()
         sessioncookie = self.app.session_handler.session_cookie(req)
         cookie[sessioncookie] = req.session.sessionid
         req.set_request_header('Cookie', cookie[sessioncookie].OutputString(),
--- a/web/test/unittest_formwidgets.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/web/test/unittest_formwidgets.py	Mon Oct 12 10:53:35 2015 +0200
@@ -24,16 +24,23 @@
 
 from cubes.file.entities import File
 
-def setUpModule(*args):
-    global schema
-    config = TestServerConfiguration('data', apphome=WidgetsTC.datadir)
-    config.bootstrap_cubes()
-    schema = config.load_schema()
 
 class WidgetsTC(TestCase):
 
+    @classmethod
+    def setUpClass(cls):
+        super(WidgetsTC, cls).setUpClass()
+        config = TestServerConfiguration('data', apphome=cls.datadir)
+        config.bootstrap_cubes()
+        cls.schema = config.load_schema()
+
+    @classmethod
+    def tearDownClass(cls):
+        del cls.schema
+        super(WidgetsTC, cls).tearDownClass()
+
     def test_editableurl_widget(self):
-        field = formfields.guess_field(schema['Bookmark'], schema['path'])
+        field = formfields.guess_field(self.schema['Bookmark'], self.schema['path'])
         widget = formwidgets.EditableURLWidget()
         req = fake.FakeRequest(form={'path-subjectfqs:A': 'param=value&vid=view'})
         form = mock(_cw=req, formvalues={}, edited_entity=mock(eid='A'))
@@ -41,7 +48,7 @@
                          '?param=value%26vid%3Dview')
 
     def test_bitselect_widget(self):
-        field = formfields.guess_field(schema['CWAttribute'], schema['ordernum'])
+        field = formfields.guess_field(self.schema['CWAttribute'], self.schema['ordernum'])
         field.choices = [('un', '1',), ('deux', '2',)]
         widget = formwidgets.BitSelect(settabindex=False)
         req = fake.FakeRequest(form={'ordernum-subject:A': ['1', '2']})
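
The module-level setUpModule that filled a global is replaced by setUpClass/tearDownClass, which attach the loaded schema to the test class and tie the fixture's lifetime to it. A hedged sketch of the structure; load_expensive_schema is a hypothetical stand-in for the configuration/load_schema() calls above:

    from unittest import TestCase

    def load_expensive_schema():
        # hypothetical stand-in for TestServerConfiguration + load_schema()
        return {'Bookmark': object(), 'CWAttribute': object()}

    class WidgetsTC(TestCase):
        @classmethod
        def setUpClass(cls):
            super(WidgetsTC, cls).setUpClass()
            cls.schema = load_expensive_schema()

        @classmethod
        def tearDownClass(cls):
            del cls.schema
            super(WidgetsTC, cls).tearDownClass()

        def test_schema_loaded(self):
            self.assertIn('Bookmark', self.schema)
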
--- a/web/test/unittest_idownloadable.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/web/test/unittest_idownloadable.py	Mon Oct 12 10:53:35 2015 +0200
@@ -91,9 +91,9 @@
 
     def test_header_with_space_and_comma(self):
         with self.admin_access.web_request() as req:
-            self.create_user(req, login=ur'c " l\ a', password='babar')
+            self.create_user(req, login=u'c " l\\ a', password='babar')
             req.cnx.commit()
-        with self.new_access(ur'c " l\ a').web_request() as req:
+        with self.new_access(u'c " l\\ a').web_request() as req:
             req.form['vid'] = 'download'
             req.form['eid'] = str(req.user.eid)
             data = self.ctrl_publish(req,'view')
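
The ur'...' prefix (raw unicode literal) is invalid in Python 3, so the login is rewritten as a plain u'...' string with the backslash escaped. A one-line sketch showing the two spellings denote the same text:

    login = u'c " l\\ a'     # same characters as Python 2's ur'c " l\ a'
    assert len(login) == 8
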
--- a/web/test/unittest_magicsearch.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/web/test/unittest_magicsearch.py	Mon Oct 12 10:53:35 2015 +0200
@@ -21,6 +21,8 @@
 import sys
 from contextlib import contextmanager
 
+from six.moves import range
+
 from logilab.common.testlib import TestCase, unittest_main
 
 from rql import BadRQLQuery, RQLSyntaxError
@@ -330,7 +332,7 @@
         # suggestions should contain any possible value for
         # a given attribute (limited to 10)
         with self.admin_access.web_request() as req:
-            for i in xrange(15):
+            for i in range(15):
                 req.create_entity('Personne', nom=u'n%s' % i, prenom=u'p%s' % i)
             req.cnx.commit()
         self.assertListEqual(['Any X WHERE X is Personne, X nom "n0"',
--- a/web/test/unittest_urlpublisher.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/web/test/unittest_urlpublisher.py	Mon Oct 12 10:53:35 2015 +0200
@@ -25,7 +25,7 @@
 from cubicweb.rset import ResultSet
 from cubicweb.devtools.testlib import CubicWebTC
 from cubicweb.devtools.fake import FakeRequest
-from cubicweb.web import NotFound, Redirect
+from cubicweb.web import NotFound, Redirect, views
 from cubicweb.web.views.urlrewrite import SimpleReqRewriter
 
 
@@ -69,6 +69,7 @@
             self.assertEqual("Any X,AA,AB ORDERBY AB WHERE X is_instance_of CWEType, "
                              "X modification_date AA, X name AB",
                              rset.printable_rql())
+            self.assertEqual(req.form['vid'], 'sameetypelist')
 
     def test_rest_path_by_attr(self):
         with self.admin_access.web_request() as req:
@@ -91,6 +92,7 @@
                              'X firstname AA, X login AB, X modification_date AC, '
                              'X surname AD, X login "admin"',
                              rset.printable_rql())
+            self.assertEqual(req.form['vid'], 'primary')
 
     def test_rest_path_eid(self):
         with self.admin_access.web_request() as req:
@@ -125,6 +127,15 @@
                              'X title "hell\'o"',
                              rset.printable_rql())
 
+    def test_rest_path_use_vid_from_rset(self):
+        with self.admin_access.web_request(headers={'Accept': 'application/rdf+xml'}) as req:
+            views.VID_BY_MIMETYPE['application/rdf+xml'] = 'rdf'
+            try:
+                ctrl, rset = self.process(req, 'CWEType')
+            finally:
+                views.VID_BY_MIMETYPE.pop('application/rdf+xml')
+            self.assertEqual(req.form['vid'], 'rdf')
+
     def test_rest_path_errors(self):
         with self.admin_access.web_request() as req:
             self.assertRaises(NotFound, self.process, req, 'CWUser/eid/30000')
@@ -141,25 +152,24 @@
             self.assertRaises(NotFound, self.process, req, '1/non_action')
             self.assertRaises(NotFound, self.process, req, 'CWUser/login/admin/non_action')
 
-
     def test_regexp_path(self):
         """tests the regexp path resolution"""
         with self.admin_access.web_request() as req:
             ctrl, rset = self.process(req, 'add/Task')
             self.assertEqual(ctrl, 'view')
             self.assertEqual(rset, None)
-            self.assertEqual(req.form, {'etype' : "Task", 'vid' : "creation"})
+            self.assertEqual(req.form, {'etype': "Task", 'vid': "creation"})
             self.assertRaises(NotFound, self.process, req, 'add/foo/bar')
 
     def test_nonascii_path(self):
         oldrules = SimpleReqRewriter.rules
-        SimpleReqRewriter.rules = [(re.compile('/\w+', re.U), dict(vid='foo')),]
+        SimpleReqRewriter.rules = [(re.compile('/\w+', re.U), dict(vid='foo'))]
         with self.admin_access.web_request() as req:
             try:
                 path = str(FakeRequest().url_quote(u'été'))
                 ctrl, rset = self.process(req, path)
                 self.assertEqual(rset, None)
-                self.assertEqual(req.form, {'vid' : "foo"})
+                self.assertEqual(req.form, {'vid': "foo"})
             finally:
                 SimpleReqRewriter.rules = oldrules
 
--- a/web/test/unittest_views_basecontrollers.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/web/test/unittest_views_basecontrollers.py	Mon Oct 12 10:53:35 2015 +0200
@@ -17,12 +17,7 @@
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """cubicweb.web.views.basecontrollers unit tests"""
 
-from urlparse import urlsplit, urlunsplit, urljoin
-# parse_qs is deprecated in cgi and has been moved to urlparse in Python 2.6
-try:
-    from urlparse import parse_qs as url_parse_query
-except ImportError:
-    from cgi import parse_qs as url_parse_query
+from six.moves.urllib.parse import urlsplit, urlunsplit, urljoin, parse_qs
 
 import lxml
 
@@ -662,7 +657,7 @@
                         '_cw_entity_fields:X': 'login-subject,upassword-subject,in_group-subject',
                         'login-subject:X': u'adim',
                         'upassword-subject:X': u'toto', 'upassword-subject-confirm:X': u'toto',
-                        'in_group-subject:X': `gueid`,
+                        'in_group-subject:X': repr(gueid),
 
                         '__type:Y': 'EmailAddress',
                         '_cw_entity_fields:Y': 'address-subject,alias-subject,use_email-object',
@@ -1042,7 +1037,7 @@
         """
         with self.admin_access.web_request() as req:
             scheme, netloc, path, query, fragment = urlsplit(url)
-            query_dict = url_parse_query(query)
+            query_dict = parse_qs(query)
             expected_url = urljoin(req.base_url(), expected_path)
             self.assertEqual( urlunsplit((scheme, netloc, path, None, None)), expected_url)
 
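
The urlsplit/parse_qs import at the top of this test replaces a version-dependent fallback with the single six.moves location; the gueid change in the same file is needed because backtick repr expressions are gone in Python 3, repr() being the portable spelling. A short sketch of the consolidated URL API (values are illustrative):

    from six.moves.urllib.parse import urlsplit, parse_qs

    parts = urlsplit('http://example.com/path?vid=download&eid=42')
    assert parts.path == '/path'
    assert parse_qs(parts.query) == {'vid': ['download'], 'eid': ['42']}
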
--- a/web/test/unittest_viewselector.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/web/test/unittest_viewselector.py	Mon Oct 12 10:53:35 2015 +0200
@@ -17,6 +17,7 @@
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """XXX rename, split, reorganize this"""
+from __future__ import print_function
 
 from logilab.common.testlib import unittest_main
 
@@ -76,9 +77,9 @@
         try:
             self.assertSetEqual(list(content), expected)
         except Exception:
-            print registry, sorted(expected), sorted(content)
-            print 'no more', [v for v in expected if not v in content]
-            print 'missing', [v for v in content if not v in expected]
+            print(registry, sorted(expected), sorted(content))
+            print('no more', [v for v in expected if not v in content])
+            print('missing', [v for v in content if not v in expected])
             raise
 
     def setUp(self):
@@ -461,7 +462,7 @@
                 obj = self.vreg['views'].select(vid, req, rset=rset, **args)
                 return obj.render(**args)
             except Exception:
-                print vid, rset, args
+                print(vid, rset, args)
                 raise
 
     def test_form(self):
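
The print fixes in this test follow the usual future-import pattern; a minimal standalone sketch (values are made up):

    from __future__ import print_function   # print is a function on Python 2 as well

    expected, content = ['a', 'b'], ['a', 'c']
    print('no more', [v for v in expected if v not in content])   # ['b']
    print('missing', [v for v in content if v not in expected])   # ['c']
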
--- a/web/views/__init__.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/web/views/__init__.py	Mon Oct 12 10:53:35 2015 +0200
@@ -77,7 +77,7 @@
     #'text/xml': 'xml',
     # XXX rss, owl...
 }
-def vid_from_rset(req, rset, schema):
+def vid_from_rset(req, rset, schema, check_table=True):
     """given a result set, return a view id"""
     if rset is None:
         return 'index'
@@ -90,7 +90,7 @@
         return 'noresult'
     # entity result set
     if not schema.eschema(rset.description[0][0]).final:
-        if need_table_view(rset, schema):
+        if check_table and need_table_view(rset, schema):
             return 'table'
         if nb_rows == 1:
             if req.search_state[0] == 'normal':
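
The new check_table flag lets a caller skip the table-view heuristic; a hedged sketch of the two call styles, assuming req and rset come from a web request as in the hunks around this one:

    from cubicweb.web.views import vid_from_rset

    # default behaviour: may return 'table' when fetch_rql pulls in non-final relations
    vid = vid_from_rset(req, rset, req.vreg.schema)
    # the URL publisher (urlpublishing.py hunk below) opts out of that heuristic
    vid = vid_from_rset(req, rset, req.vreg.schema, check_table=False)
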
--- a/web/views/authentication.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/web/views/authentication.py	Mon Oct 12 10:53:35 2015 +0200
@@ -19,12 +19,9 @@
 
 __docformat__ = "restructuredtext en"
 
-from threading import Lock
-
-from logilab.common.decorators import clear_cache
 from logilab.common.deprecation import class_renamed
 
-from cubicweb import AuthenticationError, BadConnectionId
+from cubicweb import AuthenticationError
 from cubicweb.view import Component
 from cubicweb.web import InvalidSession
 
@@ -101,41 +98,11 @@
     '("ie" instead of "ei")')
 
 
-class AbstractAuthenticationManager(Component):
-    """authenticate user associated to a request and check session validity"""
-    __abstract__ = True
-    __regid__ = 'authmanager'
 
-    def __init__(self, repo):
-        self.vreg = repo.vreg
-
-    def validate_session(self, req, session):
-        """check session validity, reconnecting it to the repository if the
-        associated connection expired in the repository side (hence the
-        necessity for this method).
-
-        raise :exc:`InvalidSession` if session is corrupted for a reason or
-        another and should be closed
-        """
-        raise NotImplementedError()
-
-    def authenticate(self, req):
-        """authenticate user using connection information found in the request,
-        and return corresponding a :class:`~cubicweb.dbapi.Connection` instance,
-        as well as login and authentication information dictionary used to open
-        the connection.
-
-        raise :exc:`cubicweb.AuthenticationError` if authentication failed
-        (no authentication info found or wrong user/password)
-        """
-        raise NotImplementedError()
-
-
-class RepositoryAuthenticationManager(AbstractAuthenticationManager):
+class RepositoryAuthenticationManager(object):
     """authenticate user associated to a request and check session validity"""
 
     def __init__(self, repo):
-        super(RepositoryAuthenticationManager, self).__init__(repo)
         self.repo = repo
         vreg = repo.vreg
         self.log_queries = vreg.config['query-log-file']
--- a/web/views/autoform.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/web/views/autoform.py	Mon Oct 12 10:53:35 2015 +0200
@@ -123,6 +123,8 @@
 
 from warnings import warn
 
+from six.moves import range
+
 from logilab.mtconverter import xml_escape
 from logilab.common.decorators import iclassmethod, cached
 from logilab.common.deprecation import deprecated
@@ -491,7 +493,8 @@
     pendings.remove( (int(eidfrom), rel, int(eidto)) )
 
 @ajaxfunc(output_type='json')
-def remove_pending_insert(self, (eidfrom, rel, eidto)):
+def remove_pending_insert(self, args):
+    eidfrom, rel, eidto = args
     _remove_pending(self._cw, eidfrom, rel, eidto, 'insert')
 
 @ajaxfunc(output_type='json')
@@ -500,11 +503,13 @@
         _add_pending(self._cw, eidfrom, rel, eidto, 'insert')
 
 @ajaxfunc(output_type='json')
-def remove_pending_delete(self, (eidfrom, rel, eidto)):
+def remove_pending_delete(self, args):
+    eidfrom, rel, eidto = args
     _remove_pending(self._cw, eidfrom, rel, eidto, 'delete')
 
 @ajaxfunc(output_type='json')
-def add_pending_delete(self, (eidfrom, rel, eidto)):
+def add_pending_delete(self, args):
+    eidfrom, rel, eidto = args
     _add_pending(self._cw, eidfrom, rel, eidto, 'delete')
 
 
@@ -608,7 +613,7 @@
                     toggleable_rel_link_func = toggleable_relation_link
                 else:
                     toggleable_rel_link_func = lambda x, y, z: u''
-                for row in xrange(rset.rowcount):
+                for row in range(rset.rowcount):
                     nodeid = relation_id(entity.eid, rschema, role,
                                          rset[row][0])
                     if nodeid in pending_deletes:
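
The ajaxfunc signatures above are rewritten because tuple parameters were removed in Python 3 (PEP 3113); the lambda change in startup.py further down is the same pattern. A standalone sketch with hypothetical data:

    # Python 2 only -- tuple parameter unpacked in the signature:
    # def add_pending((eidfrom, rel, eidto)):
    #     ...

    # Python 2 and 3: accept a single argument and unpack it in the body
    def add_pending(args):
        eidfrom, rel, eidto = args
        return eidfrom, rel, eidto

    assert add_pending((1, 'relates_to', 2)) == (1, 'relates_to', 2)
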
--- a/web/views/baseviews.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/web/views/baseviews.py	Mon Oct 12 10:53:35 2015 +0200
@@ -81,6 +81,8 @@
 from datetime import timedelta
 from warnings import warn
 
+from six.moves import range
+
 from rql import nodes
 
 from logilab.mtconverter import TransformError, xml_escape
@@ -231,8 +233,8 @@
         """
         rset = self.cw_rset
         if rset is None:
-            raise NotImplementedError, self
-        for i in xrange(len(rset)):
+            raise NotImplementedError(self)
+        for i in range(len(rset)):
             self.wview(self.__regid__, rset, row=i, **kwargs)
             if len(rset) > 1:
                 self.w(u"\n")
@@ -314,7 +316,7 @@
             self.w(u'<ul>\n')
         else:
             self.w(u'<ul%s class="%s">\n' % (listid, klass or 'section'))
-        for i in xrange(self.cw_rset.rowcount):
+        for i in range(self.cw_rset.rowcount):
             self.cell_call(row=i, col=0, vid=subvid, klass=klass, **kwargs)
         self.w(u'</ul>\n')
         if title:
@@ -427,7 +429,7 @@
     def call(self, subvid=None, **kwargs):
         kwargs['vid'] = subvid
         rset = self.cw_rset
-        for i in xrange(len(rset)):
+        for i in range(len(rset)):
             self.cell_call(i, 0, **kwargs)
             if i < rset.rowcount-1:
                 self.w(self.separator)
--- a/web/views/csvexport.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/web/views/csvexport.py	Mon Oct 12 10:53:35 2015 +0200
@@ -20,6 +20,8 @@
 __docformat__ = "restructuredtext en"
 _ = unicode
 
+from six.moves import range
+
 from cubicweb.schema import display_name
 from cubicweb.predicates import any_rset, empty_rset
 from cubicweb.uilib import UnicodeCSVWriter
@@ -88,7 +90,7 @@
         rows_by_type = {}
         writer = self.csvwriter()
         rowdef_by_type = {}
-        for index in xrange(len(self.cw_rset)):
+        for index in range(len(self.cw_rset)):
             entity = self.cw_rset.complete_entity(index)
             if entity.e_schema not in rows_by_type:
                 rowdef_by_type[entity.e_schema] = [rs for rs, at in entity.e_schema.attribute_definitions()
--- a/web/views/cwsources.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/web/views/cwsources.py	Mon Oct 12 10:53:35 2015 +0200
@@ -24,6 +24,9 @@
 
 import logging
 from itertools import repeat
+
+from six.moves import range
+
 from logilab.mtconverter import xml_escape
 from logilab.common.decorators import cachedproperty
 
@@ -95,7 +98,7 @@
             if hostconfig:
                 self.w(u'<h3>%s</h3>' % self._cw._('CWSourceHostConfig_plural'))
                 self._cw.view('table', hostconfig, w=self.w,
-                              displaycols=range(2),
+                              displaycols=list(range(2)),
                               cellvids={1: 'editable-final'})
 
 
--- a/web/views/cwuser.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/web/views/cwuser.py	Mon Oct 12 10:53:35 2015 +0200
@@ -22,6 +22,8 @@
 
 from hashlib import sha1 # pylint: disable=E0611
 
+from six.moves import range
+
 from logilab.mtconverter import xml_escape
 
 from cubicweb import tags
@@ -64,7 +66,7 @@
 <rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
          xmlns:rdfs="http://www.w3org/2000/01/rdf-schema#"
          xmlns:foaf="http://xmlns.com/foaf/0.1/"> '''% self._cw.encoding)
-        for i in xrange(self.cw_rset.rowcount):
+        for i in range(self.cw_rset.rowcount):
             self.cell_call(i, 0)
         self.w(u'</rdf:RDF>\n')
 
--- a/web/views/editforms.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/web/views/editforms.py	Mon Oct 12 10:53:35 2015 +0200
@@ -24,6 +24,8 @@
 
 from copy import copy
 
+from six.moves import range
+
 from logilab.mtconverter import xml_escape
 from logilab.common.decorators import cached
 from logilab.common.registry import yes
@@ -230,7 +232,7 @@
     def __init__(self, req, rset, **kwargs):
         kwargs.setdefault('__redirectrql', rset.printable_rql())
         super(TableEditForm, self).__init__(req, rset=rset, **kwargs)
-        for row in xrange(len(self.cw_rset)):
+        for row in range(len(self.cw_rset)):
             form = self._cw.vreg['forms'].select('edition', self._cw,
                                                  rset=self.cw_rset, row=row,
                                                  formtype='muledit',
--- a/web/views/idownloadable.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/web/views/idownloadable.py	Mon Oct 12 10:53:35 2015 +0200
@@ -22,6 +22,8 @@
 __docformat__ = "restructuredtext en"
 _ = unicode
 
+from six.moves import range
+
 from logilab.mtconverter import BINARY_ENCODINGS, TransformError, xml_escape
 from logilab.common.deprecation import class_renamed, deprecated
 
@@ -166,7 +168,7 @@
 
     def call(self, **kwargs):
         rset = self.cw_rset
-        for i in xrange(len(rset)):
+        for i in range(len(rset)):
             self.w(u'<div class="efile">')
             self.wview(self.__regid__, rset, row=i, col=0, **kwargs)
             self.w(u'</div>')
--- a/web/views/owl.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/web/views/owl.py	Mon Oct 12 10:53:35 2015 +0200
@@ -21,6 +21,8 @@
 __docformat__ = "restructuredtext en"
 _ = unicode
 
+from six.moves import range
+
 from logilab.mtconverter import TransformError, xml_escape
 
 from cubicweb.view import StartupView, EntityView
@@ -166,7 +168,7 @@
 
     def call(self):
         self.w(OWL_OPENING_ROOT % {'appid': self._cw.vreg.schema.name})
-        for i in xrange(self.cw_rset.rowcount):
+        for i in range(self.cw_rset.rowcount):
             self.cell_call(i, 0)
         self.w(OWL_CLOSING_ROOT)
 
--- a/web/views/plots.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/web/views/plots.py	Mon Oct 12 10:53:35 2015 +0200
@@ -20,6 +20,8 @@
 __docformat__ = "restructuredtext en"
 _ = unicode
 
+from six.moves import range
+
 from logilab.common.date import datetime2ticks
 from logilab.common.deprecation import class_deprecated
 from logilab.common.registry import objectify_predicate
@@ -154,7 +156,7 @@
         abscissa = [row[0] for row in self.cw_rset]
         plots = []
         nbcols = len(self.cw_rset.rows[0])
-        for col in xrange(1, nbcols):
+        for col in range(1, nbcols):
             data = [row[col] for row in self.cw_rset]
             plots.append(filterout_nulls(abscissa, data))
         plotwidget = FlotPlotWidget(varnames, plots, timemode=self.timemode)
--- a/web/views/pyviews.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/web/views/pyviews.py	Mon Oct 12 10:53:35 2015 +0200
@@ -19,6 +19,8 @@
 """
 __docformat__ = "restructuredtext en"
 
+from six.moves import range
+
 from cubicweb.view import View
 from cubicweb.predicates import match_kwargs
 from cubicweb.web.views import tableview
@@ -100,7 +102,7 @@
 
     def build_column_renderers(self):
         return [self.column_renderer(colid)
-                for colid in xrange(len(self.pyvalue[0]))]
+                for colid in range(len(self.pyvalue[0]))]
 
     def facets_form(self, mainvar=None):
         return None # not supported
--- a/web/views/rdf.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/web/views/rdf.py	Mon Oct 12 10:53:35 2015 +0200
@@ -20,6 +20,8 @@
 __docformat__ = "restructuredtext en"
 _ = unicode
 
+from six.moves import range
+
 from yams import xy
 
 from cubicweb.schema import VIRTUAL_RTYPES
@@ -56,7 +58,7 @@
             graph.bind('cw', CW)
             for prefix, xmlns in xy.XY.prefixes.items():
                 graph.bind(prefix, rdflib.Namespace(xmlns))
-            for i in xrange(self.cw_rset.rowcount):
+            for i in range(self.cw_rset.rowcount):
                 entity = self.cw_rset.complete_entity(i, 0)
                 self.entity2graph(graph, entity)
             self.w(graph.serialize(format=self.format))
--- a/web/views/sessions.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/web/views/sessions.py	Mon Oct 12 10:53:35 2015 +0200
@@ -19,20 +19,28 @@
 __docformat__ = "restructuredtext en"
 
 from time import time
+from logging import getLogger
 
-from cubicweb import RepositoryError, Unauthorized, BadConnectionId
-from cubicweb.web import InvalidSession, component
+from logilab.common.registry import RegistrableObject
+
+from cubicweb import RepositoryError, Unauthorized, BadConnectionId, set_log_methods
+from cubicweb.predicates import yes
+from cubicweb.web import InvalidSession
+
+from cubicweb.web.views import authentication
 
 
-class AbstractSessionManager(component.Component):
+class AbstractSessionManager(RegistrableObject):
     """manage session data associated to a session identifier"""
     __abstract__ = True
+    __select__ = yes()
+    __registry__ = 'sessions'
     __regid__ = 'sessionmanager'
 
     def __init__(self, repo):
         vreg = repo.vreg
         self.session_time = vreg.config['http-session-time'] or None
-        self.authmanager = vreg['components'].select('authmanager', repo=repo)
+        self.authmanager = authentication.RepositoryAuthenticationManager(repo)
         interval = (self.session_time or 0) / 2.
         if vreg.config.anonymous_user()[0] is not None:
             self.cleanup_anon_session_time = vreg.config['cleanup-anonymous-session-time'] or 5 * 60
@@ -53,15 +61,7 @@
         closed, total = 0, 0
         for session in self.current_sessions():
             total += 1
-            try:
-                last_usage_time = session.cnx.check()
-            except AttributeError:
-                last_usage_time = session.mtime
-            except BadConnectionId:
-                self.close_session(session)
-                closed += 1
-                continue
-
+            last_usage_time = session.mtime
             no_use_time = (time() - last_usage_time)
             if session.anonymous_session:
                 if no_use_time >= self.cleanup_anon_session_time:
@@ -95,11 +95,14 @@
         raise NotImplementedError()
 
 
+set_log_methods(AbstractSessionManager, getLogger('cubicweb.sessionmanager'))
+
+
 class InMemoryRepositorySessionManager(AbstractSessionManager):
     """manage session data associated to a session identifier"""
 
     def __init__(self, *args, **kwargs):
-        AbstractSessionManager.__init__(self, *args, **kwargs)
+        super(InMemoryRepositorySessionManager, self).__init__(*args, **kwargs)
         # XXX require a RepositoryAuthenticationManager which violates
         #     authenticate interface by returning a session instead of a user
         #assert isinstance(self.authmanager, RepositoryAuthenticationManager)
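
Two consequences of the hunk above: session managers now live in their own 'sessions' registry instead of 'components', and the authenticator is built directly rather than selected as an 'authmanager' component. A hedged sketch, assuming a repository instance named repo:

    from cubicweb.web.views import authentication, sessions

    authmanager = authentication.RepositoryAuthenticationManager(repo)
    manager = sessions.InMemoryRepositorySessionManager(repo)
    assert manager.authmanager.repo is repo   # set up in AbstractSessionManager.__init__
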
--- a/web/views/sparql.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/web/views/sparql.py	Mon Oct 12 10:53:35 2015 +0200
@@ -20,6 +20,8 @@
 __docformat__ = "restructuredtext en"
 _ = unicode
 
+from six.moves import range
+
 from yams import xy
 from rql import TypeResolverException
 
@@ -111,7 +113,7 @@
         rqlst = self.cw_rset.syntax_tree().children[0]
         varnames = [var.name for var in rqlst.selection]
         results = E.results()
-        for rowidx in xrange(len(self.cw_rset)):
+        for rowidx in range(len(self.cw_rset)):
             result = E.result()
             for colidx, varname in enumerate(varnames):
                 result.append(self.cell_binding(rowidx, colidx, varname))
--- a/web/views/startup.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/web/views/startup.py	Mon Oct 12 10:53:35 2015 +0200
@@ -106,7 +106,7 @@
 
     def entity_types_table(self, eschemas):
         infos = sorted(self.entity_types(eschemas),
-                       key=lambda (l,a,e): unormalize(l))
+                       key=lambda t: unormalize(t[0]))
         q, r = divmod(len(infos), 2)
         if r:
             infos.append( (None, '&#160;', '&#160;') )
--- a/web/views/tableview.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/web/views/tableview.py	Mon Oct 12 10:53:35 2015 +0200
@@ -67,6 +67,9 @@
 from copy import copy
 from types import MethodType
 
+from six import string_types
+from six.moves import range
+
 from logilab.mtconverter import xml_escape
 from logilab.common.decorators import cachedproperty
 from logilab.common.deprecation import class_deprecated
@@ -225,7 +228,7 @@
 
     def render_table_body(self, w, colrenderers):
         w(u'<tbody>')
-        for rownum in xrange(self.view.table_size):
+        for rownum in range(self.view.table_size):
             self.render_row(w, rownum, colrenderers)
         w(u'</tbody>')
 
@@ -284,7 +287,7 @@
         attrs = renderer.attributes.copy()
         if renderer.sortable:
             sortvalue = renderer.sortvalue(rownum)
-            if isinstance(sortvalue, basestring):
+            if isinstance(sortvalue, string_types):
                 sortvalue = sortvalue[:self.sortvalue_limit]
             if sortvalue is not None:
                 attrs[u'cubicweb:sortvalue'] = js_dumps(sortvalue)
@@ -646,10 +649,10 @@
         # compute displayed columns
         if self.displaycols is None:
             if headers is not None:
-                displaycols = range(len(headers))
+                displaycols = list(range(len(headers)))
             else:
                 rqlst = self.cw_rset.syntax_tree()
-                displaycols = range(len(rqlst.children[0].selection))
+                displaycols = list(range(len(rqlst.children[0].selection)))
         else:
             displaycols = self.displaycols
         # compute table headers
@@ -977,9 +980,9 @@
             if 'displaycols' in self._cw.form:
                 displaycols = [int(idx) for idx in self._cw.form['displaycols']]
             elif headers is not None:
-                displaycols = range(len(headers))
+                displaycols = list(range(len(headers)))
             else:
-                displaycols = range(len(self.cw_rset.syntax_tree().children[0].selection))
+                displaycols = list(range(len(self.cw_rset.syntax_tree().children[0].selection)))
         return displaycols
 
     def _setup_tablesorter(self, divid):
@@ -1298,7 +1301,7 @@
         self.w(u'<table class="%s">' % self.table_css)
         self.table_header(sample)
         self.w(u'<tbody>')
-        for row in xrange(self.cw_rset.rowcount):
+        for row in range(self.cw_rset.rowcount):
             self.cell_call(row=row, col=0)
         self.w(u'</tbody>')
         self.w(u'</table>')
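
The displaycols changes in this file wrap range() in list() because the Python 3 range object is a lazy sequence: code that later mutates or serializes the value needs the explicit copy. A standalone sketch:

    from six.moves import range

    headers = ['name', 'creation_date', 'state']
    displaycols = list(range(len(headers)))   # a real list on both Python 2 and 3
    assert displaycols == [0, 1, 2]
    displaycols.append(3)                     # a bare Python 3 range object has no append()
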
--- a/web/views/tabs.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/web/views/tabs.py	Mon Oct 12 10:53:35 2015 +0200
@@ -20,6 +20,8 @@
 __docformat__ = "restructuredtext en"
 _ = unicode
 
+from six import string_types
+
 from logilab.common.deprecation import class_renamed
 from logilab.mtconverter import xml_escape
 
@@ -114,7 +116,7 @@
         active_tab = uilib.domid(default_tab)
         viewsvreg = self._cw.vreg['views']
         for tab in tabs:
-            if isinstance(tab, basestring):
+            if isinstance(tab, string_types):
                 tabid, tabkwargs = tab, {}
             else:
                 tabid, tabkwargs = tab
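
basestring is gone in Python 3, hence the six.string_types checks in this and the following files; a minimal sketch of the dispatch used above (function name is hypothetical):

    from six import string_types   # (str, unicode) on Python 2, (str,) on Python 3

    def split_tab(tab):
        # a bare tab id, or an (id, kwargs) pair
        if isinstance(tab, string_types):
            return tab, {}
        return tab

    assert split_tab(u'main_tab') == (u'main_tab', {})
    assert split_tab(('other_tab', {'limit': 5})) == ('other_tab', {'limit': 5})
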
--- a/web/views/timetable.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/web/views/timetable.py	Mon Oct 12 10:53:35 2015 +0200
@@ -20,6 +20,8 @@
 __docformat__ = "restructuredtext en"
 _ = unicode
 
+from six.moves import range
+
 from logilab.mtconverter import xml_escape
 from logilab.common.date import ONEDAY, date_range, todatetime
 
@@ -51,7 +53,7 @@
         users = []
         users_max = {}
         # XXX: try refactoring with calendar.py:OneMonthCal
-        for row in xrange(self.cw_rset.rowcount):
+        for row in range(self.cw_rset.rowcount):
             task = self.cw_rset.get_entity(row, 0)
             icalendarable = task.cw_adapt_to('ICalendarable')
             if len(self.cw_rset[row]) > 1 and self.cw_rset.description[row][1] == 'CWUser':
@@ -88,7 +90,7 @@
 
         rows = []
         # colors here are class names defined in cubicweb.css
-        colors = ["col%x" % i for i in xrange(12)]
+        colors = ["col%x" % i for i in range(12)]
         next_color_index = 0
 
         visited_tasks = {} # holds a description of a task for a user
--- a/web/views/uicfg.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/web/views/uicfg.py	Mon Oct 12 10:53:35 2015 +0200
@@ -57,6 +57,8 @@
 
 from warnings import warn
 
+from six import string_types
+
 from cubicweb import neg_role
 from cubicweb.rtags import (RelationTags, RelationTagsBool, RelationTagsSet,
                             RelationTagsDict, NoTargetRelationTagsDict,
@@ -650,7 +652,7 @@
                 self.tag_relation((sschema, rschema, oschema, role), True)
 
     def _tag_etype_attr(self, etype, attr, desttype='*', *args, **kwargs):
-        if isinstance(attr, basestring):
+        if isinstance(attr, string_types):
             attr, role = attr, 'subject'
         else:
             attr, role = attr
--- a/web/views/urlpublishing.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/web/views/urlpublishing.py	Mon Oct 12 10:53:35 2015 +0200
@@ -60,7 +60,7 @@
 from rql import TypeResolverException
 
 from cubicweb import RegistryException
-from cubicweb.web import NotFound, Redirect, component
+from cubicweb.web import NotFound, Redirect, component, views
 
 
 class PathDontMatch(Exception):
@@ -201,18 +201,14 @@
             return self.handle_etype_attr(req, cls, attrname, value)
         return self.handle_etype(req, cls)
 
-    def set_vid_for_rset(self, req, cls, rset):# cls is there to ease overriding
+    def set_vid_for_rset(self, req, cls, rset):  # cls is there to ease overriding
         if rset.rowcount == 0:
             raise NotFound()
-        # we've to set a default vid here, since vid_from_rset may try to use a
-        # table view if fetch_rql include some non final relation
-        if rset.rowcount == 1:
-            req.form.setdefault('vid', 'primary')
-        else: # rset.rowcount >= 1
-            if len(rset.column_types(0)) > 1:
-                req.form.setdefault('vid', 'list')
-            else:
-                req.form.setdefault('vid', 'sameetypelist')
+        if 'vid' not in req.form:
+            # check_table=False tells vid_from_rset not to try to use a table view if fetch_rql
+            # includes some non-final relations
+            req.form['vid'] = views.vid_from_rset(req, rset, req.vreg.schema,
+                                                  check_table=False)
 
     def handle_etype(self, req, cls):
         rset = req.execute(cls.fetch_rql(req.user))
--- a/web/views/urlrewrite.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/web/views/urlrewrite.py	Mon Oct 12 10:53:35 2015 +0200
@@ -19,6 +19,8 @@
 
 import re
 
+from six import string_types
+
 from cubicweb.uilib import domid
 from cubicweb.appobject import AppObject
 
@@ -122,14 +124,14 @@
                 required_groups = None
             if required_groups and not req.user.matching_groups(required_groups):
                 continue
-            if isinstance(inputurl, basestring):
+            if isinstance(inputurl, string_types):
                 if inputurl == uri:
                     req.form.update(infos)
                     break
             elif inputurl.match(uri): # it's a regexp
                 # XXX what about i18n? (vtitle for instance)
                 for param, value in infos.items():
-                    if isinstance(value, basestring):
+                    if isinstance(value, string_types):
                         req.form[param] = inputurl.sub(value, uri)
                     else:
                         req.form[param] = value
@@ -222,7 +224,7 @@
                 required_groups = None
             if required_groups and not req.user.matching_groups(required_groups):
                 continue
-            if isinstance(inputurl, basestring):
+            if isinstance(inputurl, string_types):
                 if inputurl == uri:
                     return callback(inputurl, uri, req, self._cw.vreg.schema)
             elif inputurl.match(uri): # it's a regexp
--- a/web/views/xbel.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/web/views/xbel.py	Mon Oct 12 10:53:35 2015 +0200
@@ -20,6 +20,8 @@
 __docformat__ = "restructuredtext en"
 _ = unicode
 
+from six.moves import range
+
 from logilab.mtconverter import xml_escape
 
 from cubicweb.predicates import is_instance
@@ -42,7 +44,7 @@
         self.w(u'<!DOCTYPE xbel PUBLIC "+//IDN python.org//DTD XML Bookmark Exchange Language 1.0//EN//XML" "http://www.python.org/topics/xml/dtds/xbel-1.0.dtd">')
         self.w(u'<xbel version="1.0">')
         self.w(u'<title>%s</title>' % self._cw._('bookmarks'))
-        for i in xrange(self.cw_rset.rowcount):
+        for i in range(self.cw_rset.rowcount):
             self.cell_call(i, 0)
         self.w(u"</xbel>")
 
--- a/web/views/xmlrss.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/web/views/xmlrss.py	Mon Oct 12 10:53:35 2015 +0200
@@ -23,6 +23,8 @@
 from base64 import b64encode
 from time import timezone
 
+from six.moves import range
+
 from logilab.mtconverter import xml_escape
 
 from cubicweb.predicates import (is_instance, non_final_entity, one_line_rset,
@@ -64,7 +66,7 @@
         """display a list of entities by calling their <item_vid> view"""
         self.w(u'<?xml version="1.0" encoding="%s"?>\n' % self._cw.encoding)
         self.w(u'<%s size="%s">\n' % (self.xml_root, len(self.cw_rset)))
-        for i in xrange(self.cw_rset.rowcount):
+        for i in range(self.cw_rset.rowcount):
             self.cell_call(i, 0)
         self.w(u'</%s>\n' % self.xml_root)
 
@@ -256,7 +258,7 @@
     def call(self):
         """display a list of entities by calling their <item_vid> view"""
         self._open()
-        for i in xrange(self.cw_rset.rowcount):
+        for i in range(self.cw_rset.rowcount):
             self.cell_call(i, 0)
         self._close()
 
--- a/web/webconfig.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/web/webconfig.py	Mon Oct 12 10:53:35 2015 +0200
@@ -280,18 +280,6 @@
                 continue
             yield key, pdef
 
-    # don't use @cached: we want to be able to disable it while this must still
-    # be cached
-    def repository(self, vreg=None):
-        """return the instance's repository object"""
-        try:
-            return self.__repo
-        except AttributeError:
-            from cubicweb.repoapi import get_repository
-            repo = get_repository(config=self, vreg=vreg)
-            self.__repo = repo
-            return repo
-
     def vc_config(self):
         return self.repository().get_versions()
 
@@ -472,7 +460,7 @@
             staticdir = join(staticdir, rdir)
             if not isdir(staticdir) and 'w' in mode:
                 os.makedirs(staticdir)
-        return file(join(staticdir, filename), mode)
+        return open(join(staticdir, filename), mode)
 
     def static_file_add(self, rpath, data):
         stream = self.static_file_open(rpath)
--- a/web/webctl.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/web/webctl.py	Mon Oct 12 10:53:35 2015 +0200
@@ -18,6 +18,7 @@
 """cubicweb-ctl commands and command handlers common to twisted/modpython
 web configuration
 """
+from __future__ import print_function
 
 __docformat__ = "restructuredtext en"
 
@@ -44,7 +45,7 @@
     def bootstrap(self, cubes, automatic=False, inputlevel=0):
         """bootstrap this configuration"""
         if not automatic:
-            print '\n' + underline_title('Generic web configuration')
+            print('\n' + underline_title('Generic web configuration'))
             config = self.config
             config.input_config('web', inputlevel)
             if ASK.confirm('Allow anonymous access ?', False):
@@ -87,8 +88,8 @@
             copy(osp.join(resource_dir, resource_path), dest_resource)
         # handle md5 version subdirectory
         linkdir(dest, osp.join(dest, config.instance_md5_version()))
-        print ('You can use apache rewrite rule below :\n'
-               'RewriteRule ^/data/(.*) %s/$1 [L]' % dest)
+        print('You can use the Apache rewrite rule below:\n'
+              'RewriteRule ^/data/(.*) %s/$1 [L]' % dest)
 
     def _datadirs(self, config, repo=None):
         if repo is None:
--- a/wsgi/__init__.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/wsgi/__init__.py	Mon Oct 12 10:53:35 2015 +0200
@@ -27,11 +27,9 @@
 __docformat__ = "restructuredtext en"
 
 from email import message, message_from_string
-from Cookie import SimpleCookie
-from StringIO import StringIO
-from cgi import parse_header
 from pprint import pformat as _pformat
 
+from six.moves.http_cookies import SimpleCookie
 
 def pformat(obj):
     """pretty prints `obj` if possible"""
--- a/wsgi/handler.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/wsgi/handler.py	Mon Oct 12 10:53:35 2015 +0200
@@ -19,7 +19,9 @@
 
 __docformat__ = "restructuredtext en"
 
-from itertools import chain, repeat, izip
+from itertools import chain, repeat
+
+from six.moves import zip
 
 from cubicweb import AuthenticationError
 from cubicweb.web import DirectResponse
@@ -78,7 +80,7 @@
     def __init__(self, code, req, body=None):
         text = STATUS_CODE_TEXT.get(code, 'UNKNOWN STATUS CODE')
         self.status =  '%s %s' % (code, text)
-        self.headers = list(chain(*[izip(repeat(k), v)
+        self.headers = list(chain(*[zip(repeat(k), v)
                                     for k, v in req.headers_out.getAllRawHeaders()]))
         self.headers = [(str(k), str(v)) for k, v in self.headers]
         if body:
--- a/wsgi/request.py	Mon Oct 12 09:19:07 2015 +0200
+++ b/wsgi/request.py	Mon Oct 12 10:53:35 2015 +0200
@@ -28,12 +28,11 @@
 import tempfile
 
 from StringIO import StringIO
-from urllib import quote
-from urlparse import parse_qs
-from warnings import warn
+
+from six.moves.urllib.parse import parse_qs
 
 from cubicweb.multipart import (
-    copy_file, parse_form_data, MultipartError, parse_options_header)
+    copy_file, parse_form_data, parse_options_header)
 from cubicweb.web import RequestError
 from cubicweb.web.request import CubicWebRequestBase
 from cubicweb.wsgi import pformat, normalize_header