backport stable
authorSylvain Thénault <sylvain.thenault@logilab.fr>
Tue, 30 Mar 2010 14:32:03 +0200
changeset 5082 d6fd82a5a4e8
parent 5052 c9dbd95333f7 (diff)
parent 5081 2ea98b8512dd (current diff)
child 5121 a63d7886fcf5
backport stable
devtools/testlib.py
server/querier.py
--- a/README	Tue Mar 30 13:32:01 2010 +0200
+++ b/README	Tue Mar 30 14:32:03 2010 +0200
@@ -1,6 +1,15 @@
 CubicWeb semantic web framework
 ===============================
 
+CubicWeb is an entities / relations based knowledge management system
+developed at Logilab.
+
+This package contains:
+* a repository server
+* a RQL command line client to the repository
+* an adaptive modpython interface to the server
+* a bunch of other management tools
+
 Install
 -------
 
--- a/__pkginfo__.py	Tue Mar 30 13:32:01 2010 +0200
+++ b/__pkginfo__.py	Tue Mar 30 14:32:03 2010 +0200
@@ -1,36 +1,21 @@
 # pylint: disable-msg=W0622,C0103
 """cubicweb global packaging information for the cubicweb knowledge management
 software
+
 :license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
 """
 
-distname = "cubicweb"
-modname = "cubicweb"
+modname = distname = "cubicweb"
 
 numversion = (3, 7, 2)
 version = '.'.join(str(num) for num in numversion)
 
-license = 'LGPL'
-copyright = '''Copyright (c) 2003-2010 LOGILAB S.A. (Paris, FRANCE).
-http://www.logilab.fr/ -- mailto:contact@logilab.fr'''
-
+description = "a repository of entities / relations for knowledge management"
 author = "Logilab"
 author_email = "contact@logilab.fr"
-
-short_desc = "a repository of entities / relations for knowledge management"
-long_desc = """CubicWeb is a entities / relations based knowledge management system
-developped at Logilab.
-
-This package contains:
-* a repository server
-* a RQL command line client to the repository
-* an adaptative modpython interface to the server
-* a bunch of other management tools
-"""
-
 web = 'http://www.cubicweb.org'
 ftp = 'ftp://ftp.logilab.org/pub/cubicweb'
-pyversions = ['2.5', '2.6']
+license = 'LGPL'
 
 classifiers = [
            'Environment :: Web Environment',
@@ -39,6 +24,34 @@
            'Programming Language :: JavaScript',
 ]
 
+__depends__ = {
+    'logilab-common': '>= 0.49.0',
+    'logilab-mtconverter': '>= 0.6.0',
+    'rql': '>= 0.26.0',
+    'yams': '>= 0.28.1',
+    'docutils': '>= 0.6',
+    #gettext                    # for xgettext, msgcat, etc...
+    # web dependencies
+    'simplejson': '>= 2.0.9',
+    'lxml': '',
+    'Twisted': '',
+    # XXX graphviz
+    # server dependencies
+    'logilab-database': '',
+    'pysqlite': '>= 2.5.5', # XXX install pysqlite2
+    }
+
+__recommends__ = {
+    'Pyro': '>= 3.9.1',
+    'PIL': '',                  # for captcha
+    'pycrypto': '',             # for crypto extensions
+    'fyzz': '>= 0.1.0',         # for sparql
+    'pysixt': '>= 0.1.0',       # XXX for pdf export
+    'python-gettext': '>= 1.0', # XXX for pdf export
+    'vobject': '>= 0.6.0',      # for ical view
+    #'Products.FCKeditor':'',
+    #'SimpleTAL':'>= 4.1.6',
+    }
 
 import sys
 from os import listdir, environ
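The `__depends__` / `__recommends__` dicts above map distribution names to optional version constraints. When setuptools is available, the updated setup.py further down in this changeset flattens them into requirement strings; a rough illustration of that conversion, with made-up values mirroring the expression used there:

    # sketch only: same expression as in setup.py below
    deps = {'lxml': '', 'rql': '>= 0.26.0', 'yams': '>= 0.28.1'}
    install_requires = [('%s %s' % (d, v and v or '')).strip()
                        for d, v in deps.items()]
    # e.g. ['lxml', 'rql >= 0.26.0', 'yams >= 0.28.1'] (dict order may vary)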
@@ -49,55 +62,52 @@
            if not s.endswith('.bat')]
 include_dirs = [join('test', 'data'),
                 join('server', 'test', 'data'),
+                join('hooks', 'test', 'data'),
                 join('web', 'test', 'data'),
                 join('devtools', 'test', 'data'),
                 'skeleton']
 
 
-entities_dir = 'entities'
-schema_dir = 'schemas'
-sobjects_dir = 'sobjects'
-server_migration_dir = join('misc', 'migration')
-data_dir = join('web', 'data')
-wdoc_dir = join('web', 'wdoc')
-wdocimages_dir = join(wdoc_dir, 'images')
-views_dir = join('web', 'views')
-i18n_dir = 'i18n'
+_server_migration_dir = join('misc', 'migration')
+_data_dir = join('web', 'data')
+_wdoc_dir = join('web', 'wdoc')
+_wdocimages_dir = join(_wdoc_dir, 'images')
+_views_dir = join('web', 'views')
+_i18n_dir = 'i18n'
 
-if environ.get('APYCOT_ROOT'):
+_pyversion = '.'.join(str(num) for num in sys.version_info[0:2])
+if '--home' in sys.argv:
     # --home install
-    pydir = 'python'
+    pydir = 'python' + _pyversion
 else:
-    python_version = '.'.join(str(num) for num in sys.version_info[0:2])
-    pydir = join('python' + python_version, 'site-packages')
+    pydir = join('python' + _pyversion, 'site-packages')
 
 try:
     data_files = [
-        # common data
-        #[join('share', 'cubicweb', 'entities'),
-        # [join(entities_dir, filename) for filename in listdir(entities_dir)]],
         # server data
         [join('share', 'cubicweb', 'schemas'),
-         [join(schema_dir, filename) for filename in listdir(schema_dir)]],
-        #[join('share', 'cubicweb', 'sobjects'),
-        # [join(sobjects_dir, filename) for filename in listdir(sobjects_dir)]],
+         [join('schemas', filename) for filename in listdir('schemas')]],
         [join('share', 'cubicweb', 'migration'),
-         [join(server_migration_dir, filename)
-          for filename in listdir(server_migration_dir)]],
+         [join(_server_migration_dir, filename)
+          for filename in listdir(_server_migration_dir)]],
         # web data
         [join('share', 'cubicweb', 'cubes', 'shared', 'data'),
-         [join(data_dir, fname) for fname in listdir(data_dir) if not isdir(join(data_dir, fname))]],
+         [join(_data_dir, fname) for fname in listdir(_data_dir)
+          if not isdir(join(_data_dir, fname))]],
         [join('share', 'cubicweb', 'cubes', 'shared', 'data', 'timeline'),
-         [join(data_dir, 'timeline', fname) for fname in listdir(join(data_dir, 'timeline'))]],
+         [join(_data_dir, 'timeline', fname)
+          for fname in listdir(join(_data_dir, 'timeline'))]],
         [join('share', 'cubicweb', 'cubes', 'shared', 'wdoc'),
-         [join(wdoc_dir, fname) for fname in listdir(wdoc_dir) if not isdir(join(wdoc_dir, fname))]],
+         [join(_wdoc_dir, fname) for fname in listdir(_wdoc_dir)
+          if not isdir(join(_wdoc_dir, fname))]],
         [join('share', 'cubicweb', 'cubes', 'shared', 'wdoc', 'images'),
-         [join(wdocimages_dir, fname) for fname in listdir(wdocimages_dir)]],
-        # XXX: .pt install should be handled properly in a near future version
+         [join(_wdocimages_dir, fname) for fname in listdir(_wdocimages_dir)]],
+        [join('share', 'cubicweb', 'cubes', 'shared', 'i18n'),
+         [join(_i18n_dir, fname) for fname in listdir(_i18n_dir)]],
+        # XXX: drop .pt files
         [join('lib', pydir, 'cubicweb', 'web', 'views'),
-         [join(views_dir, fname) for fname in listdir(views_dir) if fname.endswith('.pt')]],
-        [join('share', 'cubicweb', 'cubes', 'shared', 'i18n'),
-         [join(i18n_dir, fname) for fname in listdir(i18n_dir)]],
+         [join(_views_dir, fname) for fname in listdir(_views_dir)
+          if fname.endswith('.pt')]],
         # skeleton
         ]
 except OSError:
--- a/cwconfig.py	Tue Mar 30 13:32:01 2010 +0200
+++ b/cwconfig.py	Tue Mar 30 14:32:03 2010 +0200
@@ -15,8 +15,7 @@
 
  * cubicweb migration files are by default searched in
    `<CW_SOFTWARE_ROOT>/misc/migration` instead of
-   `/usr/share/cubicweb/migration/`(unless another emplacement is specified
-   using `CW_MIGRATION_DIR`.
+   `<install prefix>/share/cubicweb/migration/`
 
  * Cubicweb will start in 'user' mode (see below)
 
@@ -66,9 +65,6 @@
 .. envvar:: CW_RUNTIME_DIR
    Directory where pid files will be written
 
-.. envvar:: CW_MIGRATION_DIR
-   Directory where cubicweb migration files will be found
-
 
 :license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
 """
@@ -78,12 +74,11 @@
 import sys
 import os
 import logging
-import tempfile
 from smtplib import SMTP
 from threading import Lock
-from os.path import exists, join, expanduser, abspath, normpath, basename, isdir
+from os.path import (exists, join, expanduser, abspath, normpath,
+                     basename, isdir, dirname)
 from warnings import warn
-
 from logilab.common.decorators import cached, classproperty
 from logilab.common.deprecation import deprecated
 from logilab.common.logging_ext import set_log_methods, init_log
@@ -131,6 +126,23 @@
                                  % (directory, modes))
     return modes[0]
 
+def _find_prefix(start_path=CW_SOFTWARE_ROOT):
+    """Runs along the parent directories of *start_path* (default to cubicweb source directory)
+    looking for one containing a 'share/cubicweb' directory.
+    The first matching directory is assumed as the prefix installation of cubicweb
+
+    Returns the matching prefix or None.
+    """
+    prefix = start_path
+    old_prefix = None
+    if not isdir(start_path):
+        prefix = dirname(start_path)
+    while not isdir(join(prefix, 'share', 'cubicweb')) and prefix != old_prefix:
+        old_prefix = prefix
+        prefix = dirname(prefix)
+    if isdir(join(prefix, 'share', 'cubicweb')):
+        return prefix
+    return sys.prefix
 
 # persistent options definition
 PERSISTENT_OPTIONS = (
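In practice the lookup above works like this (paths hypothetical): with cubicweb installed under /opt/cw, CW_SOFTWARE_ROOT is something like /opt/cw/lib/python2.5/site-packages/cubicweb, and walking up its parents finds /opt/cw as the first directory containing share/cubicweb, so _find_prefix() returns '/opt/cw'; otherwise it falls back to sys.prefix. As the next hunk shows, a CW_INSTALL_PREFIX environment variable short-circuits the lookup entirely, e.g.:

    # hypothetical override; must be set before cubicweb.cwconfig is imported
    import os
    os.environ['CW_INSTALL_PREFIX'] = '/opt/cw'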
@@ -203,6 +215,11 @@
 
 CWDEV = exists(join(CW_SOFTWARE_ROOT, '.hg'))
 
+try:
+    _INSTALL_PREFIX = os.environ['CW_INSTALL_PREFIX']
+except KeyError:
+    _INSTALL_PREFIX = _find_prefix()
+
 class CubicWebNoAppConfiguration(ConfigurationMixIn):
     """base class for cubicweb configuration without a specific instance directory
     """
@@ -216,25 +233,16 @@
     # debug mode
     debugmode = False
 
-    if os.environ.get('APYCOT_ROOT'):
-        mode = 'test'
-        # allow to test cubes within apycot using cubicweb not installed by
-        # apycot
-        if __file__.startswith(os.environ['APYCOT_ROOT']):
-            CUBES_DIR = '%(APYCOT_ROOT)s/local/share/cubicweb/cubes/' % os.environ
-            # create __init__ file
-            file(join(CUBES_DIR, '__init__.py'), 'w').close()
-        else:
-            CUBES_DIR = '/usr/share/cubicweb/cubes/'
-    elif (CWDEV and _forced_mode != 'system'):
+
+    if (CWDEV and _forced_mode != 'system'):
         mode = 'user'
-        CUBES_DIR = abspath(normpath(join(CW_SOFTWARE_ROOT, '../cubes')))
+        _CUBES_DIR = join(CW_SOFTWARE_ROOT, '../cubes')
     else:
-        if _forced_mode == 'user':
-            mode = 'user'
-        else:
-            mode = 'system'
-        CUBES_DIR = '/usr/share/cubicweb/cubes/'
+        mode = _forced_mode or 'system'
+        _CUBES_DIR = join(_INSTALL_PREFIX, 'share', 'cubicweb', 'cubes')
+
+    CUBES_DIR = env_path('CW_CUBES_DIR', _CUBES_DIR, 'cubes', checkexists=False)
+    CUBES_PATH = os.environ.get('CW_CUBES_PATH', '').split(os.pathsep)
 
     options = (
        ('log-threshold',
@@ -296,7 +304,6 @@
           }),
         )
     # static and class methods used to get instance independant resources ##
-
     @staticmethod
     def cubicweb_version():
         """return installed cubicweb version"""
@@ -335,21 +342,17 @@
                            % directory)
                 continue
             for cube in os.listdir(directory):
-                if isdir(join(directory, cube)) and not cube == 'shared':
+                if cube in ('CVS', '.svn', 'shared', '.hg'):
+                    continue
+                if isdir(join(directory, cube)):
                     cubes.add(cube)
         return sorted(cubes)
 
     @classmethod
     def cubes_search_path(cls):
         """return the path of directories where cubes should be searched"""
-        path = []
-        try:
-            for directory in os.environ['CW_CUBES_PATH'].split(os.pathsep):
-                directory = abspath(normpath(directory))
-                if exists(directory) and not directory in path:
-                    path.append(directory)
-        except KeyError:
-            pass
+        path = [abspath(normpath(directory)) for directory in cls.CUBES_PATH
+                if directory.strip() and exists(directory.strip())]
         if not cls.CUBES_DIR in path and exists(cls.CUBES_DIR):
             path.append(cls.CUBES_DIR)
         return path
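Extra cube directories are now taken from the class-level CUBES_PATH, read once from the CW_CUBES_PATH environment variable (split on os.pathsep), with CUBES_DIR appended last if it exists. A small usage sketch with made-up directories:

    # hypothetical: two extra cube directories, searched before the default CUBES_DIR;
    # must be set before cubicweb.cwconfig is imported
    import os
    os.environ['CW_CUBES_PATH'] = os.pathsep.join(['/srv/cubes', '/home/me/cubes'])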
@@ -365,7 +368,7 @@
     @classmethod
     def cube_dir(cls, cube):
         """return the cube directory for the given cube id,
-        raise ConfigurationError if it doesn't exists
+        raise `ConfigurationError` if it doesn't exist
         """
         for directory in cls.cubes_search_path():
             cubedir = join(directory, cube)
@@ -383,10 +386,12 @@
         """return the information module for the given cube"""
         cube = CW_MIGRATION_MAP.get(cube, cube)
         try:
-            return getattr(__import__('cubes.%s.__pkginfo__' % cube), cube).__pkginfo__
+            parent = __import__('cubes.%s.__pkginfo__' % cube)
+            return getattr(parent, cube).__pkginfo__
         except Exception, ex:
-            raise ConfigurationError('unable to find packaging information for '
-                                     'cube %s (%s: %s)' % (cube, ex.__class__.__name__, ex))
+            raise ConfigurationError(
+                'unable to find packaging information for cube %s (%s: %s)'
+                % (cube, ex.__class__.__name__, ex))
 
     @classmethod
     def cube_version(cls, cube):
@@ -398,14 +403,33 @@
         return Version(version)
 
     @classmethod
+    def _cube_deps(cls, cube, key, oldkey):
+        """return cubicweb cubes used by the given cube"""
+        pkginfo = cls.cube_pkginfo(cube)
+        try:
+            deps = getattr(pkginfo, key)
+        except AttributeError:
+            if hasattr(pkginfo, oldkey):
+                warn('[3.6] %s is deprecated, use %s dict' % (oldkey, key),
+                     DeprecationWarning)
+                deps = getattr(pkginfo, oldkey)
+            else:
+                deps = {}
+        if not isinstance(deps, dict):
+            deps = dict((key, None) for key in deps)
+            warn('[3.6] cube %s should define %s as a dict' % (cube, key),
+                 DeprecationWarning)
+        return deps
+
+    @classmethod
     def cube_dependencies(cls, cube):
         """return cubicweb cubes used by the given cube"""
-        return getattr(cls.cube_pkginfo(cube), '__use__', ())
+        return cls._cube_deps(cube, '__depends_cubes__', '__use__')
 
     @classmethod
     def cube_recommends(cls, cube):
         """return cubicweb cubes recommended by the given cube"""
-        return getattr(cls.cube_pkginfo(cube), '__recommend__', ())
+        return cls._cube_deps(cube, '__recommends_cubes__', '__recommend__')
 
     @classmethod
     def expand_cubes(cls, cubes, with_recommends=False):
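Cube dependencies are now declared as dicts (`__depends_cubes__` / `__recommends_cubes__`) in a cube's __pkginfo__; the old `__use__` / `__recommend__` tuples are still accepted but raise a deprecation warning. A hypothetical cube packaging module in the new format (cube names made up, see also the skeleton template updated later in this changeset):

    # hypothetical cube __pkginfo__.py
    __depends__ = {'cubicweb': '>= 3.6.0', 'cubicweb-comment': None}
    __depends_cubes__ = {'comment': None}        # dict, not the deprecated __use__ tuple
    __recommends_cubes__ = {'tag': '>= 1.0.0'}   # replaces the old __recommend__ tuple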
@@ -438,9 +462,10 @@
         graph = {}
         for cube in cubes:
             cube = CW_MIGRATION_MAP.get(cube, cube)
-            deps = cls.cube_dependencies(cube) + \
-                   cls.cube_recommends(cube)
-            graph[cube] = set(dep for dep in deps if dep in cubes)
+            graph[cube] = set(dep for dep in cls.cube_dependencies(cube)
+                              if dep in cubes)
+            graph[cube] |= set(dep for dep in cls.cube_recommends(cube)
+                               if dep in cubes)
         cycles = get_cycles(graph)
         if cycles:
             cycles = '\n'.join(' -> '.join(cycle) for cycle in cycles)
@@ -588,6 +613,7 @@
             cw_rest_init()
 
     def adjust_sys_path(self):
+        # overridden in CubicWebConfiguration
         self.cls_adjust_sys_path()
 
     def init_log(self, logthreshold=None, debug=False,
@@ -637,35 +663,24 @@
         """
         return None
 
+
 class CubicWebConfiguration(CubicWebNoAppConfiguration):
     """base class for cubicweb server and web configurations"""
 
-    INSTANCES_DATA_DIR = None
-    if os.environ.get('APYCOT_ROOT'):
-        root = os.environ['APYCOT_ROOT']
-        REGISTRY_DIR = '%s/etc/cubicweb.d/' % root
-        if not exists(REGISTRY_DIR):
-            os.makedirs(REGISTRY_DIR)
-        RUNTIME_DIR = tempfile.gettempdir()
-        # allow to test cubes within apycot using cubicweb not installed by
-        # apycot
-        if __file__.startswith(os.environ['APYCOT_ROOT']):
-            MIGRATION_DIR = '%s/local/share/cubicweb/migration/' % root
+    if CubicWebNoAppConfiguration.mode == 'user':
+        _INSTANCES_DIR = expanduser('~/etc/cubicweb.d/')
+    else: #mode = 'system'
+        if _INSTALL_PREFIX == '/usr':
+            _INSTANCES_DIR = '/etc/cubicweb.d/'
         else:
-            MIGRATION_DIR = '/usr/share/cubicweb/migration/'
-    else:
-        if CubicWebNoAppConfiguration.mode == 'user':
-            REGISTRY_DIR = expanduser('~/etc/cubicweb.d/')
-            RUNTIME_DIR = tempfile.gettempdir()
-            INSTANCES_DATA_DIR = REGISTRY_DIR
-        else: #mode = 'system'
-            REGISTRY_DIR = '/etc/cubicweb.d/'
-            RUNTIME_DIR = '/var/run/cubicweb/'
-            INSTANCES_DATA_DIR = '/var/lib/cubicweb/instances/'
-        if CWDEV:
-            MIGRATION_DIR = join(CW_SOFTWARE_ROOT, 'misc', 'migration')
-        else:
-            MIGRATION_DIR = '/usr/share/cubicweb/migration/'
+            _INSTANCES_DIR = join(_INSTALL_PREFIX, 'etc', 'cubicweb.d')
+
+    if os.environ.get('APYCOT_ROOT'):
+        _cubes_init = join(CubicWebNoAppConfiguration.CUBES_DIR, '__init__.py')
+        if not exists(_cubes_init):
+            file(join(_cubes_init), 'w').close()
+        if not exists(_INSTANCES_DIR):
+            os.makedirs(_INSTANCES_DIR)
 
     # for some commands (creation...) we don't want to initialize gettext
     set_language = True
@@ -709,25 +724,19 @@
         )
 
     @classmethod
-    def runtime_dir(cls):
-        """run time directory for pid file..."""
-        return env_path('CW_RUNTIME_DIR', cls.RUNTIME_DIR, 'run time')
-
-    @classmethod
-    def registry_dir(cls):
+    def instances_dir(cls):
         """return the control directory"""
-        return env_path('CW_INSTANCES_DIR', cls.REGISTRY_DIR, 'registry')
-
-    @classmethod
-    def instance_data_dir(cls):
-        """return the instance data directory"""
-        return env_path('CW_INSTANCES_DATA_DIR', cls.INSTANCES_DATA_DIR,
-                        'additional data')
+        return env_path('CW_INSTANCES_DIR', cls._INSTANCES_DIR, 'registry')
 
     @classmethod
     def migration_scripts_dir(cls):
         """cubicweb migration scripts directory"""
-        return env_path('CW_MIGRATION_DIR', cls.MIGRATION_DIR, 'migration')
+        if CWDEV:
+            return join(CW_SOFTWARE_ROOT, 'misc', 'migration')
+        mdir = join(_INSTALL_PREFIX, 'share', 'cubicweb', 'migration')
+        if not exists(mdir):
+            raise ConfigurationError('migration path %s doesn\'t exist' % mdir)
+        return mdir
 
     @classmethod
     def config_for(cls, appid, config=None):
@@ -750,9 +759,10 @@
         """return the home directory of the instance with the given
         instance id
         """
-        home = join(cls.registry_dir(), appid)
+        home = join(cls.instances_dir(), appid)
         if not exists(home):
-            raise ConfigurationError('no such instance %s (check it exists with "cubicweb-ctl list")' % appid)
+            raise ConfigurationError('no such instance %s (check it exists with'
+                                     ' "cubicweb-ctl list")' % appid)
         return home
 
     MODES = ('common', 'repository', 'Any', 'web')
@@ -775,7 +785,9 @@
     def default_log_file(self):
         """return default path to the log file of the instance'server"""
         if self.mode == 'user':
-            basepath = join(tempfile.gettempdir(), '%s-%s' % (basename(self.appid), self.name))
+            import tempfile
+            basepath = join(tempfile.gettempdir(), '%s-%s' % (
+                basename(self.appid), self.name))
             path = basepath + '.log'
             i = 1
             while exists(path) and i < 100: # arbitrary limit to avoid infinite loop
@@ -790,7 +802,13 @@
 
     def default_pid_file(self):
         """return default path to the pid file of the instance'server"""
-        return join(self.runtime_dir(), '%s-%s.pid' % (self.appid, self.name))
+        if self.mode == 'system':
+            # XXX not under _INSTALL_PREFIX, right?
+            rtdir = env_path('CW_RUNTIME_DIR', '/var/run/cubicweb/', 'run time')
+        else:
+            import tempfile
+            rtdir = env_path('CW_RUNTIME_DIR', tempfile.gettempdir(), 'run time')
+        return join(rtdir, '%s-%s.pid' % (self.appid, self.name))
 
     # instance methods used to get instance specific resources #############
 
@@ -810,11 +828,17 @@
 
     @property
     def apphome(self):
-        return join(self.registry_dir(), self.appid)
+        return join(self.instances_dir(), self.appid)
 
     @property
     def appdatahome(self):
-        return join(self.instance_data_dir(), self.appid)
+        if self.mode == 'system':
+            # XXX not under _INSTALL_PREFIX, right?
+            iddir = '/var/lib/cubicweb/instances/'
+        else:
+            iddir = self.instances_dir()
+        iddir = env_path('CW_INSTANCES_DATA_DIR', iddir, 'additional data')
+        return join(iddir, self.appid)
 
     def init_cubes(self, cubes):
         assert self._cubes is None, self._cubes
@@ -879,7 +903,8 @@
                 if exists(sitefile) and not sitefile in self._site_loaded:
                     self._load_site_cubicweb(sitefile)
                     self._site_loaded.add(sitefile)
-                    self.warning('[3.5] site_erudi.py is deprecated, should be renamed to site_cubicweb.py')
+                    self.warning('[3.5] site_erudi.py is deprecated, should be '
+                                 'renamed to site_cubicweb.py')
 
     def _load_site_cubicweb(self, sitefile):
         # XXX extrapath argument to load_module_from_file only in lgc > 0.46
--- a/cwctl.py	Tue Mar 30 13:32:01 2010 +0200
+++ b/cwctl.py	Tue Mar 30 14:32:03 2010 +0200
@@ -13,6 +13,7 @@
 # possible (for cubicweb-ctl reactivity, necessary for instance for usable bash
 # completion). So import locally in command helpers.
 import sys
+from warnings import warn
 from os import remove, listdir, system, pathsep
 try:
     from os import kill, getpgid
@@ -85,7 +86,7 @@
         Instance used by another one should appears first in the file (one
         instance per line)
         """
-        regdir = cwcfg.registry_dir()
+        regdir = cwcfg.instances_dir()
         _allinstances = list_instances(regdir)
         if isfile(join(regdir, 'startorder')):
             allinstances = []
@@ -168,84 +169,6 @@
 
 # base commands ###############################################################
 
-def version_strictly_lower(a, b):
-    from logilab.common.changelog import Version
-    if a:
-        a = Version(a)
-    if b:
-        b = Version(b)
-    return a < b
-
-def max_version(a, b):
-    from logilab.common.changelog import Version
-    return str(max(Version(a), Version(b)))
-
-class ConfigurationProblem(object):
-    """Each cube has its own list of dependencies on other cubes/versions.
-
-    The ConfigurationProblem is used to record the loaded cubes, then to detect
-    inconsistencies in their dependencies.
-
-    See configuration management on wikipedia for litterature.
-    """
-
-    def __init__(self):
-        self.cubes = {}
-
-    def add_cube(self, name, info):
-        self.cubes[name] = info
-
-    def solve(self):
-        self.warnings = []
-        self.errors = []
-        self.read_constraints()
-        for cube, versions in sorted(self.constraints.items()):
-            oper, version = None, None
-            # simplify constraints
-            if versions:
-                for constraint in versions:
-                    op, ver = constraint
-                    if oper is None:
-                        oper = op
-                        version = ver
-                    elif op == '>=' and oper == '>=':
-                        version = max_version(ver, version)
-                    else:
-                        print 'unable to handle this case', oper, version, op, ver
-            # "solve" constraint satisfaction problem
-            if cube not in self.cubes:
-                self.errors.append( ('add', cube, version) )
-            elif versions:
-                lower_strict = version_strictly_lower(self.cubes[cube].version, version)
-                if oper in ('>=','='):
-                    if lower_strict:
-                        self.errors.append( ('update', cube, version) )
-                else:
-                    print 'unknown operator', oper
-
-    def read_constraints(self):
-        self.constraints = {}
-        self.reverse_constraints = {}
-        for cube, info in self.cubes.items():
-            if hasattr(info,'__depends_cubes__'):
-                use = info.__depends_cubes__
-                if not isinstance(use, dict):
-                    use = dict((key, None) for key in use)
-                    self.warnings.append('cube %s should define __depends_cubes__ as a dict not a list')
-            else:
-                self.warnings.append('cube %s should define __depends_cubes__' % cube)
-                use = dict((key, None) for key in info.__use__)
-            for name, constraint in use.items():
-                self.constraints.setdefault(name,set())
-                if constraint:
-                    try:
-                        oper, version = constraint.split()
-                        self.constraints[name].add( (oper, version) )
-                    except:
-                        self.warnings.append('cube %s depends on %s but constraint badly formatted: %s'
-                                             % (cube, name, constraint))
-                self.reverse_constraints.setdefault(name, set()).add(cube)
-
 class ListCommand(Command):
     """List configurations, cubes and instances.
 
@@ -262,6 +185,7 @@
         """run the command with its specific arguments"""
         if args:
             raise BadCommandUsage('Too much arguments')
+        from cubicweb.migration import ConfigurationProblem
         print 'CubicWeb %s (%s mode)' % (cwcfg.cubicweb_version(), cwcfg.mode)
         print
         print 'Available configurations:'
@@ -273,7 +197,7 @@
                     continue
                 print '   ', line
         print
-        cfgpb = ConfigurationProblem()
+        cfgpb = ConfigurationProblem(cwcfg)
         try:
             cubesdir = pathsep.join(cwcfg.cubes_search_path())
             namesize = max(len(x) for x in cwcfg.available_cubes())
@@ -284,26 +208,31 @@
         else:
             print 'Available cubes (%s):' % cubesdir
             for cube in cwcfg.available_cubes():
-                if cube in ('CVS', '.svn', 'shared', '.hg'):
-                    continue
                 try:
                     tinfo = cwcfg.cube_pkginfo(cube)
                     tversion = tinfo.version
-                    cfgpb.add_cube(cube, tinfo)
+                    cfgpb.add_cube(cube, tversion)
                 except ConfigurationError:
                     tinfo = None
                     tversion = '[missing cube information]'
                 print '* %s %s' % (cube.ljust(namesize), tversion)
                 if self.config.verbose:
-                    shortdesc = tinfo and (getattr(tinfo, 'short_desc', '')
-                                           or tinfo.__doc__)
-                    if shortdesc:
-                        print '    '+ '    \n'.join(shortdesc.splitlines())
+                    if tinfo:
+                        descr = getattr(tinfo, 'description', '')
+                        if not descr:
+                            descr = getattr(tinfo, 'short_desc', '')
+                            if descr:
+                                warn('[3.8] short_desc is deprecated, update %s'
+                                     ' pkginfo' % cube, DeprecationWarning)
+                            else:
+                                descr = tinfo.__doc__
+                        if descr:
+                            print '    '+ '    \n'.join(descr.splitlines())
                     modes = detect_available_modes(cwcfg.cube_dir(cube))
                     print '    available modes: %s' % ', '.join(modes)
         print
         try:
-            regdir = cwcfg.registry_dir()
+            regdir = cwcfg.instances_dir()
         except ConfigurationError, ex:
             print 'No instance available:', ex
             print
@@ -611,7 +540,7 @@
     actionverb = 'restarted'
 
     def run_args(self, args, askconfirm):
-        regdir = cwcfg.registry_dir()
+        regdir = cwcfg.instances_dir()
         if not isfile(join(regdir, 'startorder')) or len(args) <= 1:
             # no specific startorder
             super(RestartInstanceCommand, self).run_args(args, askconfirm)
@@ -948,7 +877,7 @@
 
     def run(self, args):
         """run the command with its specific arguments"""
-        regdir = cwcfg.registry_dir()
+        regdir = cwcfg.instances_dir()
         for appid in sorted(listdir(regdir)):
             print appid
 
--- a/debian/control	Tue Mar 30 13:32:01 2010 +0200
+++ b/debian/control	Tue Mar 30 14:32:03 2010 +0200
@@ -97,7 +97,7 @@
 Package: cubicweb-common
 Architecture: all
 XB-Python-Version: ${python:Versions}
-Depends: ${python:Depends}, graphviz, gettext, python-logilab-mtconverter (>= 0.6.0), python-logilab-common (>= 0.49.0), python-yams (>= 0.28.1), python-rql (>= 0.25.0), python-lxml
+Depends: ${python:Depends}, graphviz, gettext, python-logilab-mtconverter (>= 0.6.0), python-logilab-common (>= 0.49.0), python-yams (>= 0.28.1), python-rql (>= 0.26.0), python-lxml
 Recommends: python-simpletal (>= 4.0), python-crypto
 Conflicts: cubicweb-core
 Replaces: cubicweb-core
--- a/devtools/__init__.py	Tue Mar 30 13:32:01 2010 +0200
+++ b/devtools/__init__.py	Tue Mar 30 14:32:03 2010 +0200
@@ -97,9 +97,6 @@
           }),
         ))
 
-    if not os.environ.get('APYCOT_ROOT'):
-        REGISTRY_DIR = normpath(join(CW_SOFTWARE_ROOT, '../cubes'))
-
     def __init__(self, appid, log_threshold=logging.CRITICAL+10):
         ServerConfiguration.__init__(self, appid)
         self.init_log(log_threshold, force=True)
--- a/devtools/testlib.py	Tue Mar 30 13:32:01 2010 +0200
+++ b/devtools/testlib.py	Tue Mar 30 14:32:03 2010 +0200
@@ -5,6 +5,8 @@
 :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
 :license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
 """
+from __future__ import with_statement
+
 __docformat__ = "restructuredtext en"
 
 import os
@@ -29,6 +31,7 @@
 from cubicweb.dbapi import repo_connect, ConnectionProperties, ProgrammingError
 from cubicweb.sobjects import notification
 from cubicweb.web import Redirect, application
+from cubicweb.server.session import security_enabled
 from cubicweb.devtools import SYSTEM_ENTITIES, SYSTEM_RELATIONS, VIEW_VALIDATORS
 from cubicweb.devtools import fake, htmlparser
 
@@ -772,6 +775,10 @@
         """this method populates the database with `how_many` entities
         of each possible type. It also inserts random relations between them
         """
+        with security_enabled(self.session, read=False, write=False):
+            self._auto_populate(how_many)
+
+    def _auto_populate(self, how_many):
         cu = self.cursor()
         self.custom_populate(how_many, cu)
         vreg = self.vreg
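auto_populate now disables both read and write security around the actual population, using the `security_enabled` context manager imported above. The same pattern can be reused in tests that need to bypass security hooks; a sketch, assuming a CubicWebTC-style test with `self.session`:

    # security checks are suspended inside the block and restored on exit
    with security_enabled(self.session, read=False, write=False):
        self._auto_populate(10)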
--- a/migration.py	Tue Mar 30 13:32:01 2010 +0200
+++ b/migration.py	Tue Mar 30 14:32:03 2010 +0200
@@ -16,6 +16,7 @@
 from logilab.common.decorators import cached
 from logilab.common.configuration import REQUIRED, read_old_config
 from logilab.common.shellutils import ASK
+from logilab.common.changelog import Version
 
 from cubicweb import ConfigurationError
 
@@ -374,3 +375,75 @@
 from logging import getLogger
 from cubicweb import set_log_methods
 set_log_methods(MigrationHelper, getLogger('cubicweb.migration'))
+
+
+def version_strictly_lower(a, b):
+    if a:
+        a = Version(a)
+    if b:
+        b = Version(b)
+    return a < b
+
+def max_version(a, b):
+    return str(max(Version(a), Version(b)))
+
+class ConfigurationProblem(object):
+    """Each cube has its own list of dependencies on other cubes/versions.
+
+    The ConfigurationProblem is used to record the loaded cubes, then to detect
+    inconsistencies in their dependencies.
+
+    See configuration management on Wikipedia for literature.
+    """
+
+    def __init__(self, config):
+        self.cubes = {}
+        self.config = config
+
+    def add_cube(self, name, version):
+        self.cubes[name] = version
+
+    def solve(self):
+        self.warnings = []
+        self.errors = []
+        self.read_constraints()
+        for cube, versions in sorted(self.constraints.items()):
+            oper, version = None, None
+            # simplify constraints
+            if versions:
+                for constraint in versions:
+                    op, ver = constraint
+                    if oper is None:
+                        oper = op
+                        version = ver
+                    elif op == '>=' and oper == '>=':
+                        version = max_version(ver, version)
+                    else:
+                        print 'unable to handle this case', oper, version, op, ver
+            # "solve" constraint satisfaction problem
+            if cube not in self.cubes:
+                self.errors.append( ('add', cube, version) )
+            elif versions:
+                lower_strict = version_strictly_lower(self.cubes[cube], version)
+                if oper in ('>=','='):
+                    if lower_strict:
+                        self.errors.append( ('update', cube, version) )
+                else:
+                    print 'unknown operator', oper
+
+    def read_constraints(self):
+        self.constraints = {}
+        self.reverse_constraints = {}
+        for cube in self.cubes:
+            use = self.config.cube_dependencies(cube)
+            for name, constraint in use.iteritems():
+                self.constraints.setdefault(name,set())
+                if constraint:
+                    try:
+                        oper, version = constraint.split()
+                        self.constraints[name].add( (oper, version) )
+                    except:
+                        self.warnings.append(
+                            'cube %s depends on %s but constraint badly '
+                            'formatted: %s' % (cube, name, constraint))
+                self.reverse_constraints.setdefault(name, set()).add(cube)
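ConfigurationProblem moves here from cwctl.py and now takes the cubicweb configuration as argument, reading each cube's constraints through `config.cube_dependencies()`. A minimal driving sketch, along the lines of what `cubicweb-ctl list` does above (assuming `cwcfg` is the usual configuration object):

    cfgpb = ConfigurationProblem(cwcfg)
    for cube in cwcfg.available_cubes():
        cfgpb.add_cube(cube, cwcfg.cube_pkginfo(cube).version)
    cfgpb.solve()
    # cfgpb.errors then holds ('add', cube, version) / ('update', cube, version)
    # tuples for missing or outdated cubes; cfgpb.warnings lists badly
    # formatted constraints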
--- a/rqlrewrite.py	Tue Mar 30 13:32:01 2010 +0200
+++ b/rqlrewrite.py	Tue Mar 30 14:32:03 2010 +0200
@@ -41,15 +41,15 @@
         except KeyError:
             continue
         stinfo = var.stinfo
-        if stinfo.get('uidrels'):
+        if stinfo.get('uidrel') is not None:
             continue # eid specified, no need for additional type specification
         try:
-            typerels = rqlst.defined_vars[varname].stinfo.get('typerels')
+            typerel = rqlst.defined_vars[varname].stinfo.get('typerel')
         except KeyError:
             assert varname in rqlst.aliases
             continue
-        if newroot is rqlst and typerels:
-            mytyperel = iter(typerels).next()
+        if newroot is rqlst and typerel is not None:
+            mytyperel = typerel
         else:
             for vref in newroot.defined_vars[varname].references():
                 rel = vref.relation()
@@ -80,7 +80,7 @@
                 # tree is not annotated yet, no scope set so add the restriction
                 # to the root
                 rel = newroot.add_type_restriction(var, possibletypes)
-            stinfo['typerels'] = frozenset((rel,))
+            stinfo['typerel'] = rel
             stinfo['possibletypes'] = possibletypes
 
 
--- a/server/msplanner.py	Tue Mar 30 13:32:01 2010 +0200
+++ b/server/msplanner.py	Tue Mar 30 14:32:03 2010 +0200
@@ -309,21 +309,20 @@
         # find for each source which variable/solution are supported
         for varname, varobj in self.rqlst.defined_vars.items():
             # if variable has an eid specified, we can get its source directly
-            # NOTE: use uidrels and not constnode to deal with "X eid IN(1,2,3,4)"
-            if varobj.stinfo['uidrels']:
-                vrels = varobj.stinfo['relations'] - varobj.stinfo['uidrels']
-                for rel in varobj.stinfo['uidrels']:
-                    for const in rel.children[1].get_nodes(Constant):
-                        eid = const.eval(self.plan.args)
-                        source = self._session.source_from_eid(eid)
-                        if vrels and not any(source.support_relation(r.r_type)
-                                             for r in vrels):
-                            self._set_source_for_term(self.system_source, varobj)
-                        else:
-                            self._set_source_for_term(source, varobj)
+            # NOTE: use uidrel and not constnode to deal with "X eid IN(1,2,3,4)"
+            if varobj.stinfo['uidrel'] is not None:
+                rel = varobj.stinfo['uidrel']
+                for const in rel.children[1].get_nodes(Constant):
+                    eid = const.eval(self.plan.args)
+                    source = self._session.source_from_eid(eid)
+                    if not any(source.support_relation(r.r_type)
+                               for r in varobj.stinfo['relations'] if not r is rel):
+                        self._set_source_for_term(self.system_source, varobj)
+                    else:
+                        self._set_source_for_term(source, varobj)
                 continue
             rels = varobj.stinfo['relations']
-            if not rels and not varobj.stinfo['typerels']:
+            if not rels and varobj.stinfo['typerel'] is None:
                 # (rare) case where the variable has no type specified nor
                 # relation accessed ex. "Any MAX(X)"
                 self._set_source_for_term(self.system_source, varobj)
@@ -700,7 +699,7 @@
                     for var in select.defined_vars.itervalues():
                         if not var in terms:
                             stinfo = var.stinfo
-                            for ovar, rtype in stinfo['attrvars']:
+                            for ovar, rtype in stinfo.get('attrvars', ()):
                                 if ovar in terms:
                                     needsel.add(var.name)
                                     terms.append(var)
@@ -778,20 +777,19 @@
             # variable is refed by an outer scope and should be substituted
             # using an 'identity' relation (else we'll get a conflict of
             # temporary tables)
-            if rhsvar in terms and not lhsvar in terms:
+            if rhsvar in terms and not lhsvar in terms and lhsvar.scope is lhsvar.stmt:
                 self._identity_substitute(rel, lhsvar, terms, needsel)
-            elif lhsvar in terms and not rhsvar in terms:
+            elif lhsvar in terms and not rhsvar in terms and rhsvar.scope is rhsvar.stmt:
                 self._identity_substitute(rel, rhsvar, terms, needsel)
 
     def _identity_substitute(self, relation, var, terms, needsel):
         newvar = self._insert_identity_variable(relation.scope, var)
-        if newvar is not None:
-            # ensure relation is using '=' operator, else we rely on a
-            # sqlgenerator side effect (it won't insert an inequality operator
-            # in this case)
-            relation.children[1].operator = '='
-            terms.append(newvar)
-            needsel.add(newvar.name)
+        # ensure relation is using '=' operator, else we rely on a
+        # sqlgenerator side effect (it won't insert an inequality operator
+        # in this case)
+        relation.children[1].operator = '='
+        terms.append(newvar)
+        needsel.add(newvar.name)
 
     def _choose_term(self, sourceterms):
         """pick one term among terms supported by a source, which will be used
@@ -1418,7 +1416,7 @@
             return False
         if not var in terms or used_in_outer_scope(var, self.current_scope):
             return False
-        if any(v for v, _ in var.stinfo['attrvars'] if not v in terms):
+        if any(v for v, _ in var.stinfo.get('attrvars', ()) if not v in terms):
             return False
         return True
 
--- a/server/mssteps.py	Tue Mar 30 13:32:01 2010 +0200
+++ b/server/mssteps.py	Tue Mar 30 14:32:03 2010 +0200
@@ -61,7 +61,7 @@
             if not isinstance(vref, VariableRef):
                 continue
             var = vref.variable
-            if var.stinfo['attrvars']:
+            if var.stinfo.get('attrvars'):
                 for lhsvar, rtype in var.stinfo['attrvars']:
                     if lhsvar.name in srqlst.defined_vars:
                         key = '%s.%s' % (lhsvar.name, rtype)
--- a/server/querier.py	Tue Mar 30 13:32:01 2010 +0200
+++ b/server/querier.py	Tue Mar 30 14:32:03 2010 +0200
@@ -319,16 +319,9 @@
         varkwargs = {}
         if not session.transaction_data.get('security-rqlst-cache'):
             for var in rqlst.defined_vars.itervalues():
-                for rel in var.stinfo['uidrels']:
-                    const = rel.children[1].children[0]
-                    try:
-                        varkwargs[var.name] = typed_eid(const.eval(self.args))
-                        break
-                    except AttributeError:
-                        #from rql.nodes import Function
-                        #assert isinstance(const, Function)
-                        # X eid IN(...)
-                        pass
+                if var.stinfo['constnode'] is not None:
+                    eid = var.stinfo['constnode'].eval(self.args)
+                    varkwargs[var.name] = typed_eid(eid)
         # dictionnary of variables restricted for security reason
         localchecks = {}
         restricted_vars = set()
--- a/server/rqlannotation.py	Tue Mar 30 13:32:01 2010 +0200
+++ b/server/rqlannotation.py	Tue Mar 30 14:32:03 2010 +0200
@@ -38,7 +38,7 @@
             stinfo['invariant'] = False
             stinfo['principal'] = _select_main_var(stinfo['rhsrelations'])
             continue
-        if not stinfo['relations'] and not stinfo['typerels']:
+        if not stinfo['relations'] and stinfo['typerel'] is None:
             # Any X, Any MAX(X)...
             # those particular queries should be executed using the system
             # entities table unless there is some type restriction
@@ -80,7 +80,7 @@
                 continue
             rschema = getrschema(rel.r_type)
             if rel.optional:
-                if rel in stinfo['optrelations']:
+                if rel in stinfo.get('optrelations', ()):
                     # optional variable can't be invariant if this is the lhs
                     # variable of an inlined relation
                     if not rel in stinfo['rhsrelations'] and rschema.inlined:
@@ -296,7 +296,7 @@
     def compute(self, rqlst):
         # set domains for each variable
         for varname, var in rqlst.defined_vars.iteritems():
-            if var.stinfo['uidrels'] or \
+            if var.stinfo['uidrel'] is not None or \
                    self.eschema(rqlst.solutions[0][varname]).final:
                 ptypes = var.stinfo['possibletypes']
             else:
@@ -339,7 +339,11 @@
     def set_rel_constraint(self, term, rel, etypes_func):
         if isinstance(term, VariableRef) and self.is_ambiguous(term.variable):
             var = term.variable
-            if len(var.stinfo['relations'] - var.stinfo['typerels']) == 1 \
+            if var.stinfo['typerel'] is not None:
+                sub = 1
+            else:
+                sub = 0
+            if len(var.stinfo['relations']) - sub == 1 \
                    or rel.sqlscope is var.sqlscope or rel.r_type == 'identity':
                 self.restrict(var, frozenset(etypes_func()))
                 try:
@@ -356,7 +360,7 @@
         if isinstance(other, VariableRef) and isinstance(other.variable, Variable):
             deambiguifier = other.variable
             if not var is self.deambification_map.get(deambiguifier):
-                if not var.stinfo['typerels']:
+                if var.stinfo['typerel'] is None:
                     otheretypes = deambiguifier.stinfo['possibletypes']
                 elif not self.is_ambiguous(deambiguifier):
                     otheretypes = self.varsols[deambiguifier]
@@ -364,7 +368,7 @@
                     # we know variable won't be invariant, try to use
                     # it to deambguify the current variable
                     otheretypes = self.varsols[deambiguifier]
-            if not deambiguifier.stinfo['typerels']:
+            if deambiguifier.stinfo['typerel'] is None:
                 # if deambiguifier has no type restriction using 'is',
                 # don't record it
                 deambiguifier = None
--- a/server/sources/rql2sql.py	Tue Mar 30 13:32:01 2010 +0200
+++ b/server/sources/rql2sql.py	Tue Mar 30 14:32:03 2010 +0200
@@ -87,7 +87,7 @@
     modified = False
     for varname in tuple(unstable):
         var = select.defined_vars[varname]
-        if not var.stinfo['optrelations']:
+        if not var.stinfo.get('optrelations'):
             continue
         modified = True
         unstable.remove(varname)
@@ -114,13 +114,13 @@
             var.stinfo['relations'].remove(rel)
             newvar.stinfo['relations'].add(newrel)
             if rel.optional in ('left', 'both'):
-                newvar.stinfo['optrelations'].add(newrel)
+                newvar.add_optional_relation(newrel)
             for vref in newrel.children[1].iget_nodes(VariableRef):
                 var = vref.variable
                 var.stinfo['relations'].add(newrel)
                 var.stinfo['rhsrelations'].add(newrel)
                 if rel.optional in ('right', 'both'):
-                    var.stinfo['optrelations'].add(newrel)
+                    var.add_optional_relation(newrel)
         # extract subquery solutions
         mysolutions = [sol.copy() for sol in solutions]
         cleanup_solutions(newselect, mysolutions)
@@ -888,7 +888,7 @@
                         condition = '%s=%s' % (lhssql, rhsconst.accept(self))
                         if relation.r_type != 'identity':
                             condition = '(%s OR %s IS NULL)' % (condition, lhssql)
-                        if not lhsvar.stinfo['optrelations']:
+                        if not lhsvar.stinfo.get('optrelations'):
                             return condition
                         self.add_outer_join_condition(lhsvar, t1, condition)
                     return
@@ -987,7 +987,7 @@
                 sql = '%s%s' % (lhssql, rhssql)
         except AttributeError:
             sql = '%s%s' % (lhssql, rhssql)
-        if lhs.variable.stinfo['optrelations']:
+        if lhs.variable.stinfo.get('optrelations'):
             self.add_outer_join_condition(lhs.variable, table, sql)
         else:
             return sql
@@ -1002,7 +1002,7 @@
         lhsvar = lhs.variable
         me_is_principal = lhsvar.stinfo.get('principal') is rel
         if me_is_principal:
-            if not lhsvar.stinfo['typerels']:
+            if lhsvar.stinfo['typerel'] is None:
                 # the variable is using the fti table, no join needed
                 jointo = None
             elif not lhsvar.name in self._varmap:
@@ -1135,7 +1135,7 @@
                 vtablename = '_' + variable.name
                 self.add_table('entities AS %s' % vtablename, vtablename)
                 sql = '%s.eid' % vtablename
-                if variable.stinfo['typerels']:
+                if variable.stinfo['typerel'] is not None:
                     # add additional restriction on entities.type column
                     pts = variable.stinfo['possibletypes']
                     if len(pts) == 1:
@@ -1297,7 +1297,7 @@
             tablealias = self._state.outer_tables[table]
             actualtables = self._state.actual_tables[-1]
         except KeyError:
-            for rel in var.stinfo['optrelations']:
+            for rel in var.stinfo.get('optrelations'):
                 self.visit_relation(rel)
             assert self._state.outer_tables
             self.add_outer_join_condition(var, table, condition)
--- a/server/test/unittest_rql2sql.py	Tue Mar 30 13:32:01 2010 +0200
+++ b/server/test/unittest_rql2sql.py	Tue Mar 30 14:32:03 2010 +0200
@@ -1605,8 +1605,8 @@
 class removeUnsusedSolutionsTC(TestCase):
     def test_invariant_not_varying(self):
         rqlst = mock_object(defined_vars={})
-        rqlst.defined_vars['A'] = mock_object(scope=rqlst, stinfo={'optrelations':False}, _q_invariant=True)
-        rqlst.defined_vars['B'] = mock_object(scope=rqlst, stinfo={'optrelations':False}, _q_invariant=False)
+        rqlst.defined_vars['A'] = mock_object(scope=rqlst, stinfo={}, _q_invariant=True)
+        rqlst.defined_vars['B'] = mock_object(scope=rqlst, stinfo={}, _q_invariant=False)
         self.assertEquals(remove_unused_solutions(rqlst, [{'A': 'RugbyGroup', 'B': 'RugbyTeam'},
                                                           {'A': 'FootGroup', 'B': 'FootTeam'}], {}, None),
                           ([{'A': 'RugbyGroup', 'B': 'RugbyTeam'},
@@ -1616,8 +1616,8 @@
 
     def test_invariant_varying(self):
         rqlst = mock_object(defined_vars={})
-        rqlst.defined_vars['A'] = mock_object(scope=rqlst, stinfo={'optrelations':False}, _q_invariant=True)
-        rqlst.defined_vars['B'] = mock_object(scope=rqlst, stinfo={'optrelations':False}, _q_invariant=False)
+        rqlst.defined_vars['A'] = mock_object(scope=rqlst, stinfo={}, _q_invariant=True)
+        rqlst.defined_vars['B'] = mock_object(scope=rqlst, stinfo={}, _q_invariant=False)
         self.assertEquals(remove_unused_solutions(rqlst, [{'A': 'RugbyGroup', 'B': 'RugbyTeam'},
                                                           {'A': 'FootGroup', 'B': 'RugbyTeam'}], {}, None),
                           ([{'A': 'RugbyGroup', 'B': 'RugbyTeam'}], {}, set())
--- a/setup.py	Tue Mar 30 13:32:01 2010 +0200
+++ b/setup.py	Tue Mar 30 14:32:03 2010 +0200
@@ -24,38 +24,44 @@
 import os
 import sys
 import shutil
-from distutils.core import setup
-from distutils.command import install_lib
 from os.path import isdir, exists, join, walk
 
+try:
+   if os.environ.get('NO_SETUPTOOLS'):
+      raise ImportError() # behave as if setuptools was not available
+   from setuptools import setup
+   from setuptools.command import install_lib
+   USE_SETUPTOOLS = True
+except ImportError:
+   from distutils.core import setup
+   from distutils.command import install_lib
+   USE_SETUPTOOLS = False
+
 # import required features
-from __pkginfo__ import modname, version, license, short_desc, long_desc, \
-     web, author, author_email
+from __pkginfo__ import modname, version, license, description, web, \
+     author, author_email
+
+if exists('README'):
+   long_description = file('README').read()
+
 # import optional features
-try:
-    from __pkginfo__ import distname
-except ImportError:
-    distname = modname
-try:
-    from __pkginfo__ import scripts
-except ImportError:
-    scripts = []
-try:
-    from __pkginfo__ import data_files
-except ImportError:
-    data_files = None
-try:
-    from __pkginfo__ import subpackage_of
-except ImportError:
-    subpackage_of = None
-try:
-    from __pkginfo__ import include_dirs
-except ImportError:
-    include_dirs = []
-try:
-    from __pkginfo__ import ext_modules
-except ImportError:
-    ext_modules = None
+import __pkginfo__
+if USE_SETUPTOOLS:
+   requires = {}
+   for entry in ("__depends__", "__recommends__"):
+      requires.update(getattr(__pkginfo__, entry, {}))
+   install_requires = [("%s %s" % (d, v and v or "")).strip()
+                       for d, v in requires.iteritems()]
+else:
+   install_requires = []
+
+distname = getattr(__pkginfo__, 'distname', modname)
+scripts = getattr(__pkginfo__, 'scripts', ())
+include_dirs = getattr(__pkginfo__, 'include_dirs', ())
+data_files = getattr(__pkginfo__, 'data_files', None)
+subpackage_of = getattr(__pkginfo__, 'subpackage_of', None)
+ext_modules = getattr(__pkginfo__, 'ext_modules', None)
+
 
 BASE_BLACKLIST = ('CVS', 'debian', 'dist', 'build', '__buildlog')
 IGNORED_EXTENSIONS = ('.pyc', '.pyo', '.elc')
@@ -92,7 +98,8 @@
 
 def export(from_dir, to_dir,
            blacklist=BASE_BLACKLIST,
-           ignore_ext=IGNORED_EXTENSIONS):
+           ignore_ext=IGNORED_EXTENSIONS,
+           verbose=True):
     """make a mirror of from_dir in to_dir, omitting directories and files
     listed in the black list
     """
@@ -111,7 +118,8 @@
                 continue
             src = '%s/%s' % (directory, filename)
             dest = to_dir + src[len(from_dir):]
-            print >> sys.stderr, src, '->', dest
+            if verbose:
+               print >> sys.stderr, src, '->', dest
             if os.path.isdir(src):
                 if not exists(dest):
                     os.mkdir(dest)
@@ -154,29 +162,31 @@
                 base = modname
             for directory in include_dirs:
                 dest = join(self.install_dir, base, directory)
-                export(directory, dest)
+                export(directory, dest, verbose=False)
 
 def install(**kwargs):
     """setup entry point"""
+    if not USE_SETUPTOOLS and '--install-layout=deb' in sys.argv and \
+           sys.version_info < (2, 5, 4):
+       sys.argv.remove('--install-layout=deb')
+       print "W: remove '--install-layout=deb' option"
     if subpackage_of:
         package = subpackage_of + '.' + modname
         kwargs['package_dir'] = {package : '.'}
         packages = [package] + get_packages(os.getcwd(), package)
+        if USE_SETUPTOOLS:
+            kwargs['namespace_packages'] = [subpackage_of]
     else:
         kwargs['package_dir'] = {modname : '.'}
         packages = [modname] + get_packages(os.getcwd(), modname)
     kwargs['packages'] = packages
-    return setup(name = distname,
-                 version = version,
-                 license =license,
-                 description = short_desc,
-                 long_description = long_desc,
-                 author = author,
-                 author_email = author_email,
-                 url = web,
-                 scripts = ensure_scripts(scripts),
-                 data_files=data_files,
+    return setup(name=distname, version=version, license=license, url=web,
+                 description=description, long_description=long_description,
+                 author=author, author_email=author_email,
+                 scripts=ensure_scripts(scripts), data_files=data_files,
                  ext_modules=ext_modules,
+                 install_requires=install_requires,
+                 #dependency_links=["http://alain:alain@intranet.logilab.fr/~alain/"],
                  cmdclass={'install_lib': MyInstallLib},
                  **kwargs
                  )
--- a/skeleton/__pkginfo__.py.tmpl	Tue Mar 30 13:32:01 2010 +0200
+++ b/skeleton/__pkginfo__.py.tmpl	Tue Mar 30 14:32:03 2010 +0200
@@ -8,14 +8,11 @@
 version = '.'.join(str(num) for num in numversion)
 
 license = 'LCL'
-copyright = '''Copyright (c) %(year)s %(author)s.
-%(author-web-site)s -- mailto:%(author-email)s'''
 
 author = '%(author)s'
 author_email = '%(author-email)s'
 
-short_desc = '%(shortdesc)s'
-long_desc = '''%(longdesc)s'''
+description = '%(shortdesc)s'
 
 web = 'http://www.cubicweb.org/project/%%s' %% distname
 
@@ -43,8 +40,10 @@
 # Note: here, you'll need to add subdirectories if you want
 # them to be included in the debian package
 
-__depends_cubes__ = {}
 __depends__ = {'cubicweb': '>= 3.6.0'}
-__use__ = (%(dependancies)s)
-__recommend__ = ()
+__depends_cubes__ = dict( (x[len('cubicweb-'):], v) for x, v in __depends__.items()
+                          if x.startswith('cubicweb-'))
+__recommends__ = {}
+__recommends_cubes__ = dict( (x[len('cubicweb-'):], v) for x, v in __recommends__.items()
+                             if x.startswith('cubicweb-'))
 
--- a/skeleton/setup.py	Tue Mar 30 13:32:01 2010 +0200
+++ b/skeleton/setup.py	Tue Mar 30 14:32:03 2010 +0200
@@ -1,14 +1,12 @@
 #!/usr/bin/env python
-"""
+"""Generic Setup script, takes package info from __pkginfo__.py file
 
 :organization: Logilab
 :copyright: 2001-2010 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
 :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
 :license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
 """
-# pylint: disable-msg=W0404,W0622,W0704,W0613,W0152
-# Copyright (c) 2003-2010 LOGILAB S.A. (Paris, FRANCE).
-# http://www.logilab.fr/ -- mailto:contact@logilab.fr
+# pylint: disable-msg=W0142,W0403,W0404,W0613,W0622,W0622,W0704,R0904,C0103,E0611
 #
 # This program is free software; you can redistribute it and/or modify it under
 # the terms of the GNU General Public License as published by the Free Software
@@ -22,36 +20,176 @@
 # You should have received a copy of the GNU General Public License along with
 # this program; if not, write to the Free Software Foundation, Inc.,
 # 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
-""" Generic Setup script, takes package info from __pkginfo__.py file """
+
+import os
+import sys
+import shutil
+from os.path import isdir, exists, join, walk
 
-from distutils.core import setup
+try:
+   if os.environ.get('NO_SETUPTOOLS'):
+      raise ImportError() # act as if setuptools is not available
+   from setuptools import setup
+   from setuptools.command import install_lib
+   USE_SETUPTOOLS = True
+except ImportError:
+   from distutils.core import setup
+   from distutils.command import install_lib
+   USE_SETUPTOOLS = False
 
 # import required features
-from __pkginfo__ import distname, version, license, short_desc, long_desc, \
-     web, author, author_email
+from __pkginfo__ import modname, version, license, description, web, \
+     author, author_email
+
+if exists('README'):
+   long_description = file('README').read()
+else:
+   long_description = ''
+
 # import optional features
-try:
-    from __pkginfo__ import data_files
-except ImportError:
-    data_files = None
-try:
-    from __pkginfo__ import include_dirs
-except ImportError:
-    include_dirs = []
+import __pkginfo__
+if USE_SETUPTOOLS:
+   requires = {}
+   for entry in ("__depends__", "__recommends__"):
+      requires.update(getattr(__pkginfo__, entry, {}))
+   install_requires = [("%s %s" % (d, v and v or "")).strip()
+                       for d, v in requires.iteritems()]
+else:
+   install_requires = []
+
+distname = getattr(__pkginfo__, 'distname', modname)
+scripts = getattr(__pkginfo__, 'scripts', ())
+include_dirs = getattr(__pkginfo__, 'include_dirs', ())
+data_files = getattr(__pkginfo__, 'data_files', None)
+subpackage_of = getattr(__pkginfo__, 'subpackage_of', None)
+ext_modules = getattr(__pkginfo__, 'ext_modules', None)
+
+
+BASE_BLACKLIST = ('CVS', 'debian', 'dist', 'build', '__buildlog')
+IGNORED_EXTENSIONS = ('.pyc', '.pyo', '.elc')
+
+
+def ensure_scripts(linux_scripts):
+    """
+    Creates the proper script names required for each platform
+    (taken from 4Suite)
+    """
+    from distutils import util
+    if util.get_platform()[:3] == 'win':
+        scripts_ = [script + '.bat' for script in linux_scripts]
+    else:
+        scripts_ = linux_scripts
+    return scripts_
+
+
+def get_packages(directory, prefix):
+    """return a list of subpackages for the given directory
+    """
+    result = []
+    for package in os.listdir(directory):
+        absfile = join(directory, package)
+        if isdir(absfile):
+            if exists(join(absfile, '__init__.py')) or \
+                   package in ('test', 'tests'):
+                if prefix:
+                    result.append('%s.%s' % (prefix, package))
+                else:
+                    result.append(package)
+                result += get_packages(absfile, result[-1])
+    return result
+
+def export(from_dir, to_dir,
+           blacklist=BASE_BLACKLIST,
+           ignore_ext=IGNORED_EXTENSIONS,
+           verbose=True):
+    """make a mirror of from_dir in to_dir, omitting directories and files
+    listed in the black list
+    """
+    def make_mirror(arg, directory, fnames):
+        """walk handler"""
+        for norecurs in blacklist:
+            try:
+                fnames.remove(norecurs)
+            except ValueError:
+                pass
+        for filename in fnames:
+            # don't include binary files
+            if filename[-4:] in ignore_ext:
+                continue
+            if filename[-1] == '~':
+                continue
+            src = '%s/%s' % (directory, filename)
+            dest = to_dir + src[len(from_dir):]
+            if verbose:
+                print >> sys.stderr, src, '->', dest
+            if os.path.isdir(src):
+                if not exists(dest):
+                    os.mkdir(dest)
+            else:
+                if exists(dest):
+                    os.remove(dest)
+                shutil.copy2(src, dest)
+    try:
+        os.mkdir(to_dir)
+    except OSError, ex:
+        # directory already exists?
+        import errno
+        if ex.errno != errno.EEXIST:
+            raise
+    walk(from_dir, make_mirror, None)
+
+
+EMPTY_FILE = '"""generated file, don\'t modify or your data will be lost"""\n'
+
+class MyInstallLib(install_lib.install_lib):
+    """extend install_lib command to handle  package __init__.py and
+    include_dirs variable if necessary
+    """
+    def run(self):
+        """overridden from install_lib class"""
+        install_lib.install_lib.run(self)
+        # create Products.__init__.py if needed
+        if subpackage_of:
+            product_init = join(self.install_dir, subpackage_of, '__init__.py')
+            if not exists(product_init):
+                self.announce('creating %s' % product_init)
+                stream = open(product_init, 'w')
+                stream.write(EMPTY_FILE)
+                stream.close()
+        # manually install included directories if any
+        if include_dirs:
+            if subpackage_of:
+                base = join(subpackage_of, modname)
+            else:
+                base = modname
+            for directory in include_dirs:
+                dest = join(self.install_dir, base, directory)
+                export(directory, dest, verbose=False)
 
 def install(**kwargs):
     """setup entry point"""
-    #kwargs['distname'] = modname
-    return setup(name=distname,
-                 version=version,
-                 license=license,
-                 description=short_desc,
-                 long_description=long_desc,
-                 author=author,
-                 author_email=author_email,
-                 url=web,
-                 data_files=data_files,
-                 **kwargs)
+    if not USE_SETUPTOOLS and '--install-layout=deb' in sys.argv and \
+           sys.version_info < (2, 5, 4):
+        sys.argv.remove('--install-layout=deb')
+        print "W: removed '--install-layout=deb' option"
+    if subpackage_of:
+        package = subpackage_of + '.' + modname
+        kwargs['package_dir'] = {package : '.'}
+        packages = [package] + get_packages(os.getcwd(), package)
+        if USE_SETUPTOOLS:
+            kwargs['namespace_packages'] = [subpackage_of]
+    else:
+        kwargs['package_dir'] = {modname : '.'}
+        packages = [modname] + get_packages(os.getcwd(), modname)
+    kwargs['packages'] = packages
+    return setup(name=distname, version=version, license=license, url=web,
+                 description=description, long_description=long_description,
+                 author=author, author_email=author_email,
+                 scripts=ensure_scripts(scripts), data_files=data_files,
+                 ext_modules=ext_modules,
+                 install_requires=install_requires,
+                 #dependency_links=["http://alain:alain@intranet.logilab.fr/~alain/"],
+                 cmdclass={'install_lib': MyInstallLib},
+                 **kwargs
+                 )
 
 if __name__ == '__main__' :
     install()
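
For orientation, a minimal sketch of what get_packages returns for a
hypothetical one-package layout, built in a scratch directory (it assumes
get_packages from the setup script above is in scope):

    import os, tempfile
    from os.path import join
    tmp = tempfile.mkdtemp()                        # hypothetical cube source tree
    os.makedirs(join(tmp, 'entities'))
    open(join(tmp, 'entities', '__init__.py'), 'w').close()
    print get_packages(tmp, 'mycube')               # -> ['mycube.entities']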
--- a/test/data/cubes/file/__pkginfo__.py	Tue Mar 30 13:32:01 2010 +0200
+++ b/test/data/cubes/file/__pkginfo__.py	Tue Mar 30 14:32:03 2010 +0200
@@ -13,48 +13,3 @@
 numversion = (1, 4, 3)
 version = '.'.join(str(num) for num in numversion)
 
-license = 'LGPL'
-copyright = '''Copyright (c) 2003-2010 LOGILAB S.A. (Paris, FRANCE).
-http://www.logilab.fr/ -- mailto:contact@logilab.fr'''
-
-author = "Logilab"
-author_email = "contact@logilab.fr"
-web = ''
-
-short_desc = "Raw file support for the CubicWeb framework"
-long_desc = """CubicWeb is a entities / relations bases knowledge management system
-developped at Logilab.
-.
-This package provides schema and views to store files and images in cubicweb
-applications.
-.
-"""
-
-from os import listdir
-from os.path import join
-
-CUBES_DIR = join('share', 'cubicweb', 'cubes')
-try:
-    data_files = [
-        [join(CUBES_DIR, 'file'),
-         [fname for fname in listdir('.')
-          if fname.endswith('.py') and fname != 'setup.py']],
-        [join(CUBES_DIR, 'file', 'data'),
-         [join('data', fname) for fname in listdir('data')]],
-        [join(CUBES_DIR, 'file', 'wdoc'),
-         [join('wdoc', fname) for fname in listdir('wdoc')]],
-        [join(CUBES_DIR, 'file', 'views'),
-         [join('views', fname) for fname in listdir('views') if fname.endswith('.py')]],
-        [join(CUBES_DIR, 'file', 'i18n'),
-         [join('i18n', fname) for fname in listdir('i18n')]],
-        [join(CUBES_DIR, 'file', 'migration'),
-         [join('migration', fname) for fname in listdir('migration')]],
-        ]
-except OSError:
-    # we are in an installed directory
-    pass
-
-
-cube_eid = 20320
-# used packages
-__use__ = ()
--- a/test/unittest_cwconfig.py	Tue Mar 30 13:32:01 2010 +0200
+++ b/test/unittest_cwconfig.py	Tue Mar 30 14:32:03 2010 +0200
@@ -7,13 +7,16 @@
 """
 import sys
 import os
+import tempfile
 from os.path import dirname, join, abspath
 
 from logilab.common.modutils import cleanup_sys_modules
-from logilab.common.testlib import TestCase, unittest_main
+from logilab.common.testlib import (TestCase, unittest_main,
+                                    with_tempdir)
 from logilab.common.changelog import Version
 
 from cubicweb.devtools import ApptestConfiguration
+from cubicweb.cwconfig import _find_prefix
 
 def unabsolutize(path):
     parts = path.split(os.sep)
@@ -32,7 +35,7 @@
         self.config._cubes = ('email', 'file')
 
     def tearDown(self):
-        os.environ.pop('CW_CUBES_PATH', None)
+        ApptestConfiguration.CUBES_PATH = []
 
     def test_reorder_cubes(self):
         # jpl depends on email and file and comment
@@ -52,7 +55,7 @@
 
     def test_reorder_cubes_recommends(self):
         from cubes.comment import __pkginfo__ as comment_pkginfo
-        comment_pkginfo.__recommend__ = ('file',)
+        comment_pkginfo.__recommends_cubes__ = {'file': None}
         try:
             # email recommends comment
             # comment recommends file
@@ -65,7 +68,7 @@
             self.assertEquals(self.config.reorder_cubes(('comment', 'forge', 'email', 'file')),
                               ('forge', 'email', 'comment', 'file'))
         finally:
-            comment_pkginfo.__use__ = ()
+            comment_pkginfo.__recommends_cubes__ = {}
 
 
 #     def test_vc_config(self):
@@ -91,11 +94,11 @@
         # make sure we don't import the email cube, but the stdlib email package
         import email
         self.assertNotEquals(dirname(email.__file__), self.config.CUBES_DIR)
-        os.environ['CW_CUBES_PATH'] = CUSTOM_CUBES_DIR
+        self.config.__class__.CUBES_PATH = [CUSTOM_CUBES_DIR]
         self.assertEquals(self.config.cubes_search_path(),
                           [CUSTOM_CUBES_DIR, self.config.CUBES_DIR])
-        os.environ['CW_CUBES_PATH'] = os.pathsep.join([
-            CUSTOM_CUBES_DIR, self.config.CUBES_DIR, 'unexistant'])
+        self.config.__class__.CUBES_PATH = [CUSTOM_CUBES_DIR,
+                                            self.config.CUBES_DIR, 'unexistant']
         # filter out unexistant and duplicates
         self.assertEquals(self.config.cubes_search_path(),
                           [CUSTOM_CUBES_DIR,
@@ -114,6 +117,91 @@
         from cubes import file
         self.assertEquals(file.__path__, [join(CUSTOM_CUBES_DIR, 'file')])
 
+class FindPrefixTC(TestCase):
+    def make_dirs(self, *args):
+        path = join(tempfile.tempdir, *args)
+        if not os.path.exists(path):
+            os.makedirs(path)
+        return path
+
+    def make_file(self, *args):
+        self.make_dirs(*args[:-1])
+        file_path = join(tempfile.tempdir, *args)
+        file_obj = open(file_path, 'w')
+        file_obj.write('""" None """')
+        file_obj.close()
+        return file_path
+
+    @with_tempdir
+    def test_samedir(self):
+        prefix = tempfile.tempdir
+        self.make_dirs('share', 'cubicweb')
+        self.assertEquals(_find_prefix(prefix), prefix)
+
+    @with_tempdir
+    def test_samedir_filepath(self):
+        prefix = tempfile.tempdir
+        self.make_dirs('share', 'cubicweb')
+        file_path = self.make_file('bob.py')
+        self.assertEquals(_find_prefix(file_path), prefix)
+
+    @with_tempdir
+    def test_dir_inside_prefix(self):
+        prefix = tempfile.tempdir
+        self.make_dirs('share', 'cubicweb')
+        dir_path = self.make_dirs('bob')
+        self.assertEquals(_find_prefix(dir_path), prefix)
+
+    @with_tempdir
+    def test_file_in_dir_inside_prefix(self):
+        prefix = tempfile.tempdir
+        self.make_dirs('share', 'cubicweb')
+        file_path = self.make_file('bob', 'toto.py')
+        self.assertEquals(_find_prefix(file_path), prefix)
+
+    @with_tempdir
+    def test_file_in_deeper_dir_inside_prefix(self):
+        prefix = tempfile.tempdir
+        self.make_dirs('share', 'cubicweb')
+        file_path = self.make_file('bob', 'pyves', 'alain', 'adim', 'syt', 'toto.py')
+        self.assertEquals(_find_prefix(file_path), prefix)
+
+    @with_tempdir
+    def test_multiple_candidate_prefix(self):
+        self.make_dirs('share', 'cubicweb')
+        prefix = self.make_dirs('bob')
+        self.make_dirs('bob', 'share', 'cubicweb')
+        file_path = self.make_file('bob', 'pyves', 'alain', 'adim', 'syt', 'toto.py')
+        self.assertEquals(_find_prefix(file_path), prefix)
+
+    @with_tempdir
+    def test_sister_candidate_prefix(self):
+        prefix = tempfile.tempdir
+        self.make_dirs('share', 'cubicweb')
+        self.make_dirs('bob', 'share', 'cubicweb')
+        file_path = self.make_file('bell', 'toto.py')
+        self.assertEquals(_find_prefix(file_path), prefix)
+
+    @with_tempdir
+    def test_multiple_parent_candidate_prefix(self):
+        self.make_dirs('share', 'cubicweb')
+        prefix = self.make_dirs('share', 'cubicweb', 'bob')
+        self.make_dirs('share', 'cubicweb', 'bob', 'share', 'cubicweb')
+        file_path = self.make_file('share', 'cubicweb', 'bob', 'pyves', 'alain', 'adim', 'syt', 'toto.py')
+        self.assertEquals(_find_prefix(file_path), prefix)
+
+    @with_tempdir
+    def test_upper_candidate_prefix(self):
+        prefix = tempfile.tempdir
+        self.make_dirs('share', 'cubicweb')
+        self.make_dirs('bell', 'bob', 'share', 'cubicweb')
+        file_path = self.make_file('bell', 'toto.py')
+        self.assertEquals(_find_prefix(file_path), prefix)
+
+    @with_tempdir
+    def test_no_prefix(self):
+        prefix = tempfile.tempdir
+        self.assertEquals(_find_prefix(prefix), sys.prefix)
 
 if __name__ == '__main__':
     unittest_main()
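
For orientation, the new tests above exercise _find_prefix, which walks up
from the given path looking for a directory containing share/cubicweb and
falls back to sys.prefix when no such directory is found. Roughly, with
hypothetical paths:

    _find_prefix('/opt/cw/bob/toto.py')   # -> '/opt/cw' if /opt/cw/share/cubicweb exists
    _find_prefix('/tmp/nowhere')          # -> sys.prefix otherwise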
--- a/test/unittest_cwctl.py	Tue Mar 30 13:32:01 2010 +0200
+++ b/test/unittest_cwctl.py	Tue Mar 30 14:32:03 2010 +0200
@@ -10,15 +10,8 @@
 from cStringIO import StringIO
 from logilab.common.testlib import TestCase, unittest_main
 
-if os.environ.get('APYCOT_ROOT'):
-    root = os.environ['APYCOT_ROOT']
-    CUBES_DIR = '%s/local/share/cubicweb/cubes/' % root
-    os.environ['CW_CUBES_PATH'] = CUBES_DIR
-    REGISTRY_DIR = '%s/etc/cubicweb.d/' % root
-    os.environ['CW_INSTANCES_DIR'] = REGISTRY_DIR
-
 from cubicweb.cwconfig import CubicWebConfiguration
-CubicWebConfiguration.load_cwctl_plugins()
+CubicWebConfiguration.load_cwctl_plugins() # XXX necessary?
 
 class CubicWebCtlTC(TestCase):
     def setUp(self):
--- a/toolsutils.py	Tue Mar 30 13:32:01 2010 +0200
+++ b/toolsutils.py	Tue Mar 30 14:32:03 2010 +0200
@@ -184,7 +184,7 @@
                 config_file, ex)
     return config
 
-def env_path(env_var, default, name):
+def env_path(env_var, default, name, checkexists=True):
     """get a path specified in a variable or using the default value and return
     it.
 
@@ -203,8 +203,8 @@
     :raise `ConfigurationError`: if the returned path does not exist
     """
     path = environ.get(env_var, default)
-    if not exists(path):
-        raise ConfigurationError('%s path %s doesn\'t exist' % (name, path))
+    if checkexists and not exists(path):
+        raise ConfigurationError('%s directory %s doesn\'t exist' % (name, path))
     return abspath(path)
 
 
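A minimal usage sketch for the extended helper, with hypothetical values:

    import os
    from cubicweb.toolsutils import env_path
    os.environ['CW_INSTANCES_DIR'] = '/etc/cubicweb.d'        # hypothetical value
    # skip the existence check, e.g. when the directory is created afterwards
    path = env_path('CW_INSTANCES_DIR', '/var/lib/cubicweb/instances',
                    'instances', checkexists=False)
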
--- a/web/facet.py	Tue Mar 30 13:32:01 2010 +0200
+++ b/web/facet.py	Tue Mar 30 14:32:03 2010 +0200
@@ -8,7 +8,6 @@
 """
 __docformat__ = "restructuredtext en"
 
-from itertools import chain
 from copy import deepcopy
 from datetime import date, datetime, timedelta
 
@@ -199,7 +198,7 @@
     # add attribute variable to selection
     rqlst.add_selected(attrvar)
     # add is restriction if necessary
-    if not mainvar.stinfo['typerels']:
+    if mainvar.stinfo['typerel'] is None:
         etypes = frozenset(sol[mainvar.name] for sol in rqlst.solutions)
         rqlst.add_type_restriction(mainvar, etypes)
     return var
@@ -228,12 +227,16 @@
         for ovarname in linkedvars:
             vargraph[ovarname].remove(trvarname)
         # remove relation using this variable
-        for rel in chain(trvar.stinfo['relations'], trvar.stinfo['typerels']):
+        for rel in trvar.stinfo['relations']:
             if rel in removed:
                 # already removed
                 continue
             rqlst.remove_node(rel)
             removed.add(rel)
+        rel = trvar.stinfo['typerel']
+        if rel is not None and not rel in removed:
+            rqlst.remove_node(rel)
+            removed.add(rel)
         # cleanup groupby clause
         if rqlst.groupby:
             for vref in rqlst.groupby[:]: