merge
author Sylvain Thénault <sylvain.thenault@logilab.fr>
date Tue, 08 Dec 2009 10:40:20 +0100
changeset 4022 934e758a73ef
parent 4021 280c910c8710 (diff)
parent 4020 65ee20fb9078 (current diff)
child 4023 eae23c40627a
--- a/.hgtags	Mon Dec 07 09:10:36 2009 -0800
+++ b/.hgtags	Tue Dec 08 10:40:20 2009 +0100
@@ -92,3 +92,5 @@
 7a96c0544c138a0c5f452e5b2428ce6e2b7cb378 cubicweb-debian-version-3.5.7-1
 1677312fd8a3e8c0a5ae083e3104ca62b7c9a5bb cubicweb-version-3.5.9
 d7f2d32340fb59753548ef29cbc1958ef3a55fc6 cubicweb-debian-version-3.5.9-1
+9b52725d8c534ba40877457b413077a10173bf88 cubicweb-version-3.5.10
+dfe2f245248c97bea3a29c8ecc6d293e25ff708e cubicweb-debian-version-3.5.10-1
--- a/__pkginfo__.py	Mon Dec 07 09:10:36 2009 -0800
+++ b/__pkginfo__.py	Tue Dec 08 10:40:20 2009 +0100
@@ -7,7 +7,7 @@
 distname = "cubicweb"
 modname = "cubicweb"
 
-numversion = (3, 5, 10)
+numversion = (3, 6, 0)
 version = '.'.join(str(num) for num in numversion)
 
 license = 'LGPL'
--- a/appobject.py	Mon Dec 07 09:10:36 2009 -0800
+++ b/appobject.py	Tue Dec 08 10:40:20 2009 +0100
@@ -310,6 +310,11 @@
     # deprecated ###############################################################
 
     @property
+    @deprecated('[3.6] use self.__regid__')
+    def id(self):
+        return self.__regid__
+
+    @property
     @deprecated('[3.6] use self._cw.vreg')
     def vreg(self):
         return self._cw.vreg
--- a/common/appobject.py	Mon Dec 07 09:10:36 2009 -0800
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,11 +0,0 @@
-"""pre 3.2 bw compat
-
-:organization: Logilab
-:copyright: 2001-2009 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
-:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
-:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
-"""
-# pylint: disable-msg=W0614,W0401
-from warnings import warn
-warn('moved to cubicweb.appobject', DeprecationWarning, stacklevel=2)
-from cubicweb.appobject import *
--- a/common/entity.py	Mon Dec 07 09:10:36 2009 -0800
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,12 +0,0 @@
-"""pre 3.2 bw compat
-
-:organization: Logilab
-:copyright: 2001-2009 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
-:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
-:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
-"""
-# pylint: disable-msg=W0614,W0401
-from warnings import warn
-warn('moved to cubicweb.entity', DeprecationWarning, stacklevel=2)
-from cubicweb.entity import *
-from cubicweb.entity import _marker
--- a/common/i18n.py	Mon Dec 07 09:10:36 2009 -0800
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,99 +0,0 @@
-"""Some i18n/gettext utilities.
-
-:organization: Logilab
-:copyright: 2001-2009 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
-:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
-:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
-"""
-__docformat__ = "restructuredtext en"
-
-import re
-import os
-import sys
-from os.path import join, basename, splitext, exists
-from glob import glob
-
-from cubicweb.toolsutils import create_dir
-
-def extract_from_tal(files, output_file):
-    """extract i18n strings from tal and write them into the given output file
-    using standard python gettext marker (_)
-    """
-    output = open(output_file, 'w')
-    for filepath in files:
-        for match in re.finditer('i18n:(content|replace)="([^"]+)"', open(filepath).read()):
-            print >> output, '_("%s")' % match.group(2)
-    output.close()
-
-
-def add_msg(w, msgid, msgctx=None):
-    """write an empty pot msgid definition"""
-    if isinstance(msgid, unicode):
-        msgid = msgid.encode('utf-8')
-    if msgctx:
-        if isinstance(msgctx, unicode):
-            msgctx = msgctx.encode('utf-8')
-        w('msgctxt "%s"\n' % msgctx)
-    msgid = msgid.replace('"', r'\"').splitlines()
-    if len(msgid) > 1:
-        w('msgid ""\n')
-        for line in msgid:
-            w('"%s"' % line.replace('"', r'\"'))
-    else:
-        w('msgid "%s"\n' % msgid[0])
-    w('msgstr ""\n\n')
-
-
-def execute(cmd):
-    """display the command, execute it and raise an Exception if returned
-    status != 0
-    """
-    from subprocess import call
-    print cmd.replace(os.getcwd() + os.sep, '')
-    status = call(cmd, shell=True)
-    if status != 0:
-        raise Exception('status = %s' % status)
-
-
-def available_catalogs(i18ndir=None):
-    if i18ndir is None:
-        wildcard = '*.po'
-    else:
-        wildcard = join(i18ndir, '*.po')
-    for popath in glob(wildcard):
-        lang = splitext(basename(popath))[0]
-        yield lang, popath
-
-
-def compile_i18n_catalogs(sourcedirs, destdir, langs):
-    """generate .mo files for a set of languages into the `destdir` i18n directory
-    """
-    from logilab.common.fileutils import ensure_fs_mode
-    print '-> compiling %s catalogs...' % destdir
-    errors = []
-    for lang in langs:
-        langdir = join(destdir, lang, 'LC_MESSAGES')
-        if not exists(langdir):
-            create_dir(langdir)
-        pofiles = [join(path, '%s.po' % lang) for path in sourcedirs]
-        pofiles = [pof for pof in pofiles if exists(pof)]
-        mergedpo = join(destdir, '%s_merged.po' % lang)
-        try:
-            # merge instance/cubes messages catalogs with the stdlib's one
-            execute('msgcat --use-first --sort-output --strict -o "%s" %s'
-                    % (mergedpo, ' '.join('"%s"' % f for f in pofiles)))
-            # make sure the .mo file is writeable and compiles with *msgfmt*
-            applmo = join(destdir, lang, 'LC_MESSAGES', 'cubicweb.mo')
-            try:
-                ensure_fs_mode(applmo)
-            except OSError:
-                pass # suppose not exists
-            execute('msgfmt "%s" -o "%s"' % (mergedpo, applmo))
-        except Exception, ex:
-            errors.append('while handling language %s: %s' % (lang, ex))
-        try:
-            # clean everything
-            os.unlink(mergedpo)
-        except Exception:
-            continue
-    return errors
--- a/common/migration.py	Mon Dec 07 09:10:36 2009 -0800
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,373 +0,0 @@
-"""utilities for instances migration
-
-:organization: Logilab
-:copyright: 2001-2009 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
-:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
-:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
-"""
-__docformat__ = "restructuredtext en"
-
-import sys
-import os
-import logging
-import tempfile
-from os.path import exists, join, basename, splitext
-
-from logilab.common.decorators import cached
-from logilab.common.configuration import REQUIRED, read_old_config
-from logilab.common.shellutils import ASK
-
-from cubicweb import ConfigurationError
-
-
-def filter_scripts(config, directory, fromversion, toversion, quiet=True):
-    """return a list of paths of migration files to consider to upgrade
-    from a version to a greater one
-    """
-    from logilab.common.changelog import Version # doesn't work with appengine
-    assert fromversion
-    assert toversion
-    assert isinstance(fromversion, tuple), fromversion.__class__
-    assert isinstance(toversion, tuple), toversion.__class__
-    assert fromversion <= toversion, (fromversion, toversion)
-    if not exists(directory):
-        if not quiet:
-            print directory, "doesn't exists, no migration path"
-        return []
-    if fromversion == toversion:
-        return []
-    result = []
-    for fname in os.listdir(directory):
-        if fname.endswith('.pyc') or fname.endswith('.pyo') \
-               or fname.endswith('~'):
-            continue
-        fpath = join(directory, fname)
-        try:
-            tver, mode = fname.split('_', 1)
-        except ValueError:
-            continue
-        mode = mode.split('.', 1)[0]
-        if not config.accept_mode(mode):
-            continue
-        try:
-            tver = Version(tver)
-        except ValueError:
-            continue
-        if tver <= fromversion:
-            continue
-        if tver > toversion:
-            continue
-        result.append((tver, fpath))
-    # be sure scripts are executed in order
-    return sorted(result)
-
-
-IGNORED_EXTENSIONS = ('.swp', '~')
-
-
-def execscript_confirm(scriptpath):
-    """asks for confirmation before executing a script and provides the
-    ability to show the script's content
-    """
-    while True:
-        answer = ASK.ask('Execute %r ?' % scriptpath, ('Y','n','show'), 'Y')
-        if answer == 'n':
-            return False
-        elif answer == 'show':
-            stream = open(scriptpath)
-            scriptcontent = stream.read()
-            stream.close()
-            print
-            print scriptcontent
-            print
-        else:
-            return True
-
-def yes(*args, **kwargs):
-    return True
-
-
-class MigrationHelper(object):
-    """class holding CubicWeb Migration Actions used by migration scripts"""
-
-    def __init__(self, config, interactive=True, verbosity=1):
-        self.config = config
-        if config:
-            # no config on shell to a remote instance
-            self.config.init_log(logthreshold=logging.ERROR, debug=True)
-        # 0: no confirmation, 1: only main commands confirmed, 2 ask for everything
-        self.verbosity = verbosity
-        self.need_wrap = True
-        if not interactive or not verbosity:
-            self.confirm = yes
-            self.execscript_confirm = yes
-        else:
-            self.execscript_confirm = execscript_confirm
-        self._option_changes = []
-        self.__context = {'confirm': self.confirm,
-                          'config': self.config,
-                          'interactive_mode': interactive,
-                          }
-
-    def __getattribute__(self, name):
-        try:
-            return object.__getattribute__(self, name)
-        except AttributeError:
-            cmd = 'cmd_%s' % name
-            if hasattr(self, cmd):
-                meth = getattr(self, cmd)
-                return lambda *args, **kwargs: self.interact(args, kwargs,
-                                                             meth=meth)
-            raise
-        raise AttributeError(name)
-
-    def repo_connect(self):
-        return self.config.repository()
-
-    def migrate(self, vcconf, toupgrade, options):
-        """upgrade the given set of cubes
-
-        `cubes` is an ordered list of 3-uple:
-        (cube, fromversion, toversion)
-        """
-        if options.fs_only:
-            # monkey path configuration.accept_mode so database mode (e.g. Any)
-            # won't be accepted
-            orig_accept_mode = self.config.accept_mode
-            def accept_mode(mode):
-                if mode == 'Any':
-                    return False
-                return orig_accept_mode(mode)
-            self.config.accept_mode = accept_mode
-        # may be an iterator
-        toupgrade = tuple(toupgrade)
-        vmap = dict( (cube, (fromver, tover)) for cube, fromver, tover in toupgrade)
-        ctx = self.__context
-        ctx['versions_map'] = vmap
-        if self.config.accept_mode('Any') and 'cubicweb' in vmap:
-            migrdir = self.config.migration_scripts_dir()
-            self.cmd_process_script(join(migrdir, 'bootstrapmigration_repository.py'))
-        for cube, fromversion, toversion in toupgrade:
-            if cube == 'cubicweb':
-                migrdir = self.config.migration_scripts_dir()
-            else:
-                migrdir = self.config.cube_migration_scripts_dir(cube)
-            scripts = filter_scripts(self.config, migrdir, fromversion, toversion)
-            if scripts:
-                prevversion = None
-                for version, script in scripts:
-                    # take care to X.Y.Z_Any.py / X.Y.Z_common.py: we've to call
-                    # cube_upgraded once all script of X.Y.Z have been executed
-                    if prevversion is not None and version != prevversion:
-                        self.cube_upgraded(cube, prevversion)
-                    prevversion = version
-                    self.cmd_process_script(script)
-                self.cube_upgraded(cube, toversion)
-            else:
-                self.cube_upgraded(cube, toversion)
-
-    def cube_upgraded(self, cube, version):
-        pass
-
-    def shutdown(self):
-        pass
-
-    def interact(self, args, kwargs, meth):
-        """execute the given method according to user's confirmation"""
-        msg = 'Execute command: %s(%s) ?' % (
-            meth.__name__[4:],
-            ', '.join([repr(arg) for arg in args] +
-                      ['%s=%r' % (n,v) for n,v in kwargs.items()]))
-        if 'ask_confirm' in kwargs:
-            ask_confirm = kwargs.pop('ask_confirm')
-        else:
-            ask_confirm = True
-        if not ask_confirm or self.confirm(msg):
-            return meth(*args, **kwargs)
-
-    def confirm(self, question, shell=True, abort=True, retry=False, default='y'):
-        """ask for confirmation and return true on positive answer
-
-        if `retry` is true the r[etry] answer may return 2
-        """
-        possibleanswers = ['y','n']
-        if abort:
-            possibleanswers.append('abort')
-        if shell:
-            possibleanswers.append('shell')
-        if retry:
-            possibleanswers.append('retry')
-        try:
-            answer = ASK.ask(question, possibleanswers, default)
-        except (EOFError, KeyboardInterrupt):
-            answer = 'abort'
-        if answer == 'n':
-            return False
-        if answer == 'retry':
-            return 2
-        if answer == 'abort':
-            raise SystemExit(1)
-        if shell and answer == 'shell':
-            self.interactive_shell()
-            return self.confirm(question)
-        return True
-
-    def interactive_shell(self):
-        self.confirm = yes
-        self.need_wrap = False
-        # avoid '_' to be added to builtins by sys.display_hook
-        def do_not_add___to_builtins(obj):
-            if obj is not None:
-                print repr(obj)
-        sys.displayhook = do_not_add___to_builtins
-        local_ctx = self._create_context()
-        try:
-            import readline
-            from rlcompleter import Completer
-        except ImportError:
-            # readline not available
-            pass
-        else:
-            readline.set_completer(Completer(local_ctx).complete)
-            readline.parse_and_bind('tab: complete')
-            home_key = 'HOME'
-            if sys.platform == 'win32':
-                home_key = 'USERPROFILE'
-            histfile = os.path.join(os.environ[home_key], ".eshellhist")
-            try:
-                readline.read_history_file(histfile)
-            except IOError:
-                pass
-        from code import interact
-        banner = """entering the migration python shell
-just type migration commands or arbitrary python code and type ENTER to execute it
-type "exit" or Ctrl-D to quit the shell and resume operation"""
-        # give custom readfunc to avoid http://bugs.python.org/issue1288615
-        def unicode_raw_input(prompt):
-            return unicode(raw_input(prompt), sys.stdin.encoding)
-        interact(banner, readfunc=unicode_raw_input, local=local_ctx)
-        readline.write_history_file(histfile)
-        # delete instance's confirm attribute to avoid questions
-        del self.confirm
-        self.need_wrap = True
-
-    @cached
-    def _create_context(self):
-        """return a dictionary to use as migration script execution context"""
-        context = self.__context
-        for attr in dir(self):
-            if attr.startswith('cmd_'):
-                if self.need_wrap:
-                    context[attr[4:]] = getattr(self, attr[4:])
-                else:
-                    context[attr[4:]] = getattr(self, attr)
-        return context
-
-    def cmd_process_script(self, migrscript, funcname=None, *args, **kwargs):
-        """execute a migration script
-        in interactive mode,  display the migration script path, ask for
-        confirmation and execute it if confirmed
-        """
-        migrscript = os.path.normpath(migrscript)
-        if migrscript.endswith('.py'):
-            script_mode = 'python'
-        elif migrscript.endswith('.txt') or migrscript.endswith('.rst'):
-            script_mode = 'doctest'
-        else:
-            raise Exception('This is not a valid cubicweb shell input')
-        if not self.execscript_confirm(migrscript):
-            return
-        scriptlocals = self._create_context().copy()
-        if script_mode == 'python':
-            if funcname is None:
-                pyname = '__main__'
-            else:
-                pyname = splitext(basename(migrscript))[0]
-            scriptlocals.update({'__file__': migrscript, '__name__': pyname})
-            execfile(migrscript, scriptlocals)
-            if funcname is not None:
-                try:
-                    func = scriptlocals[funcname]
-                    self.info('found %s in locals', funcname)
-                    assert callable(func), '%s (%s) is not callable' % (func, funcname)
-                except KeyError:
-                    self.critical('no %s in script %s', funcname, migrscript)
-                    return None
-                return func(*args, **kwargs)
-        else: # script_mode == 'doctest'
-            import doctest
-            doctest.testfile(migrscript, module_relative=False,
-                             optionflags=doctest.ELLIPSIS, globs=scriptlocals)
-
-    def cmd_option_renamed(self, oldname, newname):
-        """a configuration option has been renamed"""
-        self._option_changes.append(('renamed', oldname, newname))
-
-    def cmd_option_group_change(self, option, oldgroup, newgroup):
-        """a configuration option has been moved in another group"""
-        self._option_changes.append(('moved', option, oldgroup, newgroup))
-
-    def cmd_option_added(self, optname):
-        """a configuration option has been added"""
-        self._option_changes.append(('added', optname))
-
-    def cmd_option_removed(self, optname):
-        """a configuration option has been removed"""
-        # can safely be ignored
-        #self._option_changes.append(('removed', optname))
-
-    def cmd_option_type_changed(self, optname, oldtype, newvalue):
-        """a configuration option's type has changed"""
-        self._option_changes.append(('typechanged', optname, oldtype, newvalue))
-
-    def cmd_add_cubes(self, cubes):
-        """modify the list of used cubes in the in-memory config
-        returns newly inserted cubes, including dependencies
-        """
-        if isinstance(cubes, basestring):
-            cubes = (cubes,)
-        origcubes = self.config.cubes()
-        newcubes = [p for p in self.config.expand_cubes(cubes)
-                       if not p in origcubes]
-        if newcubes:
-            for cube in cubes:
-                assert cube in newcubes
-            self.config.add_cubes(newcubes)
-        return newcubes
-
-    def cmd_remove_cube(self, cube, removedeps=False):
-        if removedeps:
-            toremove = self.config.expand_cubes([cube])
-        else:
-            toremove = (cube,)
-        origcubes = self.config._cubes
-        basecubes = [c for c in origcubes if not c in toremove]
-        self.config._cubes = tuple(self.config.expand_cubes(basecubes))
-        removed = [p for p in origcubes if not p in self.config._cubes]
-        if not cube in removed:
-            raise ConfigurationError("can't remove cube %s, "
-                                     "used as a dependency" % cube)
-        return removed
-
-    def rewrite_configuration(self):
-        # import locally, show_diffs unavailable in gae environment
-        from cubicweb.toolsutils import show_diffs
-        configfile = self.config.main_config_file()
-        if self._option_changes:
-            read_old_config(self.config, self._option_changes, configfile)
-        fd, newconfig = tempfile.mkstemp()
-        for optdescr in self._option_changes:
-            if optdescr[0] == 'added':
-                optdict = self.config.get_option_def(optdescr[1])
-                if optdict.get('default') is REQUIRED:
-                    self.config.input_option(optdescr[1], optdict)
-        self.config.generate_config(open(newconfig, 'w'))
-        show_diffs(configfile, newconfig)
-        os.close(fd)
-        if exists(newconfig):
-            os.unlink(newconfig)
-
-
-from logging import getLogger
-from cubicweb import set_log_methods
-set_log_methods(MigrationHelper, getLogger('cubicweb.migration'))
--- a/common/schema.py	Mon Dec 07 09:10:36 2009 -0800
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,11 +0,0 @@
-"""pre 3.0 bw compat
-
-:organization: Logilab
-:copyright: 2001-2009 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
-:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
-:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
-"""
-# pylint: disable-msg=W0614,W0401
-from warnings import warn
-warn('moved to cubicweb.schema', DeprecationWarning, stacklevel=2)
-from cubicweb.schema import *
--- a/common/selectors.py	Mon Dec 07 09:10:36 2009 -0800
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,12 +0,0 @@
-"""pre 3.2 bw compat
-
-:organization: Logilab
-:copyright: 2001-2009 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
-:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
-:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
-"""
-# pylint: disable-msg=W0614,W0401
-from warnings import warn
-warn('moved to cubicweb.selectors', DeprecationWarning, stacklevel=2)
-from cubicweb.selectors import *
-from cubicweb.selectors import _rql_condition
--- a/common/test/data/migration/0.0.3_Any.py	Mon Dec 07 09:10:36 2009 -0800
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,8 +0,0 @@
-"""
-
-:organization: Logilab
-:copyright: 2001-2009 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
-:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
-:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
-"""
-coucou
--- a/common/test/data/migration/0.0.4_Any.py	Mon Dec 07 09:10:36 2009 -0800
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,8 +0,0 @@
-"""
-
-:organization: Logilab
-:copyright: 2001-2009 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
-:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
-:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
-"""
-coucou
--- a/common/test/data/migration/0.1.0_Any.py	Mon Dec 07 09:10:36 2009 -0800
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,8 +0,0 @@
-"""
-
-:organization: Logilab
-:copyright: 2001-2009 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
-:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
-:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
-"""
-coucou
--- a/common/test/data/migration/0.1.0_common.py	Mon Dec 07 09:10:36 2009 -0800
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,7 +0,0 @@
-"""common to all configuration
-
-:organization: Logilab
-:copyright: 2001-2009 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
-:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
-:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
-"""
--- a/common/test/data/migration/0.1.0_repository.py	Mon Dec 07 09:10:36 2009 -0800
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,7 +0,0 @@
-"""repository specific
-
-:organization: Logilab
-:copyright: 2001-2009 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
-:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
-:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
-"""
--- a/common/test/data/migration/0.1.0_web.py	Mon Dec 07 09:10:36 2009 -0800
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,7 +0,0 @@
-"""web only
-
-:organization: Logilab
-:copyright: 2001-2009 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
-:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
-:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
-"""
--- a/common/test/data/migration/0.1.2_Any.py	Mon Dec 07 09:10:36 2009 -0800
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,8 +0,0 @@
-"""
-
-:organization: Logilab
-:copyright: 2001-2009 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
-:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
-:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
-"""
-coucou
--- a/common/test/data/migration/depends.map	Mon Dec 07 09:10:36 2009 -0800
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,5 +0,0 @@
-0.0.2: 2.3.0
-0.0.3: 2.4.0
-# missing 0.0.4 entry, that's alright
-0.1.0: 2.6.0
-0.1.2: 2.10.0
--- a/common/test/data/server_migration/bootstrapmigration_repository.py	Mon Dec 07 09:10:36 2009 -0800
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,7 +0,0 @@
-"""allways executed before all others in server migration
-
-:organization: Logilab
-:copyright: 2001-2009 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
-:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
-:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
-"""
--- a/common/test/unittest_migration.py	Mon Dec 07 09:10:36 2009 -0800
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,103 +0,0 @@
-"""cubicweb.common.migration unit tests
-
-:organization: Logilab
-:copyright: 2001-2009 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
-:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
-:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
-"""
-
-from os.path import abspath
-from logilab.common.testlib import TestCase, unittest_main
-
-from cubicweb.devtools import TestServerConfiguration
-from cubicweb.cwconfig import CubicWebConfiguration
-from cubicweb.common.migration import MigrationHelper, filter_scripts
-from cubicweb.server.migractions import ServerMigrationHelper
-
-
-class Schema(dict):
-    def has_entity(self, e_type):
-        return self.has_key(e_type)
-
-SMIGRDIR = abspath('data/server_migration') + '/'
-TMIGRDIR = abspath('data/migration') + '/'
-
-class MigrTestConfig(TestServerConfiguration):
-    verbosity = 0
-    def migration_scripts_dir(cls):
-        return SMIGRDIR
-
-    def cube_migration_scripts_dir(cls, cube):
-        return TMIGRDIR
-
-class MigrationToolsTC(TestCase):
-    def setUp(self):
-        self.config = MigrTestConfig('data')
-        from yams.schema import Schema
-        self.config.load_schema = lambda expand_cubes=False: Schema('test')
-        self.config.__class__.cubicweb_appobject_path = frozenset()
-        self.config.__class__.cube_appobject_path = frozenset()
-
-    def test_filter_scripts_base(self):
-        self.assertListEquals(filter_scripts(self.config, SMIGRDIR, (2,3,0), (2,4,0)),
-                              [])
-        self.assertListEquals(filter_scripts(self.config, SMIGRDIR, (2,4,0), (2,5,0)),
-                              [((2, 5, 0), SMIGRDIR+'2.5.0_Any.sql')])
-        self.assertListEquals(filter_scripts(self.config, SMIGRDIR, (2,5,0), (2,6,0)),
-                              [((2, 6, 0), SMIGRDIR+'2.6.0_Any.sql')])
-        self.assertListEquals(filter_scripts(self.config, SMIGRDIR, (2,4,0), (2,6,0)),
-                              [((2, 5, 0), SMIGRDIR+'2.5.0_Any.sql'),
-                               ((2, 6, 0), SMIGRDIR+'2.6.0_Any.sql')])
-        self.assertListEquals(filter_scripts(self.config, SMIGRDIR, (2,5,0), (2,5,1)),
-                              [])
-        self.assertListEquals(filter_scripts(self.config, SMIGRDIR, (2,5,0), (2,10,2)),
-                              [((2, 6, 0), SMIGRDIR+'2.6.0_Any.sql'),
-                               ((2, 10, 2), SMIGRDIR+'2.10.2_Any.sql')])
-        self.assertListEquals(filter_scripts(self.config, SMIGRDIR, (2,5,1), (2,6,0)),
-                              [((2, 6, 0), SMIGRDIR+'2.6.0_Any.sql')])
-
-        self.assertListEquals(filter_scripts(self.config, TMIGRDIR, (0,0,2), (0,0,3)),
-                              [((0, 0, 3), TMIGRDIR+'0.0.3_Any.py')])
-        self.assertListEquals(filter_scripts(self.config, TMIGRDIR, (0,0,2), (0,0,4)),
-                              [((0, 0, 3), TMIGRDIR+'0.0.3_Any.py'),
-                               ((0, 0, 4), TMIGRDIR+'0.0.4_Any.py')])
-
-    def test_filter_scripts_for_mode(self):
-        config = CubicWebConfiguration('data')
-        config.verbosity = 0
-        self.assert_(not isinstance(config.migration_handler(), ServerMigrationHelper))
-        self.assertIsInstance(config.migration_handler(), MigrationHelper)
-        config = self.config
-        config.__class__.name = 'twisted'
-        self.assertListEquals(filter_scripts(config, TMIGRDIR, (0,0,4), (0,1,0)),
-                              [((0, 1 ,0), TMIGRDIR+'0.1.0_common.py'),
-                               ((0, 1 ,0), TMIGRDIR+'0.1.0_web.py')])
-        config.__class__.name = 'repository'
-        self.assertListEquals(filter_scripts(config, TMIGRDIR, (0,0,4), (0,1,0)),
-                              [((0, 1 ,0), TMIGRDIR+'0.1.0_Any.py'),
-                               ((0, 1 ,0), TMIGRDIR+'0.1.0_common.py'),
-                               ((0, 1 ,0), TMIGRDIR+'0.1.0_repository.py')])
-        config.__class__.name = 'all-in-one'
-        self.assertListEquals(filter_scripts(config, TMIGRDIR, (0,0,4), (0,1,0)),
-                              [((0, 1 ,0), TMIGRDIR+'0.1.0_Any.py'),
-                               ((0, 1 ,0), TMIGRDIR+'0.1.0_common.py'),
-                               ((0, 1 ,0), TMIGRDIR+'0.1.0_repository.py'),
-                               ((0, 1 ,0), TMIGRDIR+'0.1.0_web.py')])
-        config.__class__.name = 'repository'
-
-
-from cubicweb.devtools import ApptestConfiguration, init_test_database, cleanup_sqlite
-
-class BaseCreationTC(TestCase):
-
-    def test_db_creation(self):
-        """make sure database can be created"""
-        config = ApptestConfiguration('data')
-        source = config.sources()['system']
-        self.assertEquals(source['db-driver'], 'sqlite')
-        cleanup_sqlite(source['db-name'], removetemplate=True)
-        init_test_database(config=config)
-
-
-if __name__ == '__main__':
-    unittest_main()
--- a/common/utils.py	Mon Dec 07 09:10:36 2009 -0800
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,11 +0,0 @@
-"""pre 3.2 bw compat
-
-:organization: Logilab
-:copyright: 2001-2009 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
-:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
-:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
-"""
-# pylint: disable-msg=W0614,W0401
-from warnings import warn
-warn('moved to cubicweb.utils', DeprecationWarning, stacklevel=2)
-from cubicweb.utils import *
--- a/common/view.py	Mon Dec 07 09:10:36 2009 -0800
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,11 +0,0 @@
-"""pre 3.2 bw compat
-
-:organization: Logilab
-:copyright: 2001-2009 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
-:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
-:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
-"""
-# pylint: disable-msg=W0614,W0401
-from warnings import warn
-warn('moved to cubicweb.view', DeprecationWarning, stacklevel=2)
-from cubicweb.view import *
--- a/cwconfig.py	Mon Dec 07 09:10:36 2009 -0800
+++ b/cwconfig.py	Tue Dec 08 10:40:20 2009 +0100
@@ -331,6 +331,10 @@
     def available_cubes(cls):
         cubes = set()
         for directory in cls.cubes_search_path():
+            if not os.path.exists(directory):
+                self.error('unexistant directory in cubes search path: %s'
+                           % directory)
+                continue
             for cube in os.listdir(directory):
                 if isdir(join(directory, cube)) and not cube == 'shared':
                     cubes.add(cube)
@@ -922,11 +926,11 @@
 
     def migration_handler(self):
         """return a migration handler instance"""
-        from cubicweb.common.migration import MigrationHelper
+        from cubicweb.migration import MigrationHelper
         return MigrationHelper(self, verbosity=self.verbosity)
 
     def i18ncompile(self, langs=None):
-        from cubicweb.common import i18n
+        from cubicweb import i18n
         if langs is None:
             langs = self.available_languages()
         i18ndir = join(self.apphome, 'i18n')
--- a/cwctl.py	Mon Dec 07 09:10:36 2009 -0800
+++ b/cwctl.py	Tue Dec 08 10:40:20 2009 +0100
@@ -311,7 +311,7 @@
         # handle i18n files structure
         # in the first cube given
         print '-> preparing i18n catalogs'
-        from cubicweb.common import i18n
+        from cubicweb import i18n
         langs = [lang for lang, _ in i18n.available_catalogs(join(templdirs[0], 'i18n'))]
         errors = config.i18ncompile(langs)
         if errors:
@@ -666,7 +666,7 @@
         # * install new languages
         # * recompile catalogs
         # in the first componant given
-        from cubicweb.common import i18n
+        from cubicweb import i18n
         templdir = cwcfg.cube_dir(config.cubes()[0])
         langs = [lang for lang, _ in i18n.available_catalogs(join(templdir, 'i18n'))]
         errors = config.i18ncompile(langs)
--- a/devtools/devctl.py	Mon Dec 07 09:10:36 2009 -0800
+++ b/devtools/devctl.py	Tue Dec 08 10:40:20 2009 +0100
@@ -113,7 +113,7 @@
 
 
 def _generate_schema_pot(w, vreg, schema, libconfig=None, cube=None):
-    from cubicweb.common.i18n import add_msg
+    from cubicweb.i18n import add_msg
     from cubicweb.web import uicfg
     from cubicweb.schema import META_RTYPES, SYSTEM_RTYPES
     no_context_rtypes = META_RTYPES | SYSTEM_RTYPES
@@ -286,7 +286,7 @@
         import yams
         from logilab.common.fileutils import ensure_fs_mode
         from logilab.common.shellutils import globfind, find, rm
-        from cubicweb.common.i18n import extract_from_tal, execute
+        from cubicweb.i18n import extract_from_tal, execute
         tempdir = tempfile.mkdtemp()
         potfiles = [join(I18NDIR, 'static-messages.pot')]
         print '-> extract schema messages.'
@@ -379,7 +379,7 @@
     import tempfile
     from logilab.common.fileutils import ensure_fs_mode
     from logilab.common.shellutils import find, rm
-    from cubicweb.common.i18n import extract_from_tal, execute
+    from cubicweb.i18n import extract_from_tal, execute
     toedit = []
     cube = basename(normpath(cubedir))
     tempdir = tempfile.mkdtemp()
--- a/devtools/testlib.py	Mon Dec 07 09:10:36 2009 -0800
+++ b/devtools/testlib.py	Tue Dec 08 10:40:20 2009 +0100
@@ -353,38 +353,6 @@
     def entity(self, rql, args=None, eidkey=None, req=None):
         return self.execute(rql, args, eidkey, req=req).get_entity(0, 0)
 
-    def add_entity(self, etype, req=None, **kwargs):
-        rql = ['INSERT %s X' % etype]
-        # dict for replacement in RQL Request
-        args = {}
-        if kwargs:
-            rql.append(':')
-            # dict to define new entities variables
-            entities = {}
-            # assignement part of the request
-            sub_rql = []
-            for key, value in kwargs.iteritems():
-                # entities
-                if hasattr(value, 'eid'):
-                    new_value = "%s__" % key.upper()
-                    entities[new_value] = value.eid
-                    args[new_value] = value.eid
-
-                    sub_rql.append("X %s %s" % (key, new_value))
-                # final attributes
-                else:
-                    sub_rql.append('X %s %%(%s)s' % (key, key))
-                    args[key] = value
-            rql.append(', '.join(sub_rql))
-            if entities:
-                rql.append('WHERE')
-                # WHERE part of the request (to link entity to they eid)
-                sub_rql = []
-                for key, value in entities.iteritems():
-                    sub_rql.append("%s eid %%(%s)s" % (key, key))
-                rql.append(', '.join(sub_rql))
-        return self.execute(' '.join(rql), args, req=req).get_entity(0, 0)
-
     # vregistry inspection utilities ###########################################
 
     def pviews(self, req, rset):
@@ -689,6 +657,12 @@
 
     # deprecated ###############################################################
 
+    @deprecated('[3.6] use self.request().create_entity(...)')
+    def add_entity(self, etype, req=None, **kwargs):
+        if req is None:
+            req = self.request()
+        return req.create_entity(etype, **kwargs)
+
     @deprecated('[3.4] use self.vreg["etypes"].etype_class(etype)(self.request())')
     def etype_instance(self, etype, req=None):
         req = req or self.request()
--- a/entities/wfobjs.py	Mon Dec 07 09:10:36 2009 -0800
+++ b/entities/wfobjs.py	Tue Dec 08 10:40:20 2009 +0100
@@ -152,6 +152,20 @@
             tr.add_exit_point(fromstate, tostate)
         return tr
 
+    def replace_state(self, todelstate, replacement):
+        """migration convenience method"""
+        if not hasattr(todelstate, 'eid'):
+            todelstate = self.state_by_name(todelstate)
+        if not hasattr(replacement, 'eid'):
+            replacement = self.state_by_name(replacement)
+        execute = self._cw.unsafe_execute
+        execute('SET X in_state S WHERE S eid %(s)s', {'s': todelstate.eid}, 's')
+        execute('SET X from_state NS WHERE X to_state OS, OS eid %(os)s, NS eid %(ns)s',
+                {'os': todelstate.eid, 'ns': newstate.eid}, 's')
+        execute('SET X to_state NS WHERE X to_state OS, OS eid %(os)s, NS eid %(ns)s',
+                {'os': todelstate.eid, 'ns': newstate.eid}, 's')
+        todelstate.delete()
+
 
 class BaseTransition(AnyEntity):
     """customized class for abstract transition
--- a/goa/goactl.py	Mon Dec 07 09:10:36 2009 -0800
+++ b/goa/goactl.py	Tue Dec 08 10:40:20 2009 +0100
@@ -59,6 +59,8 @@
     'cwconfig.py',
     'entity.py',
     'interfaces.py',
+    'i18n.py',
+    'migration.py',
     'rqlrewrite.py',
     'rset.py',
     'schema.py',
@@ -69,7 +71,6 @@
     'view.py',
 
     'common/mail.py',
-    'common/migration.py',
     'common/mixins.py',
     'common/mttransforms.py',
     'common/uilib.py',
@@ -224,7 +225,7 @@
                            join(packagesdir, include))
         # generate sample config
         from cubicweb.goa.goaconfig import GAEConfiguration
-        from cubicweb.common.migration import MigrationHelper
+        from cubicweb.migration import MigrationHelper
         config = GAEConfiguration(appid, appldir)
         if exists(config.main_config_file()):
             mih = MigrationHelper(config)
--- a/hooks/syncschema.py	Mon Dec 07 09:10:36 2009 -0800
+++ b/hooks/syncschema.py	Tue Dec 08 10:40:20 2009 +0100
@@ -12,7 +12,7 @@
 """
 __docformat__ = "restructuredtext en"
 
-from yams.schema import BASE_TYPES
+from yams.schema import BASE_TYPES, RelationSchema
 from yams.buildobjs import EntityType, RelationType, RelationDefinition
 from yams.schema2sql import eschema2sql, rschema2sql, type_from_constraints
 
@@ -483,7 +483,8 @@
         # so there is nothing to do here
         if session.added_in_transaction(rdef.eid):
             return
-        subjtype, rtype, objtype = session.vreg.schema.schema_by_eid(rdef.eid)
+        rdefschema = session.vreg.schema.schema_by_eid(rdef.eid)
+        subjtype, rtype, objtype = rdefschema.as_triple()
         cstrtype = self.entity.type
         oldcstr = rtype.rdef(subjtype, objtype).constraint_by_type(cstrtype)
         newcstr = CONSTRAINTS[cstrtype].deserialize(self.entity.value)
@@ -603,7 +604,7 @@
     def commit_event(self):
         # structure should be clean, not need to remove entity's relations
         # at this point
-        self.rschema.rdef[self.kobj].update(self.values)
+        self.rschema.rdefs[self.kobj].update(self.values)
 
 
 class MemSchemaRDefDel(MemSchemaOperation):
@@ -632,7 +633,8 @@
         if self.session.added_in_transaction(rdef.eid):
             self.cancelled = True
             return
-        subjtype, rtype, objtype = self.session.vreg.schema.schema_by_eid(rdef.eid)
+        rdef = self.session.vreg.schema.schema_by_eid(rdef.eid)
+        subjtype, rtype, objtype = rdef.as_triple()
         self.prepare_constraints(subjtype, rtype, objtype)
         cstrtype = self.entity.type
         self.cstr = rtype.rdef(subjtype, objtype).constraint_by_type(cstrtype)
@@ -668,13 +670,13 @@
     def commit_event(self):
         """the observed connections pool has been commited"""
         try:
-            erschema = self.session.vreg.schema[self.name]
+            erschema = self.session.vreg.schema.schema_by_eid(self.eid)
         except KeyError:
             # duh, schema not found, log error and skip operation
-            self.error('no schema for %s', self.name)
+            self.error('no schema for %s', self.eid)
             return
         perms = list(erschema.action_permissions(self.action))
-        if hasattr(self, group_eid):
+        if hasattr(self, 'group_eid'):
             perm = self.session.entity_from_eid(self.group_eid).name
         else:
             perm = erschema.rql_expression(self.expr)
@@ -695,18 +697,20 @@
     def commit_event(self):
         """the observed connections pool has been commited"""
         try:
-            erschema = self.session.vreg.schema[self.name]
+            erschema = self.session.vreg.schema.schema_by_eid(self.eid)
         except KeyError:
             # duh, schema not found, log error and skip operation
-            self.error('no schema for %s', self.name)
+            self.error('no schema for %s', self.eid)
+            return
+        if isinstance(erschema, RelationSchema): # XXX 3.6 migration
             return
         perms = list(erschema.action_permissions(self.action))
-        if hasattr(self, group_eid):
+        if hasattr(self, 'group_eid'):
             perm = self.session.entity_from_eid(self.group_eid).name
         else:
             perm = erschema.rql_expression(self.expr)
         try:
-            perms.remove(self.group)
+            perms.remove(perm)
             erschema.set_action_permissions(self.action, perms)
         except ValueError:
             self.error('can\'t remove permission %s for %s on %s',
@@ -916,7 +920,7 @@
     # don't use getattr(entity, attr), we would get the modified value if any
     for attr in ro_attrs:
         if attr in entity.edited_attributes:
-            origval, newval = entity_oldnewvalue(entity, attr)
+            origval, newval = hook.entity_oldnewvalue(entity, attr)
             if newval != origval:
                 errors[attr] = session._("can't change the %s attribute") % \
                                display_name(session, attr)
@@ -940,8 +944,8 @@
 
     def __call__(self):
         session = self._cw
-        subjschema, rschema, objschema = session.vreg.schema.schema_by_eid(self.eidfrom)
-        subjschema, rschema, objschema = session.schema.schema_by_eid(rdefeid)
+        rdef = session.vreg.schema.schema_by_eid(self.eidfrom)
+        subjschema, rschema, objschema = rdef.as_triple()
         pendings = session.transaction_data.get('pendingeids', ())
         pendingrdefs = session.transaction_data.setdefault('pendingrdefs', set())
         # first delete existing relation if necessary
@@ -956,7 +960,7 @@
                                 % (rschema, subjschema, objschema))
         execute = session.unsafe_execute
         rset = execute('Any COUNT(X) WHERE X is %s, X relation_type R,'
-                       'R eid %%(x)s' % rdeftype, {'x': rteid})
+                       'R eid %%(x)s' % rdeftype, {'x': self.eidto})
         lastrel = rset[0][0] == 0
         # we have to update physical schema systematically for final and inlined
         # relations, but only if it's the last instance for this relation type
@@ -965,17 +969,17 @@
         if (rschema.final or rschema.inlined):
             rset = execute('Any COUNT(X) WHERE X is %s, X relation_type R, '
                            'R eid %%(x)s, X from_entity E, E name %%(name)s'
-                           % rdeftype, {'x': rteid, 'name': str(subjschema)})
+                           % rdeftype, {'x': self.eidto, 'name': str(subjschema)})
             if rset[0][0] == 0 and not subjschema.eid in pendings:
                 ptypes = session.transaction_data.setdefault('pendingrtypes', set())
                 ptypes.add(rschema.type)
                 DropColumn(session, table=SQL_PREFIX + subjschema.type,
-                             column=SQL_PREFIX + rschema.type)
+                           column=SQL_PREFIX + rschema.type)
         elif lastrel:
             DropRelationTable(session, rschema.type)
         # if this is the last instance, drop associated relation type
         if lastrel and not rteid in pendings:
-            execute('DELETE CWRType X WHERE X eid %(x)s', {'x': rteid}, 'x')
+            execute('DELETE CWRType X WHERE X eid %(x)s', {'x': self.eidto}, 'x')
         MemSchemaRDefDel(session, (subjschema, rschema, objschema))
 
 
@@ -1087,7 +1091,7 @@
                                    group_eid=self.eidto)
         else: # RQLExpression
             expr = self._cw.entity_from_eid(self.eidto).expression
-            MemSchemaPermissionAdd(session, action=action, eid=self.eidfrom,
+            MemSchemaPermissionAdd(self._cw, action=action, eid=self.eidfrom,
                                    expr=expr)
 
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/i18n.py	Tue Dec 08 10:40:20 2009 +0100
@@ -0,0 +1,99 @@
+"""Some i18n/gettext utilities.
+
+:organization: Logilab
+:copyright: 2001-2009 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
+"""
+__docformat__ = "restructuredtext en"
+
+import re
+import os
+import sys
+from os.path import join, basename, splitext, exists
+from glob import glob
+
+from cubicweb.toolsutils import create_dir
+
+def extract_from_tal(files, output_file):
+    """extract i18n strings from tal and write them into the given output file
+    using standard python gettext marker (_)
+    """
+    output = open(output_file, 'w')
+    for filepath in files:
+        for match in re.finditer('i18n:(content|replace)="([^"]+)"', open(filepath).read()):
+            print >> output, '_("%s")' % match.group(2)
+    output.close()
+
+
+def add_msg(w, msgid, msgctx=None):
+    """write an empty pot msgid definition"""
+    if isinstance(msgid, unicode):
+        msgid = msgid.encode('utf-8')
+    if msgctx:
+        if isinstance(msgctx, unicode):
+            msgctx = msgctx.encode('utf-8')
+        w('msgctxt "%s"\n' % msgctx)
+    msgid = msgid.replace('"', r'\"').splitlines()
+    if len(msgid) > 1:
+        w('msgid ""\n')
+        for line in msgid:
+            w('"%s"' % line.replace('"', r'\"'))
+    else:
+        w('msgid "%s"\n' % msgid[0])
+    w('msgstr ""\n\n')
+
+
+def execute(cmd):
+    """display the command, execute it and raise an Exception if returned
+    status != 0
+    """
+    from subprocess import call
+    print cmd.replace(os.getcwd() + os.sep, '')
+    status = call(cmd, shell=True)
+    if status != 0:
+        raise Exception('status = %s' % status)
+
+
+def available_catalogs(i18ndir=None):
+    if i18ndir is None:
+        wildcard = '*.po'
+    else:
+        wildcard = join(i18ndir, '*.po')
+    for popath in glob(wildcard):
+        lang = splitext(basename(popath))[0]
+        yield lang, popath
+
+
+def compile_i18n_catalogs(sourcedirs, destdir, langs):
+    """generate .mo files for a set of languages into the `destdir` i18n directory
+    """
+    from logilab.common.fileutils import ensure_fs_mode
+    print '-> compiling %s catalogs...' % destdir
+    errors = []
+    for lang in langs:
+        langdir = join(destdir, lang, 'LC_MESSAGES')
+        if not exists(langdir):
+            create_dir(langdir)
+        pofiles = [join(path, '%s.po' % lang) for path in sourcedirs]
+        pofiles = [pof for pof in pofiles if exists(pof)]
+        mergedpo = join(destdir, '%s_merged.po' % lang)
+        try:
+            # merge instance/cubes messages catalogs with the stdlib's one
+            execute('msgcat --use-first --sort-output --strict -o "%s" %s'
+                    % (mergedpo, ' '.join('"%s"' % f for f in pofiles)))
+            # make sure the .mo file is writeable and compiles with *msgfmt*
+            applmo = join(destdir, lang, 'LC_MESSAGES', 'cubicweb.mo')
+            try:
+                ensure_fs_mode(applmo)
+            except OSError:
+                pass # suppose not exists
+            execute('msgfmt "%s" -o "%s"' % (mergedpo, applmo))
+        except Exception, ex:
+            errors.append('while handling language %s: %s' % (lang, ex))
+        try:
+            # clean everything
+            os.unlink(mergedpo)
+        except Exception:
+            continue
+    return errors
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/migration.py	Tue Dec 08 10:40:20 2009 +0100
@@ -0,0 +1,373 @@
+"""utilities for instances migration
+
+:organization: Logilab
+:copyright: 2001-2009 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
+"""
+__docformat__ = "restructuredtext en"
+
+import sys
+import os
+import logging
+import tempfile
+from os.path import exists, join, basename, splitext
+
+from logilab.common.decorators import cached
+from logilab.common.configuration import REQUIRED, read_old_config
+from logilab.common.shellutils import ASK
+
+from cubicweb import ConfigurationError
+
+
+def filter_scripts(config, directory, fromversion, toversion, quiet=True):
+    """return a list of paths of migration files to consider to upgrade
+    from a version to a greater one
+    """
+    from logilab.common.changelog import Version # doesn't work with appengine
+    assert fromversion
+    assert toversion
+    assert isinstance(fromversion, tuple), fromversion.__class__
+    assert isinstance(toversion, tuple), toversion.__class__
+    assert fromversion <= toversion, (fromversion, toversion)
+    if not exists(directory):
+        if not quiet:
+            print directory, "doesn't exists, no migration path"
+        return []
+    if fromversion == toversion:
+        return []
+    result = []
+    for fname in os.listdir(directory):
+        if fname.endswith('.pyc') or fname.endswith('.pyo') \
+               or fname.endswith('~'):
+            continue
+        fpath = join(directory, fname)
+        try:
+            tver, mode = fname.split('_', 1)
+        except ValueError:
+            continue
+        mode = mode.split('.', 1)[0]
+        if not config.accept_mode(mode):
+            continue
+        try:
+            tver = Version(tver)
+        except ValueError:
+            continue
+        if tver <= fromversion:
+            continue
+        if tver > toversion:
+            continue
+        result.append((tver, fpath))
+    # be sure scripts are executed in order
+    return sorted(result)
+
+
+IGNORED_EXTENSIONS = ('.swp', '~')
+
+
+def execscript_confirm(scriptpath):
+    """asks for confirmation before executing a script and provides the
+    ability to show the script's content
+    """
+    while True:
+        answer = ASK.ask('Execute %r ?' % scriptpath, ('Y','n','show'), 'Y')
+        if answer == 'n':
+            return False
+        elif answer == 'show':
+            stream = open(scriptpath)
+            scriptcontent = stream.read()
+            stream.close()
+            print
+            print scriptcontent
+            print
+        else:
+            return True
+
+def yes(*args, **kwargs):
+    return True
+
+
+class MigrationHelper(object):
+    """class holding CubicWeb Migration Actions used by migration scripts"""
+
+    def __init__(self, config, interactive=True, verbosity=1):
+        self.config = config
+        if config:
+            # no config on shell to a remote instance
+            self.config.init_log(logthreshold=logging.ERROR, debug=True)
+        # 0: no confirmation, 1: only main commands confirmed, 2 ask for everything
+        self.verbosity = verbosity
+        self.need_wrap = True
+        if not interactive or not verbosity:
+            self.confirm = yes
+            self.execscript_confirm = yes
+        else:
+            self.execscript_confirm = execscript_confirm
+        self._option_changes = []
+        self.__context = {'confirm': self.confirm,
+                          'config': self.config,
+                          'interactive_mode': interactive,
+                          }
+
+    def __getattribute__(self, name):
+        try:
+            return object.__getattribute__(self, name)
+        except AttributeError:
+            cmd = 'cmd_%s' % name
+            if hasattr(self, cmd):
+                meth = getattr(self, cmd)
+                return lambda *args, **kwargs: self.interact(args, kwargs,
+                                                             meth=meth)
+            raise
+        raise AttributeError(name)
+
+    def repo_connect(self):
+        return self.config.repository()
+
+    def migrate(self, vcconf, toupgrade, options):
+        """upgrade the given set of cubes
+
+        `cubes` is an ordered list of 3-uple:
+        (cube, fromversion, toversion)
+        """
+        if options.fs_only:
+            # monkey path configuration.accept_mode so database mode (e.g. Any)
+            # won't be accepted
+            orig_accept_mode = self.config.accept_mode
+            def accept_mode(mode):
+                if mode == 'Any':
+                    return False
+                return orig_accept_mode(mode)
+            self.config.accept_mode = accept_mode
+        # may be an iterator
+        toupgrade = tuple(toupgrade)
+        vmap = dict( (cube, (fromver, tover)) for cube, fromver, tover in toupgrade)
+        ctx = self.__context
+        ctx['versions_map'] = vmap
+        if self.config.accept_mode('Any') and 'cubicweb' in vmap:
+            migrdir = self.config.migration_scripts_dir()
+            self.cmd_process_script(join(migrdir, 'bootstrapmigration_repository.py'))
+        for cube, fromversion, toversion in toupgrade:
+            if cube == 'cubicweb':
+                migrdir = self.config.migration_scripts_dir()
+            else:
+                migrdir = self.config.cube_migration_scripts_dir(cube)
+            scripts = filter_scripts(self.config, migrdir, fromversion, toversion)
+            if scripts:
+                prevversion = None
+                for version, script in scripts:
+                    # take care to X.Y.Z_Any.py / X.Y.Z_common.py: we've to call
+                    # cube_upgraded once all script of X.Y.Z have been executed
+                    if prevversion is not None and version != prevversion:
+                        self.cube_upgraded(cube, prevversion)
+                    prevversion = version
+                    self.cmd_process_script(script)
+                self.cube_upgraded(cube, toversion)
+            else:
+                self.cube_upgraded(cube, toversion)
+
+    def cube_upgraded(self, cube, version):
+        pass
+
+    def shutdown(self):
+        pass
+
+    def interact(self, args, kwargs, meth):
+        """execute the given method according to user's confirmation"""
+        msg = 'Execute command: %s(%s) ?' % (
+            meth.__name__[4:],
+            ', '.join([repr(arg) for arg in args] +
+                      ['%s=%r' % (n,v) for n,v in kwargs.items()]))
+        if 'ask_confirm' in kwargs:
+            ask_confirm = kwargs.pop('ask_confirm')
+        else:
+            ask_confirm = True
+        if not ask_confirm or self.confirm(msg):
+            return meth(*args, **kwargs)
+
+    def confirm(self, question, shell=True, abort=True, retry=False, default='y'):
+        """ask for confirmation and return true on positive answer
+
+        if `retry` is true the r[etry] answer may return 2
+        """
+        possibleanswers = ['y','n']
+        if abort:
+            possibleanswers.append('abort')
+        if shell:
+            possibleanswers.append('shell')
+        if retry:
+            possibleanswers.append('retry')
+        try:
+            answer = ASK.ask(question, possibleanswers, default)
+        except (EOFError, KeyboardInterrupt):
+            answer = 'abort'
+        if answer == 'n':
+            return False
+        if answer == 'retry':
+            return 2
+        if answer == 'abort':
+            raise SystemExit(1)
+        if shell and answer == 'shell':
+            self.interactive_shell()
+            return self.confirm(question)
+        return True
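+    # note for callers passing retry=True: compare the result explicitly, e.g.
+    # (hypothetical) ``if self.confirm(msg, retry=True) == 2: ...``, since 2 is
+    # truthy and would otherwise be indistinguishable from a plain True answer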
+
+    def interactive_shell(self):
+        self.confirm = yes
+        self.need_wrap = False
+        # prevent '_' from being added to builtins by sys.displayhook
+        def do_not_add___to_builtins(obj):
+            if obj is not None:
+                print repr(obj)
+        sys.displayhook = do_not_add___to_builtins
+        local_ctx = self._create_context()
+        histfile = None
+        try:
+            import readline
+            from rlcompleter import Completer
+        except ImportError:
+            # readline not available
+            pass
+        else:
+            readline.set_completer(Completer(local_ctx).complete)
+            readline.parse_and_bind('tab: complete')
+            home_key = 'HOME'
+            if sys.platform == 'win32':
+                home_key = 'USERPROFILE'
+            histfile = os.path.join(os.environ[home_key], ".eshellhist")
+            try:
+                readline.read_history_file(histfile)
+            except IOError:
+                pass
+        from code import interact
+        banner = """entering the migration python shell
+just type migration commands or arbitrary python code and type ENTER to execute it
+type "exit" or Ctrl-D to quit the shell and resume operation"""
+        # give custom readfunc to avoid http://bugs.python.org/issue1288615
+        def unicode_raw_input(prompt):
+            return unicode(raw_input(prompt), sys.stdin.encoding)
+        interact(banner, readfunc=unicode_raw_input, local=local_ctx)
+        if histfile is not None:
+            readline.write_history_file(histfile)
+        # delete the instance's confirm attribute so that the class-level
+        # method, which actually asks the user, is used again from now on
+        del self.confirm
+        self.need_wrap = True
+
+    @cached
+    def _create_context(self):
+        """return a dictionary to use as migration script execution context"""
+        context = self.__context
+        for attr in dir(self):
+            if attr.startswith('cmd_'):
+                if self.need_wrap:
+                    context[attr[4:]] = getattr(self, attr[4:])
+                else:
+                    context[attr[4:]] = getattr(self, attr)
+        return context
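+    # when need_wrap is true, getattr(self, 'add_cubes') above resolves through
+    # __getattribute__ to a wrapper around cmd_add_cubes, so a migration script
+    # calling e.g. add_cubes('blog') (cube name hypothetical) goes through
+    # interact() and asks for confirmation first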
+
+    def cmd_process_script(self, migrscript, funcname=None, *args, **kwargs):
+        """execute a migration script
+        in interactive mode,  display the migration script path, ask for
+        confirmation and execute it if confirmed
+        """
+        migrscript = os.path.normpath(migrscript)
+        if migrscript.endswith('.py'):
+            script_mode = 'python'
+        elif migrscript.endswith('.txt') or migrscript.endswith('.rst'):
+            script_mode = 'doctest'
+        else:
+            raise Exception('This is not a valid cubicweb shell input')
+        if not self.execscript_confirm(migrscript):
+            return
+        scriptlocals = self._create_context().copy()
+        if script_mode == 'python':
+            if funcname is None:
+                pyname = '__main__'
+            else:
+                pyname = splitext(basename(migrscript))[0]
+            scriptlocals.update({'__file__': migrscript, '__name__': pyname})
+            execfile(migrscript, scriptlocals)
+            if funcname is not None:
+                try:
+                    func = scriptlocals[funcname]
+                    self.info('found %s in locals', funcname)
+                    assert callable(func), '%s (%s) is not callable' % (func, funcname)
+                except KeyError:
+                    self.critical('no %s in script %s', funcname, migrscript)
+                    return None
+                return func(*args, **kwargs)
+        else: # script_mode == 'doctest'
+            import doctest
+            doctest.testfile(migrscript, module_relative=False,
+                             optionflags=doctest.ELLIPSIS, globs=scriptlocals)
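+    # hypothetical usage sketch from a migration script or shell (the cmd_
+    # prefix is stripped in the execution context built by _create_context):
+    #   process_script('migration/3.6.0_Any.py')
+    #   process_script('migration/fix_data.py', 'fix_users', commit=False)
+    # .txt / .rst scripts are run through doctest instead of execfile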
+
+    def cmd_option_renamed(self, oldname, newname):
+        """a configuration option has been renamed"""
+        self._option_changes.append(('renamed', oldname, newname))
+
+    def cmd_option_group_change(self, option, oldgroup, newgroup):
+        """a configuration option has been moved in another group"""
+        self._option_changes.append(('moved', option, oldgroup, newgroup))
+
+    def cmd_option_added(self, optname):
+        """a configuration option has been added"""
+        self._option_changes.append(('added', optname))
+
+    def cmd_option_removed(self, optname):
+        """a configuration option has been removed"""
+        # can safely be ignored
+        #self._option_changes.append(('removed', optname))
+
+    def cmd_option_type_changed(self, optname, oldtype, newvalue):
+        """a configuration option's type has changed"""
+        self._option_changes.append(('typechanged', optname, oldtype, newvalue))
+
+    def cmd_add_cubes(self, cubes):
+        """modify the list of used cubes in the in-memory config
+        returns newly inserted cubes, including dependencies
+        """
+        if isinstance(cubes, basestring):
+            cubes = (cubes,)
+        origcubes = self.config.cubes()
+        newcubes = [p for p in self.config.expand_cubes(cubes)
+                       if not p in origcubes]
+        if newcubes:
+            for cube in cubes:
+                assert cube in newcubes
+            self.config.add_cubes(newcubes)
+        return newcubes
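+    # sketch with made-up cube names: add_cubes('forum') could return
+    # ['forum', 'comment'] if the forum cube depends on comment, since
+    # expand_cubes() pulls in dependencies as well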
+
+    def cmd_remove_cube(self, cube, removedeps=False):
+        if removedeps:
+            toremove = self.config.expand_cubes([cube])
+        else:
+            toremove = (cube,)
+        origcubes = self.config._cubes
+        basecubes = [c for c in origcubes if not c in toremove]
+        self.config._cubes = tuple(self.config.expand_cubes(basecubes))
+        removed = [p for p in origcubes if not p in self.config._cubes]
+        if not cube in removed:
+            raise ConfigurationError("can't remove cube %s, "
+                                     "used as a dependency" % cube)
+        return removed
+
+    def rewrite_configuration(self):
+        # import locally, show_diffs unavailable in gae environment
+        from cubicweb.toolsutils import show_diffs
+        configfile = self.config.main_config_file()
+        if self._option_changes:
+            read_old_config(self.config, self._option_changes, configfile)
+        fd, newconfig = tempfile.mkstemp()
+        for optdescr in self._option_changes:
+            if optdescr[0] == 'added':
+                optdict = self.config.get_option_def(optdescr[1])
+                if optdict.get('default') is REQUIRED:
+                    self.config.input_option(optdescr[1], optdict)
+        self.config.generate_config(open(newconfig, 'w'))
+        show_diffs(configfile, newconfig)
+        os.close(fd)
+        if exists(newconfig):
+            os.unlink(newconfig)
+
+
+from logging import getLogger
+from cubicweb import set_log_methods
+set_log_methods(MigrationHelper, getLogger('cubicweb.migration'))
--- a/misc/migration/bootstrapmigration_repository.py	Mon Dec 07 09:10:36 2009 -0800
+++ b/misc/migration/bootstrapmigration_repository.py	Tue Dec 08 10:40:20 2009 +0100
@@ -10,6 +10,45 @@
 
 applcubicwebversion, cubicwebversion = versions_map['cubicweb']
 
+if applcubicwebversion < (3, 6, 0) and cubicwebversion >= (3, 6, 0):
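+    # 3.6 moves permissions from CWRType onto the individual CWAttribute /
+    # CWRelation entities: the block below copies the existing group and RQL
+    # expression permissions over, then drops the old *_permission relation
+    # definitions on CWRType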
+    from cubicweb.server import schemaserial as ss
+    session.set_pool()
+    session.execute = session.unsafe_execute
+    permsdict = ss.deserialize_ertype_permissions(session)
+    def _add_relation_definition_no_perms(subjtype, rtype, objtype):
+        rschema = fsschema.rschema(rtype)
+        for query, args in ss.rdef2rql(rschema, subjtype, objtype, groupmap=None):
+            rql(query, args, ask_confirm=False)
+        checkpoint(ask_confirm=False)
+
+    config.disabled_hooks_categories.add('integrity')
+    for rschema in repo.schema.relations():
+        rpermsdict = permsdict.get(rschema.eid, {})
+        for rdef in rschema.rdefs.values():
+            for action in ('read', 'add', 'delete'):
+                actperms = []
+                for something in rpermsdict.get(action, ()):
+                    if isinstance(something, tuple):
+                        actperms.append(rdef.rql_expression(*something))
+                    else: # group name
+                        actperms.append(something)
+                rdef.set_action_permissions(action, actperms)
+    for action in ('read', 'add', 'delete'):
+        _add_relation_definition_no_perms('CWRelation', '%s_permission' % action, 'CWGroup')
+        _add_relation_definition_no_perms('CWRelation', '%s_permission' % action, 'RQLExpression')
+        _add_relation_definition_no_perms('CWAttribute', '%s_permission' % action, 'CWGroup')
+        _add_relation_definition_no_perms('CWAttribute', '%s_permission' % action, 'RQLExpression')
+    for action in ('read', 'add', 'delete'):
+        rql('SET X %s_permission Y WHERE X is IN (CWAttribute, CWRelation), '
+            'RT %s_permission Y, X relation_type RT, Y is CWGroup' % (action, action))
+        rql('INSERT RQLExpression Y: Y exprtype YET, Y mainvars YMV, Y expression YEX, '
+            'X %s_permission Y WHERE X is IN (CWAttribute, CWRelation), '
+            'X relation_type RT, RT %s_permission Y2, Y2 exprtype YET, '
+            'Y2 mainvars YMV, Y2 expression YEX' % (action, action))
+        drop_relation_definition('CWRType', '%s_permission' % action, 'CWGroup', commit=False)
+        drop_relation_definition('CWRType', '%s_permission' % action, 'RQLExpression')
+    config.disabled_hooks_categories.remove('integrity')
+
 if applcubicwebversion < (3, 4, 0) and cubicwebversion >= (3, 4, 0):
 
     session.set_shared_data('do-not-insert-cwuri', True)
--- a/server/hook.py	Mon Dec 07 09:10:36 2009 -0800
+++ b/server/hook.py	Tue Dec 08 10:40:20 2009 +0100
@@ -87,6 +87,19 @@
 VRegistry.REGISTRY_FACTORY['hooks'] = HooksRegistry
 
 
+def entity_oldnewvalue(entity, attr):
+    """returns the couple (old attr value, new attr value)
+    NOTE: will only work in a before_update_entity hook
+    """
+    # get new value and remove from local dict to force a db query to
+    # fetch old value
+    newvalue = entity.pop(attr, None)
+    oldvalue = getattr(entity, attr)
+    if newvalue is not None:
+        entity[attr] = newvalue
+    return oldvalue, newvalue
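+# minimal usage sketch (not part of this changeset), from inside a
+# before_update_entity hook; the attribute name is hypothetical:
+#   old_login, new_login = entity_oldnewvalue(self.entity, 'login')
+#   if old_login != new_login:
+#       ...  # react to the change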
+
+
 # some hook specific selectors #################################################
 
 @objectify_selector
--- a/server/hookhelper.py	Mon Dec 07 09:10:36 2009 -0800
+++ b/server/hookhelper.py	Tue Dec 08 10:40:20 2009 +0100
@@ -10,18 +10,12 @@
 from logilab.common.deprecation import deprecated, class_moved
 
 from cubicweb import RepositoryError
+from cubicweb.server import hook
 
+@deprecated('[3.6] entity_oldnewvalue should be imported from cw.server.hook')
 def entity_oldnewvalue(entity, attr):
-    """returns the couple (old attr value, new attr value)
-    NOTE: will only work in a before_update_entity hook
-    """
-    # get new value and remove from local dict to force a db query to
-    # fetch old value
-    newvalue = entity.pop(attr, None)
-    oldvalue = getattr(entity, attr)
-    if newvalue is not None:
-        entity[attr] = newvalue
-    return oldvalue, newvalue
+    """return the "name" attribute of the entity with the given eid"""
+    return hook.entity_oldnewvalue(entity, attr)
 
 @deprecated('[3.6] entity_name is deprecated, use entity.name')
 def entity_name(session, eid):
@@ -32,5 +26,4 @@
 def rproperty(session, rtype, eidfrom, eidto, rprop):
     return session.rproperty(rtype, eidfrom, eidto, rprop)
 
-from cubicweb.server.hook import SendMailOp
-SendMailOp = class_moved(SendMailOp)
+SendMailOp = class_moved(hook.SendMailOp)
--- a/server/migractions.py	Mon Dec 07 09:10:36 2009 -0800
+++ b/server/migractions.py	Tue Dec 08 10:40:20 2009 +0100
@@ -38,7 +38,7 @@
 from cubicweb.schema import (META_RTYPES, VIRTUAL_RTYPES,
                              CubicWebRelationSchema, order_eschemas)
 from cubicweb.dbapi import get_repository, repo_connect
-from cubicweb.common.migration import MigrationHelper, yes
+from cubicweb.migration import MigrationHelper, yes
 
 try:
     from cubicweb.server import SOURCE_TYPES, schemaserial as ss
@@ -263,7 +263,8 @@
     def _create_context(self):
         """return a dictionary to use as migration script execution context"""
         context = super(ServerMigrationHelper, self)._create_context()
-        context.update({'checkpoint': self.checkpoint,
+        context.update({'commit': self.checkpoint,
+                        'checkpoint': deprecated('[3.6] use commit')(self.checkpoint),
                         'sql': self.sqlexec,
                         'rql': self.rqlexec,
                         'rqliter': self.rqliter,
@@ -521,9 +522,9 @@
 
     # base actions ############################################################
 
-    def checkpoint(self):
+    def checkpoint(self, ask_confirm=True):
         """checkpoint action"""
-        if self.confirm('commit now ?', shell=False):
+        if not ask_confirm or self.confirm('commit now ?', shell=False):
             self.commit()
 
     def cmd_add_cube(self, cube, update_database=True):
@@ -686,12 +687,9 @@
             eschema = self.fs_schema.eschema(etype)
         confirm = self.verbosity >= 2
         # register the entity into CWEType
-        self.rqlexecall(ss.eschema2rql(eschema), ask_confirm=confirm)
+        self.rqlexecall(ss.eschema2rql(eschema, self.group_mapping()), ask_confirm=confirm)
         # add specializes relation if needed
         self.rqlexecall(ss.eschemaspecialize2rql(eschema), ask_confirm=confirm)
-        # register groups / permissions for the entity
-        self.rqlexecall(ss.erperms2rql(eschema, self.group_mapping()),
-                        ask_confirm=confirm)
         # register entity's attributes
         for rschema, attrschema in eschema.attribute_definitions():
             # ignore those meta relations, they will be automatically added
@@ -828,12 +826,9 @@
         # definitions
         self.rqlexecall(ss.rschema2rql(rschema, addrdef=False),
                         ask_confirm=self.verbosity>=2)
-        # register groups / permissions for the relation
-        self.rqlexecall(ss.erperms2rql(rschema, self.group_mapping()),
-                        ask_confirm=self.verbosity>=2)
         if addrdef:
             self.commit()
-            self.rqlexecall(ss.rdef2rql(rschema),
+            self.rqlexecall(ss.rdef2rql(rschema, groupmap=self.group_mapping()),
                             ask_confirm=self.verbosity>=2)
             if rtype in META_RTYPES:
                 # if the relation is in META_RTYPES, ensure we're adding it for
@@ -880,7 +875,7 @@
         rschema = self.fs_schema.rschema(rtype)
         if not rtype in self.repo.schema:
             self.cmd_add_relation_type(rtype, addrdef=False, commit=True)
-        self.rqlexecall(ss.rdef2rql(rschema, subjtype, objtype),
+        self.rqlexecall(ss.rdef2rql(rschema, subjtype, objtype, groupmap=self.group_mapping()),
                         ask_confirm=self.verbosity>=2)
         if commit:
             self.commit()
--- a/server/schemaserial.py	Mon Dec 07 09:10:36 2009 -0800
+++ b/server/schemaserial.py	Tue Dec 08 10:40:20 2009 +0100
@@ -308,14 +308,10 @@
             if pb is not None:
                 pb.update()
             continue
-        for rql, kwargs in erschema2rql(schema[ertype]):
+        for rql, kwargs in erschema2rql(schema[ertype], groupmap):
             if verbose:
                 print rql % kwargs
             cursor.execute(rql, kwargs)
-        for rql, kwargs in erperms2rql(schema[ertype], groupmap):
-            if verbose:
-                print rql
-            cursor.execute(rql, kwargs)
         if pb is not None:
             pb.update()
     for rql, kwargs in specialize2rql(schema):
@@ -399,17 +395,22 @@
     return relations, values
 
 
-def __rdef2rql(genmap, rschema, subjtype=None, objtype=None, props=None):
+def __rdef2rql(genmap, rschema, subjtype=None, objtype=None, props=None, groupmap=None):
     if subjtype is None:
         assert objtype is None
         assert props is None
-        targets = rschema.iter_rdefs()
+        targets = rschema.rdefs
     else:
         assert not objtype is None
         targets = [(subjtype, objtype)]
+    # relation schema
+    if rschema.final:
+        etype = 'CWAttribute'
+    else:
+        etype = 'CWRelation'
     for subjtype, objtype in targets:
         if props is None:
-            _props = rschema.rproperties(subjtype, objtype)
+            _props = rschema.rdef(subjtype, objtype)
         else:
             _props = props
         # don't serialize infered relations
@@ -418,6 +419,15 @@
         gen = genmap[rschema.final]
         for rql, values in gen(rschema, subjtype, objtype, _props):
             yield rql, values
+        # no groupmap means "no security insertion"
+        if groupmap:
+            for rql, args in _erperms2rql(_props, groupmap):
+                args['st'] = str(subjtype)
+                args['rt'] = str(rschema)
+                args['ot'] = str(objtype)
+                yield rql + 'X is %s, X from_entity ST, X to_entity OT, '\
+                      'X relation_type RT, RT name %%(rt)s, ST name %%(st)s, '\
+                      'OT name %%(ot)s' % etype, args
 
 
 def schema2rql(schema, skip=None, allow=None):
@@ -433,12 +443,12 @@
         return chain(*[erschema2rql(schema[t]) for t in all if t in allow])
     return chain(*[erschema2rql(schema[t]) for t in all])
 
-def erschema2rql(erschema):
+def erschema2rql(erschema, groupmap):
     if isinstance(erschema, schemamod.EntitySchema):
-        return eschema2rql(erschema)
+        return eschema2rql(erschema, groupmap)
     return rschema2rql(erschema)
 
-def eschema2rql(eschema):
+def eschema2rql(eschema, groupmap):
     """return a list of rql insert statements to enter an entity schema
     in the database as an CWEType entity
     """
@@ -446,6 +456,10 @@
     # NOTE: 'specializes' relation can't be inserted here since there's no
     # way to make sure the parent type is inserted before the child type
     yield 'INSERT CWEType X: %s' % ','.join(relations) , values
+    # serialize permissions defined on the entity schema
+    for rql, args in _erperms2rql(eschema, groupmap):
+        args['name'] = str(eschema)
+        yield rql + 'X is CWEType, X name %(name)s', args
 
 def specialize2rql(schema):
     for eschema in schema.entities():
@@ -458,7 +472,7 @@
         values = {'x': eschema.type, 'et': specialized_type.type}
         yield 'SET X specializes ET WHERE X name %(x)s, ET name %(et)s', values
 
-def rschema2rql(rschema, addrdef=True):
+def rschema2rql(rschema, addrdef=True, groupmap=None):
     """return a list of rql insert statements to enter a relation schema
     in the database as an CWRType entity
     """
@@ -467,12 +481,12 @@
     relations, values = rschema_relations_values(rschema)
     yield 'INSERT CWRType X: %s' % ','.join(relations), values
     if addrdef:
-        for rql, values in rdef2rql(rschema):
+        for rql, values in rdef2rql(rschema, groupmap=groupmap):
             yield rql, values
 
-def rdef2rql(rschema, subjtype=None, objtype=None, props=None):
+def rdef2rql(rschema, subjtype=None, objtype=None, props=None, groupmap=None):
     genmap = {True: frdef2rql, False: nfrdef2rql}
-    return __rdef2rql(genmap, rschema, subjtype, objtype, props)
+    return __rdef2rql(genmap, rschema, subjtype, objtype, props, groupmap)
 
 
 _LOCATE_RDEF_RQL0 = 'X relation_type ER,X from_entity SE,X to_entity OE'
@@ -496,7 +510,7 @@
 
 def rdefrelations2rql(rschema, subjtype, objtype, props):
     iterators = []
-    for constraint in props['constraints']:
+    for constraint in props.constraints:
         iterators.append(constraint2rql(rschema, subjtype, objtype, constraint))
     return chain(*iterators)
 
@@ -508,43 +522,8 @@
 CT name %(ctname)s, EDEF relation_type ER, EDEF from_entity SE, EDEF to_entity OE, \
 ER name %(rt)s, SE name %(se)s, OE name %(oe)s', values
 
-def perms2rql(schema, groupmapping):
-    """return rql insert statements to enter the schema's permissions in
-    the database as [read|add|delete|update]_permission relations between
-    CWEType/CWRType and CWGroup entities
 
-    groupmapping is a dictionnary mapping standard group names to
-    eids
-    """
-    for etype in sorted(schema.entities()):
-        yield erperms2rql(schema[etype], groupmapping)
-    for rtype in sorted(schema.relations()):
-        yield erperms2rql(schema[rtype], groupmapping)
-
-def erperms2rql(erschema, groupmapping):
-    if hasattr(erschema, 'iter_rdefs'):
-        # relation schema
-        if erschema.final:
-            etype = 'CWAttribute'
-        else:
-            etype = 'CWRelation'
-        for subject, object in erschema.iter_rdefs():
-            permissions = erschema.rproperty(subject, object, 'permissions')
-            for rql, args in _erperms2rql(erschema.rproperties(subject, object),
-                                          groupmapping):
-                args['st'] = str(subject)
-                args['rt'] = str(erschema)
-                args['ot'] = str(object)
-                yield rql + 'X is %s, X from_entity ST, X to_entity OT, '\
-                      'X relation_type RT, RT name %%(rt)s, ST name %%(st)s, '\
-                      'OT name %%(ot)s' % etype, args
-    else:
-        # entity schema
-        for rql, args in _erperms2rql(erschema, groupmapping):
-            args['name'] = str(erschema)
-            yield rql + 'X is CWEType, X name %(name)s', args
-
-def _erperms2rql(erschema, groupmapping):
+def _erperms2rql(erschema, groupmap):
     """return rql insert statements to enter the entity or relation
     schema's permissions in the database as
     [read|add|delete|update]_permission relations between CWEType/CWRType
@@ -556,7 +535,7 @@
                 # group
                 try:
                     yield ('SET X %s_permission Y WHERE Y eid %%(g)s, ' % action,
-                           {'g': groupmapping[group_or_rqlexpr]})
+                           {'g': groupmap[group_or_rqlexpr]})
                 except KeyError:
                     continue
             else:
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/data/migration/0.0.3_Any.py	Tue Dec 08 10:40:20 2009 +0100
@@ -0,0 +1,8 @@
+"""
+
+:organization: Logilab
+:copyright: 2001-2009 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
+"""
+coucou
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/data/migration/0.0.4_Any.py	Tue Dec 08 10:40:20 2009 +0100
@@ -0,0 +1,8 @@
+"""
+
+:organization: Logilab
+:copyright: 2001-2009 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
+"""
+coucou
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/data/migration/0.1.0_Any.py	Tue Dec 08 10:40:20 2009 +0100
@@ -0,0 +1,8 @@
+"""
+
+:organization: Logilab
+:copyright: 2001-2009 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
+"""
+coucou
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/data/migration/0.1.0_common.py	Tue Dec 08 10:40:20 2009 +0100
@@ -0,0 +1,7 @@
+"""common to all configuration
+
+:organization: Logilab
+:copyright: 2001-2009 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
+"""
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/data/migration/0.1.0_repository.py	Tue Dec 08 10:40:20 2009 +0100
@@ -0,0 +1,7 @@
+"""repository specific
+
+:organization: Logilab
+:copyright: 2001-2009 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
+"""
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/data/migration/0.1.0_web.py	Tue Dec 08 10:40:20 2009 +0100
@@ -0,0 +1,7 @@
+"""web only
+
+:organization: Logilab
+:copyright: 2001-2009 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
+"""
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/data/migration/0.1.2_Any.py	Tue Dec 08 10:40:20 2009 +0100
@@ -0,0 +1,8 @@
+"""
+
+:organization: Logilab
+:copyright: 2001-2009 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
+"""
+coucou
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/data/migration/depends.map	Tue Dec 08 10:40:20 2009 +0100
@@ -0,0 +1,5 @@
+0.0.2: 2.3.0
+0.0.3: 2.4.0
+# missing 0.0.4 entry, that's alright
+0.1.0: 2.6.0
+0.1.2: 2.10.0
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/data/server_migration/bootstrapmigration_repository.py	Tue Dec 08 10:40:20 2009 +0100
@@ -0,0 +1,7 @@
+"""allways executed before all others in server migration
+
+:organization: Logilab
+:copyright: 2001-2009 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
+"""
--- a/test/unittest_entity.py	Mon Dec 07 09:10:36 2009 -0800
+++ b/test/unittest_entity.py	Tue Dec 08 10:40:20 2009 +0100
@@ -405,7 +405,7 @@
         e['data_name'] = 'an html file'
         e['data_format'] = 'text/html'
         e['data_encoding'] = 'ascii'
-        e.req.transaction_data = {} # XXX req should be a session
+        e._cw.transaction_data = {} # XXX req should be a session
         self.assertEquals(set(e.get_words()),
                           set(['an', 'html', 'file', 'du', 'html', 'some', 'data']))
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/unittest_migration.py	Tue Dec 08 10:40:20 2009 +0100
@@ -0,0 +1,103 @@
+"""cubicweb.common.migration unit tests
+
+:organization: Logilab
+:copyright: 2001-2009 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+:license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
+"""
+
+from os.path import abspath
+from logilab.common.testlib import TestCase, unittest_main
+
+from cubicweb.devtools import TestServerConfiguration
+from cubicweb.cwconfig import CubicWebConfiguration
+from cubicweb.migration import MigrationHelper, filter_scripts
+from cubicweb.server.migractions import ServerMigrationHelper
+
+
+class Schema(dict):
+    def has_entity(self, e_type):
+        return self.has_key(e_type)
+
+SMIGRDIR = abspath('data/server_migration') + '/'
+TMIGRDIR = abspath('data/migration') + '/'
+
+class MigrTestConfig(TestServerConfiguration):
+    verbosity = 0
+    def migration_scripts_dir(cls):
+        return SMIGRDIR
+
+    def cube_migration_scripts_dir(cls, cube):
+        return TMIGRDIR
+
+class MigrationToolsTC(TestCase):
+    def setUp(self):
+        self.config = MigrTestConfig('data')
+        from yams.schema import Schema
+        self.config.load_schema = lambda expand_cubes=False: Schema('test')
+        self.config.__class__.cubicweb_appobject_path = frozenset()
+        self.config.__class__.cube_appobject_path = frozenset()
+
+    def test_filter_scripts_base(self):
+        self.assertListEquals(filter_scripts(self.config, SMIGRDIR, (2,3,0), (2,4,0)),
+                              [])
+        self.assertListEquals(filter_scripts(self.config, SMIGRDIR, (2,4,0), (2,5,0)),
+                              [((2, 5, 0), SMIGRDIR+'2.5.0_Any.sql')])
+        self.assertListEquals(filter_scripts(self.config, SMIGRDIR, (2,5,0), (2,6,0)),
+                              [((2, 6, 0), SMIGRDIR+'2.6.0_Any.sql')])
+        self.assertListEquals(filter_scripts(self.config, SMIGRDIR, (2,4,0), (2,6,0)),
+                              [((2, 5, 0), SMIGRDIR+'2.5.0_Any.sql'),
+                               ((2, 6, 0), SMIGRDIR+'2.6.0_Any.sql')])
+        self.assertListEquals(filter_scripts(self.config, SMIGRDIR, (2,5,0), (2,5,1)),
+                              [])
+        self.assertListEquals(filter_scripts(self.config, SMIGRDIR, (2,5,0), (2,10,2)),
+                              [((2, 6, 0), SMIGRDIR+'2.6.0_Any.sql'),
+                               ((2, 10, 2), SMIGRDIR+'2.10.2_Any.sql')])
+        self.assertListEquals(filter_scripts(self.config, SMIGRDIR, (2,5,1), (2,6,0)),
+                              [((2, 6, 0), SMIGRDIR+'2.6.0_Any.sql')])
+
+        self.assertListEquals(filter_scripts(self.config, TMIGRDIR, (0,0,2), (0,0,3)),
+                              [((0, 0, 3), TMIGRDIR+'0.0.3_Any.py')])
+        self.assertListEquals(filter_scripts(self.config, TMIGRDIR, (0,0,2), (0,0,4)),
+                              [((0, 0, 3), TMIGRDIR+'0.0.3_Any.py'),
+                               ((0, 0, 4), TMIGRDIR+'0.0.4_Any.py')])
+
+    def test_filter_scripts_for_mode(self):
+        config = CubicWebConfiguration('data')
+        config.verbosity = 0
+        self.assert_(not isinstance(config.migration_handler(), ServerMigrationHelper))
+        self.assertIsInstance(config.migration_handler(), MigrationHelper)
+        config = self.config
+        config.__class__.name = 'twisted'
+        self.assertListEquals(filter_scripts(config, TMIGRDIR, (0,0,4), (0,1,0)),
+                              [((0, 1, 0), TMIGRDIR+'0.1.0_common.py'),
+                               ((0, 1, 0), TMIGRDIR+'0.1.0_web.py')])
+        config.__class__.name = 'repository'
+        self.assertListEquals(filter_scripts(config, TMIGRDIR, (0,0,4), (0,1,0)),
+                              [((0, 1, 0), TMIGRDIR+'0.1.0_Any.py'),
+                               ((0, 1, 0), TMIGRDIR+'0.1.0_common.py'),
+                               ((0, 1, 0), TMIGRDIR+'0.1.0_repository.py')])
+        config.__class__.name = 'all-in-one'
+        self.assertListEquals(filter_scripts(config, TMIGRDIR, (0,0,4), (0,1,0)),
+                              [((0, 1, 0), TMIGRDIR+'0.1.0_Any.py'),
+                               ((0, 1, 0), TMIGRDIR+'0.1.0_common.py'),
+                               ((0, 1, 0), TMIGRDIR+'0.1.0_repository.py'),
+                               ((0, 1, 0), TMIGRDIR+'0.1.0_web.py')])
+        config.__class__.name = 'repository'
+
+
+from cubicweb.devtools import ApptestConfiguration, init_test_database, cleanup_sqlite
+
+class BaseCreationTC(TestCase):
+
+    def test_db_creation(self):
+        """make sure database can be created"""
+        config = ApptestConfiguration('data')
+        source = config.sources()['system']
+        self.assertEquals(source['db-driver'], 'sqlite')
+        cleanup_sqlite(source['db-name'], removetemplate=True)
+        init_test_database(config=config)
+
+
+if __name__ == '__main__':
+    unittest_main()
--- a/utils.py	Mon Dec 07 09:10:36 2009 -0800
+++ b/utils.py	Tue Dec 08 10:40:20 2009 +0100
@@ -411,7 +411,7 @@
             elif isinstance(obj, pydatetime.time):
                 return obj.strftime('%H:%M:%S')
             elif isinstance(obj, pydatetime.timedelta):
-                return '%10d.%s' % (obj.days, obj.seconds)
+                return (obj.days * 24 * 60 * 60) + obj.seconds
             elif isinstance(obj, decimal.Decimal):
                 return float(obj)
             try:
--- a/vregistry.py	Mon Dec 07 09:10:36 2009 -0800
+++ b/vregistry.py	Tue Dec 08 10:40:20 2009 +0100
@@ -64,7 +64,7 @@
         warn('[3.6] %s.%s: id is deprecated, use __regid__'
              % (cls.__module__, cls.__name__), DeprecationWarning)
         cls.__regid__ = cls.id
-    if hasattr(cls, 'id'):
+    if hasattr(cls, 'id') and not isinstance(cls.id, property):
         return cls.id
     return cls.__regid__
 
--- a/web/formfields.py	Mon Dec 07 09:10:36 2009 -0800
+++ b/web/formfields.py	Tue Dec 08 10:40:20 2009 +0100
@@ -583,27 +583,25 @@
     'subjschema rschema objschema' according to information found in the schema
     """
     fieldclass = None
-    card = eschema.cardinality(rschema, role)
+    rdef = eschema.rdef(rschema, role)
     if role == 'subject':
-        targetschema = rschema.objects(eschema)[0]
-        help = rschema.rproperty(eschema, targetschema, 'description')
+        targetschema = rdef.object
         if rschema.final:
-            if rschema.rproperty(eschema, targetschema, 'internationalizable'):
+            if rdef.internationalizable:
                 kwargs.setdefault('internationalizable', True)
             def get_default(form, es=eschema, rs=rschema):
                 return es.default(rs)
             kwargs.setdefault('initial', get_default)
     else:
-        targetschema = rschema.subjects(eschema)[0]
-        help = rschema.rproperty(targetschema, eschema, 'description')
-    kwargs['required'] = card in '1+'
+        targetschema = rdef.subject
+    kwargs['required'] = rdef.role_cardinality(role) in '1+'
     kwargs['name'] = rschema.type
     if role == 'object':
         kwargs.setdefault('label', (eschema.type, rschema.type + '_object'))
     else:
         kwargs.setdefault('label', (eschema.type, rschema.type))
     kwargs['eidparam'] = True
-    kwargs.setdefault('help', help)
+    kwargs.setdefault('help', rdef.description)
     if rschema.final:
         if skip_meta_attr and rschema in eschema.meta_attributes():
             return None
@@ -617,18 +615,16 @@
                 # use RichTextField instead of StringField if the attribute has
                 # a "format" metadata. But getting information from constraints
                 # may be useful anyway...
-                constraints = rschema.rproperty(eschema, targetschema, 'constraints')
-                for cstr in constraints:
+                for cstr in rdef.constraints:
                     if isinstance(cstr, StaticVocabularyConstraint):
                         raise Exception('rich text field with static vocabulary')
                 return RichTextField(**kwargs)
-            constraints = rschema.rproperty(eschema, targetschema, 'constraints')
             # init StringField parameters according to constraints
-            for cstr in constraints:
+            for cstr in rdef.constraints:
                 if isinstance(cstr, StaticVocabularyConstraint):
                     kwargs.setdefault('choices', cstr.vocabulary)
                     break
-            for cstr in constraints:
+            for cstr in rdef.constraints:
                 if isinstance(cstr, SizeConstraint) and cstr.max is not None:
                     kwargs['max_length'] = cstr.max
             return StringField(**kwargs)
--- a/web/uicfg.py	Mon Dec 07 09:10:36 2009 -0800
+++ b/web/uicfg.py	Tue Dec 08 10:40:20 2009 +0100
@@ -360,7 +360,7 @@
                     continue
                 rdef = rschema.role_rdef(eschema, tschema, role)
                 if not ((not strict and rdef.has_local_role(permission)) or
-                        rdef.has_perm(entity.req, permission, fromeid=eid)):
+                        rdef.has_perm(entity._cw, permission, fromeid=eid)):
                     continue
                 _targetschemas.append(tschema)
             if not _targetschemas:
--- a/web/views/autoform.py	Mon Dec 07 09:10:36 2009 -0800
+++ b/web/views/autoform.py	Tue Dec 08 10:40:20 2009 +0100
@@ -59,9 +59,9 @@
         try:
             return super(AutomaticEntityForm, cls_or_self).field_by_name(name, role)
         except form.FieldNotFound:
-            if eschema is None or not name in cls_or_self._cw.schema:
+            if eschema is None or not name in cls_or_self._cw.vreg.schema:
                 raise
-            rschema = cls_or_self._cw.schema.rschema(name)
+            rschema = cls_or_self._cw.vreg.schema.rschema(name)
             # XXX use a sample target type. Document this.
             tschemas = rschema.targets(eschema, role)
             fieldcls = cls_or_self.rfields.etype_get(eschema, rschema, role,