changeset:   7254:c0278ad81823
branch:      oldstable
author:      Sylvain Thénault <sylvain.thenault@logilab.fr>
date:        Wed, 27 Apr 2011 09:54:22 +0200
parent:      7178:a62f24e1497e (current diff)
parent:      7239:078cfa8a5187 (diff)
child:       7258:2e7f0d6fa2d6
child:       7265:8eedf71f280e
summary:     oldstable is now 3.11
files:       web/views/old_calendar.py
--- a/.hgtags	Tue Apr 05 08:39:49 2011 +0200
+++ b/.hgtags	Wed Apr 27 09:54:22 2011 +0200
@@ -182,3 +182,9 @@
 e581a86a68f089946a98c966ebca7aee58a5718f cubicweb-version-3.10.8
 132b525de25bc75ed6389c45aee77e847cb3a437 cubicweb-debian-version-3.10.8-1
 48f468f33704e401a8e7907e258bf1ac61eb8407 cubicweb-version-3.9.x
+37432cede4fe55b97fc2e9be0a2dd20e8837a848 cubicweb-version-3.11.0
+8daabda9f571863e8754f8ab722744c417ba3abf cubicweb-debian-version-3.11.0-1
+d0410eb4d8bbf657d7f32b0c681db09b1f8119a0 cubicweb-version-3.11.1
+77318f1ec4aae3523d455e884daf3708c3c79af7 cubicweb-debian-version-3.11.1-1
+56ae3cd5f8553678a2b1d4121b61241598d0ca68 cubicweb-version-3.11.2
+954b5b51cd9278eb45d66be1967064d01ab08453 cubicweb-debian-version-3.11.2-1
--- a/__pkginfo__.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/__pkginfo__.py	Wed Apr 27 09:54:22 2011 +0200
@@ -22,7 +22,7 @@
 
 modname = distname = "cubicweb"
 
-numversion = (3, 10, 9)
+numversion = (3, 11, 2)
 version = '.'.join(str(num) for num in numversion)
 
 description = "a repository of entities / relations for knowledge management"
@@ -40,10 +40,10 @@
 ]
 
 __depends__ = {
-    'logilab-common': '>= 0.54.0',
+    'logilab-common': '>= 0.55.2',
     'logilab-mtconverter': '>= 0.8.0',
     'rql': '>= 0.28.0',
-    'yams': '>= 0.30.1',
+    'yams': '>= 0.30.4',
     'docutils': '>= 0.6',
     #gettext                    # for xgettext, msgcat, etc...
     # web dependancies
@@ -52,7 +52,7 @@
     'Twisted': '',
     # XXX graphviz
     # server dependencies
-    'logilab-database': '>= 1.3.3',
+    'logilab-database': '>= 1.4.0',
     'pysqlite': '>= 2.5.5', # XXX install pysqlite2
     }
 
@@ -62,6 +62,7 @@
     'pycrypto': '',             # for crypto extensions
     'fyzz': '>= 0.1.0',         # for sparql
     'vobject': '>= 0.6.0',      # for ical view
+    'rdflib': None,             #
     #'Products.FCKeditor':'',
     #'SimpleTAL':'>= 4.1.6',
     }
--- a/cwconfig.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/cwconfig.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -326,7 +326,8 @@
          {'type' : 'string',
           'default': '',
           'help': 'Pyro name server\'s host. If not set, will be detected by a \
-broadcast query. It may contains port information using <host>:<port> notation.',
+broadcast query. It may contains port information using <host>:<port> notation. \
+Use "NO_PYRONS" to create a Pyro server but not register to a pyro nameserver',
           'group': 'pyro', 'level': 1,
           }),
         ('pyro-ns-group',
@@ -844,9 +845,8 @@
         if not exists(_INSTANCES_DIR):
             os.makedirs(_INSTANCES_DIR)
 
-    # for some commands (creation...) we don't want to initialize gettext
-    set_language = True
-    # set this to true to allow somethings which would'nt be possible
+    # set to true during repair (shell, migration) to allow some things which
+    # wouldn't be possible otherwise
     repairing = False
 
     options = CubicWebNoAppConfiguration.options + (
@@ -901,13 +901,13 @@
         return mdir
 
     @classmethod
-    def config_for(cls, appid, config=None, debugmode=False):
+    def config_for(cls, appid, config=None, debugmode=False, creating=False):
         """return a configuration instance for the given instance identifier
         """
         cls.load_available_configs()
         config = config or guess_configuration(cls.instance_home(appid))
         configcls = configuration_cls(config)
-        return configcls(appid, debugmode)
+        return configcls(appid, debugmode, creating)
 
     @classmethod
     def possible_configurations(cls, appid):
@@ -967,8 +967,6 @@
             log_path = os.path.join(_INSTALL_PREFIX, 'var', 'log', 'cubicweb', '%s-%s.log')
             return log_path % (self.appid, self.name)
 
-
-
     def default_pid_file(self):
         """return default path to the pid file of the instance'server"""
         if self.mode == 'system':
@@ -986,8 +984,10 @@
 
     # instance methods used to get instance specific resources #############
 
-    def __init__(self, appid, debugmode=False):
+    def __init__(self, appid, debugmode=False, creating=False):
         self.appid = appid
+        # set to true while creating an instance
+        self.creating = creating
         super(CubicWebConfiguration, self).__init__(debugmode)
         fake_gettext = (unicode, lambda ctx, msgid: unicode(msgid))
         for lang in self.available_languages():
@@ -1078,7 +1078,7 @@
     def load_configuration(self):
         """load instance's configuration files"""
         super(CubicWebConfiguration, self).load_configuration()
-        if self.apphome and self.set_language:
+        if self.apphome and not self.creating:
             # init gettext
             self._gettext_init()
 
--- a/cwctl.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/cwctl.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -300,6 +300,11 @@
                     print '* cube %s version %s is installed, but version %s is required by %s' % (
                         cube, cfgpb.cubes[cube], version, src)
 
+def check_options_consistency(config):
+    if config.automatic and config.config_level > 0:
+        raise BadCommandUsage('--automatic and --config-level should not be '
+                              'used together')
+
 class CreateInstanceCommand(Command):
     """Create an instance from a cube. This is an unified
     command which can handle web / server / all-in-one installation
@@ -309,7 +314,7 @@
     <cube>
       the name of cube to use (list available cube names using
       the "list" command). You can use several cubes by separating
-      them using comma (e.g. 'jpl,eemail')
+      them using comma (e.g. 'jpl,email')
     <instance>
       an identifier for the instance to create
     """
@@ -317,34 +322,39 @@
     arguments = '<cube> <instance>'
     min_args = max_args = 2
     options = (
-        ("config-level",
+        ('automatic',
+         {'short': 'a', 'action' : 'store_true',
+          'default': False,
+          'help': 'automatic mode: never ask and use default answer to every '
+          'question. this may require that your login match a database super '
+          'user (allowed to create database & all).',
+          }),
+        ('config-level',
          {'short': 'l', 'type' : 'int', 'metavar': '<level>',
           'default': 0,
-          'help': 'configuration level (0..2): 0 will ask for essential \
-configuration parameters only while 2 will ask for all parameters',
-          }
-         ),
-        ("config",
+          'help': 'configuration level (0..2): 0 will ask for essential '
+          'configuration parameters only while 2 will ask for all parameters',
+          }),
+        ('config',
          {'short': 'c', 'type' : 'choice', 'metavar': '<install type>',
           'choices': ('all-in-one', 'repository', 'twisted'),
           'default': 'all-in-one',
-          'help': 'installation type, telling which part of an instance \
-should be installed. You can list available configurations using the "list" \
-command. Default to "all-in-one", e.g. an installation embedding both the RQL \
-repository and the web server.',
-          }
-         ),
+          'help': 'installation type, telling which part of an instance '
+          'should be installed. You can list available configurations using the'
+          ' "list" command. Default to "all-in-one", e.g. an installation '
+          'embedding both the RQL repository and the web server.',
+          }),
         )
 
     def run(self, args):
         """run the command with its specific arguments"""
         from logilab.common.textutils import splitstrip
+        check_options_consistency(self.config)
         configname = self.config.config
         cubes, appid = args
         cubes = splitstrip(cubes)
         # get the configuration and helper
-        config = cwcfg.config_for(appid, configname)
-        config.set_language = False
+        config = cwcfg.config_for(appid, configname, creating=True)
         cubes = config.expand_cubes(cubes)
         config.init_cubes(cubes)
         helper = self.config_helper(config)
@@ -361,31 +371,35 @@
         print '\n'+underline_title('Creating the instance %s' % appid)
         create_dir(config.apphome)
         # cubicweb-ctl configuration
-        print '\n'+underline_title('Configuring the instance (%s.conf)' % configname)
-        config.input_config('main', self.config.config_level)
+        if not self.config.automatic:
+            print '\n'+underline_title('Configuring the instance (%s.conf)'
+                                       % configname)
+            config.input_config('main', self.config.config_level)
         # configuration'specific stuff
         print
-        helper.bootstrap(cubes, self.config.config_level)
+        helper.bootstrap(cubes, self.config.automatic, self.config.config_level)
         # input for cubes specific options
-        for section in set(sect.lower() for sect, opt, optdict in config.all_options()
-                           if optdict.get('level') <= self.config.config_level):
-            if section not in ('main', 'email', 'pyro'):
+        sections = set(sect.lower() for sect, opt, odict in config.all_options()
+                       if 'type' in odict
+                       and odict.get('level') <= self.config.config_level)
+        for section in sections:
+            if section not in ('main', 'email', 'pyro', 'web'):
                 print '\n' + underline_title('%s options' % section)
                 config.input_config(section, self.config.config_level)
         # write down configuration
         config.save()
         self._handle_win32(config, appid)
-        print '-> generated %s' % config.main_config_file()
+        print '-> generated config %s' % config.main_config_file()
         # handle i18n files structure
         # in the first cube given
-        print '-> preparing i18n catalogs'
         from cubicweb import i18n
         langs = [lang for lang, _ in i18n.available_catalogs(join(templdirs[0], 'i18n'))]
         errors = config.i18ncompile(langs)
         if errors:
             print '\n'.join(errors)
-            if not ASK.confirm('error while compiling message catalogs, '
-                               'continue anyway ?'):
+            if self.config.automatic \
+                   or not ASK.confirm('error while compiling message catalogs, '
+                                      'continue anyway ?'):
                 print 'creation not completed'
                 return
         # create the additional data directory for this instance
@@ -397,8 +411,8 @@
             # this directory should be owned by the uid of the server process
             print 'set %s as owner of the data directory' % config['uid']
             chown(config.appdatahome, config['uid'])
-        print '\n-> creation done for %r.\n' % config.apphome
-        helper.postcreate()
+        print '\n-> creation done for %s\n' % repr(config.apphome)[1:-1]
+        helper.postcreate(self.config.automatic)
 
     def _handle_win32(self, config, appid):
         if sys.platform != 'win32':
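
The new ``automatic`` option above gives ``cubicweb-ctl create`` a fully
non-interactive mode. A sketch of a possible invocation (``blog`` and
``myinstance`` are placeholder cube and instance names)::

  # assumes your login maps to a database superuser, as the option help notes;
  # --automatic and --config-level cannot be combined (check_options_consistency)
  cubicweb-ctl create --automatic blog myinstance
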
--- a/cwvreg.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/cwvreg.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
--- a/dataimport.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/dataimport.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -19,12 +19,11 @@
 """This module provides tools to import tabular data.
 
 
-
 Example of use (run this with `cubicweb-ctl shell instance import-script.py`):
 
 .. sourcecode:: python
 
-  from cubicweb.devtools.dataimport import *
+  from cubicweb.dataimport import *
   # define data generators
   GENERATORS = []
 
@@ -36,12 +35,11 @@
   def gen_users(ctl):
       for row in ctl.iter_and_commit('utilisateurs'):
           entity = mk_entity(row, USERS)
-          entity['upassword'] = u'motdepasse'
+          entity['upassword'] = 'motdepasse'
           ctl.check('login', entity['login'], None)
-          ctl.store.add('CWUser', entity)
-          email = {'address': row['email']}
-          ctl.store.add('EmailAddress', email)
-          ctl.store.relate(entity['eid'], 'use_email', email['eid'])
+          entity = ctl.store.create_entity('CWUser', **entity)
+          email = ctl.store.create_entity('EmailAddress', address=row['email'])
+          ctl.store.relate(entity.eid, 'use_email', email.eid)
           ctl.store.rql('SET U in_group G WHERE G name "users", U eid %(x)s', {'x':entity['eid']})
 
   CHK = [('login', check_doubles, 'Utilisateurs Login',
@@ -74,14 +72,18 @@
 import os.path as osp
 from StringIO import StringIO
 from copy import copy
+from datetime import datetime
 
-from logilab.common import shellutils
+from logilab.common import shellutils, attrdict
 from logilab.common.date import strptime
 from logilab.common.decorators import cached
 from logilab.common.deprecation import deprecated
 
+from cubicweb import QueryError
+from cubicweb.schema import META_RTYPES, VIRTUAL_RTYPES
 from cubicweb.server.utils import eschema_eid
-from cubicweb.server.ssplanner import EditedEntity
+from cubicweb.server.edition import EditedEntity
+
 
 def count_lines(stream_or_filename):
     if isinstance(stream_or_filename, basestring):
@@ -145,6 +147,22 @@
     for row in reader:
         yield dict(zip(header, row))
 
+def lazydbtable(cu, table, headers, orderby=None):
+    """return an iterator on rows of a sql table. On each row, fetch columns
+    defined in headers and return values as a dictionary.
+
+    >>> data = lazydbtable(cu, 'experimentation', ('id', 'nickname', 'gps'))
+    """
+    sql = 'SELECT %s FROM %s' % (','.join(headers), table,)
+    if orderby:
+        sql += ' ORDER BY %s' % ','.join(orderby)
+    cu.execute(sql)
+    while True:
+        row = cu.fetchone()
+        if row is None:
+            break
+        yield dict(zip(headers, row))
+
 def mk_entity(row, map):
     """Return a dict made from sanitized mapped values.
 
@@ -171,10 +189,9 @@
                 if res[dest] is None:
                     break
         except ValueError, err:
-            raise ValueError('error with %r field: %s' % (src, err))
+            raise ValueError('error with %r field: %s' % (src, err)), None, sys.exc_info()[-1]
     return res
 
-
 # user interactions ############################################################
 
 def tell(msg):
@@ -287,11 +304,9 @@
     But it will not enforce the constraints of the schema and hence will miss some problems
 
     >>> store = ObjectStore()
-    >>> user = {'login': 'johndoe'}
-    >>> store.add('CWUser', user)
-    >>> group = {'name': 'unknown'}
-    >>> store.add('CWUser', group)
-    >>> store.relate(user['eid'], 'in_group', group['eid'])
+    >>> user = store.create_entity('CWUser', login=u'johndoe')
+    >>> group = store.create_entity('CWUser', name=u'unknown')
+    >>> store.relate(user.eid, 'in_group', group.eid)
     """
     def __init__(self):
         self.items = []
@@ -307,7 +322,8 @@
         return len(self.items) - 1
 
     def create_entity(self, etype, **data):
-        data['eid'] =  eid = self._put(etype, data)
+        data = attrdict(data)
+        data['eid'] = eid = self._put(etype, data)
         self.eids[eid] = data
         self.types.setdefault(etype, []).append(eid)
         return data
@@ -473,6 +489,11 @@
         self.rql('SET X %s Y WHERE X eid %%(x)s, Y eid %%(y)s' % rtype,
                  {'x': int(eid_from), 'y': int(eid_to)})
 
+    def find_entities(self, *args, **kwargs):
+        return self.session.find_entities(*args, **kwargs)
+
+    def find_one_entity(self, *args, **kwargs):
+        return self.session.find_one_entity(*args, **kwargs)
 
 # the import controller ########################################################
 
@@ -523,6 +544,10 @@
 
     def run(self):
         self.errors = {}
+        if self.commitevery is None:
+            self.tell('Will commit all or nothing.')
+        else:
+            self.tell('Will commit every %s iterations' % self.commitevery)
         for func, checks in self.generators:
             self._checks = {}
             func_name = func.__name__
@@ -541,7 +566,12 @@
                     err = func(buckets)
                     if err:
                         self.errors[title] = (help, err)
-        txuuid = self.store.commit()
+        try:
+            txuuid = self.store.commit()
+            if txuuid is not None:
+                self.tell('Transaction commited (txuuid: %s)' % txuuid)
+        except QueryError, ex:
+            self.tell('Transaction aborted: %s' % ex)
         self._print_stats()
         if self.errors:
             if self.askerror == 2 or (self.askerror and confirm('Display errors ?')):
@@ -549,10 +579,9 @@
                 for errkey, error in self.errors.items():
                     self.tell("\n%s (%s): %d\n" % (error[0], errkey, len(error[1])))
                     self.tell(pformat(sorted(error[1])))
-        if txuuid is not None:
-            print 'transaction id:', txuuid
+
     def _print_stats(self):
-        nberrors = sum(len(err[1]) for err in self.errors.values())
+        nberrors = sum(len(err) for err in self.errors.values())
         self.tell('\nImport statistics: %i entities, %i types, %i relations and %i errors'
                   % (self.store.nb_inserted_entities,
                      self.store.nb_inserted_types,
@@ -589,11 +618,6 @@
                                   self.get_data(datakey))
 
 
-
-from datetime import datetime
-from cubicweb.schema import META_RTYPES, VIRTUAL_RTYPES
-
-
 class NoHookRQLObjectStore(RQLObjectStore):
     """ObjectStore that works with an actual RQL repository (production mode)"""
     _rql = None # bw compat
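
The module docstring above switches from the old ``store.add()`` dictionaries to
``store.create_entity()``. A condensed sketch of the updated store API (entity
attributes are illustrative only):

.. sourcecode:: python

  from cubicweb.dataimport import ObjectStore

  store = ObjectStore()
  # create_entity() now returns an attrdict, so eids are reachable as attributes
  user = store.create_entity('CWUser', login=u'johndoe', upassword=u'secret')
  email = store.create_entity('EmailAddress', address=u'johndoe@example.org')
  store.relate(user.eid, 'use_email', email.eid)
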
--- a/dbapi.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/dbapi.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -102,11 +102,14 @@
         return Repository(config, vreg=vreg)
     else: # method == 'pyro'
         # resolve the Pyro object
-        from logilab.common.pyro_ext import ns_get_proxy
+        from logilab.common.pyro_ext import ns_get_proxy, get_proxy
         pyroid = database or config['pyro-instance-id'] or config.appid
         try:
-            return ns_get_proxy(pyroid, defaultnsgroup=config['pyro-ns-group'],
-                                nshost=config['pyro-ns-host'])
+            if config['pyro-ns-host'] == 'NO_PYRONS':
+                return get_proxy(pyroid)
+            else:
+                return ns_get_proxy(pyroid, defaultnsgroup=config['pyro-ns-group'],
+                                    nshost=config['pyro-ns-host'])
         except Exception, ex:
             raise ConnectionError(str(ex))
 
@@ -233,11 +236,10 @@
         return False
 
 class DBAPISession(object):
-    def __init__(self, cnx, login=None, authinfo=None):
+    def __init__(self, cnx, login=None):
         self.cnx = cnx
         self.data = {}
         self.login = login
-        self.authinfo = authinfo
         # dbapi session identifier is the same as the first connection
         # identifier, but may later differ in case of auto-reconnection as done
         # by the web authentication manager (in cw.web.views.authentication)
@@ -602,9 +604,8 @@
             req = self.request()
         rset = req.eid_rset(eid, 'CWUser')
         if self.vreg is not None and 'etypes' in self.vreg:
-            user = self.vreg['etypes'].etype_class('CWUser')(req, rset, row=0,
-                                                             groups=groups,
-                                                             properties=properties)
+            user = self.vreg['etypes'].etype_class('CWUser')(
+                req, rset, row=0, groups=groups, properties=properties)
         else:
             from cubicweb.entity import Entity
             user = Entity(req, rset, row=0)
--- a/debian/changelog	Tue Apr 05 08:39:49 2011 +0200
+++ b/debian/changelog	Wed Apr 27 09:54:22 2011 +0200
@@ -1,3 +1,21 @@
+cubicweb (3.11.2-1) unstable; urgency=low
+
+  * new upstream release 
+
+ -- Nicolas Chauvat <nicolas.chauvat@logilab.fr>  Mon, 28 Mar 2011 19:18:54 +0200
+
+cubicweb (3.11.1-1) unstable; urgency=low
+
+  * new upstream release
+
+ -- Sylvain Thénault <sylvain.thenault@logilab.fr>  Mon, 07 Mar 2011 17:21:28 +0100
+
+cubicweb (3.11.0-1) unstable; urgency=low
+
+  * new upstream release
+
+ -- Sylvain Thénault <sylvain.thenault@logilab.fr>  Fri, 18 Feb 2011 10:27:22 +0100
+
 cubicweb (3.10.8-1) unstable; urgency=low
 
   * new upstream release
--- a/debian/control	Tue Apr 05 08:39:49 2011 +0200
+++ b/debian/control	Wed Apr 27 09:54:22 2011 +0200
@@ -33,7 +33,7 @@
 Conflicts: cubicweb-multisources
 Replaces: cubicweb-multisources
 Provides: cubicweb-multisources
-Depends: ${misc:Depends}, ${python:Depends}, cubicweb-common (= ${source:Version}), cubicweb-ctl (= ${source:Version}), python-logilab-database (>= 1.3.3), cubicweb-postgresql-support | cubicweb-mysql-support | python-pysqlite2
+Depends: ${misc:Depends}, ${python:Depends}, cubicweb-common (= ${source:Version}), cubicweb-ctl (= ${source:Version}), python-logilab-database (>= 1.4.0), cubicweb-postgresql-support | cubicweb-mysql-support | python-pysqlite2
 Recommends: pyro (<< 4.0.0), cubicweb-documentation (= ${source:Version})
 Description: server part of the CubicWeb framework
  CubicWeb is a semantic web application framework.
@@ -83,7 +83,7 @@
 Architecture: all
 XB-Python-Version: ${python:Versions}
 Depends: ${misc:Depends}, ${python:Depends}, cubicweb-common (= ${source:Version}), python-simplejson (>= 1.3)
-Recommends: python-docutils, python-vobject, fckeditor, python-fyzz, python-imaging
+Recommends: python-docutils, python-vobject, fckeditor, python-fyzz, python-imaging, python-rdflib
 Description: web interface library for the CubicWeb framework
  CubicWeb is a semantic web application framework.
  .
@@ -97,7 +97,7 @@
 Package: cubicweb-common
 Architecture: all
 XB-Python-Version: ${python:Versions}
-Depends: ${misc:Depends}, ${python:Depends}, graphviz, gettext, python-logilab-mtconverter (>= 0.8.0), python-logilab-common (>= 0.54.0), python-yams (>= 0.30.1), python-rql (>= 0.28.0), python-lxml
+Depends: ${misc:Depends}, ${python:Depends}, graphviz, gettext, python-logilab-mtconverter (>= 0.8.0), python-logilab-common (>= 0.55.2), python-yams (>= 0.30.4), python-rql (>= 0.28.0), python-lxml
 Recommends: python-simpletal (>= 4.0), python-crypto
 Conflicts: cubicweb-core
 Replaces: cubicweb-core
--- a/debian/copyright	Tue Apr 05 08:39:49 2011 +0200
+++ b/debian/copyright	Wed Apr 27 09:54:22 2011 +0200
@@ -8,7 +8,7 @@
 
 Copyright:
 
-    Copyright (c) 2003-2010 LOGILAB S.A. (Paris, FRANCE).
+    Copyright (c) 2003-2011 LOGILAB S.A. (Paris, FRANCE).
     http://www.logilab.fr/ -- mailto:contact@logilab.fr
 
 License:
--- a/devtools/__init__.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/devtools/__init__.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -127,10 +127,9 @@
 
 class TestServerConfiguration(ServerConfiguration):
     mode = 'test'
-    set_language = False
     read_instance_schema = False
     init_repository = True
-
+    skip_db_create_and_restore = False
     def __init__(self, appid='data', apphome=None, log_threshold=logging.CRITICAL+10):
         # must be set before calling parent __init__
         if apphome is None:
@@ -233,6 +232,11 @@
 
 # XXX merge with BaseApptestConfiguration ?
 class ApptestConfiguration(BaseApptestConfiguration):
+    # `skip_db_create_and_restore` controls wether or not the test database
+    # should be created / backuped / restored. If set to True, those
+    # steps are completely skipped, the database is used as is and is
+    # considered initialized
+    skip_db_create_and_restore = False
 
     def __init__(self, appid, apphome=None,
                  log_threshold=logging.CRITICAL, sourcefile=None):
@@ -261,6 +265,7 @@
               self.view('foaf', rset)
 
     """
+    skip_db_create_and_restore = True
     read_instance_schema = True # read schema from database
 
 
@@ -294,9 +299,12 @@
 
     def absolute_backup_file(self, db_id, suffix):
         """Path for config backup of a given database id"""
-        dbname = self.dbname.replace('-', '_')
+        # in case db name is an absolute path, we don't want to replace anything
+        # in parent directories
+        directory, basename = split(self.dbname)
+        dbname = basename.replace('-', '_')
         assert '.' not in db_id
-        filename = '%s-%s.%s' % (dbname, db_id, suffix)
+        filename = join(directory, '%s-%s.%s' % (dbname, db_id, suffix))
         return join(self._ensure_test_backup_db_dir(), filename)
 
     def db_cache_key(self, db_id, dbname=None):
@@ -407,9 +415,9 @@
     def dbname(self):
         return self.system_source['db-name']
 
-    def init_test_database():
+    def init_test_database(self):
         """actual initialisation of the database"""
-        raise ValueError('no initialization function for driver %r' % driver)
+        raise ValueError('no initialization function for driver %r' % self.DRIVER)
 
     def has_cache(self, db_id):
         """Check if a given database id exist in cb cache for the current config"""
@@ -478,13 +486,38 @@
             cnx.close()
         self.backup_database(test_db_id)
 
+
+class NoCreateDropDatabaseHandler(TestDataBaseHandler):
+    """This handler is used if config.skip_db_create_and_restore is True
+
+    This is typically the case with RealDBConfig. In that case,
+    we explicitely want to skip init / backup / restore phases.
+
+    This handler redefines the three corresponding methods and delegates
+    to original handler for any other method / attribute
+    """
+
+    def __init__(self, base_handler):
+        self.base_handler = base_handler
+
+    # override init / backup / restore methods
+    def init_test_database(self):
+        pass
+
+    def backup_database(self, db_id):
+        pass
+
+    def restore_database(self, db_id):
+        pass
+
+    # delegate to original handler in all other cases
+    def __getattr__(self, attrname):
+        return getattr(self.base_handler, attrname)
+
+
 ### postgres test database handling ############################################
 
 class PostgresTestDataBaseHandler(TestDataBaseHandler):
-
-    # XXX
-    # XXX PostgresTestDataBaseHandler Have not been tested at all.
-    # XXX
     DRIVER = 'postgres'
 
     @property
@@ -497,24 +530,31 @@
     @cached
     def dbcnx(self):
         from cubicweb.server.serverctl import _db_sys_cnx
-        return  _db_sys_cnx(self.system_source, 'CREATE DATABASE and / or USER', verbose=0)
+        return  _db_sys_cnx(self.system_source, 'CREATE DATABASE and / or USER',
+                            interactive=False)
 
     @property
     @cached
     def cursor(self):
         return self.dbcnx.cursor()
 
+    def process_cache_entry(self, directory, dbname, db_id, entry):
+        backup_name = self._backup_name(db_id)
+        if backup_name in self.helper.list_databases(self.cursor):
+            return backup_name
+        return None
+
     def init_test_database(self):
-        """initialize a fresh postgresql databse used for testing purpose"""
+        """initialize a fresh postgresql database used for testing purpose"""
         from cubicweb.server import init_repository
         from cubicweb.server.serverctl import system_source_cnx, createdb
         # connect on the dbms system base to create our base
         try:
             self._drop(self.dbname)
-
             createdb(self.helper, self.system_source, self.dbcnx, self.cursor)
             self.dbcnx.commit()
-            cnx = system_source_cnx(self.system_source, special_privs='LANGUAGE C', verbose=0)
+            cnx = system_source_cnx(self.system_source, special_privs='LANGUAGE C',
+                                    interactive=False)
             templcursor = cnx.cursor()
             try:
                 # XXX factorize with db-create code
@@ -554,7 +594,6 @@
 
     def _drop(self, db_name):
         if db_name in self.helper.list_databases(self.cursor):
-            #print 'dropping overwritted database:', db_name
             self.cursor.execute('DROP DATABASE %s' % db_name)
             self.dbcnx.commit()
 
@@ -566,7 +605,6 @@
         orig_name = self.system_source['db-name']
         try:
             backup_name = self._backup_name(db_id)
-            #print 'storing postgres backup as', backup_name
             self._drop(backup_name)
             self.system_source['db-name'] = backup_name
             createdb(self.helper, self.system_source, self.dbcnx, self.cursor, template=orig_name)
@@ -580,7 +618,6 @@
         """Actual restore of the current database.
 
         Use the value tostored in db_cache as input """
-        #print 'restoring postgrest backup from', backup_coordinates
         self._drop(self.dbname)
         createdb(self.helper, self.system_source, self.dbcnx, self.cursor,
                  template=backup_coordinates)
@@ -599,7 +636,7 @@
         """initialize a fresh sqlserver databse used for testing purpose"""
         if self.config.init_repository:
             from cubicweb.server import init_repository
-            init_repository(config, interactive=False, drop=True)
+            init_repository(self.config, interactive=False, drop=True)
 
 ### sqlite test database handling ##############################################
 
@@ -646,7 +683,6 @@
         # remove database file if it exists ?
         dbfile = self.absolute_dbfile()
         self._cleanup_database(dbfile)
-        #print 'resto from', backup_coordinates
         shutil.copy(backup_coordinates, dbfile)
         repo = self.get_repo()
 
@@ -753,6 +789,8 @@
     handlerkls = HANDLERS.get(driver, None)
     if handlerkls is not None:
         handler = handlerkls(config)
+        if config.skip_db_create_and_restore:
+            handler = NoCreateDropDatabaseHandler(handler)
         HCACHE.set(config, handler)
         return handler
     else:
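
A minimal sketch of how a test configuration could opt out of the database
create/backup/restore cycle using the new flag (the class name is illustrative):

.. sourcecode:: python

  from cubicweb.devtools import ApptestConfiguration

  class ExistingDBConfiguration(ApptestConfiguration):
      # the database is used as-is and considered already initialized; the
      # handler factory above then wraps the driver handler in
      # NoCreateDropDatabaseHandler
      skip_db_create_and_restore = True
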
--- a/devtools/cwwindmill.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/devtools/cwwindmill.py	Wed Apr 27 09:54:22 2011 +0200
@@ -86,9 +86,10 @@
 
             test_dir = __file__
 
-        Instead of toggle `edit_test` value, try `pytest -i`
+        Instead of toggle `edit_test` value, try `python <test script> -f`
         """
         browser = 'firefox'
+
         edit_test = "-i" in sys.argv # detection for pytest invocation
         # Windmill use case are written with no anonymous user
         anonymous_allowed = False
--- a/devtools/fake.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/devtools/fake.py	Wed Apr 27 09:54:22 2011 +0200
@@ -25,6 +25,8 @@
 from cubicweb.req import RequestSessionBase
 from cubicweb.cwvreg import CubicWebVRegistry
 from cubicweb.web.request import CubicWebRequestBase
+from cubicweb.web.http_headers import Headers
+
 from cubicweb.devtools import BASE_URL, BaseApptestConfiguration
 
 
@@ -59,8 +61,14 @@
         self._url = kwargs.pop('url', 'view?rql=Blop&vid=blop')
         super(FakeRequest, self).__init__(*args, **kwargs)
         self._session_data = {}
-        self._headers = {}
+        self._headers_in = Headers()
 
+    def set_cookie(self, cookie, key, maxage=300, expires=None):
+        super(FakeRequest, self).set_cookie(cookie, key, maxage=300, expires=None)
+        cookie = self.get_response_header('Set-Cookie')
+        self._headers_in.setHeader('Cookie', cookie)
+
+    ## Implement request abstract API
     def header_accept_language(self):
         """returns an ordered list of preferred languages"""
         return ('en',)
@@ -81,48 +89,32 @@
             return url
         return url.split('?', 1)[0]
 
-    def set_content_type(self, content_type, filename=None, encoding=None):
-        """set output content type for this request. An optional filename
-        may be given
+    def get_header(self, header, default=None, raw=True):
+        """return the value associated with the given input header, raise
+        KeyError if the header is not set
         """
-        pass
-
-    def set_header(self, header, value, raw=True):
-        """set an output HTTP header"""
-        self._headers[header] = value
+        if raw:
+            return self._headers_in.getRawHeaders(header, [default])[0]
+        return self._headers_in.getHeader(header, default)
 
-    def add_header(self, header, value):
-        """set an output HTTP header"""
-        self._headers[header] = value # XXX
+    ## extend request API to control headers in / out values
+    def set_request_header(self, header, value, raw=False):
+        """set an input HTTP header"""
+        if isinstance(value, basestring):
+            value = [value]
+        if raw:
+            self._headers_in.setRawHeaders(header, value)
+        else:
+            self._headers_in.setHeader(header, value)
 
-    def remove_header(self, header):
-        """remove an output HTTP header"""
-        self._headers.pop(header, None)
-
-    def get_header(self, header, default=None):
+    def get_response_header(self, header, default=None, raw=False):
         """return the value associated with the given input header,
         raise KeyError if the header is not set
         """
-        return self._headers.get(header, default)
-
-    def set_cookie(self, cookie, key, maxage=300, expires=None):
-        """set / update a cookie key
-
-        by default, cookie will be available for the next 5 minutes
-        """
-        morsel = cookie[key]
-        if maxage is not None:
-            morsel['Max-Age'] = maxage
-        if expires:
-            morsel['expires'] = expires.strftime('%a, %d %b %Y %H:%M:%S %z')
-        # make sure cookie is set on the correct path
-        morsel['path'] = self.base_url_path()
-        self.add_header('Set-Cookie', morsel.OutputString())
-        self.add_header('Cookie', morsel.OutputString())
-
-    def remove_cookie(self, cookie, key):
-        self.remove_header('Set-Cookie')
-        self.remove_header('Cookie')
+        if raw:
+            return self.headers_out.getRawHeaders(header, default)[0]
+        else:
+            return self.headers_out.getHeader(header, default)
 
     def validate_cache(self):
         pass
@@ -185,8 +177,7 @@
     def internal_session(self):
         return FakeSession(self)
 
-    def extid2eid(self, source, extid, etype, session, insert=True,
-                  recreate=False):
+    def extid2eid(self, source, extid, etype, session, insert=True):
         try:
             return self.extids[extid]
         except KeyError:
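
A quick sketch of the reworked FakeRequest header API, assuming FakeRequest can
still be built without arguments since it sets up a fake registry by default
(the header value is made up):

.. sourcecode:: python

  from cubicweb.devtools.fake import FakeRequest

  req = FakeRequest()
  # incoming headers now live in a real http_headers.Headers instance
  req.set_request_header('If-Modified-Since', 'Sat, 09 Aug 2003 00:00:00 GMT',
                         raw=True)
  print req.get_header('If-Modified-Since')   # raw=True is the default here
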
--- a/devtools/fill.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/devtools/fill.py	Wed Apr 27 09:54:22 2011 +0200
@@ -157,6 +157,11 @@
         base = date(randint(2000, 2010), 1, 1) + timedelta(randint(1, 365))
         return self._constrained_generate(entity, attrname, base, timedelta(days=1), index)
 
+    def generate_interval(self, entity, attrname, index):
+        """generates a random date (format is 'yyyy-mm-dd')"""
+        base = timedelta(randint(1, 365))
+        return self._constrained_generate(entity, attrname, base, timedelta(days=1), index)
+
     def generate_time(self, entity, attrname, index):
         """generates a random time (format is ' HH:MM')"""
         return time(11, index%60) #'11:%02d' % (index % 60)
--- a/devtools/repotest.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/devtools/repotest.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -277,7 +277,8 @@
 
 
 class BasePlannerTC(BaseQuerierTC):
-    newsources = 0
+    newsources = ()
+
     def setup(self):
         clear_cache(self.repo, 'rel_type_sources')
         clear_cache(self.repo, 'rel_type_sources')
@@ -293,18 +294,21 @@
         do_monkey_patch()
         self._dumb_sessions = [] # by hi-jacked parent setup
         self.repo.vreg.rqlhelper.backend = 'postgres' # so FTIRANK is considered
+        self.newsources = []
 
     def add_source(self, sourcecls, uri):
-        self.sources.append(sourcecls(self.repo, {'uri': uri}))
-        self.repo.sources_by_uri[uri] = self.sources[-1]
-        setattr(self, uri, self.sources[-1])
-        self.newsources += 1
+        source = sourcecls(self.repo, {'uri': uri, 'type': 'whatever'})
+        if not source.copy_based_source:
+            self.sources.append(source)
+        self.newsources.append(source)
+        self.repo.sources_by_uri[uri] = source
+        setattr(self, uri, source)
 
     def tearDown(self):
-        while self.newsources:
-            source = self.sources.pop(-1)
+        for source in self.newsources:
+            if not source.copy_based_source:
+                self.sources.remove(source)
             del self.repo.sources_by_uri[source.uri]
-            self.newsources -= 1
         undo_monkey_patch()
         for session in self._dumb_sessions:
             session._threaddata.pool = None
--- a/devtools/testlib.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/devtools/testlib.py	Wed Apr 27 09:54:22 2011 +0200
@@ -252,8 +252,7 @@
         # cnx is now an instance property that use a class protected attributes.
         cls.set_cnx(cnx)
         cls.vreg = cls.repo.vreg
-        cls.websession = DBAPISession(cnx, cls.admlogin,
-                                      {'password': cls.admpassword})
+        cls.websession = DBAPISession(cnx, cls.admlogin)
         cls._orig_cnx = (cnx, cls.websession)
         cls.config.repository = lambda x=None: cls.repo
 
@@ -486,7 +485,8 @@
                       for a in self.vreg['views'].possible_views(req, rset=rset))
 
     def pactions(self, req, rset,
-                 skipcategories=('addrelated', 'siteactions', 'useractions', 'footer')):
+                 skipcategories=('addrelated', 'siteactions', 'useractions',
+                                 'footer', 'manage')):
         return [(a.__regid__, a.__class__)
                 for a in self.vreg['actions'].poss_visible_objects(req, rset=rset)
                 if a.category not in skipcategories]
@@ -497,7 +497,8 @@
                 if a.category in categories]
 
     def pactionsdict(self, req, rset,
-                     skipcategories=('addrelated', 'siteactions', 'useractions', 'footer')):
+                     skipcategories=('addrelated', 'siteactions', 'useractions',
+                                     'footer', 'manage')):
         res = {}
         for a in self.vreg['actions'].poss_visible_objects(req, rset=rset):
             if a.category not in skipcategories:
@@ -661,13 +662,16 @@
         """
         return self.expect_redirect(lambda x: self.app_publish(x, path), req)
 
-    def init_authentication(self, authmode, anonuser=None):
+    def set_auth_mode(self, authmode, anonuser=None):
         self.set_option('auth-mode', authmode)
         self.set_option('anonymous-user', anonuser)
         if anonuser is None:
             self.config.anonymous_credential = None
         else:
             self.config.anonymous_credential = (anonuser, anonuser)
+
+    def init_authentication(self, authmode, anonuser=None):
+        self.set_auth_mode(authmode, anonuser)
         req = self.request()
         origsession = req.session
         req.session = req.cnx = None
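
A short sketch of the new ``set_auth_mode`` helper split out of
``init_authentication`` (the test class body is illustrative):

.. sourcecode:: python

  from cubicweb.devtools.testlib import CubicWebTC

  class AnonAccessTC(CubicWebTC):
      def test_anonymous_credential(self):
          # only tweak the auth options, without the request/session rebuild
          # that init_authentication() performs on top of this call
          self.set_auth_mode('cookie', 'anon')
          self.assertEqual(self.config.anonymous_credential, ('anon', 'anon'))
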
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/book/en/admin/config.rst	Wed Apr 27 09:54:22 2011 +0200
@@ -0,0 +1,224 @@
+.. -*- coding: utf-8 -*-
+
+.. _ConfigEnv:
+
+Set-up of a *CubicWeb* environment
+==================================
+
+You can `configure the database`_ system of your choice:
+
+  - `PostgreSQL configuration`_
+  - `MySql configuration`_
+  - `SQLServer configuration`_
+  - `SQLite configuration`_
+
+For advanced features, have a look to:
+
+  - `Pyro configuration`_
+  - `Cubicweb resources configuration`_
+
+.. _`configure the database`: DatabaseInstallation_
+.. _`PostgreSQL configuration`: PostgresqlConfiguration_
+.. _`MySql configuration`: MySqlConfiguration_
+.. _`SQLServer configuration`: SQLServerConfiguration_
+.. _`SQLite configuration`: SQLiteConfiguration_
+.. _`Pyro configuration`: PyroConfiguration_
+.. _`Cubicweb resources configuration`: RessourcesConfiguration_
+
+
+
+.. _RessourcesConfiguration:
+
+Cubicweb resources configuration
+--------------------------------
+
+.. autodocstring:: cubicweb.cwconfig
+
+
+.. _DatabaseInstallation:
+
+Databases configuration
+-----------------------
+
+Each instance can be configured with its own database connection information,
+that will be stored in the instance's :file:`sources` file. The database to use
+will be chosen when creating the instance. CubicWeb is known to run with
+Postgresql (recommended), MySQL, SQLServer and SQLite.
+
+Other possible sources of data include CubicWeb, Subversion, LDAP and Mercurial,
+but at least one relational database is required for CubicWeb to work. You do
+not need to install a backend that you do not intend to use for one of your
+instances. SQLite is not fit for production use, but it works well for testing
+and ships with Python, which saves installation time when you want to get
+started quickly.
+
+.. _PostgresqlConfiguration:
+
+PostgreSQL
+~~~~~~~~~~
+
+For installation, please refer to the `PostgreSQL project online documentation`_.
+
+.. _`PostgreSQL project online documentation`: http://www.postgresql.org/
+
+You need to install the three following packages: `postgresql-8.X`,
+`postgresql-client-8.X`, and `postgresql-plpython-8.X`. If you run a postgres
+version prior to 8.3, you'll also need the `postgresql-contrib-8.X` package for
+the full-text search extension.
+
+If you run postgres on another host than the |cubicweb| repository, you should
+install the `postgresql-client` package on the |cubicweb| host, and others on the
+database host.
+
+.. Note::
+
+    If you already have an existing cluster and PostgreSQL server running, you do
+    not need to execute the initialization step of your PostgreSQL database unless
+    you want a specific cluster for |cubicweb| databases or if your existing
+    cluster doesn't use the UTF8 encoding (see note below).
+
+* First, initialize a PostgreSQL cluster with the command ``initdb``::
+
+    $ initdb -E UTF8 -D /path/to/pgsql
+
+  Notice the encoding specification. This is necessary since |cubicweb| usually
+  wants a UTF8 encoded database. If you use a cluster with the wrong encoding,
+  you'll get errors like::
+
+    new encoding (UTF8) is incompatible with the encoding of the template database (SQL_ASCII)
+    HINT:  Use the same encoding as in the template database, or use template0 as template.
+
+
+  Once initialized, start the database server PostgreSQL with the command::
+
+    $ postgres -D /path/to/psql
+
+  If you cannot execute this command due to permission issues, please make sure
+  that your username has write access on the database.  ::
+
+    $ chown username /path/to/pgsql
+
+* The database authentication can be either set to `ident sameuser` or `md5`.  If
+  set to `md5`, make sure to use an existing user of your database.  If set to
+  `ident sameuser`, make sure that your client's operating system user name has a
+  matching user in the database. If not, please do as follows to create a user::
+
+    $ su
+    $ su - postgres
+    $ createuser -s -P username
+
+  The option `-P` (for password prompt) will encrypt the password with the
+  method set in the configuration file :file:`pg_hba.conf`.  If you do not use this
+  option `-P`, then the default value will be null and you will need to set it
+  with::
+
+    $ su postgres -c "echo ALTER USER username WITH PASSWORD 'userpasswd' | psql"
+
+.. Note::
+    The authentication method can be configured in :file:`pg_hba.conf`.
+
+
+The above login/password will be requested when you create an instance with
+`cubicweb-ctl create` to initialize the database of your instance.
+
+Notice that `cubicweb-ctl db-create` performs database initialization steps that
+may require a postgres superuser. That's why a login/password is explicitly asked
+at this step, so you can use a superuser there without running the instance as
+that user. Things that require special privileges at this step:
+
+* database creation, which requires the 'create database' permission
+* installing the plpython extension language (requires superuser)
+* installing the tsearch extension for postgres versions prior to 8.3 (requires superuser)
+
+To avoid using a superuser each time you create an instance, a nice trick is to
+install plpython (and tsearch when needed) on the special `template1` database,
+so they will be installed automatically when cubicweb databases are created,
+without needing special access rights. To do so, run ::
+
+  # Installation of plpythonu language by default ::
+  $ createlang -U pgadmin plpythonu template1
+  $ psql -U pgadmin template1
+  template1=# update pg_language set lanpltrusted=TRUE where lanname='plpythonu';
+
+Where `pgadmin` is a postgres superuser. The last command is necessary since by
+default plpython is an 'untrusted' language and as such can't be used by
+non-superusers. This update fixes that problem by making it trusted.
+
+To install the tsearch plain-text index extension on postgres prior to 8.3, run::
+
+    cat /usr/share/postgresql/8.X/contrib/tsearch2.sql | psql -U username template1
+
+
+
+.. _MySqlConfiguration:
+
+MySql
+~~~~~
+
+You must add the following lines in ``/etc/mysql/my.cnf`` file::
+
+    transaction-isolation=READ-COMMITTED
+    default-storage-engine=INNODB
+    default-character-set=utf8
+    max_allowed_packet = 128M
+
+.. Note::
+    It is unclear whether mysql supports indexed strings of arbitrary length or
+    not.
+
+
+.. _SQLServerConfiguration:
+
+SQLServer
+~~~~~~~~~
+
+As of this writing, support for SQLServer 2005 is functional but incomplete. You
+should be able to connect, create a database and go quite far, but some of the
+SQL generated from RQL queries is still currently not accepted by the
+backend. Porting to SQLServer 2008 is also an item on the backlog.
+
+The `source` configuration file may look like this (specific parts only are
+shown)::
+
+  [system]
+  db-driver=sqlserver2005
+  db-user=someuser
+  # database password not needed
+  #db-password=toto123
+  #db-create/init may ask for a pwd: just say anything
+  db-extra-arguments=Trusted_Connection
+  db-encoding=utf8
+
+
+
+.. _SQLiteConfiguration:
+
+SQLite
+~~~~~~
+SQLite has the great advantage of requiring almost no configuration. Simply
+use 'sqlite' as db-driver, and set the path to the database file as db-name. Don't
+specify anything for db-user and db-password, they will be ignored anyway.
+
+.. Note::
+  SQLite is great for testing and to play with cubicweb but is not suited for
+  production environments.
+
+
+.. _PyroConfiguration:
+
+Pyro configuration
+------------------
+
+If you want to use Pyro to access your instance remotely, or to have multi-source
+or distributed configuration, it is required to have a Pyro name server running
+on your network. By default it is detected by a broadcast request, but you can
+specify a location in the instance's configuration file.
+
+To do so, you need to :
+
+* launch the pyro name server with `pyro-nsd start` before starting cubicweb
+
+* under debian, edit the file :file:`/etc/default/pyro-nsd` so that the pyro
+  name server is launched automatically when the machine boots
+
+
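
The SQLite subsection in the file above gives no literal snippet; for symmetry
with the SQLServer example, the relevant part of the instance's ``sources``
file could look like this (the path is a placeholder)::

  [system]
  db-driver=sqlite
  db-name=/path/to/instance.sqlite
  # db-user and db-password are ignored with the sqlite driver
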
--- a/doc/book/en/admin/instance-config.rst	Tue Apr 05 08:39:49 2011 +0200
+++ b/doc/book/en/admin/instance-config.rst	Wed Apr 27 09:54:22 2011 +0200
@@ -4,11 +4,6 @@
 Configure an instance
 =====================
 
-On a Unix system, the instances are usually stored in the directory
-:file:`/etc/cubicweb.d/`. During development, the
-:file:`~/etc/cubicweb.d/` directory is looked up, as well as the paths
-in :envvar:`CW_INSTANCES_DIR` environment variable.
-
 While creating an instance, a configuration file is generated in::
 
     $ (CW_INSTANCES_DIR) / <instance> / <configuration name>.conf
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/book/en/admin/setup-windows.rst	Wed Apr 27 09:54:22 2011 +0200
@@ -0,0 +1,156 @@
+.. -*- coding: utf-8 -*-
+
+.. _SetUpWindowsEnv:
+
+Installing a development environment on Windows
+================================================
+
+Setting up a Windows development environment is not too complicated
+but it requires a series of small steps.
+
+We propose an example of a typical |cubicweb| installation on Windows
+from sources. We assume everything goes into ``C:\\`` and for any
+package, without version specification, "the latest is
+the greatest".
+
+Keep in mind that adjusting the installation drive should be
+straightforward.
+
+
+
+Install the required elements
+-----------------------------
+
+|cubicweb| requires some base elements that must be installed to run
+correctly. So, first of all, you must install them:
+
+* python >= 2.5 and < 3 
+  (`Download Python <http://www.python.org/download/>`_). 
+  You can also consider the Python(x,y) distribution 
+  (`Download Python(x,y) <http://code.google.com/p/pythonxy/wiki/Downloads>`_)
+  as it makes things easier for Windows user by wrapping in a single installer
+  python 2.5 plus numerous useful third-party modules and
+  applications (including Eclipse + pydev, which is an arguably good
+  IDE for Python under Windows).
+
+* `Twisted <http://twistedmatrix.com/trac/>`_ is an event-driven
+  networking engine 
+  (`Download Twisted <http://twistedmatrix.com/trac/>`_)
+
+* `lxml <http://codespeak.net/lxml/>`_ library
+  (version >=2.2.1) allows working with XML and HTML 
+  (`Download lxml <http://pypi.python.org/pypi/lxml/2.2.1>`_)
+
+* `Postgresql 8.4 <http://www.postgresql.org/>`_, 
+  an object-relational database system 
+  (`Download Postgresql <http://www.enterprisedb.com/products/pgdownload.do#windows>`_) 
+  and its python drivers 
+  (`Download psycopg <http://www.stickpeople.com/projects/python/win-psycopg/#Version2>`_)
+
+* A recent version of `gettext` 
+  (`Download gettext <http://download.logilab.org/pub/gettext/gettext-0.17-win32-setup.exe>`_).
+
+* `rql <http://www.logilab.org/project/rql>`_, 
+  the recent version of the Relationship Query Language parser 
+  (`Download rql <http://download.logilab.org/pub/rql/rql-0.26.3.win32-py2.5.exe>`_).
+
+Install optional elements
+-------------------------
+
+We recommend you to install the following elements. They are not
+mandatory but they activate very interesting features in |cubicweb|:
+
+* `Simplejson <http://pypi.python.org/pypi/simplejson/>`_ 
+  must be installed if you have python <= 2.5 
+  (`Download simplejson <http://www.osuch.org/python-simplejson%3Awin32>`_).
+  It is included in the Standard library from Python >= 2.6.
+
+* `Pyro <http://www.xs4all.nl/~irmen/pyro3/>`_ 
+  enables remote access to cubicweb repository instances.
+  It also allows the client and the server to run on different machines
+  (`Download Pyro <http://www.xs4all.nl/~irmen/pyro3/download/>`_).
+
+* `python-ldap <http://pypi.python.org/pypi/python-ldap>`_ 
+  provides access to LDAP/Active directory directories
+  (`Download python-ldap <http://www.osuch.org/python-ldap>`_).
+
+* `graphviz <http://www.graphviz.org/>`_ 
+  which allows schema drawings.
+  (`Download graphviz <http://www.graphviz.org/Download_windows.php>`_).
+  It is quite recommended (albeit not mandatory).
+
+Other elements will activate more features once installed. Take a look
+at :ref:`InstallDependencies`.
+
+Useful tools
+------------
+
+Some additional tools could be useful to develop :ref:`cubes <AvailableCubes>`
+with the framework.
+
+* `mercurial <http://mercurial.selenic.com/>`_ and its standard
+  windows GUI (`TortoiseHG <http://tortoisehg.bitbucket.org/>`_) 
+  allow you to get the source code of |cubicweb| from control version
+  repositories. So you will be able to get the latest development
+  version in an easy way 
+  (`Download mercurial <http://bitbucket.org/tortoisehg/stable/wiki/download>`_).
+
+* You can also consider the ssh client `Putty` in order to peruse
+  mercurial over ssh (`Download <http://www.putty.org/>`_).
+
+* If you are an Eclipse user, mercurial can be integrated using the
+  `MercurialEclipse` plugin 
+  (`Home page <http://www.vectrace.com/mercurialeclipse/>`_).
+
+Getting the sources
+-------------------
+
+There are two ways to get the sources of |cubicweb| and its
+:ref:`cubes <AvailableCubes>`:
+
+* download the latest release (:ref:`SourceInstallation`) 
+* get the development version using Mercurial 
+  (:ref:`MercurialInstallation`)
+
+Environment variables
+---------------------
+
+You will need some convenience environment variables once all is set up. These
+variables are settable through the GUI by getting at the `System properties`
+window (by right-clicking on `My Computer` -> `properties`).
+
+In the `advanced` tab, there is an `Environment variables` button. Click on
+it. That opens a small window allowing edition of user-related and system-wide
+variables.
+
+We will consider only user variables. First, the ``PATH`` variable. Assuming 
+you are logged as user *Jane*, add the following paths, separated by 
+semi-colons::
+
+  C:\Documents and Settings\Jane\My Documents\Python\cubicweb\cubicweb\bin
+  C:\Program Files\Graphviz2.24\bin
+
+The ``PYTHONPATH`` variable should also contain::
+
+  C:\Documents and Settings\Jane\My Documents\Python\cubicweb\
+
+From now, on a fresh `cmd` shell, you should be able to type::
+
+  cubicweb-ctl list
+
+... and get a meaningful output.
+
+Running an instance as a service
+--------------------------------
+
+This currently assumes that the instance's configuration is located at
+``C:\\etc\\cubicweb.d``. For an instance 'my_instance', you will find
+``C:\\etc\\cubicweb.d\\my_instance\\win32svc.py``.
+
+Now, register your instance as a windows service with::
+
+  win32svc install
+
+Then start the service with::
+
+  net start cubicweb-my_instance
--- a/doc/book/en/admin/setup.rst	Tue Apr 05 08:39:49 2011 +0200
+++ b/doc/book/en/admin/setup.rst	Wed Apr 27 09:54:22 2011 +0200
@@ -2,575 +2,255 @@
 
 .. _SetUpEnv:
 
-Installation and set-up of a |cubicweb| environment
-===================================================
+Installation of a *CubicWeb* environment
+========================================
 
-Installation of `Cubicweb` and its dependencies
------------------------------------------------
+Official releases are available from the `CubicWeb.org forge`_ and from
+`PyPI`_. Since CubicWeb is developed using `Agile software development
+<http://en.wikipedia.org/wiki/Agile_software_development>`_ techniques, releases
+happen frequently. In a version numbered X.Y.Z, X changes after a few years when
+the API breaks, Y changes after a few weeks when features are added and Z
+changes after a few days when bugs are fixed.
 
-|cubicweb| is packaged for `Debian and Ubuntu`_, is `pip installable`_ and
-`easy_install installable`_. It can be installed from source using a tarball_
-or the `Mercurial version control system`_ . Windows user may want to check the
-`Windows Installation`_ section.
+Depending on your needs, you will choose a different way to install CubicWeb on
+your system:
 
-Also, since version 3.9, can be safely installed, used and contained inside a
-`virtualenv`_.
+- `Installation on Debian/Ubuntu`_
+- `Installation on Windows`_
+- `Installation in a virtualenv`_
+- `Installation with pip`_
+- `Installation with easy_install`_
+- `Installation from tarball`_
 
+If you are a power-user and need the very latest features, you will want to
+
+- `Install from version control`_
 
-.. _`Debian and Ubuntu` : DebianInstallation_
-.. _`pip installable`: PipInstallation_
-.. _`easy_install installable`: EasyInstallInstallation_
-.. _tarball: TarballInstallation_
-.. _`Mercurial version control system`: MercurialInstallation_
-.. _`Windows Installation`: WindowsInstallation_
-.. _`virtualenv`: http://pypi.python.org/pypi/virtualenv
+Once the software is installed, move on to :ref:`ConfigEnv` for better control
+and advanced features of |cubicweb|.
 
+.. _`Installation on Debian/Ubuntu`: DebianInstallation_
+.. _`Installation on Windows`: WindowsInstallation_
+.. _`Installation in a virtualenv`: VirtualenvInstallation_
+.. _`Installation with pip`: PipInstallation_
+.. _`Installation with easy_install`: EasyInstallInstallation_
+.. _`Installation from tarball`: TarballInstallation_
+.. _`Install from version control`: MercurialInstallation_
 
-.. file:///home/pyves/tmp/cwdoc/html/admin/setup.html#pipinstallation
 
 .. _DebianInstallation:
 
-Debian and Ubuntu packages
-```````````````````````````
+Debian/Ubuntu install
+---------------------
+
+|cubicweb| is packaged for Debian/Ubuntu (and derived
+distributions). Their integrated package-management system makes
+installation and upgrade much easier for users, since
+dependencies (like databases) are automatically installed.
 
 Depending on the distribution you are using, add the appropriate line to your
-list of sources (for example by editing ``/etc/apt/sources.list``).
+`list of sources`_ (for example by editing ``/etc/apt/sources.list``).
 
-For Debian Lenny::
+For Debian Squeeze (stable)::
+
+  deb http://download.logilab.org/production/ squeeze/
 
-  deb http://ftp.logilab.org/dists/ lenny/
+For Debian Sid (unstable)::
 
-For Debian Sid::
+  deb http://download.logilab.org/production/ sid/
+
+For Ubuntu Lucid (Long Term Support) and newer::
 
-  deb http://ftp.logilab.org/dists/ sid/
+  deb http://download.logilab.org/production/ lucid/
 
-For Ubuntu Hardy::
+.. note::
+
+  For Ubuntu Maverick and newer, you should use the `lucid` repository and
+  install the ``libgecode19`` package from `lucid universe
+  <http://packages.ubuntu.com/lucid/libgecode19>`_.
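+
+  For example, with the `universe` component enabled in your
+  ``/etc/apt/sources.list``::
+
+    apt-get update
+    apt-get install libgecode19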
 
-  deb http://ftp.logilab.org/dists/ hardy/
+The repositories are signed with `Logilab's gnupg key`_. You can download
+and register the key to avoid warnings::
 
+  wget -q http://download.logilab.org/logilab-dists-key.asc -O- | sudo apt-key add -
 
-You can now install the required packages with the following command::
+Update your list of packages and perform the installation::
 
   apt-get update
   apt-get install cubicweb cubicweb-dev
 
-
-`cubicweb` installs the framework itself, allowing you to create new instances.
-
-`cubicweb-dev` installs the development environment allowing you to develop new
-cubes.
-
-.. note::
-
-   `cubicweb-dev` will install basic sqlite support. You can easily setup
-   `cubicweb with other database`_ using the following virtual packages :
-   `cubicweb-postgresql-support` contains necessary dependency for using
-   `cubicweb with postgresql datatabase`_ and `cubicweb-mysql-support` contains
-   necessary dependency for using `cubicweb with mysql database`_ .
-
-There is also a wide variety of :ref:`cubes <AvailableCubes>` listed on the
-`CubicWeb.org Forge`_ available as debian packages and tarball.
-
-The repositories are signed with `Logilab's gnupg key`_. To avoid warning on
-"apt-get update":
+``cubicweb`` installs the framework itself, allowing you to create new
+instances. ``cubicweb-dev`` installs the development environment
+allowing you to develop new cubes.
 
-1. become root using sudo
-2. download http://ftp.logilab.org/dists/logilab-dists-key.asc using e.g. wget
-3. run "apt-key add logilab-dists-key.asc"
-4. re-run apt-get update (manually or through the package manager, whichever you prefer)
-
-.. _`Logilab's gnupg key`: http://ftp.logilab.org/dists/logilab-dists-key.asc
-.. _`CubicWeb.org Forge`: http://www.cubicweb.org/project/
-.. _`cubicweb with other database`: DatabaseInstallation_
-.. _`cubicweb with postgresql datatabase` : PostgresqlConfiguration_
-.. _`cubicweb with mysql database` : MySqlConfiguration_
-
-
-.. _PipInstallation:
-
-Installation with pip
-`````````````````````
-
-pip_ is a smart python utility that lets you automatically download, build,
-install, and manage python packages and their dependencies.
-
-|cubicweb| and its cubes have been pip_ installable since version 3.9. Search
-for them on pypi_::
-
-  pip install cubicweb
-  pip install cubicweb-blog
+There is also a wide variety of :ref:`cubes <AvailableCubes>`. You can access a
+list of available cubes using ``apt-cache search cubicweb`` or at the
+`CubicWeb.org forge`_.
 
 .. note::
 
-    Pip is the recommended way to install |cubicweb| if there is no binary
-    package available on your system or you want to install it inside a
-    `virtualenv`_. However pip doesn't install binary package and may require
-    several compilation steps while installing |cubicweb| dependencies. If you
-    don't have a compilation environment you should use  `easy_install
-    installation`_ to install |cubicweb|.
+  `cubicweb-dev` will install basic sqlite support. You can easily set up
+  :ref:`cubicweb with another database <DatabaseInstallation>` using the
+  following virtual packages (see the example below):
+
+  * `cubicweb-postgresql-support` contains the necessary dependencies for
+    using :ref:`cubicweb with a postgresql database <PostgresqlConfiguration>`
+
+  * `cubicweb-mysql-support` contains the necessary dependencies for using
+    :ref:`cubicweb with a mysql database <MySqlConfiguration>`.
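+
+  For example, to use the PostgreSQL backend, install the corresponding
+  virtual package::
+
+    apt-get install cubicweb-postgresql-support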
+
+.. _`list of sources`: http://wiki.debian.org/SourcesList
+.. _`Logilab's gnupg key`: http://download.logilab.org/logilab-dists-key.asc
+.. _`CubicWeb.org Forge`: http://www.cubicweb.org/project/
+
+.. _WindowsInstallation:
+
+Windows Install
+---------------
 
-    Once, |cubicweb| is installed, this limitation doesn't apply when installing
-    cubes.
+You need to have `python`_ version >= 2.5 and < 3 installed.
+
+If you want an automated install, your best option is probably the
+:ref:`EasyInstallInstallation`. EasyInstall is a tool that helps users install
+python packages along with their dependencies, searching for suitable
+pre-compiled binaries on `The Python Package Index`_.
 
+If you want better control over the process as well as a suitable development
+environment, or if you are having problems with `easy_install`, continue with
+:ref:`SetUpWindowsEnv`.
+
+.. _python:  http://www.python.org/
+.. _`The Python Package Index`: http://pypi.python.org
+
+.. _VirtualenvInstallation:
+
+`Virtualenv` install
+--------------------
 
-.. _pip: http://pypi.python.org/pypi/pip
-.. _pypi: http://pypi.python.org/pypi?%3Aaction=search&term=cubicweb
-.. _`easy_install installation`: EasyInstallInstallation_
+Since version 3.9, |cubicweb| can be safely installed, used and contained inside
+a `virtualenv`_. You can use either :ref:`pip <PipInstallation>` or
+:ref:`easy_install <EasyInstallInstallation>` to install |cubicweb| inside an
+activated virtual environment.
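+
+For example, assuming `virtualenv`_ is already installed (adapt the
+environment name to your taste)::
+
+  virtualenv venv-cubicweb
+  . venv-cubicweb/bin/activate
+  pip install cubicweb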
+
+.. _PipInstallation:
 
+`pip` install
+-------------
+
+pip_ is a python utility that helps downloading, building, installing, and
+managing python packages and their dependencies. It is fully compatible with
+`virtualenv`_ and installs the packages from sources published on
+`The Python Package Index`_.
+
+.. _`pip`: http://pip.openplans.org/
+.. _`virtualenv`: http://virtualenv.openplans.org/
 
-.. warning::
+A working compilation chain is needed to build the modules that include C
+extensions. If you definitely cannot compile, installing pre-built binary
+packages of `Lxml <http://codespeak.net/lxml/>`_, `Twisted
+<http://twistedmatrix.com/trac/>`_ and `libgecode <http://www.gecode.org/>`_
+beforehand will help.
+
+To install |cubicweb| and its dependencies, just run::
+
+  pip install cubicweb
 
-  |cubicweb| depends upon the `lxml` python module. This module contains ``C``
-  code that must be compiled.  To successfully install |cubicweb| with pip, you
-  must either have an environment ables to compile Python ``C`` extensions or
-  preinstall lxml from a binary package.
+There is also a wide variety of :ref:`cubes <AvailableCubes>`. You can access a
+list of available cubes on
+`PyPI <http://pypi.python.org/pypi?%3Aaction=search&term=cubicweb&submit=search>`_
+or at the `CubicWeb.org forge`_.
+
+For example, installing the *blog cube* is achieved by::
+
+  pip install cubicweb-blog
+
+.. _EasyInstallInstallation:
+
+`easy_install` install
+----------------------
 
 .. note::
 
-  For better performance the setup processor will compile a ``C`` extension for
-  the :ref:`RQL <RQL>` language if you have an environment ables to compile
-  Python ``C`` extensions and the `gecode library`_.  Otherwise, a pure python
-  alternative will be used for degraded performance.
+   If you are not a Windows user and you have a compilation environment, we
+   recommend using the PipInstallation_.
+
+`easy_install`_ is a python utility that helps downloading, installing, and
+managing python packages and their dependencies.
+
+To install |cubicweb| and its dependencies, run::
 
-.. _`gecode library`: http://www.gecode.org/
-.. _`easy_install`:   http://packages.python.org/distribute/easy_install.html
-
+  easy_install cubicweb
 
-.. _EasyInstallInstallation:
+There is also a wide variety of :ref:`cubes <AvailableCubes>`. You can access a
+list of available cubes on `PyPI
+<http://pypi.python.org/pypi?%3Aaction=search&term=cubicweb&submit=search>`_
+or at the `CubicWeb.org Forge`_.
 
-Installation with EasyInstall
-``````````````````````````````
+For example, installing the *blog cube* is achieved by::
+
+  easy_install cubicweb-blog
 
 .. note::
 
-    We don't recommend the use of `easy_install` and setuptools in the generic
-    case. However as easy_install is currently the sole pure python package
-    system that support binary installation. Using `easy_install` is currently
-    the easiest way to install |cubicweb| when you don't have a compilation
-    environment set-up or Debian based distribution.
-
-
-|cubicweb| is easy_install_ installable for version 3.9::
+  If you encounter problems installing :ref:`cubes <AvailableCubes>`,
+  consider using :ref:`PipInstallation`, which is more stable
+  but cannot install pre-compiled binaries.
 
-  easy_install cubicweb
-
-.. warning::
-
-    Cubes are **not** is easy_install_ installable. But they are
-    `pip installable`_
-
-
+.. _`easy_install`: http://packages.python.org/distribute/easy_install.html
 
 
 .. _SourceInstallation:
 
 Install from source
-```````````````````
+-------------------
 
 .. _TarballInstallation:
 
-You can download the archive containing the sources from our `ftp site`_ at::
-
-  http://ftp.logilab.org/pub/cubicweb/
-
-.. _`ftp site`: http://ftp.logilab.org/pub/cubicweb/
+You can download the archive containing the sources from
+`http://download.logilab.org/pub/cubicweb/ <http://download.logilab.org/pub/cubicweb/>`_.
 
 Make sure you also have all the :ref:`InstallDependencies`.
 
+Once the archive is uncompressed, you can install the framework from inside the
+extracted folder with::
+
+  python setup.py install
+
+Or you can run |cubicweb| directly from the source directory by
+setting the :ref:`resource mode <RessourcesConfiguration>` to `user`, as
+sketched below. This eases development with the framework.
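+
+A minimal sketch of the latter, assuming a bourne-like shell, the sources
+unpacked as ``~/src/cubicweb`` and the ``CW_MODE`` environment variable being
+the way to select the resource mode (see the section referenced above)::
+
+  export CW_MODE=user
+  export PYTHONPATH=~/src:$PYTHONPATH
+  export PATH=~/src/cubicweb/bin:$PATH
+  cubicweb-ctl list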
+
+There is also a wide variety of :ref:`cubes <AvailableCubes>`. You can access a
+list of available cubes at the `CubicWeb.org Forge`_.
+
+
 .. _MercurialInstallation:
 
 Install from version control system
-```````````````````````````````````
+-----------------------------------
 
-You can keep up to date with on-going development by using Mercurial::
-
-  hg clone http://hg.logilab.org/cubicweb
+To keep up with on-going development, clone the :ref:`Mercurial
+<MercurialPresentation>` repository::
 
-See :ref:`MercurialPresentation` for more details about Mercurial.
+  hg clone -u stable http://hg.logilab.org/cubicweb # stable branch
+  hg clone http://hg.logilab.org/cubicweb # development branch
 
-A practical way to get many of CubicWeb's dependencies and a nice set
-of base cubes is to run the `clone_deps.py` script located in
-`cubicweb/bin/`::
+To get many of CubicWeb's dependencies and a nice set of base cubes, run the
+`clone_deps.py` script located in `cubicweb/bin/`::
 
   python cubicweb/bin/clone_deps.py
 
 (Windows users should replace slashes with backslashes).
 
-This script will clone a set of mercurial repositories into in the
-directory containing the CubicWeb repository, and update them to the
+This script will clone a set of mercurial repositories into the
+directory containing the ``cubicweb`` repository, and update them to the
 latest published version tag (if any).
 
-When cloning a repository, you might be set in a development branch
-(the 'default' branch). You should check that the branches of the
-repositories are set to 'stable' (using `hg up stable` for each one)
-if you do not intend to develop the framework itself.
+.. note::
 
-Even better, `hg tags` will display a list of tags in reverse
-chronological order. One reasonnable way to get to a working version
-is to pick the latest published version (as done by the `clone_deps`
-script). These look like `cubicweb-debian-version-3.9.7-1`. Typing::
+  In every cloned repository, `hg tags` will display a list of
+  tags in reverse chronological order. One reasonable option is to update to
+  a tagged version: the latest published version for example, as done by
+  the `clone_deps` script::
 
- hg update cubicweb-debian-version-3.9.7-1
-
-will update the repository files to this version.
+   hg update cubicweb-version-3.12.2
 
 Make sure you also have all the :ref:`InstallDependencies`.
 
-
-.. _WindowsInstallation:
-
-Windows installation
-````````````````````
-
-Your best option is probably the :ref:`PipInstallation`. If it does not work or
-if you want more control over the process, continue with the following
-instructions.
-
-Base elements
-~~~~~~~~~~~~~
-
-Setting up a windows development environment is not too complicated but requires
-a series of small steps. What is proposed there is only an example of what can be
-done. We assume everything goes into `C:\\` in this document. Adjusting the
-installation drive should be straightforward.
-
-You should start by downloading and installing Python version >= 2.5 and < 3.
-
-An alternative option would be installing the Python(x,y)
-distribution. Python(x,y) is not a requirement, but it makes things easier for
-Windows user by wrapping in a single installer python 2.5 plus numerous useful
-third-party modules and applications (including Eclipse + pydev, which is an
-arguably good IDE for Python under Windows). Download it from this page::
-
-  http://code.google.com/p/pythonxy/wiki/Downloads
-
-Then you must grab Twisted. There is a windows installer directly available from
-this page::
-
-  http://twistedmatrix.com/trac/
-
-A windows installer for lxml will be found there::
-
-  http://pypi.python.org/pypi/lxml/2.2.1
-
-Check out the lxml-2.2.1-win32-py2.5.exe file. More recent bugfix
-releases should probably work, too.
-
-You should find postgresql 8.4 there::
-
-  http://www.enterprisedb.com/products/pgdownload.do#windows
-
-The python drivers for posgtresql are to be found there::
-
-  http://www.stickpeople.com/projects/python/win-psycopg/#Version2
-
-Please be careful to select the right python (2.5) and postgres (8.4) versions.
-
-A windows compiled recent version of gettext::
-
-  http://ftp.logilab.org/pub/gettext/gettext-0.17-win32-setup.exe
-
-A pre-compiled version of rql for windows (take care of retrieving the
-most recent version available there)::
-
-  http://ftp.logilab.org/pub/rql/rql-0.23.0.win32-py2.5.exe
-
-Pyro enables remote access to cubicweb repository instances. Get it there::
-
-  http://sourceforge.net/projects/pyro/files/
-
-To access LDAP/Active directory directories, we need the python-ldap
-package. Windows binaries are available from::
-
-  http://www.osuch.org/python-ldap
-
-Check out the latest release.
-
-Having graphviz will allow schema drawings, which is quite recommended (albeit
-not mandatory). You should get an msi installer there::
-
-  http://www.graphviz.org/Download_windows.php
-
-Simplejson is needed when installing with Python 2.5, but included in the
-standard library for Python >= 2.6. Get it from there::
-
-  http://www.osuch.org/python-simplejson%3Awin32
-
-Make sure you also have all the :ref:`InstallDependencies` that are not specific
-to Windows.
-
-Tools
-~~~~~
-
-Get mercurial + its standard windows GUI (TortoiseHG) there (the latest is the
-greatest)::
-
-  http://bitbucket.org/tortoisehg/stable/wiki/download
-
-If you need to peruse mercurial over ssh, it can be helpful to get an ssh client
-like Putty::
-
-  http://www.putty.org/
-
-Integration of mercurial and Eclipse is convenient enough that we want
-it. Instructions are set there, in the `Download & Install` section::
-
-  http://www.vectrace.com/mercurialeclipse/
-
-Getting the sources
-~~~~~~~~~~~~~~~~~~~
-
-You can either download the latest release (see
-:ref:`SourceInstallation`) or get the development version using
-Mercurial (see :ref:`MercurialInstallation` and below), which is more
-convenient.
-
-Environment variables
-~~~~~~~~~~~~~~~~~~~~~
-
-You will need some convenience environment variables once all is set up. These
-variables are settable through the GUI by getting at the 'System properties'
-window (by righ-clicking on 'My Computer' -> properties).
-
-In the 'advanced' tab, there is an 'Environment variables' button. Click on
-it. That opens a small window allowing edition of user-related and system-wide
-variables.
-
-We will consider only user variables. First, the PATH variable. You should ensure
-it contains, separated by semi-colons, and assuming you are logged in as user
-Jane::
-
-  C:\Documents and Settings\Jane\My Documents\Python\cubicweb\cubicweb\bin
-  C:\Program Files\Graphviz2.24\bin
-
-The PYTHONPATH variable should also contain::
-
-  C:\Documents and Settings\Jane\My Documents\Python\cubicweb\
-
-From now, on a fresh `cmd` shell, you should be able to type::
-
-  cubicweb-ctl list
-
-... and get a meaningful output.
-
-Running an instance as a service
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-This currently assumes that the instances configurations is located at
-C:\\etc\\cubicweb.d.
-
-For a cube 'my_instance', you will then find
-C:\\etc\\cubicweb.d\\my_instance\\win32svc.py that has to be used as follows::
-
-  win32svc install
-
-This should just register your instance as a windows service. A simple::
-
-  net start cubicweb-my_instance
-
-should start the service.
-
-
-Other dependencies
-``````````````````
-
-You can also install:
-
-* `pyro` if you wish the repository to be accessible through Pyro
-  or if the client and the server are not running on the same machine
-  (in which case the packages will have to be installed on both
-  machines)
-
-* `python-ldap` if you plan to use a LDAP source on the server
-
-
-.. _DatabaseInstallation:
-
-Databases configuration
------------------------
-
-Each instance can be configured with its own database connection information,
-that will be stored in the instance's :file:`sources` file. The database to use
-will be chosen when creating the instance. Currently cubicweb has been tested
-using Postgresql (recommended), MySQL, SQLServer and SQLite.
-
-Other possible sources of data include CubicWeb, Subversion, LDAP and Mercurial,
-but at least one relational database is required for CubicWeb to work. You do
-not need to install a backend that you do not intend to use for one of your
-instances. SQLite is not fit for production use, but it works well for testing
-and ships with Python, which saves installation time when you want to get
-started quickly.
-
-.. _PostgresqlConfiguration:
-
-PostgreSQL configuration
-````````````````````````
-
-For installation, please refer to the `PostgreSQL project online documentation`_.
-
-.. _`PostgreSQL project online documentation`: http://www.postgresql.org/
-
-You need to install the three following packages: `postgresql-8.X`,
-`postgresql-client-8.X`, and `postgresql-plpython-8.X`. If you run postgres
-version prior to 8.3, you'll also need the `postgresql-contrib-8.X` package for
-full-text search extension.
-
-If you run postgres on another host than the |cubicweb| repository, you should
-install the `postgresql-client` package on the |cubicweb| host, and others on the
-database host.
-
-.. Note::
-
-    If you already have an existing cluster and PostgreSQL server running, you do
-    not need to execute the initilization step of your PostgreSQL database unless
-    you want a specific cluster for |cubicweb| databases or if your existing
-    cluster doesn't use the UTF8 encoding (see note below).
-
-* First, initialize a PostgreSQL cluster with the command ``initdb``.
-  ::
-
-    $ initdb -E UTF8 -D /path/to/pgsql
-
-  Notice the encoding specification. This is necessary since |cubicweb| usually
-  want UTF8 encoded database. If you use a cluster with the wrong encoding, you'll
-  get error like::
-
-    new encoding (UTF8) is incompatible with the encoding of the template database (SQL_ASCII)
-    HINT:  Use the same encoding as in the template database, or use template0 as template.
-
-
-  Once initialized, start the database server PostgreSQL with the command::
-
-    $ postgres -D /path/to/psql
-
-  If you cannot execute this command due to permission issues, please make sure
-  that your username has write access on the database.  ::
-
-    $ chown username /path/to/pgsql
-
-* The database authentication can be either set to `ident sameuser` or `md5`.  If
-  set to `md5`, make sure to use an existing user of your database.  If set to
-  `ident sameuser`, make sure that your client's operating system user name has a
-  matching user in the database. If not, please do as follow to create a user::
-
-    $ su
-    $ su - postgres
-    $ createuser -s -P username
-
-  The option `-P` (for password prompt), will encrypt the password with the
-  method set in the configuration file :file:`pg_hba.conf`.  If you do not use this
-  option `-P`, then the default value will be null and you will need to set it
-  with::
-
-    $ su postgres -c "echo ALTER USER username WITH PASSWORD 'userpasswd' | psql"
-
-.. Note::
-    The authentication method can be configured in file:`pg_hba.conf`.
-
-
-The above login/password will be requested when you will create an instance with
-`cubicweb-ctl create` to initialize the database of your instance.
-
-Notice that the `cubicweb-ctl db-create` does database initialization that
-may requires a postgres superuser. That's why a login/password is explicitly asked
-at this step, so you can use there a superuser without using this user when running
-the instance. Things that require special privileges at this step:
-
-* database creation, require the 'create database' permission
-* install the plpython extension language (require superuser)
-* install the tsearch extension for postgres version prior to 8.3 (require superuser)
-
-To avoid using a super user each time you create an install, a nice trick is to
-install plpython (and tsearch when needed) on the special `template1` database,
-so they will be installed automatically when cubicweb databases are created
-without even with needs for special access rights. To do so, run ::
-
-  # Installation of plpythonu language by default ::
-  $ createlang -U pgadmin plpythonu template1
-  $ psql -U pgadmin template1
-  template1=# update pg_language set lanpltrusted=TRUE where lanname='plpythonu';
-
-Where `pgadmin` is a postgres superuser. The last command is necessary since by
-default plpython is an 'untrusted' language and as such can't be used by non
-superuser. This update fix that problem by making it trusted.
-
-To install the tsearch plain-text index extension on postgres prior to 8.3, run::
-
-    cat /usr/share/postgresql/8.X/contrib/tsearch2.sql | psql -U username template1
-
-
-.. _MySqlConfiguration:
-
-MySql configuration
-```````````````````
-You must add the following lines in ``/etc/mysql/my.cnf`` file::
-
-    transaction-isolation=READ-COMMITTED
-    default-storage-engine=INNODB
-    default-character-set=utf8
-    max_allowed_packet = 128M
-
-.. Note::
-    It is unclear whether mysql supports indexed string of arbitrary length or
-    not.
-
-
-.. _SQLServerConfiguration:
-
-SQLServer configuration
-```````````````````````
-
-As of this writing, support for SQLServer 2005 is functional but incomplete. You
-should be able to connect, create a database and go quite far, but some of the
-SQL generated from RQL queries is still currently not accepted by the
-backend. Porting to SQLServer 2008 is also an item on the backlog.
-
-The `source` configuration file may look like this (specific parts only are
-shown)::
-
-  [system]
-  db-driver=sqlserver2005
-  db-user=someuser
-  # database password not needed
-  #db-password=toto123
-  #db-create/init may ask for a pwd: just say anything
-  db-extra-arguments=Trusted_Connection
-  db-encoding=utf8
-
-
-
-.. _SQLiteConfiguration:
-
-SQLite configuration
-````````````````````
-SQLite has the great advantage of requiring almost no configuration. Simply
-use 'sqlite' as db-driver, and set path to the dabase as db-name. Don't specify
-anything for db-user and db-password, they will be ignore anyway.
-
-.. Note::
-  SQLite is great for testing and to play with cubicweb but is not suited for
-  production environments.
-
-
-.. _PyroConfiguration:
-
-Pyro configuration
-------------------
-
-If you want to use Pyro to access your instance remotely, or to have multi-source
-or distributed configuration, it is required to have a Pyro name server running
-on your network. By default it is detected by a broadcast request, but you can
-specify a location in the instance's configuration file.
-
-To do so, you need to :
-
-* launch the pyro name server with `pyro-nsd start` before starting cubicweb
-
-* under debian, edit the file :file:`/etc/default/pyro-nsd` so that the name
-  server pyro will be launched automatically when the machine fire up
-
-
-Cubicweb resources configuration
---------------------------------
-
-.. autodocstring:: cubicweb.cwconfig
--- a/doc/book/en/annexes/rql/language.rst	Tue Apr 05 08:39:49 2011 +0200
+++ b/doc/book/en/annexes/rql/language.rst	Wed Apr 27 09:54:22 2011 +0200
@@ -153,7 +153,7 @@
 - Aggregate Functions: COUNT, MIN, MAX, AVG, SUM, GROUP_CONCAT
 
 Having
-``````
+```````
 
 The HAVING clause, as in SQL, has been originally introduced to restrict a query
 according to value returned by an aggregate function, e.g.::
@@ -214,7 +214,12 @@
 
 
 Exists
-``````
+```````
+
+You can use `EXISTS` when you want to know if some expression is true and do not
+need the complete set of elements that make it true. Testing for existence is
+much faster than fetching the complete set of results.
+
 ::
 
     Any X ORDERBY PN,N
--- a/doc/book/en/devrepo/testing.rst	Tue Apr 05 08:39:49 2011 +0200
+++ b/doc/book/en/devrepo/testing.rst	Wed Apr 27 09:54:22 2011 +0200
@@ -59,10 +59,10 @@
 
         def setup_database(self):
             req = self.request()
-            group_etype = req.execute('Any X WHERE X name "CWGroup"').get_entity(0,0)
+            group_etype = req.find_one_entity('CWEType', name='CWGroup')
             c1 = req.create_entity('Classification', name=u'classif1',
                                    classifies=group_etype)
-            user_etype = req.execute('Any X WHERE X name "CWUser"').get_entity(0,0)
+            user_etype = req.find_one_entity('CWEType', name='CWUser')
             c2 = req.create_entity('Classification', name=u'classif2',
                                    classifies=user_etype)
             self.kw1 = req.create_entity('Keyword', name=u'kwgroup', included_in=c1)
@@ -228,7 +228,7 @@
 
         def test_admin(self):
             req = self.request()
-            rset = req.execute('Any C WHERE C is Conference')
+            rset = req.find_entities('Conference')
             self.assertListEqual(self.pactions(req, rset),
                                   [('workflow', workflow.WorkflowActions),
                                    ('edit', confactions.ModifyAction),
--- a/doc/book/en/index.rst	Tue Apr 05 08:39:49 2011 +0200
+++ b/doc/book/en/index.rst	Wed Apr 27 09:54:22 2011 +0200
@@ -38,7 +38,7 @@
 
 The hacker will join development at the forge_.
 
-The impatient developer will move right away to :ref:`SetUpEnv`.
+The impatient developer will move right away to :ref:`SetUpEnv` then to :ref:`ConfigEnv`.
 
 The chatter lover will join the `jabber forum`_, the `mailing-list`_ and the blog_.
 
--- a/doc/book/en/tutorials/advanced/part03_bfss.rst	Tue Apr 05 08:39:49 2011 +0200
+++ b/doc/book/en/tutorials/advanced/part03_bfss.rst	Wed Apr 27 09:54:22 2011 +0200
@@ -20,25 +20,25 @@
     from os.path import join, exists
 
     from cubicweb.server import hook
-    from cubicweb.server.sources import storage
+    from cubicweb.server.sources import storages
 
     class ServerStartupHook(hook.Hook):
-	__regid__ = 'sytweb.serverstartup'
-	events = ('server_startup', 'server_maintenance')
+        __regid__ = 'sytweb.serverstartup'
+        events = ('server_startup', 'server_maintenance')
 
-	def __call__(self):
-	    bfssdir = join(self.repo.config.appdatahome, 'bfss')
-	    if not exists(bfssdir):
-		makedirs(bfssdir)
-		print 'created', bfssdir
-	    storage = storages.BytesFileSystemStorage(bfssdir)
-	    set_attribute_storage(self.repo, 'File', 'data', storage)
+        def __call__(self):
+            bfssdir = join(self.repo.config.appdatahome, 'bfss')
+            if not exists(bfssdir):
+                makedirs(bfssdir)
+                print 'created', bfssdir
+            storage = storages.BytesFileSystemStorage(bfssdir)
+            set_attribute_storage(self.repo, 'File', 'data', storage)
 
 .. Note::
 
-  * how we built the hook's registry identifier (_`_regid__`): you can introduce
+  * how we built the hook's registry identifier (`__regid__`): you can introduce
    'namespaces' by using python-module-like naming identifiers there. This is
-    especially import for hooks where you usually want a new custom hook, not
+    especially important for hooks where you usually want a new custom hook, not
    overriding / specializing an existing one, but the concept may be applied to
     any application objects
 
@@ -50,48 +50,48 @@
   * the path given to the storage is the place where file added through the ui
     (or in the database before migration) will be located
 
-  * be ware that by doing this, you can't anymore write queries that will try to
+  * beware that by doing this, you can no longer write queries that try to
    restrict on the File `data` attribute. Hopefully we don't do that usually
     on file's content or more generally on attributes for the Bytes type
 
 Now, if you've already added some photos through the web ui, you'll have to
 migrate existing data so file's content will be stored on the file-system instead
 of the database. There is a migration command to do so, let's run it in the
-cubicweb shell (in actual life, you'd have to put it in a migration script as we
-seen last time):
+cubicweb shell (in real life, you would have to put it in a migration script as we
+have seen last time):
 
 ::
 
    $ cubicweb-ctl shell sytweb
-    entering the migration python shell
-    just type migration commands or arbitrary python code and type ENTER to execute it
-    type "exit" or Ctrl-D to quit the shell and resume operation
-    >>> storage_changed('File', 'data')
-    [........................]
+   entering the migration python shell
+   just type migration commands or arbitrary python code and type ENTER to execute it
+   type "exit" or Ctrl-D to quit the shell and resume operation
+   >>> storage_changed('File', 'data')
+   [........................]
 
 
-That's it. Now, file added through the web ui will have their content stored on
+That's it. Now, files added through the web ui will have their content stored on
 the file-system, and you'll also be able to import files from the file-system as
 explained in the next part.
 
 Step 2: importing some data into the instance
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-Hey, we start to have some nice features, let give us a try on this new web
+Hey, we are starting to have some nice features, so let us try out this new web
 site. For instance if I have a 'photos/201005WePyrenees' containing pictures for
 a particular event, I can import it to my web site by typing ::
 
   $ cubicweb-ctl fsimport -F sytweb photos/201005WePyrenees/
   ** importing directory /home/syt/photos/201005WePyrenees
-    importing IMG_8314.JPG
-    importing IMG_8274.JPG
-    importing IMG_8286.JPG
-    importing IMG_8308.JPG
-    importing IMG_8304.JPG
+  importing IMG_8314.JPG
+  importing IMG_8274.JPG
+  importing IMG_8286.JPG
+  importing IMG_8308.JPG
+  importing IMG_8304.JPG
 
 .. Note::
-  The -F option tell that folders should be mapped, hence my photos will be
-  all under a Folder entity corresponding to the file-system folder.
+  The -F option means that folders should be mapped, hence my photos will be
+  linked to a Folder entity corresponding to the file-system folder.
 
 Let's take a look at the web ui:
 
@@ -103,11 +103,11 @@
 
 .. image:: ../../images/tutos-photowebsite_ui2.png
 
-Yeah, it's there! You can also notice that I can see some entities as well as
+Yeah, it's there! You will notice that I can see some entities as well as
 folders and images the anonymous user can't. It just works **everywhere in the
 ui** since it's handled at the repository level, thanks to our security model.
 
-Now if I click on the newly inserted folder, I can see
+Now if I click on the recently inserted folder, I can see
 
 .. image:: ../../images/tutos-photowebsite_ui3.png
 
@@ -124,7 +124,7 @@
 We started to see here an advanced feature of our repository: the ability
 to store some parts of our data-model into a custom storage, outside the
 database. There is currently only the :class:`BytesFileSystemStorage` available,
-but you can expect to see more coming in a near future (our write your own!).
+but you can expect to see more coming in a near future (or write your own!).
 
 Also, we can now start to feed our web-site with some nice pictures!
 The site isn't perfect (far from it actually) but it's usable, and we can
--- a/entities/__init__.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/entities/__init__.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -121,16 +121,18 @@
     # edition helper functions ################################################
 
     def linked_to(self, rtype, role, remove=True):
-        """if entity should be linked to another using __linkto form param for
+        """if entity should be linked to another using '__linkto' form param for
         the given relation/role, return eids of related entities
 
         This method is consuming matching link-to information from form params
-        if `remove` is True (by default).
+        if `remove` is True (by default). Computed values are stored into a
+        `cw_linkto` attribute, a dictionary with (relation, role) as key and
+        linked eids as value.
         """
         try:
-            return self.__linkto[(rtype, role)]
+            return self.cw_linkto[(rtype, role)]
         except AttributeError:
-            self.__linkto = {}
+            self.cw_linkto = {}
         except KeyError:
             pass
         linktos = list(self._cw.list_form_param('__linkto'))
@@ -144,7 +146,7 @@
                     linktos.remove(linkto)
                     self._cw.form['__linkto'] = linktos
                 linkedto.append(typed_eid(eid))
-        self.__linkto[(rtype, role)] = linkedto
+        self.cw_linkto[(rtype, role)] = linkedto
         return linkedto
 
     # server side helpers #####################################################
--- a/entities/adapters.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/entities/adapters.py	Wed Apr 27 09:54:22 2011 +0200
@@ -183,7 +183,7 @@
         """return actual data of the downloadable content"""
         raise NotImplementedError
 
-
+# XXX should propose to use two different relations for children/parent
 class ITreeAdapter(EntityAdapter):
     """This adapter has to be overriden to be configured using the
     tree_relation, child_role and parent_role class attributes to benefit from
--- a/entities/authobjs.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/entities/authobjs.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -80,6 +80,20 @@
                          key, self.login)
         return self._cw.vreg.property_value(key)
 
+    def set_property(self, pkey, value):
+        value = unicode(value)
+        try:
+            prop = self._cw.execute(
+                'CWProperty X WHERE X pkey %(k)s, X for_user U, U eid %(u)s',
+                {'k': pkey, 'u': self.eid}).get_entity(0, 0)
+        except:
+            kwargs = dict(pkey=unicode(pkey), value=value)
+            if self.is_in_group('managers'):
+                kwargs['for_user'] = self
+            self._cw.create_entity('CWProperty', **kwargs)
+        else:
+            prop.set_attributes(value=value)
+
     def matching_groups(self, groups):
         """return the number of the given group(s) in which the user is
 
--- a/entities/schemaobjs.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/entities/schemaobjs.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -19,12 +19,7 @@
 
 __docformat__ = "restructuredtext en"
 
-import re
-from socket import gethostname
-
 from logilab.common.decorators import cached
-from logilab.common.textutils import text_to_dict
-from logilab.common.configuration import OptionError
 
 from yams.schema import role_name
 
@@ -34,58 +29,6 @@
 from cubicweb.entities import AnyEntity, fetch_config
 
 
-class _CWSourceCfgMixIn(object):
-    @property
-    def dictconfig(self):
-        return self.config and text_to_dict(self.config) or {}
-
-    def update_config(self, skip_unknown=False, **config):
-        from cubicweb.server import SOURCE_TYPES
-        from cubicweb.server.serverconfig import (SourceConfiguration,
-                                                  generate_source_config)
-        cfg = self.dictconfig
-        cfg.update(config)
-        options = SOURCE_TYPES[self.type].options
-        sconfig = SourceConfiguration(self._cw.vreg.config, options=options)
-        for opt, val in cfg.iteritems():
-            try:
-                sconfig.set_option(opt, val)
-            except OptionError:
-                if skip_unknown:
-                    continue
-                raise
-        cfgstr = unicode(generate_source_config(sconfig), self._cw.encoding)
-        self.set_attributes(config=cfgstr)
-
-
-class CWSource(_CWSourceCfgMixIn, AnyEntity):
-    __regid__ = 'CWSource'
-    fetch_attrs, fetch_order = fetch_config(['name', 'type'])
-
-    @property
-    def host_config(self):
-        dictconfig = self.dictconfig
-        host = gethostname()
-        for hostcfg in self.host_configs:
-            if hostcfg.match(host):
-                self.info('matching host config %s for source %s',
-                          hostcfg.match_host, self.name)
-                dictconfig.update(hostcfg.dictconfig)
-        return dictconfig
-
-    @property
-    def host_configs(self):
-        return self.reverse_cw_host_config_of
-
-
-class CWSourceHostConfig(_CWSourceCfgMixIn, AnyEntity):
-    __regid__ = 'CWSourceHostConfig'
-    fetch_attrs, fetch_order = fetch_config(['match_host', 'config'])
-
-    def match(self, hostname):
-        return re.match(self.match_host, hostname)
-
-
 class CWEType(AnyEntity):
     __regid__ = 'CWEType'
     fetch_attrs, fetch_order = fetch_config(['name'])
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/entities/sources.py	Wed Apr 27 09:54:22 2011 +0200
@@ -0,0 +1,133 @@
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+"""data source related entities"""
+
+__docformat__ = "restructuredtext en"
+
+import re
+from socket import gethostname
+
+from logilab.common.textutils import text_to_dict
+from logilab.common.configuration import OptionError
+
+from cubicweb import ValidationError
+from cubicweb.entities import AnyEntity, fetch_config
+
+class _CWSourceCfgMixIn(object):
+    @property
+    def dictconfig(self):
+        return self.config and text_to_dict(self.config) or {}
+
+    def update_config(self, skip_unknown=False, **config):
+        from cubicweb.server import SOURCE_TYPES
+        from cubicweb.server.serverconfig import (SourceConfiguration,
+                                                  generate_source_config)
+        cfg = self.dictconfig
+        cfg.update(config)
+        options = SOURCE_TYPES[self.type].options
+        sconfig = SourceConfiguration(self._cw.vreg.config, options=options)
+        for opt, val in cfg.iteritems():
+            try:
+                sconfig.set_option(opt, val)
+            except OptionError:
+                if skip_unknown:
+                    continue
+                raise
+        cfgstr = unicode(generate_source_config(sconfig), self._cw.encoding)
+        self.set_attributes(config=cfgstr)
+
+
+class CWSource(_CWSourceCfgMixIn, AnyEntity):
+    __regid__ = 'CWSource'
+    fetch_attrs, fetch_order = fetch_config(['name', 'type'])
+
+    @property
+    def host_config(self):
+        dictconfig = self.dictconfig
+        host = gethostname()
+        for hostcfg in self.host_configs:
+            if hostcfg.match(host):
+                self.info('matching host config %s for source %s',
+                          hostcfg.match_host, self.name)
+                dictconfig.update(hostcfg.dictconfig)
+        return dictconfig
+
+    @property
+    def host_configs(self):
+        return self.reverse_cw_host_config_of
+
+    def init_mapping(self, mapping):
+        for key, options in mapping:
+            if isinstance(key, tuple): # relation definition
+                assert len(key) == 3
+                restrictions = ['X relation_type RT, RT name %(rt)s']
+                kwargs = {'rt': key[1]}
+                if key[0] != '*':
+                    restrictions.append('X from_entity FT, FT name %(ft)s')
+                    kwargs['ft'] = key[0]
+                if key[2] != '*':
+                    restrictions.append('X to_entity TT, TT name %(tt)s')
+                    kwargs['tt'] = key[2]
+                rql = 'Any X WHERE %s' % ','.join(restrictions)
+                schemarset = self._cw.execute(rql, kwargs)
+            elif key[0].isupper(): # entity type
+                schemarset = self._cw.execute('CWEType X WHERE X name %(et)s',
+                                              {'et': key})
+            else: # relation type
+                schemarset = self._cw.execute('CWRType X WHERE X name %(rt)s',
+                                              {'rt': key})
+            for schemaentity in schemarset.entities():
+                self._cw.create_entity('CWSourceSchemaConfig',
+                                       cw_for_source=self,
+                                       cw_schema=schemaentity,
+                                       options=options)
+
+    @property
+    def repo_source(self):
+        """repository only property, not available from the web side (eg
+        self._cw is expected to be a server session)
+        """
+        return self._cw.repo.sources_by_eid[self.eid]
+
+
+class CWSourceHostConfig(_CWSourceCfgMixIn, AnyEntity):
+    __regid__ = 'CWSourceHostConfig'
+    fetch_attrs, fetch_order = fetch_config(['match_host', 'config'])
+
+    @property
+    def cwsource(self):
+        return self.cw_host_config_of[0]
+
+    def match(self, hostname):
+        return re.match(self.match_host, hostname)
+
+
+class CWSourceSchemaConfig(AnyEntity):
+    __regid__ = 'CWSourceSchemaConfig'
+    fetch_attrs, fetch_order = fetch_config(['cw_for_source', 'cw_schema', 'options'])
+
+    def dc_title(self):
+        return self._cw._(self.__regid__) + ' #%s' % self.eid
+
+    @property
+    def schema(self):
+        return self.cw_schema[0]
+
+    @property
+    def cwsource(self):
+        return self.cw_for_source[0]
--- a/entities/test/unittest_base.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/entities/test/unittest_base.py	Wed Apr 27 09:54:22 2011 +0200
@@ -111,6 +111,11 @@
                                'creation_date', 'modification_date', 'cwuri', 'eid'))
                           )
 
+    def test_cw_instantiate_object_relation(self):
+        """ a weird non regression test """
+        e = self.execute('CWUser U WHERE U login "member"').get_entity(0, 0)
+        self.request().create_entity('CWGroup', name=u'logilab', reverse_in_group=e)
+
 
 class InterfaceTC(CubicWebTC):
 
--- a/entity.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/entity.py	Wed Apr 27 09:54:22 2011 +0200
@@ -62,7 +62,6 @@
     return True
 
 
-
 class Entity(AppObject):
     """an entity instance has e_schema automagically set on
     the class and instances has access to their issuing cursor.
@@ -253,28 +252,38 @@
 
         >>> companycls = vreg['etypes'].etype_class(('Company')
         >>> personcls = vreg['etypes'].etype_class(('Person')
-        >>> c = companycls.cw_instantiate(req.execute, name=u'Logilab')
-        >>> personcls.cw_instantiate(req.execute, firstname=u'John', lastname=u'Doe',
-        ...                          works_for=c)
+        >>> c = companycls.cw_instantiate(session.execute, name=u'Logilab')
+        >>> p = personcls.cw_instantiate(session.execute, firstname=u'John', lastname=u'Doe',
+        ...                              works_for=c)
 
+        You can also set relations where the entity has the 'object' role by
+        prefixing the relation name with 'reverse_' (e.g. passing
+        ``reverse_works_for=p`` when instantiating a Company links `p` to it
+        as an employee).
         """
         rql = 'INSERT %s X' % cls.__regid__
         relations = []
         restrictions = set()
         pending_relations = []
+        eschema = cls.e_schema
         for attr, value in kwargs.items():
-            if isinstance(value, (tuple, list, set, frozenset)):
+            if attr.startswith('reverse_'):
+                attr = attr[len('reverse_'):]
+                role = 'object'
+            else:
+                role = 'subject'
+            assert eschema.has_relation(attr, role)
+            rschema = eschema.subjrels[attr] if role == 'subject' else eschema.objrels[attr]
+            if not rschema.final and isinstance(value, (tuple, list, set, frozenset)):
                 if len(value) == 1:
                     value = iter(value).next()
                 else:
+                    # prepare IN clause
                     del kwargs[attr]
                     pending_relations.append( (attr, value) )
                     continue
             if hasattr(value, 'eid'): # non final relation
                 rvar = attr.upper()
-                # XXX safer detection of object relation
-                if attr.startswith('reverse_'):
-                    relations.append('%s %s X' % (rvar, attr[len('reverse_'):]))
+                if role == 'object':
+                    relations.append('%s %s X' % (rvar, attr))
                 else:
                     relations.append('X %s %s' % (attr, rvar))
                 restriction = '%s eid %%(%s)s' % (rvar, attr)
@@ -808,7 +817,11 @@
             else:
                 existant = None # instead of 'SO', improve perfs
             for select in rqlst.children:
-                rewriter.rewrite(select, [((searchedvar, searchedvar), rqlexprs)],
+                varmap = {}
+                for var in 'SO':
+                    if var in select.defined_vars:
+                        varmap[var] = var
+                rewriter.rewrite(select, [(varmap, rqlexprs)],
                                  select.solutions, args, existant)
             rql = rqlst.as_string()
         return rql, args
@@ -902,9 +915,7 @@
         assert kwargs
         assert self.cw_is_saved(), "should not call set_attributes while entity "\
                "hasn't been saved yet"
-        relations = []
-        for key in kwargs:
-            relations.append('X %s %%(%s)s' % (key, key))
+        relations = ['X %s %%(%s)s' % (key, key) for key in kwargs]
         # and now update the database
         kwargs['x'] = self.eid
         self._cw.execute('SET %s WHERE X eid %%(x)s' % ','.join(relations),
--- a/etwist/twctl.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/etwist/twctl.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -51,10 +51,10 @@
         """
         cfgname = 'all-in-one'
 
-        def bootstrap(self, cubes, inputlevel=0):
+        def bootstrap(self, cubes, automatic=False, inputlevel=0):
             """bootstrap this configuration"""
-            serverctl.RepositoryCreateHandler.bootstrap(self, cubes, inputlevel)
-            TWCreateHandler.bootstrap(self, cubes, inputlevel)
+            serverctl.RepositoryCreateHandler.bootstrap(self, cubes, automatic, inputlevel)
+            TWCreateHandler.bootstrap(self, cubes, automatic, inputlevel)
 
     class AllInOneStartHandler(TWStartHandler):
         cmdname = 'start'
--- a/hooks/__init__.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/hooks/__init__.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -15,17 +15,17 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""core hooks
+"""core hooks registering some maintainance tasks as server startup time"""
 
-"""
 __docformat__ = "restructuredtext en"
 
 from datetime import timedelta, datetime
+
 from cubicweb.server import hook
 
 class ServerStartupHook(hook.Hook):
     """task to cleanup expirated auth cookie entities"""
-    __regid__ = 'cw_cleanup_transactions'
+    __regid__ = 'cw.start-looping-tasks'
     events = ('server_startup',)
 
     def __call__(self):
@@ -47,3 +47,27 @@
             finally:
                 session.close()
         self.repo.looping_task(60*60*24, cleanup_old_transactions, self.repo)
+        def update_feeds(repo):
+            session = repo.internal_session()
+            try:
+                # don't iter on repo.sources which doesn't include copy based
+                # sources (the one we're looking for)
+                for source in repo.sources_by_eid.itervalues():
+                    if (not source.copy_based_source
+                        or not repo.config.source_enabled(source)
+                        or not source.config['synchronize']):
+                        continue
+                    try:
+                        stats = source.pull_data(session)
+                        if stats.get('created'):
+                            source.info('added %s entities', len(stats['created']))
+                        if stats.get('updated'):
+                            source.info('updated %s entities', len(stats['updated']))
+                        session.commit()
+                    except Exception, exc:
+                        session.exception('while trying to update feed %s', source)
+                        session.rollback()
+                    session.set_pool()
+            finally:
+                session.close()
+        self.repo.looping_task(60, update_feeds, self.repo)
--- a/hooks/integrity.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/hooks/integrity.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
--- a/hooks/notification.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/hooks/notification.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -15,9 +15,8 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""some hooks to handle notification on entity's changes
+"""some hooks to handle notification on entity's changes"""
 
-"""
 __docformat__ = "restructuredtext en"
 
 from logilab.common.textutils import normalize_text
--- a/hooks/syncschema.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/hooks/syncschema.py	Wed Apr 27 09:54:22 2011 +0200
@@ -912,6 +912,10 @@
     def __call__(self):
         entity = self.entity
         if entity.cw_edited.get('final'):
+            # final entity types don't need a table in the database and are
+            # systematically added by yams at schema initialization time so
+            # there is no need to do further processing. Simply assign its eid.
+            self._cw.vreg.schema[entity.name].eid = entity.eid
             return
         CWETypeAddOp(self._cw, entity=entity)
 
--- a/hooks/syncsession.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/hooks/syncsession.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
--- a/hooks/syncsources.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/hooks/syncsources.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,6 +1,29 @@
+# copyright 2010-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+"""hooks for repository sources synchronization"""
+
+from socket import gethostname
+
+from yams.schema import role_name
+
 from cubicweb import ValidationError
 from cubicweb.selectors import is_instance
-from cubicweb.server import hook
+from cubicweb.server import SOURCE_TYPES, hook
 
 class SourceHook(hook.Hook):
     __abstract__ = True
@@ -8,7 +31,7 @@
 
 
 class SourceAddedOp(hook.Operation):
-    def precommit_event(self):
+    def postcommit_event(self):
         self.session.repo.add_source(self.entity)
 
 class SourceAddedHook(SourceHook):
@@ -16,6 +39,14 @@
     __select__ = SourceHook.__select__ & is_instance('CWSource')
     events = ('after_add_entity',)
     def __call__(self):
+        try:
+            sourcecls = SOURCE_TYPES[self.entity.type]
+        except KeyError:
+            msg = self._cw._('unknown source type')
+            raise ValidationError(self.entity.eid,
+                                  {role_name('type', 'subject'): msg})
+        sourcecls.check_conf_dict(self.entity.eid, self.entity.host_config,
+                                  fail_if_unknown=not self._cw.vreg.config.repairing)
         SourceAddedOp(self._cw, entity=self.entity)
 
 
@@ -31,3 +62,102 @@
         if self.entity.name == 'system':
             raise ValidationError(self.entity.eid, {None: 'cant remove system source'})
         SourceRemovedOp(self._cw, uri=self.entity.name)
+
+
+class SourceUpdatedOp(hook.DataOperationMixIn, hook.Operation):
+
+    def precommit_event(self):
+        self.__processed = []
+        for source in self.get_data():
+            conf = source.repo_source.check_config(source)
+            self.__processed.append( (source, conf) )
+
+    def postcommit_event(self):
+        for source, conf in self.__processed:
+            source.repo_source.update_config(source, conf)
+
+class SourceUpdatedHook(SourceHook):
+    __regid__ = 'cw.sources.configupdate'
+    __select__ = SourceHook.__select__ & is_instance('CWSource')
+    events = ('after_update_entity',)
+    def __call__(self):
+        if 'config' in self.entity.cw_edited:
+            SourceUpdatedOp.get_instance(self._cw).add_data(self.entity)
+
+class SourceHostConfigUpdatedHook(SourceHook):
+    __regid__ = 'cw.sources.hostconfigupdate'
+    __select__ = SourceHook.__select__ & is_instance('CWSourceHostConfig')
+    events = ('after_add_entity', 'after_update_entity', 'before_delete_entity',)
+    def __call__(self):
+        if self.entity.match(gethostname()):
+            if self.event == 'after_update_entity' and \
+                   'config' not in self.entity.cw_edited:
+                return
+            try:
+                SourceUpdatedOp.get_instance(self._cw).add_data(self.entity.cwsource)
+            except IndexError:
+                # XXX no source linked to the host config yet
+                pass
+
+
+# source mapping synchronization. This expects cw_for_source/cw_schema to be
+# immutable relations (i.e. they can't be changed to another source or schema).
+
+class SourceMappingDeleteHook(SourceHook):
+    """check cw_for_source and cw_schema are immutable relations
+
+    XXX empty delete perms would be enough?
+    """
+    __regid__ = 'cw.sources.delschemaconfig'
+    __select__ = SourceHook.__select__ & hook.match_rtype('cw_for_source', 'cw_schema')
+    events = ('before_add_relation',)
+    def __call__(self):
+        if not self._cw.added_in_transaction(self.eidfrom):
+            msg = self._cw._("can't change this relation")
+            raise ValidationError(self.eidfrom, {self.rtype: msg})
+
+
+class SourceMappingChangedOp(hook.DataOperationMixIn, hook.Operation):
+    def check_or_update(self, checkonly):
+        session = self.session
+        # take care, can't call get_data() twice
+        try:
+            data = self.__data
+        except AttributeError:
+            data = self.__data = self.get_data()
+        for schemacfg, source in data:
+            if source is None:
+                source = schemacfg.cwsource.repo_source
+            if session.added_in_transaction(schemacfg.eid):
+                if not session.deleted_in_transaction(schemacfg.eid):
+                    source.add_schema_config(schemacfg, checkonly=checkonly)
+            elif session.deleted_in_transaction(schemacfg.eid):
+                source.delete_schema_config(schemacfg, checkonly=checkonly)
+            else:
+                source.update_schema_config(schemacfg, checkonly=checkonly)
+
+    def precommit_event(self):
+        self.check_or_update(True)
+
+    def postcommit_event(self):
+        self.check_or_update(False)
+
+
+class SourceMappingChangedHook(SourceHook):
+    __regid__ = 'cw.sources.schemaconfig'
+    __select__ = SourceHook.__select__ & is_instance('CWSourceSchemaConfig')
+    events = ('after_add_entity', 'after_update_entity')
+    def __call__(self):
+        if self.event == 'after_add_entity' or (
+            self.event == 'after_update_entity' and 'options' in self.entity.cw_edited):
+            SourceMappingChangedOp.get_instance(self._cw).add_data(
+                (self.entity, None) )
+
+class SourceMappingDeleteSchemaConfigHook(SourceHook):
+    __regid__ = 'cw.sources.delschemaconfigmapping'
+    __select__ = SourceHook.__select__ & hook.match_rtype('cw_for_source')
+    events = ('before_delete_relation',)
+    def __call__(self):
+        SourceMappingChangedOp.get_instance(self._cw).add_data(
+            (self._cw.entity_from_eid(self.eidfrom),
+             self._cw.entity_from_eid(self.eidto)) )
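The operations above split their work in two phases: precommit_event validates the collected entities (raising there still aborts the transaction), while postcommit_event applies the change to the live repository source only once the commit is known to have succeeded; note that get_data() is consumed once and its result cached. A minimal sketch of that pattern, reusing only names that appear in this file (the ExampleConfig* names and the regid are made up for illustration):

    class ExampleConfigOp(hook.DataOperationMixIn, hook.Operation):
        def precommit_event(self):
            # validate first; get_data() can only be consumed once, so cache it
            self._checked = [(source, source.repo_source.check_config(source))
                             for source in self.get_data()]

        def postcommit_event(self):
            # the transaction succeeded, push the new configuration to the source
            for source, conf in self._checked:
                source.repo_source.update_config(source, conf)

    class ExampleConfigHook(SourceHook):
        __regid__ = 'example.configupdate'   # made-up regid
        __select__ = SourceHook.__select__ & is_instance('CWSource')
        events = ('after_update_entity',)
        def __call__(self):
            ExampleConfigOp.get_instance(self._cw).add_data(self.entity)

Keeping side effects in postcommit_event is presumably also why SourceAddedOp is moved from precommit_event to postcommit_event above: the in-memory source is only added once the CWSource entity is actually committed.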
--- a/hooks/test/unittest_hooks.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/hooks/test/unittest_hooks.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -18,12 +18,11 @@
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """functional tests for core hooks
 
-note: most schemahooks.py hooks are actually tested in unittest_migrations.py
+Note:
+  syncschema.py hooks are mostly tested in server/test/unittest_migrations.py
 """
 from __future__ import with_statement
 
-from logilab.common.testlib import TestCase, unittest_main
-
 from datetime import datetime
 
 from cubicweb import ValidationError, AuthenticationError, BadConnectionId
@@ -31,38 +30,6 @@
 
 class CoreHooksTC(CubicWebTC):
 
-    def test_delete_internal_entities(self):
-        self.assertRaises(ValidationError, self.execute,
-                          'DELETE CWEType X WHERE X name "CWEType"')
-        self.assertRaises(ValidationError, self.execute,
-                          'DELETE CWRType X WHERE X name "relation_type"')
-        self.assertRaises(ValidationError, self.execute,
-                          'DELETE CWGroup X WHERE X name "owners"')
-
-    def test_delete_required_relations_subject(self):
-        self.execute('INSERT CWUser X: X login "toto", X upassword "hop", X in_group Y '
-                     'WHERE Y name "users"')
-        self.commit()
-        self.execute('DELETE X in_group Y WHERE X login "toto", Y name "users"')
-        self.assertRaises(ValidationError, self.commit)
-        self.execute('DELETE X in_group Y WHERE X login "toto"')
-        self.execute('SET X in_group Y WHERE X login "toto", Y name "guests"')
-        self.commit()
-
-    def test_delete_required_relations_object(self):
-        self.skipTest('no sample in the schema ! YAGNI ? Kermaat ?')
-
-    def test_static_vocabulary_check(self):
-        self.assertRaises(ValidationError,
-                          self.execute,
-                          'SET X composite "whatever" WHERE X from_entity FE, FE name "CWUser", X relation_type RT, RT name "in_group"')
-
-    def test_missing_required_relations_subject_inline(self):
-        # missing in_group relation
-        self.execute('INSERT CWUser X: X login "toto", X upassword "hop"')
-        self.assertRaises(ValidationError,
-                          self.commit)
-
     def test_inlined(self):
         self.assertEqual(self.repo.schema['sender'].inlined, True)
         self.execute('INSERT EmailAddress X: X address "toto@logilab.fr", X alias "hop"')
@@ -73,54 +40,6 @@
         rset = self.execute('Any S WHERE X sender S, X eid %s' % eeid)
         self.assertEqual(len(rset), 1)
 
-    def test_composite_1(self):
-        self.execute('INSERT EmailAddress X: X address "toto@logilab.fr", X alias "hop"')
-        self.execute('INSERT EmailPart X: X content_format "text/plain", X ordernum 1, X content "this is a test"')
-        self.execute('INSERT Email X: X messageid "<1234>", X subject "test", X sender Y, X recipients Y, X parts P '
-                     'WHERE Y is EmailAddress, P is EmailPart')
-        self.failUnless(self.execute('Email X WHERE X sender Y'))
-        self.commit()
-        self.execute('DELETE Email X')
-        rset = self.execute('Any X WHERE X is EmailPart')
-        self.assertEqual(len(rset), 1)
-        self.commit()
-        rset = self.execute('Any X WHERE X is EmailPart')
-        self.assertEqual(len(rset), 0)
-
-    def test_composite_2(self):
-        self.execute('INSERT EmailAddress X: X address "toto@logilab.fr", X alias "hop"')
-        self.execute('INSERT EmailPart X: X content_format "text/plain", X ordernum 1, X content "this is a test"')
-        self.execute('INSERT Email X: X messageid "<1234>", X subject "test", X sender Y, X recipients Y, X parts P '
-                     'WHERE Y is EmailAddress, P is EmailPart')
-        self.commit()
-        self.execute('DELETE Email X')
-        self.execute('DELETE EmailPart X')
-        self.commit()
-        rset = self.execute('Any X WHERE X is EmailPart')
-        self.assertEqual(len(rset), 0)
-
-    def test_composite_redirection(self):
-        self.execute('INSERT EmailAddress X: X address "toto@logilab.fr", X alias "hop"')
-        self.execute('INSERT EmailPart X: X content_format "text/plain", X ordernum 1, X content "this is a test"')
-        self.execute('INSERT Email X: X messageid "<1234>", X subject "test", X sender Y, X recipients Y, X parts P '
-                     'WHERE Y is EmailAddress, P is EmailPart')
-        self.execute('INSERT Email X: X messageid "<2345>", X subject "test2", X sender Y, X recipients Y '
-                     'WHERE Y is EmailAddress')
-        self.commit()
-        self.execute('DELETE X parts Y WHERE X messageid "<1234>"')
-        self.execute('SET X parts Y WHERE X messageid "<2345>"')
-        self.commit()
-        rset = self.execute('Any X WHERE X is EmailPart')
-        self.assertEqual(len(rset), 1)
-        self.assertEqual(rset.get_entity(0, 0).reverse_parts[0].messageid, '<2345>')
-
-    def test_unsatisfied_constraints(self):
-        releid = self.execute('SET U in_group G WHERE G name "owners", U login "admin"')[0][0]
-        with self.assertRaises(ValidationError) as cm:
-            self.commit()
-        self.assertEqual(cm.exception.errors,
-                          {'in_group-object': u'RQLConstraint NOT O name "owners" failed'})
-
     def test_html_tidy_hook(self):
         req = self.request()
         entity = req.create_entity('Workflow', name=u'wf1', description_format=u'text/html',
@@ -226,29 +145,6 @@
         self.failIf(self.execute('Any X WHERE X created_by Y, X eid >= %(x)s', {'x': eid}))
 
 
-class CWPropertyHooksTC(CubicWebTC):
-
-    def test_unexistant_eproperty(self):
-        with self.assertRaises(ValidationError) as cm:
-            self.execute('INSERT CWProperty X: X pkey "bla.bla", X value "hop", X for_user U')
-        self.assertEqual(cm.exception.errors, {'pkey-subject': 'unknown property key bla.bla'})
-        with self.assertRaises(ValidationError) as cm:
-            self.execute('INSERT CWProperty X: X pkey "bla.bla", X value "hop"')
-        self.assertEqual(cm.exception.errors, {'pkey-subject': 'unknown property key bla.bla'})
-
-    def test_site_wide_eproperty(self):
-        with self.assertRaises(ValidationError) as cm:
-            self.execute('INSERT CWProperty X: X pkey "ui.site-title", X value "hop", X for_user U')
-        self.assertEqual(cm.exception.errors, {'for_user-subject': "site-wide property can't be set for user"})
-
-    def test_bad_type_eproperty(self):
-        with self.assertRaises(ValidationError) as cm:
-            self.execute('INSERT CWProperty X: X pkey "ui.language", X value "hop", X for_user U')
-        self.assertEqual(cm.exception.errors, {'value-subject': u'unauthorized value'})
-        with self.assertRaises(ValidationError) as cm:
-            self.execute('INSERT CWProperty X: X pkey "ui.language", X value "hop"')
-        self.assertEqual(cm.exception.errors, {'value-subject': u'unauthorized value'})
-
 
 class SchemaHooksTC(CubicWebTC):
 
@@ -271,4 +167,5 @@
 
 
 if __name__ == '__main__':
+    from logilab.common.testlib import unittest_main
     unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/hooks/test/unittest_integrity.py	Wed Apr 27 09:54:22 2011 +0200
@@ -0,0 +1,119 @@
+# -*- coding: utf-8 -*-
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+"""functional tests for integrity hooks"""
+
+from __future__ import with_statement
+
+from cubicweb import ValidationError
+from cubicweb.devtools.testlib import CubicWebTC
+
+class CoreHooksTC(CubicWebTC):
+
+    def test_delete_internal_entities(self):
+        self.assertRaises(ValidationError, self.execute,
+                          'DELETE CWEType X WHERE X name "CWEType"')
+        self.assertRaises(ValidationError, self.execute,
+                          'DELETE CWRType X WHERE X name "relation_type"')
+        self.assertRaises(ValidationError, self.execute,
+                          'DELETE CWGroup X WHERE X name "owners"')
+
+    def test_delete_required_relations_subject(self):
+        self.execute('INSERT CWUser X: X login "toto", X upassword "hop", X in_group Y '
+                     'WHERE Y name "users"')
+        self.commit()
+        self.execute('DELETE X in_group Y WHERE X login "toto", Y name "users"')
+        self.assertRaises(ValidationError, self.commit)
+        self.execute('DELETE X in_group Y WHERE X login "toto"')
+        self.execute('SET X in_group Y WHERE X login "toto", Y name "guests"')
+        self.commit()
+
+    def test_delete_required_relations_object(self):
+        self.skipTest('no sample in the schema ! YAGNI ? Kermaat ?')
+
+    def test_static_vocabulary_check(self):
+        self.assertRaises(ValidationError,
+                          self.execute,
+                          'SET X composite "whatever" WHERE X from_entity FE, FE name "CWUser", X relation_type RT, RT name "in_group"')
+
+    def test_missing_required_relations_subject_inline(self):
+        # missing in_group relation
+        self.execute('INSERT CWUser X: X login "toto", X upassword "hop"')
+        self.assertRaises(ValidationError,
+                          self.commit)
+
+    def test_composite_1(self):
+        self.execute('INSERT EmailAddress X: X address "toto@logilab.fr", X alias "hop"')
+        self.execute('INSERT EmailPart X: X content_format "text/plain", X ordernum 1, X content "this is a test"')
+        self.execute('INSERT Email X: X messageid "<1234>", X subject "test", X sender Y, X recipients Y, X parts P '
+                     'WHERE Y is EmailAddress, P is EmailPart')
+        self.failUnless(self.execute('Email X WHERE X sender Y'))
+        self.commit()
+        self.execute('DELETE Email X')
+        rset = self.execute('Any X WHERE X is EmailPart')
+        self.assertEqual(len(rset), 1)
+        self.commit()
+        rset = self.execute('Any X WHERE X is EmailPart')
+        self.assertEqual(len(rset), 0)
+
+    def test_composite_2(self):
+        self.execute('INSERT EmailAddress X: X address "toto@logilab.fr", X alias "hop"')
+        self.execute('INSERT EmailPart X: X content_format "text/plain", X ordernum 1, X content "this is a test"')
+        self.execute('INSERT Email X: X messageid "<1234>", X subject "test", X sender Y, X recipients Y, X parts P '
+                     'WHERE Y is EmailAddress, P is EmailPart')
+        self.commit()
+        self.execute('DELETE Email X')
+        self.execute('DELETE EmailPart X')
+        self.commit()
+        rset = self.execute('Any X WHERE X is EmailPart')
+        self.assertEqual(len(rset), 0)
+
+    def test_composite_redirection(self):
+        self.execute('INSERT EmailAddress X: X address "toto@logilab.fr", X alias "hop"')
+        self.execute('INSERT EmailPart X: X content_format "text/plain", X ordernum 1, X content "this is a test"')
+        self.execute('INSERT Email X: X messageid "<1234>", X subject "test", X sender Y, X recipients Y, X parts P '
+                     'WHERE Y is EmailAddress, P is EmailPart')
+        self.execute('INSERT Email X: X messageid "<2345>", X subject "test2", X sender Y, X recipients Y '
+                     'WHERE Y is EmailAddress')
+        self.commit()
+        self.execute('DELETE X parts Y WHERE X messageid "<1234>"')
+        self.execute('SET X parts Y WHERE X messageid "<2345>"')
+        self.commit()
+        rset = self.execute('Any X WHERE X is EmailPart')
+        self.assertEqual(len(rset), 1)
+        self.assertEqual(rset.get_entity(0, 0).reverse_parts[0].messageid, '<2345>')
+
+    def test_unsatisfied_constraints(self):
+        releid = self.execute('SET U in_group G WHERE G name "owners", U login "admin"')[0][0]
+        with self.assertRaises(ValidationError) as cm:
+            self.commit()
+        self.assertEqual(cm.exception.errors,
+                          {'in_group-object': u'RQLConstraint NOT O name "owners" failed'})
+
+    def test_unique_constraint(self):
+        req = self.request()
+        entity = req.create_entity('CWGroup', name=u'trout')
+        self.commit()
+        self.assertRaises(ValidationError, req.create_entity, 'CWGroup', name=u'trout')
+        self.rollback()
+        req.execute('SET X name "trout" WHERE X eid %(x)s', {'x': entity.eid})
+        self.commit()
+
+if __name__ == '__main__':
+    from logilab.common.testlib import unittest_main
+    unittest_main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/hooks/test/unittest_syncsession.py	Wed Apr 27 09:54:22 2011 +0200
@@ -0,0 +1,59 @@
+# -*- coding: utf-8 -*-
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+"""functional tests for core hooks
+
+Note:
+  syncschema.py hooks are mostly tested in server/test/unittest_migrations.py
+"""
+from __future__ import with_statement
+
+from cubicweb import ValidationError
+from cubicweb.devtools.testlib import CubicWebTC
+
+class CWPropertyHooksTC(CubicWebTC):
+
+    def test_unexistant_cwproperty(self):
+        with self.assertRaises(ValidationError) as cm:
+            self.execute('INSERT CWProperty X: X pkey "bla.bla", X value "hop", X for_user U')
+        self.assertEqual(cm.exception.errors, {'pkey-subject': 'unknown property key bla.bla'})
+        with self.assertRaises(ValidationError) as cm:
+            self.execute('INSERT CWProperty X: X pkey "bla.bla", X value "hop"')
+        self.assertEqual(cm.exception.errors, {'pkey-subject': 'unknown property key bla.bla'})
+
+    def test_site_wide_cwproperty(self):
+        with self.assertRaises(ValidationError) as cm:
+            self.execute('INSERT CWProperty X: X pkey "ui.site-title", X value "hop", X for_user U')
+        self.assertEqual(cm.exception.errors, {'for_user-subject': "site-wide property can't be set for user"})
+
+    def test_system_cwproperty(self):
+        with self.assertRaises(ValidationError) as cm:
+            self.execute('INSERT CWProperty X: X pkey "system.version.cubicweb", X value "hop", X for_user U')
+        self.assertEqual(cm.exception.errors, {'for_user-subject': "site-wide property can't be set for user"})
+
+    def test_bad_type_cwproperty(self):
+        with self.assertRaises(ValidationError) as cm:
+            self.execute('INSERT CWProperty X: X pkey "ui.language", X value "hop", X for_user U')
+        self.assertEqual(cm.exception.errors, {'value-subject': u'unauthorized value'})
+        with self.assertRaises(ValidationError) as cm:
+            self.execute('INSERT CWProperty X: X pkey "ui.language", X value "hop"')
+        self.assertEqual(cm.exception.errors, {'value-subject': u'unauthorized value'})
+
+if __name__ == '__main__':
+    from logilab.common.testlib import unittest_main
+    unittest_main()
--- a/i18n.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/i18n.py	Wed Apr 27 09:54:22 2011 +0200
@@ -80,7 +80,7 @@
     """generate .mo files for a set of languages into the `destdir` i18n directory
     """
     from logilab.common.fileutils import ensure_fs_mode
-    print '-> compiling %s catalogs...' % destdir
+    print '-> compiling message catalogs to %s' % destdir
     errors = []
     for lang in langs:
         langdir = join(destdir, lang, 'LC_MESSAGES')
--- a/i18n/de.po	Tue Apr 05 08:39:49 2011 +0200
+++ b/i18n/de.po	Wed Apr 27 09:54:22 2011 +0200
@@ -43,6 +43,13 @@
 msgstr " :"
 
 #, python-format
+msgid "\"action\" must be specified in options; allowed values are %s"
+msgstr ""
+
+msgid "\"role=subject\" or \"role=object\" must be specified in options"
+msgstr ""
+
+#, python-format
 msgid "%(attr)s set to %(newvalue)s"
 msgstr "%(attr)s geändert in %(newvalue)s"
 
@@ -127,6 +134,10 @@
 msgstr "%d&#160;Jahre"
 
 #, python-format
+msgid "%s could be supported"
+msgstr ""
+
+#, python-format
 msgid "%s error report"
 msgstr "%s Fehlerbericht"
 
@@ -135,6 +146,10 @@
 msgstr "%s unbekannt(e)"
 
 #, python-format
+msgid "%s relation should not be in mapped"
+msgstr ""
+
+#, python-format
 msgid "%s software version of the database"
 msgstr "Software-Version der Datenbank %s"
 
@@ -142,6 +157,14 @@
 msgid "%s updated"
 msgstr "%s aktualisiert"
 
+#, python-format
+msgid "'%s' action doesn't take any options"
+msgstr ""
+
+#, python-format
+msgid "'%s' action require 'linkattr' option"
+msgstr ""
+
 msgid "(UNEXISTANT EID)"
 msgstr "(EID nicht gefunden)"
 
@@ -224,9 +247,6 @@
 msgid "Attributes permissions:"
 msgstr "Rechte der Attribute"
 
-msgid "Attributes with non default permissions:"
-msgstr "Attribute mit nicht-standard-Berechtigungen"
-
 # schema pot file, generated on 2009-09-16 16:46:55
 #
 # singular and plural forms for each entity type
@@ -347,6 +367,12 @@
 msgid "CWSourceHostConfig_plural"
 msgstr ""
 
+msgid "CWSourceSchemaConfig"
+msgstr ""
+
+msgid "CWSourceSchemaConfig_plural"
+msgstr ""
+
 msgid "CWSource_plural"
 msgstr ""
 
@@ -433,6 +459,9 @@
 msgid "Decimal_plural"
 msgstr "Dezimalzahlen"
 
+msgid "Detected problems"
+msgstr ""
+
 msgid "Do you want to delete the following element(s) ?"
 msgstr "Wollen Sie das/die folgend(n) Element(e) löschen?"
 
@@ -452,8 +481,8 @@
 msgid "Entities"
 msgstr "Entitäten"
 
-msgid "Entity types"
-msgstr "Entitätstypen"
+msgid "Entity and relation supported by this source"
+msgstr ""
 
 msgid "ExternalUri"
 msgstr "Externer Uri"
@@ -485,9 +514,6 @@
 msgid "Help"
 msgstr "Hilfe"
 
-msgid "Index"
-msgstr "Index"
-
 msgid "Instance"
 msgstr "Instanz"
 
@@ -509,6 +535,9 @@
 msgid "Looked up classes"
 msgstr "gesuchte Klassen"
 
+msgid "Manage"
+msgstr ""
+
 msgid "Most referenced classes"
 msgstr "meist-referenzierte Klassen"
 
@@ -554,6 +583,9 @@
 msgid "New CWSourceHostConfig"
 msgstr ""
 
+msgid "New CWSourceSchemaConfig"
+msgstr ""
+
 msgid "New CWUniqueTogetherConstraint"
 msgstr "Neue unique-together-Einschränkung"
 
@@ -611,12 +643,6 @@
 msgid "Password_plural"
 msgstr "Passwörter"
 
-msgid "Permissions for entity types"
-msgstr "Berechtigungen für Entitätstypen"
-
-msgid "Permissions for relations"
-msgstr "Berechtigungen für Relationen"
-
 msgid "Please note that this is only a shallow copy"
 msgstr "Achtung: dies ist nur eine flache Kopie!"
 
@@ -647,9 +673,6 @@
 msgid "Registry's content"
 msgstr "Inhalt der Registry"
 
-msgid "Relation types"
-msgstr "Relationstypen"
-
 msgid "Relations"
 msgstr "Relationen"
 
@@ -666,6 +689,9 @@
 msgid "Search for"
 msgstr "Suchen"
 
+msgid "Site information"
+msgstr ""
+
 msgid "SizeConstraint"
 msgstr "Größeneinschränkung"
 
@@ -764,6 +790,9 @@
 msgid "This CWSourceHostConfig"
 msgstr ""
 
+msgid "This CWSourceSchemaConfig"
+msgstr ""
+
 msgid "This CWUniqueTogetherConstraint"
 msgstr "Diese unique-together-Einschränkung"
 
@@ -818,6 +847,9 @@
 msgid "Transition_plural"
 msgstr "Übergänge"
 
+msgid "URLs from which content will be imported. You can put one url per line"
+msgstr ""
+
 msgid "UniqueConstraint"
 msgstr "eindeutige Einschränkung"
 
@@ -1017,6 +1049,10 @@
 msgid "add WorkflowTransition transition_of Workflow object"
 msgstr "Workflow-Ãœbergang"
 
+#, python-format
+msgid "add a %s"
+msgstr ""
+
 msgctxt "inlined:CWRelation.from_entity.subject"
 msgid "add a CWEType"
 msgstr "einen Entitätstyp hinzufügen"
@@ -1029,6 +1065,12 @@
 msgid "add a CWRType"
 msgstr "einen Relationstyp hinzufügen"
 
+msgid "add a CWSource"
+msgstr ""
+
+msgid "add a CWSourceSchemaConfig"
+msgstr ""
+
 msgctxt "inlined:CWUser.use_email.subject"
 msgid "add a EmailAddress"
 msgstr "Email-Adresse hinzufügen"
@@ -1097,6 +1139,9 @@
 msgid "allow to set a specific workflow for an entity"
 msgstr "erlaube, einen bestimmten Workflow für eine Entität zu setzen"
 
+msgid "allowed options depends on the source type"
+msgstr ""
+
 msgid "allowed transitions from this state"
 msgstr "erlaubte Übergänge von diesem Zustand"
 
@@ -1122,18 +1167,6 @@
 msgid "allowed_transition_object"
 msgstr "ausstehende Zustände"
 
-msgid "am/pm calendar (month)"
-msgstr "am/pm Kalender (Monat)"
-
-msgid "am/pm calendar (semester)"
-msgstr "am/pm Kalender (Halbjahr)"
-
-msgid "am/pm calendar (week)"
-msgstr "am/pm Kalender (Woche)"
-
-msgid "am/pm calendar (year)"
-msgstr "am/pm Kalender (Jahr)"
-
 msgid "an electronic mail address associated to a short alias"
 msgstr "Eine E-Mail-Adresse wurde mit einem Alias verknüpft."
 
@@ -1159,9 +1192,6 @@
 msgid "anonymous"
 msgstr "anonym"
 
-msgid "application entities"
-msgstr "Anwendungs-Entitäten"
-
 msgid "april"
 msgstr "April"
 
@@ -1182,6 +1212,9 @@
 msgid "attribute"
 msgstr "Attribut"
 
+msgid "attribute/relation can't be mapped, only entity and relation types"
+msgstr ""
+
 msgid "august"
 msgstr "August"
 
@@ -1278,18 +1311,6 @@
 msgid "calendar"
 msgstr "Kalender anzeigen"
 
-msgid "calendar (month)"
-msgstr "Kalender (monatlich)"
-
-msgid "calendar (semester)"
-msgstr "Kalender (halbjährlich)"
-
-msgid "calendar (week)"
-msgstr "Kalender (wöchentlich)"
-
-msgid "calendar (year)"
-msgstr "Kalender (jährlich)"
-
 msgid "can not resolve entity types:"
 msgstr "Die Typen konnten nicht ermittelt werden:"
 
@@ -1303,6 +1324,9 @@
 msgid "can't change the %s attribute"
 msgstr "Kann das Attribut %s nicht ändern."
 
+msgid "can't change this relation"
+msgstr ""
+
 #, python-format
 msgid "can't connect to source %s, some data may be missing"
 msgstr "Keine Verbindung zu der Quelle %s, einige Daten könnten fehlen"
@@ -1314,6 +1338,12 @@
 msgid "can't have multiple exits on the same state"
 msgstr "Mehrere Ausgänge aus demselben Zustand nicht möglich."
 
+msgid "can't mix dontcross and maycross options"
+msgstr ""
+
+msgid "can't mix dontcross and write options"
+msgstr ""
+
 #, python-format
 msgid "can't parse %(value)r (expected %(format)s)"
 msgstr ""
@@ -1550,9 +1580,6 @@
 msgid "create an index for quick search on this attribute"
 msgstr "Erstelle einen Index zur schnellen Suche über dieses Attribut"
 
-msgid "create an index page"
-msgstr "Eine Index-Seite anlegen"
-
 msgid "created on"
 msgstr "angelegt am"
 
@@ -1820,18 +1847,18 @@
 msgid "custom_workflow_object"
 msgstr "angepasster Workflow von"
 
-msgid "cw_dont_cross"
+msgid "cw_for_source"
+msgstr ""
+
+msgctxt "CWSourceSchemaConfig"
+msgid "cw_for_source"
+msgstr ""
+
+msgid "cw_for_source_object"
 msgstr ""
 
 msgctxt "CWSource"
-msgid "cw_dont_cross"
-msgstr ""
-
-msgid "cw_dont_cross_object"
-msgstr ""
-
-msgctxt "CWRType"
-msgid "cw_dont_cross_object"
+msgid "cw_for_source_object"
 msgstr ""
 
 msgid "cw_host_config_of"
@@ -1848,18 +1875,30 @@
 msgid "cw_host_config_of_object"
 msgstr ""
 
-msgid "cw_may_cross"
+msgid "cw_schema"
+msgstr ""
+
+msgctxt "CWSourceSchemaConfig"
+msgid "cw_schema"
 msgstr ""
 
-msgctxt "CWSource"
-msgid "cw_may_cross"
+msgid "cw_schema_object"
 msgstr ""
 
-msgid "cw_may_cross_object"
+msgctxt "CWAttribute"
+msgid "cw_schema_object"
+msgstr ""
+
+msgctxt "CWEType"
+msgid "cw_schema_object"
 msgstr ""
 
 msgctxt "CWRType"
-msgid "cw_may_cross_object"
+msgid "cw_schema_object"
+msgstr ""
+
+msgctxt "CWRelation"
+msgid "cw_schema_object"
 msgstr ""
 
 msgid "cw_source"
@@ -1868,24 +1907,6 @@
 msgid "cw_source_object"
 msgstr ""
 
-msgid "cw_support"
-msgstr ""
-
-msgctxt "CWSource"
-msgid "cw_support"
-msgstr ""
-
-msgid "cw_support_object"
-msgstr ""
-
-msgctxt "CWEType"
-msgid "cw_support_object"
-msgstr ""
-
-msgctxt "CWRType"
-msgid "cw_support_object"
-msgstr ""
-
 msgid "cwetype-box"
 msgstr "Box-Ansicht"
 
@@ -1913,15 +1934,30 @@
 msgid "cwrtype-permissions"
 msgstr "Berechtigungen"
 
+msgid "cwsource-main"
+msgstr ""
+
+msgid "cwsource-mapping"
+msgstr ""
+
 msgid "cwuri"
 msgstr "interner URI"
 
 msgid "data directory url"
 msgstr "URL des Daten-Pools"
 
+msgid "data sources"
+msgstr ""
+
+msgid "data sources management"
+msgstr ""
+
 msgid "date"
 msgstr "Datum"
 
+msgid "day"
+msgstr ""
+
 msgid "deactivate"
 msgstr "deaktivieren"
 
@@ -2221,9 +2257,6 @@
 msgid "edit canceled"
 msgstr "Änderungen verwerfen"
 
-msgid "edit the index page"
-msgstr "Index-Seite bearbeiten"
-
 msgid "editable-table"
 msgstr "bearbeitbare Tabelle"
 
@@ -2248,6 +2281,9 @@
 msgid "entities deleted"
 msgstr "Entitäten gelöscht"
 
+msgid "entity and relation types can't be mapped, only attributes or relations"
+msgstr ""
+
 msgid "entity copied"
 msgstr "Entität kopiert"
 
@@ -2287,6 +2323,9 @@
 msgid "entity update"
 msgstr "Aktualisierung der Entität"
 
+msgid "error"
+msgstr ""
+
 msgid "error while embedding page"
 msgstr "Fehler beim Einbetten der Seite"
 
@@ -2503,6 +2542,9 @@
 msgid "fulltextindexed"
 msgstr "indizierter Text"
 
+msgid "gc"
+msgstr ""
+
 msgid "generic plot"
 msgstr "generischer Plot"
 
@@ -2732,6 +2774,10 @@
 msgid "inlined"
 msgstr "eingereiht"
 
+#, python-format
+msgid "inlined relation %(rtype)s of %(etype)s should be supported"
+msgstr ""
+
 msgid "instance home"
 msgstr "Startseite der Instanz"
 
@@ -2836,9 +2882,19 @@
 msgid "latest modification time of an entity"
 msgstr "Datum der letzten Änderung einer Entität"
 
+msgid "latest synchronization time"
+msgstr ""
+
 msgid "latest update on"
 msgstr "letzte Änderung am"
 
+msgid "latest_retrieval"
+msgstr ""
+
+msgctxt "CWSource"
+msgid "latest_retrieval"
+msgstr ""
+
 msgid "left"
 msgstr "links"
 
@@ -2990,6 +3046,9 @@
 msgid "monday"
 msgstr "Montag"
 
+msgid "month"
+msgstr ""
+
 msgid "more actions"
 msgstr "weitere Aktionen"
 
@@ -3179,6 +3238,10 @@
 msgid "options"
 msgstr "Optionen"
 
+msgctxt "CWSourceSchemaConfig"
+msgid "options"
+msgstr ""
+
 msgid "order"
 msgstr "Reihenfolge"
 
@@ -3219,6 +3282,16 @@
 "Notwendige Daten scheinen nicht mehr gültig zu sein. Bitte laden Sie die "
 "Seite neu und beginnen Sie von vorn."
 
+msgid "parser"
+msgstr ""
+
+msgctxt "CWSource"
+msgid "parser"
+msgstr ""
+
+msgid "parser to use to extract entities from content retrieved at given URLs."
+msgstr ""
+
 msgid "password"
 msgstr "Passwort"
 
@@ -3315,6 +3388,9 @@
 msgid "rdef-permissions"
 msgstr "Rechte"
 
+msgid "rdf"
+msgstr ""
+
 msgid "read"
 msgstr "Lesen"
 
@@ -3363,6 +3439,24 @@
 msgid "relation %(relname)s of %(ent)s"
 msgstr "Relation %(relname)s von %(ent)s"
 
+#, python-format
+msgid ""
+"relation %(rtype)s with %(etype)s as %(role)s is supported but no target "
+"type supported"
+msgstr ""
+
+#, python-format
+msgid ""
+"relation %(type)s with %(etype)s as %(role)s and target type %(target)s is "
+"mandatory but not supported"
+msgstr ""
+
+#, python-format
+msgid ""
+"relation %s is supported but none if its definitions matches supported "
+"entities"
+msgstr ""
+
 msgid "relation add"
 msgstr "Relation hinzufügen"
 
@@ -3402,7 +3496,7 @@
 
 msgctxt "CWRType"
 msgid "relations_object"
-msgstr ""
+msgstr "Relationen von"
 
 msgid "relative url of the bookmarked page"
 msgstr "URL relativ zu der Seite"
@@ -3485,9 +3579,6 @@
 msgid "saturday"
 msgstr "Samstag"
 
-msgid "schema entities"
-msgstr "Entitäten, die das Schema definieren"
-
 msgid "schema's permissions definitions"
 msgstr "Im Schema definierte Rechte"
 
@@ -3500,9 +3591,6 @@
 msgid "schema-relation-types"
 msgstr "Relationstypen"
 
-msgid "schema-security"
-msgstr "Rechte"
-
 msgid "search"
 msgstr "suchen"
 
@@ -3614,6 +3702,9 @@
 "Eine Eigenschaft für die gesamte Website kann nicht für einen Nutzer gesetzt "
 "werden."
 
+msgid "siteinfo"
+msgstr ""
+
 msgid "some errors occurred:"
 msgstr "Einige Fehler sind aufgetreten"
 
@@ -3650,6 +3741,10 @@
 msgid "specializes_object"
 msgstr "Vorgänger von"
 
+#, python-format
+msgid "specifying %s is mandatory"
+msgstr ""
+
 msgid "startup views"
 msgstr "Start-Ansichten"
 
@@ -3779,8 +3874,8 @@
 msgid "symmetric"
 msgstr "symmetrisch"
 
-msgid "system entities"
-msgstr "System-Entitäten"
+msgid "synchronization-interval must be greater than 1 minute"
+msgstr ""
 
 msgid "table"
 msgstr "Tabelle"
@@ -3812,6 +3907,9 @@
 msgid "the prefered email"
 msgstr "primäre E-Mail-Adresse"
 
+msgid "the system source has its configuration stored on the file-system"
+msgstr ""
+
 #, python-format
 msgid "the value \"%s\" is already used, use another one"
 msgstr ""
@@ -3823,9 +3921,15 @@
 msgid "this entity is currently owned by"
 msgstr "Diese Entität gehört:"
 
+msgid "this parser doesn't use a mapping"
+msgstr ""
+
 msgid "this resource does not exist"
 msgstr "cette ressource est introuvable"
 
+msgid "this source doesn't use a mapping"
+msgstr ""
+
 msgid "thursday"
 msgstr "Donnerstag"
 
@@ -3839,9 +3943,6 @@
 msgid "timestamp"
 msgstr "gültig seit"
 
-msgid "timestamp of the latest source synchronization."
-msgstr "Zeitstempel der letzten Synchronisierung mit der Quelle."
-
 msgid "timetable"
 msgstr "Zeitplan"
 
@@ -3897,6 +3998,9 @@
 msgid "to_state_object"
 msgstr "Ãœbergang zu diesem Zustand"
 
+msgid "today"
+msgstr ""
+
 msgid "todo_by"
 msgstr "zu erledigen bis"
 
@@ -4031,15 +4135,23 @@
 msgstr "(Externe) Entität nicht gefunden"
 
 #, python-format
+msgid "unknown option(s): %s"
+msgstr ""
+
+#, python-format
+msgid "unknown options %s"
+msgstr ""
+
+#, python-format
 msgid "unknown property key %s"
 msgstr "Unbekannter Eigentumsschlüssel %s"
 
+msgid "unknown source type"
+msgstr ""
+
 msgid "unknown vocabulary:"
 msgstr "Unbekanntes Wörterbuch : "
 
-msgid "up"
-msgstr "nach oben"
-
 msgid "upassword"
 msgstr "Passwort"
 
@@ -4089,6 +4201,13 @@
 msgid "uri"
 msgstr "URI"
 
+msgid "url"
+msgstr ""
+
+msgctxt "CWSource"
+msgid "url"
+msgstr ""
+
 msgid "use template languages"
 msgstr "Verwenden Sie Templating-Sprachen"
 
@@ -4154,6 +4273,12 @@
 msgid "users"
 msgstr "Nutzer"
 
+msgid "users and groups"
+msgstr ""
+
+msgid "users and groups management"
+msgstr ""
+
 msgid "users using this bookmark"
 msgstr "Nutzer, die dieses Lesezeichen verwenden"
 
@@ -4236,6 +4361,9 @@
 msgid "visible"
 msgstr "sichtbar"
 
+msgid "warning"
+msgstr ""
+
 msgid "we are not yet ready to handle this query"
 msgstr "Momentan können wir diese sparql-Anfrage noch nicht ausführen."
 
@@ -4330,12 +4458,86 @@
 msgid "you have been logged out"
 msgstr "Sie sind jetzt abgemeldet."
 
+#, python-format
+msgid "you may want to specify something for %s"
+msgstr ""
+
 msgid "you should probably delete that property"
 msgstr "Sie sollten diese Eigenschaft wahrscheinlich löschen."
 
+#, python-format
+msgid "you should un-inline relation %s which is supported and may be crossed "
+msgstr ""
+
+#~ msgid "Attributes with non default permissions:"
+#~ msgstr "Attribute mit nicht-standard-Berechtigungen"
+
+#~ msgid "Entity types"
+#~ msgstr "Entitätstypen"
+
+#~ msgid "Index"
+#~ msgstr "Index"
+
+#~ msgid "Permissions for entity types"
+#~ msgstr "Berechtigungen für Entitätstypen"
+
+#~ msgid "Permissions for relations"
+#~ msgstr "Berechtigungen für Relationen"
+
+#~ msgid "Relation types"
+#~ msgstr "Relationstypen"
+
+#~ msgid "am/pm calendar (month)"
+#~ msgstr "am/pm Kalender (Monat)"
+
+#~ msgid "am/pm calendar (semester)"
+#~ msgstr "am/pm Kalender (Halbjahr)"
+
+#~ msgid "am/pm calendar (week)"
+#~ msgstr "am/pm Kalender (Woche)"
+
+#~ msgid "am/pm calendar (year)"
+#~ msgstr "am/pm Kalender (Jahr)"
+
+#~ msgid "application entities"
+#~ msgstr "Anwendungs-Entitäten"
+
+#~ msgid "calendar (month)"
+#~ msgstr "Kalender (monatlich)"
+
+#~ msgid "calendar (semester)"
+#~ msgstr "Kalender (halbjährlich)"
+
+#~ msgid "calendar (week)"
+#~ msgstr "Kalender (wöchentlich)"
+
+#~ msgid "calendar (year)"
+#~ msgstr "Kalender (jährlich)"
+
 #~ msgid ""
 #~ "can't set inlined=%(inlined)s, %(stype)s %(rtype)s %(otype)s has "
 #~ "cardinality=%(card)s"
 #~ msgstr ""
-#~ "Kann 'inlined' = %(inlined)s nicht zuweisen, %(stype)s %(rtype)s "
-#~ "%(otype)s hat die Kardinalität %(card)s"
+#~ "Kann 'inlined' = %(inlined)s nicht zuweisen, %(stype)s %(rtype)s %(otype)"
+#~ "s hat die Kardinalität %(card)s"
+
+#~ msgid "create an index page"
+#~ msgstr "Eine Index-Seite anlegen"
+
+#~ msgid "edit the index page"
+#~ msgstr "Index-Seite bearbeiten"
+
+#~ msgid "schema entities"
+#~ msgstr "Entitäten, die das Schema definieren"
+
+#~ msgid "schema-security"
+#~ msgstr "Rechte"
+
+#~ msgid "system entities"
+#~ msgstr "System-Entitäten"
+
+#~ msgid "timestamp of the latest source synchronization."
+#~ msgstr "Zeitstempel der letzten Synchronisierung mit der Quelle."
+
+#~ msgid "up"
+#~ msgstr "nach oben"
--- a/i18n/en.po	Tue Apr 05 08:39:49 2011 +0200
+++ b/i18n/en.po	Wed Apr 27 09:54:22 2011 +0200
@@ -35,6 +35,13 @@
 msgstr ":"
 
 #, python-format
+msgid "\"action\" must be specified in options; allowed values are %s"
+msgstr ""
+
+msgid "\"role=subject\" or \"role=object\" must be specified in options"
+msgstr ""
+
+#, python-format
 msgid "%(attr)s set to %(newvalue)s"
 msgstr ""
 
@@ -119,6 +126,10 @@
 msgstr ""
 
 #, python-format
+msgid "%s could be supported"
+msgstr ""
+
+#, python-format
 msgid "%s error report"
 msgstr ""
 
@@ -127,6 +138,10 @@
 msgstr ""
 
 #, python-format
+msgid "%s relation should not be in mapped"
+msgstr ""
+
+#, python-format
 msgid "%s software version of the database"
 msgstr ""
 
@@ -134,6 +149,14 @@
 msgid "%s updated"
 msgstr ""
 
+#, python-format
+msgid "'%s' action doesn't take any options"
+msgstr ""
+
+#, python-format
+msgid "'%s' action require 'linkattr' option"
+msgstr ""
+
 msgid "(UNEXISTANT EID)"
 msgstr ""
 
@@ -213,9 +236,6 @@
 msgid "Attributes permissions:"
 msgstr ""
 
-msgid "Attributes with non default permissions:"
-msgstr ""
-
 # schema pot file, generated on 2009-09-16 16:46:55
 #
 # singular and plural forms for each entity type
@@ -331,10 +351,16 @@
 msgstr "Data source"
 
 msgid "CWSourceHostConfig"
-msgstr "Host configuration"
+msgstr "Source host configuration"
 
 msgid "CWSourceHostConfig_plural"
-msgstr "Host configurations"
+msgstr "Source host configurations"
+
+msgid "CWSourceSchemaConfig"
+msgstr "Source schema configuration"
+
+msgid "CWSourceSchemaConfig_plural"
+msgstr "Source schema configurations"
 
 msgid "CWSource_plural"
 msgstr "Data sources"
@@ -409,6 +435,9 @@
 msgid "Decimal_plural"
 msgstr "Decimal numbers"
 
+msgid "Detected problems"
+msgstr ""
+
 msgid "Do you want to delete the following element(s) ?"
 msgstr ""
 
@@ -428,7 +457,7 @@
 msgid "Entities"
 msgstr ""
 
-msgid "Entity types"
+msgid "Entity and relation supported by this source"
 msgstr ""
 
 msgid "ExternalUri"
@@ -461,9 +490,6 @@
 msgid "Help"
 msgstr ""
 
-msgid "Index"
-msgstr ""
-
 msgid "Instance"
 msgstr ""
 
@@ -485,6 +511,9 @@
 msgid "Looked up classes"
 msgstr ""
 
+msgid "Manage"
+msgstr ""
+
 msgid "Most referenced classes"
 msgstr ""
 
@@ -528,7 +557,10 @@
 msgstr "New source"
 
 msgid "New CWSourceHostConfig"
-msgstr "New host configuration"
+msgstr "New source host configuration"
+
+msgid "New CWSourceSchemaConfig"
+msgstr "New source schema configuration"
 
 msgid "New CWUniqueTogetherConstraint"
 msgstr "New unicity constraint"
@@ -585,12 +617,6 @@
 msgid "Password_plural"
 msgstr "Passwords"
 
-msgid "Permissions for entity types"
-msgstr ""
-
-msgid "Permissions for relations"
-msgstr ""
-
 msgid "Please note that this is only a shallow copy"
 msgstr ""
 
@@ -621,9 +647,6 @@
 msgid "Registry's content"
 msgstr ""
 
-msgid "Relation types"
-msgstr ""
-
 msgid "Relations"
 msgstr ""
 
@@ -640,6 +663,9 @@
 msgid "Search for"
 msgstr ""
 
+msgid "Site information"
+msgstr ""
+
 msgid "SizeConstraint"
 msgstr "size constraint"
 
@@ -736,7 +762,10 @@
 msgstr "This data source"
 
 msgid "This CWSourceHostConfig"
-msgstr "This host configuration"
+msgstr "This source host configuration"
+
+msgid "This CWSourceSchemaConfig"
+msgstr "This source schema configuration"
 
 msgid "This CWUniqueTogetherConstraint"
 msgstr "This unicity constraint"
@@ -792,6 +821,9 @@
 msgid "Transition_plural"
 msgstr "Transitions"
 
+msgid "URLs from which content will be imported. You can put one url per line"
+msgstr ""
+
 msgid "UniqueConstraint"
 msgstr "unique constraint"
 
@@ -977,6 +1009,10 @@
 msgid "add WorkflowTransition transition_of Workflow object"
 msgstr "workflow-transition"
 
+#, python-format
+msgid "add a %s"
+msgstr ""
+
 msgctxt "inlined:CWRelation.from_entity.subject"
 msgid "add a CWEType"
 msgstr "add an entity type"
@@ -989,6 +1025,12 @@
 msgid "add a CWRType"
 msgstr "add a relation type"
 
+msgid "add a CWSource"
+msgstr "add a source"
+
+msgid "add a CWSourceSchemaConfig"
+msgstr "add an item to mapping "
+
 msgctxt "inlined:CWUser.use_email.subject"
 msgid "add a EmailAddress"
 msgstr "add an email address"
@@ -1055,6 +1097,9 @@
 msgid "allow to set a specific workflow for an entity"
 msgstr ""
 
+msgid "allowed options depends on the source type"
+msgstr ""
+
 msgid "allowed transitions from this state"
 msgstr ""
 
@@ -1080,18 +1125,6 @@
 msgid "allowed_transition_object"
 msgstr "incoming states"
 
-msgid "am/pm calendar (month)"
-msgstr ""
-
-msgid "am/pm calendar (semester)"
-msgstr ""
-
-msgid "am/pm calendar (week)"
-msgstr ""
-
-msgid "am/pm calendar (year)"
-msgstr ""
-
 msgid "an electronic mail address associated to a short alias"
 msgstr ""
 
@@ -1116,9 +1149,6 @@
 msgid "anonymous"
 msgstr ""
 
-msgid "application entities"
-msgstr ""
-
 msgid "april"
 msgstr ""
 
@@ -1137,6 +1167,9 @@
 msgid "attribute"
 msgstr ""
 
+msgid "attribute/relation can't be mapped, only entity and relation types"
+msgstr ""
+
 msgid "august"
 msgstr ""
 
@@ -1233,18 +1266,6 @@
 msgid "calendar"
 msgstr ""
 
-msgid "calendar (month)"
-msgstr ""
-
-msgid "calendar (semester)"
-msgstr ""
-
-msgid "calendar (week)"
-msgstr ""
-
-msgid "calendar (year)"
-msgstr ""
-
 msgid "can not resolve entity types:"
 msgstr ""
 
@@ -1258,6 +1279,9 @@
 msgid "can't change the %s attribute"
 msgstr ""
 
+msgid "can't change this relation"
+msgstr ""
+
 #, python-format
 msgid "can't connect to source %s, some data may be missing"
 msgstr ""
@@ -1269,6 +1293,12 @@
 msgid "can't have multiple exits on the same state"
 msgstr ""
 
+msgid "can't mix dontcross and maycross options"
+msgstr ""
+
+msgid "can't mix dontcross and write options"
+msgstr ""
+
 #, python-format
 msgid "can't parse %(value)r (expected %(format)s)"
 msgstr ""
@@ -1399,11 +1429,11 @@
 
 msgctxt "CWSource"
 msgid "config"
-msgstr ""
+msgstr "configuration"
 
 msgctxt "CWSourceHostConfig"
 msgid "config"
-msgstr ""
+msgstr "configuration"
 
 msgid "config mode"
 msgstr ""
@@ -1503,9 +1533,6 @@
 msgid "create an index for quick search on this attribute"
 msgstr ""
 
-msgid "create an index page"
-msgstr ""
-
 msgid "created on"
 msgstr ""
 
@@ -1775,71 +1802,65 @@
 msgid "custom_workflow_object"
 msgstr "custom workflow of"
 
-msgid "cw_dont_cross"
-msgstr ""
+msgid "cw_for_source"
+msgstr "for source"
+
+msgctxt "CWSourceSchemaConfig"
+msgid "cw_for_source"
+msgstr "for source"
+
+msgid "cw_for_source_object"
+msgstr "mapping"
 
 msgctxt "CWSource"
-msgid "cw_dont_cross"
-msgstr ""
-
-msgid "cw_dont_cross_object"
-msgstr ""
-
-msgctxt "CWRType"
-msgid "cw_dont_cross_object"
-msgstr ""
+msgid "cw_for_source_object"
+msgstr "mapping"
 
 msgid "cw_host_config_of"
-msgstr ""
+msgstr "source"
 
 msgctxt "CWSourceHostConfig"
 msgid "cw_host_config_of"
-msgstr ""
+msgstr "source"
 
 msgid "cw_host_config_of_object"
-msgstr ""
+msgstr "host configuration"
 
 msgctxt "CWSource"
 msgid "cw_host_config_of_object"
-msgstr ""
-
-msgid "cw_may_cross"
-msgstr ""
-
-msgctxt "CWSource"
-msgid "cw_may_cross"
-msgstr ""
-
-msgid "cw_may_cross_object"
-msgstr ""
+msgstr "host configuration"
+
+msgid "cw_schema"
+msgstr "maps"
+
+msgctxt "CWSourceSchemaConfig"
+msgid "cw_schema"
+msgstr "maps"
+
+msgid "cw_schema_object"
+msgstr "mapped by"
+
+msgctxt "CWAttribute"
+msgid "cw_schema_object"
+msgstr "mapped by"
+
+msgctxt "CWEType"
+msgid "cw_schema_object"
+msgstr "mapped by"
 
 msgctxt "CWRType"
-msgid "cw_may_cross_object"
-msgstr ""
+msgid "cw_schema_object"
+msgstr "mapped by"
+
+msgctxt "CWRelation"
+msgid "cw_schema_object"
+msgstr "mapped by"
 
 msgid "cw_source"
-msgstr ""
+msgstr "source"
 
 msgid "cw_source_object"
-msgstr ""
-
-msgid "cw_support"
-msgstr ""
-
-msgctxt "CWSource"
-msgid "cw_support"
-msgstr ""
-
-msgid "cw_support_object"
-msgstr ""
-
-msgctxt "CWEType"
-msgid "cw_support_object"
-msgstr ""
-
-msgctxt "CWRType"
-msgid "cw_support_object"
-msgstr ""
+msgstr "contains entities"
 
 msgid "cwetype-box"
 msgstr "\"box\" view"
@@ -1868,15 +1889,30 @@
 msgid "cwrtype-permissions"
 msgstr "permissions"
 
+msgid "cwsource-main"
+msgstr "description"
+
+msgid "cwsource-mapping"
+msgstr "mapping"
+
 msgid "cwuri"
 msgstr "internal uri"
 
 msgid "data directory url"
 msgstr ""
 
+msgid "data sources"
+msgstr ""
+
+msgid "data sources management"
+msgstr ""
+
 msgid "date"
 msgstr ""
 
+msgid "day"
+msgstr ""
+
 msgid "deactivate"
 msgstr ""
 
@@ -2166,9 +2202,6 @@
 msgid "edit canceled"
 msgstr ""
 
-msgid "edit the index page"
-msgstr ""
-
 msgid "editable-table"
 msgstr ""
 
@@ -2193,6 +2226,9 @@
 msgid "entities deleted"
 msgstr ""
 
+msgid "entity and relation types can't be mapped, only attributes or relations"
+msgstr ""
+
 msgid "entity copied"
 msgstr ""
 
@@ -2231,6 +2267,9 @@
 msgid "entity update"
 msgstr ""
 
+msgid "error"
+msgstr ""
+
 msgid "error while embedding page"
 msgstr ""
 
@@ -2340,11 +2379,11 @@
 
 msgctxt "CWEType"
 msgid "final"
-msgstr ""
+msgstr "final"
 
 msgctxt "CWRType"
 msgid "final"
-msgstr ""
+msgstr "final"
 
 msgid "first name"
 msgstr ""
@@ -2354,7 +2393,7 @@
 
 msgctxt "CWUser"
 msgid "firstname"
-msgstr ""
+msgstr "firstname"
 
 msgid "foaf"
 msgstr ""
@@ -2445,6 +2484,9 @@
 msgid "fulltextindexed"
 msgstr "fulltext indexed"
 
+msgid "gc"
+msgstr "memory leak"
+
 msgid "generic plot"
 msgstr ""
 
@@ -2657,6 +2699,10 @@
 msgid "inlined"
 msgstr "inlined"
 
+#, python-format
+msgid "inlined relation %(rtype)s of %(etype)s should be supported"
+msgstr ""
+
 msgid "instance home"
 msgstr ""
 
@@ -2671,7 +2717,7 @@
 
 msgctxt "CWAttribute"
 msgid "internationalizable"
-msgstr ""
+msgstr "internationalizable"
 
 #, python-format
 msgid "invalid action %r"
@@ -2756,9 +2802,19 @@
 msgid "latest modification time of an entity"
 msgstr ""
 
+msgid "latest synchronization time"
+msgstr ""
+
 msgid "latest update on"
 msgstr ""
 
+msgid "latest_retrieval"
+msgstr "latest retrieval"
+
+msgctxt "CWSource"
+msgid "latest_retrieval"
+msgstr "latest retrieval"
+
 msgid "left"
 msgstr ""
 
@@ -2836,7 +2892,7 @@
 
 msgctxt "RQLExpression"
 msgid "mainvars"
-msgstr "main vars"
+msgstr "main variables"
 
 msgid "manage"
 msgstr ""
@@ -2860,11 +2916,11 @@
 msgstr ""
 
 msgid "match_host"
-msgstr ""
+msgstr "match host"
 
 msgctxt "CWSourceHostConfig"
 msgid "match_host"
-msgstr ""
+msgstr "match host"
 
 msgid "maximum number of characters in short description"
 msgstr ""
@@ -2903,6 +2959,9 @@
 msgid "monday"
 msgstr ""
 
+msgid "month"
+msgstr ""
+
 msgid "more actions"
 msgstr ""
 
@@ -2928,15 +2987,15 @@
 
 msgctxt "CWConstraintType"
 msgid "name"
-msgstr ""
+msgstr "name"
 
 msgctxt "CWEType"
 msgid "name"
-msgstr ""
+msgstr "name"
 
 msgctxt "CWGroup"
 msgid "name"
-msgstr ""
+msgstr "name"
 
 msgctxt "CWPermission"
 msgid "name"
@@ -2948,7 +3007,7 @@
 
 msgctxt "CWSource"
 msgid "name"
-msgstr ""
+msgstr "name"
 
 msgctxt "State"
 msgid "name"
@@ -2956,15 +3015,15 @@
 
 msgctxt "Transition"
 msgid "name"
-msgstr ""
+msgstr "name"
 
 msgctxt "Workflow"
 msgid "name"
-msgstr ""
+msgstr "name"
 
 msgctxt "WorkflowTransition"
 msgid "name"
-msgstr ""
+msgstr "name"
 
 msgid "name of the cache"
 msgstr ""
@@ -3090,6 +3149,10 @@
 msgid "options"
 msgstr ""
 
+msgctxt "CWSourceSchemaConfig"
+msgid "options"
+msgstr "options"
+
 msgid "order"
 msgstr ""
 
@@ -3129,6 +3192,16 @@
 msgstr ""
 "some necessary data seem expired, please reload the page and try again."
 
+msgid "parser"
+msgstr ""
+
+msgctxt "CWSource"
+msgid "parser"
+msgstr "parser"
+
+msgid "parser to use to extract entities from content retrieved at given URLs."
+msgstr ""
+
 msgid "password"
 msgstr ""
 
@@ -3225,6 +3298,9 @@
 msgid "rdef-permissions"
 msgstr "permissions"
 
+msgid "rdf"
+msgstr ""
+
 msgid "read"
 msgstr ""
 
@@ -3273,6 +3349,24 @@
 msgid "relation %(relname)s of %(ent)s"
 msgstr ""
 
+#, python-format
+msgid ""
+"relation %(rtype)s with %(etype)s as %(role)s is supported but no target "
+"type supported"
+msgstr ""
+
+#, python-format
+msgid ""
+"relation %(type)s with %(etype)s as %(role)s and target type %(target)s is "
+"mandatory but not supported"
+msgstr ""
+
+#, python-format
+msgid ""
+"relation %s is supported but none if its definitions matches supported "
+"entities"
+msgstr ""
+
 msgid "relation add"
 msgstr ""
 
@@ -3302,7 +3396,7 @@
 
 msgctxt "CWUniqueTogetherConstraint"
 msgid "relations"
-msgstr ""
+msgstr "relations"
 
 msgid "relations deleted"
 msgstr ""
@@ -3392,9 +3486,6 @@
 msgid "saturday"
 msgstr ""
 
-msgid "schema entities"
-msgstr ""
-
 msgid "schema's permissions definitions"
 msgstr ""
 
@@ -3407,9 +3498,6 @@
 msgid "schema-relation-types"
 msgstr "relations"
 
-msgid "schema-security"
-msgstr "permissions"
-
 msgid "search"
 msgstr ""
 
@@ -3516,6 +3604,9 @@
 msgid "site-wide property can't be set for user"
 msgstr ""
 
+msgid "siteinfo"
+msgstr "site information"
+
 msgid "some errors occurred:"
 msgstr ""
 
@@ -3550,6 +3641,10 @@
 msgid "specializes_object"
 msgstr "specialized by"
 
+#, python-format
+msgid "specifying %s is mandatory"
+msgstr ""
+
 msgid "startup views"
 msgstr ""
 
@@ -3614,7 +3709,7 @@
 
 msgctxt "WorkflowTransition"
 msgid "subworkflow"
-msgstr ""
+msgstr "subworkflow"
 
 msgid ""
 "subworkflow isn't a workflow for the same types as the transition's workflow"
@@ -3675,7 +3770,7 @@
 msgid "symmetric"
 msgstr "symmetric"
 
-msgid "system entities"
+msgid "synchronization-interval must be greater than 1 minute"
 msgstr ""
 
 msgid "table"
@@ -3708,6 +3803,9 @@
 msgid "the prefered email"
 msgstr ""
 
+msgid "the system source has its configuration stored on the file-system"
+msgstr ""
+
 #, python-format
 msgid "the value \"%s\" is already used, use another one"
 msgstr ""
@@ -3718,9 +3816,15 @@
 msgid "this entity is currently owned by"
 msgstr ""
 
+msgid "this parser doesn't use a mapping"
+msgstr ""
+
 msgid "this resource does not exist"
 msgstr ""
 
+msgid "this source doesn't use a mapping"
+msgstr ""
+
 msgid "thursday"
 msgstr ""
 
@@ -3734,9 +3838,6 @@
 msgid "timestamp"
 msgstr "timestamp"
 
-msgid "timestamp of the latest source synchronization."
-msgstr ""
-
 msgid "timetable"
 msgstr ""
 
@@ -3745,7 +3846,7 @@
 
 msgctxt "Bookmark"
 msgid "title"
-msgstr ""
+msgstr "title"
 
 msgid "to"
 msgstr ""
@@ -3792,6 +3893,9 @@
 msgid "to_state_object"
 msgstr "transitions to this state"
 
+msgid "today"
+msgstr ""
+
 msgid "todo_by"
 msgstr "to do by"
 
@@ -3858,7 +3962,7 @@
 
 msgctxt "CWSource"
 msgid "type"
-msgstr ""
+msgstr "type"
 
 msgctxt "Transition"
 msgid "type"
@@ -3926,15 +4030,23 @@
 msgstr ""
 
 #, python-format
+msgid "unknown option(s): %s"
+msgstr ""
+
+#, python-format
+msgid "unknown options %s"
+msgstr ""
+
+#, python-format
 msgid "unknown property key %s"
 msgstr ""
 
+msgid "unknown source type"
+msgstr ""
+
 msgid "unknown vocabulary:"
 msgstr ""
 
-msgid "up"
-msgstr ""
-
 msgid "upassword"
 msgstr "password"
 
@@ -3982,7 +4094,14 @@
 
 msgctxt "ExternalUri"
 msgid "uri"
-msgstr ""
+msgstr "uri"
+
+msgid "url"
+msgstr ""
+
+msgctxt "CWSource"
+msgid "url"
+msgstr "url"
 
 msgid "use template languages"
 msgstr ""
@@ -4040,6 +4159,12 @@
 msgid "users"
 msgstr ""
 
+msgid "users and groups"
+msgstr ""
+
+msgid "users and groups management"
+msgstr ""
+
 msgid "users using this bookmark"
 msgstr ""
 
@@ -4120,6 +4245,9 @@
 msgid "visible"
 msgstr ""
 
+msgid "warning"
+msgstr ""
+
 msgid "we are not yet ready to handle this query"
 msgstr ""
 
@@ -4212,5 +4340,20 @@
 msgid "you have been logged out"
 msgstr ""
 
+#, python-format
+msgid "you may want to specify something for %s"
+msgstr ""
+
 msgid "you should probably delete that property"
 msgstr ""
+
+#, python-format
+msgid "you should un-inline relation %s which is supported and may be crossed "
+msgstr ""
+
+#~ msgctxt "CWAttribute"
+#~ msgid "relations_object"
+#~ msgstr "constrained by"
+
+#~ msgid "schema-security"
+#~ msgstr "permissions"
--- a/i18n/es.po	Tue Apr 05 08:39:49 2011 +0200
+++ b/i18n/es.po	Wed Apr 27 09:54:22 2011 +0200
@@ -41,6 +41,13 @@
 msgstr ":"
 
 #, python-format
+msgid "\"action\" must be specified in options; allowed values are %s"
+msgstr ""
+
+msgid "\"role=subject\" or \"role=object\" must be specified in options"
+msgstr ""
+
+#, python-format
 msgid "%(attr)s set to %(newvalue)s"
 msgstr "%(attr)s modificado a %(newvalue)s"
 
@@ -125,6 +132,10 @@
 msgstr "%d&#160;años"
 
 #, python-format
+msgid "%s could be supported"
+msgstr ""
+
+#, python-format
 msgid "%s error report"
 msgstr "%s reporte de errores"
 
@@ -133,6 +144,10 @@
 msgstr "%s no estimado(s)"
 
 #, python-format
+msgid "%s relation should not be in mapped"
+msgstr ""
+
+#, python-format
 msgid "%s software version of the database"
 msgstr "versión sistema de la base para %s"
 
@@ -140,6 +155,14 @@
 msgid "%s updated"
 msgstr "%s actualizado"
 
+#, python-format
+msgid "'%s' action doesn't take any options"
+msgstr ""
+
+#, python-format
+msgid "'%s' action require 'linkattr' option"
+msgstr ""
+
 msgid "(UNEXISTANT EID)"
 msgstr "(EID INEXISTENTE"
 
@@ -222,9 +245,6 @@
 msgid "Attributes permissions:"
 msgstr "Permisos de atributos:"
 
-msgid "Attributes with non default permissions:"
-msgstr "Atributos con permisos no estándares"
-
 # schema pot file, generated on 2009-09-16 16:46:55
 #
 # singular and plural forms for each entity type
@@ -345,6 +365,12 @@
 msgid "CWSourceHostConfig_plural"
 msgstr ""
 
+msgid "CWSourceSchemaConfig"
+msgstr ""
+
+msgid "CWSourceSchemaConfig_plural"
+msgstr ""
+
 msgid "CWSource_plural"
 msgstr ""
 
@@ -430,6 +456,9 @@
 msgid "Decimal_plural"
 msgstr "Decimales"
 
+msgid "Detected problems"
+msgstr ""
+
 msgid "Do you want to delete the following element(s) ?"
 msgstr "Desea eliminar el(los) elemento(s) siguiente(s)"
 
@@ -449,8 +478,8 @@
 msgid "Entities"
 msgstr "Entidades"
 
-msgid "Entity types"
-msgstr "Tipos de entidades"
+msgid "Entity and relation supported by this source"
+msgstr ""
 
 msgid "ExternalUri"
 msgstr "Uri externo"
@@ -482,9 +511,6 @@
 msgid "Help"
 msgstr "Ayuda"
 
-msgid "Index"
-msgstr "Ãndice"
-
 msgid "Instance"
 msgstr "Instancia"
 
@@ -506,6 +532,9 @@
 msgid "Looked up classes"
 msgstr "Clases buscadas"
 
+msgid "Manage"
+msgstr ""
+
 msgid "Most referenced classes"
 msgstr "Clases más referenciadas"
 
@@ -551,6 +580,9 @@
 msgid "New CWSourceHostConfig"
 msgstr ""
 
+msgid "New CWSourceSchemaConfig"
+msgstr ""
+
 msgid "New CWUniqueTogetherConstraint"
 msgstr ""
 
@@ -606,12 +638,6 @@
 msgid "Password_plural"
 msgstr "Contraseñas"
 
-msgid "Permissions for entity types"
-msgstr "Permisos por tipos de entidad"
-
-msgid "Permissions for relations"
-msgstr "Permisos por las relaciones"
-
 msgid "Please note that this is only a shallow copy"
 msgstr "Recuerde que sólo es una copia superficial"
 
@@ -642,9 +668,6 @@
 msgid "Registry's content"
 msgstr "Contenido del registro"
 
-msgid "Relation types"
-msgstr "Tipos de relación"
-
 msgid "Relations"
 msgstr "Relaciones"
 
@@ -661,6 +684,9 @@
 msgid "Search for"
 msgstr "Buscar"
 
+msgid "Site information"
+msgstr ""
+
 msgid "SizeConstraint"
 msgstr "Restricción de tamaño"
 
@@ -759,6 +785,9 @@
 msgid "This CWSourceHostConfig"
 msgstr ""
 
+msgid "This CWSourceSchemaConfig"
+msgstr ""
+
 msgid "This CWUniqueTogetherConstraint"
 msgstr ""
 
@@ -813,6 +842,9 @@
 msgid "Transition_plural"
 msgstr "Transiciones"
 
+msgid "URLs from which content will be imported. You can put one url per line"
+msgstr ""
+
 msgid "UniqueConstraint"
 msgstr "Restricción de Unicidad"
 
@@ -1019,6 +1051,10 @@
 msgid "add WorkflowTransition transition_of Workflow object"
 msgstr "Transición Workflow"
 
+#, python-format
+msgid "add a %s"
+msgstr ""
+
 msgctxt "inlined:CWRelation.from_entity.subject"
 msgid "add a CWEType"
 msgstr "Agregar un tipo de entidad"
@@ -1031,6 +1067,12 @@
 msgid "add a CWRType"
 msgstr "Agregar un tipo de relación"
 
+msgid "add a CWSource"
+msgstr ""
+
+msgid "add a CWSourceSchemaConfig"
+msgstr ""
+
 msgctxt "inlined:CWUser.use_email.subject"
 msgid "add a EmailAddress"
 msgstr "Agregar correo electrónico"
@@ -1099,6 +1141,9 @@
 msgid "allow to set a specific workflow for an entity"
 msgstr "permite definir un Workflow específico para una entidad"
 
+msgid "allowed options depends on the source type"
+msgstr ""
+
 msgid "allowed transitions from this state"
 msgstr "transiciones autorizadas desde este estado"
 
@@ -1124,18 +1169,6 @@
 msgid "allowed_transition_object"
 msgstr "transición autorizada de"
 
-msgid "am/pm calendar (month)"
-msgstr "calendario am/pm (mes)"
-
-msgid "am/pm calendar (semester)"
-msgstr "calendario am/pm (semestre)"
-
-msgid "am/pm calendar (week)"
-msgstr "calendario am/pm (semana)"
-
-msgid "am/pm calendar (year)"
-msgstr "calendario am/pm (año)"
-
 msgid "an electronic mail address associated to a short alias"
 msgstr "una dirección electrónica asociada a este alias"
 
@@ -1160,9 +1193,6 @@
 msgid "anonymous"
 msgstr "anónimo"
 
-msgid "application entities"
-msgstr "Entidades de la aplicación"
-
 msgid "april"
 msgstr "Abril"
 
@@ -1183,6 +1213,9 @@
 msgid "attribute"
 msgstr "Atributo"
 
+msgid "attribute/relation can't be mapped, only entity and relation types"
+msgstr ""
+
 msgid "august"
 msgstr "Agosto"
 
@@ -1279,18 +1312,6 @@
 msgid "calendar"
 msgstr "mostrar un calendario"
 
-msgid "calendar (month)"
-msgstr "calendario (mensual)"
-
-msgid "calendar (semester)"
-msgstr "calendario (semestral)"
-
-msgid "calendar (week)"
-msgstr "calendario (semanal)"
-
-msgid "calendar (year)"
-msgstr "calendario (anual)"
-
 msgid "can not resolve entity types:"
 msgstr "Imposible de interpretar los tipos de entidades:"
 
@@ -1304,6 +1325,9 @@
 msgid "can't change the %s attribute"
 msgstr "no puede modificar el atributo %s"
 
+msgid "can't change this relation"
+msgstr ""
+
 #, python-format
 msgid "can't connect to source %s, some data may be missing"
 msgstr "no se puede conectar a la fuente %s, algunos datos pueden faltar"
@@ -1315,6 +1339,12 @@
 msgid "can't have multiple exits on the same state"
 msgstr "no puede tener varias salidas en el mismo estado"
 
+msgid "can't mix dontcross and maycross options"
+msgstr ""
+
+msgid "can't mix dontcross and write options"
+msgstr ""
+
 #, python-format
 msgid "can't parse %(value)r (expected %(format)s)"
 msgstr "no puede analizar %(value)r (formato requerido : %(format)s)"
@@ -1324,8 +1354,8 @@
 "can't set inlined=True, %(stype)s %(rtype)s %(otype)s has cardinality="
 "%(card)s"
 msgstr ""
-"no puede poner 'inlined' = True, %(stype)s %(rtype)s %(otype)s "
- "tiene cardinalidad %(card)s"
+"no puede poner 'inlined' = True, %(stype)s %(rtype)s %(otype)s tiene "
+"cardinalidad %(card)s"
 
 msgid "cancel"
 msgstr ""
@@ -1560,9 +1590,6 @@
 msgid "create an index for quick search on this attribute"
 msgstr "Crear un índice para accelerar las búsquedas sobre este atributo"
 
-msgid "create an index page"
-msgstr "Crear una página de inicio"
-
 msgid "created on"
 msgstr "creado el"
 
@@ -1839,18 +1866,18 @@
 msgid "custom_workflow_object"
 msgstr "Workflow de"
 
-msgid "cw_dont_cross"
+msgid "cw_for_source"
+msgstr ""
+
+msgctxt "CWSourceSchemaConfig"
+msgid "cw_for_source"
+msgstr ""
+
+msgid "cw_for_source_object"
 msgstr ""
 
 msgctxt "CWSource"
-msgid "cw_dont_cross"
-msgstr ""
-
-msgid "cw_dont_cross_object"
-msgstr ""
-
-msgctxt "CWRType"
-msgid "cw_dont_cross_object"
+msgid "cw_for_source_object"
 msgstr ""
 
 msgid "cw_host_config_of"
@@ -1867,18 +1894,30 @@
 msgid "cw_host_config_of_object"
 msgstr ""
 
-msgid "cw_may_cross"
+msgid "cw_schema"
+msgstr ""
+
+msgctxt "CWSourceSchemaConfig"
+msgid "cw_schema"
 msgstr ""
 
-msgctxt "CWSource"
-msgid "cw_may_cross"
+msgid "cw_schema_object"
 msgstr ""
 
-msgid "cw_may_cross_object"
+msgctxt "CWAttribute"
+msgid "cw_schema_object"
+msgstr ""
+
+msgctxt "CWEType"
+msgid "cw_schema_object"
 msgstr ""
 
 msgctxt "CWRType"
-msgid "cw_may_cross_object"
+msgid "cw_schema_object"
+msgstr ""
+
+msgctxt "CWRelation"
+msgid "cw_schema_object"
 msgstr ""
 
 msgid "cw_source"
@@ -1887,24 +1926,6 @@
 msgid "cw_source_object"
 msgstr ""
 
-msgid "cw_support"
-msgstr ""
-
-msgctxt "CWSource"
-msgid "cw_support"
-msgstr ""
-
-msgid "cw_support_object"
-msgstr ""
-
-msgctxt "CWEType"
-msgid "cw_support_object"
-msgstr ""
-
-msgctxt "CWRType"
-msgid "cw_support_object"
-msgstr ""
-
 msgid "cwetype-box"
 msgstr "Vista \"caja\""
 
@@ -1932,15 +1953,30 @@
 msgid "cwrtype-permissions"
 msgstr "Permisos"
 
+msgid "cwsource-main"
+msgstr ""
+
+msgid "cwsource-mapping"
+msgstr ""
+
 msgid "cwuri"
 msgstr "Uri Interna"
 
 msgid "data directory url"
 msgstr "Url del repertorio de datos"
 
+msgid "data sources"
+msgstr ""
+
+msgid "data sources management"
+msgstr ""
+
 msgid "date"
 msgstr "Fecha"
 
+msgid "day"
+msgstr ""
+
 msgid "deactivate"
 msgstr "Desactivar"
 
@@ -2250,9 +2286,6 @@
 msgid "edit canceled"
 msgstr "Edición cancelada"
 
-msgid "edit the index page"
-msgstr "Modificar la página de inicio"
-
 msgid "editable-table"
 msgstr "Tabla modificable"
 
@@ -2277,6 +2310,9 @@
 msgid "entities deleted"
 msgstr "Entidades eliminadas"
 
+msgid "entity and relation types can't be mapped, only attributes or relations"
+msgstr ""
+
 msgid "entity copied"
 msgstr "Entidad copiada"
 
@@ -2317,6 +2353,9 @@
 msgid "entity update"
 msgstr "Actualización de la Entidad"
 
+msgid "error"
+msgstr ""
+
 msgid "error while embedding page"
 msgstr "Error durante la inclusión de la página"
 
@@ -2534,6 +2573,9 @@
 msgid "fulltextindexed"
 msgstr "Texto indexado"
 
+msgid "gc"
+msgstr ""
+
 msgid "generic plot"
 msgstr "Gráfica Genérica"
 
@@ -2762,6 +2804,10 @@
 msgid "inlined"
 msgstr "Inlined"
 
+#, python-format
+msgid "inlined relation %(rtype)s of %(etype)s should be supported"
+msgstr ""
+
 msgid "instance home"
 msgstr "Repertorio de la Instancia"
 
@@ -2865,9 +2911,19 @@
 msgid "latest modification time of an entity"
 msgstr "Fecha de la última modificación de una entidad "
 
+msgid "latest synchronization time"
+msgstr ""
+
 msgid "latest update on"
 msgstr "Actualizado el"
 
+msgid "latest_retrieval"
+msgstr ""
+
+msgctxt "CWSource"
+msgid "latest_retrieval"
+msgstr ""
+
 msgid "left"
 msgstr "izquierda"
 
@@ -3017,6 +3073,9 @@
 msgid "monday"
 msgstr "Lunes"
 
+msgid "month"
+msgstr ""
+
 msgid "more actions"
 msgstr "Más acciones"
 
@@ -3206,6 +3265,10 @@
 msgid "options"
 msgstr "Opciones"
 
+msgctxt "CWSourceSchemaConfig"
+msgid "options"
+msgstr ""
+
 msgid "order"
 msgstr "Orden"
 
@@ -3244,6 +3307,16 @@
 msgid "pageid-not-found"
 msgstr "Página no encontrada."
 
+msgid "parser"
+msgstr ""
+
+msgctxt "CWSource"
+msgid "parser"
+msgstr ""
+
+msgid "parser to use to extract entities from content retrieved at given URLs."
+msgstr ""
+
 msgid "password"
 msgstr "Contraseña"
 
@@ -3340,6 +3413,9 @@
 msgid "rdef-permissions"
 msgstr "Permisos"
 
+msgid "rdf"
+msgstr ""
+
 msgid "read"
 msgstr "Lectura"
 
@@ -3388,6 +3464,24 @@
 msgid "relation %(relname)s of %(ent)s"
 msgstr "relación %(relname)s de %(ent)s"
 
+#, python-format
+msgid ""
+"relation %(rtype)s with %(etype)s as %(role)s is supported but no target "
+"type supported"
+msgstr ""
+
+#, python-format
+msgid ""
+"relation %(type)s with %(etype)s as %(role)s and target type %(target)s is "
+"mandatory but not supported"
+msgstr ""
+
+#, python-format
+msgid ""
+"relation %s is supported but none if its definitions matches supported "
+"entities"
+msgstr ""
+
 msgid "relation add"
 msgstr "Agregar Relación"
 
@@ -3511,9 +3605,6 @@
 msgid "saturday"
 msgstr "Sábado"
 
-msgid "schema entities"
-msgstr "Entidades del esquema"
-
 msgid "schema's permissions definitions"
 msgstr "Definiciones de permisos del esquema"
 
@@ -3526,9 +3617,6 @@
 msgid "schema-relation-types"
 msgstr "Relaciones"
 
-msgid "schema-security"
-msgstr "Seguridad"
-
 msgid "search"
 msgstr "Buscar"
 
@@ -3639,6 +3727,9 @@
 msgid "site-wide property can't be set for user"
 msgstr "Una propiedad específica al Sistema no puede ser propia al usuario"
 
+msgid "siteinfo"
+msgstr ""
+
 msgid "some errors occurred:"
 msgstr "Algunos errores encontrados :"
 
@@ -3674,6 +3765,10 @@
 msgid "specializes_object"
 msgstr "Especializado por"
 
+#, python-format
+msgid "specifying %s is mandatory"
+msgstr ""
+
 msgid "startup views"
 msgstr "Vistas de inicio"
 
@@ -3803,8 +3898,8 @@
 msgid "symmetric"
 msgstr "Simétrico"
 
-msgid "system entities"
-msgstr "Entidades del sistema"
+msgid "synchronization-interval must be greater than 1 minute"
+msgstr ""
 
 msgid "table"
 msgstr "Tabla"
@@ -3836,6 +3931,9 @@
 msgid "the prefered email"
 msgstr "Dirección principal de email"
 
+msgid "the system source has its configuration stored on the file-system"
+msgstr ""
+
 #, python-format
 msgid "the value \"%s\" is already used, use another one"
 msgstr "El valor \"%s\" ya esta en uso, favor de utilizar otro"
@@ -3846,9 +3944,15 @@
 msgid "this entity is currently owned by"
 msgstr "Esta Entidad es propiedad de"
 
+msgid "this parser doesn't use a mapping"
+msgstr ""
+
 msgid "this resource does not exist"
 msgstr "Este recurso no existe"
 
+msgid "this source doesn't use a mapping"
+msgstr ""
+
 msgid "thursday"
 msgstr "Jueves"
 
@@ -3862,9 +3966,6 @@
 msgid "timestamp"
 msgstr "Válido desde"
 
-msgid "timestamp of the latest source synchronization."
-msgstr "Fecha de la última sincronización de la fuente."
-
 msgid "timetable"
 msgstr "Tablero de tiempos"
 
@@ -3920,6 +4021,9 @@
 msgid "to_state_object"
 msgstr "Transición hacia este Estado"
 
+msgid "today"
+msgstr ""
+
 msgid "todo_by"
 msgstr "Asignada a"
 
@@ -4054,15 +4158,23 @@
 msgstr "Entidad externa desconocida"
 
 #, python-format
+msgid "unknown option(s): %s"
+msgstr ""
+
+#, python-format
+msgid "unknown options %s"
+msgstr ""
+
+#, python-format
 msgid "unknown property key %s"
 msgstr "Clave de Propiedad desconocida: %s"
 
+msgid "unknown source type"
+msgstr ""
+
 msgid "unknown vocabulary:"
 msgstr "Vocabulario desconocido: "
 
-msgid "up"
-msgstr "Arriba"
-
 msgid "upassword"
 msgstr "Contraseña"
 
@@ -4112,6 +4224,13 @@
 msgid "uri"
 msgstr "URI"
 
+msgid "url"
+msgstr ""
+
+msgctxt "CWSource"
+msgid "url"
+msgstr ""
+
 msgid "use template languages"
 msgstr "Utilizar plantillas de lenguaje"
 
@@ -4177,6 +4296,12 @@
 msgid "users"
 msgstr "Usuarios"
 
+msgid "users and groups"
+msgstr ""
+
+msgid "users and groups management"
+msgstr ""
+
 msgid "users using this bookmark"
 msgstr "Usuarios utilizando este Favorito"
 
@@ -4257,6 +4382,9 @@
 msgid "visible"
 msgstr "Visible"
 
+msgid "warning"
+msgstr ""
+
 msgid "we are not yet ready to handle this query"
 msgstr "Aún no podemos manejar este tipo de consulta Sparql"
 
@@ -4352,5 +4480,79 @@
 msgid "you have been logged out"
 msgstr "Ha terminado la sesión"
 
+#, python-format
+msgid "you may want to specify something for %s"
+msgstr ""
+
 msgid "you should probably delete that property"
 msgstr "Debería probablamente suprimir esta propriedad"
+
+#, python-format
+msgid "you should un-inline relation %s which is supported and may be crossed "
+msgstr ""
+
+#~ msgid "Attributes with non default permissions:"
+#~ msgstr "Atributos con permisos no estándares"
+
+#~ msgid "Entity types"
+#~ msgstr "Tipos de entidades"
+
+#~ msgid "Index"
+#~ msgstr "Ãndice"
+
+#~ msgid "Permissions for entity types"
+#~ msgstr "Permisos por tipos de entidad"
+
+#~ msgid "Permissions for relations"
+#~ msgstr "Permisos por las relaciones"
+
+#~ msgid "Relation types"
+#~ msgstr "Tipos de relación"
+
+#~ msgid "am/pm calendar (month)"
+#~ msgstr "calendario am/pm (mes)"
+
+#~ msgid "am/pm calendar (semester)"
+#~ msgstr "calendario am/pm (semestre)"
+
+#~ msgid "am/pm calendar (week)"
+#~ msgstr "calendario am/pm (semana)"
+
+#~ msgid "am/pm calendar (year)"
+#~ msgstr "calendario am/pm (año)"
+
+#~ msgid "application entities"
+#~ msgstr "Entidades de la aplicación"
+
+#~ msgid "calendar (month)"
+#~ msgstr "calendario (mensual)"
+
+#~ msgid "calendar (semester)"
+#~ msgstr "calendario (semestral)"
+
+#~ msgid "calendar (week)"
+#~ msgstr "calendario (semanal)"
+
+#~ msgid "calendar (year)"
+#~ msgstr "calendario (anual)"
+
+#~ msgid "create an index page"
+#~ msgstr "Crear una página de inicio"
+
+#~ msgid "edit the index page"
+#~ msgstr "Modificar la página de inicio"
+
+#~ msgid "schema entities"
+#~ msgstr "Entidades del esquema"
+
+#~ msgid "schema-security"
+#~ msgstr "Seguridad"
+
+#~ msgid "system entities"
+#~ msgstr "Entidades del sistema"
+
+#~ msgid "timestamp of the latest source synchronization."
+#~ msgstr "Fecha de la última sincronización de la fuente."
+
+#~ msgid "up"
+#~ msgstr "Arriba"
--- a/i18n/fr.po	Tue Apr 05 08:39:49 2011 +0200
+++ b/i18n/fr.po	Wed Apr 27 09:54:22 2011 +0200
@@ -40,6 +40,16 @@
 msgstr " :"
 
 #, python-format
+msgid "\"action\" must be specified in options; allowed values are %s"
+msgstr ""
+"\"action\" doit être specifié dans les options; les valeurs autorisées "
+"sont : %s"
+
+msgid "\"role=subject\" or \"role=object\" must be specified in options"
+msgstr ""
+"\"role=subject\" ou \"role=object\" doit être specifié dans les options"
+
+#, python-format
 msgid "%(attr)s set to %(newvalue)s"
 msgstr "%(attr)s modifié à %(newvalue)s"
 
@@ -124,6 +134,10 @@
 msgstr "%d&#160;années"
 
 #, python-format
+msgid "%s could be supported"
+msgstr "%s pourrait être supporté"
+
+#, python-format
 msgid "%s error report"
 msgstr "%s rapport d'erreur"
 
@@ -132,6 +146,10 @@
 msgstr "%s non estimé(s)"
 
 #, python-format
+msgid "%s relation should not be in mapped"
+msgstr "la relation %s ne devrait pas ếtre mappé"
+
+#, python-format
 msgid "%s software version of the database"
 msgstr "version logicielle de la base pour %s"
 
@@ -139,6 +157,14 @@
 msgid "%s updated"
 msgstr "%s mis à jour"
 
+#, python-format
+msgid "'%s' action doesn't take any options"
+msgstr "l'action '%s' ne prend pas d'option"
+
+#, python-format
+msgid "'%s' action require 'linkattr' option"
+msgstr "l'action '%s' nécessite une option 'linkattr'"
+
 msgid "(UNEXISTANT EID)"
 msgstr "(EID INTROUVABLE)"
 
@@ -220,9 +246,6 @@
 msgid "Attributes permissions:"
 msgstr "Permissions des attributs"
 
-msgid "Attributes with non default permissions:"
-msgstr "Attributs ayant des permissions non-standard"
-
 # schema pot file, generated on 2009-09-16 16:46:55
 #
 # singular and plural forms for each entity type
@@ -343,6 +366,12 @@
 msgid "CWSourceHostConfig_plural"
 msgstr "Configurations de source"
 
+msgid "CWSourceSchemaConfig"
+msgstr "Configuration de schéma de source"
+
+msgid "CWSourceSchemaConfig_plural"
+msgstr "Configurations de schéma de source"
+
 msgid "CWSource_plural"
 msgstr "Source de données"
 
@@ -428,6 +457,9 @@
 msgid "Decimal_plural"
 msgstr "Nombres décimaux"
 
+msgid "Detected problems"
+msgstr "Problèmes détectés"
+
 msgid "Do you want to delete the following element(s) ?"
 msgstr "Voulez-vous supprimer le(s) élément(s) suivant(s) ?"
 
@@ -447,8 +479,8 @@
 msgid "Entities"
 msgstr "entités"
 
-msgid "Entity types"
-msgstr "Types d'entités"
+msgid "Entity and relation supported by this source"
+msgstr "Entités et relations supportés par cette source"
 
 msgid "ExternalUri"
 msgstr "Uri externe"
@@ -480,9 +512,6 @@
 msgid "Help"
 msgstr "Aide"
 
-msgid "Index"
-msgstr "Index"
-
 msgid "Instance"
 msgstr "Instance"
 
@@ -504,6 +533,9 @@
 msgid "Looked up classes"
 msgstr "Classes recherchées"
 
+msgid "Manage"
+msgstr "Administration"
+
 msgid "Most referenced classes"
 msgstr "Classes les plus référencées"
 
@@ -549,6 +581,9 @@
 msgid "New CWSourceHostConfig"
 msgstr "Nouvelle configuration de source"
 
+msgid "New CWSourceSchemaConfig"
+msgstr "Nouvelle partie de mapping de source"
+
 msgid "New CWUniqueTogetherConstraint"
 msgstr "Nouvelle contrainte unique_together"
 
@@ -604,12 +639,6 @@
 msgid "Password_plural"
 msgstr "Mots de passe"
 
-msgid "Permissions for entity types"
-msgstr "Permissions pour les types d'entités"
-
-msgid "Permissions for relations"
-msgstr "Permissions pour les relations"
-
 msgid "Please note that this is only a shallow copy"
 msgstr "Attention, cela n'effectue qu'une copie de surface"
 
@@ -640,9 +669,6 @@
 msgid "Registry's content"
 msgstr "Contenu du registre"
 
-msgid "Relation types"
-msgstr "Types de relation"
-
 msgid "Relations"
 msgstr "Relations"
 
@@ -659,6 +685,9 @@
 msgid "Search for"
 msgstr "Rechercher"
 
+msgid "Site information"
+msgstr "Information du site"
+
 msgid "SizeConstraint"
 msgstr "contrainte de taille"
 
@@ -760,6 +789,9 @@
 msgid "This CWSourceHostConfig"
 msgstr "Cette configuration de source"
 
+msgid "This CWSourceSchemaConfig"
+msgstr "Cette partie de mapping de source"
+
 msgid "This CWUniqueTogetherConstraint"
 msgstr "Cette contrainte unique_together"
 
@@ -814,6 +846,11 @@
 msgid "Transition_plural"
 msgstr "Transitions"
 
+msgid "URLs from which content will be imported. You can put one url per line"
+msgstr ""
+"URLs depuis lesquelles le contenu sera importé. Vous pouvez mettre une URL "
+"par ligne."
+
 msgid "UniqueConstraint"
 msgstr "contrainte d'unicité"
 
@@ -1020,6 +1057,10 @@
 msgid "add WorkflowTransition transition_of Workflow object"
 msgstr "transition workflow"
 
+#, python-format
+msgid "add a %s"
+msgstr "ajouter un %s"
+
 msgctxt "inlined:CWRelation.from_entity.subject"
 msgid "add a CWEType"
 msgstr "ajouter un type d'entité sujet"
@@ -1032,6 +1073,12 @@
 msgid "add a CWRType"
 msgstr "ajouter un type de relation"
 
+msgid "add a CWSource"
+msgstr "ajouter une source"
+
+msgid "add a CWSourceSchemaConfig"
+msgstr "ajouter une partie de mapping"
+
 msgctxt "inlined:CWUser.use_email.subject"
 msgid "add a EmailAddress"
 msgstr "ajouter une adresse électronique"
@@ -1100,6 +1147,9 @@
 msgid "allow to set a specific workflow for an entity"
 msgstr "permet de spécifier un workflow donné pour une entité"
 
+msgid "allowed options depends on the source type"
+msgstr "les options autorisées dépendent du type de la source"
+
 msgid "allowed transitions from this state"
 msgstr "transitions autorisées depuis cet état"
 
@@ -1125,18 +1175,6 @@
 msgid "allowed_transition_object"
 msgstr "transition autorisée de"
 
-msgid "am/pm calendar (month)"
-msgstr "calendrier am/pm (mois)"
-
-msgid "am/pm calendar (semester)"
-msgstr "calendrier am/pm (semestre)"
-
-msgid "am/pm calendar (week)"
-msgstr "calendrier am/pm (semaine)"
-
-msgid "am/pm calendar (year)"
-msgstr "calendrier am/pm (année)"
-
 msgid "an electronic mail address associated to a short alias"
 msgstr "une adresse électronique associée à un alias"
 
@@ -1161,9 +1199,6 @@
 msgid "anonymous"
 msgstr "anonyme"
 
-msgid "application entities"
-msgstr "entités applicatives"
-
 msgid "april"
 msgstr "avril"
 
@@ -1184,6 +1219,11 @@
 msgid "attribute"
 msgstr "attribut"
 
+msgid "attribute/relation can't be mapped, only entity and relation types"
+msgstr ""
+"les attributs et relations ne peuvent être mappés, uniquement les types "
+"d'entité et de relation"
+
 msgid "august"
 msgstr "août"
 
@@ -1281,18 +1321,6 @@
 msgid "calendar"
 msgstr "afficher un calendrier"
 
-msgid "calendar (month)"
-msgstr "calendrier (mensuel)"
-
-msgid "calendar (semester)"
-msgstr "calendrier (semestriel)"
-
-msgid "calendar (week)"
-msgstr "calendrier (hebdo)"
-
-msgid "calendar (year)"
-msgstr "calendrier (annuel)"
-
 msgid "can not resolve entity types:"
 msgstr "impossible d'interpréter les types d'entités :"
 
@@ -1306,6 +1334,9 @@
 msgid "can't change the %s attribute"
 msgstr "ne peut changer l'attribut %s"
 
+msgid "can't change this relation"
+msgstr "ne peut modifier cette relation"
+
 #, python-format
 msgid "can't connect to source %s, some data may be missing"
 msgstr "ne peut se connecter à la source %s, des données peuvent manquer"
@@ -1317,6 +1348,12 @@
 msgid "can't have multiple exits on the same state"
 msgstr "ne peut avoir plusieurs sorties sur le même état"
 
+msgid "can't mix dontcross and maycross options"
+msgstr "ne peut mélanger dontcross et maycross options"
+
+msgid "can't mix dontcross and write options"
+msgstr "ne peut mélanger dontcross et write options"
+
 #, python-format
 msgid "can't parse %(value)r (expected %(format)s)"
 msgstr "ne peut analyser %(value)r (format attendu : %(format)s)"
@@ -1326,8 +1363,8 @@
 "can't set inlined=True, %(stype)s %(rtype)s %(otype)s has cardinality="
 "%(card)s"
 msgstr ""
-"ne peut mettre 'inlined'=Vrai, %(stype)s %(rtype)s %(otype)s a "
-"pour cardinalité %(card)s"
+"ne peut mettre 'inlined'=Vrai, %(stype)s %(rtype)s %(otype)s a pour "
+"cardinalité %(card)s"
 
 msgid "cancel"
 msgstr "annuler"
@@ -1563,9 +1600,6 @@
 msgid "create an index for quick search on this attribute"
 msgstr "créer un index pour accélérer les recherches sur cet attribut"
 
-msgid "create an index page"
-msgstr "créer une page d'accueil"
-
 msgid "created on"
 msgstr "créé le"
 
@@ -1845,19 +1879,19 @@
 msgid "custom_workflow_object"
 msgstr "workflow de"
 
-msgid "cw_dont_cross"
-msgstr "don't cross"
+msgid "cw_for_source"
+msgstr "source"
+
+msgctxt "CWSourceSchemaConfig"
+msgid "cw_for_source"
+msgstr "source"
+
+msgid "cw_for_source_object"
+msgstr "élément de mapping"
 
 msgctxt "CWSource"
-msgid "cw_dont_cross"
-msgstr "don't cross"
-
-msgid "cw_dont_cross_object"
-msgstr "can't be crossed with"
-
-msgctxt "CWRType"
-msgid "cw_dont_cross_object"
-msgstr "can't be crossed with"
+msgid "cw_for_source_object"
+msgstr "élément de mapping"
 
 msgid "cw_host_config_of"
 msgstr "host configuration of"
@@ -1873,19 +1907,31 @@
 msgid "cw_host_config_of_object"
 msgstr "has host configuration"
 
-msgid "cw_may_cross"
-msgstr "may cross"
-
-msgctxt "CWSource"
-msgid "cw_may_cross"
-msgstr "may cross"
-
-msgid "cw_may_cross_object"
-msgstr "may be crossed with"
+msgid "cw_schema"
+msgstr "schéma"
+
+msgctxt "CWSourceSchemaConfig"
+msgid "cw_schema"
+msgstr "schéma"
+
+msgid "cw_schema_object"
+msgstr "mappé par"
+
+msgctxt "CWAttribute"
+msgid "cw_schema_object"
+msgstr "mappé par"
+
+msgctxt "CWEType"
+msgid "cw_schema_object"
+msgstr "mappé par"
 
 msgctxt "CWRType"
-msgid "cw_may_cross_object"
-msgstr "may be crossed with"
+msgid "cw_schema_object"
+msgstr "mappé par"
+
+msgctxt "CWRelation"
+msgid "cw_schema_object"
+msgstr "mappé par"
 
 msgid "cw_source"
 msgstr "from data source"
@@ -1893,24 +1939,6 @@
 msgid "cw_source_object"
 msgstr "entities"
 
-msgid "cw_support"
-msgstr "support"
-
-msgctxt "CWSource"
-msgid "cw_support"
-msgstr "support"
-
-msgid "cw_support_object"
-msgstr "supported by"
-
-msgctxt "CWEType"
-msgid "cw_support_object"
-msgstr "supported by"
-
-msgctxt "CWRType"
-msgid "cw_support_object"
-msgstr "supported by"
-
 msgid "cwetype-box"
 msgstr "vue \"boîte\""
 
@@ -1938,15 +1966,30 @@
 msgid "cwrtype-permissions"
 msgstr "permissions"
 
+msgid "cwsource-main"
+msgstr "description"
+
+msgid "cwsource-mapping"
+msgstr "mapping"
+
 msgid "cwuri"
 msgstr "uri interne"
 
 msgid "data directory url"
 msgstr "url du répertoire de données"
 
+msgid "data sources"
+msgstr "sources de données"
+
+msgid "data sources management"
+msgstr "gestion des sources de données"
+
 msgid "date"
 msgstr "date"
 
+msgid "day"
+msgstr "jour"
+
 msgid "deactivate"
 msgstr "désactiver"
 
@@ -2252,9 +2295,6 @@
 msgid "edit canceled"
 msgstr "édition annulée"
 
-msgid "edit the index page"
-msgstr "éditer la page d'accueil"
-
 msgid "editable-table"
 msgstr "table éditable"
 
@@ -2279,6 +2319,11 @@
 msgid "entities deleted"
 msgstr "entités supprimées"
 
+msgid "entity and relation types can't be mapped, only attributes or relations"
+msgstr ""
+"les types d'entités et de relations ne peuvent être mappés, uniquement les "
+"relations"
+
 msgid "entity copied"
 msgstr "entité copiée"
 
@@ -2318,6 +2363,9 @@
 msgid "entity update"
 msgstr "mise à jour d'entité"
 
+msgid "error"
+msgstr "erreur"
+
 msgid "error while embedding page"
 msgstr "erreur pendant l'inclusion de la page"
 
@@ -2535,6 +2583,9 @@
 msgid "fulltextindexed"
 msgstr "texte indexé"
 
+msgid "gc"
+msgstr "fuite mémoire"
+
 msgid "generic plot"
 msgstr "tracé de courbes standard"
 
@@ -2763,6 +2814,12 @@
 msgid "inlined"
 msgstr "mise en ligne"
 
+#, python-format
+msgid "inlined relation %(rtype)s of %(etype)s should be supported"
+msgstr ""
+"la relation %(rtype)s du type d'entité %(etype)s doit être supportée "
+"('inlined')"
+
 msgid "instance home"
 msgstr "répertoire de l'instance"
 
@@ -2867,9 +2924,19 @@
 msgid "latest modification time of an entity"
 msgstr "date de dernière modification d'une entité"
 
+msgid "latest synchronization time"
+msgstr "date de la dernière synchronisation"
+
 msgid "latest update on"
 msgstr "dernière mise à jour"
 
+msgid "latest_retrieval"
+msgstr "dernière synchronisation"
+
+msgctxt "CWSource"
+msgid "latest_retrieval"
+msgstr "date de la dernière synchronisation de la source."
+
 msgid "left"
 msgstr "gauche"
 
@@ -3019,6 +3086,9 @@
 msgid "monday"
 msgstr "lundi"
 
+msgid "month"
+msgstr "mois"
+
 msgid "more actions"
 msgstr "plus d'actions"
 
@@ -3208,6 +3278,10 @@
 msgid "options"
 msgstr "options"
 
+msgctxt "CWSourceSchemaConfig"
+msgid "options"
+msgstr "options"
+
 msgid "order"
 msgstr "ordre"
 
@@ -3248,6 +3322,18 @@
 "des données nécessaires semblent expirées, veuillez recharger la page et "
 "recommencer."
 
+msgid "parser"
+msgstr "parseur"
+
+msgctxt "CWSource"
+msgid "parser"
+msgstr "parseur"
+
+msgid "parser to use to extract entities from content retrieved at given URLs."
+msgstr ""
+"parseur à utiliser pour extraire entités et relations du contenu récupéré "
+"aux URLs données"
+
 msgid "password"
 msgstr "mot de passe"
 
@@ -3344,6 +3430,9 @@
 msgid "rdef-permissions"
 msgstr "permissions"
 
+msgid "rdf"
+msgstr "rdf"
+
 msgid "read"
 msgstr "lecture"
 
@@ -3393,6 +3482,30 @@
 msgid "relation %(relname)s of %(ent)s"
 msgstr "relation %(relname)s de %(ent)s"
 
+#, python-format
+msgid ""
+"relation %(rtype)s with %(etype)s as %(role)s is supported but no target "
+"type supported"
+msgstr ""
+"la relation %(rtype)s avec %(etype)s comme %(role)s est supportée mais aucun "
+"type cible n'est supporté"
+
+#, python-format
+msgid ""
+"relation %(type)s with %(etype)s as %(role)s and target type %(target)s is "
+"mandatory but not supported"
+msgstr ""
+"la relation %(rtype)s avec %(etype)s comme %(role)s est obligatoire mais non "
+"supportée"
+
+#, python-format
+msgid ""
+"relation %s is supported but none if its definitions matches supported "
+"entities"
+msgstr ""
+"la relation %s est supportée mais aucune de ses définitions ne correspondent "
+"aux types d'entités supportés"
+
 msgid "relation add"
 msgstr "ajout de relation"
 
@@ -3517,9 +3630,6 @@
 msgid "saturday"
 msgstr "samedi"
 
-msgid "schema entities"
-msgstr "entités définissant le schéma"
-
 msgid "schema's permissions definitions"
 msgstr "permissions définies dans le schéma"
 
@@ -3532,9 +3642,6 @@
 msgid "schema-relation-types"
 msgstr "types de relations"
 
-msgid "schema-security"
-msgstr "permissions"
-
 msgid "search"
 msgstr "rechercher"
 
@@ -3644,6 +3751,9 @@
 msgid "site-wide property can't be set for user"
 msgstr "une propriété spécifique au site ne peut être propre à un utilisateur"
 
+msgid "siteinfo"
+msgstr "informations"
+
 msgid "some errors occurred:"
 msgstr "des erreurs sont survenues"
 
@@ -3682,6 +3792,10 @@
 msgid "specializes_object"
 msgstr "parent de"
 
+#, python-format
+msgid "specifying %s is mandatory"
+msgstr "spécifier %s est obligatoire"
+
 msgid "startup views"
 msgstr "vues de départ"
 
@@ -3811,8 +3925,8 @@
 msgid "symmetric"
 msgstr "symétrique"
 
-msgid "system entities"
-msgstr "entités systèmes"
+msgid "synchronization-interval must be greater than 1 minute"
+msgstr "synchronization-interval doit être supérieur à 1 minute"
 
 msgid "table"
 msgstr "table"
@@ -3844,6 +3958,9 @@
 msgid "the prefered email"
 msgstr "l'adresse électronique principale"
 
+msgid "the system source has its configuration stored on the file-system"
+msgstr "la source système a sa configuration stockée sur le système de fichier"
+
 #, python-format
 msgid "the value \"%s\" is already used, use another one"
 msgstr "la valeur \"%s\" est déjà utilisée, veuillez utiliser une autre valeur"
@@ -3855,9 +3972,15 @@
 msgid "this entity is currently owned by"
 msgstr "cette entité appartient à"
 
+msgid "this parser doesn't use a mapping"
+msgstr "ce parseur n'utilise pas de mapping"
+
 msgid "this resource does not exist"
 msgstr "cette ressource est introuvable"
 
+msgid "this source doesn't use a mapping"
+msgstr "cette source n'utilise pas de mapping"
+
 msgid "thursday"
 msgstr "jeudi"
 
@@ -3871,9 +3994,6 @@
 msgid "timestamp"
 msgstr "valide depuis"
 
-msgid "timestamp of the latest source synchronization."
-msgstr "date de la dernière synchronisation avec la source."
-
 msgid "timetable"
 msgstr "emploi du temps"
 
@@ -3929,6 +4049,9 @@
 msgid "to_state_object"
 msgstr "transition vers cet état"
 
+msgid "today"
+msgstr "aujourd'hui"
+
 msgid "todo_by"
 msgstr "à faire par"
 
@@ -4063,15 +4186,23 @@
 msgstr "entité (externe) introuvable"
 
 #, python-format
+msgid "unknown option(s): %s"
+msgstr "option(s) inconnue(s) : %s"
+
+#, python-format
+msgid "unknown options %s"
+msgstr "options inconnues : %s"
+
+#, python-format
 msgid "unknown property key %s"
 msgstr "clé de propriété inconnue : %s"
 
+msgid "unknown source type"
+msgstr "type de source inconnu"
+
 msgid "unknown vocabulary:"
 msgstr "vocabulaire inconnu : "
 
-msgid "up"
-msgstr "haut"
-
 msgid "upassword"
 msgstr "mot de passe"
 
@@ -4121,6 +4252,13 @@
 msgid "uri"
 msgstr "uri"
 
+msgid "url"
+msgstr "url"
+
+msgctxt "CWSource"
+msgid "url"
+msgstr "url"
+
 msgid "use template languages"
 msgstr "utiliser les langages de template"
 
@@ -4184,6 +4322,12 @@
 msgid "users"
 msgstr "utilisateurs"
 
+msgid "users and groups"
+msgstr "utilisateurs et groupes"
+
+msgid "users and groups management"
+msgstr "gestion des utilisateurs et groupes"
+
 msgid "users using this bookmark"
 msgstr "utilisateurs utilisant ce signet"
 
@@ -4264,6 +4408,9 @@
 msgid "visible"
 msgstr "visible"
 
+msgid "warning"
+msgstr "attention"
+
 msgid "we are not yet ready to handle this query"
 msgstr ""
 "nous ne sommes pas capable de gérer ce type de requête sparql pour le moment"
@@ -4272,7 +4419,7 @@
 msgstr "mercredi"
 
 msgid "week"
-msgstr "sem."
+msgstr "semaine"
 
 #, python-format
 msgid "welcome %s !"
@@ -4360,5 +4507,75 @@
 msgid "you have been logged out"
 msgstr "vous avez été déconnecté"
 
+#, python-format
+msgid "you may want to specify something for %s"
+msgstr "vous désirez peut-être spécifié quelque chose pour la relation %s"
+
 msgid "you should probably delete that property"
 msgstr "vous devriez probablement supprimer cette propriété"
+
+#, python-format
+msgid "you should un-inline relation %s which is supported and may be crossed "
+msgstr ""
+"vous devriez enlevé la mise en ligne de la relation %s qui est supportée et "
+"peut-être croisée"
+
+#~ msgid "Attributes with non default permissions:"
+#~ msgstr "Attributs ayant des permissions non-standard"
+
+#~ msgid "Entity types"
+#~ msgstr "Types d'entités"
+
+#~ msgid "Permissions for entity types"
+#~ msgstr "Permissions pour les types d'entités"
+
+#~ msgid "Permissions for relations"
+#~ msgstr "Permissions pour les relations"
+
+#~ msgid "Relation types"
+#~ msgstr "Types de relation"
+
+#~ msgid "am/pm calendar (month)"
+#~ msgstr "calendrier am/pm (mois)"
+
+#~ msgid "am/pm calendar (semester)"
+#~ msgstr "calendrier am/pm (semestre)"
+
+#~ msgid "am/pm calendar (week)"
+#~ msgstr "calendrier am/pm (semaine)"
+
+#~ msgid "am/pm calendar (year)"
+#~ msgstr "calendrier am/pm (année)"
+
+#~ msgid "application entities"
+#~ msgstr "entités applicatives"
+
+#~ msgid "calendar (month)"
+#~ msgstr "calendrier (mensuel)"
+
+#~ msgid "calendar (semester)"
+#~ msgstr "calendrier (semestriel)"
+
+#~ msgid "calendar (week)"
+#~ msgstr "calendrier (hebdo)"
+
+#~ msgid "calendar (year)"
+#~ msgstr "calendrier (annuel)"
+
+#~ msgid "create an index page"
+#~ msgstr "créer une page d'accueil"
+
+#~ msgid "edit the index page"
+#~ msgstr "éditer la page d'accueil"
+
+#~ msgid "schema entities"
+#~ msgstr "entités définissant le schéma"
+
+#~ msgid "schema-security"
+#~ msgstr "permissions"
+
+#~ msgid "system entities"
+#~ msgstr "entités systèmes"
+
+#~ msgid "timestamp of the latest source synchronization."
+#~ msgstr "date de la dernière synchronisation avec la source."
--- a/misc/migration/3.10.0_Any.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/misc/migration/3.10.0_Any.py	Wed Apr 27 09:54:22 2011 +0200
@@ -5,7 +5,7 @@
 for uri, cfg in config.sources().items():
     if uri in ('system', 'admin'):
         continue
-    repo.sources_by_uri[uri] = repo.get_source(cfg['adapter'], uri, cfg)
+    repo.sources_by_uri[uri] = repo.get_source(cfg['adapter'], uri, cfg.copy())
 
 add_entity_type('CWSource')
 add_relation_definition('CWSource', 'cw_source', 'CWSource')
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/misc/migration/3.11.0_Any.py	Wed Apr 27 09:54:22 2011 +0200
@@ -0,0 +1,85 @@
+from datetime import datetime
+
+for rtype in ('cw_support', 'cw_dont_cross', 'cw_may_cross'):
+    drop_relation_type(rtype)
+
+add_entity_type('CWSourceSchemaConfig')
+
+if not 'url' in schema['CWSource'].subjrels:
+    add_attribute('CWSource', 'url')
+    add_attribute('CWSource', 'parser')
+    add_attribute('CWSource', 'latest_retrieval')
+
+try:
+    from cubicweb.server.sources.pyrorql import PyroRQLSource
+except ImportError:
+    pass
+else:
+
+    from os.path import join
+    # function to read old python mapping file
+    def load_mapping_file(source):
+        mappingfile = source.config['mapping-file']
+        mappingfile = join(source.repo.config.apphome, mappingfile)
+        mapping = {}
+        execfile(mappingfile, mapping)
+        for junk in ('__builtins__', '__doc__'):
+            mapping.pop(junk, None)
+        mapping.setdefault('support_relations', {})
+        mapping.setdefault('dont_cross_relations', set())
+        mapping.setdefault('cross_relations', set())
+        # do some basic checks of the mapping content
+        assert 'support_entities' in mapping, \
+               'mapping file should at least define support_entities'
+        assert isinstance(mapping['support_entities'], dict)
+        assert isinstance(mapping['support_relations'], dict)
+        assert isinstance(mapping['dont_cross_relations'], set)
+        assert isinstance(mapping['cross_relations'], set)
+        unknown = set(mapping) - set( ('support_entities', 'support_relations',
+                                       'dont_cross_relations', 'cross_relations') )
+        assert not unknown, 'unknown mapping attribute(s): %s' % unknown
+        # relations that are necessarily not crossed
+        for rtype in ('is', 'is_instance_of', 'cw_source'):
+            assert rtype not in mapping['dont_cross_relations'], \
+                   '%s relation should not be in dont_cross_relations' % rtype
+            assert rtype not in mapping['support_relations'], \
+                   '%s relation should not be in support_relations' % rtype
+        return mapping
+    # for now, only pyrorql sources have a mapping
+    for source in repo.sources_by_uri.values():
+        if not isinstance(source, PyroRQLSource):
+            continue
+        sourceentity = session.entity_from_eid(source.eid)
+        mapping = load_mapping_file(source)
+        # write mapping as entities
+        print 'migrating map for', source
+        for etype, write in mapping['support_entities'].items():
+            create_entity('CWSourceSchemaConfig',
+                          cw_for_source=sourceentity,
+                          cw_schema=session.entity_from_eid(schema[etype].eid),
+                          options=write and u'write' or None,
+                          ask_confirm=False)
+        for rtype, write in mapping['support_relations'].items():
+            options = []
+            if write:
+                options.append(u'write')
+            if rtype in mapping['cross_relations']:
+                options.append(u'maycross')
+            create_entity('CWSourceSchemaConfig',
+                          cw_for_source=sourceentity,
+                          cw_schema=session.entity_from_eid(schema[rtype].eid),
+                          options=u':'.join(options) or None,
+                          ask_confirm=False)
+        for rtype in mapping['dont_cross_relations']:
+            create_entity('CWSourceSchemaConfig',
+                          cw_for_source=sourceentity,
+                          cw_schema=session.entity_from_eid(schema[rtype].eid),
+                          options=u'dontcross',
+                          ask_confirm=False)
+        # latest update time cwproperty is now a source attribute (latest_retrieval)
+        pkey = u'sources.%s.latest-update-time' % source.uri
+        rset = session.execute('Any V WHERE X is CWProperty, X value V, X pkey %(k)s',
+                               {'k': pkey})
+        timestamp = int(rset[0][0])
+        sourceentity.set_attributes(latest_retrieval=datetime.fromtimestamp(timestamp))
+        session.execute('DELETE CWProperty X WHERE X pkey %(k)s', {'k': pkey})
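For reference, a minimal sketch of the kind of pre-3.11 Python mapping file that load_mapping_file() above expects to find next to a pyrorql source. The entity and relation names are purely illustrative; only the four top-level names checked by the asserts are accepted:

    # hypothetical mapping file shipped with a pyrorql source before 3.11
    support_entities = {'Person': True,      # True: writable through this source
                        'Company': False}    # False: read-only
    support_relations = {'works_for': True}
    dont_cross_relations = set(('see_also',))
    cross_relations = set(('works_for',))

The migration then turns each of these entries into a CWSourceSchemaConfig entity whose options attribute encodes 'write', 'maycross' or 'dontcross'.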
--- a/req.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/req.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -35,6 +35,8 @@
 ONESECOND = timedelta(0, 1, 0)
 CACHE_REGISTRY = {}
 
+class FindEntityError(Exception):
+    """raised when find_one_entity() can not return one and only one entity"""
 
 def _check_cw_unsafe(kwargs):
     if kwargs.pop('_cw_unsafe', False):
@@ -140,6 +142,33 @@
         cls = self.vreg['etypes'].etype_class(etype)
         return cls.cw_instantiate(self.execute, **kwargs)
 
+    def find_entities(self, etype, **kwargs):
+        """find entities of the given type and attribute values.
+
+        >>> users = find_entities('CWGroup', name=u'users')
+        >>> groups = find_entities('CWGroup')
+        """
+        parts = ['Any X WHERE X is %s' % etype]
+        parts.extend('X %(attr)s %%(%(attr)s)s' % {'attr': attr} for attr in kwargs)
+        return self.execute(', '.join(parts), kwargs).entities()
+
+    def find_one_entity(self, etype, **kwargs):
+        """find one entity of the given type and attribute values.
+        raise :exc:`FindEntityError` if can not return one and only one entity.
+
+        >>> users = find_one_entity('CWGroup', name=u'users')
+        >>> groups = find_one_entity('CWGroup')
+        Exception()
+        """
+        parts = ['Any X WHERE X is %s' % etype]
+        parts.extend('X %(attr)s %%(%(attr)s)s' % {'attr': attr} for attr in kwargs)
+        rql = ', '.join(parts)
+        rset = self.execute(rql, kwargs)
+        if len(rset) != 1:
+            raise FindEntityError('Found %i entities when 1 was expected (rql=%s ; %s)'
+                                  % (len(rset), rql, repr(kwargs)))
+        return rset.get_entity(0,0)
+
     def ensure_ro_rql(self, rql):
         """raise an exception if the given rql is not a select query"""
         first = rql.split(None, 1)[0].lower()
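A minimal usage sketch for the find_entities() / find_one_entity() helpers added above, mirroring their docstrings; it assumes `req` is any request or server session object on a stock instance and that the 'users' group exists:

    from cubicweb.req import FindEntityError

    users_group = req.find_one_entity('CWGroup', name=u'users')
    all_groups = list(req.find_entities('CWGroup'))
    try:
        req.find_one_entity('CWGroup', name=u'no-such-group')
    except FindEntityError:
        pass  # zero or several matching entities raise FindEntityError

Both helpers simply build an 'Any X WHERE X is <etype>, X <attr> %(attr)s, ...' query from the keyword arguments, so only equality restrictions on attributes or relations can be expressed this way.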
--- a/rqlrewrite.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/rqlrewrite.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -24,6 +24,7 @@
 __docformat__ = "restructuredtext en"
 
 from rql import nodes as n, stmts, TypeResolverException
+from rql.utils import common_parent
 from yams import BadSchemaDefinition
 from logilab.common.graph import has_path
 
@@ -180,13 +181,23 @@
     def insert_snippets(self, snippets, varexistsmap=None):
         self.rewritten = {}
         for varmap, rqlexprs in snippets:
+            if isinstance(varmap, dict):
+                varmap = tuple(sorted(varmap.items()))
+            else:
+                assert isinstance(varmap, tuple), varmap
             if varexistsmap is not None and not varmap in varexistsmap:
                 continue
-            self.varmap = varmap
-            selectvar, snippetvar = varmap
+            self.insert_varmap_snippets(varmap, rqlexprs, varexistsmap)
+
+    def insert_varmap_snippets(self, varmap, rqlexprs, varexistsmap):
+        self.varmap = varmap
+        self.revvarmap = {}
+        self.varinfos = []
+        for i, (selectvar, snippetvar) in enumerate(varmap):
             assert snippetvar in 'SOX'
-            self.revvarmap = {snippetvar: selectvar}
-            self.varinfo = vi = {}
+            self.revvarmap[snippetvar] = (selectvar, i)
+            vi = {}
+            self.varinfos.append(vi)
             try:
                 vi['const'] = typed_eid(selectvar) # XXX gae
                 vi['rhs_rels'] = vi['lhs_rels'] = {}
@@ -194,42 +205,42 @@
                 try:
                     vi['stinfo'] = sti = self.select.defined_vars[selectvar].stinfo
                 except KeyError:
-                    # variable has been moved to a newly inserted subquery
+                    # variable may have been moved to a newly inserted subquery
                     # we should insert snippet in that subquery
                     subquery = self.select.aliases[selectvar].query
                     assert len(subquery.children) == 1
                     subselect = subquery.children[0]
                     RQLRewriter(self.session).rewrite(subselect, [(varmap, rqlexprs)],
                                                       subselect.solutions, self.kwargs)
-                    continue
+                    return
                 if varexistsmap is None:
                     vi['rhs_rels'] = dict( (r.r_type, r) for r in sti['rhsrelations'])
                     vi['lhs_rels'] = dict( (r.r_type, r) for r in sti['relations']
                                            if not r in sti['rhsrelations'])
                 else:
                     vi['rhs_rels'] = vi['lhs_rels'] = {}
-            parent = None
-            inserted = False
-            for rqlexpr in rqlexprs:
-                self.current_expr = rqlexpr
-                if varexistsmap is None:
-                    try:
-                        new = self.insert_snippet(varmap, rqlexpr.snippet_rqlst, parent)
-                    except Unsupported:
-                        continue
-                    inserted = True
-                    if new is not None:
-                        self.exists_snippet[rqlexpr] = new
-                    parent = parent or new
-                else:
-                    # called to reintroduce snippet due to ambiguity creation,
-                    # so skip snippets which are not introducing this ambiguity
-                    exists = varexistsmap[varmap]
-                    if self.exists_snippet[rqlexpr] is exists:
-                        self.insert_snippet(varmap, rqlexpr.snippet_rqlst, exists)
-            if varexistsmap is None and not inserted:
-                # no rql expression found matching rql solutions. User has no access right
-                raise Unauthorized()
+        parent = None
+        inserted = False
+        for rqlexpr in rqlexprs:
+            self.current_expr = rqlexpr
+            if varexistsmap is None:
+                try:
+                    new = self.insert_snippet(varmap, rqlexpr.snippet_rqlst, parent)
+                except Unsupported:
+                    continue
+                inserted = True
+                if new is not None:
+                    self.exists_snippet[rqlexpr] = new
+                parent = parent or new
+            else:
+                # called to reintroduce snippet due to ambiguity creation,
+                # so skip snippets which are not introducing this ambiguity
+                exists = varexistsmap[varmap]
+                if self.exists_snippet[rqlexpr] is exists:
+                    self.insert_snippet(varmap, rqlexpr.snippet_rqlst, exists)
+        if varexistsmap is None and not inserted:
+            # no rql expression found matching rql solutions. User has no access right
+            raise Unauthorized() # XXX bad constraint when inserting constraints
 
     def insert_snippet(self, varmap, snippetrqlst, parent=None):
         new = snippetrqlst.where.accept(self)
@@ -243,16 +254,23 @@
     def _insert_snippet(self, varmap, parent, new):
         if new is not None:
             if self._insert_scope is None:
-                insert_scope = self.varinfo.get('stinfo', {}).get('scope', self.select)
+                insert_scope = None
+                for vi in self.varinfos:
+                    scope = vi.get('stinfo', {}).get('scope', self.select)
+                    if insert_scope is None:
+                        insert_scope = scope
+                    else:
+                        insert_scope = common_parent(scope, insert_scope)
             else:
                 insert_scope = self._insert_scope
-            if self.varinfo.get('stinfo', {}).get('optrelations'):
+            if any(vi.get('stinfo', {}).get('optrelations') for vi in self.varinfos):
                 assert parent is None
                 self._insert_scope = self.snippet_subquery(varmap, new)
                 self.insert_pending()
                 self._insert_scope = None
                 return
-            new = n.Exists(new)
+            if not isinstance(new, (n.Exists, n.Not)):
+                new = n.Exists(new)
             if parent is None:
                 insert_scope.add_restriction(new)
             else:
@@ -291,7 +309,7 @@
                 varname = self.rewritten[key]
             except KeyError:
                 try:
-                    varname = self.revvarmap[key[-1]]
+                    varname = self.revvarmap[key[-1]][0]
                 except KeyError:
                     # variable isn't used anywhere else, we can't insert security
                     raise Unauthorized()
@@ -308,45 +326,51 @@
                 rqlexprs = eschema.get_rqlexprs(action)
                 if not rqlexprs:
                     raise Unauthorized()
-                self.insert_snippets([((varname, 'X'), rqlexprs)])
+                self.insert_snippets([({varname: 'X'}, rqlexprs)])
 
     def snippet_subquery(self, varmap, transformedsnippet):
         """introduce the given snippet in a subquery"""
         subselect = stmts.Select()
-        selectvar = varmap[0]
-        subselectvar = subselect.get_variable(selectvar)
-        subselect.append_selected(n.VariableRef(subselectvar))
         snippetrqlst = n.Exists(transformedsnippet.copy(subselect))
-        aliases = [selectvar]
-        stinfo = self.varinfo['stinfo']
-        need_null_test = False
-        for rel in stinfo['relations']:
-            rschema = self.schema.rschema(rel.r_type)
-            if rschema.final or (rschema.inlined and
-                                 not rel in stinfo['rhsrelations']):
-                rel.children[0].name = selectvar # XXX explain why
-                subselect.add_restriction(rel.copy(subselect))
-                for vref in rel.children[1].iget_nodes(n.VariableRef):
-                    if isinstance(vref.variable, n.ColumnAlias):
-                        # XXX could probably be handled by generating the subquery
-                        # into the detected subquery
-                        raise BadSchemaDefinition(
-                            "cant insert security because of usage two inlined "
-                            "relations in this query. You should probably at "
-                            "least uninline %s" % rel.r_type)
-                    subselect.append_selected(vref.copy(subselect))
-                    aliases.append(vref.name)
-                self.select.remove_node(rel)
-                # when some inlined relation has to be copied in the subquery,
-                # we need to test that either value is NULL or that the snippet
-                # condition is satisfied
-                if rschema.inlined and rel.optional:
-                    need_null_test = True
-        if need_null_test:
-            snippetrqlst = n.Or(
-                n.make_relation(subselectvar, 'is', (None, None), n.Constant,
-                                operator='='),
-                snippetrqlst)
+        aliases = []
+        rels_done = set()
+        for i, (selectvar, snippetvar) in enumerate(varmap):
+            subselectvar = subselect.get_variable(selectvar)
+            subselect.append_selected(n.VariableRef(subselectvar))
+            aliases.append(selectvar)
+            vi = self.varinfos[i]
+            need_null_test = False
+            stinfo = vi['stinfo']
+            for rel in stinfo['relations']:
+                if rel in rels_done:
+                    continue
+                rels_done.add(rel)
+                rschema = self.schema.rschema(rel.r_type)
+                if rschema.final or (rschema.inlined and
+                                     not rel in stinfo['rhsrelations']):
+                    rel.children[0].name = selectvar # XXX explain why
+                    subselect.add_restriction(rel.copy(subselect))
+                    for vref in rel.children[1].iget_nodes(n.VariableRef):
+                        if isinstance(vref.variable, n.ColumnAlias):
+                            # XXX could probably be handled by generating the
+                            # subquery into the detected subquery
+                            raise BadSchemaDefinition(
+                                "cant insert security because of usage two inlined "
+                                "relations in this query. You should probably at "
+                                "least uninline %s" % rel.r_type)
+                        subselect.append_selected(vref.copy(subselect))
+                        aliases.append(vref.name)
+                    self.select.remove_node(rel)
+                    # when some inlined relation has to be copied in the
+                    # subquery, we need to test that either value is NULL or
+                    # that the snippet condition is satisfied
+                    if rschema.inlined and rel.optional:
+                        need_null_test = True
+            if need_null_test:
+                snippetrqlst = n.Or(
+                    n.make_relation(subselectvar, 'is', (None, None), n.Constant,
+                                    operator='='),
+                    snippetrqlst)
         subselect.add_restriction(snippetrqlst)
         if self.u_varname:
             # generate an identifier for the substitution
@@ -433,35 +457,37 @@
                 # no more references, undefine the variable
                 del self.select.defined_vars[vref.name]
 
-    def _may_be_shared_with(self, sniprel, target, searchedvarname):
+    def _may_be_shared_with(self, sniprel, target):
         """if the snippet relation can be skipped to use a relation from the
         original query, return that relation node
         """
         rschema = self.schema.rschema(sniprel.r_type)
-        try:
-            if target == 'object':
-                orel = self.varinfo['lhs_rels'][sniprel.r_type]
-                cardindex = 0
-                ttypes_func = rschema.objects
-                rdef = rschema.rdef
-            else: # target == 'subject':
-                orel = self.varinfo['rhs_rels'][sniprel.r_type]
-                cardindex = 1
-                ttypes_func = rschema.subjects
-                rdef = lambda x, y: rschema.rdef(y, x)
-        except KeyError:
-            # may be raised by self.varinfo['xhs_rels'][sniprel.r_type]
-            return None
-        # can't share neged relation or relations with different outer join
-        if (orel.neged(strict=True) or sniprel.neged(strict=True)
-            or (orel.optional and orel.optional != sniprel.optional)):
-            return None
-        # if cardinality is in '?1', we can ignore the snippet relation and use
-        # variable from the original query
-        for etype in self.varinfo['stinfo']['possibletypes']:
-            for ttype in ttypes_func(etype):
-                if rdef(etype, ttype).cardinality[cardindex] in '+*':
-                    return None
+        for vi in self.varinfos:
+            try:
+                if target == 'object':
+                    orel = vi['lhs_rels'][sniprel.r_type]
+                    cardindex = 0
+                    ttypes_func = rschema.objects
+                    rdef = rschema.rdef
+                else: # target == 'subject':
+                    orel = vi['rhs_rels'][sniprel.r_type]
+                    cardindex = 1
+                    ttypes_func = rschema.subjects
+                    rdef = lambda x, y: rschema.rdef(y, x)
+            except KeyError:
+                # may be raised by vi['xhs_rels'][sniprel.r_type]
+                return None
+            # can't share neged relation or relations with different outer join
+            if (orel.neged(strict=True) or sniprel.neged(strict=True)
+                or (orel.optional and orel.optional != sniprel.optional)):
+                return None
+            # if cardinality is in '?1', we can ignore the snippet relation and use
+            # variable from the original query
+            for etype in vi['stinfo']['possibletypes']:
+                for ttype in ttypes_func(etype):
+                    if rdef(etype, ttype).cardinality[cardindex] in '+*':
+                        return None
+            break
         return orel
 
     def _use_orig_term(self, snippet_varname, term):
@@ -560,12 +586,12 @@
             if self.existingvars and not self.keep_var(rhs.name):
                 return
             if lhs.name in self.revvarmap and rhs.name != 'U':
-                orel = self._may_be_shared_with(node, 'object', lhs.name)
+                orel = self._may_be_shared_with(node, 'object')
                 if orel is not None:
                     self._use_orig_term(rhs.name, orel.children[1].children[0])
                     return
             elif rhs.name in self.revvarmap and lhs.name != 'U':
-                orel = self._may_be_shared_with(node, 'subject', rhs.name)
+                orel = self._may_be_shared_with(node, 'subject')
                 if orel is not None:
                     self._use_orig_term(lhs.name, orel.children[0])
                     return
@@ -600,10 +626,11 @@
     def visit_variableref(self, node):
         """get the sql name for a variable reference"""
         if node.name in self.revvarmap:
-            if self.varinfo.get('const') is not None:
-                return n.Constant(self.varinfo['const'], 'Int') # XXX gae
-            return n.VariableRef(self.select.get_variable(
-                self.revvarmap[node.name]))
+            selectvar, index = self.revvarmap[node.name]
+            vi = self.varinfos[index]
+            if vi.get('const') is not None:
+                return n.Constant(vi['const'], 'Int') # XXX gae
+            return n.VariableRef(self.select.get_variable(selectvar))
         vname_or_term = self._get_varname_or_term(node.name)
         if isinstance(vname_or_term, basestring):
             return n.VariableRef(self.select.get_variable(vname_or_term))
--- a/schema.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/schema.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -65,6 +65,8 @@
 NO_I18NCONTEXT = META_RTYPES | WORKFLOW_RTYPES
 NO_I18NCONTEXT.add('require_permission')
 
+SKIP_COMPOSITE_RELS = [('cw_source', 'subject')]
+
 # set of entity and relation types used to build the schema
 SCHEMA_TYPES = set((
     'CWEType', 'CWRType', 'CWAttribute', 'CWRelation',
@@ -83,8 +85,7 @@
                       'SubWorkflowExitPoint'))
 
 INTERNAL_TYPES = set(('CWProperty', 'CWPermission', 'CWCache', 'ExternalUri',
-                      'CWSource', 'CWSourceHostConfig',
-))
+                      'CWSource', 'CWSourceHostConfig', 'CWSourceSchemaConfig'))
 
 
 _LOGGER = getLogger('cubicweb.schemaloader')
@@ -108,7 +109,7 @@
     }
 PUB_SYSTEM_ATTR_PERMS = {
     'read':   ('managers', 'users', 'guests',),
-    'update':    ('managers',),
+    'update': ('managers',),
     }
 RO_REL_PERMS = {
     'read':   ('managers', 'users', 'guests',),
@@ -369,6 +370,14 @@
                     msg = "can't use RRQLExpression on %s, use an ERQLExpression"
                     raise BadSchemaDefinition(msg % self.type)
 
+    def is_subobject(self, strict=False, skiprels=None):
+        if skiprels is None:
+            skiprels = SKIP_COMPOSITE_RELS
+        else:
+            skiprels += SKIP_COMPOSITE_RELS
+        return super(CubicWebEntitySchema, self).is_subobject(strict,
+                                                              skiprels=skiprels)
+
     def attribute_definitions(self):
         """return an iterator on attribute definitions
 
@@ -788,6 +797,7 @@
 
 
 class RQLExpression(object):
+
     def __init__(self, expression, mainvars, eid):
         self.eid = eid # eid of the entity representing this rql expression
         if not isinstance(mainvars, unicode):
--- a/schemas/base.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/schemas/base.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -21,7 +21,7 @@
 _ = unicode
 
 from yams.buildobjs import (EntityType, RelationType, RelationDefinition,
-                            SubjectRelation, String, Datetime, Password)
+                            SubjectRelation, String, Datetime, Password, Interval)
 from cubicweb.schema import (
     RQLConstraint, WorkflowableEntityType, ERQLExpression, RRQLExpression,
     PUB_SYSTEM_ENTITY_PERMS, PUB_SYSTEM_REL_PERMS, PUB_SYSTEM_ATTR_PERMS)
@@ -258,15 +258,30 @@
                         'read':   ('managers',),
                         'update': ('managers',),
                         })
+    # put this here and not in a subclass even if it's only used by some sources,
+    # since having subclasses of a generic relation (cw_source) doubles the number
+    # of rdefs in the schema, and makes ms planning harder since query solutions
+    # may change when sources are specified
+    url = String(description=_('URLs from which content will be imported. You can put one url per line'))
+    parser = String(description=_('parser to use to extract entities from content retrieved at given URLs.'))
+    latest_retrieval = Datetime(description=_('latest synchronization time'))
+
+
+ENTITY_MANAGERS_PERMISSIONS = {
+    'read':   ('managers',),
+    'add':    ('managers',),
+    'update': ('managers',),
+    'delete': ('managers',),
+    }
+RELATION_MANAGERS_PERMISSIONS = {
+    'read':   ('managers',),
+    'add':    ('managers',),
+    'delete': ('managers',),
+    }
 
 
 class CWSourceHostConfig(EntityType):
-    __permissions__ = {
-        'read':   ('managers',),
-        'add':    ('managers',),
-        'update': ('managers',),
-        'delete': ('managers',),
-        }
+    __permissions__ = ENTITY_MANAGERS_PERMISSIONS
     __unique_together__ = [('match_host', 'cw_host_config_of')]
     match_host = String(required=True, maxsize=128,
                         description=_('regexp matching host(s) to which this config applies'))
@@ -282,6 +297,7 @@
 
 
 class cw_host_config_of(RelationDefinition):
+    __permissions__ = RELATION_MANAGERS_PERMISSIONS
     subject = 'CWSourceHostConfig'
     object = 'CWSource'
     cardinality = '1*'
@@ -297,18 +313,20 @@
     subject = '*'
     object = 'CWSource'
     cardinality = '1*'
-
-class cw_support(RelationDefinition):
-    subject = 'CWSource'
-    object = ('CWEType', 'CWRType')
+    composite = 'object'
 
-class cw_dont_cross(RelationDefinition):
-    subject = 'CWSource'
-    object = 'CWRType'
+class CWSourceSchemaConfig(EntityType):
+    __permissions__ = ENTITY_MANAGERS_PERMISSIONS
+    __unique_together__ = [('cw_for_source', 'cw_schema')]
+    cw_for_source = SubjectRelation(
+        'CWSource', inlined=True, cardinality='1*', composite='object',
+        __permissions__=RELATION_MANAGERS_PERMISSIONS)
+    cw_schema = SubjectRelation(
+        ('CWEType', 'CWRType', 'CWAttribute', 'CWRelation'),
+        inlined=True, cardinality='1*', composite='object',
+        __permissions__=RELATION_MANAGERS_PERMISSIONS)
+    options = String(description=_('allowed options depend on the source type'))
 
-class cw_may_cross(RelationDefinition):
-    subject = 'CWSource'
-    object = 'CWRType'
 
 # "abtract" relation types, no definition in cubicweb itself ###################
 
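
Note: cube schemas can reuse the new permission dictionaries instead of repeating the
managers-only permissions. A minimal sketch, assuming a hypothetical cube entity type
and relation (`MyAdminNote` and `note_of` are not part of this changeset):

.. sourcecode:: python

   from yams.buildobjs import EntityType, RelationDefinition, String
   from cubicweb.schemas.base import (ENTITY_MANAGERS_PERMISSIONS,
                                      RELATION_MANAGERS_PERMISSIONS)

   class MyAdminNote(EntityType):
       """hypothetical entity type readable and writable by managers only"""
       __permissions__ = ENTITY_MANAGERS_PERMISSIONS
       text = String()

   class note_of(RelationDefinition):
       """hypothetical relation, managers only as well"""
       __permissions__ = RELATION_MANAGERS_PERMISSIONS
       subject = 'MyAdminNote'
       object = 'CWSource'
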
--- a/selectors.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/selectors.py	Wed Apr 27 09:54:22 2011 +0200
@@ -387,7 +387,9 @@
 
 class ExpectedValueSelector(Selector):
     """Take a list of expected values as initializer argument and store them
-    into the :attr:`expected` set attribute.
+    into the :attr:`expected` set attribute. You may also give a set as single
+    argument, which will then be referenced as the set of expected values,
+    so that later modifications to the given set are taken into account.
 
     You should implement the :meth:`_get_value(cls, req, **kwargs)` method
     which should return the value for the given context. The selector will then
@@ -395,7 +397,10 @@
     """
     def __init__(self, *expected):
         assert expected, self
-        self.expected = frozenset(expected)
+        if len(expected) == 1 and isinstance(expected[0], set):
+            self.expected = expected[0]
+        else:
+            self.expected = frozenset(expected)
 
     def __str__(self):
         return '%s(%s)' % (self.__class__.__name__,
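
To illustrate the new set form: the selector keeps a reference to the given set instead
of freezing it, so later changes to the set are honoured at selection time. A sketch
under the assumption of a hypothetical `category_is` subclass (not part of CubicWeb):

.. sourcecode:: python

   from cubicweb.selectors import ExpectedValueSelector

   ALLOWED_CATEGORIES = set(('primary',))      # mutable, shared set

   class category_is(ExpectedValueSelector):
       """hypothetical selector matching the 'category' keyword argument"""
       def _get_value(self, cls, req, **kwargs):
           return kwargs.get('category')

   match_category = category_is(ALLOWED_CATEGORIES)   # keeps a reference to the set
   # the positional form would freeze the values instead:
   #   match_category = category_is('primary', 'secondary')
   ALLOWED_CATEGORIES.add('secondary')   # now also accepted by match_category
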
--- a/server/__init__.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/server/__init__.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -129,7 +129,6 @@
     # on connection
     config.creating = True
     config.consider_user_state = False
-    config.set_language = False
     # only enable the system source at initialization time
     repo = Repository(config, vreg=vreg)
     schema = repo.schema
@@ -182,6 +181,7 @@
     session.execute('SET X owned_by U WHERE X is IN (CWGroup,CWSource), U eid %(u)s',
                     {'u': admin.eid})
     session.commit()
+    session.close()
     repo.shutdown()
     # reloging using the admin user
     config._cubes = None # avoid assertion error
@@ -205,12 +205,10 @@
     repo.system_source.init_creating()
     cnx.commit()
     cnx.close()
-    session.close()
     repo.shutdown()
     # restore initial configuration
     config.creating = False
     config.consider_user_state = True
-    config.set_language = True
     print '-> database for instance %s initialized.' % config.appid
 
 
@@ -254,7 +252,7 @@
 
 # available sources registry
 SOURCE_TYPES = {'native': LazyObject('cubicweb.server.sources.native', 'NativeSQLSource'),
-                # XXX private sources installed by an external cube
                 'pyrorql': LazyObject('cubicweb.server.sources.pyrorql', 'PyroRQLSource'),
                 'ldapuser': LazyObject('cubicweb.server.sources.ldapuser', 'LDAPUserSource'),
+                'datafeed': LazyObject('cubicweb.server.sources.datafeed', 'DataFeedSource'),
                 }
--- a/server/checkintegrity.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/server/checkintegrity.py	Wed Apr 27 09:54:22 2011 +0200
@@ -19,8 +19,6 @@
 
 * integrity of a CubicWeb repository. Hum actually only the system database is
   checked.
-
-* consistency of multi-sources instance mapping file
 """
 
 from __future__ import with_statement
@@ -32,7 +30,7 @@
 
 from logilab.common.shellutils import ProgressBar
 
-from cubicweb.schema import META_RTYPES, VIRTUAL_RTYPES, PURE_VIRTUAL_RTYPES
+from cubicweb.schema import PURE_VIRTUAL_RTYPES
 from cubicweb.server.sqlutils import SQL_PREFIX
 from cubicweb.server.session import security_enabled
 
@@ -377,103 +375,3 @@
         session.set_pool()
         reindex_entities(repo.schema, session, withpb=withpb)
         cnx.commit()
-
-
-def info(msg, *args):
-    if args:
-        msg = msg % args
-    print 'INFO: %s' % msg
-
-def warning(msg, *args):
-    if args:
-        msg = msg % args
-    print 'WARNING: %s' % msg
-
-def error(msg, *args):
-    if args:
-        msg = msg % args
-    print 'ERROR: %s' % msg
-
-def check_mapping(schema, mapping, warning=warning, error=error):
-    # first check stuff found in mapping file exists in the schema
-    for attr in ('support_entities', 'support_relations'):
-        for ertype in mapping[attr].keys():
-            try:
-                mapping[attr][ertype] = erschema = schema[ertype]
-            except KeyError:
-                error('reference to unknown type %s in %s', ertype, attr)
-                del mapping[attr][ertype]
-            else:
-                if erschema.final or erschema in META_RTYPES:
-                    error('type %s should not be mapped in %s', ertype, attr)
-                    del mapping[attr][ertype]
-    for attr in ('dont_cross_relations', 'cross_relations'):
-        for rtype in list(mapping[attr]):
-            try:
-                rschema = schema.rschema(rtype)
-            except KeyError:
-                error('reference to unknown relation type %s in %s', rtype, attr)
-                mapping[attr].remove(rtype)
-            else:
-                if rschema.final or rschema in VIRTUAL_RTYPES:
-                    error('relation type %s should not be mapped in %s',
-                          rtype, attr)
-                    mapping[attr].remove(rtype)
-    # check relation in dont_cross_relations aren't in support_relations
-    for rschema in mapping['dont_cross_relations']:
-        if rschema in mapping['support_relations']:
-            info('relation %s is in dont_cross_relations and in support_relations',
-                 rschema)
-    # check relation in cross_relations are in support_relations
-    for rschema in mapping['cross_relations']:
-        if rschema not in mapping['support_relations']:
-            info('relation %s is in cross_relations but not in support_relations',
-                 rschema)
-    # check for relation in both cross_relations and dont_cross_relations
-    for rschema in mapping['cross_relations'] & mapping['dont_cross_relations']:
-        error('relation %s is in both cross_relations and dont_cross_relations',
-              rschema)
-    # now check for more handy things
-    seen = set()
-    for eschema in mapping['support_entities'].values():
-        for rschema, ttypes, role in eschema.relation_definitions():
-            if rschema in META_RTYPES:
-                continue
-            ttypes = [ttype for ttype in ttypes if ttype in mapping['support_entities']]
-            if not rschema in mapping['support_relations']:
-                somethingprinted = False
-                for ttype in ttypes:
-                    rdef = rschema.role_rdef(eschema, ttype, role)
-                    seen.add(rdef)
-                    if rdef.role_cardinality(role) in '1+':
-                        error('relation %s with %s as %s and target type %s is '
-                              'mandatory but not supported',
-                              rschema, eschema, role, ttype)
-                        somethingprinted = True
-                    elif ttype in mapping['support_entities']:
-                        if rdef not in seen:
-                            warning('%s could be supported', rdef)
-                        somethingprinted = True
-                if rschema not in mapping['dont_cross_relations']:
-                    if role == 'subject' and rschema.inlined:
-                        error('inlined relation %s of %s should be supported',
-                              rschema, eschema)
-                    elif not somethingprinted and rschema not in seen and rschema not in mapping['cross_relations']:
-                        print 'you may want to specify something for %s' % rschema
-                        seen.add(rschema)
-            else:
-                if not ttypes:
-                    warning('relation %s with %s as %s is supported but no target '
-                            'type supported', rschema, role, eschema)
-                if rschema in mapping['cross_relations'] and rschema.inlined:
-                    error('you should unline relation %s which is supported and '
-                          'may be crossed ', rschema)
-    for rschema in mapping['support_relations'].values():
-        if rschema in META_RTYPES:
-            continue
-        for subj, obj in rschema.rdefs:
-            if subj in mapping['support_entities'] and obj in mapping['support_entities']:
-                break
-        else:
-            error('relation %s is supported but none if its definitions '
-                  'matches supported entities', rschema)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/edition.py	Wed Apr 27 09:54:22 2011 +0200
@@ -0,0 +1,150 @@
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+"""helper classes to handle server-side edition of entities"""
+
+from __future__ import with_statement
+
+__docformat__ = "restructuredtext en"
+
+from copy import copy
+from yams import ValidationError
+
+
+_MARKER = object()
+
+class dict_protocol_catcher(object):
+    def __init__(self, entity):
+        self.__entity = entity
+    def __getitem__(self, attr):
+        return self.__entity.cw_edited[attr]
+    def __setitem__(self, attr, value):
+        self.__entity.cw_edited[attr] = value
+    def __getattr__(self, attr):
+        return getattr(self.__entity, attr)
+
+
+class EditedEntity(dict):
+    """encapsulate an entity's attributes being written by an RQL query"""
+    def __init__(self, entity, **kwargs):
+        dict.__init__(self, **kwargs)
+        self.entity = entity
+        self.skip_security = set()
+        self.querier_pending_relations = {}
+        self.saved = False
+
+    def __hash__(self):
+        # dict|set keyable
+        return hash(id(self))
+
+    def __cmp__(self, other):
+        # we don't want comparison by value inherited from dict
+        return cmp(id(self), id(other))
+
+    def __setitem__(self, attr, value):
+        assert attr != 'eid'
+        # don't add attribute into skip_security if already in edited
+        # attributes, else we may accidentally skip a desired security check
+        if attr not in self:
+            self.skip_security.add(attr)
+        self.edited_attribute(attr, value)
+
+    def __delitem__(self, attr):
+        assert not self.saved, 'too late to modify edited attributes'
+        super(EditedEntity, self).__delitem__(attr)
+        self.entity.cw_attr_cache.pop(attr, None)
+
+    def pop(self, attr, *args):
+        # don't update skip_security by design (think to storage api)
+        assert not self.saved, 'too late to modify edited attributes'
+        value = super(EditedEntity, self).pop(attr, *args)
+        self.entity.cw_attr_cache.pop(attr, *args)
+        return value
+
+    def setdefault(self, attr, default):
+        assert attr != 'eid'
+        # don't add attribute into skip_security if already in edited
+        # attributes, else we may accidentally skip a desired security check
+        if attr not in self:
+            self[attr] = default
+        return self[attr]
+
+    def update(self, values, skipsec=True):
+        if skipsec:
+            setitem = self.__setitem__
+        else:
+            setitem = self.edited_attribute
+        for attr, value in values.iteritems():
+            setitem(attr, value)
+
+    def edited_attribute(self, attr, value):
+        """attribute being edited by a rql query: shouldn't be added to
+        skip_security
+        """
+        assert not self.saved, 'too late to modify edited attributes'
+        super(EditedEntity, self).__setitem__(attr, value)
+        self.entity.cw_attr_cache[attr] = value
+
+    def oldnewvalue(self, attr):
+        """return the pair (old attr value, new attr value)
+
+        NOTE: will only work in a before_update_entity hook
+        """
+        assert not self.saved, 'too late to get the old value'
+        # get new value and remove from local dict to force a db query to
+        # fetch old value
+        newvalue = self.entity.cw_attr_cache.pop(attr, _MARKER)
+        oldvalue = getattr(self.entity, attr)
+        if newvalue is not _MARKER:
+            self.entity.cw_attr_cache[attr] = newvalue
+        else:
+            newvalue = oldvalue
+        return oldvalue, newvalue
+
+    def set_defaults(self):
+        """set default values according to the schema"""
+        for attr, value in self.entity.e_schema.defaults():
+            if not attr in self:
+                self[str(attr)] = value
+
+    def check(self, creation=False):
+        """check the entity edition against its schema. Only final relations
+        are checked here; constraints on actual relations are checked in hooks.
+        """
+        entity = self.entity
+        if creation:
+            # on creations, we want to check all relations, especially
+            # required attributes
+            relations = [rschema for rschema in entity.e_schema.subject_relations()
+                         if rschema.final and rschema.type != 'eid']
+        else:
+            relations = [entity._cw.vreg.schema.rschema(rtype)
+                         for rtype in self]
+        try:
+            entity.e_schema.check(dict_protocol_catcher(entity),
+                                  creation=creation, _=entity._cw._,
+                                  relations=relations)
+        except ValidationError, ex:
+            ex.entity = self.entity
+            raise
+
+    def clone(self):
+        thecopy = EditedEntity(copy(self.entity))
+        thecopy.entity.cw_attr_cache = copy(self.entity.cw_attr_cache)
+        thecopy.entity._cw_related_cache = {}
+        thecopy.update(self, skipsec=False)
+        return thecopy
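
A small usage sketch of the new class, using a bare stand-in object that only provides
the attribute cache `EditedEntity` writes to (a real CubicWeb entity is richer):

.. sourcecode:: python

   from cubicweb.server.edition import EditedEntity

   class FakeEntity(object):
       """minimal stand-in exposing the cw_attr_cache attribute"""
       def __init__(self):
           self.cw_attr_cache = {}

   entity = FakeEntity()
   edited = EditedEntity(entity)
   edited['age'] = 42                          # dict protocol: also recorded in skip_security
   edited.edited_attribute('name', u'babar')   # rql-style edition: security checks still apply
   assert edited.skip_security == set(['age'])
   assert entity.cw_attr_cache == {'age': 42, 'name': u'babar'}
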
--- a/server/hook.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/server/hook.py	Wed Apr 27 09:54:22 2011 +0200
@@ -93,7 +93,7 @@
 unset), specific events are issued and the Hooks matching these events
 are called.
 
-You can get the event that triggered a hook by accessing its :attr:event
+You can get the event that triggered a hook by accessing its `event`
 attribute.
 
 .. _`dataflow`: http://en.wikipedia.org/wiki/Dataflow
@@ -105,68 +105,64 @@
 When called for one of these events, hook will have an `entity` attribute
 containing the entity instance.
 
-* `before_add_entity`, `before_update_entity`:
+- `before_add_entity`, `before_update_entity`:
 
-  - on those events, you can check what attributes of the entity are modified in
-    `entity.cw_edited` (by definition the database is not yet updated in a before
-    event)
+  On those events, you can access the modified attributes of the entity using
+  the `entity.cw_edited` dictionary. The values can be modified and the old
+  values can be retrieved.
+
+  If you modify the `entity.cw_edited` dictionary in the hook, that is, before
+  the database operations take place, you avoid the need to process a whole new
+  rql query, and the underlying backend query (usually sql) will contain the
+  modified data. For example:
 
-  - you are allowed to further modify the entity before database
-    operations, using the dictionary notation on `cw_edited`. By doing
-    this, you'll avoid the need for a whole new rql query processing,
-    the only difference is that the underlying backend query (eg
-    usually sql) will contains the additional data. For example:
+  .. sourcecode:: python
 
-    .. sourcecode:: python
+     self.entity.cw_edited['age'] = 42
 
-       self.entity.set_attributes(age=42)
+  will modify the age before it is written to the backend storage.
 
-    will set the `age` attribute of the entity to 42. But to do so, it will
-    generate a rql query that will have to be processed, then trigger some
-    hooks, and so one (potentially leading to infinite hook loops or such
-    awkward situations..) You can avoid this by doing the modification that way:
+  Similarly, removing an attribute from `cw_edited` will cancel its
+  modification:
 
-    .. sourcecode:: python
+  .. sourcecode:: python
 
-       self.entity.cw_edited['age'] = 42
+     del self.entity.cw_edited['age']
 
-    Here the attribute will simply be edited in the same query that the
-    one that triggered the hook.
+  On a `before_update_entity` event, you can access the old and new values:
 
-    Similarly, removing an attribute from `cw_edited` will cancel its
-    modification.
+  .. sourcecode:: python
 
-  - on `before_update_entity` event, you can access to old and new values in
-    this hook, by using `entity.cw_edited.oldnewvalue(attr)`
+     old, new = entity.cw_edited.oldnewvalue('age')
+
+- `after_add_entity`, `after_update_entity`
 
+  On those events, you can get the list of attributes that were modified using
+  the `entity.cw_edited` dictionary, but you cannot modify it or get the old
+  value of an attribute.
 
-* `after_add_entity`, `after_update_entity`
+- `before_delete_entity`, `after_delete_entity`
 
-  - on those events, you can still check what attributes of the entity are
-    modified in `entity.cw_edited` but you can't get anymore the old value, nor
-    modify it.
+  On those events, the entity has no `cw_edited` dictionary.
 
-* `before_delete_entity`, `after_delete_entity`
-
-  - on those events, the entity has no `cw_edited` set.
-
+.. note:: `self.entity.set_attributes(age=42)` will set the `age` attribute to
+  42. But to do so, it will generate a rql query that will have to be processed,
+  hence may trigger some hooks, etc. This could lead to infinitely looping hooks.
 
 Relation modification related events
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
 When called for one of these events, hook will have `eidfrom`, `rtype`, `eidto`
-attributes containing respectivly the eid of the subject entity, the relation
+attributes containing respectively the eid of the subject entity, the relation
 type and the eid of the object entity.
 
 * `before_add_relation`, `before_delete_relation`
 
-  - on those events, you can still get original relation by issuing a rql query
+  On those events, you can still get the original relation by issuing a rql query.
 
 * `after_add_relation`, `after_delete_relation`
 
-This is an occasion to remind us that relations support the add / delete
-operation, but no update.
-
+Take note that relations can be added or deleted, but not updated.
 
 Non data events
 ~~~~~~~~~~~~~~~
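
Tying the entity event documentation above together, a sketch of a `before_update_entity`
hook using `cw_edited` in place; the `Person` entity type, the `age` attribute and the
hook identifiers are illustrative only:

.. sourcecode:: python

   from cubicweb.selectors import is_instance
   from cubicweb.server import hook

   class ClampAgeHook(hook.Hook):
       """illustrative hook: keep the previous age if the new value is negative"""
       __regid__ = 'myapp.clamp_age'
       __select__ = hook.Hook.__select__ & is_instance('Person')
       events = ('before_update_entity',)

       def __call__(self):
           edited = self.entity.cw_edited
           if 'age' in edited:
               old, new = edited.oldnewvalue('age')
               if new is not None and new < 0:
                   # modified in place: no additional rql query is generated
                   edited['age'] = old
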
--- a/server/migractions.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/server/migractions.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -1307,20 +1307,32 @@
     # CWProperty handling ######################################################
 
     def cmd_property_value(self, pkey):
-        rql = 'Any V WHERE X is CWProperty, X pkey %(k)s, X value V'
-        rset = self.rqlexec(rql, {'k': pkey}, ask_confirm=False)
+        """retrieve the site-wide persistent property value for the given key.
+
+        To get a user-specific property value, use the appropriate method on the
+        CWUser instance.
+        """
+        rset = self.rqlexec(
+            'Any V WHERE X is CWProperty, X pkey %(k)s, X value V, NOT X for_user U',
+            {'k': pkey}, ask_confirm=False)
         return rset[0][0]
 
     def cmd_set_property(self, pkey, value):
+        """set the site-wide persistent property value for the given key to the
+        given value.
+
+        To set a user-specific property value, use the appropriate method on the
+        CWUser instance.
+        """
         value = unicode(value)
         try:
-            prop = self.rqlexec('CWProperty X WHERE X pkey %(k)s', {'k': pkey},
-                                ask_confirm=False).get_entity(0, 0)
+            prop = self.rqlexec(
+                'CWProperty X WHERE X pkey %(k)s, NOT X for_user U',
+                {'k': pkey}, ask_confirm=False).get_entity(0, 0)
         except:
             self.cmd_create_entity('CWProperty', pkey=unicode(pkey), value=value)
         else:
-            self.rqlexec('SET X value %(v)s WHERE X pkey %(k)s',
-                         {'k': pkey, 'v': value}, ask_confirm=False)
+            prop.set_attributes(value=value)
 
     # other data migration commands ###########################################
 
@@ -1360,6 +1372,18 @@
             self.commit()
         return entity
 
+    def cmd_find_entities(self, etype, **kwargs):
+        """find entities of the given type and attribute values"""
+        return self._cw.find_entities(etype, **kwargs)
+
+    def cmd_find_one_entity(self, etype, **kwargs):
+        """find one entity of the given type and attribute values.
+
+        raise :exc:`cubicweb.req.FindEntityError` if it cannot return exactly one
+        entity.
+        """
+        return self._cw.find_one_entity(etype, **kwargs)
+
     def cmd_update_etype_fti_weight(self, etype, weight):
         if self.repo.system_source.dbdriver == 'postgres':
             self.sqlexec('UPDATE appears SET weight=%(weight)s '
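
Migration scripts see these commands without the `cmd_` prefix, so the new helpers would
typically be used as below (a sketch; the `Card` type, its attributes and the property
key are illustrative):

.. sourcecode:: python

   # site-wide property: user-specific CWProperty entities are left untouched
   set_property('ui.site-title', u'my site')
   assert property_value('ui.site-title') == u'my site'

   # look up entities by type and attribute values
   for card in find_entities('Card', synopsis=u'obsolete'):
       card.set_attributes(synopsis=u'archived')

   # exactly one match expected, else cubicweb.req.FindEntityError is raised
   index_card = find_one_entity('Card', title=u'index')
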
--- a/server/msplanner.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/server/msplanner.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -519,6 +519,16 @@
                 invariant = getattr(lhs, '_q_invariant', False)
                 # XXX NOT NOT
                 neged = srel.neged(traverse_scope=True) or (rel and rel.neged(strict=True))
+                has_copy_based_source = False
+                sources_ = []
+                for source in sources:
+                    if source.copy_based_source:
+                        has_copy_based_source = True
+                        if not self.system_source in sources_:
+                            sources_.append(self.system_source)
+                    else:
+                        sources_.append(source)
+                sources = sources_
                 if neged:
                     for source in sources:
                         if invariant and source is self.system_source:
@@ -535,7 +545,8 @@
                 if rel is None or (len(var.stinfo['relations']) == 2 and
                                    not var.stinfo['selected']):
                     self._remove_source_term(self.system_source, var)
-                    if not (len(sources) > 1 or usesys or invariant):
+                    if not (has_copy_based_source or len(sources) > 1
+                            or usesys or invariant):
                         if rel is None:
                             srel.parent.remove(srel)
                         else:
@@ -1212,11 +1223,22 @@
     def build_non_final_part(self, select, solindices, sources, insertedvars,
                              table):
         """non final step, will have to store results in a temporary table"""
+        inputmapkey = tuple(sorted(solindices))
         solutions = [self._solutions[i] for i in solindices]
-        rqlst = self.plan.finalize(select, solutions, insertedvars)
-        step = FetchStep(self.plan, rqlst, sources, table, False)
+        # XXX be smarter vs rql comparison
+        idx_key = (select.as_string(), inputmapkey,
+                   tuple(sorted(sources)), tuple(sorted(insertedvars)))
+        try:
+            # if a similar step has already been processed, simply backport its
+            # input map
+            step = self.plan.ms_steps_idx[idx_key]
+        except KeyError:
+            # processing needed
+            rqlst = self.plan.finalize(select, solutions, insertedvars)
+            step = FetchStep(self.plan, rqlst, sources, table, False)
+            self.plan.ms_steps_idx[idx_key] = step
+            self.plan.add_step(step)
         # update input map for following steps, according to processed solutions
-        inputmapkey = tuple(sorted(solindices))
         inputmap = self._inputmaps.setdefault(inputmapkey, {})
         for varname, mapping in step.outputmap.iteritems():
             if varname in inputmap and not '.' in varname and  \
@@ -1224,7 +1246,6 @@
                         self._schema.eschema(solutions[0][varname]).final):
                 self._conflicts.append((varname, inputmap[varname]))
         inputmap.update(step.outputmap)
-        self.plan.add_step(step)
 
 
 class MSPlanner(SSPlanner):
@@ -1248,6 +1269,7 @@
             print 'PLANNING', rqlst
         ppis = [PartPlanInformation(plan, select, self.rqlhelper)
                 for select in rqlst.children]
+        plan.ms_steps_idx = {}
         steps = self._union_plan(plan, ppis)
         if server.DEBUG & server.DBG_MS:
             from pprint import pprint
@@ -1398,9 +1420,7 @@
                             steps.append(ppi.build_final_part(minrqlst, solindices, inputmap,
                                                               sources, insertedvars))
                 else:
-                    table = '_T%s%s' % (''.join(sorted(v._ms_table_key() for v in terms)),
-                                        ''.join(sorted(str(i) for i in solindices)))
-                    table = plan.make_temp_table_name(table)
+                    table = plan.make_temp_table_name('T%s' % make_uid(id(select)))
                     ppi.build_non_final_part(minrqlst, solindices, sources,
                                              insertedvars, table)
         # finally: join parts, deal with aggregat/group/sorts if necessary
--- a/server/mssteps.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/server/mssteps.py	Wed Apr 27 09:54:22 2011 +0200
@@ -162,6 +162,7 @@
 
     def get_sql(self):
         self.inputmap = inputmap = self.children[-1].outputmap
+        dbhelper = self.plan.syssource.dbhelper
         # get the select clause
         clause = []
         for i, term in enumerate(self.selection):
@@ -218,12 +219,16 @@
                         if not vref.name in grouped:
                             sql[-1] += ', ' + self.inputmap[vref.name]
                             grouped.add(vref.name)
-            sql.append('ORDER BY %s' % ', '.join(clause))
-        if self.limit:
-            sql.append('LIMIT %s' % self.limit)
-        if self.offset:
-            sql.append('OFFSET %s' % self.offset)
-        return ' '.join(sql)
+            sql = dbhelper.sql_add_order_by(' '.join(sql),
+                                            clause,
+                                            None, False,
+                                            self.limit or self.offset)
+        else:
+            sql = ' '.join(sql)
+            clause = None
+
+        sql = dbhelper.sql_add_limit_offset(sql, self.limit, self.offset, clause)
+        return sql
 
     def visit_function(self, function):
         """generate SQL name for a function"""
--- a/server/querier.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/server/querier.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -38,7 +38,8 @@
 
 from cubicweb.server.utils import cleanup_solutions
 from cubicweb.server.rqlannotation import SQLGenAnnotator, set_qdata
-from cubicweb.server.ssplanner import READ_ONLY_RTYPES, add_types_restriction, EditedEntity
+from cubicweb.server.ssplanner import READ_ONLY_RTYPES, add_types_restriction
+from cubicweb.server.edition import EditedEntity
 from cubicweb.server.session import security_enabled
 
 def empty_rset(rql, args, rqlst=None):
@@ -353,7 +354,7 @@
                     myrqlst = select.copy(solutions=lchecksolutions)
                     myunion.append(myrqlst)
                     # in-place rewrite + annotation / simplification
-                    lcheckdef = [((var, 'X'), rqlexprs) for var, rqlexprs in lcheckdef]
+                    lcheckdef = [({var: 'X'}, rqlexprs) for var, rqlexprs in lcheckdef]
                     rewrite(myrqlst, lcheckdef, lchecksolutions, self.args)
                     add_noinvariant(noinvariant, restricted, myrqlst, nbtrees)
                 if () in localchecks:
--- a/server/repository.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/server/repository.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -59,6 +59,7 @@
      security_enabled
 from cubicweb.server.ssplanner import EditedEntity
 
+
 def prefill_entity_caches(entity, relations):
     session = entity._cw
     # prefill entity relation caches
@@ -134,6 +135,7 @@
             vreg = cwvreg.CubicWebVRegistry(config)
         self.vreg = vreg
         self.pyro_registered = False
+        self.pyro_uri = None
         self.info('starting repository from %s', self.config.apphome)
         # dictionary of opened sessions
         self._sessions = {}
@@ -207,12 +209,6 @@
             self.init_sources_from_database()
             if 'CWProperty' in self.schema:
                 self.vreg.init_properties(self.properties())
-            # call source's init method to complete their initialisation if
-            # needed (for instance looking for persistent configuration using an
-            # internal session, which is not possible until pools have been
-            # initialized)
-            for source in self.sources:
-                source.init()
         else:
             # call init_creating so that for instance native source can
             # configurate tsearch according to postgres version
@@ -241,11 +237,12 @@
         try:
             # FIXME: sources should be ordered (add_entity priority)
             for sourceent in session.execute(
-                'Any S, SN, SA, SC WHERE S is CWSource, '
+                'Any S, SN, SA, SC WHERE S is_instance_of CWSource, '
                 'S name SN, S type SA, S config SC').entities():
                 if sourceent.name == 'system':
                     self.system_source.eid = sourceent.eid
                     self.sources_by_eid[sourceent.eid] = self.system_source
+                    self.system_source.init(True, sourceent)
                     continue
                 self.add_source(sourceent, add_to_pools=False)
         finally:
@@ -258,34 +255,41 @@
 
     def add_source(self, sourceent, add_to_pools=True):
         source = self.get_source(sourceent.type, sourceent.name,
-                                 sourceent.host_config)
-        source.eid = sourceent.eid
+                                 sourceent.host_config, sourceent.eid)
         self.sources_by_eid[sourceent.eid] = source
         self.sources_by_uri[sourceent.name] = source
         if self.config.source_enabled(source):
-            self.sources.append(source)
-            self.querier.set_planner()
-            if add_to_pools:
-                for pool in self.pools:
-                    pool.add_source(source)
+            # call the source's init method to complete its initialisation if
+            # needed (for instance looking for persistent configuration using an
+            # internal session, which is not possible until pools have been
+            # initialized)
+            source.init(True, sourceent)
+            if not source.copy_based_source:
+                self.sources.append(source)
+                self.querier.set_planner()
+                if add_to_pools:
+                    for pool in self.pools:
+                        pool.add_source(source)
+        else:
+            source.init(False, sourceent)
         self._clear_planning_caches()
 
     def remove_source(self, uri):
         source = self.sources_by_uri.pop(uri)
         del self.sources_by_eid[source.eid]
-        if self.config.source_enabled(source):
+        if self.config.source_enabled(source) and not source.copy_based_source:
             self.sources.remove(source)
             self.querier.set_planner()
             for pool in self.pools:
                 pool.remove_source(source)
         self._clear_planning_caches()
 
-    def get_source(self, type, uri, source_config):
+    def get_source(self, type, uri, source_config, eid=None):
         # set uri and type in source config so it's available through
         # source_defs()
         source_config['uri'] = uri
         source_config['type'] = type
-        return sources.get_source(type, source_config, self)
+        return sources.get_source(type, source_config, self, eid)
 
     def set_schema(self, schema, resetvreg=True, rebuildinfered=True):
         if rebuildinfered:
@@ -413,7 +417,9 @@
                 self.exception('error while closing %s' % pool)
                 continue
         if self.pyro_registered:
-            pyro_unregister(self.config)
+            if self._use_pyrons():
+                pyro_unregister(self.config)
+            self.pyro_uri = None
         hits, misses = self.querier.cache_hit, self.querier.cache_miss
         try:
             self.info('rql st cache hit/miss: %s/%s (%s%% hits)', hits, misses,
@@ -427,33 +433,24 @@
         except ZeroDivisionError:
             pass
 
-    def _login_from_email(self, login):
-        session = self.internal_session()
-        try:
-            rset = session.execute('Any L WHERE U login L, U primary_email M, '
-                                   'M address %(login)s', {'login': login},
-                                   build_descr=False)
-            if rset.rowcount == 1:
-                login = rset[0][0]
-        finally:
-            session.close()
-        return login
-
-    def authenticate_user(self, session, login, **kwargs):
-        """validate login / password, raise AuthenticationError on failure
-        return associated CWUser instance on success
+    def check_auth_info(self, session, login, authinfo):
+        """validate authentication, raise AuthenticationError on failure, return
+        associated CWUser's eid on success.
         """
-        if self.vreg.config['allow-email-login'] and '@' in login:
-            login = self._login_from_email(login)
         for source in self.sources:
             if source.support_entity('CWUser'):
                 try:
-                    eid = source.authenticate(session, login, **kwargs)
-                    break
+                    return source.authenticate(session, login, **authinfo)
                 except AuthenticationError:
                     continue
         else:
             raise AuthenticationError('authentication failed with all sources')
+
+    def authenticate_user(self, session, login, **authinfo):
+        """validate login / password, raise AuthenticationError on failure
+        return associated CWUser instance on success
+        """
+        eid = self.check_auth_info(session, login, authinfo)
         cwuser = self._build_user(session, eid)
         if self.config.consider_user_state and \
                not cwuser.cw_adapt_to('IWorkflowable').state in cwuser.AUTHENTICABLE_STATES:
@@ -1029,9 +1026,10 @@
         return extid
 
     def extid2eid(self, source, extid, etype, session=None, insert=True,
-                  recreate=False):
+                  sourceparams=None):
         """get eid from a local id. An eid is attributed if no record is found"""
-        cachekey = (extid, source.uri)
+        uri = 'system' if source.copy_based_source else source.uri
+        cachekey = (extid, uri)
         try:
             return self._extid_cache[cachekey]
         except KeyError:
@@ -1040,20 +1038,10 @@
         if session is None:
             session = self.internal_session()
             reset_pool = True
-        eid = self.system_source.extid2eid(session, source, extid)
+        eid = self.system_source.extid2eid(session, uri, extid)
         if eid is not None:
             self._extid_cache[cachekey] = eid
-            self._type_source_cache[eid] = (etype, source.uri, extid)
-            # XXX used with extlite (eg vcsfile), probably not needed anymore
-            if recreate:
-                entity = source.before_entity_insertion(session, extid, etype, eid)
-                entity._cw_recreating = True
-                if source.should_call_hooks:
-                    self.hm.call_hooks('before_add_entity', session, entity=entity)
-                # XXX add fti op ?
-                source.after_entity_insertion(session, extid, entity)
-                if source.should_call_hooks:
-                    self.hm.call_hooks('after_add_entity', session, entity=entity)
+            self._type_source_cache[eid] = (etype, uri, extid)
             if reset_pool:
                 session.reset_pool()
             return eid
@@ -1071,13 +1059,14 @@
         try:
             eid = self.system_source.create_eid(session)
             self._extid_cache[cachekey] = eid
-            self._type_source_cache[eid] = (etype, source.uri, extid)
-            entity = source.before_entity_insertion(session, extid, etype, eid)
+            self._type_source_cache[eid] = (etype, uri, extid)
+            entity = source.before_entity_insertion(
+                session, extid, etype, eid, sourceparams)
             if source.should_call_hooks:
                 self.hm.call_hooks('before_add_entity', session, entity=entity)
             # XXX call add_info with complete=False ?
             self.add_info(session, entity, source, extid)
-            source.after_entity_insertion(session, extid, entity)
+            source.after_entity_insertion(session, extid, entity, sourceparams)
             if source.should_call_hooks:
                 self.hm.call_hooks('after_add_entity', session, entity=entity)
             session.commit(reset_pool)
@@ -1094,7 +1083,7 @@
         hook.CleanupNewEidsCacheOp.get_instance(session).add_data(entity.eid)
         self.system_source.add_info(session, entity, source, extid, complete)
 
-    def delete_info(self, session, entity, sourceuri, extid, scleanup=False):
+    def delete_info(self, session, entity, sourceuri, extid, scleanup=None):
         """called by external source when some entity known by the system source
         has been deleted in the external source
         """
@@ -1103,7 +1092,7 @@
         hook.CleanupDeletedEidsCacheOp.get_instance(session).add_data(entity.eid)
         self._delete_info(session, entity, sourceuri, extid, scleanup)
 
-    def delete_info_multi(self, session, entities, sourceuri, extids, scleanup=False):
+    def delete_info_multi(self, session, entities, sourceuri, extids, scleanup=None):
         """same as delete_info but accepts a list of entities and
         extids with the same etype and belonging to the same source
         """
@@ -1114,7 +1103,7 @@
             op.add_data(entity.eid)
         self._delete_info_multi(session, entities, sourceuri, extids, scleanup)
 
-    def _delete_info(self, session, entity, sourceuri, extid, scleanup=False):
+    def _delete_info(self, session, entity, sourceuri, extid, scleanup=None):
         """delete system information on deletion of an entity:
         * delete all remaining relations from/to this entity
         * call delete info on the system source which will transfer record from
@@ -1135,18 +1124,19 @@
                     rql = 'DELETE X %s Y WHERE X eid %%(x)s' % rtype
                 else:
                     rql = 'DELETE Y %s X WHERE X eid %%(x)s' % rtype
-                if scleanup:
+                if scleanup is not None:
                     # source cleaning: only delete relations stored locally
-                    rql += ', NOT (Y cw_source S, S name %(source)s)'
+                    # (here, scleanup is expected to be the source's eid)
+                    rql += ', NOT (Y cw_source S, S eid %(seid)s)'
                 try:
-                    session.execute(rql, {'x': eid, 'source': sourceuri},
+                    session.execute(rql, {'x': eid, 'seid': scleanup},
                                     build_descr=False)
                 except:
                     self.exception('error while cascading delete for entity %s '
                                    'from %s. RQL: %s', entity, sourceuri, rql)
         self.system_source.delete_info(session, entity, sourceuri, extid)
 
-    def _delete_info_multi(self, session, entities, sourceuri, extids, scleanup=False):
+    def _delete_info_multi(self, session, entities, sourceuri, extids, scleanup=None):
         """same as _delete_info but accepts a list of entities with
         the same etype and belinging to the same source.
         """
@@ -1167,12 +1157,11 @@
                     rql = 'DELETE X %s Y WHERE X eid IN (%s)' % (rtype, in_eids)
                 else:
                     rql = 'DELETE Y %s X WHERE X eid IN (%s)' % (rtype, in_eids)
-                if scleanup:
+                if scleanup is not None:
                     # source cleaning: only delete relations stored locally
-                    rql += ', NOT (Y cw_source S, S name %(source)s)'
+                    rql += ', NOT (Y cw_source S, S eid %(seid)s)'
                 try:
-                    session.execute(rql, {'source': sourceuri},
-                                    build_descr=False)
+                    session.execute(rql, {'seid': scleanup}, build_descr=False)
                 except:
                     self.exception('error while cascading delete for entity %s '
                                    'from %s. RQL: %s', entities, sourceuri, rql)
@@ -1214,6 +1203,8 @@
         if suri == 'system':
             extid = None
         else:
+            if source.copy_based_source:
+                suri = 'system'
             extid = source.get_extid(entity)
             self._extid_cache[(str(extid), suri)] = entity.eid
         self._type_source_cache[entity.eid] = (entity.__regid__, suri, extid)
@@ -1432,20 +1423,32 @@
         config['pyro-instance-id'] = appid
         return appid
 
+    def _use_pyrons(self):
+        """return True if the pyro-ns-host is set to something other
+        than NO_PYRONS, meaning we want to go through a pyro
+        nameserver"""
+        return self.config['pyro-ns-host'] != 'NO_PYRONS'
+
     def pyro_register(self, host=''):
         """register the repository as a pyro object"""
         from logilab.common import pyro_ext as pyro
         daemon = pyro.register_object(self, self.pyro_appid,
                                       daemonhost=self.config['pyro-host'],
-                                      nshost=self.config['pyro-ns-host'])
+                                      nshost=self.config['pyro-ns-host'],
+                                      use_pyrons=self._use_pyrons())
         self.info('repository registered as a pyro object %s', self.pyro_appid)
+        self.pyro_uri = pyro.get_object_uri(self.pyro_appid)
+        self.info('pyro uri is: %s', self.pyro_uri)
         self.pyro_registered = True
         # register a looping task to regularly ensure we're still registered
         # into the pyro name server
-        self.looping_task(60*10, self._ensure_pyro_ns)
+        if self._use_pyrons():
+            self.looping_task(60*10, self._ensure_pyro_ns)
         return daemon
 
     def _ensure_pyro_ns(self):
+        if not self._use_pyrons():
+            return
         from logilab.common import pyro_ext as pyro
         pyro.ns_reregister(self.pyro_appid, nshost=self.config['pyro-ns-host'])
         self.info('repository re-registered as a pyro object %s',
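
On the repository side, a sketch of what the NO_PYRONS setting enables (assuming `repo`
is a started Repository whose instance configuration sets `pyro-ns-host` to `NO_PYRONS`):

.. sourcecode:: python

   # a Pyro daemon is still created, but nameserver registration and the
   # periodic re-registration task are skipped; the direct URI is kept on
   # the repository object so clients can be pointed at it explicitly.
   daemon = repo.pyro_register()
   print repo.pyro_uri
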
--- a/server/rqlannotation.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/server/rqlannotation.py	Wed Apr 27 09:54:22 2011 +0200
@@ -98,10 +98,19 @@
                     # variable of an inlined relation
                     if not rel in stinfo['rhsrelations'] and rschema.inlined:
                         break
-                else:
-                    # variable used as main variable of an optional relation
-                    # can't be invariant
+                # variable used as main variable of an optional relation can't
+                # be invariant, unless we can use some other relation as
+                # reference for the outer join
+                elif not stinfo['constnode']:
                     break
+                elif len(stinfo['relations']) == 2:
+                    if onlhs:
+                        ostinfo = rhs.children[0].variable.stinfo
+                    else:
+                        ostinfo = lhs.variable.stinfo
+                    if not any(orel for orel in ostinfo['relations']
+                               if orel.optional and orel is not rel):
+                        break
             if rschema.final or (onlhs and rschema.inlined):
                 if rschema.type != 'has_text':
                     # need join anyway if the variable appears in a final or
--- a/server/schemaserial.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/server/schemaserial.py	Wed Apr 27 09:54:22 2011 +0200
@@ -557,6 +557,8 @@
                     yield ('SET X %s_permission Y WHERE Y eid %%(g)s, X eid %%(x)s' % action,
                            {'g': groupmap[group_or_rqlexpr]})
                 except KeyError:
+                    print ("WARNING: group %s used in permissions for %s was ignored because it doesn't exist."
+                           " You may want to add it into a precreate.py file" % (group_or_rqlexpr, erschema))
                     continue
             else:
                 # rqlexpr
--- a/server/serverconfig.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/server/serverconfig.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -215,8 +215,6 @@
 
     # read the schema from the database
     read_instance_schema = True
-    # set to true while creating an instance
-    creating = False
     # set this to true to get a minimal repository, for instance to get cubes
     # information on commands such as i18ninstance, db-restore, etc...
     quick_start = False
--- a/server/serverctl.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/server/serverctl.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -27,11 +27,11 @@
 
 from logilab.common import nullobject
 from logilab.common.configuration import Configuration
-from logilab.common.shellutils import ASK
+from logilab.common.shellutils import ASK, generate_password
 
 from cubicweb import AuthenticationError, ExecutionError, ConfigurationError
 from cubicweb.toolsutils import Command, CommandHandler, underline_title
-from cubicweb.cwctl import CWCTL
+from cubicweb.cwctl import CWCTL, check_options_consistency
 from cubicweb.server import SOURCE_TYPES
 from cubicweb.server.serverconfig import (
     USER_OPTIONS, ServerConfiguration, SourceConfiguration,
@@ -39,7 +39,7 @@
 
 # utility functions ###########################################################
 
-def source_cnx(source, dbname=None, special_privs=False, verbose=True):
+def source_cnx(source, dbname=None, special_privs=False, interactive=True):
     """open and return a connection to the system database defined in the
     given server.serverconfig
     """
@@ -50,21 +50,20 @@
         dbname = source['db-name']
     driver = source['db-driver']
     dbhelper = get_db_helper(driver)
-    if verbose:
+    if interactive:
         print '-> connecting to %s database' % driver,
         if dbhost:
             print '%s@%s' % (dbname, dbhost),
         else:
             print dbname,
     if dbhelper.users_support:
-        if not special_privs and source.get('db-user'):
-            user = source['db-user']
-            if verbose:
+        if not interactive or (not special_privs and source.get('db-user')):
+            user = source.get('db-user', os.environ.get('USER', ''))
+            if interactive:
                 print 'as', user
             password = source.get('db-password')
         else:
-            if verbose:
-                print
+            print
             if special_privs:
                 print 'WARNING'
                 print ('the user will need the following special access rights '
@@ -95,7 +94,7 @@
     return cnx
 
 def system_source_cnx(source, dbms_system_base=False,
-                      special_privs='CREATE/DROP DATABASE', verbose=True):
+                      special_privs='CREATE/DROP DATABASE', interactive=True):
     """shortcut to get a connextion to the instance system database
     defined in the given config. If <dbms_system_base> is True,
     connect to the dbms system database instead (for task such as
@@ -104,10 +103,12 @@
     if dbms_system_base:
         from logilab.database import get_db_helper
         system_db = get_db_helper(source['db-driver']).system_database()
-        return source_cnx(source, system_db, special_privs=special_privs, verbose=verbose)
-    return source_cnx(source, special_privs=special_privs, verbose=verbose)
+        return source_cnx(source, system_db, special_privs=special_privs,
+                          interactive=interactive)
+    return source_cnx(source, special_privs=special_privs,
+                      interactive=interactive)
 
-def _db_sys_cnx(source, special_privs, verbose=True):
+def _db_sys_cnx(source, special_privs, interactive=True):
     """return a connection on the RDMS system table (to create/drop a user or a
     database)
     """
@@ -118,7 +119,7 @@
     helper = get_db_helper(driver)
     # connect on the dbms system base to create our base
     cnx = system_source_cnx(source, True, special_privs=special_privs,
-                            verbose=verbose)
+                            interactive=interactive)
     # disable autocommit (isolation_level(1)) because DROP and
     # CREATE DATABASE can't be executed in a transaction
     try:
@@ -153,38 +154,49 @@
     cmdname = 'create'
     cfgname = 'repository'
 
-    def bootstrap(self, cubes, inputlevel=0):
+    def bootstrap(self, cubes, automatic=False, inputlevel=0):
         """create an instance by copying files from the given cube and by asking
         information necessary to build required configuration files
         """
         config = self.config
-        print underline_title('Configuring the repository')
-        config.input_config('email', inputlevel)
-        # ask for pyro configuration if pyro is activated and we're not using a
-        # all-in-one config, in which case this is done by the web side command
-        # handler
-        if config.pyro_enabled() and config.name != 'all-in-one':
-            config.input_config('pyro', inputlevel)
-        print '\n'+underline_title('Configuring the sources')
+        if not automatic:
+            print underline_title('Configuring the repository')
+            config.input_config('email', inputlevel)
+            # ask for pyro configuration if pyro is activated and we're not
+            # using a all-in-one config, in which case this is done by the web
+            # side command handler
+            if config.pyro_enabled() and config.name != 'all-in-one':
+                config.input_config('pyro', inputlevel)
+            print '\n'+underline_title('Configuring the sources')
         sourcesfile = config.sources_file()
-        # XXX hack to make Method('default_instance_id') usable in db option
-        # defs (in native.py)
+        # hack to make Method('default_instance_id') usable in db option defs
+        # (in native.py)
         sconfig = SourceConfiguration(config,
                                       options=SOURCE_TYPES['native'].options)
-        sconfig.input_config(inputlevel=inputlevel)
+        if not automatic:
+            sconfig.input_config(inputlevel=inputlevel)
+            print
         sourcescfg = {'system': sconfig}
-        print
-        sconfig = Configuration(options=USER_OPTIONS)
-        sconfig.input_config(inputlevel=inputlevel)
+        if automatic:
+            # XXX modify a copy
+            password = generate_password()
+            print '-> set administrator account to admin / %s' % password
+            USER_OPTIONS[1][1]['default'] = password
+            sconfig = Configuration(options=USER_OPTIONS)
+        else:
+            sconfig = Configuration(options=USER_OPTIONS)
+            sconfig.input_config(inputlevel=inputlevel)
         sourcescfg['admin'] = sconfig
         config.write_sources_file(sourcescfg)
         # remember selected cubes for later initialization of the database
         config.write_bootstrap_cubes_file(cubes)
 
-    def postcreate(self):
-        if ASK.confirm('Run db-create to create the system database ?'):
-            verbosity = (self.config.mode == 'installed') and 'y' or 'n'
-            CWCTL.run(['db-create', self.config.appid, '--verbose=%s' % verbosity])
+    def postcreate(self, automatic=False, inputlevel=0):
+        if automatic:
+            CWCTL.run(['db-create', '--automatic', self.config.appid])
+        elif ASK.confirm('Run db-create to create the system database ?'):
+            CWCTL.run(['db-create', '--config-level', str(inputlevel),
+                       self.config.appid])
         else:
             print ('-> nevermind, you can do it later with '
                    '"cubicweb-ctl db-create %s".' % self.config.appid)
@@ -292,27 +304,30 @@
     arguments = '<instance>'
     min_args = max_args = 1
     options = (
+        ('automatic',
+         {'short': 'a', 'action' : 'store_true',
+          'default': False,
+          'help': 'automatic mode: never ask and use the default answer for '
+          'every question. This may require that your login matches a database '
+          'super user (allowed to create databases, users, etc.).',
+          }),
+        ('config-level',
+         {'short': 'l', 'type' : 'int', 'metavar': '<level>',
+          'default': 0,
+          'help': 'configuration level (0..2): 0 will ask for essential '
+          'configuration parameters only while 2 will ask for all parameters',
+          }),
         ('create-db',
          {'short': 'c', 'type': 'yn', 'metavar': '<y or n>',
           'default': True,
-          'help': 'create the database (yes by default)'}),
-        ('verbose',
-         {'short': 'v', 'type' : 'yn', 'metavar': '<verbose>',
-          'default': 'n',
-          'help': 'verbose mode: will ask all possible configuration questions',
-          }
-         ),
-        ('automatic',
-         {'short': 'a', 'type' : 'yn', 'metavar': '<auto>',
-          'default': 'n',
-          'help': 'automatic mode: never ask and use default answer to every question',
-          }
-         ),
+          'help': 'create the database (yes by default)'
+          }),
         )
+
     def run(self, args):
         """run the command with its specific arguments"""
         from logilab.database import get_db_helper
-        verbose = self.get('verbose')
+        check_options_consistency(self.config)
         automatic = self.get('automatic')
         appid = args.pop()
         config = ServerConfiguration.config_for(appid)
@@ -329,7 +344,7 @@
             print '\n'+underline_title('Creating the system database')
             # connect on the dbms system base to create our base
             dbcnx = _db_sys_cnx(source, 'CREATE/DROP DATABASE and / or USER',
-                                verbose=verbose)
+                                interactive=not automatic)
             cursor = dbcnx.cursor()
             try:
                 if helper.users_support:
@@ -342,6 +357,8 @@
                     if automatic or ASK.confirm('Database %s already exists -- do you want to drop it ?' % dbname):
                         cursor.execute('DROP DATABASE %s' % dbname)
                     else:
+                        print ('you may want to run "cubicweb-ctl db-init '
+                               '--drop %s" manually to continue.' % config.appid)
                         return
                 createdb(helper, source, dbcnx, cursor)
                 dbcnx.commit()
@@ -350,7 +367,7 @@
                 dbcnx.rollback()
                 raise
         cnx = system_source_cnx(source, special_privs='CREATE LANGUAGE',
-                                verbose=verbose)
+                                interactive=not automatic)
         cursor = cnx.cursor()
         helper.init_fti_extensions(cursor)
         # postgres specific stuff
@@ -363,8 +380,12 @@
         cnx.commit()
         print '-> database for instance %s created and necessary extensions installed.' % appid
         print
-        if automatic or ASK.confirm('Run db-init to initialize the system database ?'):
-            CWCTL.run(['db-init', config.appid])
+        if automatic:
+            CWCTL.run(['db-init', '--automatic', '--config-level', '0',
+                       config.appid])
+        elif ASK.confirm('Run db-init to initialize the system database ?'):
+            CWCTL.run(['db-init', '--config-level',
+                       str(self.config.config_level), config.appid])
         else:
             print ('-> nevermind, you can do it later with '
                    '"cubicweb-ctl db-init %s".' % config.appid)
@@ -383,18 +404,27 @@
     arguments = '<instance>'
     min_args = max_args = 1
     options = (
+        ('automatic',
+         {'short': 'a', 'action' : 'store_true',
+          'default': False,
+          'help': 'automatic mode: never ask and use the default answer for '
+          'every question.',
+          }),
+        ('config-level',
+         {'short': 'l', 'type': 'int', 'default': 1,
+          'help': 'level threshold for questions asked when configuring '
+          'another source'
+          }),
         ('drop',
          {'short': 'd', 'action': 'store_true',
           'default': False,
-          'help': 'insert drop statements to remove previously existant \
-tables, indexes... (no by default)'}),
-        ('config-level',
-         {'short': 'l', 'type': 'int', 'default': 1,
-          'help': 'level threshold for questions asked when configuring another source'
+          'help': 'insert drop statements to remove previously existing '
+          'tables, indexes... (no by default)'
           }),
         )
 
     def run(self, args):
+        check_options_consistency(self.config)
         print '\n'+underline_title('Initializing the system database')
         from cubicweb.server import init_repository
         from logilab.database import get_connection
@@ -415,8 +445,10 @@
                 'the %s file. Resolve this first (error: %s).'
                 % (config.sources_file(), str(ex).strip()))
         init_repository(config, drop=self.config.drop)
-        while ASK.confirm('Enter another source ?', default_is_yes=False):
-            CWCTL.run(['add-source', '--config-level', self.config.config_level, config.appid])
+        if not self.config.automatic:
+            while ASK.confirm('Enter another source ?', default_is_yes=False):
+                CWCTL.run(['add-source', '--config-level',
+                           str(self.config.config_level), config.appid])
 
 
 class AddSourceCommand(Command):
@@ -927,39 +959,11 @@
         mih.cmd_synchronize_schema()
 
 
-class CheckMappingCommand(Command):
-    """Check content of the mapping file of an external source.
-
-    The mapping is checked against the instance's schema, searching for
-    inconsistencies or stuff you may have forgotten. It's higly recommanded to
-    run it when you setup a multi-sources instance.
-
-    <instance>
-      the identifier of the instance.
-
-    <mapping file>
-      the mapping file to check.
-    """
-    name = 'check-mapping'
-    arguments = '<instance> <mapping file>'
-    min_args = max_args = 2
-
-    def run(self, args):
-        from cubicweb.server.checkintegrity import check_mapping
-        from cubicweb.server.sources.pyrorql import load_mapping_file
-        appid, mappingfile = args
-        config = ServerConfiguration.config_for(appid)
-        config.quick_start = True
-        mih = config.migration_handler(connect=False, verbosity=1)
-        repo = mih.repo_connect() # necessary to get cubes
-        check_mapping(config.load_schema(), load_mapping_file(mappingfile))
-
 for cmdclass in (CreateInstanceDBCommand, InitInstanceCommand,
                  GrantUserOnInstanceCommand, ResetAdminPasswordCommand,
                  StartRepositoryCommand,
                  DBDumpCommand, DBRestoreCommand, DBCopyCommand,
                  AddSourceCommand, CheckRepositoryCommand, RebuildFTICommand,
                  SynchronizeInstanceSchemaCommand,
-                 CheckMappingCommand,
                  ):
     CWCTL.register(cmdclass)
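
Note on the options reshuffle above: db-create and db-init now share an --automatic (-a) flag and a --config-level (-l) option, and check_options_consistency() is called at the start of both run() methods; in automatic mode db-create chains db-init by itself. A sketch of the programmatic equivalent using the same CWCTL entry point as the hunks above (the instance id 'myinstance' is only an example):

    from cubicweb.cwctl import CWCTL

    # same effect as running `cubicweb-ctl db-create --automatic myinstance`
    # from a shell: the database is created using default answers, then
    # db-init is invoked with --automatic as done in CreateInstanceDBCommand
    CWCTL.run(['db-create', '--automatic', 'myinstance'])
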
--- a/server/session.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/server/session.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -37,6 +37,7 @@
 from cubicweb.dbapi import ConnectionProperties
 from cubicweb.utils import make_uid, RepeatList
 from cubicweb.rqlrewrite import RQLRewriter
+from cubicweb.server.edition import EditedEntity
 
 ETYPE_PYOBJ_MAP[Binary] = 'Bytes'
 
@@ -215,8 +216,9 @@
         with security_enabled(self, False, False):
             if self.vreg.schema[rtype].inlined:
                 entity = self.entity_from_eid(fromeid)
-                entity[rtype] = toeid
-                self.repo.glob_update_entity(self, entity, set((rtype,)))
+                edited = EditedEntity(entity)
+                edited.edited_attribute(rtype, toeid)
+                self.repo.glob_update_entity(self, edited)
             else:
                 self.repo.glob_add_relation(self, fromeid, rtype, toeid)
 
@@ -234,7 +236,7 @@
         with security_enabled(self, False, False):
             if self.vreg.schema[rtype].inlined:
                 entity = self.entity_from_eid(fromeid)
-                entity[rtype] = None
+                entity.cw_attr_cache[rtype] = None
                 self.repo.glob_update_entity(self, entity, set((rtype,)))
             else:
                 self.repo.glob_delete_relation(self, fromeid, rtype, toeid)
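
Note on the hunk above: inlined-relation updates now go through the EditedEntity API, imported from the new cubicweb.server.edition module, instead of mutating the entity's attribute cache directly. A minimal sketch of the pattern (session, entity, rtype and toeid stand for the objects used in the hunk):

    from cubicweb.server.edition import EditedEntity

    # record the change on a wrapper, then let the repository propagate it
    edited = EditedEntity(entity)
    edited.edited_attribute(rtype, toeid)
    session.repo.glob_update_entity(session, edited)
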
--- a/server/sources/__init__.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/server/sources/__init__.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -19,15 +19,19 @@
 
 __docformat__ = "restructuredtext en"
 
+import itertools
 from os.path import join, splitext
 from datetime import datetime, timedelta
 from logging import getLogger
-import itertools
+
+from logilab.common import configuration
 
-from cubicweb import set_log_methods, server
+from yams.schema import role_name
+
+from cubicweb import ValidationError, set_log_methods, server
 from cubicweb.schema import VIRTUAL_RTYPES
 from cubicweb.server.sqlutils import SQL_PREFIX
-from cubicweb.server.ssplanner import EditedEntity
+from cubicweb.server.edition import EditedEntity
 
 
 def dbg_st_search(uri, union, varmap, args, cachekey=None, prefix='rql for'):
@@ -75,6 +79,9 @@
 
 class AbstractSource(object):
     """an abstract class for sources"""
+    # does the source copy data into the system source, or is it a *true* source
+    # (i.e. entities are not stored physically here)
+    copy_based_source = False
 
     # boolean telling if modification hooks should be called when something is
     # modified in this source
@@ -103,51 +110,22 @@
     # force deactivation (configuration error for instance)
     disabled = False
 
-    def __init__(self, repo, source_config, *args, **kwargs):
+    # source configuration options
+    options = ()
+
+    def __init__(self, repo, source_config, eid=None):
         self.repo = repo
-        self.uri = source_config['uri']
-        set_log_methods(self, getLogger('cubicweb.sources.'+self.uri))
         self.set_schema(repo.schema)
         self.support_relations['identity'] = False
-        self.eid = None
+        self.eid = eid
         self.public_config = source_config.copy()
         self.remove_sensitive_information(self.public_config)
-
-    def init_creating(self):
-        """method called by the repository once ready to create a new instance"""
-        pass
-
-    def init(self):
-        """method called by the repository once ready to handle request"""
-        pass
-
-    def backup(self, backupfile, confirm):
-        """method called to create a backup of source's data"""
-        pass
-
-    def restore(self, backupfile, confirm, drop):
-        """method called to restore a backup of source's data"""
-        pass
-
-    def close_pool_connections(self):
-        for pool in self.repo.pools:
-            pool._cursors.pop(self.uri, None)
-            pool.source_cnxs[self.uri][1].close()
-
-    def open_pool_connections(self):
-        for pool in self.repo.pools:
-            pool.source_cnxs[self.uri] = (self, self.get_connection())
-
-    def reset_caches(self):
-        """method called during test to reset potential source caches"""
-        pass
-
-    def clear_eid_cache(self, eid, etype):
-        """clear potential caches for the given eid"""
-        pass
+        self.uri = source_config.pop('uri')
+        set_log_methods(self, getLogger('cubicweb.sources.'+self.uri))
+        source_config.pop('type')
 
     def __repr__(self):
-        return '<%s source @%#x>' % (self.uri, id(self))
+        return '<%s source %s @%#x>' % (self.uri, self.eid, id(self))
 
     def __cmp__(self, other):
         """simple comparison function to get predictable source order, with the
@@ -161,10 +139,137 @@
             return -1
         return cmp(self.uri, other.uri)
 
+    def backup(self, backupfile, confirm):
+        """method called to create a backup of source's data"""
+        pass
+
+    def restore(self, backupfile, confirm, drop):
+        """method called to restore a backup of source's data"""
+        pass
+
+    @classmethod
+    def check_conf_dict(cls, eid, confdict, _=unicode, fail_if_unknown=True):
+        """check configuration of source entity. Return config dict properly
+        typed with defaults set.
+        """
+        processed = {}
+        for optname, optdict in cls.options:
+            value = confdict.pop(optname, optdict.get('default'))
+            if value is configuration.REQUIRED:
+                if not fail_if_unknown:
+                    continue
+                msg = _('specifying %s is mandatory') % optname
+                raise ValidationError(eid, {role_name('config', 'subject'): msg})
+            elif value is not None:
+                # type check
+                try:
+                    value = configuration.convert(value, optdict, optname)
+                except Exception, ex:
+                    msg = unicode(ex) # XXX internationalization
+                    raise ValidationError(eid, {role_name('config', 'subject'): msg})
+            processed[optname] = value
+        # cw < 3.10 bw compat
+        try:
+            processed['adapter'] = confdict['adapter']
+        except:
+            pass
+        # check for unknown options
+        if confdict and not confdict.keys() == ['adapter']:
+            if fail_if_unknown:
+                msg = _('unknown options %s') % ', '.join(confdict)
+                raise ValidationError(eid, {role_name('config', 'subject'): msg})
+            else:
+                logger = getLogger('cubicweb.sources')
+                logger.warning('unknown options %s', ', '.join(confdict))
+                # add options to processed, they may be necessary during migration
+                processed.update(confdict)
+        return processed
+
+    @classmethod
+    def check_config(cls, source_entity):
+        """check configuration of source entity"""
+        return cls.check_conf_dict(source_entity.eid, source_entity.host_config,
+                                    _=source_entity._cw._)
+
+    def update_config(self, source_entity, typedconfig):
+        """update configuration from source entity. `typedconfig` is config
+        properly typed with defaults set
+        """
+        pass
+
+    # source initialization / finalization #####################################
+
     def set_schema(self, schema):
         """set the instance'schema"""
         self.schema = schema
 
+    def init_creating(self):
+        """method called by the repository once ready to create a new instance"""
+        pass
+
+    def init(self, activated, source_entity):
+        """method called by the repository once ready to handle request.
+        `activated` is a boolean flag telling if the source is activated or not.
+        """
+        pass
+
+    PUBLIC_KEYS = ('type', 'uri')
+    def remove_sensitive_information(self, sourcedef):
+        """remove sensitive information such as login / password from source
+        definition
+        """
+        for key in sourcedef.keys():
+            if not key in self.PUBLIC_KEYS:
+                sourcedef.pop(key)
+
+    # connections handling #####################################################
+
+    def get_connection(self):
+        """open and return a connection to the source"""
+        raise NotImplementedError()
+
+    def check_connection(self, cnx):
+        """Check connection validity, return None if the connection is still
+        valid else a new connection (called when the pool using the given
+        connection is being attached to a session). Do nothing by default.
+        """
+        pass
+
+    def close_pool_connections(self):
+        for pool in self.repo.pools:
+            pool._cursors.pop(self.uri, None)
+            pool.source_cnxs[self.uri][1].close()
+
+    def open_pool_connections(self):
+        for pool in self.repo.pools:
+            pool.source_cnxs[self.uri] = (self, self.get_connection())
+
+    def pool_reset(self, cnx):
+        """the pool using the given connection is being reseted from its current
+        attached session
+
+        do nothing by default
+        """
+        pass
+
+    # cache handling ###########################################################
+
+    def reset_caches(self):
+        """method called during test to reset potential source caches"""
+        pass
+
+    def clear_eid_cache(self, eid, etype):
+        """clear potential caches for the given eid"""
+        pass
+
+    # external source api ######################################################
+
+    def eid2extid(self, eid, session=None):
+        return self.repo.eid2extid(self, eid, session)
+
+    def extid2eid(self, value, etype, session=None, **kwargs):
+        return self.repo.extid2eid(self, value, etype, session, **kwargs)
+
     def support_entity(self, etype, write=False):
         """return true if the given entity's type is handled by this adapter
         if write is true, return true only if it's a RW support
@@ -219,98 +324,59 @@
             return rtype in self.cross_relations
         return rtype not in self.dont_cross_relations
 
-    def eid2extid(self, eid, session=None):
-        return self.repo.eid2extid(self, eid, session)
-
-    def extid2eid(self, value, etype, session=None, **kwargs):
-        return self.repo.extid2eid(self, value, etype, session, **kwargs)
+    def before_entity_insertion(self, session, lid, etype, eid, sourceparams):
+        """called by the repository when an eid has been attributed for an
+        entity stored here but the entity has not been inserted in the system
+        table yet.
 
-    PUBLIC_KEYS = ('type', 'uri')
-    def remove_sensitive_information(self, sourcedef):
-        """remove sensitive information such as login / password from source
-        definition
-        """
-        for key in sourcedef.keys():
-            if not key in self.PUBLIC_KEYS:
-                sourcedef.pop(key)
-
-    def _cleanup_system_relations(self, session):
-        """remove relation in the system source referencing entities coming from
-        this source
+        This method must return an Entity instance representing this
+        entity.
         """
-        cu = session.system_sql('SELECT eid FROM entities WHERE source=%(uri)s',
-                                {'uri': self.uri})
-        myeids = ','.join(str(r[0]) for r in cu.fetchall())
-        if not myeids:
-            return
-        # delete relations referencing one of those eids
-        eidcolum = SQL_PREFIX + 'eid'
-        for rschema in self.schema.relations():
-            if rschema.final or rschema.type in VIRTUAL_RTYPES:
-                continue
-            if rschema.inlined:
-                column = SQL_PREFIX + rschema.type
-                for subjtype in rschema.subjects():
-                    table = SQL_PREFIX + str(subjtype)
-                    for objtype in rschema.objects(subjtype):
-                        if self.support_entity(objtype):
-                            sql = 'UPDATE %s SET %s=NULL WHERE %s IN (%s);' % (
-                                table, column, eidcolum, myeids)
-                            session.system_sql(sql)
-                            break
-                continue
-            for etype in rschema.subjects():
-                if self.support_entity(etype):
-                    sql = 'DELETE FROM %s_relation WHERE eid_from IN (%s);' % (
-                        rschema.type, myeids)
-                    session.system_sql(sql)
-                    break
-            for etype in rschema.objects():
-                if self.support_entity(etype):
-                    sql = 'DELETE FROM %s_relation WHERE eid_to IN (%s);' % (
-                        rschema.type, myeids)
-                    session.system_sql(sql)
-                    break
+        entity = self.repo.vreg['etypes'].etype_class(etype)(session)
+        entity.eid = eid
+        entity.cw_edited = EditedEntity(entity)
+        return entity
 
-    def cleanup_entities_info(self, session):
-        """cleanup system tables from information for entities coming from
-        this source. This should be called when a source is removed to
-        properly cleanup the database
-        """
-        self._cleanup_system_relations(session)
-        # fti / entities tables cleanup
-        # sqlite doesn't support DELETE FROM xxx USING yyy
-        dbhelper = session.pool.source('system').dbhelper
-        session.system_sql('DELETE FROM %s WHERE %s.%s IN (SELECT eid FROM '
-                           'entities WHERE entities.source=%%(uri)s)'
-                           % (dbhelper.fti_table, dbhelper.fti_table,
-                              dbhelper.fti_uid_attr),
-                           {'uri': self.uri})
-        session.system_sql('DELETE FROM entities WHERE source=%(uri)s',
-                           {'uri': self.uri})
-
-    # abstract methods to override (at least) in concrete source classes #######
-
-    def get_connection(self):
-        """open and return a connection to the source"""
-        raise NotImplementedError()
-
-    def check_connection(self, cnx):
-        """check connection validity, return None if the connection is still valid
-        else a new connection (called when the pool using the given connection is
-        being attached to a session)
-
-        do nothing by default
+    def after_entity_insertion(self, session, lid, entity, sourceparams):
+        """called by the repository after an entity stored here has been
+        inserted in the system table.
         """
         pass
 
-    def pool_reset(self, cnx):
-        """the pool using the given connection is being reseted from its current
-        attached session
+    def _load_mapping(self, session=None, **kwargs):
+        if not 'CWSourceSchemaConfig' in self.schema:
+            self.warning('instance is not mapping ready')
+            return
+        if session is None:
+            _session = self.repo.internal_session()
+        else:
+            _session = session
+        try:
+            for schemacfg in _session.execute(
+                'Any CFG,CFGO,S WHERE '
+                'CFG options CFGO, CFG cw_schema S, '
+                'CFG cw_for_source X, X eid %(x)s', {'x': self.eid}).entities():
+                self.add_schema_config(schemacfg, **kwargs)
+        finally:
+            if session is None:
+                _session.close()
 
-        do nothing by default
-        """
-        pass
+    def add_schema_config(self, schemacfg, checkonly=False):
+        """added CWSourceSchemaConfig, modify mapping accordingly"""
+        msg = schemacfg._cw._("this source doesn't use a mapping")
+        raise ValidationError(schemacfg.eid, {None: msg})
+
+    def del_schema_config(self, schemacfg, checkonly=False):
+        """deleted CWSourceSchemaConfig, modify mapping accordingly"""
+        msg = schemacfg._cw._("this source doesn't use a mapping")
+        raise ValidationError(schemacfg.eid, {None: msg})
+
+    def update_schema_config(self, schemacfg, checkonly=False):
+        """updated CWSourceSchemaConfig, modify mapping accordingly"""
+        self.del_schema_config(schemacfg, checkonly)
+        self.add_schema_config(schemacfg, checkonly)
+
+    # user authentication api ##################################################
 
     def authenticate(self, session, login, **kwargs):
         """if the source support CWUser entity type, it should implement
@@ -320,6 +386,8 @@
         """
         raise NotImplementedError()
 
+    # RQL query api ############################################################
+
     def syntax_tree_search(self, session, union,
                            args=None, cachekey=None, varmap=None, debug=0):
         """return result from this source for a rql query (actually from a rql
@@ -338,27 +406,7 @@
         res = self.syntax_tree_search(session, union, args, varmap=varmap)
         session.pool.source('system').manual_insert(res, table, session)
 
-    # system source don't have to implement the two methods below
-
-    def before_entity_insertion(self, session, lid, etype, eid):
-        """called by the repository when an eid has been attributed for an
-        entity stored here but the entity has not been inserted in the system
-        table yet.
-
-        This method must return the an Entity instance representation of this
-        entity.
-        """
-        entity = self.repo.vreg['etypes'].etype_class(etype)(session)
-        entity.eid = eid
-        entity.cw_edited = EditedEntity(entity)
-        return entity
-
-    def after_entity_insertion(self, session, lid, entity):
-        """called by the repository after an entity stored here has been
-        inserted in the system table.
-        """
-        pass
-
+    # write modification api ###################################################
     # read-only sources don't have to implement methods below
 
     def get_extid(self, entity):
@@ -536,8 +584,8 @@
     except KeyError:
         raise RuntimeError('Unknown source type %r' % source_type)
 
-def get_source(type, source_config, repo):
-    """return a source adapter according to the adapter field in the
-    source's configuration
+def get_source(type, source_config, repo, eid):
+    """return a source adapter according to the adapter field in the source's
+    configuration
     """
-    return source_adapter(type)(repo, source_config)
+    return source_adapter(type)(repo, source_config, eid)
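
Note on the refactoring above: AbstractSource now declares its configuration options on the class and exposes check_conf_dict() / check_config() to turn a raw configuration dict into a properly typed one. A minimal sketch of a source declaring a single option (the class name and option are invented for illustration):

    from cubicweb.server.sources import AbstractSource

    class DummySource(AbstractSource):
        """illustration only: a source with a single typed option"""
        options = (
            ('frobnicate',
             {'type': 'yn', 'default': True,
              'help': 'should the source frobnicate?',
              'group': 'dummy-source', 'level': 2}),
            )

    # returns {'frobnicate': False}: defaults are filled in and values are
    # converted to their declared type; unknown keys raise ValidationError
    typed = DummySource.check_conf_dict(None, {'frobnicate': 'no'})
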
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/sources/datafeed.py	Wed Apr 27 09:54:22 2011 +0200
@@ -0,0 +1,237 @@
+# copyright 2010-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+"""datafeed sources: copy data from an external data stream into the system
+database
+"""
+from datetime import datetime, timedelta
+from base64 import b64decode
+
+from cubicweb import RegistryNotFound, ObjectNotFound, ValidationError
+from cubicweb.server.sources import AbstractSource
+from cubicweb.appobject import AppObject
+
+class DataFeedSource(AbstractSource):
+    copy_based_source = True
+
+    options = (
+        ('synchronize',
+         {'type' : 'yn',
+          'default': True,
+          'help': ('Is the repository responsible for automatically importing '
+                   'content from this source? '
+                   'You should say yes unless you don\'t want this behaviour '
+                   'or you use a multiple-repository setup, in which case you '
+                   'should say yes on one repository and no on the others.'),
+          'group': 'datafeed-source', 'level': 2,
+          }),
+        ('synchronization-interval',
+         {'type' : 'time',
+          'default': '5min',
+          'help': ('Interval in seconds between synchronizations with the '
+                   'external source (defaults to 5 minutes, must be >= 1 min).'),
+          'group': 'datafeed-source', 'level': 2,
+          }),
+        ('delete-entities',
+         {'type' : 'yn',
+          'default': True,
+          'help': ('Should entities already imported but no longer found on '
+                   'the external source be deleted?'),
+          'group': 'datafeed-source', 'level': 2,
+          }),
+
+        )
+    def __init__(self, repo, source_config, eid=None):
+        AbstractSource.__init__(self, repo, source_config, eid)
+        self.update_config(None, self.check_conf_dict(eid, source_config))
+
+    def check_config(self, source_entity):
+        """check configuration of source entity"""
+        typedconfig = super(DataFeedSource, self).check_config(source_entity)
+        if typedconfig['synchronization-interval'] < 60:
+            _ = source_entity._cw._
+            msg = _('synchronization-interval must be at least 1 minute')
+            raise ValidationError(source_entity.eid, {'config': msg})
+        return typedconfig
+
+    def _entity_update(self, source_entity):
+        source_entity.complete()
+        self.parser = source_entity.parser
+        self.latest_retrieval = source_entity.latest_retrieval
+        self.urls = [url.strip() for url in source_entity.url.splitlines()
+                     if url.strip()]
+
+    def update_config(self, source_entity, typedconfig):
+        """update configuration from source entity. `typedconfig` is config
+        properly typed with defaults set
+        """
+        self.synchro_interval = timedelta(seconds=typedconfig['synchronization-interval'])
+        if source_entity is not None:
+            self._entity_update(source_entity)
+        self.config = typedconfig
+
+    def init(self, activated, source_entity):
+        if activated:
+            self._entity_update(source_entity)
+        self.parser = source_entity.parser
+        self.load_mapping(source_entity._cw)
+
+    def _get_parser(self, session, **kwargs):
+        return self.repo.vreg['parsers'].select(
+            self.parser, session, source=self, **kwargs)
+
+    def load_mapping(self, session):
+        self.mapping = {}
+        self.mapping_idx = {}
+        try:
+            parser = self._get_parser(session)
+        except (RegistryNotFound, ObjectNotFound):
+            return # no parser yet, don't go further
+        self._load_mapping(session, parser=parser)
+
+    def add_schema_config(self, schemacfg, checkonly=False, parser=None):
+        """added CWSourceSchemaConfig, modify mapping accordingly"""
+        if parser is None:
+            parser = self._get_parser(schemacfg._cw)
+        parser.add_schema_config(schemacfg, checkonly)
+
+    def del_schema_config(self, schemacfg, checkonly=False, parser=None):
+        """deleted CWSourceSchemaConfig, modify mapping accordingly"""
+        if parser is None:
+            parser = self._get_parser(schemacfg._cw)
+        parser.del_schema_config(schemacfg, checkonly)
+
+    def fresh(self):
+        if self.latest_retrieval is None:
+            return False
+        return datetime.now() < (self.latest_retrieval + self.synchro_interval)
+
+    def pull_data(self, session, force=False):
+        if not force and self.fresh():
+            return {}
+        if self.config['delete-entities']:
+            myuris = self.source_cwuris(session)
+        else:
+            myuris = None
+        parser = self._get_parser(session, sourceuris=myuris)
+        error = False
+        self.info('pulling data for source %s', self.uri)
+        for url in self.urls:
+            try:
+                if parser.process(url):
+                    error = True
+            except IOError, exc:
+                self.error('could not pull data while processing %s: %s',
+                           url, exc)
+                error = True
+        if error:
+            self.warning("some error occured, don't attempt to delete entities")
+        elif self.config['delete-entities'] and myuris:
+            byetype = {}
+            for eid, etype in myuris.values():
+                byetype.setdefault(etype, []).append(str(eid))
+            self.error('delete %s entities %s', self.uri, byetype)
+            for etype, eids in byetype.iteritems():
+                session.execute('DELETE %s X WHERE X eid IN (%s)'
+                                % (etype, ','.join(eids)))
+        self.latest_retrieval = datetime.now()
+        session.execute('SET X latest_retrieval %(date)s WHERE X eid %(x)s',
+                        {'x': self.eid, 'date': self.latest_retrieval})
+        return parser.stats
+
+    def before_entity_insertion(self, session, lid, etype, eid, sourceparams):
+        """called by the repository when an eid has been attributed for an
+        entity stored here but the entity has not been inserted in the system
+        table yet.
+
+        This method must return an Entity instance representing this
+        entity.
+        """
+        entity = super(DataFeedSource, self).before_entity_insertion(
+            session, lid, etype, eid, sourceparams)
+        entity.cw_edited['cwuri'] = unicode(lid)
+        entity.cw_edited.set_defaults()
+        sourceparams['parser'].before_entity_copy(entity, sourceparams)
+        # avoid query to search full-text indexed attributes
+        for attr in entity.e_schema.indexable_attributes():
+            entity.cw_edited.setdefault(attr, u'')
+        return entity
+
+    def after_entity_insertion(self, session, lid, entity, sourceparams):
+        """called by the repository after an entity stored here has been
+        inserted in the system table.
+        """
+        if session.is_hook_category_activated('integrity'):
+            entity.cw_edited.check(creation=True)
+        self.repo.system_source.add_entity(session, entity)
+        entity.cw_edited.saved = entity._cw_is_saved = True
+        sourceparams['parser'].after_entity_copy(entity, sourceparams)
+
+    def source_cwuris(self, session):
+        sql = ('SELECT extid, eid, type FROM entities, cw_source_relation '
+               'WHERE entities.eid=cw_source_relation.eid_from '
+               'AND cw_source_relation.eid_to=%s' % self.eid)
+        return dict((b64decode(uri), (eid, type))
+                    for uri, eid, type in session.system_sql(sql))
+
+
+class DataFeedParser(AppObject):
+    __registry__ = 'parsers'
+
+    def __init__(self, session, source, sourceuris=None):
+        self._cw = session
+        self.source = source
+        self.sourceuris = sourceuris
+        self.stats = {'created': set(),
+                      'updated': set()}
+
+    def add_schema_config(self, schemacfg, checkonly=False):
+        """added CWSourceSchemaConfig, modify mapping accordingly"""
+        msg = schemacfg._cw._("this parser doesn't use a mapping")
+        raise ValidationError(schemacfg.eid, {None: msg})
+
+    def del_schema_config(self, schemacfg, checkonly=False):
+        """deleted CWSourceSchemaConfig, modify mapping accordingly"""
+        msg = schemacfg._cw._("this parser doesn't use a mapping")
+        raise ValidationError(schemacfg.eid, {None: msg})
+
+    def extid2entity(self, uri, etype, **sourceparams):
+        sourceparams['parser'] = self
+        eid = self.source.extid2eid(str(uri), etype, self._cw,
+                                    sourceparams=sourceparams)
+        if self.sourceuris is not None:
+            self.sourceuris.pop(str(uri), None)
+        return self._cw.entity_from_eid(eid, etype)
+
+    def process(self, url):
+        """main callback: process the url"""
+        raise NotImplementedError
+
+    def before_entity_copy(self, entity, sourceparams):
+        raise NotImplementedError
+
+    def after_entity_copy(self, entity, sourceparams):
+        self.stats['created'].add(entity.eid)
+
+    def created_during_pull(self, entity):
+        return entity.eid in self.stats['created']
+
+    def updated_during_pull(self, entity):
+        return entity.eid in self.stats['updated']
+
+    def notify_updated(self, entity):
+        return self.stats['updated'].add(entity.eid)
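
Note on the new datafeed source above: actual fetching is delegated to appobjects registered in the 'parsers' registry; the CWSource entity's parser attribute selects one by __regid__, and pull_data() calls its process() method for each configured url. A minimal parser sketch (the registry id, entity type and attribute names are invented for illustration):

    from cubicweb.server.sources.datafeed import DataFeedParser

    class MyFeedParser(DataFeedParser):
        """illustration only: create one entity per pulled url"""
        __regid__ = 'myfeed'

        def process(self, url):
            # extid2entity allocates (or finds) the entity for this external
            # id and passes extra sourceparams down to before_entity_copy
            self.extid2entity(url, 'ExternalThing', item={'title': u'something'})
            return False # a false value means "no error occurred"

        def before_entity_copy(self, entity, sourceparams):
            entity.cw_edited['title'] = sourceparams['item']['title']
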
--- a/server/sources/ldapuser.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/server/sources/ldapuser.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -34,15 +34,13 @@
 from __future__ import division
 from base64 import b64decode
 
-from logilab.common.textutils import splitstrip
-from rql.nodes import Relation, VariableRef, Constant, Function
-
 import ldap
 from ldap.ldapobject import ReconnectLDAPObject
 from ldap.filter import filter_format, escape_filter_chars
 from ldapurl import LDAPUrl
 
-from logilab.common.configuration import time_validator
+from rql.nodes import Relation, VariableRef, Constant, Function
+
 from cubicweb import AuthenticationError, UnknownEid, RepositoryError
 from cubicweb.server.utils import cartesian_product
 from cubicweb.server.sources import (AbstractSource, TrFunc, GlobTrFunc,
@@ -168,58 +166,52 @@
 
     )
 
-    def __init__(self, repo, source_config, *args, **kwargs):
-        AbstractSource.__init__(self, repo, source_config, *args, **kwargs)
-        self.host = source_config['host']
-        self.protocol = source_config.get('protocol', 'ldap')
-        self.authmode = source_config.get('auth-mode', 'simple')
+    def __init__(self, repo, source_config, eid=None):
+        AbstractSource.__init__(self, repo, source_config, eid)
+        self.update_config(None, self.check_conf_dict(eid, source_config))
+        self._conn = None
+
+    def update_config(self, source_entity, typedconfig):
+        """update configuration from source entity. `typedconfig` is config
+        properly typed with defaults set
+        """
+        self.host = typedconfig['host']
+        self.protocol = typedconfig['protocol']
+        self.authmode = typedconfig['auth-mode']
         self._authenticate = getattr(self, '_auth_%s' % self.authmode)
-        self.cnx_dn = source_config.get('data-cnx-dn') or ''
-        self.cnx_pwd = source_config.get('data-cnx-password') or ''
-        self.user_base_scope = globals()[source_config['user-scope']]
-        self.user_base_dn = str(source_config['user-base-dn'])
-        self.user_base_scope = globals()[source_config['user-scope']]
-        self.user_classes = splitstrip(source_config['user-classes'])
-        self.user_login_attr = source_config['user-login-attr']
-        self.user_default_groups = splitstrip(source_config['user-default-group'])
-        self.user_attrs = dict(v.split(':', 1) for v in splitstrip(source_config['user-attrs-map']))
-        self.user_filter = source_config.get('user-filter')
+        self.cnx_dn = typedconfig['data-cnx-dn']
+        self.cnx_pwd = typedconfig['data-cnx-password']
+        self.user_base_dn = str(typedconfig['user-base-dn'])
+        self.user_base_scope = globals()[typedconfig['user-scope']]
+        self.user_login_attr = typedconfig['user-login-attr']
+        self.user_default_groups = typedconfig['user-default-group']
+        self.user_attrs = typedconfig['user-attrs-map']
         self.user_rev_attrs = {'eid': 'dn'}
         for ldapattr, cwattr in self.user_attrs.items():
             self.user_rev_attrs[cwattr] = ldapattr
-        self.base_filters = self._make_base_filters()
+        self.base_filters = [filter_format('(%s=%s)', ('objectClass', o))
+                             for o in typedconfig['user-classes']]
+        if typedconfig['user-filter']:
+            self.base_filters.append(typedconfig['user-filter'])
+        self._interval = typedconfig['synchronization-interval']
+        self._cache_ttl = max(71, typedconfig['cache-life-time'])
+        self.reset_caches()
         self._conn = None
-        self._cache = {}
-        # ttlm is in minutes!
-        self._cache_ttl = time_validator(None, None,
-                              source_config.get('cache-life-time', 2*60*60))
-        self._cache_ttl = max(71, self._cache_ttl)
-        self._query_cache = TimedCache(self._cache_ttl)
-        # interval is in seconds !
-        self._interval = time_validator(None, None,
-                                    source_config.get('synchronization-interval',
-                                                      24*60*60))
-
-    def _make_base_filters(self):
-        filters =  [filter_format('(%s=%s)', ('objectClass', o))
-                              for o in self.user_classes]
-        if self.user_filter:
-            filters += [self.user_filter]
-        return filters
 
     def reset_caches(self):
         """method called during test to reset potential source caches"""
         self._cache = {}
         self._query_cache = TimedCache(self._cache_ttl)
 
-    def init(self):
+    def init(self, activated, source_entity):
         """method called by the repository once ready to handle request"""
-        self.info('ldap init')
-        # set minimum period of 5min 1s (the additional second is to minimize
-        # resonnance effet)
-        self.repo.looping_task(max(301, self._interval), self.synchronize)
-        self.repo.looping_task(self._cache_ttl // 10,
-                               self._query_cache.clear_expired)
+        if activated:
+            self.info('ldap init')
+            # set minimum period of 5min 1s (the additional second is to
+            # minimize resonance effects)
+            self.repo.looping_task(max(301, self._interval), self.synchronize)
+            self.repo.looping_task(self._cache_ttl // 10,
+                                   self._query_cache.clear_expired)
 
     def synchronize(self):
         """synchronize content known by this repository with content in the
@@ -299,7 +291,7 @@
             # we really really don't want that
             raise AuthenticationError()
         searchfilter = [filter_format('(%s=%s)', (self.user_login_attr, login))]
-        searchfilter.extend(self._make_base_filters())
+        searchfilter.extend(self.base_filters)
         searchstr = '(&%s)' % ''.join(searchfilter)
         # first search the user
         try:
@@ -530,7 +522,8 @@
     def _search(self, session, base, scope,
                 searchstr='(objectClass=*)', attrs=()):
         """make an ldap query"""
-        self.debug('ldap search %s %s %s %s %s', self.uri, base, scope, searchstr, list(attrs))
+        self.debug('ldap search %s %s %s %s %s', self.uri, base, scope,
+                   searchstr, list(attrs))
         # XXX for now, we do not have connection pool support for LDAP, so
         # this is always self._conn
         cnx = session.pool.connection(self.uri).cnx
@@ -584,7 +577,7 @@
         self.debug('ldap built results %s', len(result))
         return result
 
-    def before_entity_insertion(self, session, lid, etype, eid):
+    def before_entity_insertion(self, session, lid, etype, eid, sourceparams):
         """called by the repository when an eid has been attributed for an
         entity stored here but the entity has not been inserted in the system
         table yet.
@@ -593,18 +586,20 @@
         entity.
         """
         self.debug('ldap before entity insertion')
-        entity = super(LDAPUserSource, self).before_entity_insertion(session, lid, etype, eid)
+        entity = super(LDAPUserSource, self).before_entity_insertion(
+            session, lid, etype, eid, sourceparams)
         res = self._search(session, lid, BASE)[0]
         for attr in entity.e_schema.indexable_attributes():
             entity.cw_edited[attr] = res[self.user_rev_attrs[attr]]
         return entity
 
-    def after_entity_insertion(self, session, lid, entity):
+    def after_entity_insertion(self, session, lid, entity, sourceparams):
         """called by the repository after an entity stored here has been
         inserted in the system table.
         """
         self.debug('ldap after entity insertion')
-        super(LDAPUserSource, self).after_entity_insertion(session, lid, entity)
+        super(LDAPUserSource, self).after_entity_insertion(
+            session, lid, entity, sourceparams)
         dn = lid
         for group in self.user_default_groups:
             session.execute('SET X in_group G WHERE X eid %(x)s, G name %(group)s',
--- a/server/sources/native.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/server/sources/native.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -45,8 +45,10 @@
 from logilab.database import get_db_helper
 
 from yams import schema2sql as y2sql
+from yams.schema import role_name
 
-from cubicweb import UnknownEid, AuthenticationError, ValidationError, Binary, UniqueTogetherError
+from cubicweb import (UnknownEid, AuthenticationError, ValidationError, Binary,
+                      UniqueTogetherError)
 from cubicweb import transaction as tx, server, neg_role
 from cubicweb.schema import VIRTUAL_RTYPES
 from cubicweb.cwconfig import CubicWebNoAppConfiguration
@@ -56,7 +58,7 @@
 from cubicweb.server.rqlannotation import set_qdata
 from cubicweb.server.hook import CleanupDeletedEidsCacheOp
 from cubicweb.server.session import hooks_control, security_enabled
-from cubicweb.server.ssplanner import EditedEntity
+from cubicweb.server.edition import EditedEntity
 from cubicweb.server.sources import AbstractSource, dbg_st_search, dbg_results
 from cubicweb.server.sources.rql2sql import SQLGenerator
 
@@ -267,6 +269,8 @@
     def __init__(self, repo, source_config, *args, **kwargs):
         SQLAdapterMixIn.__init__(self, source_config)
         self.authentifiers = [LoginPasswordAuthentifier(self)]
+        if repo.config['allow-email-login']:
+            self.authentifiers.insert(0, EmailPasswordAuthentifier(self))
         AbstractSource.__init__(self, repo, source_config, *args, **kwargs)
         # sql generator
         self._rql_sqlgen = self.sqlgen_class(self.schema, self.dbhelper,
@@ -278,7 +282,7 @@
         self._temp_table_data = {}
         # we need a lock to protect eid attribution function (XXX, really?
         # explain)
-        self._eid_creation_lock = Lock()
+        self._eid_cnx_lock = Lock()
         self._eid_creation_cnx = None
         # (etype, attr) / storage mapping
         self._storages = {}
@@ -308,6 +312,13 @@
         #      consuming, find another way
         return SQLAdapterMixIn.get_connection(self)
 
+    def check_config(self, source_entity):
+        """check configuration of source entity"""
+        if source_entity.host_config:
+            msg = source_entity._cw._('the system source has its configuration '
+                                      'stored on the file-system')
+            raise ValidationError(source_entity.eid, {role_name('config', 'subject'): msg})
+
     def add_authentifier(self, authentifier):
         self.authentifiers.append(authentifier)
         authentifier.source = self
@@ -327,17 +338,21 @@
         """execute the query and return its result"""
         return self.process_result(self.doexec(session, sql, args))
 
-    def init_creating(self):
-        pool = self.repo._get_pool()
-        pool.pool_set()
+    def init_creating(self, pool=None):
         # check full text index availibility
         if self.do_fti:
-            if not self.dbhelper.has_fti_table(pool['system']):
+            if pool is None:
+                _pool = self.repo._get_pool()
+                _pool.pool_set()
+            else:
+                _pool = pool
+            if not self.dbhelper.has_fti_table(_pool['system']):
                 if not self.repo.config.creating:
                     self.critical('no text index table')
                 self.do_fti = False
-        pool.pool_reset()
-        self.repo._free_pool(pool)
+            if pool is None:
+                _pool.pool_reset()
+                self.repo._free_pool(_pool)
 
     def backup(self, backupfile, confirm):
         """method called to create a backup of the source's data"""
@@ -357,8 +372,8 @@
             if self.repo.config.open_connections_pools:
                 self.open_pool_connections()
 
-    def init(self):
-        self.init_creating()
+    def init(self, activated, source_entity):
+        self.init_creating(source_entity._cw.pool)
 
     def shutdown(self):
         if self._eid_creation_cnx:
@@ -788,13 +803,13 @@
             res[-1] = b64decode(res[-1])
         return res
 
-    def extid2eid(self, session, source, extid):
+    def extid2eid(self, session, source_uri, extid):
         """get eid from an external id. Return None if no record found."""
         assert isinstance(extid, str)
         cursor = self.doexec(session,
                              'SELECT eid FROM entities '
                              'WHERE extid=%(x)s AND source=%(s)s',
-                             {'x': b64encode(extid), 's': source.uri})
+                             {'x': b64encode(extid), 's': source_uri})
         # XXX testing rowcount cause strange bug with sqlite, results are there
         #     but rowcount is 0
         #if cursor.rowcount > 0:
@@ -807,13 +822,7 @@
         return None
 
     def make_temp_table_name(self, table):
-        try: # XXX remove this once
-            return self.dbhelper.temporary_table_name(table)
-        except AttributeError:
-            import warnings
-            warnings.warn('Please hg up logilab.database')
-            return table
-
+        return self.dbhelper.temporary_table_name(table)
 
     def temp_table_def(self, selected, sol, table):
         return make_schema(selected, sol, table, self.dbhelper.TYPE_MAPPING)
@@ -826,27 +835,22 @@
         self.doexec(session, sql)
 
     def _create_eid_sqlite(self, session):
-        self._eid_creation_lock.acquire()
-        try:
+        with self._eid_cnx_lock:
             for sql in self.dbhelper.sqls_increment_sequence('entities_id_seq'):
                 cursor = self.doexec(session, sql)
             return cursor.fetchone()[0]
-        finally:
-            self._eid_creation_lock.release()
 
 
     def create_eid(self, session):
-        # lock needed to prevent 'Connection is busy with results for another command (0)' errors with SQLServer
-        self._eid_creation_lock.acquire()
-        try:
+        # lock needed to prevent 'Connection is busy with results for another
+        # command (0)' errors with SQLServer
+        with self._eid_cnx_lock:
             return self._create_eid()
-        finally:
-            self._eid_creation_lock.release()
 
     def _create_eid(self):
         # internal function doing the eid creation without locking.
         # needed for the recursive handling of disconnections (otherwise we
-        # deadlock on self._eid_creation_lock
+        # deadlock on self._eid_cnx_lock)
         if self._eid_creation_cnx is None:
             self._eid_creation_cnx = self.get_connection()
         cnx = self._eid_creation_cnx
@@ -883,24 +887,24 @@
         if extid is not None:
             assert isinstance(extid, str)
             extid = b64encode(extid)
+        uri = 'system' if source.copy_based_source else source.uri
         attrs = {'type': entity.__regid__, 'eid': entity.eid, 'extid': extid,
-                 'source': source.uri, 'mtime': datetime.now()}
+                 'source': uri, 'mtime': datetime.now()}
         self.doexec(session, self.sqlgen.insert('entities', attrs), attrs)
         # insert core relations: is, is_instance_of and cw_source
-        if not hasattr(entity, '_cw_recreating'):
-            try:
-                self.doexec(session, 'INSERT INTO is_relation(eid_from,eid_to) VALUES (%s,%s)'
-                            % (entity.eid, eschema_eid(session, entity.e_schema)))
-            except IndexError:
-                # during schema serialization, skip
-                pass
-            else:
-                for eschema in entity.e_schema.ancestors() + [entity.e_schema]:
-                    self.doexec(session, 'INSERT INTO is_instance_of_relation(eid_from,eid_to) VALUES (%s,%s)'
-                               % (entity.eid, eschema_eid(session, eschema)))
-            if 'CWSource' in self.schema and source.eid is not None: # else, cw < 3.10
-                self.doexec(session, 'INSERT INTO cw_source_relation(eid_from,eid_to) '
-                            'VALUES (%s,%s)' % (entity.eid, source.eid))
+        try:
+            self.doexec(session, 'INSERT INTO is_relation(eid_from,eid_to) VALUES (%s,%s)'
+                        % (entity.eid, eschema_eid(session, entity.e_schema)))
+        except IndexError:
+            # during schema serialization, skip
+            pass
+        else:
+            for eschema in entity.e_schema.ancestors() + [entity.e_schema]:
+                self.doexec(session, 'INSERT INTO is_instance_of_relation(eid_from,eid_to) VALUES (%s,%s)'
+                           % (entity.eid, eschema_eid(session, eschema)))
+        if 'CWSource' in self.schema and source.eid is not None: # else, cw < 3.10
+            self.doexec(session, 'INSERT INTO cw_source_relation(eid_from,eid_to) '
+                        'VALUES (%s,%s)' % (entity.eid, source.eid))
         # now we can update the full text index
         if self.do_fti and self.need_fti_indexation(entity.__regid__):
             if complete:
@@ -1524,3 +1528,19 @@
             return rset[0][0]
         except IndexError:
             raise AuthenticationError('bad password')
+
+
+class EmailPasswordAuthentifier(BaseAuthentifier):
+    def authenticate(self, session, login, **authinfo):
+        # the email_auth flag prevents infinite recursion (the call to
+        # repo.check_auth_info at the end of this method may lead us here again)
+        if '@' not in login or authinfo.pop('email_auth', None):
+            raise AuthenticationError('not an email')
+        rset = session.execute('Any L WHERE U login L, U primary_email M, '
+                               'M address %(login)s', {'login': login},
+                               build_descr=False)
+        if rset.rowcount != 1:
+            raise AuthenticationError('nonexistent email')
+        login = rset.rows[0][0]
+        authinfo['email_auth'] = True
+        return self.source.repo.check_auth_info(session, login, authinfo)
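
The EmailPasswordAuthentifier added above resolves a primary email address to
the matching login and then re-runs the regular password check, using the
email_auth flag so the email branch is entered at most once. A minimal
standalone sketch of that recursion-guard pattern, assuming an in-memory user
table (plain Python, not the repository API; all names below are made up for
illustration)::

    class AuthenticationError(Exception):
        pass

    # maps login -> (primary email, password); stands in for the database
    USERS = {'jdoe': ('john@example.com', 's3cret')}

    def check_auth_info(login, password, **authinfo):
        """check login/password, falling back to email lookup exactly once"""
        if '@' in login and not authinfo.get('email_auth'):
            # translate the email into a login, then retry with the guard set
            for real_login, (email, _pwd) in USERS.items():
                if email == login:
                    return check_auth_info(real_login, password, email_auth=True)
            raise AuthenticationError('no user with this email')
        if USERS.get(login, (None, None))[1] != password:
            raise AuthenticationError('bad password')
        return login

    assert check_auth_info('john@example.com', 's3cret') == 'jdoe'
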
--- a/server/sources/pyrorql.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/server/sources/pyrorql.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -31,11 +31,14 @@
 from logilab.common.configuration import REQUIRED
 from logilab.common.optik_ext import check_yn
 
+from yams.schema import role_name
+
 from rql.nodes import Constant
 from rql.utils import rqlvar_maker
 
 from cubicweb import dbapi, server
-from cubicweb import BadConnectionId, UnknownEid, ConnectionError
+from cubicweb import ValidationError, BadConnectionId, UnknownEid, ConnectionError
+from cubicweb.schema import VIRTUAL_RTYPES
 from cubicweb.cwconfig import register_persistent_options
 from cubicweb.server.sources import (AbstractSource, ConnectionWrapper,
                                      TimedCache, dbg_st_search, dbg_results)
@@ -45,34 +48,6 @@
     select, col = union.locate_subquery(col, etype, args)
     return getattr(select.selection[col], 'uidtype', None)
 
-def load_mapping_file(mappingfile):
-    mapping = {}
-    execfile(mappingfile, mapping)
-    for junk in ('__builtins__', '__doc__'):
-        mapping.pop(junk, None)
-    mapping.setdefault('support_relations', {})
-    mapping.setdefault('dont_cross_relations', set())
-    mapping.setdefault('cross_relations', set())
-
-    # do some basic checks of the mapping content
-    assert 'support_entities' in mapping, \
-           'mapping file should at least define support_entities'
-    assert isinstance(mapping['support_entities'], dict)
-    assert isinstance(mapping['support_relations'], dict)
-    assert isinstance(mapping['dont_cross_relations'], set)
-    assert isinstance(mapping['cross_relations'], set)
-    unknown = set(mapping) - set( ('support_entities', 'support_relations',
-                                   'dont_cross_relations', 'cross_relations') )
-    assert not unknown, 'unknown mapping attribute(s): %s' % unknown
-    # relations that are necessarily not crossed
-    mapping['dont_cross_relations'] |= set(('owned_by', 'created_by'))
-    for rtype in ('is', 'is_instance_of', 'cw_source'):
-        assert rtype not in mapping['dont_cross_relations'], \
-               '%s relation should not be in dont_cross_relations' % rtype
-        assert rtype not in mapping['support_relations'], \
-               '%s relation should not be in support_relations' % rtype
-    return mapping
-
 
 class ReplaceByInOperator(Exception):
     def __init__(self, eids):
@@ -96,12 +71,6 @@
           'help': 'identifier of the repository in the pyro name server',
           'group': 'pyro-source', 'level': 0,
           }),
-        ('mapping-file',
-         {'type' : 'string',
-          'default': REQUIRED,
-          'help': 'path to a python file with the schema mapping definition',
-          'group': 'pyro-source', 'level': 1,
-          }),
         ('cubicweb-user',
          {'type' : 'string',
           'default': REQUIRED,
@@ -142,8 +111,8 @@
           'group': 'pyro-source', 'level': 2,
           }),
         ('synchronization-interval',
-         {'type' : 'int',
-          'default': 5*60,
+         {'type' : 'time',
+          'default': '5min',
           'help': 'interval between synchronization with the external \
 repository (default to 5 minutes).',
           'group': 'pyro-source', 'level': 2,
@@ -154,70 +123,112 @@
     PUBLIC_KEYS = AbstractSource.PUBLIC_KEYS + ('base-url',)
     _conn = None
 
-    def __init__(self, repo, source_config, *args, **kwargs):
-        AbstractSource.__init__(self, repo, source_config, *args, **kwargs)
-        mappingfile = source_config['mapping-file']
-        if not mappingfile[0] == '/':
-            mappingfile = join(repo.config.apphome, mappingfile)
-        try:
-            mapping = load_mapping_file(mappingfile)
-        except IOError:
-            self.disabled = True
-            self.error('cant read mapping file %s, source disabled',
-                       mappingfile)
-            self.support_entities = {}
-            self.support_relations = {}
-            self.dont_cross_relations = set()
-            self.cross_relations = set()
-        else:
-            self.support_entities = mapping['support_entities']
-            self.support_relations = mapping['support_relations']
-            self.dont_cross_relations = mapping['dont_cross_relations']
-            self.cross_relations = mapping['cross_relations']
-        baseurl = source_config.get('base-url')
+    def __init__(self, repo, source_config, eid=None):
+        AbstractSource.__init__(self, repo, source_config, eid)
+        self.update_config(None, self.check_conf_dict(eid, source_config,
+                                                      fail_if_unknown=False))
+        self._query_cache = TimedCache(1800)
+
+    def update_config(self, source_entity, processed_config):
+        """update configuration from source entity"""
+        # XXX get it through pyro if unset
+        baseurl = processed_config.get('base-url')
         if baseurl and not baseurl.endswith('/'):
-            source_config['base-url'] += '/'
-        self.config = source_config
-        myoptions = (('%s.latest-update-time' % self.uri,
-                      {'type' : 'int', 'sitewide': True,
-                       'default': 0,
-                       'help': _('timestamp of the latest source synchronization.'),
-                       'group': 'sources',
-                       }),)
-        register_persistent_options(myoptions)
-        self._query_cache = TimedCache(1800)
-        self._skip_externals = check_yn(None, 'skip-external-entities',
-                                        source_config.get('skip-external-entities', False))
+            processed_config['base-url'] += '/'
+        self.config = processed_config
+        self._skip_externals = processed_config['skip-external-entities']
+        if source_entity is not None:
+            self.latest_retrieval = source_entity.latest_retrieval
 
     def reset_caches(self):
         """method called during test to reset potential source caches"""
         self._query_cache = TimedCache(1800)
 
-    def last_update_time(self):
-        pkey = u'sources.%s.latest-update-time' % self.uri
-        session = self.repo.internal_session()
+    def init(self, activated, source_entity):
+        """method called by the repository once ready to handle request"""
+        self.load_mapping(source_entity._cw)
+        if activated:
+            interval = self.config['synchronization-interval']
+            self.repo.looping_task(interval, self.synchronize)
+            self.repo.looping_task(self._query_cache.ttl.seconds/10,
+                                   self._query_cache.clear_expired)
+            self.latest_retrieval = source_entity.latest_retrieval
+
+    def load_mapping(self, session=None):
+        self.support_entities = {}
+        self.support_relations = {}
+        self.dont_cross_relations = set(('owned_by', 'created_by'))
+        self.cross_relations = set()
+        assert self.eid is not None
+        self._schemacfg_idx = {}
+        self._load_mapping(session)
+
+    etype_options = set(('write',))
+    rtype_options = set(('maycross', 'dontcross', 'write',))
+
+    def _check_options(self, schemacfg, allowedoptions):
+        if schemacfg.options:
+            options = set(w.strip() for w in schemacfg.options.split(':'))
+        else:
+            options = set()
+        if options - allowedoptions:
+            options = ', '.join(sorted(options - allowedoptions))
+            msg = _('unknown option(s): %s') % options
+            raise ValidationError(schemacfg.eid, {role_name('options', 'subject'): msg})
+        return options
+
+    def add_schema_config(self, schemacfg, checkonly=False):
+        """added CWSourceSchemaConfig, modify mapping accordingly"""
         try:
-            rset = session.execute('Any V WHERE X is CWProperty, X value V, X pkey %(k)s',
-                                   {'k': pkey})
-            if not rset:
-                # insert it
-                session.execute('INSERT CWProperty X: X pkey %(k)s, X value %(v)s',
-                                {'k': pkey, 'v': u'0'})
-                session.commit()
-                timestamp = 0
+            ertype = schemacfg.schema.name
+        except AttributeError:
+            msg = schemacfg._cw._("attribute/relation can't be mapped, only "
+                                  "entity and relation types")
+            raise ValidationError(schemacfg.eid, {role_name('cw_for_schema', 'subject'): msg})
+        if schemacfg.schema.__regid__ == 'CWEType':
+            options = self._check_options(schemacfg, self.etype_options)
+            if not checkonly:
+                self.support_entities[ertype] = 'write' in options
+        else: # CWRType
+            if ertype in ('is', 'is_instance_of', 'cw_source') or ertype in VIRTUAL_RTYPES:
+                msg = schemacfg._cw._('%s relation should not be mapped') % ertype
+                raise ValidationError(schemacfg.eid, {role_name('cw_for_schema', 'subject'): msg})
+            options = self._check_options(schemacfg, self.rtype_options)
+            if 'dontcross' in options:
+                if 'maycross' in options:
+                    msg = schemacfg._("can't mix dontcross and maycross options")
+                    raise ValidationError(schemacfg.eid, {role_name('options', 'subject'): msg})
+                if 'write' in options:
+                    msg = schemacfg._("can't mix dontcross and write options")
+                    raise ValidationError(schemacfg.eid, {role_name('options', 'subject'): msg})
+                if not checkonly:
+                    self.dont_cross_relations.add(ertype)
+            elif not checkonly:
+                self.support_relations[ertype] = 'write' in options
+                if 'maycross' in options:
+                    self.cross_relations.add(ertype)
+        if not checkonly:
+            # add to an index to ease deletion handling
+            self._schemacfg_idx[schemacfg.eid] = ertype
+
+    def del_schema_config(self, schemacfg, checkonly=False):
+        """deleted CWSourceSchemaConfig, modify mapping accordingly"""
+        if checkonly:
+            return
+        try:
+            ertype = self._schemacfg_idx[schemacfg.eid]
+            if ertype[0].isupper():
+                del self.support_entities[ertype]
             else:
-                assert len(rset) == 1
-                timestamp = int(rset[0][0])
-            return datetime.fromtimestamp(timestamp)
-        finally:
-            session.close()
-
-    def init(self):
-        """method called by the repository once ready to handle request"""
-        interval = int(self.config.get('synchronization-interval', 5*60))
-        self.repo.looping_task(interval, self.synchronize)
-        self.repo.looping_task(self._query_cache.ttl.seconds/10,
-                               self._query_cache.clear_expired)
+                if ertype in self.support_relations:
+                    del self.support_relations[ertype]
+                    if ertype in self.cross_relations:
+                        self.cross_relations.remove(ertype)
+                else:
+                    self.dont_cross_relations.remove(ertype)
+        except Exception:
+            self.error('while updating mapping following removal of %s',
+                       schemacfg)
 
     def local_eid(self, cnx, extid, session):
         etype, dexturi, dextid = cnx.describe(extid)
@@ -245,9 +256,9 @@
             return
         etypes = self.support_entities.keys()
         if mtime is None:
-            mtime = self.last_update_time()
-        updatetime, modified, deleted = extrepo.entities_modified_since(etypes,
-                                                                        mtime)
+            mtime = self.latest_retrieval
+        updatetime, modified, deleted = extrepo.entities_modified_since(
+            etypes, mtime)
         self._query_cache.clear()
         repo = self.repo
         session = repo.internal_session()
@@ -273,14 +284,14 @@
                     if eid is not None:
                         entity = session.entity_from_eid(eid, etype)
                         repo.delete_info(session, entity, self.uri, extid,
-                                         scleanup=True)
+                                         scleanup=self.eid)
                 except:
                     self.exception('while updating %s with external id %s of source %s',
                                    etype, extid, self.uri)
                     continue
-            session.execute('SET X value %(v)s WHERE X pkey %(k)s',
-                            {'k': u'sources.%s.latest-update-time' % self.uri,
-                             'v': unicode(int(mktime(updatetime.timetuple())))})
+            self.latest_retrieval = updatetime
+            session.execute('SET X latest_retrieval %(date)s WHERE X eid %(x)s',
+                            {'x': self.eid, 'date': self.latest_retrieval})
             session.commit()
         finally:
             session.close()
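
The pyrorql source now builds its mapping from CWSourceSchemaConfig entities
instead of a python mapping file: each entity names a schema type and carries a
':'-separated options string checked against a per-kind whitelist (write for
entity types; write/maycross/dontcross for relation types, with dontcross
exclusive of the other two). A minimal sketch of that option parsing and
validation, outside of CubicWeb (function and constant names below are
illustrative)::

    ETYPE_OPTIONS = frozenset(('write',))
    RTYPE_OPTIONS = frozenset(('maycross', 'dontcross', 'write'))

    def parse_options(optstring, allowed):
        """split a ':'-separated option string and reject unknown combinations"""
        options = set(w.strip() for w in optstring.split(':') if w.strip())
        unknown = options - allowed
        if unknown:
            raise ValueError('unknown option(s): %s' % ', '.join(sorted(unknown)))
        if 'dontcross' in options and options & set(('maycross', 'write')):
            raise ValueError("can't mix dontcross with maycross or write")
        return options

    assert parse_options('maycross : write', RTYPE_OPTIONS) == set(('maycross', 'write'))
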
--- a/server/sources/rql2sql.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/server/sources/rql2sql.py	Wed Apr 27 09:54:22 2011 +0200
@@ -319,6 +319,31 @@
 # IGenerator implementation for RQL->SQL #######################################
 
 class StateInfo(object):
+    """this class stores data accumulated during the RQL syntax tree visit
+    for later SQL generation.
+
+    Attributes related to OUTER JOIN handling
+    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+    * `outer_chains`, list of lists of strings. Each list represents tables
+      that have to be outer joined together.
+
+    * `outer_tables`, dictionary used as an index of tables used in an outer join ::
+
+        'table alias': (outertype, [conditions], [chain])
+
+      where:
+
+      * `outertype` is one of None, 'LEFT', 'RIGHT', 'FULL'
+      * `conditions` is a list of join conditions (string)
+      * `chain` is a list of table aliases (the *outer chain*) in which the key
+        alias appears
+
+    * `outer_pending` is a dictionary containing conditions that will have to
+      be added to the outer join when the table is turned into an outer
+      join ::
+
+       'table alias': [conditions]
+    """
     def __init__(self, select, existssols, unstablevars):
         self.existssols = existssols
         self.unstablevars = unstablevars
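
To make the layout documented above concrete: a query needing
T1 LEFT OUTER JOIN T2 ON (c1) LEFT OUTER JOIN T3 ON (c2) would be tracked
roughly as follows (illustrative aliases and conditions, not actual output of
the code above)::

    chain = ['T1', 'T2', 'T3']          # one outer chain
    outer_chains = [chain]
    outer_tables = {
        'T1': (None,   [],     chain),  # head of the chain, no join condition
        'T2': ('LEFT', ['c1'], chain),
        'T3': ('LEFT', ['c2'], chain),
    }
    outer_pending = {}                  # no condition waiting for its table yet
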
@@ -338,7 +363,9 @@
         self.actual_tables = [[]]
         for _, tsql in self.tables.itervalues():
             self.actual_tables[-1].append(tsql)
+        self.outer_chains = []
         self.outer_tables = {}
+        self.outer_pending = {}
         self.duplicate_switches = []
         self.aliases = {}
         self.restrictions = []
@@ -403,7 +430,208 @@
         self.scope_nodes.pop()
         restrictions = self.restrictions
         self.restrictions = self._restr_stack.pop()
-        return restrictions, self.actual_tables.pop()
+        scope = len(self.actual_tables) - 1
+        # check if we have some outer chain for this scope
+        matching_chains = []
+        for chain in self.outer_chains:
+            for tablealias in chain:
+                if self.tables[tablealias][0] < scope:
+                    # chain belongs to outer scope
+                    break
+            else:
+                # chain matches the current scope
+                matching_chains.append(chain)
+        # call to `tables_sql` will pop actual_tables
+        tables = self.tables_sql(matching_chains)
+        # cleanup outer join related structure for tables in matching chains
+        for chain in matching_chains:
+            self.outer_chains.remove(chain)
+            for alias in chain:
+                del self.outer_tables[alias]
+        return restrictions, tables
+
+    # tables handling #########################################################
+
+    def add_table(self, table, key=None, scope=-1):
+        if key is None:
+            key = table
+        if key in self.tables:
+            return
+        if scope < 0:
+            scope = len(self.actual_tables) + scope
+        self.tables[key] = (scope, table)
+        self.actual_tables[scope].append(table)
+
+    def alias_and_add_table(self, tablename, scope=-1):
+        alias = '%s%s' % (tablename, self.count)
+        self.count += 1
+        self.add_table('%s AS %s' % (tablename, alias), alias, scope)
+        return alias
+
+    def relation_table(self, relation):
+        """return the table alias used by the given relation"""
+        if relation in self.done:
+            return relation._q_sqltable
+        rid = 'rel_%s%s' % (relation.r_type, self.count)
+        # the relation's table belongs to the root scope if it is the principal
+        # table of one of its variables and if that variable belongs to the
+        # parent scope
+        for varref in relation.iget_nodes(VariableRef):
+            var = varref.variable
+            if isinstance(var, ColumnAlias):
+                scope = 0
+                break
+            # XXX may have a principal without being invariant for this generation,
+            #     not sure whether this is a problem or not
+            if var.stinfo.get('principal') is relation and var.scope is var.stmt:
+                scope = 0
+                break
+        else:
+            scope = -1
+        self.count += 1
+        self.add_table('%s_relation AS %s' % (relation.r_type, rid), rid, scope=scope)
+        relation._q_sqltable = rid
+        self.done.add(relation)
+        return rid
+
+    def fti_table(self, relation, fti_table):
+        """return the table alias used by the given has_text relation,
+        `fti_table` being the table name for the plain text index
+        """
+        if relation in self.done:
+            try:
+                return relation._q_sqltable
+            except AttributeError:
+                pass
+        self.done.add(relation)
+        scope = self.scopes[relation.scope]
+        alias = self.alias_and_add_table(fti_table, scope=scope)
+        relation._q_sqltable = alias
+        return alias
+
+    # outer join handling ######################################################
+
+    def mark_as_used_in_outer_join(self, tablealias, addpending=True):
+        """Mark table of given alias as used in outer join. This must be called
+        after `outer_tables[tablealias]` has been initialized.
+        """
+        # remove a table from actual_tables because it's used in an outer join
+        # chain
+        scope, tabledef = self.tables[tablealias]
+        self.actual_tables[scope].remove(tabledef)
+        # check if there are pending outer join conditions for this table
+        if addpending:
+            try:
+                pending_conditions = self.outer_pending.pop(tablealias)
+            except KeyError:
+                pass
+            else:
+                self.outer_tables[tablealias][1].extend(pending_conditions)
+        else:
+            assert tablealias not in self.outer_pending
+
+    def add_outer_join_condition(self, tablealias, condition):
+        try:
+            outer, conditions, chain = self.outer_tables[tablealias]
+            conditions.append(condition)
+        except KeyError:
+            self.outer_pending.setdefault(tablealias, []).append(condition)
+
+    def replace_tables_by_outer_join(self, leftalias, rightalias,
+                                     outertype, condition):
+        """tell we need <leftalias> <outertype> JOIN <rightalias> ON <condition>
+        """
+        assert leftalias != rightalias, leftalias
+        outer_tables = self.outer_tables
+        louter, lconditions, lchain = outer_tables.get(leftalias,
+                                                      (None, None, None))
+        router, rconditions, rchain = outer_tables.get(rightalias,
+                                                      (None, None, None))
+        if lchain is None and rchain is None:
+            # create a new outer chain
+            chain = [leftalias, rightalias]
+            outer_tables[leftalias] = (None, [], chain)
+            outer_tables[rightalias] = (outertype, [condition], chain)
+            self.outer_chains.append(chain)
+            self.mark_as_used_in_outer_join(leftalias, addpending=False)
+            self.mark_as_used_in_outer_join(rightalias)
+        elif lchain is None:
+            # [A > B > C] + [D > A] -> [D > A > B > C]
+            if rightalias == rchain[0]:
+                outer_tables[leftalias] = (None, [], rchain)
+                conditions = outer_tables[rightalias][1] + [condition]
+                outer_tables[rightalias] = (outertype, conditions, rchain)
+                rchain.insert(0, leftalias)
+            else:
+                # [A > B > C] + [D > B] -> [A > B > C < D]
+                if outertype == 'LEFT':
+                    outertype = 'RIGHT'
+                outer_tables[leftalias] = (outertype, [condition], rchain)
+                rchain.append(leftalias)
+            self.mark_as_used_in_outer_join(leftalias)
+        elif rchain is None:
+            # [A > B > C] + [B > D] -> [A > B > C > D]
+            outer_tables[rightalias] = (outertype, [condition], lchain)
+            lchain.append(rightalias)
+            self.mark_as_used_in_outer_join(rightalias)
+        elif lchain is rchain:
+            # already in the same chain, simply check compatibility and append
+            # the condition if it's ok
+            lidx = lchain.index(leftalias)
+            ridx = lchain.index(rightalias)
+            if (outertype == 'FULL' and router != 'FULL') \
+                   or (lidx < ridx and router != 'LEFT') \
+                   or (ridx < lidx and louter != 'RIGHT'):
+                raise BadRQLQuery()
+            # merge conditions
+            if lidx < ridx:
+                rconditions.append(condition)
+            else:
+                lconditions.append(condition)
+        else:
+            if louter is not None:
+                raise BadRQLQuery()
+            # merge chains
+            self.outer_chains.remove(lchain)
+            self.mark_as_used_in_outer_join(leftalias)
+            rchain += lchain
+            for alias, (aouter, aconditions, achain) in outer_tables.iteritems():
+                if achain is lchain:
+                    outer_tables[alias] = (aouter, aconditions, rchain)
+
+    # sql generation helpers ###################################################
+
+    def tables_sql(self, outer_chains=None):
+        """generate SQL for FROM clause"""
+        # sort for test predictability
+        tables = sorted(self.actual_tables.pop())
+        # process outer joins
+        if outer_chains is None:
+            assert not self.actual_tables, self.actual_tables
+            assert not self.outer_pending
+            outer_chains = self.outer_chains
+        for chain in sorted(outer_chains):
+            tablealias = chain[0]
+            outertype, conditions, _ = self.outer_tables[tablealias]
+            assert _ is chain, (chain, _)
+            assert outertype is None, (chain, self.outer_chains)
+            assert not conditions, (chain, self.outer_chains)
+            assert len(chain) > 1
+            tabledef = self.tables[tablealias][1]
+            outerjoin = [tabledef]
+            for tablealias in chain[1:]:
+                outertype, conditions, _ = self.outer_tables[tablealias]
+                assert _ is chain, (chain, self.outer_chains)
+                assert outertype in ('LEFT', 'RIGHT', 'FULL'), (
+                    tablealias, outertype, conditions)
+                assert isinstance(conditions, list), (
+                    tablealias, outertype, conditions)
+                tabledef = self.tables[tablealias][1]
+                outerjoin.append('%s OUTER JOIN %s ON (%s)' % (
+                    outertype, tabledef, ' AND '.join(conditions)))
+            tables.append(' '.join(outerjoin))
+        return ', '.join(tables)
+
 
 def extract_fake_having_terms(having):
     """RQL's HAVING may be used to contains stuff that should go in the WHERE
@@ -448,6 +676,7 @@
                 node.parent.remove(node)
     return fakehaving
 
+
 class SQLGenerator(object):
     """
     generation of SQL from the fully expanded RQL syntax tree
@@ -598,12 +827,12 @@
         try:
             sql = self._solutions_sql(select, morerestr, sols, distinct,
                                       needalias or needwrap)
-            # generate groups / having before wrapping query selection to
-            # get correct column aliases
+            # generate groups / having before wrapping query selection to get
+            # correct column aliases
             self._in_wrapping_query = needwrap
             if groups:
-                # no constant should be inserted in GROUP BY else the backend will
-                # interpret it as a positional index in the selection
+                # no constant should be inserted in GROUP BY else the backend
+                # will interpret it as a positional index in the selection
                 groups = ','.join(vref.accept(self) for vref in groups
                                   if not isinstance(vref, Constant))
             if having:
@@ -611,33 +840,34 @@
                 having = ' AND '.join(term.accept(self) for term in having
                                       if not isinstance(term, Constant))
             if needwrap:
-                sql = '%s FROM (%s) AS T1' % (self._selection_sql(outerselection, distinct,
-                                                                  needalias),
-                                              sql)
+                sql = '%s FROM (%s) AS T1' % (
+                    self._selection_sql(outerselection, distinct, needalias),
+                    sql)
             if groups:
                 sql += '\nGROUP BY %s' % groups
             if having:
                 sql += '\nHAVING %s' % having
             # sort
             if sorts:
-                sqlsortterms = [self._sortterm_sql(sortterm, fselectidx)
-                                for sortterm in sorts]
-                sqlsortterms = [x for x in sqlsortterms if x is not None]
+                sqlsortterms = []
+                for sortterm in sorts:
+                    _term = self._sortterm_sql(sortterm, fselectidx)
+                    if _term is not None:
+                        sqlsortterms.append(_term)
                 if sqlsortterms:
-                    sql += '\nORDER BY %s' % ','.join(sqlsortterms)
-                    if sorts and fneedwrap:
-                        selection = ['T1.C%s' % i for i in xrange(len(origselection))]
-                        sql = 'SELECT %s FROM (%s) AS T1' % (','.join(selection), sql)
+                    sql = self.dbhelper.sql_add_order_by(
+                        sql, sqlsortterms, origselection, fneedwrap,
+                        select.limit or select.offset)
+            else:
+                sqlsortterms = None
             state.finalize_source_cbs()
         finally:
             select.selection = origselection
         # limit / offset
-        limit = select.limit
-        if limit:
-            sql += '\nLIMIT %s' % limit
-        offset = select.offset
-        if offset:
-            sql += '\nOFFSET %s' % offset
+        sql = self.dbhelper.sql_add_limit_offset(sql,
+                                                 select.limit,
+                                                 select.offset,
+                                                 sqlsortterms)
         return sql
 
     def _subqueries_sql(self, select, state):
@@ -677,10 +907,9 @@
             self._state.merge_source_cbs(self._state._needs_source_cb)
             # add required tables
             assert len(self._state.actual_tables) == 1, self._state.actual_tables
-            tables = self._state.actual_tables[-1]
+            tables = self._state.tables_sql()
             if tables:
-                # sort for test predictability
-                sql.insert(1, 'FROM %s' % ', '.join(sorted(tables)))
+                sql.insert(1, 'FROM %s' % tables)
             elif self._state.restrictions and self.dbhelper.needs_from_clause:
                 sql.insert(1, 'FROM (SELECT 1) AS _T')
             sqls.append('\n'.join(sql))
@@ -767,13 +996,13 @@
         restriction = ' AND '.join(restrictions)
         if not restriction:
             if tables:
-                return 'SELECT 1 FROM %s' % ', '.join(tables)
+                return 'SELECT 1 FROM %s' % tables
             return ''
         if not tables:
             # XXX could leave surrounding EXISTS() in this case no?
             sql = 'SELECT 1 WHERE %s' % restriction
         else:
-            sql = 'SELECT 1 FROM %s WHERE %s' % (', '.join(tables), restriction)
+            sql = 'SELECT 1 FROM %s WHERE %s' % (tables, restriction)
         return sql
 
 
@@ -814,12 +1043,11 @@
             # relation has already been processed by a previous step
             return ''
         elif relation.optional:
-            # check it has not already been treaten (to get necessary
-            # information to add an outer join condition)
-            if relation in self._state.done:
-                return ''
             # OPTIONAL relation, generate a left|right outer join
-            sql = self._visit_outer_join_relation(relation, rschema)
+            if rtype == 'identity' or rschema.inlined:
+                sql = self._visit_outer_join_inlined_relation(relation, rschema)
+            else:
+                sql = self._visit_outer_join_relation(relation, rschema)
         elif rschema.inlined:
             sql = self._visit_inlined_relation(relation)
         else:
@@ -882,7 +1110,7 @@
             lhs, rhs = relation.get_parts()
             return '%s%s' % (lhs.accept(self), rhs.accept(self))
         lhsvar, lhsconst, rhsvar, rhsconst = relation_info(relation)
-        rid = self._relation_table(relation)
+        rid = self._state.relation_table(relation)
         sqls = []
         sqls += self._process_relation_term(relation, rid, lhsvar, lhsconst, 'eid_from')
         sqls += self._process_relation_term(relation, rid, rhsvar, rhsconst, 'eid_to')
@@ -919,74 +1147,104 @@
            elif it's a full outer join:
            -> X FULL OUTER JOIN Y ON (X.relation=Y.eid)
         """
-        lhsvar, lhsconst, rhsvar, rhsconst = relation_info(relation)
-        if relation.optional == 'right':
-            joinattr, restrattr = 'eid_from', 'eid_to'
-        else:
-            lhsvar, rhsvar = rhsvar, lhsvar
-            lhsconst, rhsconst = rhsconst, lhsconst
+        leftvar, leftconst, rightvar, rightconst = relation_info(relation)
+        assert not (leftconst and rightconst), "doesn't make sense"
+        if relation.optional == 'left':
+            leftvar, rightvar = rightvar, leftvar
+            leftconst, rightconst = rightconst, leftconst
             joinattr, restrattr = 'eid_to', 'eid_from'
-        if relation.optional == 'both':
-            outertype = 'FULL'
         else:
-            outertype = 'LEFT'
-        if rschema.inlined or relation.r_type == 'identity':
-            self._state.done.add(relation)
-            t1 = self._var_table(lhsvar)
-            if relation.r_type == 'identity':
-                attr = 'eid'
-            else:
-                attr = relation.r_type
-            # reset lhs/rhs, we need the initial order now
-            lhs, rhs = relation.get_variable_parts()
-            if '%s.%s' % (lhs.name, attr) in self._varmap:
-                lhssql = self._varmap['%s.%s' % (lhs.name, attr)]
+            joinattr, restrattr = 'eid_from', 'eid_to'
+        # search table for this variable, to use as left table of the outer join
+        leftalias = None
+        if leftvar:
+            # take care, may return None for an invariant variable
+            leftalias = self._var_table(leftvar)
+        if leftalias is None:
+            if leftvar.stinfo['principal'] is not relation:
+                # use variable's principal relation
+                leftalias = leftvar.stinfo['principal']._q_sqltable
             else:
-                lhssql = '%s.%s%s' % (self._var_table(lhs.variable), SQL_PREFIX, attr)
-            if not rhsvar is None:
-                t2 = self._var_table(rhsvar)
-                if t2 is None:
-                    if rhsconst is not None:
-                        # inlined relation with invariant as rhs
-                        condition = '%s=%s' % (lhssql, rhsconst.accept(self))
-                        if relation.r_type != 'identity':
-                            condition = '(%s OR %s IS NULL)' % (condition, lhssql)
-                        if not lhsvar.stinfo.get('optrelations'):
-                            return condition
-                        self.add_outer_join_condition(lhsvar, t1, condition)
-                    return
+                # search for relation on which we should join
+                for orelation in leftvar.stinfo['relations']:
+                    if (orelation is not relation and
+                        not self.schema.rschema(orelation.r_type).final):
+                        break
+                else:
+                    for orelation in rightvar.stinfo['relations']:
+                        if (orelation is not relation and
+                            not self.schema.rschema(orelation.r_type).final
+                            and orelation.optional):
+                            break
+                    else:
+                        # unexpected
+                        assert False, leftvar
+                leftalias = self._state.relation_table(orelation)
+        # right table of the outer join
+        rightalias = self._state.relation_table(relation)
+        # compute join condition
+        if not leftconst or (leftvar and not leftvar._q_invariant):
+            leftsql = leftvar.accept(self)
+        else:
+            leftsql = leftconst.accept(self)
+        condition = '%s.%s=%s' % (rightalias, joinattr, leftsql)
+        if rightconst:
+            condition += ' AND %s.%s=%s' % (rightalias, restrattr, rightconst.accept(self))
+        # record outer join
+        outertype = 'FULL' if relation.optional == 'both' else 'LEFT'
+        self._state.replace_tables_by_outer_join(leftalias, rightalias,
+                                                 outertype, condition)
+        # need another join?
+        if rightconst is None:
+            # we need another outer join for the other side of the relation
+            # (e.g. for "X relation Y?" in RQL, we treated the (cw_X.eid /
+            # relation.eid_from) join earlier, now we have to do (relation.eid_to /
+            # cw_Y.eid))
+            leftalias = rightalias
+            rightsql = rightvar.accept(self) # accept before using var_table
+            rightalias = self._var_table(rightvar)
+            if rightalias is None:
+                if rightvar.stinfo['principal'] is not relation:
+                    self._state.replace_tables_by_outer_join(
+                        leftalias, rightvar.stinfo['principal']._q_sqltable,
+                        outertype, '%s.%s=%s' % (leftalias, restrattr, rightvar.accept(self)))
             else:
-                condition = '%s=%s' % (lhssql, rhsconst.accept(self))
-                self.add_outer_join_condition(lhsvar, t1, condition)
-            join = '%s OUTER JOIN %s ON (%s=%s)' % (
-                outertype, self._state.tables[t2][1], lhssql, rhs.accept(self))
-            self.replace_tables_by_outer_join(join, t1, t2)
-            return ''
-        lhssql = lhsconst and lhsconst.accept(self) or lhsvar.accept(self)
-        rhssql = rhsconst and rhsconst.accept(self) or rhsvar.accept(self)
-        rid = self._relation_table(relation)
-        if not lhsvar:
-            join = ''
-            toreplace = []
-            maintable = rid
-        else:
-            join = '%s OUTER JOIN %s ON (%s.%s=%s' % (
-                outertype, self._state.tables[rid][1], rid, joinattr, lhssql)
-            toreplace = [rid]
-            maintable = self._var_table(lhsvar)
-            if rhsconst:
-                join += ' AND %s.%s=%s)' % (rid, restrattr, rhssql)
-            else:
-                join += ')'
-        if not rhsconst:
-            rhstable = rhsvar._q_sqltable
-            if rhstable:
-                assert rhstable is not None, rhsvar
-                join += ' %s OUTER JOIN %s ON (%s.%s=%s)' % (
-                    outertype, self._state.tables[rhstable][1], rid, restrattr,
-                    rhssql)
-                toreplace.append(rhstable)
-        self.replace_tables_by_outer_join(join, maintable, *toreplace)
+                self._state.replace_tables_by_outer_join(
+                    leftalias, rightalias, outertype,
+                    '%s.%s=%s' % (leftalias, restrattr, rightvar.accept(self)))
+        # this relation will hence be expressed in FROM clause, return nothing
+        # here
+        return ''
+
+
+    def _visit_outer_join_inlined_relation(self, relation, rschema):
+        leftvar, leftconst, rightvar, rightconst = relation_info(relation)
+        assert not (leftconst and rightconst), "doesn't make sense"
+        if relation.optional != 'right':
+            leftvar, rightvar = rightvar, leftvar
+            leftconst, rightconst = rightconst, leftconst
+        outertype = 'FULL' if relation.optional == 'both' else 'LEFT'
+        leftalias = self._var_table(leftvar)
+        attr = 'eid' if relation.r_type == 'identity' else relation.r_type
+        lhs, rhs = relation.get_variable_parts()
+        try:
+            lhssql = self._varmap['%s.%s' % (lhs.name, attr)]
+        except KeyError:
+            lhssql = '%s.%s%s' % (self._var_table(lhs.variable), SQL_PREFIX, attr)
+        if rightvar is not None:
+            rightalias = self._var_table(rightvar)
+            if rightalias is None:
+                if rightconst is not None:
+                    # inlined relation with invariant as rhs
+                    condition = '%s=%s' % (lhssql, rightconst.accept(self))
+                    if relation.r_type != 'identity':
+                        condition = '(%s OR %s IS NULL)' % (condition, lhssql)
+                    if not leftvar.stinfo.get('optrelations'):
+                        return condition
+                    self._state.add_outer_join_condition(leftalias, condition)
+                return
+        self._state.replace_tables_by_outer_join(
+            leftalias, rightalias, outertype, '%s=%s' % (lhssql, rhs.accept(self)))
         return ''
 
     def _visit_var_attr_relation(self, relation, rhs_vars):
@@ -1048,7 +1306,7 @@
         except AttributeError:
             sql = '%s%s' % (lhssql, rhssql)
         if lhs.variable.stinfo.get('optrelations'):
-            self.add_outer_join_condition(lhs.variable, table, sql)
+            self._state.add_outer_join_condition(table, sql)
         else:
             return sql
 
@@ -1056,7 +1314,7 @@
         """generate SQL for a has_text relation"""
         lhs, rhs = rel.get_parts()
         const = rhs.children[0]
-        alias = self._fti_table(rel)
+        alias = self._state.fti_table(rel, self.dbhelper.fti_table)
         jointo = lhs.accept(self)
         restriction = ''
         lhsvar = lhs.variable
@@ -1070,7 +1328,7 @@
                 # external entities on multisources configurations
                 ealias = lhsvar._q_sqltable = '_' + lhsvar.name
                 jointo = lhsvar._q_sql = '%s.eid' % ealias
-                self.add_table('entities AS %s' % ealias, ealias)
+                self._state.add_table('entities AS %s' % ealias, ealias)
                 if not lhsvar._q_invariant or len(lhsvar.stinfo['possibletypes']) == 1:
                     restriction = " AND %s.type='%s'" % (ealias, self._state.solution[lhs.name])
                 else:
@@ -1133,8 +1391,9 @@
                 raise BadRQLQuery("can't use FTIRANK on variable not used in an"
                                   " 'has_text' relation (eg full-text search)")
             const = rel.get_parts()[1].children[0]
-            return self.dbhelper.fti_rank_order(self._fti_table(rel),
-                                                const.eval(self._args))
+            return self.dbhelper.fti_rank_order(
+                self._state.fti_table(rel, self.dbhelper.fti_table),
+                const.eval(self._args))
         args = [c.accept(self) for c in func.children]
         if func in self._state.source_cb_funcs:
             # function executed as a callback on the source
@@ -1182,7 +1441,7 @@
             table = sql.split('.', 1)[0]
             colalias._q_sqltable = table
             colalias._q_sql = sql
-            self.add_table(table)
+            self._state.add_table(table)
             return sql
         return colalias._q_sql
 
@@ -1206,7 +1465,7 @@
             principal = variable.stinfo['principal']
             if principal is None:
                 vtablename = '_' + variable.name
-                self.add_table('entities AS %s' % vtablename, vtablename)
+                self._state.add_table('entities AS %s' % vtablename, vtablename)
                 sql = '%s.eid' % vtablename
                 if variable.stinfo['typerel'] is not None:
                     # add additional restriction on entities.type column
@@ -1219,15 +1478,16 @@
                         restr = '%s.type IN (%s)' % (vtablename, etypes)
                     self._state.add_restriction(restr)
             elif principal.r_type == 'has_text':
-                sql = '%s.%s' % (self._fti_table(principal),
+                sql = '%s.%s' % (self._state.fti_table(principal,
+                                                       self.dbhelper.fti_table),
                                  self.dbhelper.fti_uid_attr)
             elif principal in variable.stinfo['rhsrelations']:
                 if self.schema.rschema(principal.r_type).inlined:
                     sql = self._linked_var_sql(variable)
                 else:
-                    sql = '%s.eid_to' % self._relation_table(principal)
+                    sql = '%s.eid_to' % self._state.relation_table(principal)
             else:
-                sql = '%s.eid_from' % self._relation_table(principal)
+                sql = '%s.eid_from' % self._state.relation_table(principal)
         else:
             # standard variable: get table name according to etype and use .eid
             # attribute
@@ -1277,7 +1537,7 @@
         sql = self._varmap[key]
         tablealias = sql.split('.', 1)[0]
         scope = self._temp_table_scope(term.stmt, tablealias)
-        self.add_table(tablealias, scope=scope)
+        self._state.add_table(tablealias, scope=scope)
         return sql, tablealias
 
     def _var_info(self, var):
@@ -1291,7 +1551,7 @@
                 raise BadRQLQuery(var.stmt.root)
             tablealias = '_' + var.name
             sql = '%s.%seid' % (tablealias, SQL_PREFIX)
-            self.add_table('%s%s AS %s' % (SQL_PREFIX, etype, tablealias),
+            self._state.add_table('%s%s AS %s' % (SQL_PREFIX, etype, tablealias),
                            tablealias, scope=scope)
         return sql, tablealias
 
@@ -1299,7 +1559,7 @@
         try:
             sql = self._varmap['%s.%s' % (var.name, rtype)]
             scope = self._state.scopes[var.scope]
-            self.add_table(sql.split('.', 1)[0], scope=scope)
+            self._state.add_table(sql.split('.', 1)[0], scope=scope)
         except KeyError:
             sql = '%s.%s%s' % (self._var_table(var), SQL_PREFIX, rtype)
             #self._state.done.add(var.name)
@@ -1339,107 +1599,6 @@
 
     # tables handling #########################################################
 
-    def alias_and_add_table(self, tablename, scope=-1):
-        alias = '%s%s' % (tablename, self._state.count)
-        self._state.count += 1
-        self.add_table('%s AS %s' % (tablename, alias), alias, scope)
-        return alias
-
-    def add_table(self, table, key=None, scope=-1):
-        if key is None:
-            key = table
-        if key in self._state.tables:
-            return
-        if scope < 0:
-            scope = len(self._state.actual_tables) + scope
-        self._state.tables[key] = (scope, table)
-        self._state.actual_tables[scope].append(table)
-
-    def replace_tables_by_outer_join(self, substitute, lefttable, *tables):
-        for table in tables:
-            try:
-                scope, alias = self._state.tables[table]
-                self._state.actual_tables[scope].remove(alias)
-            except ValueError: # huum, not sure about what should be done here
-                msg = "%s already used in an outer join, don't know what to do!"
-                raise Exception(msg % table)
-        try:
-            tablealias = self._state.outer_tables[lefttable]
-            actualtables = self._state.actual_tables[-1]
-        except KeyError:
-            tablescope, tablealias = self._state.tables[lefttable]
-            actualtables = self._state.actual_tables[tablescope]
-        outerjoin = '%s %s' % (tablealias, substitute)
-        self._update_outer_tables(lefttable, actualtables, tablealias, outerjoin)
-        for table in tables:
-            self._state.outer_tables[table] = outerjoin
-
-    def add_outer_join_condition(self, var, table, condition):
-        try:
-            tablealias = self._state.outer_tables[table]
-            actualtables = self._state.actual_tables[-1]
-        except KeyError:
-            for rel in var.stinfo.get('optrelations'):
-                self.visit_relation(rel)
-            assert self._state.outer_tables
-            self.add_outer_join_condition(var, table, condition)
-            return
-        before, after = tablealias.split(' AS %s ' % table, 1)
-        beforep, afterp = after.split(')', 1)
-        outerjoin = '%s AS %s %s AND %s) %s' % (before, table, beforep,
-                                                condition, afterp)
-        self._update_outer_tables(table, actualtables, tablealias, outerjoin)
-
-    def _update_outer_tables(self, table, actualtables, oldalias, newalias):
-        actualtables.remove(oldalias)
-        actualtables.append(newalias)
-        self._state.outer_tables[table] = newalias
-        # some tables which have already been used as outer table and replaced
-        # by <oldalias> may not be reused here, though their associated value
-        # in the outer_tables dict has to be updated as well
-        for table, outerexpr in self._state.outer_tables.iteritems():
-            if outerexpr == oldalias:
-                self._state.outer_tables[table] = newalias
-
     def _var_table(self, var):
         var.accept(self)#.visit_variable(var)
         return var._q_sqltable
-
-    def _relation_table(self, relation):
-        """return the table alias used by the given relation"""
-        if relation in self._state.done:
-            return relation._q_sqltable
-        assert not self.schema.rschema(relation.r_type).final, relation.r_type
-        rid = 'rel_%s%s' % (relation.r_type, self._state.count)
-        # relation's table is belonging to the root scope if it is the principal
-        # table of one of it's variable and if that variable belong's to parent
-        # scope
-        for varref in relation.iget_nodes(VariableRef):
-            var = varref.variable
-            if isinstance(var, ColumnAlias):
-                scope = 0
-                break
-            # XXX may have a principal without being invariant for this generation,
-            #     not sure this is a pb or not
-            if var.stinfo.get('principal') is relation and var.scope is var.stmt:
-                scope = 0
-                break
-        else:
-            scope = -1
-        self._state.count += 1
-        self.add_table('%s_relation AS %s' % (relation.r_type, rid), rid, scope=scope)
-        relation._q_sqltable = rid
-        self._state.done.add(relation)
-        return rid
-
-    def _fti_table(self, relation):
-        if relation in self._state.done:
-            try:
-                return relation._q_sqltable
-            except AttributeError:
-                pass
-        self._state.done.add(relation)
-        scope = self._state.scopes[relation.scope]
-        alias = self.alias_and_add_table(self.dbhelper.fti_table, scope=scope)
-        relation._q_sqltable = alias
-        return alias
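
The new tables_sql method above renders each matching outer chain as a single
FROM item, then joins it with the remaining plain tables using commas. A
standalone sketch of the per-chain rendering step, reusing the
(outertype, conditions, chain) layout documented in StateInfo (the table
definitions below are made up for the example)::

    def chain_to_sql(chain, outer_tables, tabledefs):
        """render one outer-join chain as a FROM clause fragment"""
        parts = [tabledefs[chain[0]]]
        for alias in chain[1:]:
            outertype, conditions, _ = outer_tables[alias]
            parts.append('%s OUTER JOIN %s ON (%s)'
                         % (outertype, tabledefs[alias], ' AND '.join(conditions)))
        return ' '.join(parts)

    chain = ['T1', 'T2']
    tabledefs = {'T1': 'cw_CWUser AS T1', 'T2': 'use_email_relation AS T2'}
    outer_tables = {'T1': (None, [], chain),
                    'T2': ('LEFT', ['T2.eid_from=T1.cw_eid'], chain)}
    assert chain_to_sql(chain, outer_tables, tabledefs) == (
        'cw_CWUser AS T1 LEFT OUTER JOIN use_email_relation AS T2 '
        'ON (T2.eid_from=T1.cw_eid)')
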
--- a/server/sources/storages.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/server/sources/storages.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -24,7 +24,7 @@
 
 from cubicweb import Binary, ValidationError
 from cubicweb.server import hook
-from cubicweb.server.ssplanner import EditedEntity
+from cubicweb.server.edition import EditedEntity
 
 
 def set_attribute_storage(repo, etype, attr, storage):
--- a/server/ssplanner.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/server/ssplanner.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -21,8 +21,6 @@
 
 __docformat__ = "restructuredtext en"
 
-from copy import copy
-
 from rql.stmts import Union, Select
 from rql.nodes import Constant, Relation
 
@@ -31,6 +29,7 @@
 from cubicweb.rqlrewrite import add_types_restriction
 from cubicweb.server.session import security_enabled
 from cubicweb.server.hook import CleanupDeletedEidsCacheOp
+from cubicweb.server.edition import EditedEntity
 
 READ_ONLY_RTYPES = set(('eid', 'has_text', 'is', 'is_instance_of', 'identity'))
 
@@ -128,132 +127,6 @@
     return select
 
 
-_MARKER = object()
-
-class dict_protocol_catcher(object):
-    def __init__(self, entity):
-        self.__entity = entity
-    def __getitem__(self, attr):
-        return self.__entity.cw_edited[attr]
-    def __setitem__(self, attr, value):
-        self.__entity.cw_edited[attr] = value
-    def __getattr__(self, attr):
-        return getattr(self.__entity, attr)
-
-
-class EditedEntity(dict):
-    """encapsulate entities attributes being written by an RQL query"""
-    def __init__(self, entity, **kwargs):
-        dict.__init__(self, **kwargs)
-        self.entity = entity
-        self.skip_security = set()
-        self.querier_pending_relations = {}
-        self.saved = False
-
-    def __hash__(self):
-        # dict|set keyable
-        return hash(id(self))
-
-    def __cmp__(self, other):
-        # we don't want comparison by value inherited from dict
-        return cmp(id(self), id(other))
-
-    def __setitem__(self, attr, value):
-        assert attr != 'eid'
-        # don't add attribute into skip_security if already in edited
-        # attributes, else we may accidentaly skip a desired security check
-        if attr not in self:
-            self.skip_security.add(attr)
-        self.edited_attribute(attr, value)
-
-    def __delitem__(self, attr):
-        assert not self.saved, 'too late to modify edited attributes'
-        super(EditedEntity, self).__delitem__(attr)
-        self.entity.cw_attr_cache.pop(attr, None)
-
-    def pop(self, attr, *args):
-        # don't update skip_security by design (think to storage api)
-        assert not self.saved, 'too late to modify edited attributes'
-        value = super(EditedEntity, self).pop(attr, *args)
-        self.entity.cw_attr_cache.pop(attr, *args)
-        return value
-
-    def setdefault(self, attr, default):
-        assert attr != 'eid'
-        # don't add attribute into skip_security if already in edited
-        # attributes, else we may accidentaly skip a desired security check
-        if attr not in self:
-            self[attr] = default
-        return self[attr]
-
-    def update(self, values, skipsec=True):
-        if skipsec:
-            setitem = self.__setitem__
-        else:
-            setitem = self.edited_attribute
-        for attr, value in values.iteritems():
-            setitem(attr, value)
-
-    def edited_attribute(self, attr, value):
-        """attribute being edited by a rql query: should'nt be added to
-        skip_security
-        """
-        assert not self.saved, 'too late to modify edited attributes'
-        super(EditedEntity, self).__setitem__(attr, value)
-        self.entity.cw_attr_cache[attr] = value
-
-    def oldnewvalue(self, attr):
-        """returns the couple (old attr value, new attr value)
-
-        NOTE: will only work in a before_update_entity hook
-        """
-        assert not self.saved, 'too late to get the old value'
-        # get new value and remove from local dict to force a db query to
-        # fetch old value
-        newvalue = self.entity.cw_attr_cache.pop(attr, _MARKER)
-        oldvalue = getattr(self.entity, attr)
-        if newvalue is not _MARKER:
-            self.entity.cw_attr_cache[attr] = newvalue
-        else:
-            newvalue = oldvalue
-        return oldvalue, newvalue
-
-    def set_defaults(self):
-        """set default values according to the schema"""
-        for attr, value in self.entity.e_schema.defaults():
-            if not attr in self:
-                self[str(attr)] = value
-
-    def check(self, creation=False):
-        """check the entity edition against its schema. Only final relation
-        are checked here, constraint on actual relations are checked in hooks
-        """
-        entity = self.entity
-        if creation:
-            # on creations, we want to check all relations, especially
-            # required attributes
-            relations = [rschema for rschema in entity.e_schema.subject_relations()
-                         if rschema.final and rschema.type != 'eid']
-        else:
-            relations = [entity._cw.vreg.schema.rschema(rtype)
-                         for rtype in self]
-        from yams import ValidationError
-        try:
-            entity.e_schema.check(dict_protocol_catcher(entity),
-                                  creation=creation, _=entity._cw._,
-                                  relations=relations)
-        except ValidationError, ex:
-            ex.entity = self.entity
-            raise
-
-    def clone(self):
-        thecopy = EditedEntity(copy(self.entity))
-        thecopy.entity.cw_attr_cache = copy(self.entity.cw_attr_cache)
-        thecopy.entity._cw_related_cache = {}
-        thecopy.update(self, skipsec=False)
-        return thecopy
-
-
 class SSPlanner(object):
     """SingleSourcePlanner: build execution plan for rql queries
 
--- a/server/test/unittest_checkintegrity.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/server/test/unittest_checkintegrity.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -19,14 +19,16 @@
 import sys
 from StringIO import StringIO
 from logilab.common.testlib import TestCase, unittest_main
-from cubicweb.devtools import init_test_database
+from cubicweb.devtools import get_test_db_handler, TestServerConfiguration
 
 
 from cubicweb.server.checkintegrity import check, reindex_entities
 
 class CheckIntegrityTC(TestCase):
     def setUp(self):
-        self.repo, self.cnx = init_test_database(apphome=self.datadir)
+        handler = get_test_db_handler(TestServerConfiguration(apphome=self.datadir))
+        handler.build_db_cache()
+        self.repo, self.cnx = handler.get_repo_and_cnx()
         self.execute = self.cnx.cursor().execute
         self.session = self.repo._sessions[self.cnx.sessionid]
         sys.stderr = sys.stdout = StringIO()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/unittest_datafeed.py	Wed Apr 27 09:54:22 2011 +0200
@@ -0,0 +1,98 @@
+# copyright 2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+from __future__ import with_statement
+
+from datetime import timedelta
+
+from cubicweb.devtools.testlib import CubicWebTC
+from cubicweb.server.sources import datafeed
+
+
+class DataFeedTC(CubicWebTC):
+    def setup_database(self):
+        self.request().create_entity('CWSource', name=u'myfeed', type=u'datafeed',
+                                    parser=u'testparser', url=u'ignored',
+                                    config=u'synchronization-interval=1min')
+
+    def test(self):
+        self.assertIn('myfeed', self.repo.sources_by_uri)
+        dfsource = self.repo.sources_by_uri['myfeed']
+        self.assertNotIn(dfsource, self.repo.sources)
+        self.assertEqual(dfsource.latest_retrieval, None)
+        self.assertEqual(dfsource.synchro_interval, timedelta(seconds=60))
+        self.assertFalse(dfsource.fresh())
+
+        class AParser(datafeed.DataFeedParser):
+            __regid__ = 'testparser'
+            def process(self, url):
+                entity = self.extid2entity('http://www.cubicweb.org/', 'Card',
+                                  item={'title': u'cubicweb.org',
+                                        'content': u'the cw web site'})
+                if not self.created_during_pull(entity):
+                    self.notify_updated(entity)
+            def before_entity_copy(self, entity, sourceparams):
+                entity.cw_edited.update(sourceparams['item'])
+
+        with self.temporary_appobjects(AParser):
+            stats = dfsource.pull_data(self.session, force=True)
+            self.commit()
+            # test import stats
+            self.assertEqual(sorted(stats.keys()), ['created', 'updated'])
+            self.assertEqual(len(stats['created']), 1)
+            entity = self.execute('Card X').get_entity(0, 0)
+            self.assertIn(entity.eid, stats['created'])
+            self.assertEqual(stats['updated'], set())
+            # test imported entities
+            self.assertEqual(entity.title, 'cubicweb.org')
+            self.assertEqual(entity.content, 'the cw web site')
+            self.assertEqual(entity.cwuri, 'http://www.cubicweb.org/')
+            self.assertEqual(entity.cw_source[0].name, 'myfeed')
+            self.assertEqual(entity.cw_metainformation(),
+                             {'type': 'Card',
+                              'source': {'uri': 'system', 'type': 'native'},
+                              'extid': 'http://www.cubicweb.org/'}
+                             )
+            # test repo cache keys
+            self.assertEqual(self.repo._type_source_cache[entity.eid],
+                             ('Card', 'system', 'http://www.cubicweb.org/'))
+            self.assertEqual(self.repo._extid_cache[('http://www.cubicweb.org/', 'system')],
+                             entity.eid)
+            # test repull
+            stats = dfsource.pull_data(self.session, force=True)
+            self.assertEqual(stats['created'], set())
+            self.assertEqual(stats['updated'], set((entity.eid,)))
+            # test repull with caches reset
+            self.repo._type_source_cache.clear()
+            self.repo._extid_cache.clear()
+            stats = dfsource.pull_data(self.session, force=True)
+            self.assertEqual(stats['created'], set())
+            self.assertEqual(stats['updated'], set((entity.eid,)))
+            self.assertEqual(self.repo._type_source_cache[entity.eid],
+                             ('Card', 'system', 'http://www.cubicweb.org/'))
+            self.assertEqual(self.repo._extid_cache[('http://www.cubicweb.org/', 'system')],
+                             entity.eid)
+
+        self.assertEqual(dfsource.source_cwuris(self.session),
+                         {'http://www.cubicweb.org/': (entity.eid, 'Card')}
+                         )
+        self.assertTrue(dfsource.latest_retrieval)
+        self.assertTrue(dfsource.fresh())
+
+if __name__ == '__main__':
+    from logilab.common.testlib import unittest_main
+    unittest_main()
--- a/server/test/unittest_ldapuser.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/server/test/unittest_ldapuser.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -51,8 +51,7 @@
     """
     assert login, 'no login!'
     searchfilter = [filter_format('(%s=%s)', (self.user_login_attr, login))]
-    searchfilter.extend([filter_format('(%s=%s)', ('objectClass', o))
-                         for o in self.user_classes])
+    searchfilter.extend(self.base_filters)
     searchstr = '(&%s)' % ''.join(searchfilter)
     # first search the user
     try:
@@ -463,8 +462,7 @@
         self.pool = repo._get_pool()
         session = mock_object(pool=self.pool)
         self.o = RQL2LDAPFilter(ldapsource, session)
-        self.ldapclasses = ''.join('(objectClass=%s)' % ldapcls
-                                   for ldapcls in ldapsource.user_classes)
+        self.ldapclasses = ''.join(ldapsource.base_filters)
 
     def tearDown(self):
         self._repo.turn_repo_off()
--- a/server/test/unittest_msplanner.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/server/test/unittest_msplanner.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -20,11 +20,10 @@
 from __future__ import with_statement
 
 from logilab.common.decorators import clear_cache
-
 from yams.buildobjs import RelationDefinition
 from rql import BadRQLQuery
 
-from cubicweb.devtools import init_test_database
+from cubicweb.devtools import get_test_db_handler, TestServerConfiguration
 from cubicweb.devtools.repotest import BasePlannerTC, test_plan
 
 class _SetGenerator(object):
@@ -43,7 +42,6 @@
 from cubicweb.server.msplanner import MSPlanner, PartPlanInformation
 
 class FakeUserROSource(AbstractSource):
-    uri = 'zzz'
     support_entities = {'CWUser': False}
     support_relations = {}
     def syntax_tree_search(self, *args, **kwargs):
@@ -51,7 +49,6 @@
 
 
 class FakeCardSource(AbstractSource):
-    uri = 'ccc'
     support_entities = {'Card': True, 'Note': True, 'State': True}
     support_relations = {'in_state': True, 'multisource_rel': True, 'multisource_inlined_rel': True,
                          'multisource_crossed_rel': True,}
@@ -61,12 +58,16 @@
     def syntax_tree_search(self, *args, **kwargs):
         return []
 
+
+class FakeDataFeedSource(FakeCardSource):
+    copy_based_source = True
+
 X_ALL_SOLS = sorted([{'X': 'Affaire'}, {'X': 'BaseTransition'}, {'X': 'Basket'},
                      {'X': 'Bookmark'}, {'X': 'CWAttribute'}, {'X': 'CWCache'},
                      {'X': 'CWConstraint'}, {'X': 'CWConstraintType'}, {'X': 'CWEType'},
                      {'X': 'CWGroup'}, {'X': 'CWPermission'}, {'X': 'CWProperty'},
                      {'X': 'CWRType'}, {'X': 'CWRelation'},
-                     {'X': 'CWSource'}, {'X': 'CWSourceHostConfig'},
+                     {'X': 'CWSource'}, {'X': 'CWSourceHostConfig'}, {'X': 'CWSourceSchemaConfig'},
                      {'X': 'CWUser'}, {'X': 'CWUniqueTogetherConstraint'},
                      {'X': 'Card'}, {'X': 'Comment'}, {'X': 'Division'},
                      {'X': 'Email'}, {'X': 'EmailAddress'}, {'X': 'EmailPart'},
@@ -81,7 +82,9 @@
 # keep cnx so it's not garbage collected and the associated session is closed
 def setUpModule(*args):
     global repo, cnx
-    repo, cnx = init_test_database(apphome=BaseMSPlannerTC.datadir)
+    handler = get_test_db_handler(TestServerConfiguration(apphome=BaseMSPlannerTC.datadir))
+    handler.build_db_cache()
+    repo, cnx = handler.get_repo_and_cnx()
 
 def tearDownModule(*args):
     global repo, cnx
@@ -113,6 +116,7 @@
         self.schema['CWUser'].set_action_permissions('read', userreadperms)
         self.add_source(FakeUserROSource, 'ldap')
         self.add_source(FakeCardSource, 'cards')
+        self.add_source(FakeDataFeedSource, 'datafeed')
 
     def tearDown(self):
         # restore hijacked security
@@ -428,7 +432,7 @@
         """retrieve CWUser X from both sources and return concatenation of results
         """
         self._test('CWUser X ORDERBY X LIMIT 10 OFFSET 10',
-                   [('AggrStep', 'SELECT table0.C0 FROM table0 ORDER BY table0.C0 LIMIT 10 OFFSET 10', None, [
+                   [('AggrStep', 'SELECT table0.C0 FROM table0\nORDER BY table0.C0\nLIMIT 10\nOFFSET 10', None, [
                        ('FetchStep', [('Any X WHERE X is CWUser', [{'X': 'CWUser'}])],
                         [self.ldap, self.system], {}, {'X': 'table0.C0'}, []),
                        ]),
@@ -513,7 +517,7 @@
 
     def test_complex_ordered(self):
         self._test('Any L ORDERBY L WHERE X login L',
-                   [('AggrStep', 'SELECT table0.C0 FROM table0 ORDER BY table0.C0', None,
+                   [('AggrStep', 'SELECT table0.C0 FROM table0\nORDER BY table0.C0', None,
                      [('FetchStep', [('Any L WHERE X login L, X is CWUser',
                                       [{'X': 'CWUser', 'L': 'String'}])],
                        [self.ldap, self.system], {}, {'X.login': 'table0.C0', 'L': 'table0.C0'}, []),
@@ -522,7 +526,7 @@
 
     def test_complex_ordered_limit_offset(self):
         self._test('Any L ORDERBY L LIMIT 10 OFFSET 10 WHERE X login L',
-                   [('AggrStep', 'SELECT table0.C0 FROM table0 ORDER BY table0.C0 LIMIT 10 OFFSET 10', None,
+                   [('AggrStep', 'SELECT table0.C0 FROM table0\nORDER BY table0.C0\nLIMIT 10\nOFFSET 10', None,
                      [('FetchStep', [('Any L WHERE X login L, X is CWUser',
                                       [{'X': 'CWUser', 'L': 'String'}])],
                        [self.ldap, self.system], {}, {'X.login': 'table0.C0', 'L': 'table0.C0'}, []),
@@ -608,7 +612,7 @@
         2. return content of the table sorted
         """
         self._test('Any X,F ORDERBY F WHERE X firstname F',
-                   [('AggrStep', 'SELECT table0.C0, table0.C1 FROM table0 ORDER BY table0.C1', None,
+                   [('AggrStep', 'SELECT table0.C0, table0.C1 FROM table0\nORDER BY table0.C1', None,
                      [('FetchStep', [('Any X,F WHERE X firstname F, X is CWUser',
                                       [{'X': 'CWUser', 'F': 'String'}])],
                        [self.ldap, self.system], {},
@@ -900,6 +904,7 @@
         ueid = self.session.user.eid
         ALL_SOLS = X_ALL_SOLS[:]
         ALL_SOLS.remove({'X': 'CWSourceHostConfig'}) # not authorized
+        ALL_SOLS.remove({'X': 'CWSourceSchemaConfig'}) # not authorized
         self._test('Any MAX(X)',
                    [('FetchStep', [('Any E WHERE E type "X", E is Note', [{'E': 'Note'}])],
                      [self.cards, self.system],  None, {'E': 'table1.C0'}, []),
@@ -950,7 +955,7 @@
         ueid = self.session.user.eid
         X_ET_ALL_SOLS = []
         for s in X_ALL_SOLS:
-            if s == {'X': 'CWSourceHostConfig'}:
+            if s in ({'X': 'CWSourceHostConfig'}, {'X': 'CWSourceSchemaConfig'}):
                 continue # not authorized
             ets = {'ET': 'CWEType'}
             ets.update(s)
@@ -1341,7 +1346,7 @@
         self._test('Any X ORDERBY FTIRANK(X) WHERE X has_text "bla", X firstname "bla"',
                    [('FetchStep', [('Any X WHERE X firstname "bla", X is CWUser', [{'X': 'CWUser'}])],
                      [self.ldap, self.system], None, {'X': 'table0.C0'}, []),
-                    ('AggrStep', 'SELECT table1.C1 FROM table1 ORDER BY table1.C0', None, [
+                    ('AggrStep', 'SELECT table1.C1 FROM table1\nORDER BY table1.C0', None, [
                         ('FetchStep', [('Any FTIRANK(X),X WHERE X has_text "bla", X is CWUser',
                                         [{'X': 'CWUser'}])],
                          [self.system], {'X': 'table0.C0'}, {'FTIRANK(X)': 'table1.C0', 'X': 'table1.C1'}, []),
@@ -1398,7 +1403,7 @@
 
     def test_sort_func(self):
         self._test('Note X ORDERBY DUMB_SORT(RF) WHERE X type RF',
-                   [('AggrStep', 'SELECT table0.C0 FROM table0 ORDER BY DUMB_SORT(table0.C1)', None, [
+                   [('AggrStep', 'SELECT table0.C0 FROM table0\nORDER BY DUMB_SORT(table0.C1)', None, [
                        ('FetchStep', [('Any X,RF WHERE X type RF, X is Note',
                                        [{'X': 'Note', 'RF': 'String'}])],
                         [self.cards, self.system], {}, {'X': 'table0.C0', 'X.type': 'table0.C1', 'RF': 'table0.C1'}, []),
@@ -1407,7 +1412,7 @@
 
     def test_ambigous_sort_func(self):
         self._test('Any X ORDERBY DUMB_SORT(RF) WHERE X title RF, X is IN (Bookmark, Card, EmailThread)',
-                   [('AggrStep', 'SELECT table0.C0 FROM table0 ORDER BY DUMB_SORT(table0.C1)', None,
+                   [('AggrStep', 'SELECT table0.C0 FROM table0\nORDER BY DUMB_SORT(table0.C1)', None,
                      [('FetchStep', [('Any X,RF WHERE X title RF, X is Card',
                                       [{'X': 'Card', 'RF': 'String'}])],
                        [self.cards, self.system], {},
@@ -1894,7 +1899,7 @@
         try:
             self._test('Any X,AA ORDERBY AA WHERE E eid %(x)s, E see_also X, X modification_date AA',
                        [('AggrStep',
-                         'SELECT table0.C0, table0.C1 FROM table0 ORDER BY table0.C1',
+                         'SELECT table0.C0, table0.C1 FROM table0\nORDER BY table0.C1',
                          None,
                          [('FetchStep',
                            [('Any X,AA WHERE 999999 see_also X, X modification_date AA, X is Note',
@@ -1957,6 +1962,22 @@
                     ])
 
     def test_source_specified_1_2(self):
+        self._test('Card X WHERE X cw_source S, S name "datafeed"',
+                   [('OneFetchStep', [('Any X WHERE X cw_source S, S name "datafeed", X is Card',
+                                       [{'X': 'Card', 'S': 'CWSource'}])],
+                     None, None,
+                     [self.system],{}, [])
+                    ])
+
+    def test_source_specified_1_3(self):
+        self._test('Any X, SN WHERE X is Card, X cw_source S, S name "datafeed", S name SN',
+                   [('OneFetchStep', [('Any X,SN WHERE X is Card, X cw_source S, S name "datafeed", '
+                                       'S name SN',
+                                       [{'S': 'CWSource', 'SN': 'String', 'X': 'Card'}])],
+                     None, None, [self.system], {}, [])
+                    ])
+
+    def test_source_specified_1_4(self):
         sols = []
         for sol in X_ALL_SOLS:
             sol = sol.copy()
@@ -2006,6 +2027,14 @@
                     ])
 
     def test_source_specified_3_2(self):
+        self._test('Any X,XT WHERE X is Card, X title XT, X cw_source S, S name "datafeed"',
+                   [('OneFetchStep',
+                     [('Any X,XT WHERE X is Card, X title XT, X cw_source S, S name "datafeed"',
+                       [{'X': 'Card', 'XT': 'String', 'S': 'CWSource'}])],
+                     None, None, [self.system], {}, [])
+                    ])
+
+    def test_source_specified_3_3(self):
         self.skipTest('oops')
         self._test('Any STN WHERE X is Note, X type XT, X in_state ST, ST name STN, X cw_source S, S name "cards"',
                    [('OneFetchStep',
@@ -2044,7 +2073,7 @@
         try:
             self._test('Any X,AA ORDERBY AA WHERE E eid %(x)s, E see_also X, X modification_date AA',
                        [('AggrStep',
-                         'SELECT table0.C0, table0.C1 FROM table0 ORDER BY table0.C1',
+                         'SELECT table0.C0, table0.C1 FROM table0\nORDER BY table0.C1',
                          None,
                          [('FetchStep',
                            [('Any X,AA WHERE 999999 see_also X, X modification_date AA, X is Note',
@@ -2091,7 +2120,7 @@
                     ('FetchStep', [('Any X,D WHERE X modification_date D, X is CWUser',
                                     [{'X': 'CWUser', 'D': 'Datetime'}])],
                      [self.ldap, self.system], None, {'X': 'table1.C0', 'X.modification_date': 'table1.C1', 'D': 'table1.C1'}, []),
-                    ('AggrStep', 'SELECT table2.C0 FROM table2 ORDER BY table2.C1 DESC', None, [
+                    ('AggrStep', 'SELECT table2.C0 FROM table2\nORDER BY table2.C1 DESC', None, [
                         ('FetchStep', [('Any X,D WHERE E eid %s, E wf_info_for X, X modification_date D, E is TrInfo, X is Affaire'%treid,
                                         [{'X': 'Affaire', 'E': 'TrInfo', 'D': 'Datetime'}])],
                          [self.system],
@@ -2240,7 +2269,7 @@
                                     [{'X': 'Note', 'Z': 'Datetime'}])],
                      [self.cards, self.system], None, {'X': 'table0.C0', 'X.modification_date': 'table0.C1', 'Z': 'table0.C1'},
                      []),
-                    ('AggrStep', 'SELECT table1.C0 FROM table1 ORDER BY table1.C1 DESC', None,
+                    ('AggrStep', 'SELECT table1.C0 FROM table1\nORDER BY table1.C1 DESC', None,
                      [('FetchStep', [('Any X,Z WHERE X modification_date Z, 999999 see_also X, X is Bookmark',
                                       [{'X': 'Bookmark', 'Z': 'Datetime'}])],
                        [self.system], {},   {'X': 'table1.C0', 'X.modification_date': 'table1.C1',
@@ -2356,6 +2385,56 @@
                      None, None, [self.system], {}, [])],
                    {'x': 999999, 'u': 999998})
 
+    def test_nonregr_similar_subquery(self):
+        repo._type_source_cache[999999] = ('Personne', 'system', 999999)
+        self._test('Any T,TD,U,T,UL WITH T,TD,U,UL BEING ('
+                   '(Any T,TD,U,UL WHERE X eid %(x)s, T comments X, T content TD, T created_by U?, U login UL)'
+                   ' UNION '
+                   '(Any T,TD,U,UL WHERE X eid %(x)s, X connait P, T comments P, T content TD, T created_by U?, U login UL))',
+                   # XXX optimization: use a OneFetchStep with a UNION of both queries
+                   [('FetchStep', [('Any U,UL WHERE U login UL, U is CWUser',
+                                    [{'U': 'CWUser', 'UL': 'String'}])],
+                     [self.ldap, self.system], None,
+                     {'U': 'table0.C0', 'U.login': 'table0.C1', 'UL': 'table0.C1'},
+                     []),
+                    ('UnionFetchStep',
+                     [('FetchStep',
+                       [('Any T,TD,U,UL WHERE T comments 999999, T content TD, T created_by U?, U login UL, T is Comment, U is CWUser',
+                         [{'T': 'Comment', 'TD': 'String', 'U': 'CWUser', 'UL': 'String'}])],
+                       [self.system],
+                       {'U': 'table0.C0', 'U.login': 'table0.C1', 'UL': 'table0.C1'},
+                       {'T': 'table1.C0',
+                        'T.content': 'table1.C1',
+                        'TD': 'table1.C1',
+                        'U': 'table1.C2',
+                        'U.login': 'table1.C3',
+                        'UL': 'table1.C3'},
+                       []),
+                      ('FetchStep',
+                       [('Any T,TD,U,UL WHERE 999999 connait P, T comments P, T content TD, T created_by U?, U login UL, P is Personne, T is Comment, U is CWUser',
+                         [{'P': 'Personne',
+                           'T': 'Comment',
+                           'TD': 'String',
+                           'U': 'CWUser',
+                           'UL': 'String'}])],
+                       [self.system],
+                       {'U': 'table0.C0', 'U.login': 'table0.C1', 'UL': 'table0.C1'},
+                       {'T': 'table1.C0',
+                        'T.content': 'table1.C1',
+                        'TD': 'table1.C1',
+                        'U': 'table1.C2',
+                        'U.login': 'table1.C3',
+                        'UL': 'table1.C3'},
+                       [])]),
+                    ('OneFetchStep',
+                     [('Any T,TD,U,T,UL',
+                       [{'T': 'Comment', 'TD': 'String', 'U': 'CWUser', 'UL': 'String'}])],
+                     None, None,
+                     [self.system],
+                     {'T': 'table1.C0', 'TD': 'table1.C1', 'U': 'table1.C2', 'UL': 'table1.C3'},
+                     [])],
+                   {'x': 999999})
+
 
 class MSPlannerTwoSameExternalSourcesTC(BasePlannerTC):
     """test planner related feature on a 3-sources repository:
@@ -2542,7 +2621,7 @@
                      None, {'X': 'table0.C0'}, []),
                     ('UnionStep', None, None,
                      [('OneFetchStep',
-                       [(u'Any X WHERE X owned_by U, U login "anon", U is CWUser, X is IN(Affaire, BaseTransition, Basket, Bookmark, CWAttribute, CWCache, CWConstraint, CWConstraintType, CWEType, CWGroup, CWPermission, CWProperty, CWRType, CWRelation, CWSource, CWSourceHostConfig, CWUniqueTogetherConstraint, CWUser, Division, Email, EmailAddress, EmailPart, EmailThread, ExternalUri, File, Folder, Personne, RQLExpression, Societe, SubDivision, SubWorkflowExitPoint, Tag, TrInfo, Transition, Workflow, WorkflowTransition)',
+                       [(u'Any X WHERE X owned_by U, U login "anon", U is CWUser, X is IN(Affaire, BaseTransition, Basket, Bookmark, CWAttribute, CWCache, CWConstraint, CWConstraintType, CWEType, CWGroup, CWPermission, CWProperty, CWRType, CWRelation, CWSource, CWSourceHostConfig, CWSourceSchemaConfig, CWUniqueTogetherConstraint, CWUser, Division, Email, EmailAddress, EmailPart, EmailThread, ExternalUri, File, Folder, Personne, RQLExpression, Societe, SubDivision, SubWorkflowExitPoint, Tag, TrInfo, Transition, Workflow, WorkflowTransition)',
                          [{'U': 'CWUser', 'X': 'Affaire'},
                           {'U': 'CWUser', 'X': 'BaseTransition'},
                           {'U': 'CWUser', 'X': 'Basket'},
@@ -2559,6 +2638,7 @@
                           {'U': 'CWUser', 'X': 'CWRelation'},
                           {'U': 'CWUser', 'X': 'CWSource'},
                           {'U': 'CWUser', 'X': 'CWSourceHostConfig'},
+                          {'U': 'CWUser', 'X': 'CWSourceSchemaConfig'},
                           {'U': 'CWUser', 'X': 'CWUniqueTogetherConstraint'},
                           {'U': 'CWUser', 'X': 'CWUser'},
                           {'U': 'CWUser', 'X': 'Division'},
--- a/server/test/unittest_multisources.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/server/test/unittest_multisources.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
- # copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -17,6 +17,7 @@
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 
 from datetime import datetime, timedelta
+from itertools import repeat
 
 from cubicweb.devtools import TestServerConfiguration, init_test_database
 from cubicweb.devtools.testlib import CubicWebTC, Tags
@@ -35,7 +36,6 @@
 pyro-ns-id = extern
 cubicweb-user = admin
 cubicweb-password = gingkow
-mapping-file = extern_mapping.py
 base-url=http://extern.org/
 '''
 
@@ -46,6 +46,12 @@
 PyroRQLSource_get_connection = PyroRQLSource.get_connection
 Connection_close = Connection.close
 
+def add_extern_mapping(source):
+    source.init_mapping(zip(('Card', 'Affaire', 'State',
+                             'in_state', 'documented_by', 'multisource_inlined_rel'),
+                            repeat(u'write')))
+
+
 def pre_setup_database_extern(session, config):
     session.execute('INSERT Card X: X title "C3: An external card", X wikiid "aaa"')
     session.execute('INSERT Card X: X title "C4: Ze external card", X wikiid "zzz"')
@@ -119,11 +125,13 @@
 pyro-ns-id = extern-multi
 cubicweb-user = admin
 cubicweb-password = gingkow
-mapping-file = extern_mapping.py
 ''')]:
-            session.create_entity('CWSource', name=unicode(uri),
-                                         type=u'pyrorql',
-                                         config=unicode(src_config))
+            source = session.create_entity('CWSource', name=unicode(uri),
+                                           type=u'pyrorql',
+                                           config=unicode(src_config))
+            session.commit()
+            add_extern_mapping(source)
+
         session.commit()
         # trigger discovery
         session.execute('Card X')
--- a/server/test/unittest_querier.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/server/test/unittest_querier.py	Wed Apr 27 09:54:22 2011 +0200
@@ -504,15 +504,15 @@
                               [[u'description_format', 12],
                                [u'description', 13],
                                [u'name', 15],
-                               [u'created_by', 40],
-                               [u'creation_date', 40],
-                               [u'cw_source', 40],
-                               [u'cwuri', 40],
-                               [u'in_basket', 40],
-                               [u'is', 40],
-                               [u'is_instance_of', 40],
-                               [u'modification_date', 40],
-                               [u'owned_by', 40]])
+                               [u'created_by', 41],
+                               [u'creation_date', 41],
+                               [u'cw_source', 41],
+                               [u'cwuri', 41],
+                               [u'in_basket', 41],
+                               [u'is', 41],
+                               [u'is_instance_of', 41],
+                               [u'modification_date', 41],
+                               [u'owned_by', 41]])
 
     def test_select_aggregat_having_dumb(self):
         # dumb but should not raise an error
--- a/server/test/unittest_repository.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/server/test/unittest_repository.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,5 +1,5 @@
 # -*- coding: iso-8859-1 -*-
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -105,7 +105,9 @@
         self.failIf(self.execute('Any X WHERE NOT X cw_source S'))
 
     def test_connect(self):
-        self.assert_(self.repo.connect(self.admlogin, password=self.admpassword))
+        cnxid = self.repo.connect(self.admlogin, password=self.admpassword)
+        self.assert_(cnxid)
+        self.repo.close(cnxid)
         self.assertRaises(AuthenticationError,
                           self.repo.connect, self.admlogin, password='nimportnawak')
         self.assertRaises(AuthenticationError,
@@ -133,7 +135,9 @@
                      {'login': u"barnabé", 'passwd': u"héhéhé".encode('UTF8')})
         repo.commit(cnxid)
         repo.close(cnxid)
-        self.assert_(repo.connect(u"barnabé", password=u"héhéhé".encode('UTF8')))
+        cnxid = repo.connect(u"barnabé", password=u"héhéhé".encode('UTF8'))
+        self.assert_(cnxid)
+        repo.close(cnxid)
 
     def test_rollback_on_commit_error(self):
         cnxid = self.repo.connect(self.admlogin, password=self.admpassword)
@@ -142,6 +146,7 @@
                           {'login': u"tutetute", 'passwd': 'tutetute'})
         self.assertRaises(ValidationError, self.repo.commit, cnxid)
         self.failIf(self.repo.execute(cnxid, 'CWUser X WHERE X login "tutetute"'))
+        self.repo.close(cnxid)
 
     def test_rollback_on_execute_validation_error(self):
         class ValidationErrorAfterHook(Hook):
@@ -234,6 +239,7 @@
         repo.commit(cnxid)
         result = repo.execute(cnxid, 'Personne X')
         self.assertEqual(result.rowcount, 1)
+        repo.close(cnxid)
 
     def test_transaction_base2(self):
         repo = self.repo
@@ -245,6 +251,7 @@
         repo.rollback(cnxid)
         result = repo.execute(cnxid, "Any U WHERE U in_group G, U login 'admin', G name 'guests'")
         self.assertEqual(result.rowcount, 0, result.rows)
+        repo.close(cnxid)
 
     def test_transaction_base3(self):
         repo = self.repo
@@ -259,6 +266,7 @@
         repo.rollback(cnxid)
         rset = repo.execute(cnxid, 'TrInfo T WHERE T wf_info_for X, X eid %(x)s', {'x': user.eid})
         self.assertEqual(len(rset), 0)
+        repo.close(cnxid)
 
     def test_transaction_interleaved(self):
         self.skipTest('implement me')
@@ -378,6 +386,7 @@
         self.assertEqual(repo.eid2extid(repo.system_source, 2, session), None)
         class dummysource: uri = 'toto'
         self.assertRaises(UnknownEid, repo.eid2extid, dummysource, 2, session)
+        repo.close(cnxid)
 
     def test_public_api(self):
         self.assertEqual(self.repo.get_schema(), self.repo.schema)
--- a/server/test/unittest_rql2sql.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/server/test/unittest_rql2sql.py	Wed Apr 27 09:54:22 2011 +0200
@@ -18,6 +18,7 @@
 """unit tests for module cubicweb.server.sources.rql2sql"""
 
 import sys
+import os
 
 from logilab.common.testlib import TestCase, unittest_main, mock_object
 
@@ -38,6 +39,25 @@
     pass # already registered
 
 
+from logilab import database as db
+def monkey_patch_import_driver_module(driver, drivers, quiet=True):
+    if not driver in drivers:
+        raise db.UnknownDriver(driver)
+    for modname in drivers[driver]:
+        try:
+            if not quiet:
+                print >> sys.stderr, 'Trying %s' % modname
+            module = db.load_module_from_name(modname, use_sys=False)
+            break
+        except ImportError:
+            if not quiet:
+                print >> sys.stderr, '%s is not available' % modname
+            continue
+    else:
+        return mock_object(STRING=1, BOOLEAN=2, BINARY=3, DATETIME=4, NUMBER=5), drivers[driver][0]
+    return module, modname
+
+
 def setUpModule():
     global config, schema
     config = TestServerConfiguration('data', apphome=CWRQLTC.datadir)
@@ -46,10 +66,14 @@
     schema['in_state'].inlined = True
     schema['state_of'].inlined = False
     schema['comments'].inlined = False
+    db._backup_import_driver_module = db._import_driver_module
+    db._import_driver_module = monkey_patch_import_driver_module
 
 def tearDownModule():
     global config, schema
     del config, schema
+    db._import_driver_module = db._backup_import_driver_module
+    del db._backup_import_driver_module
 
 PARSER = [
     (r"Personne P WHERE P nom 'Zig\'oto';",
@@ -93,12 +117,6 @@
     ("Personne P WHERE P eid -1",
      '''SELECT -1'''),
 
-    ("Personne P LIMIT 20 OFFSET 10",
-     '''SELECT _P.cw_eid
-FROM cw_Personne AS _P
-LIMIT 20
-OFFSET 10'''),
-
     ("Personne P WHERE S is Societe, P travaille S, S nom 'Logilab';",
      '''SELECT rel_travaille0.eid_from
 FROM cw_Societe AS _S, travaille_relation AS rel_travaille0
@@ -172,13 +190,13 @@
      "EXISTS(X owned_by U, U in_group G, G name 'lulufanclub' OR G name 'managers');",
      '''SELECT _X.cw_eid
 FROM cw_Personne AS _X
-WHERE _X.cw_prenom=lulu AND EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by0, in_group_relation AS rel_in_group1, cw_CWGroup AS _G WHERE rel_owned_by0.eid_from=_X.cw_eid AND rel_in_group1.eid_from=rel_owned_by0.eid_to AND rel_in_group1.eid_to=_G.cw_eid AND ((_G.cw_name=lulufanclub) OR (_G.cw_name=managers)))'''),
+WHERE _X.cw_prenom=lulu AND EXISTS(SELECT 1 FROM cw_CWGroup AS _G, in_group_relation AS rel_in_group1, owned_by_relation AS rel_owned_by0 WHERE rel_owned_by0.eid_from=_X.cw_eid AND rel_in_group1.eid_from=rel_owned_by0.eid_to AND rel_in_group1.eid_to=_G.cw_eid AND ((_G.cw_name=lulufanclub) OR (_G.cw_name=managers)))'''),
 
     ("Any X WHERE X prenom 'lulu',"
      "NOT EXISTS(X owned_by U, U in_group G, G name 'lulufanclub' OR G name 'managers');",
      '''SELECT _X.cw_eid
 FROM cw_Personne AS _X
-WHERE _X.cw_prenom=lulu AND NOT (EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by0, in_group_relation AS rel_in_group1, cw_CWGroup AS _G WHERE rel_owned_by0.eid_from=_X.cw_eid AND rel_in_group1.eid_from=rel_owned_by0.eid_to AND rel_in_group1.eid_to=_G.cw_eid AND ((_G.cw_name=lulufanclub) OR (_G.cw_name=managers))))'''),
+WHERE _X.cw_prenom=lulu AND NOT (EXISTS(SELECT 1 FROM cw_CWGroup AS _G, in_group_relation AS rel_in_group1, owned_by_relation AS rel_owned_by0 WHERE rel_owned_by0.eid_from=_X.cw_eid AND rel_in_group1.eid_from=rel_owned_by0.eid_to AND rel_in_group1.eid_to=_G.cw_eid AND ((_G.cw_name=lulufanclub) OR (_G.cw_name=managers))))'''),
 
     ('Any X WHERE  X title V, NOT X wikiid V, NOT X title "parent", X is Card',
      '''SELECT _X.cw_eid
@@ -186,6 +204,14 @@
 WHERE NOT (_X.cw_wikiid=_X.cw_title) AND NOT (_X.cw_title=parent)''')
 ]
 
+BASIC_WITH_LIMIT = [
+    ("Personne P LIMIT 20 OFFSET 10",
+     '''SELECT _P.cw_eid
+FROM cw_Personne AS _P
+LIMIT 20
+OFFSET 10'''),
+    ]
+
 
 ADVANCED = [
     ("Societe S WHERE S nom 'Logilab' OR S nom 'Caesium'",
@@ -279,12 +305,6 @@
 FROM cw_Note AS _S, cw_Personne AS _O
 WHERE (_S.cw_ecrit_par IS NULL OR _S.cw_ecrit_par!=_O.cw_eid) AND _S.cw_eid=1 AND _S.cw_inline1 IS NOT NULL AND _O.cw_inline2=_S.cw_inline1'''),
 
-    ('DISTINCT Any S ORDERBY stockproc(SI) WHERE NOT S ecrit_par O, S para SI',
-     '''SELECT T1.C0 FROM (SELECT DISTINCT _S.cw_eid AS C0, STOCKPROC(_S.cw_para) AS C1
-FROM cw_Note AS _S
-WHERE _S.cw_ecrit_par IS NULL
-ORDER BY 2) AS T1'''),
-
     ('Any N WHERE N todo_by U, N is Note, U eid 2, N filed_under T, T eid 3',
      # N would actually be invarient if U eid 2 had given a specific type to U
      '''SELECT _N.cw_eid
@@ -333,13 +353,6 @@
 WHERE rel_tags0.eid_to=_X.cw_eid AND _X.cw_in_state=32
 GROUP BY _X.cw_eid'''),
 
-    ('Any COUNT(S),CS GROUPBY CS ORDERBY 1 DESC LIMIT 10 WHERE S is Affaire, C is Societe, S concerne C, C nom CS, (EXISTS(S owned_by 1)) OR (EXISTS(S documented_by N, N title "published"))',
-     '''SELECT COUNT(rel_concerne0.eid_from), _C.cw_nom
-FROM concerne_relation AS rel_concerne0, cw_Societe AS _C
-WHERE rel_concerne0.eid_to=_C.cw_eid AND ((EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by1 WHERE rel_concerne0.eid_from=rel_owned_by1.eid_from AND rel_owned_by1.eid_to=1)) OR (EXISTS(SELECT 1 FROM documented_by_relation AS rel_documented_by2, cw_Card AS _N WHERE rel_concerne0.eid_from=rel_documented_by2.eid_from AND rel_documented_by2.eid_to=_N.cw_eid AND _N.cw_title=published)))
-GROUP BY _C.cw_nom
-ORDER BY 1 DESC
-LIMIT 10'''),
 
     ('Any X WHERE Y evaluee X, Y is CWUser',
      '''SELECT rel_evaluee0.eid_to
@@ -435,18 +448,11 @@
 GROUP BY _X.cw_data_name,_X.cw_data_format
 ORDER BY 1,2,_X.cw_data_format'''),
 
-    ('DISTINCT Any MAX(X)+MIN(LENGTH(D)), N GROUPBY N ORDERBY 2, DF WHERE X data_name N, X data D, X data_format DF;',
-     '''SELECT T1.C0,T1.C1 FROM (SELECT DISTINCT (MAX(_X.cw_eid) + MIN(LENGTH(_X.cw_data))) AS C0, _X.cw_data_name AS C1, _X.cw_data_format AS C2
-FROM cw_File AS _X
-GROUP BY _X.cw_data_name,_X.cw_data_format
-ORDER BY 2,3) AS T1
-'''),
-
     # ambiguity in EXISTS() -> should union the sub-query
     ('Any T WHERE T is Tag, NOT T name in ("t1", "t2"), EXISTS(T tags X, X is IN (CWUser, CWGroup))',
      '''SELECT _T.cw_eid
 FROM cw_Tag AS _T
-WHERE NOT (_T.cw_name IN(t1, t2)) AND EXISTS(SELECT 1 FROM tags_relation AS rel_tags0, cw_CWGroup AS _X WHERE rel_tags0.eid_from=_T.cw_eid AND rel_tags0.eid_to=_X.cw_eid UNION SELECT 1 FROM tags_relation AS rel_tags1, cw_CWUser AS _X WHERE rel_tags1.eid_from=_T.cw_eid AND rel_tags1.eid_to=_X.cw_eid)'''),
+WHERE NOT (_T.cw_name IN(t1, t2)) AND EXISTS(SELECT 1 FROM cw_CWGroup AS _X, tags_relation AS rel_tags0 WHERE rel_tags0.eid_from=_T.cw_eid AND rel_tags0.eid_to=_X.cw_eid UNION SELECT 1 FROM cw_CWUser AS _X, tags_relation AS rel_tags1 WHERE rel_tags1.eid_from=_T.cw_eid AND rel_tags1.eid_to=_X.cw_eid)'''),
 
     # must not use a relation in EXISTS scope to inline a variable
     ('Any U WHERE U eid IN (1,2), EXISTS(X owned_by U)',
@@ -462,7 +468,7 @@
     ('Any COUNT(U) WHERE EXISTS (P owned_by U, P is IN (Note, Affaire))',
      '''SELECT COUNT(_U.cw_eid)
 FROM cw_CWUser AS _U
-WHERE EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by0, cw_Affaire AS _P WHERE rel_owned_by0.eid_from=_P.cw_eid AND rel_owned_by0.eid_to=_U.cw_eid UNION SELECT 1 FROM owned_by_relation AS rel_owned_by1, cw_Note AS _P WHERE rel_owned_by1.eid_from=_P.cw_eid AND rel_owned_by1.eid_to=_U.cw_eid)'''),
+WHERE EXISTS(SELECT 1 FROM cw_Affaire AS _P, owned_by_relation AS rel_owned_by0 WHERE rel_owned_by0.eid_from=_P.cw_eid AND rel_owned_by0.eid_to=_U.cw_eid UNION SELECT 1 FROM cw_Note AS _P, owned_by_relation AS rel_owned_by1 WHERE rel_owned_by1.eid_from=_P.cw_eid AND rel_owned_by1.eid_to=_U.cw_eid)'''),
 
     ('Any MAX(X)',
      '''SELECT MAX(_X.eid)
@@ -512,6 +518,72 @@
 GROUP BY rel_owned_by0.eid_to
 HAVING COUNT(rel_owned_by0.eid_from)>10'''),
 
+
+    ("Any X WHERE X eid 0, X test TRUE",
+     '''SELECT _X.cw_eid
+FROM cw_Personne AS _X
+WHERE _X.cw_eid=0 AND _X.cw_test=TRUE'''),
+
+    ('Any 1 WHERE X in_group G, X is CWUser',
+     '''SELECT 1
+FROM in_group_relation AS rel_in_group0'''),
+
+    ('CWEType X WHERE X name CV, X description V HAVING NOT V=CV AND NOT V = "parent"',
+     '''SELECT _X.cw_eid
+FROM cw_CWEType AS _X
+WHERE NOT (EXISTS(SELECT 1 WHERE _X.cw_description=parent)) AND NOT (EXISTS(SELECT 1 WHERE _X.cw_description=_X.cw_name))'''),
+    ('CWEType X WHERE X name CV, X description V HAVING V!=CV AND V != "parent"',
+     '''SELECT _X.cw_eid
+FROM cw_CWEType AS _X
+WHERE _X.cw_description!=parent AND _X.cw_description!=_X.cw_name'''),
+    ]
+
+ADVANCED_WITH_GROUP_CONCAT = [
+        ("Any X,GROUP_CONCAT(TN) GROUPBY X ORDERBY XN WHERE T tags X, X name XN, T name TN, X is CWGroup",
+     '''SELECT _X.cw_eid, GROUP_CONCAT(_T.cw_name)
+FROM cw_CWGroup AS _X, cw_Tag AS _T, tags_relation AS rel_tags0
+WHERE rel_tags0.eid_from=_T.cw_eid AND rel_tags0.eid_to=_X.cw_eid
+GROUP BY _X.cw_eid,_X.cw_name
+ORDER BY _X.cw_name'''),
+
+    ("Any X,GROUP_CONCAT(TN) GROUPBY X ORDERBY XN WHERE T tags X, X name XN, T name TN",
+     '''SELECT T1.C0, GROUP_CONCAT(T1.C1) FROM (SELECT _X.cw_eid AS C0, _T.cw_name AS C1, _X.cw_name AS C2
+FROM cw_CWGroup AS _X, cw_Tag AS _T, tags_relation AS rel_tags0
+WHERE rel_tags0.eid_from=_T.cw_eid AND rel_tags0.eid_to=_X.cw_eid
+UNION ALL
+SELECT _X.cw_eid AS C0, _T.cw_name AS C1, _X.cw_name AS C2
+FROM cw_State AS _X, cw_Tag AS _T, tags_relation AS rel_tags0
+WHERE rel_tags0.eid_from=_T.cw_eid AND rel_tags0.eid_to=_X.cw_eid
+UNION ALL
+SELECT _X.cw_eid AS C0, _T.cw_name AS C1, _X.cw_name AS C2
+FROM cw_Tag AS _T, cw_Tag AS _X, tags_relation AS rel_tags0
+WHERE rel_tags0.eid_from=_T.cw_eid AND rel_tags0.eid_to=_X.cw_eid) AS T1
+GROUP BY T1.C0,T1.C2
+ORDER BY T1.C2'''),
+
+]
+
+ADVANCED_WITH_LIMIT_OR_ORDERBY = [
+    ('Any COUNT(S),CS GROUPBY CS ORDERBY 1 DESC LIMIT 10 WHERE S is Affaire, C is Societe, S concerne C, C nom CS, (EXISTS(S owned_by 1)) OR (EXISTS(S documented_by N, N title "published"))',
+     '''SELECT COUNT(rel_concerne0.eid_from), _C.cw_nom
+FROM concerne_relation AS rel_concerne0, cw_Societe AS _C
+WHERE rel_concerne0.eid_to=_C.cw_eid AND ((EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by1 WHERE rel_concerne0.eid_from=rel_owned_by1.eid_from AND rel_owned_by1.eid_to=1)) OR (EXISTS(SELECT 1 FROM cw_Card AS _N, documented_by_relation AS rel_documented_by2 WHERE rel_concerne0.eid_from=rel_documented_by2.eid_from AND rel_documented_by2.eid_to=_N.cw_eid AND _N.cw_title=published)))
+GROUP BY _C.cw_nom
+ORDER BY 1 DESC
+LIMIT 10'''),
+    ('DISTINCT Any S ORDERBY stockproc(SI) WHERE NOT S ecrit_par O, S para SI',
+     '''SELECT T1.C0 FROM (SELECT DISTINCT _S.cw_eid AS C0, STOCKPROC(_S.cw_para) AS C1
+FROM cw_Note AS _S
+WHERE _S.cw_ecrit_par IS NULL
+ORDER BY 2) AS T1'''),
+
+    ('DISTINCT Any MAX(X)+MIN(LENGTH(D)), N GROUPBY N ORDERBY 2, DF WHERE X data_name N, X data D, X data_format DF;',
+     '''SELECT T1.C0,T1.C1 FROM (SELECT DISTINCT (MAX(_X.cw_eid) + MIN(LENGTH(_X.cw_data))) AS C0, _X.cw_data_name AS C1, _X.cw_data_format AS C2
+FROM cw_File AS _X
+GROUP BY _X.cw_data_name,_X.cw_data_format
+ORDER BY 2,3) AS T1
+'''),
+
     ('DISTINCT Any X ORDERBY stockproc(X) WHERE U login X',
      '''SELECT T1.C0 FROM (SELECT DISTINCT _U.cw_login AS C0, STOCKPROC(_U.cw_login) AS C1
 FROM cw_CWUser AS _U
@@ -546,48 +618,8 @@
 ORDER BY 4 DESC'''),
 
 
-    ("Any X WHERE X eid 0, X test TRUE",
-     '''SELECT _X.cw_eid
-FROM cw_Personne AS _X
-WHERE _X.cw_eid=0 AND _X.cw_test=TRUE'''),
-
-    ("Any X,GROUP_CONCAT(TN) GROUPBY X ORDERBY XN WHERE T tags X, X name XN, T name TN, X is CWGroup",
-     '''SELECT _X.cw_eid, GROUP_CONCAT(_T.cw_name)
-FROM cw_CWGroup AS _X, cw_Tag AS _T, tags_relation AS rel_tags0
-WHERE rel_tags0.eid_from=_T.cw_eid AND rel_tags0.eid_to=_X.cw_eid
-GROUP BY _X.cw_eid,_X.cw_name
-ORDER BY _X.cw_name'''),
-
-    ("Any X,GROUP_CONCAT(TN) GROUPBY X ORDERBY XN WHERE T tags X, X name XN, T name TN",
-     '''SELECT T1.C0, GROUP_CONCAT(T1.C1) FROM (SELECT _X.cw_eid AS C0, _T.cw_name AS C1, _X.cw_name AS C2
-FROM cw_CWGroup AS _X, cw_Tag AS _T, tags_relation AS rel_tags0
-WHERE rel_tags0.eid_from=_T.cw_eid AND rel_tags0.eid_to=_X.cw_eid
-UNION ALL
-SELECT _X.cw_eid AS C0, _T.cw_name AS C1, _X.cw_name AS C2
-FROM cw_State AS _X, cw_Tag AS _T, tags_relation AS rel_tags0
-WHERE rel_tags0.eid_from=_T.cw_eid AND rel_tags0.eid_to=_X.cw_eid
-UNION ALL
-SELECT _X.cw_eid AS C0, _T.cw_name AS C1, _X.cw_name AS C2
-FROM cw_Tag AS _T, cw_Tag AS _X, tags_relation AS rel_tags0
-WHERE rel_tags0.eid_from=_T.cw_eid AND rel_tags0.eid_to=_X.cw_eid) AS T1
-GROUP BY T1.C0,T1.C2
-ORDER BY T1.C2'''),
-
-    ('Any 1 WHERE X in_group G, X is CWUser',
-     '''SELECT 1
-FROM in_group_relation AS rel_in_group0'''),
-
-    ('CWEType X WHERE X name CV, X description V HAVING NOT V=CV AND NOT V = "parent"',
-     '''SELECT _X.cw_eid
-FROM cw_CWEType AS _X
-WHERE NOT (EXISTS(SELECT 1 WHERE _X.cw_description=parent)) AND NOT (EXISTS(SELECT 1 WHERE _X.cw_description=_X.cw_name))'''),
-    ('CWEType X WHERE X name CV, X description V HAVING V!=CV AND V != "parent"',
-     '''SELECT _X.cw_eid
-FROM cw_CWEType AS _X
-WHERE _X.cw_description!=parent AND _X.cw_description!=_X.cw_name'''),
     ]
 
-
 MULTIPLE_SEL = [
     ("DISTINCT Any X,Y where P is Personne, P nom X , P prenom Y;",
      '''SELECT DISTINCT _P.cw_nom, _P.cw_prenom
@@ -691,7 +723,7 @@
     ('Any X WHERE NOT Y evaluee X, Y is CWUser',
      '''SELECT _X.cw_eid
 FROM cw_Note AS _X
-WHERE NOT (EXISTS(SELECT 1 FROM evaluee_relation AS rel_evaluee0, cw_CWUser AS _Y WHERE rel_evaluee0.eid_from=_Y.cw_eid AND rel_evaluee0.eid_to=_X.cw_eid))'''),
+WHERE NOT (EXISTS(SELECT 1 FROM cw_CWUser AS _Y, evaluee_relation AS rel_evaluee0 WHERE rel_evaluee0.eid_from=_Y.cw_eid AND rel_evaluee0.eid_to=_X.cw_eid))'''),
 
     ('Any X,RT WHERE X relation_type RT, NOT X is CWAttribute',
      '''SELECT _X.cw_eid, _X.cw_relation_type
@@ -712,12 +744,39 @@
      '''SELECT _S.cw_eid
 FROM cw_State AS _S
 WHERE NOT (EXISTS(SELECT 1 FROM cw_CWUser AS _X WHERE _X.cw_in_state=_S.cw_eid AND _S.cw_name=somename))'''),
+    ]
+
+HAS_TEXT_LG_INDEXER = [
+            ('Any X WHERE X has_text "toto tata"',
+             """SELECT DISTINCT appears0.uid
+FROM appears AS appears0
+WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata'))"""),
+            ('Personne X WHERE X has_text "toto tata"',
+             """SELECT DISTINCT _X.eid
+FROM appears AS appears0, entities AS _X
+WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata')) AND appears0.uid=_X.eid AND _X.type='Personne'"""),
+            ('Personne X WHERE X has_text %(text)s',
+             """SELECT DISTINCT _X.eid
+FROM appears AS appears0, entities AS _X
+WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('hip', 'hop', 'momo')) AND appears0.uid=_X.eid AND _X.type='Personne'
+"""),
+            ('Any X WHERE X has_text "toto tata", X name "tutu", X is IN (Basket,Folder)',
+             """SELECT DISTINCT _X.cw_eid
+FROM appears AS appears0, cw_Basket AS _X
+WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata')) AND appears0.uid=_X.cw_eid AND _X.cw_name=tutu
+UNION
+SELECT DISTINCT _X.cw_eid
+FROM appears AS appears0, cw_Folder AS _X
+WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata')) AND appears0.uid=_X.cw_eid AND _X.cw_name=tutu""")
+        ]
+
+
 
 # XXXFIXME fail
 #         ('Any X,RT WHERE X relation_type RT?, NOT X is CWAttribute',
 #      '''SELECT _X.cw_eid, _X.cw_relation_type
 # FROM cw_CWRelation AS _X'''),
-]
+
 
 OUTER_JOIN = [
     ('Any X,S WHERE X travaille S?',
@@ -757,7 +816,7 @@
     ('Any X WHERE X is Affaire, S is Societe, EXISTS(X owned_by U OR (X concerne S?, S owned_by U))',
      '''SELECT _X.cw_eid
 FROM cw_Affaire AS _X
-WHERE EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by0, cw_CWUser AS _U, cw_Affaire AS _A LEFT OUTER JOIN concerne_relation AS rel_concerne1 ON (rel_concerne1.eid_from=_A.cw_eid) LEFT OUTER JOIN cw_Societe AS _S ON (rel_concerne1.eid_to=_S.cw_eid), owned_by_relation AS rel_owned_by2 WHERE ((rel_owned_by0.eid_from=_A.cw_eid AND rel_owned_by0.eid_to=_U.cw_eid) OR (rel_owned_by2.eid_from=_S.cw_eid AND rel_owned_by2.eid_to=_U.cw_eid)) AND _X.cw_eid=_A.cw_eid)'''),
+WHERE EXISTS(SELECT 1 FROM cw_CWUser AS _U, owned_by_relation AS rel_owned_by0, owned_by_relation AS rel_owned_by2, cw_Affaire AS _A LEFT OUTER JOIN concerne_relation AS rel_concerne1 ON (rel_concerne1.eid_from=_A.cw_eid) LEFT OUTER JOIN cw_Societe AS _S ON (rel_concerne1.eid_to=_S.cw_eid) WHERE ((rel_owned_by0.eid_from=_A.cw_eid AND rel_owned_by0.eid_to=_U.cw_eid) OR (rel_owned_by2.eid_from=_S.cw_eid AND rel_owned_by2.eid_to=_U.cw_eid)) AND _X.cw_eid=_A.cw_eid)'''),
 
     ('Any C,M WHERE C travaille G?, G evaluee M?, G is Societe',
      '''SELECT _C.cw_eid, rel_evaluee1.eid_to
@@ -768,13 +827,18 @@
      'F name "read", F require_group E, U in_group E)), U eid 1',
      '''SELECT _A.cw_eid, rel_documented_by0.eid_to
 FROM cw_Affaire AS _A LEFT OUTER JOIN documented_by_relation AS rel_documented_by0 ON (rel_documented_by0.eid_from=_A.cw_eid)
-WHERE ((rel_documented_by0.eid_to IS NULL) OR (EXISTS(SELECT 1 FROM require_permission_relation AS rel_require_permission1, cw_CWPermission AS _F, require_group_relation AS rel_require_group2, in_group_relation AS rel_in_group3 WHERE rel_documented_by0.eid_to=rel_require_permission1.eid_from AND rel_require_permission1.eid_to=_F.cw_eid AND _F.cw_name=read AND rel_require_group2.eid_from=_F.cw_eid AND rel_in_group3.eid_to=rel_require_group2.eid_to AND rel_in_group3.eid_from=1)))'''),
+WHERE ((rel_documented_by0.eid_to IS NULL) OR (EXISTS(SELECT 1 FROM cw_CWPermission AS _F, in_group_relation AS rel_in_group3, require_group_relation AS rel_require_group2, require_permission_relation AS rel_require_permission1 WHERE rel_documented_by0.eid_to=rel_require_permission1.eid_from AND rel_require_permission1.eid_to=_F.cw_eid AND _F.cw_name=read AND rel_require_group2.eid_from=_F.cw_eid AND rel_in_group3.eid_to=rel_require_group2.eid_to AND rel_in_group3.eid_from=1)))'''),
 
     ("Any X WHERE X eid 12, P? connait X",
      '''SELECT _X.cw_eid
-FROM cw_Personne AS _X LEFT OUTER JOIN connait_relation AS rel_connait0 ON (rel_connait0.eid_to=12)
+FROM cw_Personne AS _X LEFT OUTER JOIN connait_relation AS rel_connait0 ON (rel_connait0.eid_to=_X.cw_eid)
 WHERE _X.cw_eid=12'''
     ),
+    ("Any P WHERE X eid 12, P? concerne X, X todo_by S",
+     '''SELECT rel_concerne0.eid_from
+FROM todo_by_relation AS rel_todo_by1 LEFT OUTER JOIN concerne_relation AS rel_concerne0 ON (rel_concerne0.eid_to=12)
+WHERE rel_todo_by1.eid_from=12'''
+    ),
 
     ('Any GN, TN ORDERBY GN WHERE T tags G?, T name TN, G name GN',
     '''
@@ -840,11 +904,11 @@
 FROM cw_CWUser AS _G LEFT OUTER JOIN cw_State AS _S ON (_G.cw_in_state=_S.cw_eid AND _S.cw_name=hop)
 UNION ALL
 SELECT _G.cw_eid AS C0, _S.cw_eid AS C1
-FROM cw_Note AS _G LEFT OUTER JOIN cw_State AS _S ON (_G.cw_in_state=_S.cw_eid AND _S.cw_name=hop) ) AS _T0 ON (rel_tags0.eid_to=_T0.C0)'''),
+FROM cw_Note AS _G LEFT OUTER JOIN cw_State AS _S ON (_G.cw_in_state=_S.cw_eid AND _S.cw_name=hop)) AS _T0 ON (rel_tags0.eid_to=_T0.C0)'''),
 
     ('Any O,AD  WHERE NOT S inline1 O, S eid 123, O todo_by AD?',
      '''SELECT _O.cw_eid, rel_todo_by0.eid_to
-FROM cw_Affaire AS _O LEFT OUTER JOIN todo_by_relation AS rel_todo_by0 ON (rel_todo_by0.eid_from=_O.cw_eid), cw_Note AS _S
+FROM cw_Note AS _S, cw_Affaire AS _O LEFT OUTER JOIN todo_by_relation AS rel_todo_by0 ON (rel_todo_by0.eid_from=_O.cw_eid)
 WHERE (_S.cw_inline1 IS NULL OR _S.cw_inline1!=_O.cw_eid) AND _S.cw_eid=123''')
     ]
 
@@ -965,20 +1029,22 @@
 WHERE (rel_connait0.eid_from=_X.cw_eid AND rel_connait0.eid_to=_P.cw_eid OR rel_connait0.eid_to=_X.cw_eid AND rel_connait0.eid_from=_P.cw_eid) AND _P.cw_nom=nom'''
     ),
 
-    ('Any X ORDERBY X DESC LIMIT 9 WHERE E eid 0, E connait X',
+    ('DISTINCT Any P WHERE P connait S OR S connait P, S nom "chouette"',
+     '''SELECT DISTINCT _P.cw_eid
+FROM connait_relation AS rel_connait0, cw_Personne AS _P, cw_Personne AS _S
+WHERE (rel_connait0.eid_from=_P.cw_eid AND rel_connait0.eid_to=_S.cw_eid OR rel_connait0.eid_to=_P.cw_eid AND rel_connait0.eid_from=_S.cw_eid) AND _S.cw_nom=chouette'''
+     )
+    ]
+
+SYMMETRIC_WITH_LIMIT = [
+    ('Any X ORDERBY X DESC LIMIT 9 WHERE E eid 0, E connait X',
     '''SELECT DISTINCT _X.cw_eid
 FROM connait_relation AS rel_connait0, cw_Personne AS _X
 WHERE (rel_connait0.eid_from=0 AND rel_connait0.eid_to=_X.cw_eid OR rel_connait0.eid_to=0 AND rel_connait0.eid_from=_X.cw_eid)
 ORDER BY 1 DESC
 LIMIT 9'''
      ),
-
-    ('DISTINCT Any P WHERE P connait S OR S connait P, S nom "chouette"',
-     '''SELECT DISTINCT _P.cw_eid
-FROM connait_relation AS rel_connait0, cw_Personne AS _P, cw_Personne AS _S
-WHERE (rel_connait0.eid_from=_P.cw_eid AND rel_connait0.eid_to=_S.cw_eid OR rel_connait0.eid_to=_P.cw_eid AND rel_connait0.eid_from=_S.cw_eid) AND _S.cw_nom=chouette'''
-     )
-    ]
+]
 
 INLINE = [
 
@@ -1060,7 +1126,7 @@
     ('Any PN WHERE NOT X travaille S, X nom PN, S is IN(Division, Societe)',
      '''SELECT _X.cw_nom
 FROM cw_Personne AS _X
-WHERE NOT (EXISTS(SELECT 1 FROM travaille_relation AS rel_travaille0, cw_Division AS _S WHERE rel_travaille0.eid_from=_X.cw_eid AND rel_travaille0.eid_to=_S.cw_eid UNION SELECT 1 FROM travaille_relation AS rel_travaille1, cw_Societe AS _S WHERE rel_travaille1.eid_from=_X.cw_eid AND rel_travaille1.eid_to=_S.cw_eid))'''),
+WHERE NOT (EXISTS(SELECT 1 FROM cw_Division AS _S, travaille_relation AS rel_travaille0 WHERE rel_travaille0.eid_from=_X.cw_eid AND rel_travaille0.eid_to=_S.cw_eid UNION SELECT 1 FROM cw_Societe AS _S, travaille_relation AS rel_travaille1 WHERE rel_travaille1.eid_from=_X.cw_eid AND rel_travaille1.eid_to=_S.cw_eid))'''),
 
     ('Any PN WHERE NOT X travaille S, S nom PN, S is IN(Division, Societe)',
      '''SELECT _S.cw_nom
@@ -1151,6 +1217,7 @@
                     print r, nargs
                 print '!='
                 print sql.strip()
+            print 'RQL:', rql
             raise
 
     def _parse(self, rqls):
@@ -1244,11 +1311,11 @@
             yield t
 
     def test_basic_parse(self):
-        for t in self._parse(BASIC):
+        for t in self._parse(BASIC + BASIC_WITH_LIMIT):
             yield t
 
     def test_advanced_parse(self):
-        for t in self._parse(ADVANCED):
+        for t in self._parse(ADVANCED + ADVANCED_WITH_LIMIT_OR_ORDERBY + ADVANCED_WITH_GROUP_CONCAT):
             yield t
 
     def test_outer_join_parse(self):
@@ -1341,7 +1408,7 @@
 UNION ALL
 (SELECT _X.cw_eid AS C0
 FROM cw_Affaire AS _X
-WHERE ((EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by0 WHERE rel_owned_by0.eid_from=_X.cw_eid AND rel_owned_by0.eid_to=1)) OR (((EXISTS(SELECT 1 FROM cw_Affaire AS _D LEFT OUTER JOIN concerne_relation AS rel_concerne1 ON (rel_concerne1.eid_from=_D.cw_eid) LEFT OUTER JOIN cw_Note AS _B ON (rel_concerne1.eid_to=_B.cw_eid), owned_by_relation AS rel_owned_by2 WHERE rel_owned_by2.eid_from=_B.cw_eid AND rel_owned_by2.eid_to=1 AND _X.cw_eid=_D.cw_eid)) OR (EXISTS(SELECT 1 FROM cw_Affaire AS _F LEFT OUTER JOIN concerne_relation AS rel_concerne3 ON (rel_concerne3.eid_from=_F.cw_eid) LEFT OUTER JOIN cw_Societe AS _E ON (rel_concerne3.eid_to=_E.cw_eid), owned_by_relation AS rel_owned_by4 WHERE rel_owned_by4.eid_from=_E.cw_eid AND rel_owned_by4.eid_to=1 AND _X.cw_eid=_F.cw_eid))))))) AS _T0, cw_CWEType AS _ET, is_relation AS rel_is0
+WHERE ((EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by0 WHERE rel_owned_by0.eid_from=_X.cw_eid AND rel_owned_by0.eid_to=1)) OR (((EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by2, cw_Affaire AS _D LEFT OUTER JOIN concerne_relation AS rel_concerne1 ON (rel_concerne1.eid_from=_D.cw_eid) LEFT OUTER JOIN cw_Note AS _B ON (rel_concerne1.eid_to=_B.cw_eid) WHERE rel_owned_by2.eid_from=_B.cw_eid AND rel_owned_by2.eid_to=1 AND _X.cw_eid=_D.cw_eid)) OR (EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by4, cw_Affaire AS _F LEFT OUTER JOIN concerne_relation AS rel_concerne3 ON (rel_concerne3.eid_from=_F.cw_eid) LEFT OUTER JOIN cw_Societe AS _E ON (rel_concerne3.eid_to=_E.cw_eid) WHERE rel_owned_by4.eid_from=_E.cw_eid AND rel_owned_by4.eid_to=1 AND _X.cw_eid=_F.cw_eid))))))) AS _T0, cw_CWEType AS _ET, is_relation AS rel_is0
 WHERE rel_is0.eid_from=_T0.C0 AND rel_is0.eid_to=_ET.cw_eid
 GROUP BY _ET.cw_name'''),
             )):
@@ -1357,7 +1424,7 @@
         self.assertRaises(BadRQLQuery, self.o.generate, rqlst)
 
     def test_symmetric(self):
-        for t in self._parse(SYMMETRIC):
+        for t in self._parse(SYMMETRIC + SYMMETRIC_WITH_LIMIT):
             yield t
 
     def test_inline(self):
@@ -1460,7 +1527,7 @@
     def test_ambigous_exists_no_from_clause(self):
         self._check('Any COUNT(U) WHERE U eid 1, EXISTS (P owned_by U, P is IN (Note, Affaire))',
                     '''SELECT COUNT(1)
-WHERE EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by0, cw_Affaire AS _P WHERE rel_owned_by0.eid_from=_P.cw_eid AND rel_owned_by0.eid_to=1 UNION SELECT 1 FROM owned_by_relation AS rel_owned_by1, cw_Note AS _P WHERE rel_owned_by1.eid_from=_P.cw_eid AND rel_owned_by1.eid_to=1)''')
+WHERE EXISTS(SELECT 1 FROM cw_Affaire AS _P, owned_by_relation AS rel_owned_by0 WHERE rel_owned_by0.eid_from=_P.cw_eid AND rel_owned_by0.eid_to=1 UNION SELECT 1 FROM cw_Note AS _P, owned_by_relation AS rel_owned_by1 WHERE rel_owned_by1.eid_from=_P.cw_eid AND rel_owned_by1.eid_to=1)''')
 
     def test_attr_map_sqlcb(self):
         def generate_ref(gen, linkedvar, rel):
@@ -1509,6 +1576,131 @@
                     '''SELECT 1
 WHERE NOT (EXISTS(SELECT 1 FROM in_group_relation AS rel_in_group0))''')
 
+    def test_nonregr_subquery_missing_join(self):
+        self._check('Any COUNT(P1148),G GROUPBY G '
+                    'WHERE G owned_by D, D eid 1122, K1148 bookmarked_by P1148, '
+                    'K1148 eid 1148, P1148? in_group G',
+                    '''SELECT COUNT(rel_bookmarked_by1.eid_to), _G.cw_eid
+FROM owned_by_relation AS rel_owned_by0, cw_CWGroup AS _G LEFT OUTER JOIN in_group_relation AS rel_in_group2 ON (rel_in_group2.eid_to=_G.cw_eid) LEFT OUTER JOIN bookmarked_by_relation AS rel_bookmarked_by1 ON (rel_in_group2.eid_from=rel_bookmarked_by1.eid_to)
+WHERE rel_owned_by0.eid_from=_G.cw_eid AND rel_owned_by0.eid_to=1122 AND rel_bookmarked_by1.eid_from=1148
+GROUP BY _G.cw_eid'''
+                    )
+
+    def test_nonregr_subquery_missing_join2(self):
+        self._check('Any COUNT(P1148),G GROUPBY G '
+                    'WHERE G owned_by D, D eid 1122, K1148 bookmarked_by P1148?, '
+                    'K1148 eid 1148, P1148? in_group G',
+                    '''SELECT COUNT(rel_bookmarked_by1.eid_to), _G.cw_eid
+FROM owned_by_relation AS rel_owned_by0, cw_CWGroup AS _G LEFT OUTER JOIN in_group_relation AS rel_in_group2 ON (rel_in_group2.eid_to=_G.cw_eid) LEFT OUTER JOIN bookmarked_by_relation AS rel_bookmarked_by1 ON (rel_bookmarked_by1.eid_from=1148 AND rel_in_group2.eid_from=rel_bookmarked_by1.eid_to)
+WHERE rel_owned_by0.eid_from=_G.cw_eid AND rel_owned_by0.eid_to=1122
+GROUP BY _G.cw_eid''')
+
+
+class SqlServer2005SQLGeneratorTC(PostgresSQLGeneratorTC):
+    backend = 'sqlserver2005'
+    def _norm_sql(self, sql):
+        return sql.strip().replace(' SUBSTR', ' SUBSTRING').replace(' || ', ' + ').replace(' ILIKE ', ' LIKE ')
+
+    def test_has_text(self):
+        for t in self._parse(HAS_TEXT_LG_INDEXER):
+            yield t
+
+    def test_or_having_fake_terms(self):
+        self._check('Any X WHERE X is CWUser, X creation_date D HAVING YEAR(D) = "2010" OR D = NULL',
+                    '''SELECT _X.cw_eid
+FROM cw_CWUser AS _X
+WHERE ((YEAR(_X.cw_creation_date)=2010) OR (_X.cw_creation_date IS NULL))''')
+
+    def test_date_extraction(self):
+        self._check("Any MONTH(D) WHERE P is Personne, P creation_date D",
+                    '''SELECT MONTH(_P.cw_creation_date)
+FROM cw_Personne AS _P''')
+
+    def test_symmetric(self):
+        for t in self._parse(SYMMETRIC):
+            yield t
+
+    def test_basic_parse(self):
+        for t in self._parse(BASIC):# + BASIC_WITH_LIMIT):
+            yield t
+
+    def test_advanced_parse(self):
+        for t in self._parse(ADVANCED):# + ADVANCED_WITH_LIMIT_OR_ORDERBY):
+            yield t
+
+    def test_limit_offset(self):
+        WITH_LIMIT = [
+    ("Personne P LIMIT 20 OFFSET 10",
+             '''WITH orderedrows AS (
+SELECT
+_L01
+, ROW_NUMBER() OVER (ORDER BY _L01) AS __RowNumber
+FROM (
+SELECT _P.cw_eid AS _L01 FROM  cw_Personne AS _P
+) AS _SQ1 )
+SELECT
+_L01
+FROM orderedrows WHERE
+__RowNumber <= 30 AND __RowNumber > 10
+ '''),
+
+    ('Any COUNT(S),CS GROUPBY CS ORDERBY 1 DESC LIMIT 10 WHERE S is Affaire, C is Societe, S concerne C, C nom CS, (EXISTS(S owned_by 1)) OR (EXISTS(S documented_by N, N title "published"))',
+     '''WITH orderedrows AS (
+SELECT
+_L01, _L02
+, ROW_NUMBER() OVER (ORDER BY _L01 DESC) AS __RowNumber
+FROM (
+SELECT COUNT(rel_concerne0.eid_from) AS _L01, _C.cw_nom AS _L02 FROM  concerne_relation AS rel_concerne0, cw_Societe AS _C
+WHERE rel_concerne0.eid_to=_C.cw_eid AND ((EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by1 WHERE rel_concerne0.eid_from=rel_owned_by1.eid_from AND rel_owned_by1.eid_to=1)) OR (EXISTS(SELECT 1 FROM cw_Card AS _N, documented_by_relation AS rel_documented_by2 WHERE rel_concerne0.eid_from=rel_documented_by2.eid_from AND rel_documented_by2.eid_to=_N.cw_eid AND _N.cw_title=published)))
+GROUP BY _C.cw_nom
+) AS _SQ1 )
+SELECT
+_L01, _L02
+FROM orderedrows WHERE
+__RowNumber <= 10
+     '''),
+
+    ('DISTINCT Any MAX(X)+MIN(LENGTH(D)), N GROUPBY N ORDERBY 2, DF WHERE X data_name N, X data D, X data_format DF;',
+     '''SELECT T1.C0,T1.C1 FROM (SELECT DISTINCT (MAX(_X.cw_eid) + MIN(LENGTH(_X.cw_data))) AS C0, _X.cw_data_name AS C1, _X.cw_data_format AS C2
+FROM cw_File AS _X
+GROUP BY _X.cw_data_name,_X.cw_data_format) AS T1
+ORDER BY T1.C1,T1.C2
+'''),
+
+
+    ('DISTINCT Any X ORDERBY Y WHERE B bookmarked_by X, X login Y',
+     '''SELECT T1.C0 FROM (SELECT DISTINCT _X.cw_eid AS C0, _X.cw_login AS C1
+FROM bookmarked_by_relation AS rel_bookmarked_by0, cw_CWUser AS _X
+WHERE rel_bookmarked_by0.eid_to=_X.cw_eid) AS T1
+ORDER BY T1.C1
+ '''),
+
+    ('DISTINCT Any X ORDERBY SN WHERE X in_state S, S name SN',
+     '''SELECT T1.C0 FROM (SELECT DISTINCT _X.cw_eid AS C0, _S.cw_name AS C1
+FROM cw_Affaire AS _X, cw_State AS _S
+WHERE _X.cw_in_state=_S.cw_eid
+UNION
+SELECT DISTINCT _X.cw_eid AS C0, _S.cw_name AS C1
+FROM cw_CWUser AS _X, cw_State AS _S
+WHERE _X.cw_in_state=_S.cw_eid
+UNION
+SELECT DISTINCT _X.cw_eid AS C0, _S.cw_name AS C1
+FROM cw_Note AS _X, cw_State AS _S
+WHERE _X.cw_in_state=_S.cw_eid) AS T1
+ORDER BY T1.C1'''),
+
+    ('Any O,AA,AB,AC ORDERBY AC DESC '
+     'WHERE NOT S use_email O, S eid 1, O is EmailAddress, O address AA, O alias AB, O modification_date AC, '
+     'EXISTS(A use_email O, EXISTS(A identity B, NOT B in_group D, D name "guests", D is CWGroup), A is CWUser), B eid 2',
+     '''
+SELECT _O.cw_eid, _O.cw_address, _O.cw_alias, _O.cw_modification_date
+FROM cw_EmailAddress AS _O
+WHERE NOT (EXISTS(SELECT 1 FROM use_email_relation AS rel_use_email0 WHERE rel_use_email0.eid_from=1 AND rel_use_email0.eid_to=_O.cw_eid)) AND EXISTS(SELECT 1 FROM use_email_relation AS rel_use_email1 WHERE rel_use_email1.eid_to=_O.cw_eid AND EXISTS(SELECT 1 FROM cw_CWGroup AS _D WHERE rel_use_email1.eid_from=2 AND NOT (EXISTS(SELECT 1 FROM in_group_relation AS rel_in_group2 WHERE rel_in_group2.eid_from=2 AND rel_in_group2.eid_to=_D.cw_eid)) AND _D.cw_name=guests))
+ORDER BY 4 DESC'''),
+            ]
+        for t in self._parse(WITH_LIMIT):# + ADVANCED_WITH_LIMIT_OR_ORDERBY):
+            yield t
+
 
 
 class SqliteSQLGeneratorTC(PostgresSQLGeneratorTC):
@@ -1708,7 +1900,7 @@
         self._check('Any COUNT(U) WHERE U eid 1, EXISTS (P owned_by U, P is IN (Note, Affaire))',
                     '''SELECT COUNT(1)
 FROM (SELECT 1) AS _T
-WHERE EXISTS(SELECT 1 FROM owned_by_relation AS rel_owned_by0, cw_Affaire AS _P WHERE rel_owned_by0.eid_from=_P.cw_eid AND rel_owned_by0.eid_to=1 UNION SELECT 1 FROM owned_by_relation AS rel_owned_by1, cw_Note AS _P WHERE rel_owned_by1.eid_from=_P.cw_eid AND rel_owned_by1.eid_to=1)''')
+WHERE EXISTS(SELECT 1 FROM cw_Affaire AS _P, owned_by_relation AS rel_owned_by0 WHERE rel_owned_by0.eid_from=_P.cw_eid AND rel_owned_by0.eid_to=1 UNION SELECT 1 FROM cw_Note AS _P, owned_by_relation AS rel_owned_by1 WHERE rel_owned_by1.eid_from=_P.cw_eid AND rel_owned_by1.eid_to=1)''')
 
     def test_groupby_multiple_outerjoins(self):
         self._check('Any A,U,P,group_concat(TN) GROUPBY A,U,P WHERE A is Affaire, A concerne N, N todo_by U?, T? tags A, T name TN, A todo_by P?',
--- a/server/test/unittest_ssplanner.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/server/test/unittest_ssplanner.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/sobjects/parsers.py	Wed Apr 27 09:54:22 2011 +0200
@@ -0,0 +1,382 @@
+# copyright 2010-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+"""datafeed parser for xml generated by cubicweb"""
+
+import urllib2
+import StringIO
+import os.path as osp
+from cookielib import CookieJar
+from datetime import datetime, timedelta
+
+from lxml import etree
+
+from logilab.common.date import todate, totime
+from logilab.common.textutils import splitstrip, text_to_dict
+
+from yams.constraints import BASE_CONVERTERS
+from yams.schema import role_name as rn
+
+from cubicweb import ValidationError, typed_eid
+from cubicweb.server.sources import datafeed
+
+def ensure_str_keys(dct):
+    """ensure dictionary keys are plain str so the dict can safely be used as **kwargs"""
+    for key in list(dct):
+        dct[str(key)] = dct.pop(key)
+
+# see cubicweb.web.views.xmlrss.SERIALIZERS
+DEFAULT_CONVERTERS = BASE_CONVERTERS.copy()
+DEFAULT_CONVERTERS['String'] = unicode
+DEFAULT_CONVERTERS['Password'] = lambda x: x.encode('utf8')
+def convert_date(ustr):
+    return todate(datetime.strptime(ustr, '%Y-%m-%d'))
+DEFAULT_CONVERTERS['Date'] = convert_date
+def convert_datetime(ustr):
+    if '.' in ustr: # assume %Y-%m-%d %H:%M:%S.mmmmmm
+        ustr = ustr.split('.',1)[0]
+    return datetime.strptime(ustr, '%Y-%m-%d %H:%M:%S')
+DEFAULT_CONVERTERS['Datetime'] = convert_datetime
+def convert_time(ustr):
+    return totime(datetime.strptime(ustr, '%H:%M:%S'))
+DEFAULT_CONVERTERS['Time'] = convert_time
+def convert_interval(ustr):
+    return timedelta(seconds=int(ustr))
+DEFAULT_CONVERTERS['Interval'] = convert_interval
+
+# use a cookie enabled opener to use session cookie if any
+_OPENER = urllib2.build_opener()
+try:
+    from logilab.common import urllib2ext
+    _OPENER.add_handler(urllib2ext.HTTPGssapiAuthHandler())
+except ImportError: # python-kerberos not available
+    pass
+_OPENER.add_handler(urllib2.HTTPCookieProcessor(CookieJar()))
+
+def extract_typed_attrs(eschema, stringdict, converters=DEFAULT_CONVERTERS):
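+    """return a dict of `eschema` final attributes found in `stringdict`, converted to their python type"""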
+    typeddict = {}
+    for rschema in eschema.subject_relations():
+        if rschema.final and rschema in stringdict:
+            if rschema == 'eid':
+                continue
+            attrtype = eschema.destination(rschema)
+            typeddict[rschema.type] = converters[attrtype](stringdict[rschema])
+    return typeddict
+
+def _entity_etree(parent):
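+    """yield (entity attributes dict, relations dict) 2-tuples for each entity element under `parent`"""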
+    for node in list(parent):
+        try:
+            item = {'cwtype': unicode(node.tag),
+                    'cwuri': node.attrib['cwuri'],
+                    'eid': typed_eid(node.attrib['eid']),
+                    }
+        except KeyError:
+            # cw < 3.11 compat mode XXX
+            item = {'cwtype': unicode(node.tag),
+                    'cwuri': node.find('cwuri').text,
+                    'eid': typed_eid(node.find('eid').text),
+                    }
+        rels = {}
+        for child in node:
+            role = child.get('role')
+            if role:
+                # relation
+                related = rels.setdefault(role, {}).setdefault(child.tag, [])
+                related += [ritem for ritem, _ in _entity_etree(child)]
+            else:
+                # attribute
+                item[child.tag] = unicode(child.text)
+        yield item, rels
+
+def build_search_rql(etype, attrs):
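+    """return an RQL query searching for `etype` entities matching the given attributes"""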
+    restrictions = []
+    for attr in attrs:
+        restrictions.append('X %(attr)s %%(%(attr)s)s' % {'attr': attr})
+    return 'Any X WHERE X is %s, %s' % (etype, ','.join(restrictions))
+
+def rtype_role_rql(rtype, role):
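+    """return an RQL fragment relating X (bound to %(x)s) to Y through `rtype`, according to `role`"""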
+    if role == 'object':
+        return 'Y %s X WHERE X eid %%(x)s' % rtype
+    else:
+        return 'X %s Y WHERE X eid %%(x)s' % rtype
+
+
+def _check_no_option(action, options, eid, _):
+    if options:
+        msg = _("'%s' action doesn't take any options") % action
+        raise ValidationError(eid, {rn('options', 'subject'): msg})
+
+def _check_linkattr_option(action, options, eid, _):
+    if 'linkattr' not in options:
+        msg = _("'%s' action requires a 'linkattr' option") % action
+        raise ValidationError(eid, {rn('options', 'subject'): msg})
+
+
+class CWEntityXMLParser(datafeed.DataFeedParser):
+    """datafeed parser for the 'xml' entity view"""
+    __regid__ = 'cw.entityxml'
+
+    action_options = {
+        'copy': _check_no_option,
+        'link-or-create': _check_linkattr_option,
+        'link': _check_linkattr_option,
+        }
+
+    def __init__(self, *args, **kwargs):
+        super(CWEntityXMLParser, self).__init__(*args, **kwargs)
+        self.action_methods = {
+            'copy': self.related_copy,
+            'link-or-create': self.related_link_or_create,
+            'link': self.related_link,
+            }
+
+    # mapping handling #########################################################
+
+    def add_schema_config(self, schemacfg, checkonly=False):
+        """added CWSourceSchemaConfig, modify mapping accordingly"""
+        _ = self._cw._
+        try:
+            rtype = schemacfg.schema.rtype.name
+        except AttributeError:
+            msg = _("entity and relation types can't be mapped, only attributes "
+                    "or relations")
+            raise ValidationError(schemacfg.eid, {rn('cw_for_schema', 'subject'): msg})
+        if schemacfg.options:
+            options = text_to_dict(schemacfg.options)
+        else:
+            options = {}
+        try:
+            role = options.pop('role')
+            if role not in ('subject', 'object'):
+                raise KeyError
+        except KeyError:
+            msg = _('"role=subject" or "role=object" must be specified in options')
+            raise ValidationError(schemacfg.eid, {rn('options', 'subject'): msg})
+        try:
+            action = options.pop('action')
+            self.action_options[action](action, options, schemacfg.eid, _)
+        except KeyError:
+            msg = _('"action" must be specified in options; allowed values are '
+                    '%s') % ', '.join(self.action_methods)
+            raise ValidationError(schemacfg.eid, {rn('options', 'subject'): msg})
+        if not checkonly:
+            if role == 'subject':
+                etype = schemacfg.schema.stype.name
+                ttype = schemacfg.schema.otype.name
+            else:
+                etype = schemacfg.schema.otype.name
+                ttype = schemacfg.schema.stype.name
+            etyperules = self.source.mapping.setdefault(etype, {})
+            etyperules.setdefault((rtype, role, action), []).append(
+                (ttype, options) )
+            self.source.mapping_idx[schemacfg.eid] = (
+                etype, rtype, role, action, ttype)
+
+    def del_schema_config(self, schemacfg, checkonly=False):
+        """deleted CWSourceSchemaConfig, modify mapping accordingly"""
+        etype, rtype, role, action, ttype = self.source.mapping_idx[schemacfg.eid]
+        rules = self.source.mapping[etype][(rtype, role, action)]
+        rules = [x for x in rules if x[0] != ttype]
+        if rules:
+            self.source.mapping[etype][(rtype, role, action)] = rules
+        else:
+            del self.source.mapping[etype][(rtype, role, action)]
+
+    # import handling ##########################################################
+
+    def process(self, url, partialcommit=True):
+        """IDataFeedParser main entry point"""
+        # XXX suppression support according to source configuration. If set, get
+        # all cwuri of entities from this source, and compare with newly
+        # imported ones
+        error = False
+        for item, rels in self.parse(url):
+            cwuri = item['cwuri']
+            try:
+                self.process_item(item, rels)
+                if partialcommit:
+                    # commit+set_pool instead of commit(reset_pool=False) to give
+                    # others a chance to get our pool
+                    self._cw.commit()
+                    self._cw.set_pool()
+            except ValidationError, exc:
+                if partialcommit:
+                    self.source.error('Skipping %s because of validation error %s' % (cwuri, exc))
+                    self._cw.rollback()
+                    self._cw.set_pool()
+                    error = True
+                else:
+                    raise
+        return error
+
+    def parse(self, url):
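+        """return an iterator on entity items parsed from `url`, which may also directly hold xml content"""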
+        if not url.startswith('http'):
+            stream = StringIO.StringIO(url)
+        else:
+            for mappedurl in HOST_MAPPING:
+                if url.startswith(mappedurl):
+                    url = url.replace(mappedurl, HOST_MAPPING[mappedurl], 1)
+                    break
+            self.source.info('GET %s', url)
+            stream = _OPENER.open(url)
+        return _entity_etree(etree.parse(stream).getroot())
+
+    def process_one(self, url):
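+        """import the first entity found at `url` and return it"""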
+        # XXX assert len(root.children) == 1
+        for item, rels in self.parse(url):
+            return self.process_item(item, rels)
+
+    def process_item(self, item, rels):
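+        """get or create the entity described by `item`, update its attributes and apply mapped relation actions"""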
+        entity = self.extid2entity(str(item.pop('cwuri')),
+                                   item.pop('cwtype'),
+                                   item=item)
+        if not (self.created_during_pull(entity)
+                or self.updated_during_pull(entity)):
+            self.notify_updated(entity)
+            item.pop('eid')
+            # XXX check modification date
+            attrs = extract_typed_attrs(entity.e_schema, item)
+            entity.set_attributes(**attrs)
+        for (rtype, role, action), rules in self.source.mapping.get(entity.__regid__, {}).iteritems():
+            try:
+                rel = rels[role][rtype]
+            except KeyError:
+                self.source.error('relation %s-%s doesn\'t seem to be exported in %s xml',
+                                  rtype, role, entity.__regid__)
+                continue
+            try:
+                actionmethod = self.action_methods[action]
+            except KeyError:
+                raise Exception('Unknown action %s' % action)
+            actionmethod(entity, rtype, role, rel, rules)
+        return entity
+
+    def before_entity_copy(self, entity, sourceparams):
+        """IDataFeedParser callback"""
+        attrs = extract_typed_attrs(entity.e_schema, sourceparams['item'])
+        entity.cw_edited.update(attrs)
+
+    def related_copy(self, entity, rtype, role, value, rules):
+        """implementation of 'copy' action
+
+        Takes no options.
+        """
+        assert not any(x[1] for x in rules), "'copy' action takes no options"
+        ttypes = set([x[0] for x in rules])
+        value = [item for item in value if item['cwtype'] in ttypes]
+        eids = [] # local eids
+        if not value:
+            self._clear_relation(entity, rtype, role, ttypes)
+            return
+        for item in value:
+            eids.append(self.process_one(self._complete_url(item)).eid)
+        self._set_relation(entity, rtype, role, eids)
+
+    def related_link(self, entity, rtype, role, value, rules):
+        """implementation of 'link' action
+
+        Requires a 'linkattr' option to control the search of the linked entity.
+        """
+        for ttype, options in rules:
+            assert 'linkattr' in options, (
+                "'link-or-create' action require a list of attributes used to "
+                "search if the entity already exists")
+            self._related_link(entity, rtype, role, ttype, value, [options['linkattr']],
+                               self._log_not_found)
+
+    def related_link_or_create(self, entity, rtype, role, value, rules):
+        """implementation of 'link-or-create' action
+
+        Requires a 'linkattr' option to control the search of the linked entity.
+        """
+        for ttype, options in rules:
+            assert 'linkattr' in options, (
+                "'link-or-create' action require a list of attributes used to "
+                "search if the entity already exists")
+            self._related_link(entity, rtype, role, ttype, value, [options['linkattr']],
+                               self._create_not_found)
+
+    def _log_not_found(self, entity, rtype, role, ritem, searchvalues):
+        self.source.error('cannot find %s entity with attributes %s',
+                          ritem['cwtype'], searchvalues)
+
+    def _create_not_found(self, entity, rtype, role, ritem, searchvalues):
+        ensure_str_keys(searchvalues) # XXX necessary with python < 2.6
+        return self._cw.create_entity(ritem['cwtype'], **searchvalues).eid
+
+    def _related_link(self, entity, rtype, role, ttype, value, searchattrs,
+                      notfound_callback):
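+        """link `entity` through `rtype` to `ttype` entities matching `searchattrs`; call `notfound_callback` when no matching entity is found"""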
+        eids = [] # local eids
+        for item in value:
+            if item['cwtype'] != ttype:
+                continue
+            if not all(attr in item for attr in searchattrs):
+                # need to fetch related entity's xml
+                ritems = list(self.parse(self._complete_url(item, False)))
+                assert len(ritems) == 1, 'unexpected xml'
+                ritem = ritems[0][0] # list of (item, rels) 2-tuples
+                assert all(attr in ritem for attr in searchattrs), \
+                       'missing attribute, got %s but expected keys %s' % (ritem, searchattrs)
+            else:
+                ritem = item
+            kwargs = dict((attr, ritem[attr]) for attr in searchattrs)
+            rql = build_search_rql(item['cwtype'], kwargs)
+            rset = self._cw.execute(rql, kwargs)
+            if rset:
+                assert len(rset) == 1
+                eids.append(rset[0][0])
+            else:
+                eid = notfound_callback(entity, rtype, role, ritem, kwargs)
+                if eid is not None:
+                    eids.append(eid)
+        if not eids:
+            self._clear_relation(entity, rtype, role, (ttype,))
+        else:
+            self._set_relation(entity, rtype, role, eids)
+
+    def _complete_url(self, item, add_relations=True):
+        """return the xml view url for `item`, optionally asking for mapped relations as well"""
+        itemurl = item['cwuri'] + '?vid=xml'
+        if add_relations:
+            for rtype, role, _ in self.source.mapping.get(item['cwtype'], ()):
+                itemurl += '&relation=%s_%s' % (rtype, role)
+        return itemurl
+
+    def _clear_relation(self, entity, rtype, role, ttypes):
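+        """remove existing `rtype` relations to entities of the given types, unless `entity` was just created during this pull"""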
+        if entity.eid not in self.stats['created']:
+            if len(ttypes) > 1:
+                typerestr = ', Y is IN(%s)' % ','.join(ttypes)
+            else:
+                typerestr = ', Y is %s' % ','.join(ttypes)
+            self._cw.execute('DELETE ' + rtype_role_rql(rtype, role) + typerestr,
+                             {'x': entity.eid})
+
+    def _set_relation(self, entity, rtype, role, eids):
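+        """make `entity` related through `rtype` to exactly the entities whose eids are given"""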
+        eidstr = ','.join(str(eid) for eid in eids)
+        rql = rtype_role_rql(rtype, role)
+        self._cw.execute('DELETE %s, NOT Y eid IN (%s)' % (rql, eidstr),
+                         {'x': entity.eid})
+        if role == 'object':
+            rql = 'SET %s, Y eid IN (%s), NOT Y %s X' % (rql, eidstr, rtype)
+        else:
+            rql = 'SET %s, Y eid IN (%s), NOT X %s Y' % (rql, eidstr, rtype)
+        self._cw.execute(rql, {'x': entity.eid})
+
+def registration_callback(vreg):
+    vreg.register_all(globals().values(), __name__)
+    global HOST_MAPPING
+    HOST_MAPPING = {}
+    if vreg.config.apphome:
+        host_mapping_file = osp.join(vreg.config.apphome, 'hostmapping.py')
+        if osp.exists(host_mapping_file):
+            HOST_MAPPING = eval(file(host_mapping_file).read())
+            vreg.info('using host mapping %s from %s', HOST_MAPPING, host_mapping_file)
--- a/sobjects/test/data/schema.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/sobjects/test/data/schema.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -15,10 +15,7 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""
-
-"""
-from yams.buildobjs import RelationDefinition
+from yams.buildobjs import EntityType, RelationDefinition, String, SubjectRelation
 
 class comments(RelationDefinition):
     subject = 'Comment'
@@ -26,3 +23,6 @@
     cardinality='1*'
     composite='object'
 
+class Tag(EntityType):
+    name = String(unique=True)
+    tags = SubjectRelation('CWUser')
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/sobjects/test/unittest_parsers.py	Wed Apr 27 09:54:22 2011 +0200
@@ -0,0 +1,170 @@
+# copyright 2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+
+from datetime import datetime
+
+from cubicweb.devtools.testlib import CubicWebTC
+
+from cubicweb.sobjects.parsers import CWEntityXMLParser
+
+orig_parse = CWEntityXMLParser.parse
+
+def parse(self, url):
+    try:
+        url = RELATEDXML[url.split('?')[0]]
+    except KeyError:
+        pass
+    return orig_parse(self, url)
+
+def setUpModule():
+    CWEntityXMLParser.parse = parse
+
+def tearDownModule():
+    CWEntityXMLParser.parse = orig_parse
+
+
+BASEXML = ''.join(u'''
+<rset size="1">
+ <CWUser eid="5" cwuri="http://pouet.org/5">
+  <login>sthenault</login>
+  <upassword>toto</upassword>
+  <last_login_time>2011-01-25 14:14:06</last_login_time>
+  <creation_date>2010-01-22 10:27:59</creation_date>
+  <modification_date>2011-01-25 14:14:06</modification_date>
+  <use_email role="subject">
+    <EmailAddress cwuri="http://pouet.org/6" eid="6"/>
+  </use_email>
+  <in_group role="subject">
+    <CWGroup cwuri="http://pouet.org/7" eid="7"/>
+    <CWGroup cwuri="http://pouet.org/8" eid="8"/>
+  </in_group>
+  <tags role="object">
+    <Tag cwuri="http://pouet.org/9" eid="9"/>
+    <Tag cwuri="http://pouet.org/10" eid="10"/>
+  </tags>
+ </CWUser>
+</rset>
+'''.splitlines())
+
+RELATEDXML = {
+    'http://pouet.org/6': u'''
+<rset size="1">
+ <EmailAddress eid="6" cwuri="http://pouet.org/6">
+  <address>syt@logilab.fr</address>
+  <modification_date>2010-04-13 14:35:56</modification_date>
+  <creation_date>2010-04-13 14:35:56</creation_date>
+ </EmailAddress>
+</rset>
+''',
+    'http://pouet.org/7': u'''
+<rset size="1">
+ <CWGroup eid="7" cwuri="http://pouet.org/7">
+  <name>users</name>
+ </CWGroup>
+</rset>
+''',
+    'http://pouet.org/8': u'''
+<rset size="1">
+ <CWGroup eid="8" cwuri="http://pouet.org/8">
+  <name>unknown</name>
+ </CWGroup>
+</rset>
+''',
+    'http://pouet.org/9': u'''
+<rset size="1">
+ <Tag eid="9" cwuri="http://pouet.org/9">
+  <name>hop</name>
+ </Tag>
+</rset>
+''',
+    'http://pouet.org/10': u'''
+<rset size="1">
+ <Tag eid="10" cwuri="http://pouet.org/10">
+  <name>unknown</name>
+ </Tag>
+</rset>
+''',
+    }
+
+class CWEntityXMLParserTC(CubicWebTC):
+    def setup_database(self):
+        req = self.request()
+        source = req.create_entity('CWSource', name=u'myfeed', type=u'datafeed',
+                                   parser=u'cw.entityxml', url=BASEXML)
+        self.commit()
+        source.init_mapping([(('CWUser', 'use_email', '*'),
+                              u'role=subject\naction=copy'),
+                             (('CWUser', 'in_group', '*'),
+                              u'role=subject\naction=link\nlinkattr=name'),
+                             (('*', 'tags', 'CWUser'),
+                              u'role=object\naction=link-or-create\nlinkattr=name'),
+                            ])
+        req.create_entity('Tag', name=u'hop')
+
+    def test_actions(self):
+        dfsource = self.repo.sources_by_uri['myfeed']
+        self.assertEqual(dfsource.mapping,
+                         {u'CWUser': {
+                             (u'in_group', u'subject', u'link'): [
+                                 (u'CWGroup', {u'linkattr': u'name'})],
+                             (u'tags', u'object', u'link-or-create'): [
+                                 (u'Tag', {u'linkattr': u'name'})],
+                             (u'use_email', u'subject', u'copy'): [
+                                 (u'EmailAddress', {})]
+                             }
+                          })
+        session = self.repo.internal_session()
+        stats = dfsource.pull_data(session, force=True)
+        self.assertEqual(sorted(stats.keys()), ['created', 'updated'])
+        self.assertEqual(len(stats['created']), 2)
+        self.assertEqual(stats['updated'], set())
+
+        user = self.execute('CWUser X WHERE X login "sthenault"').get_entity(0, 0)
+        self.assertEqual(user.creation_date, datetime(2010, 01, 22, 10, 27, 59))
+        self.assertEqual(user.modification_date, datetime(2011, 01, 25, 14, 14, 06))
+        self.assertEqual(user.cwuri, 'http://pouet.org/5')
+        self.assertEqual(user.cw_source[0].name, 'myfeed')
+        self.assertEqual(len(user.use_email), 1)
+        # copy action
+        email = user.use_email[0]
+        self.assertEqual(email.address, 'syt@logilab.fr')
+        self.assertEqual(email.cwuri, 'http://pouet.org/6')
+        self.assertEqual(email.cw_source[0].name, 'myfeed')
+        # link action
+        self.assertFalse(self.execute('CWGroup X WHERE X name "unknown"'))
+        groups = sorted([g.name for g in user.in_group])
+        self.assertEqual(groups, ['users'])
+        # link or create action
+        tags = sorted([t.name for t in user.reverse_tags])
+        self.assertEqual(tags, ['hop', 'unknown'])
+        tag = self.execute('Tag X WHERE X name "unknown"').get_entity(0, 0)
+        self.assertEqual(tag.cwuri, 'http://testing.fr/cubicweb/%s' % tag.eid)
+        self.assertEqual(tag.cw_source[0].name, 'system')
+
+        stats = dfsource.pull_data(session, force=True)
+        self.assertEqual(stats['created'], set())
+        self.assertEqual(len(stats['updated']), 2)
+        self.repo._type_source_cache.clear()
+        self.repo._extid_cache.clear()
+        stats = dfsource.pull_data(session, force=True)
+        self.assertEqual(stats['created'], set())
+        self.assertEqual(len(stats['updated']), 2)
+
+if __name__ == '__main__':
+    from logilab.common.testlib import unittest_main
+    unittest_main()
--- a/test/unittest_entity.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/test/unittest_entity.py	Wed Apr 27 09:54:22 2011 +0200
@@ -223,38 +223,48 @@
                           'Any X,AA ORDERBY AA DESC '
                           'WHERE E eid %(x)s, E tags X, X modification_date AA')
 
-    def test_unrelated_rql_security_1(self):
+    def test_unrelated_rql_security_1_manager(self):
         user = self.request().user
         rql = user.cw_unrelated_rql('use_email', 'EmailAddress', 'subject')[0]
         self.assertEqual(rql, 'Any O,AA,AB,AC ORDERBY AC DESC '
-                          'WHERE NOT S use_email O, S eid %(x)s, O is EmailAddress, O address AA, O alias AB, O modification_date AC')
+                         'WHERE NOT S use_email O, S eid %(x)s, '
+                         'O is EmailAddress, O address AA, O alias AB, O modification_date AC')
+
+    def test_unrelated_rql_security_1_user(self):
         self.create_user('toto')
         self.login('toto')
         user = self.request().user
         rql = user.cw_unrelated_rql('use_email', 'EmailAddress', 'subject')[0]
         self.assertEqual(rql, 'Any O,AA,AB,AC ORDERBY AC DESC '
-                          'WHERE NOT S use_email O, S eid %(x)s, O is EmailAddress, O address AA, O alias AB, O modification_date AC')
+                          'WHERE NOT S use_email O, S eid %(x)s, '
+                         'O is EmailAddress, O address AA, O alias AB, O modification_date AC')
         user = self.execute('Any X WHERE X login "admin"').get_entity(0, 0)
-        self.assertRaises(Unauthorized, user.cw_unrelated_rql, 'use_email', 'EmailAddress', 'subject')
+        rql = user.cw_unrelated_rql('use_email', 'EmailAddress', 'subject')[0]
+        self.assertEqual(rql, 'Any O,AA,AB,AC ORDERBY AC DESC WHERE '
+                         'NOT EXISTS(S use_email O), S eid %(x)s, '
+                         'O is EmailAddress, O address AA, O alias AB, O modification_date AC, '
+                         'A eid %(B)s, EXISTS(S identity A, NOT A in_group C, C name "guests", C is CWGroup)')
+
+    def test_unrelated_rql_security_1_anon(self):
         self.login('anon')
         user = self.request().user
-        self.assertRaises(Unauthorized, user.cw_unrelated_rql, 'use_email', 'EmailAddress', 'subject')
+        rql = user.cw_unrelated_rql('use_email', 'EmailAddress', 'subject')[0]
+        self.assertEqual(rql, 'Any O,AA,AB,AC ORDERBY AC DESC WHERE '
+                         'NOT EXISTS(S use_email O), S eid %(x)s, '
+                         'O is EmailAddress, O address AA, O alias AB, O modification_date AC, '
+                         'A eid %(B)s, EXISTS(S identity A, NOT A in_group C, C name "guests", C is CWGroup)')
 
     def test_unrelated_rql_security_2(self):
         email = self.execute('INSERT EmailAddress X: X address "hop"').get_entity(0, 0)
         rql = email.cw_unrelated_rql('use_email', 'CWUser', 'object')[0]
         self.assertEqual(rql, 'Any S,AA,AB,AC,AD ORDERBY AA ASC '
                           'WHERE NOT S use_email O, O eid %(x)s, S is CWUser, S login AA, S firstname AB, S surname AC, S modification_date AD')
-        #rql = email.cw_unrelated_rql('use_email', 'Person', 'object')[0]
-        #self.assertEqual(rql, '')
         self.login('anon')
         email = self.execute('Any X WHERE X eid %(x)s', {'x': email.eid}).get_entity(0, 0)
         rql = email.cw_unrelated_rql('use_email', 'CWUser', 'object')[0]
         self.assertEqual(rql, 'Any S,AA,AB,AC,AD ORDERBY AA '
                           'WHERE NOT EXISTS(S use_email O), O eid %(x)s, S is CWUser, S login AA, S firstname AB, S surname AC, S modification_date AD, '
                           'A eid %(B)s, EXISTS(S identity A, NOT A in_group C, C name "guests", C is CWGroup)')
-        #rql = email.cw_unrelated_rql('use_email', 'Person', 'object')[0]
-        #self.assertEqual(rql, '')
 
     def test_unrelated_rql_security_nonexistant(self):
         self.login('anon')
--- a/test/unittest_rqlrewrite.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/test/unittest_rqlrewrite.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -62,15 +62,17 @@
             def simplify(mainrqlst, needcopy=False):
                 rqlhelper.simplify(rqlst, needcopy)
     rewriter = RQLRewriter(mock_object(vreg=FakeVReg, user=(mock_object(eid=1))))
-    for v, snippets in snippets_map.items():
-        snippets_map[v] = [isinstance(snippet, basestring)
-                           and mock_object(snippet_rqlst=parse('Any X WHERE '+snippet).children[0],
-                                           expression='Any X WHERE '+snippet)
-                           or snippet
-                           for snippet in snippets]
+    snippets = []
+    for v, exprs in snippets_map.items():
+        rqlexprs = [isinstance(snippet, basestring)
+                    and mock_object(snippet_rqlst=parse('Any X WHERE '+snippet).children[0],
+                                    expression='Any X WHERE '+snippet)
+                    or snippet
+                    for snippet in exprs]
+        snippets.append((dict([v]), rqlexprs))
     rqlhelper.compute_solutions(rqlst.children[0], {'eid': eid_func_map}, kwargs=kwargs)
     solutions = rqlst.children[0].solutions
-    rewriter.rewrite(rqlst.children[0], snippets_map.items(), solutions, kwargs,
+    rewriter.rewrite(rqlst.children[0], snippets, solutions, kwargs,
                      existingvars)
     test_vrefs(rqlst.children[0])
     return rewriter.rewritten
--- a/test/unittest_schema.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/test/unittest_schema.py	Wed Apr 27 09:54:22 2011 +0200
@@ -163,7 +163,7 @@
                              'CWCache', 'CWConstraint', 'CWConstraintType', 'CWEType',
                              'CWAttribute', 'CWGroup', 'EmailAddress', 'CWRelation',
                              'CWPermission', 'CWProperty', 'CWRType',
-                             'CWSource', 'CWSourceHostConfig',
+                             'CWSource', 'CWSourceHostConfig', 'CWSourceSchemaConfig',
                              'CWUniqueTogetherConstraint', 'CWUser',
                              'ExternalUri', 'File', 'Float', 'Int', 'Interval', 'Note',
                              'Password', 'Personne',
@@ -171,7 +171,7 @@
                              'Societe', 'State', 'StateFull', 'String', 'SubNote', 'SubWorkflowExitPoint',
                              'Tag', 'Time', 'Transition', 'TrInfo',
                              'Workflow', 'WorkflowTransition']
-        self.assertListEqual(entities, sorted(expected_entities))
+        self.assertListEqual(sorted(expected_entities), entities)
         relations = sorted([str(r) for r in schema.relations()])
         expected_relations = ['add_permission', 'address', 'alias', 'allowed_transition',
                               'bookmarked_by', 'by_transition',
@@ -181,8 +181,7 @@
                               'constrained_by', 'constraint_of',
                               'content', 'content_format',
                               'created_by', 'creation_date', 'cstrtype', 'custom_workflow',
-                              'cwuri', 'cw_source', 'cw_host_config_of',
-                              'cw_support', 'cw_dont_cross', 'cw_may_cross',
+                              'cwuri', 'cw_for_source', 'cw_host_config_of', 'cw_schema', 'cw_source',
 
                               'data', 'data_encoding', 'data_format', 'data_name', 'default_workflow', 'defaultval', 'delete_permission',
                               'description', 'description_format', 'destination_state',
@@ -196,15 +195,15 @@
                               'identity', 'in_group', 'in_state', 'indexed',
                               'initial_state', 'inlined', 'internationalizable', 'is', 'is_instance_of',
 
-                              'label', 'last_login_time', 'login',
+                              'label', 'last_login_time', 'latest_retrieval', 'login',
 
                               'mainvars', 'match_host', 'modification_date',
 
                               'name', 'nom',
 
-                              'ordernum', 'owned_by',
+                              'options', 'ordernum', 'owned_by',
 
-                              'path', 'pkey', 'prefered_form', 'prenom', 'primary_email',
+                              'parser', 'path', 'pkey', 'prefered_form', 'prenom', 'primary_email',
 
                               'read_permission', 'relation_type', 'relations', 'require_group',
 
@@ -212,13 +211,13 @@
 
                               'tags', 'timestamp', 'title', 'to_entity', 'to_state', 'transition_of', 'travaille', 'type',
 
-                              'upassword', 'update_permission', 'uri', 'use_email',
+                              'upassword', 'update_permission', 'url', 'uri', 'use_email',
 
                               'value',
 
                               'wf_info_for', 'wikiid', 'workflow_of', 'tr_count']
 
-        self.assertListEqual(relations, sorted(expected_relations))
+        self.assertListEqual(sorted(expected_relations), relations)
 
         eschema = schema.eschema('CWUser')
         rels = sorted(str(r) for r in eschema.subject_relations())
--- a/toolsutils.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/toolsutils.py	Wed Apr 27 09:54:22 2011 +0200
@@ -58,12 +58,12 @@
     """create a directory if it doesn't exist yet"""
     try:
         makedirs(directory)
-        print '-> created directory %s.' % directory
+        print '-> created directory %s' % directory
     except OSError, ex:
         import errno
         if ex.errno != errno.EEXIST:
             raise
-        print '-> directory %s already exists, no need to create it.' % directory
+        print '-> no need to create existing directory %s' % directory
 
 def create_symlink(source, target):
     """create a symbolic link"""
@@ -154,9 +154,9 @@
 def restrict_perms_to_user(filepath, log=None):
     """set -rw------- permission on the given file"""
     if log:
-        log('set %s permissions to 0600', filepath)
+        log('set permissions to 0600 for %s', filepath)
     else:
-        print '-> set %s permissions to 0600' % filepath
+        print '-> set permissions to 0600 for %s' % filepath
     chmod(filepath, 0600)
 
 def read_config(config_file):
--- a/utils.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/utils.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -114,7 +114,7 @@
 
 # use networkX instead ?
 # http://networkx.lanl.gov/reference/algorithms.traversal.html#module-networkx.algorithms.traversal.astar
-def transitive_closure_of(entity, relname, _seen=None):
+def transitive_closure_of(entity, rtype, _seen=None):
     """return transitive closure *for the subgraph starting from the given
     entity* (eg 'parent' entities are not included in the results)
     """
@@ -122,10 +122,10 @@
         _seen = set()
     _seen.add(entity.eid)
     yield entity
-    for child in getattr(entity, relname):
+    for child in getattr(entity, rtype):
         if child.eid in _seen:
             continue
-        for subchild in transitive_closure_of(child, relname, _seen):
+        for subchild in transitive_closure_of(child, rtype, _seen):
             yield subchild
 
 
Binary file web/data/add_button.png has changed
--- a/web/data/cubicweb.ajax.js	Tue Apr 05 08:39:49 2011 +0200
+++ b/web/data/cubicweb.ajax.js	Wed Apr 27 09:54:22 2011 +0200
@@ -283,7 +283,7 @@
  * dictionary, `reqtype` the HTTP request type (get 'GET' or 'POST').
  */
 function loadRemote(url, form, reqtype, sync) {
-    if (!url.toLowerCase().startswith(baseuri())) {
+    if (!url.toLowerCase().startswith(baseuri().toLowerCase())) {
         url = baseuri() + url;
     }
     if (!sync) {
@@ -501,7 +501,7 @@
                 var fck = new FCKeditor(this.id);
                 fck.Config['CustomConfigurationsPath'] = fckconfigpath;
                 fck.Config['DefaultLanguage'] = fcklang;
-                fck.BasePath = "fckeditor/";
+                fck.BasePath = baseuri() + "fckeditor/";
                 fck.ReplaceTextarea();
             } else {
                 cw.log('fckeditor could not be found.');
@@ -608,6 +608,29 @@
     $('#'+domid).loadxhtml('json', params, null, 'swap');
 }
 
+/* ajax tabs ******************************************************************/
+
+function setTab(tabname, cookiename) {
+    // set appropriate cookie
+    loadRemote('json', ajaxFuncArgs('set_cookie', null, cookiename, tabname));
+    // trigger show + tabname event
+    triggerLoad(tabname);
+}
+
+function loadNow(eltsel, holesel, reloadable) {
+    var lazydiv = jQuery(eltsel);
+    var hole = lazydiv.children(holesel);
+    if ((hole.length == 0) && ! reloadable) {
+        /* the hole is already filled */
+        return;
+    }
+    lazydiv.loadxhtml(lazydiv.attr('cubicweb:loadurl'), {'pageid': pageid});
+}
+
+function triggerLoad(divid) {
+    jQuery('#lazy-' + divid).trigger('load_' + divid);
+}
+
 /* DEPRECATED *****************************************************************/
 
 preprocessAjaxLoad = cw.utils.deprecatedFunction(
--- a/web/data/cubicweb.calendar.css	Tue Apr 05 08:39:49 2011 +0200
+++ b/web/data/cubicweb.calendar.css	Wed Apr 27 09:54:22 2011 +0200
@@ -336,3 +336,48 @@
 td.next {
   text-align: right;
 }
+
+/* ------------------------- */
+/* tooltips for fullcalendar */
+
+div.calevent div.tooltip {
+   display: none; /* tooltip hidden */
+}
+
+div.calevent:hover {
+   z-index: auto !important; /* in order that the tooltip from the above .calevent div can be put over this div*/
+}
+
+div.calevent a{
+   display: inline;
+   font-size: inherit;
+   font-weight: bold;
+}
+
+div.calevent:hover div.tooltip{
+   display: block;
+   position: absolute;
+   z-index: 10;
+   color: black;
+   border:1px solid black;
+   background: white;
+   padding: 5px;
+   overflow: visible;
+   width: 200px;
+}
+
+div.tooltip a{
+   border: none;
+   background: none;
+   color: #2952A3;
+   text-decoration: none;
+ }
+
+div.tooltip a:hover{
+   text-decoration: underline;
+ }
+
+
+div.fc-view{
+  overflow: visible;
+}
\ No newline at end of file
--- a/web/data/cubicweb.css	Tue Apr 05 08:39:49 2011 +0200
+++ b/web/data/cubicweb.css	Wed Apr 27 09:54:22 2011 +0200
@@ -1,6 +1,6 @@
 /*
  *  :organization: Logilab
- *  :copyright: 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+ *  :copyright: 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
  *  :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
  */
 
@@ -220,7 +220,7 @@
 /* header */
 
 table#header {
-  background: %(headerBgColor)s url("banner.png") repeat-x top left;
+  background: %(headerBg)s;
   width: 100%;
 }
 
@@ -598,6 +598,10 @@
 }
 
 /* pagination */
+
+div.pagination{
+  margin: 0.5em 0;
+}
 span.slice a:visited,
 span.slice a:hover{
   color: %(helperColor)s;
@@ -752,7 +756,7 @@
 table.listing th {
   font-weight: bold;
   font-size: 8pt;
-  background: %(listingHeaderBgColor)s; 
+  background: %(listingHeaderBgColor)s;
   padding: 2px 4px;
   border: 1px solid %(listingBorderColor)s;
   border-right:none;
@@ -881,6 +885,12 @@
   background-color: transparent;
 }
 
+a.addButton {
+  margin-left: 0.5em;
+  padding-left: 16px;
+  background: transparent url("add_button.png") 0% 50% no-repeat;
+}
+
 /***************************************/
 /* lists                               */
 /***************************************/
--- a/web/data/cubicweb.htmlhelpers.js	Tue Apr 05 08:39:49 2011 +0200
+++ b/web/data/cubicweb.htmlhelpers.js	Wed Apr 27 09:54:22 2011 +0200
@@ -16,11 +16,15 @@
  * available and inspects the <base> tag manually otherwise.)
  */
 function baseuri() {
-    var uri = document.baseURI;
-    if (uri) { // some browsers don't define baseURI
-        return uri.toLowerCase();
+    if (typeof BASE_URL === 'undefined') {
+        // backward compatibility, BASE_URL might be undefined
+        var uri = document.baseURI;
+        if (uri) { // some browsers don't define baseURI
+            return uri.toLowerCase();
+        }
+        return jQuery('base').attr('href').toLowerCase();
     }
-    return jQuery('base').attr('href').toLowerCase();
+    return BASE_URL;
 }
 
 /**
--- a/web/data/cubicweb.lazy.js	Tue Apr 05 08:39:49 2011 +0200
+++ b/web/data/cubicweb.lazy.js	Wed Apr 27 09:54:22 2011 +0200
@@ -1,14 +1,1 @@
-function load_now(eltsel, holesel, reloadable) {
-    var lazydiv = jQuery(eltsel);
-    var hole = lazydiv.children(holesel);
-    if ((hole.length == 0) && ! reloadable) {
-        /* the hole is already filled */
-        return;
-    }
-    lazydiv.loadxhtml(lazydiv.attr('cubicweb:loadurl'));
-}
 
-function trigger_load(divid) {
-    jQuery('#lazy-' + divid).trigger('load_' + divid);
-}
-
--- a/web/data/cubicweb.old.css	Tue Apr 05 08:39:49 2011 +0200
+++ b/web/data/cubicweb.old.css	Wed Apr 27 09:54:22 2011 +0200
@@ -1,6 +1,6 @@
 /*
  *  :organization: Logilab
- *  :copyright: 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+ *  :copyright: 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
  *  :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
  */
 
@@ -56,7 +56,7 @@
 }
 
 a, a:active, a:visited, a:link {
-  color: #ff4500;
+  color: %(aColor)s;
   text-decoration: none;
 }
 
@@ -227,7 +227,7 @@
 /* header */
 
 table#header {
-  background: #ff7700 url("banner.png") left top repeat-x;
+  background: %(headerBg)s;
   width: 100%;
 }
 
@@ -283,7 +283,7 @@
 }
 
 table#mainLayout{
- margin:0px 3px;
+ padding: 0px 3px;
 }
 
 table#mainLayout td#contentColumn {
@@ -602,6 +602,10 @@
   border-bottom: 1px solid #ccc;
 }
 
+div.pagination{
+  margin: 0.5em 0;
+}
+
 span.slice a:visited,
 span.slice a:hover{
   color: #555544;
@@ -899,6 +903,12 @@
   background-color: transparent;
 }
 
+a.addButton {
+  margin-left: 0.5em;
+  padding-left: 16px;
+  background: transparent url("add_button.png") 0% 50% no-repeat;
+}
+
 /***************************************/
 /* footer                              */
 /***************************************/
--- a/web/data/cubicweb.tabs.js	Tue Apr 05 08:39:49 2011 +0200
+++ b/web/data/cubicweb.tabs.js	Wed Apr 27 09:54:22 2011 +0200
@@ -1,7 +1,1 @@
-function set_tab(tabname, cookiename) {
-    // set appropriate cookie
-    loadRemote('json', ajaxFuncArgs('set_cookie', null, cookiename, tabname));
-    // trigger show + tabname event
-    trigger_load(tabname);
-}
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/fullcalendar.css	Wed Apr 27 09:54:22 2011 +0200
@@ -0,0 +1,586 @@
+/*
+ * FullCalendar v1.4.8 Stylesheet
+ *
+ * Feel free to edit this file to customize the look of FullCalendar.
+ * When upgrading to newer versions, please upgrade this file as well,
+ * porting over any customizations afterwards.
+ *
+ * Date: Sat Oct 16 17:10:03 2010 -0700
+ *
+ */
+ 
+ 
+/* TODO: make font sizes look the same in all doctypes */
+
+
+.fc,
+.fc .fc-header,
+.fc .fc-content {
+	font-size: 1em;
+	}
+	
+.fc {
+	direction: ltr;
+	text-align: left;
+	}
+	
+.fc table {
+	border-collapse: collapse;
+	border-spacing: 0;
+	}
+	
+.fc td, .fc th {
+	padding: 0;
+	vertical-align: top;
+	}
+
+
+
+/* Header
+------------------------------------------------------------------------*/
+	
+table.fc-header {
+	width: 100%;
+	}
+	
+.fc-header-left {
+	width: 25%;
+	}
+	
+.fc-header-left table {
+	float: left;
+	}
+	
+.fc-header-center {
+	width: 50%;
+	text-align: center;
+	}
+	
+.fc-header-center table {
+	margin: 0 auto;
+	}
+	
+.fc-header-right {
+	width: 25%;
+	}
+	
+.fc-header-right table {
+	float: right;
+	}
+	
+.fc-header-title {
+	margin-top: 0;
+	white-space: nowrap;
+	}
+	
+.fc-header-space {
+	padding-left: 10px;
+	}
+	
+/* right-to-left */
+
+.fc-rtl .fc-header-title {
+	direction: rtl;
+	}
+
+
+
+/* Buttons
+------------------------------------------------------------------------*/
+
+.fc-header .fc-state-default,
+.fc-header .ui-state-default {
+	margin-bottom: 1em;
+	cursor: pointer;
+	}
+	
+.fc-header .fc-state-default {
+	border-width: 1px 0;
+	padding: 0 1px;
+	}
+	
+.fc-header .fc-state-default,
+.fc-header .fc-state-default a {
+	border-style: solid;
+	}
+	
+.fc-header .fc-state-default a {
+	display: block;
+	border-width: 0 1px;
+	margin: 0 -1px;
+	width: 100%;
+	text-decoration: none;
+	}
+	
+.fc-header .fc-state-default span {
+	display: block;
+	border-style: solid;
+	border-width: 1px 0 1px 1px;
+	padding: 3px 5px;
+	}
+	
+.fc-header .ui-state-default {
+	padding: 4px 6px;
+	}
+	
+.fc-header .fc-state-default span,
+.fc-header .ui-state-default span {
+	white-space: nowrap;
+	}
+	
+/* for adjacent buttons */
+	
+.fc-header .fc-no-right {
+	padding-right: 0;
+	}
+	
+.fc-header .fc-no-right a {
+	margin-right: 0;
+	border-right: 0;
+	}
+	
+.fc-header .ui-no-right {
+	border-right: 0;
+	}
+	
+/* for fake rounded corners */
+	
+.fc-header .fc-corner-left {
+	margin-left: 1px;
+	padding-left: 0;
+	}
+	
+.fc-header .fc-corner-right {
+	margin-right: 1px;
+	padding-right: 0;
+	}
+	
+/* DEFAULT button COLORS */
+	
+.fc-header .fc-state-default,
+.fc-header .fc-state-default a {
+	border-color: #777; /* outer border */
+	color: #333;
+	}
+
+.fc-header .fc-state-default span {
+	border-color: #fff #fff #d1d1d1; /* inner border */
+	background: #e8e8e8;
+	}
+	
+/* PRESSED button COLORS (down and active) */
+	
+.fc-header .fc-state-active a {
+	color: #fff;
+	}
+	
+.fc-header .fc-state-down span,
+.fc-header .fc-state-active span {
+	background: #888;
+	border-color: #808080 #808080 #909090; /* inner border */
+	}
+	
+/* DISABLED button COLORS */
+	
+.fc-header .fc-state-disabled a {
+	color: #999;
+	}
+	
+.fc-header .fc-state-disabled,
+.fc-header .fc-state-disabled a {
+	border-color: #ccc; /* outer border */
+	}
+	
+.fc-header .fc-state-disabled span {
+	border-color: #fff #fff #f0f0f0; /* inner border */
+	background: #f0f0f0;
+	}
+	
+	
+	
+/* Content Area & Global Cell Styles
+------------------------------------------------------------------------*/
+	
+.fc-widget-content {
+	border: 1px solid #ccc; /* outer border color */
+	}
+	
+.fc-content {
+	clear: both;
+	}
+	
+.fc-content .fc-state-default {
+	border-style: solid;
+	border-color: #ccc; /* inner border color */
+	}
+	
+.fc-content .fc-state-highlight { /* today */
+	background: #ffc;
+	}
+	
+.fc-content .fc-not-today { /* override jq-ui highlight (TODO: ui-widget-content) */
+	background: none;
+	}
+	
+.fc-cell-overlay { /* semi-transparent rectangle while dragging */
+	background: #9cf;
+	opacity: .2;
+	filter: alpha(opacity=20); /* for IE */
+	}
+	
+.fc-view { /* prevents dragging outside of widget */
+	width: 100%;
+	overflow: hidden;
+	}
+	
+	
+	
+
+
+/* Global Event Styles
+------------------------------------------------------------------------*/
+
+.fc-event,
+.fc-agenda .fc-event-time,
+.fc-event a {
+	border-style: solid; 
+	border-color: #36c;     /* default BORDER color (probably the same as background-color) */
+	background-color: #36c; /* default BACKGROUND color */
+	color: #fff;            /* default TEXT color */
+	}
+	
+	/* Use the 'className' CalEvent property and the following
+	 * example CSS to change event color on a per-event basis:
+	 *
+	 * .myclass,
+	 * .fc-agenda .myclass .fc-event-time,
+	 * .myclass a {
+	 *     background-color: black;
+	 *     border-color: black;
+	 *     color: red;
+	 *     }
+	 */
+	 
+.fc-event {
+	text-align: left;
+	}
+	
+.fc-event a {
+	overflow: hidden;
+	font-size: .85em;
+	text-decoration: none;
+	cursor: pointer;
+	}
+	
+.fc-event-editable {
+	cursor: pointer;
+	}
+	
+.fc-event-time,
+.fc-event-title {
+	padding: 0 1px;
+	}
+	
+/* for fake rounded corners */
+
+.fc-event a {
+	display: block;
+	position: relative;
+	width: 100%;
+	height: 100%;
+	}
+	
+/* right-to-left */
+
+.fc-rtl .fc-event a {
+	text-align: right;
+	}
+	
+/* resizable */
+	
+.fc .ui-resizable-handle {
+	display: block;
+	position: absolute;
+	z-index: 99999;
+	border: 0 !important; /* important overrides pre jquery ui 1.7 styles */
+	background: url(data:image/gif;base64,AAAA) !important; /* hover fix for IE */
+	}
+	
+	
+	
+/* Horizontal Events
+------------------------------------------------------------------------*/
+
+.fc-event-hori {
+	border-width: 1px 0;
+	margin-bottom: 1px;
+	}
+	
+.fc-event-hori a {
+	border-width: 0;
+	}
+	
+/* for fake rounded corners */
+	
+.fc-content .fc-corner-left {
+	margin-left: 1px;
+	}
+	
+.fc-content .fc-corner-left a {
+	margin-left: -1px;
+	border-left-width: 1px;
+	}
+	
+.fc-content .fc-corner-right {
+	margin-right: 1px;
+	}
+	
+.fc-content .fc-corner-right a {
+	margin-right: -1px;
+	border-right-width: 1px;
+	}
+	
+/* resizable */
+	
+.fc-event-hori .ui-resizable-e {
+	top: 0           !important; /* importants override pre jquery ui 1.7 styles */
+	right: -3px      !important;
+	width: 7px       !important;
+	height: 100%     !important;
+	cursor: e-resize;
+	}
+	
+.fc-event-hori .ui-resizable-w {
+	top: 0           !important;
+	left: -3px       !important;
+	width: 7px       !important;
+	height: 100%     !important;
+	cursor: w-resize;
+	}
+	
+.fc-event-hori .ui-resizable-handle {
+	_padding-bottom: 14px; /* IE6 had 0 height */
+	}
+	
+	
+	
+
+/* Month View, Basic Week View, Basic Day View
+------------------------------------------------------------------------*/
+
+.fc-grid table {
+	width: 100%;
+	}
+	
+.fc .fc-grid th {
+	border-width: 0 0 0 1px;
+	text-align: center;
+	}
+	
+.fc .fc-grid td {
+	border-width: 1px 0 0 1px;
+	}
+	
+.fc-grid th.fc-leftmost,
+.fc-grid td.fc-leftmost {
+	border-left: 0;
+	}
+	
+.fc-grid .fc-day-number {
+	float: right;
+	padding: 0 2px;
+	}
+	
+.fc-grid .fc-other-month .fc-day-number {
+	opacity: 0.3;
+	filter: alpha(opacity=30); /* for IE */
+	/* opacity with small font can sometimes look too faded
+	   might want to set the 'color' property instead
+	   making day-numbers bold also fixes the problem */
+	}
+	
+.fc-grid .fc-day-content {
+	clear: both;
+	padding: 2px 2px 0; /* distance between events and day edges */
+	}
+	
+/* event styles */
+	
+.fc-grid .fc-event-time {
+	font-weight: bold;
+	}
+	
+/* right-to-left */
+
+.fc-rtl .fc-grid {
+	direction: rtl;
+	}
+	
+.fc-rtl .fc-grid .fc-day-number {
+	float: left;
+	}
+	
+.fc-rtl .fc-grid .fc-event-time {
+	float: right;
+	}
+	
+/* Agenda Week View, Agenda Day View
+------------------------------------------------------------------------*/
+
+.fc .fc-agenda th,
+.fc .fc-agenda td {
+	border-width: 1px 0 0 1px;
+	}
+	
+.fc .fc-agenda .fc-leftmost {
+	border-left: 0;
+	}
+	
+.fc-agenda tr.fc-first th,
+.fc-agenda tr.fc-first td {
+	border-top: 0;
+	}
+	
+.fc-agenda-head tr.fc-last th {
+	border-bottom-width: 1px;
+	}
+	
+.fc .fc-agenda-head td,
+.fc .fc-agenda-body td {
+	background: none;
+	}
+	
+.fc-agenda-head th {
+	text-align: center;
+	}
+	
+/* the time axis running down the left side */
+	
+.fc-agenda .fc-axis {
+	width: 50px;
+	padding: 0 4px;
+	vertical-align: middle;
+	white-space: nowrap;
+	text-align: right;
+	font-weight: normal;
+	}
+	
+/* all-day event cells at top */
+	
+.fc-agenda-head tr.fc-all-day th {
+	height: 35px;
+	}
+	
+.fc-agenda-head td {
+	padding-bottom: 10px;
+	}
+	
+.fc .fc-divider div {
+	font-size: 1px; /* for IE6/7 */
+	height: 2px;
+	}
+	
+.fc .fc-divider .fc-state-default {
+	background: #eee; /* color for divider between all-day and time-slot events */
+	}
+
+/* body styles */
+	
+.fc .fc-agenda-body td div {
+	height: 20px; /* slot height */
+	}
+	
+.fc .fc-agenda-body tr.fc-minor th,
+.fc .fc-agenda-body tr.fc-minor td {
+	border-top-style: dotted;
+	}
+	
+.fc-agenda .fc-day-content {
+	padding: 2px 2px 0; /* distance between events and day edges */
+	}
+	
+/* vertical background columns */
+
+.fc .fc-agenda-bg .ui-state-highlight {
+	background-image: none; /* tall column, don't want repeating background image */
+	}
+	
+
+
+/* Vertical Events
+------------------------------------------------------------------------*/
+
+.fc-event-vert {
+	border-width: 0 1px;
+	}
+	
+.fc-event-vert a {
+	border-width: 0;
+	}
+	
+/* for fake rounded corners */
+	
+.fc-content .fc-corner-top {
+	margin-top: 1px;
+	}
+	
+.fc-content .fc-corner-top a {
+	margin-top: -1px;
+	border-top-width: 1px;
+	}
+	
+.fc-content .fc-corner-bottom {
+	margin-bottom: 1px;
+	}
+	
+.fc-content .fc-corner-bottom a {
+	margin-bottom: -1px;
+	border-bottom-width: 1px;
+	}
+	
+/* event content */
+	
+.fc-event-vert span {
+	display: block;
+	position: relative;
+	z-index: 2;
+	}
+	
+.fc-event-vert span.fc-event-time {
+	white-space: nowrap;
+	_white-space: normal;
+	overflow: hidden;
+	border: 0;
+	font-size: 10px;
+	}
+	
+.fc-event-vert span.fc-event-title {
+	line-height: 13px;
+	}
+	
+.fc-event-vert span.fc-event-bg { /* makes the event lighter w/ a semi-transparent overlay  */
+	position: absolute;
+	z-index: 1;
+	top: 0;
+	left: 0;
+	width: 100%;
+	height: 100%;
+	background: #fff;
+	opacity: .3;
+	filter: alpha(opacity=30); /* for IE */
+	}
+	
+/* resizable */
+	
+.fc-event-vert .ui-resizable-s {
+	bottom: 0        !important; /* importants override pre jquery ui 1.7 styles */
+	width: 100%      !important;
+	height: 8px      !important;
+	line-height: 8px !important;
+	font-size: 11px  !important;
+	font-family: monospace;
+	text-align: center;
+	cursor: s-resize;
+	}
+	
+	
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/fullcalendar.min.js	Wed Apr 27 09:54:22 2011 +0200
@@ -0,0 +1,108 @@
+/*
+
+ FullCalendar v1.4.8
+ http://arshaw.com/fullcalendar/
+
+ Use fullcalendar.css for basic styling.
+ For event drag & drop, requires jQuery UI draggable.
+ For event resizing, requires jQuery UI resizable.
+
+ Copyright (c) 2010 Adam Shaw
+ Dual licensed under the MIT and GPL licenses, located in
+ MIT-LICENSE.txt and GPL-LICENSE.txt respectively.
+
+ Date: Sat Oct 16 17:10:03 2010 -0700
+
+*/
+(function(m,Z){function fb(a){m.extend(true,Pa,a)}function Bb(a,b,f){function h(e){if(qa){E();ea();M();H(e)}else i()}function i(){ya=b.theme?"ui":"fc";a.addClass("fc");b.isRTL&&a.addClass("fc-rtl");b.theme&&a.addClass("ui-widget");qa=m("<div class='fc-content "+ya+"-widget-content' style='position:relative'/>").prependTo(a);pa=new Cb(p,b);(ta=pa.render())&&a.prepend(ta);w(b.defaultView);m(window).resize(y);t()||s()}function s(){setTimeout(function(){!F.start&&t()&&H()},0)}function j(){m(window).unbind("resize",
+y);pa.destroy();qa.remove();a.removeClass("fc fc-rtl fc-ui-widget")}function l(){return ca.offsetWidth!==0}function t(){return m("body")[0].offsetWidth!==0}function w(e){if(!F||e!=F.name){g++;N();var q=F,J;if(q){if(q.eventsChanged){M();q.eventDirty=q.eventsChanged=false}(q.beforeHide||gb)();Ra(qa,qa.height());q.element.hide()}else Ra(qa,1);qa.css("overflow","hidden");if(F=ka[e])F.element.show();else F=ka[e]=new Ha[e](J=ua=m("<div class='fc-view fc-view-"+e+"' style='position:absolute'/>").appendTo(qa),
+p);q&&pa.deactivateButton(q.name);pa.activateButton(e);H();qa.css("overflow","");q&&Ra(qa,1);J||(F.afterShow||gb)();g--}}function H(e){if(l()){g++;N();ma===Z&&E();if(!F.start||e||n<F.start||n>=F.end){F.render(n,e||0);K(true);!b.lazyFetching||fa()?X():F.renderEvents(Y())}else if(F.sizeDirty||F.eventsDirty||!b.lazyFetching){F.clearEvents();F.sizeDirty&&K();!b.lazyFetching||fa()?X():F.renderEvents(Y())}ia=a.outerWidth();F.sizeDirty=false;F.eventsDirty=false;pa.updateTitle(F.title);e=new Date;e>=F.start&&
+e<F.end?pa.disableButton("today"):pa.enableButton("today");g--;F.trigger("viewDisplay",ca)}}function E(){ma=b.contentHeight?b.contentHeight:b.height?b.height-(ta?ta.height():0)-Sa(qa[0]):Math.round(qa.width()/Math.max(b.aspectRatio,0.5))}function K(e){g++;F.setHeight(ma,e);if(ua){ua.css("position","relative");ua=null}F.setWidth(qa.width(),e);g--}function y(){if(!g)if(F.start){var e=++d;setTimeout(function(){if(e==d&&!g&&l())if(ia!=(ia=a.outerWidth())){g++;P();F.trigger("windowResize",ca);g--}},200)}else s()}
+function P(){ea();if(l()){E();K();N();F.rerenderEvents();F.sizeDirty=false}}function ea(){m.each(ka,function(e,q){q.sizeDirty=true})}function S(){M();if(l()){F.clearEvents();F.renderEvents(Y());F.eventsDirty=false}}function M(){m.each(ka,function(e,q){q.eventsDirty=true})}function X(){$(function(e){F.renderEvents(e)})}function ga(e,q,J){F.select(e,q,J===Z?true:J)}function N(){F&&F.unselect()}function U(){H(-1)}function u(){H(1)}function B(){Ta(n,-1);H()}function G(){Ta(n,1);H()}function k(){n=new Date;
+H()}function c(e,q,J){if(e instanceof Date)n=x(e);else hb(n,e,q,J);H()}function A(e,q,J){e!==Z&&Ta(n,e);q!==Z&&Ua(n,q);J!==Z&&O(n,J);H()}function C(){return x(n)}function I(){return F}function V(e,q){if(q===Z)return b[e];if(e=="height"||e=="contentHeight"||e=="aspectRatio"){b[e]=q;P()}}function r(e,q){if(b[e])return b[e].apply(q||ca,Array.prototype.slice.call(arguments,2))}var p=this;p.options=b;p.render=h;p.destroy=j;p.changeView=w;p.select=ga;p.unselect=N;p.rerenderEvents=S;p.prev=U;p.next=u;p.prevYear=
+B;p.nextYear=G;p.today=k;p.gotoDate=c;p.incrementDate=A;p.formatDate=function(e,q){return Ja(e,q,b)};p.formatDates=function(e,q,J){return Va(e,q,J,b)};p.getDate=C;p.getView=I;p.option=V;p.trigger=r;Db.call(p,b,f);var $=p.fetchEvents,fa=p.isFetchNeeded,Y=p.clientEvents,ca=a[0],pa,ta,qa,ya,F,ka={},ia,ma,ua,d=0,g=0,n=new Date;hb(n,b.year,b.month,b.date);var z;b.droppable&&m(document).bind("dragstart",function(e,q){var J=e.target,R=m(J);if(!R.parents(".fc").length){var Q=b.dropAccept;if(m.isFunction(Q)?
+Q.call(J,R):R.is(Q)){z=J;F.dragStart(z,e,q)}}}).bind("dragstop",function(e,q){if(z){F.dragStop(z,e,q);z=null}})}function Cb(a,b){function f(){K=b.theme?"ui":"fc";var y=b.header;if(y)return E=m("<table class='fc-header'/>").append(m("<tr/>").append(m("<td class='fc-header-left'/>").append(i(y.left))).append(m("<td class='fc-header-center'/>").append(i(y.center))).append(m("<td class='fc-header-right'/>").append(i(y.right))))}function h(){E.remove()}function i(y){if(y){var P=m("<tr/>");m.each(y.split(" "),
+function(ea){ea>0&&P.append("<td><span class='fc-header-space'/></td>");var S;m.each(this.split(","),function(M,X){if(X=="title"){P.append("<td><h2 class='fc-header-title'>&nbsp;</h2></td>");S&&S.addClass(K+"-corner-right");S=null}else{var ga;if(a[X])ga=a[X];else if(Ha[X])ga=function(){N.removeClass(K+"-state-hover");a.changeView(X)};if(ga){S&&S.addClass(K+"-no-right");var N;M=b.theme?Wa(b.buttonIcons,X):null;var U=Wa(b.buttonText,X);if(M)N=m("<div class='fc-button-"+X+" ui-state-default'><a><span class='ui-icon ui-icon-"+
+M+"'/></a></div>");else if(U)N=m("<div class='fc-button-"+X+" "+K+"-state-default'><a><span>"+U+"</span></a></div>");if(N){N.click(function(){N.hasClass(K+"-state-disabled")||ga()}).mousedown(function(){N.not("."+K+"-state-active").not("."+K+"-state-disabled").addClass(K+"-state-down")}).mouseup(function(){N.removeClass(K+"-state-down")}).hover(function(){N.not("."+K+"-state-active").not("."+K+"-state-disabled").addClass(K+"-state-hover")},function(){N.removeClass(K+"-state-hover").removeClass(K+
+"-state-down")}).appendTo(m("<td/>").appendTo(P));S?S.addClass(K+"-no-right"):N.addClass(K+"-corner-left");S=N}}}});S&&S.addClass(K+"-corner-right")});return m("<table/>").append(P)}}function s(y){E.find("h2.fc-header-title").html(y)}function j(y){E.find("div.fc-button-"+y).addClass(K+"-state-active")}function l(y){E.find("div.fc-button-"+y).removeClass(K+"-state-active")}function t(y){E.find("div.fc-button-"+y).addClass(K+"-state-disabled")}function w(y){E.find("div.fc-button-"+y).removeClass(K+
+"-state-disabled")}var H=this;H.render=f;H.destroy=h;H.updateTitle=s;H.activateButton=j;H.deactivateButton=l;H.disableButton=t;H.enableButton=w;var E=m([]),K}function Db(a,b){function f(c){b.push(c);l(c,N)}function h(c){b=m.grep(b,function(A){return A!=c});G=m.grep(G,function(A){return A.source!=c});N()}function i(c){G=[];s(b,c)}function s(c,A){function C($,fa){var Y=X();if(r!=Y)r.eventsDirty=true;if(I==U&&u<=Y.visStart&&B>=Y.visEnd){if(m.inArray($,b)!=-1){for(Y=0;Y<fa.length;Y++){S(fa[Y]);fa[Y].source=
+$}G=G.concat(fa)}--V||A&&A(G)}}var I=++U,V=c.length,r=X();u=x(r.visStart);B=x(r.visEnd);for(var p=0;p<c.length;p++)j(c[p],C)}function j(c,A){function C(r){A(c,r)}function I(r){C(r);ea()}if(typeof c=="string"){var V={};V[a.startParam]=Math.round(u.getTime()/1E3);V[a.endParam]=Math.round(B.getTime()/1E3);if(a.cacheParam)V[a.cacheParam]=(new Date).getTime();P();m.ajax({url:c,dataType:"json",data:V,cache:a.cacheParam||false,success:I})}else if(m.isFunction(c)){P();c(x(u),x(B),I)}else C(c)}function l(c,
+A){s([c],A)}function t(){i(N)}function w(){var c=X();return!u||c.visStart<u||c.visEnd>B}function H(c){var A,C=G.length,I,V=X().defaultEventEnd,r=c.start-c._start,p=c.end?c.end-(c._end||V(c)):0;for(A=0;A<C;A++){I=G[A];if(I._id==c._id&&I!=c){I.start=new Date(+I.start+r);I.end=c.end?I.end?new Date(+I.end+p):new Date(+V(I)+p):null;I.title=c.title;I.url=c.url;I.allDay=c.allDay;I.className=c.className;I.editable=c.editable;S(I)}}S(c);N()}function E(c,A){S(c);if(!c.source){if(A)(c.source=b[0]).push(c);G.push(c)}N()}
+function K(c){if(c){if(!m.isFunction(c)){var A=c+"";c=function(I){return I._id==A}}G=m.grep(G,c,true);for(C=0;C<b.length;C++)if(typeof b[C]=="object")b[C]=m.grep(b[C],c,true)}else{G=[];for(var C=0;C<b.length;C++)if(typeof b[C]=="object")b[C]=[]}N()}function y(c){if(m.isFunction(c))return m.grep(G,c);else if(c){c+="";return m.grep(G,function(A){return A._id==c})}return G}function P(){k++||ga("loading",null,true)}function ea(){--k||ga("loading",null,false)}function S(c){c._id=c._id||(c.id===Z?"_fc"+
+Eb++:c.id+"");if(c.date){if(!c.start)c.start=c.date;delete c.date}c._start=x(c.start=Xa(c.start,a.ignoreTimezone));c.end=Xa(c.end,a.ignoreTimezone);if(c.end&&c.end<=c.start)c.end=null;c._end=c.end?x(c.end):null;if(c.allDay===Z)c.allDay=a.allDayDefault;if(c.className){if(typeof c.className=="string")c.className=c.className.split(/\s+/)}else c.className=[]}var M=this;M.fetchEvents=i;M.refetchEvents=t;M.isFetchNeeded=w;M.addEventSource=f;M.removeEventSource=h;M.updateEvent=H;M.renderEvent=E;M.removeEvents=
+K;M.clientEvents=y;M.normalizeEvent=S;var X=M.getView,ga=M.trigger,N=M.rerenderEvents,U=0,u,B,G=[],k=0;b.unshift([])}function Fb(a,b){function f(l,t){if(t){Ua(l,t);l.setDate(1)}l=x(l,true);l.setDate(1);t=Ua(x(l),1);var w=x(l),H=x(t),E=i("firstDay"),K=i("weekends")?0:1;if(K){va(w);va(H,-1,true)}O(w,-((w.getDay()-Math.max(E,K)+7)%7));O(H,(7-H.getDay()+Math.max(E,K))%7);E=Math.round((H-w)/(ib*7));if(i("weekMode")=="fixed"){O(H,(6-E)*7);E=6}h.title=j(l,i("titleFormat"));h.start=l;h.end=t;h.visStart=w;
+h.visEnd=H;s(E,K?5:7,true)}var h=this;h.render=f;Ya.call(h,a,b,"month");var i=h.opt,s=h.renderBasic,j=b.formatDate}function Gb(a,b){function f(l,t){t&&O(l,t*7);l=O(x(l),-((l.getDay()-i("firstDay")+7)%7));t=O(x(l),7);var w=x(l),H=x(t),E=i("weekends");if(!E){va(w);va(H,-1,true)}h.title=j(w,O(x(H),-1),i("titleFormat"));h.start=l;h.end=t;h.visStart=w;h.visEnd=H;s(1,E?7:5,false)}var h=this;h.render=f;Ya.call(h,a,b,"basicWeek");var i=h.opt,s=h.renderBasic,j=b.formatDates}function Hb(a,b){function f(l,t){if(t){O(l,
+t);i("weekends")||va(l,t<0?-1:1)}h.title=j(l,i("titleFormat"));h.start=h.visStart=x(l,true);h.end=h.visEnd=O(x(h.start),1);s(1,1,false)}var h=this;h.render=f;Ya.call(h,a,b,"basicDay");var i=h.opt,s=h.renderBasic,j=b.formatDate}function Ya(a,b,f){function h(d,g,n){Y=d;ca=g;if(V=B("isRTL")){r=-1;p=ca-1}else{r=1;p=0}$=B("firstDay");fa=B("weekends")?0:1;var z=B("theme")?"ui":"fc",e=B("columnFormat"),q=u.start.getMonth(),J=Ia(new Date),R,Q=x(u.visStart);if(F){k();g=F.find("tr").length;if(Y<g)F.find("tr:gt("+
+(Y-1)+")").remove();else if(Y>g){d="";for(g=g;g<Y;g++){d+="<tr class='fc-week"+g+"'>";for(R=0;R<ca;R++){d+="<td class='fc-"+Da[Q.getDay()]+" "+z+"-state-default fc-new fc-day"+(g*ca+R)+(R==p?" fc-leftmost":"")+"'>"+(n?"<div class='fc-day-number'></div>":"")+"<div class='fc-day-content'><div style='position:relative'>&nbsp;</div></div></td>";O(Q,1);fa&&va(Q)}d+="</tr>"}F.append(d)}j(F.find("td.fc-new").removeClass("fc-new"));Q=x(u.visStart);F.find("td").each(function(){var ha=m(this);if(Y>1)Q.getMonth()==
+q?ha.removeClass("fc-other-month"):ha.addClass("fc-other-month");+Q==+J?ha.removeClass("fc-not-today").addClass("fc-today").addClass(z+"-state-highlight"):ha.addClass("fc-not-today").removeClass("fc-today").removeClass(z+"-state-highlight");ha.find("div.fc-day-number").text(Q.getDate());O(Q,1);fa&&va(Q)});if(Y==1){Q=x(u.visStart);ya.find("th").each(function(ha,da){m(da).text(I(Q,e));da.className=da.className.replace(/^fc-\w+(?= )/,"fc-"+Da[Q.getDay()]);O(Q,1);fa&&va(Q)});Q=x(u.visStart);F.find("td").each(function(ha,
+da){da.className=da.className.replace(/^fc-\w+(?= )/,"fc-"+Da[Q.getDay()]);O(Q,1);fa&&va(Q)})}}else{var ba=m("<table/>").appendTo(a);d="<thead><tr>";for(g=0;g<ca;g++){d+="<th class='fc-"+Da[Q.getDay()]+" "+z+"-state-default"+(g==p?" fc-leftmost":"")+"'>"+I(Q,e)+"</th>";O(Q,1);fa&&va(Q)}ya=m(d+"</tr></thead>").appendTo(ba);d="<tbody>";Q=x(u.visStart);for(g=0;g<Y;g++){d+="<tr class='fc-week"+g+"'>";for(R=0;R<ca;R++){d+="<td class='fc-"+Da[Q.getDay()]+" "+z+"-state-default fc-day"+(g*ca+R)+(R==p?" fc-leftmost":
+"")+(Y>1&&Q.getMonth()!=q?" fc-other-month":"")+(+Q==+J?" fc-today "+z+"-state-highlight":" fc-not-today")+"'>"+(n?"<div class='fc-day-number'>"+Q.getDate()+"</div>":"")+"<div class='fc-day-content'><div style='position:relative'>&nbsp;</div></div></td>";O(Q,1);fa&&va(Q)}d+="</tr>"}F=m(d+"</tbody>").appendTo(ba);j(F.find("td"));ka=m("<div style='position:absolute;z-index:8;top:0;left:0'/>").appendTo(a)}}function i(d){qa=d;d=F.find("tr td:first-child");var g=qa-ya.height(),n;if(B("weekMode")=="variable")n=
+g=Math.floor(g/(Y==1?2:6));else{n=Math.floor(g/Y);g=g-n*(Y-1)}if(Za===Z){var z=F.find("tr:first").find("td:first");z.height(n);Za=n!=z.height()}if(Za){d.slice(0,-1).height(n);d.slice(-1).height(g)}else{Qa(d.slice(0,-1),n);Qa(d.slice(-1),g)}}function s(d){ta=d;ua.clear();pa=Math.floor(ta/ca);Ka(ya.find("th").slice(0,-1),pa)}function j(d){d.click(l).mousedown(C)}function l(d){if(!B("selectable")){var g=parseInt(this.className.match(/fc\-day(\d+)/)[1]);g=O(x(u.visStart),Math.floor(g/ca)*7+g%ca);G("dayClick",
+this,g,true,d)}}function t(d,g,n){n&&ia.build();n=x(u.visStart);for(var z=O(x(n),ca),e=0;e<Y;e++){var q=new Date(Math.max(n,d)),J=new Date(Math.min(z,g));if(q<J){var R;if(V){R=Ea(J,n)*r+p+1;q=Ea(q,n)*r+p+1}else{R=Ea(q,n);q=Ea(J,n)}j(w(e,R,e,q-1))}O(n,7);O(z,7)}}function w(d,g,n,z){d=ia.rect(d,g,n,z,a);return c(d,a)}function H(d){return x(d)}function E(d,g){t(d,O(x(g),1),true)}function K(){A()}function y(d,g){ma.start(function(n){A();n&&w(n.row,n.col,n.row,n.col)},g)}function P(d,g,n){var z=ma.stop();
+A();if(z){z=ga(z);G("drop",d,z,true,g,n)}}function ea(d){return x(d.start)}function S(d){return ua.left(d)}function M(d){return ua.right(d)}function X(d){return(d-Math.max($,fa)+ca)%ca}function ga(d){return O(x(u.visStart),d.row*7+d.col*r+p)}function N(d){return F.find("tr:eq("+d+")")}function U(){return{left:0,right:ta}}var u=this;u.renderBasic=h;u.setHeight=i;u.setWidth=s;u.renderDayOverlay=t;u.defaultSelectionEnd=H;u.renderSelection=E;u.clearSelection=K;u.dragStart=y;u.dragStop=P;u.defaultEventEnd=
+ea;u.getHoverListener=function(){return ma};u.colContentLeft=S;u.colContentRight=M;u.dayOfWeekCol=X;u.cellDate=ga;u.cellIsAllDay=function(){return true};u.allDayTR=N;u.allDayBounds=U;u.getRowCnt=function(){return Y};u.getColCnt=function(){return ca};u.getColWidth=function(){return pa};u.getDaySegmentContainer=function(){return ka};jb.call(u,a,b,f);kb.call(u);lb.call(u);Ib.call(u);var B=u.opt,G=u.trigger,k=u.clearEvents,c=u.renderOverlay,A=u.clearOverlays,C=u.daySelectionMousedown,I=b.formatDate,V,
+r,p,$,fa,Y,ca,pa,ta,qa,ya,F,ka,ia,ma,ua;mb(a.addClass("fc-grid"));ia=new nb(function(d,g){var n,z,e,q=F.find("tr:first td");if(V)q=m(q.get().reverse());q.each(function(J,R){n=m(R);z=n.offset().left;if(J)e[1]=z;e=[z];g[J]=e});e[1]=z+n.outerWidth();F.find("tr").each(function(J,R){n=m(R);z=n.offset().top;if(J)e[1]=z;e=[z];d[J]=e});e[1]=z+n.outerHeight()});ma=new ob(ia);ua=new pb(function(d){return F.find("td:eq("+d+") div div")})}function Ib(){function a(G){w(B=G);U(h(G))}function b(G){f();U(h(B),G)}
+function f(){H();ea().empty()}function h(G){var k=ga(),c=N(),A=x(j.visStart);c=O(x(A),c);var C=m.map(G,Oa),I,V,r,p,$,fa,Y=[];for(I=0;I<k;I++){V=$a(ab(G,C,A,c));for(r=0;r<V.length;r++){p=V[r];for($=0;$<p.length;$++){fa=p[$];fa.row=I;fa.level=r;Y.push(fa)}}O(A,7);O(c,7)}return Y}function i(G,k,c){E(G,k);if(G.editable||G.editable===Z&&l("editable")){s(G,k);c.isEnd&&u(G,k)}}function s(G,k){if(!l("disableDragging")&&k.draggable){var c=S(),A;k.draggable({zIndex:9,delay:50,opacity:l("dragOpacity"),revertDuration:l("dragRevertDuration"),
+start:function(C,I){t("eventDragStart",k,G,C,I);y(G,k);c.start(function(V,r,p,$){k.draggable("option","revert",!V||!p&&!$);X();if(V){A=p*7+$*(l("isRTL")?-1:1);M(O(x(G.start),A),O(Oa(G),A))}else A=0},C,"drag")},stop:function(C,I){c.stop();X();t("eventDragStop",k,G,C,I);if(A){k.find("a").removeAttr("href");P(this,G,A,0,G.allDay,C,I)}else{m.browser.msie&&k.css("filter","");K(G,k)}}})}}var j=this;j.renderEvents=a;j.rerenderEvents=b;j.clearEvents=f;j.bindDaySeg=i;qb.call(j);var l=j.opt,t=j.trigger,w=j.reportEvents,
+H=j.clearEventData,E=j.eventElementHandlers,K=j.showEvents,y=j.hideEvents,P=j.eventDrop,ea=j.getDaySegmentContainer,S=j.getHoverListener,M=j.renderDayOverlay,X=j.clearOverlays,ga=j.getRowCnt,N=j.getColCnt,U=j.renderDaySegs,u=j.resizableDayEvent,B=[]}function Jb(a,b){function f(l,t){t&&O(l,t*7);l=O(x(l),-((l.getDay()-i("firstDay")+7)%7));t=O(x(l),7);var w=x(l),H=x(t),E=i("weekends");if(!E){va(w);va(H,-1,true)}h.title=j(w,O(x(H),-1),i("titleFormat"));h.start=l;h.end=t;h.visStart=w;h.visEnd=H;s(E?7:
+5)}var h=this;h.render=f;rb.call(h,a,b,"agendaWeek");var i=h.opt,s=h.renderAgenda,j=b.formatDates}function Kb(a,b){function f(l,t){if(t){O(l,t);i("weekends")||va(l,t<0?-1:1)}t=x(l,true);var w=O(x(t),1);h.title=j(l,i("titleFormat"));h.start=h.visStart=t;h.end=h.visEnd=w;s(1)}var h=this;h.render=f;rb.call(h,a,b,"agendaDay");var i=h.opt,s=h.renderAgenda,j=b.formatDate}function rb(a,b,f){function h(o){g=o;ba=p("theme")?"ui":"fc";da=p("weekends")?0:1;ha=p("firstDay");if(ra=p("isRTL")){aa=-1;T=g-1}else{aa=
+1;T=0}na=bb(p("minTime"));ja=bb(p("maxTime"));o=ra?O(x(r.visEnd),-1):x(r.visStart);var v=x(o),D=Ia(new Date),L=p("columnFormat");if(ka){fa();ka.find("tr:first th").slice(1,-1).each(function(La,xa){m(xa).text(F(v,L));xa.className=xa.className.replace(/^fc-\w+(?= )/,"fc-"+Da[v.getDay()]);O(v,aa);da&&va(v,aa)});v=x(o);d.find("td").each(function(La,xa){xa.className=xa.className.replace(/^fc-\w+(?= )/,"fc-"+Da[v.getDay()]);+v==+D?m(xa).removeClass("fc-not-today").addClass("fc-today").addClass(ba+"-state-highlight"):
+m(xa).addClass("fc-not-today").removeClass("fc-today").removeClass(ba+"-state-highlight");O(v,aa);da&&va(v,aa)})}else{var W,wa,Fa=p("slotMinutes")%15==0,oa="<div class='fc-agenda-head' style='position:relative;z-index:4'><table style='width:100%'><tr class='fc-first"+(p("allDaySlot")?"":" fc-last")+"'><th class='fc-leftmost "+ba+"-state-default'>&nbsp;</th>";for(W=0;W<g;W++){oa+="<th class='fc-"+Da[v.getDay()]+" "+ba+"-state-default'>"+F(v,L)+"</th>";O(v,aa);da&&va(v,aa)}oa+="<th class='"+ba+"-state-default'>&nbsp;</th></tr>";
+if(p("allDaySlot"))oa+="<tr class='fc-all-day'><th class='fc-axis fc-leftmost "+ba+"-state-default'>"+p("allDayText")+"</th><td colspan='"+g+"' class='"+ba+"-state-default'><div class='fc-day-content'><div style='position:relative'>&nbsp;</div></div></td><th class='"+ba+"-state-default'>&nbsp;</th></tr><tr class='fc-divider fc-last'><th colspan='"+(g+2)+"' class='"+ba+"-state-default fc-leftmost'><div/></th></tr>";oa+="</table></div>";ka=m(oa).appendTo(a);w(ka.find("td"));sb=m("<div style='position:absolute;z-index:8;top:0;left:0'/>").appendTo(ka);
+v=tb();var cb=sa(x(v),ja);sa(v,na);oa="<table>";for(W=0;v<cb;W++){wa=v.getMinutes();oa+="<tr class='"+(!W?"fc-first":!wa?"":"fc-minor")+"'><th class='fc-axis fc-leftmost "+ba+"-state-default'>"+(!Fa||!wa?F(v,p("axisFormat")):"&nbsp;")+"</th><td class='fc-slot"+W+" "+ba+"-state-default'><div style='position:relative'>&nbsp;</div></td></tr>";sa(v,p("slotMinutes"));n++}oa+="</table>";ia=m("<div class='fc-agenda-body' style='position:relative;z-index:2;overflow:auto'/>").append(ma=m("<div style='position:relative;overflow:hidden'>").append(ua=
+m(oa))).appendTo(a);H(ia.find("td"));ub=m("<div style='position:absolute;z-index:8;top:0;left:0'/>").appendTo(ma);v=x(o);oa="<div class='fc-agenda-bg' style='position:absolute;z-index:1'><table style='width:100%;height:100%'><tr class='fc-first'>";for(W=0;W<g;W++){oa+="<td class='fc-"+Da[v.getDay()]+" "+ba+"-state-default "+(!W?"fc-leftmost ":"")+(+v==+D?ba+"-state-highlight fc-today":"fc-not-today")+"'><div class='fc-day-content'><div>&nbsp;</div></div></td>";O(v,aa);da&&va(v,aa)}oa+="</tr></table></div>";
+d=m(oa).appendTo(a)}}function i(o,v){if(o===Z)o=R;R=o;Aa={};o=o-ka.height();o=Math.min(o,ua.height());ia.height(o);q=ia.find("tr:first div").height()+1;v&&j()}function s(o){J=o;Ba.clear();ia.width(o).css("overflow","auto");ua.width("");var v=ka.find("tr:first th"),D=ka.find("tr.fc-all-day th:last"),L=d.find("td"),W=ia[0].clientWidth;ua.width(W);W=ia[0].clientWidth;ua.width(W);z=0;Ka(ka.find("tr:lt(2) th:first").add(ia.find("tr:first th")).width(1).each(function(){z=Math.max(z,m(this).outerWidth())}),
+z);e=Math.floor((W-z)/g);Ka(L.slice(0,-1),e);Ka(v.slice(1,-2),e);if(o!=W){Ka(v.slice(-2,-1),W-z-e*(g-1));v.slice(-1).show();D.show()}else{ia.css("overflow","hidden");v.slice(-2,-1).width("");v.slice(-1).hide();D.hide()}d.css({top:ka.find("tr").height(),left:z,width:W-z,height:R})}function j(){var o=tb(),v=x(o);v.setHours(p("firstHour"));var D=X(o,v)+1;o=function(){ia.scrollTop(D)};o();setTimeout(o,0)}function l(){Q=ia.scrollTop()}function t(){ia.scrollTop(Q)}function w(o){o.click(E).mousedown(qa)}
+function H(o){o.click(E).mousedown(C)}function E(o){if(!p("selectable")){var v=Math.min(g-1,Math.floor((o.pageX-d.offset().left)/e));v=O(x(r.visStart),v*aa+T);var D=this.className.match(/fc-slot(\d+)/);if(D){D=parseInt(D[1])*p("slotMinutes");var L=Math.floor(D/60);v.setHours(L);v.setMinutes(D%60+na);$("dayClick",this,v,false,o)}else $("dayClick",this,v,true,o)}}function K(o,v,D){D&&la.build();var L=x(r.visStart);if(ra){D=Ea(v,L)*aa+T+1;o=Ea(o,L)*aa+T+1}else{D=Ea(o,L);o=Ea(v,L)}D=Math.max(0,D);o=Math.min(g,
+o);D<o&&w(y(0,D,0,o-1))}function y(o,v,D,L){o=la.rect(o,v,D,L,ka);return Y(o,ka)}function P(o,v){for(var D=x(r.visStart),L=O(x(D),1),W=0;W<g;W++){var wa=new Date(Math.max(D,o)),Fa=new Date(Math.min(L,v));if(wa<Fa){var oa=W*aa+T;oa=la.rect(0,oa,0,oa,ma);wa=X(D,wa);Fa=X(D,Fa);oa.top=wa;oa.height=Fa-wa;H(Y(oa,ma))}O(D,1);O(L,1)}}function ea(o){return z+Ba.left(o)}function S(o){return z+Ba.right(o)}function M(o){return(o-Math.max(ha,da)+g)%g*aa+T}function X(o,v){o=x(o,true);if(v<sa(x(o),na))return 0;
+if(v>=sa(x(o),ja))return ma.height();o=p("slotMinutes");v=v.getHours()*60+v.getMinutes()-na;var D=Math.floor(v/o),L=Aa[D];if(L===Z)L=Aa[D]=ia.find("tr:eq("+D+") td div")[0].offsetTop;return Math.max(0,Math.round(L-1+q*(v%o/o)))}function ga(o){var v=O(x(r.visStart),o.col*aa+T);o=o.row;p("allDaySlot")&&o--;o>=0&&sa(v,na+o*p("slotMinutes"));return v}function N(o){return p("allDaySlot")&&!o.row}function U(){return{left:z,right:J}}function u(){return ka.find("tr.fc-all-day")}function B(o){var v=x(o.start);
+if(o.allDay)return v;return sa(v,p("defaultEventMinutes"))}function G(o,v){if(v)return x(o);return sa(x(o),p("slotMinutes"))}function k(o,v,D){if(D)p("allDaySlot")&&K(o,O(x(v),1),true);else c(o,v)}function c(o,v){var D=p("selectHelper");la.build();if(D){var L=Ea(o,r.visStart)*aa+T;if(L>=0&&L<g){L=la.rect(0,L,0,L,ma);var W=X(o,o),wa=X(o,v);if(wa>W){L.top=W;L.height=wa-W;L.left+=2;L.width-=5;if(m.isFunction(D)){if(o=D(o,v)){L.position="absolute";L.zIndex=8;za=m(o).css(L).appendTo(ma)}}else{za=m(ya({title:"",
+start:o,end:v,className:[],editable:false},L,"fc-event fc-event-vert fc-corner-top fc-corner-bottom "));m.browser.msie&&za.find("span.fc-event-bg").hide();za.css("opacity",p("dragOpacity"))}if(za){H(za);ma.append(za);Ka(za,L.width,true);Qa(za,L.height,true)}}}}else P(o,v)}function A(){ca();if(za){za.remove();za=null}}function C(o){if(o.which==1&&p("selectable")){ta(o);var v=this,D;Ca.start(function(L,W){A();if(L&&L.col==W.col&&!N(L)){W=ga(W);L=ga(L);D=[W,sa(x(W),p("slotMinutes")),L,sa(x(L),p("slotMinutes"))].sort(vb);
+c(D[0],D[3])}else D=null},o);m(document).one("mouseup",function(L){Ca.stop();if(D){+D[0]==+D[1]&&$("dayClick",v,D[0],false,L);pa(D[0],D[3],false,L)}})}}function I(o,v){Ca.start(function(D){ca();if(D)if(N(D))y(D.row,D.col,D.row,D.col);else{D=ga(D);var L=sa(x(D),p("defaultEventMinutes"));P(D,L)}},v)}function V(o,v,D){var L=Ca.stop();ca();L&&$("drop",o,ga(L),N(L),v,D)}var r=this;r.renderAgenda=h;r.setWidth=s;r.setHeight=i;r.beforeHide=l;r.afterShow=t;r.defaultEventEnd=B;r.timePosition=X;r.dayOfWeekCol=
+M;r.cellDate=ga;r.cellIsAllDay=N;r.allDayTR=u;r.allDayBounds=U;r.getHoverListener=function(){return Ca};r.colContentLeft=ea;r.colContentRight=S;r.getDaySegmentContainer=function(){return sb};r.getSlotSegmentContainer=function(){return ub};r.getMinMinute=function(){return na};r.getMaxMinute=function(){return ja};r.getBodyContent=function(){return ma};r.getRowCnt=function(){return 1};r.getColCnt=function(){return g};r.getColWidth=function(){return e};r.getSlotHeight=function(){return q};r.defaultSelectionEnd=
+G;r.renderDayOverlay=K;r.renderSelection=k;r.clearSelection=A;r.dragStart=I;r.dragStop=V;jb.call(r,a,b,f);kb.call(r);lb.call(r);Lb.call(r);var p=r.opt,$=r.trigger,fa=r.clearEvents,Y=r.renderOverlay,ca=r.clearOverlays,pa=r.reportSelection,ta=r.unselect,qa=r.daySelectionMousedown,ya=r.slotSegHtml,F=b.formatDate,ka,ia,ma,ua,d,g,n=0,z,e,q,J,R,Q,ba,ha,da,ra,aa,T,na,ja,la,Ca,Ba,Aa={},za,sb,ub;mb(a.addClass("fc-agenda"));la=new nb(function(o,v){function D(xa){return Math.max(oa,Math.min(cb,xa))}var L,W,
+wa;d.find("td").each(function(xa,Mb){L=m(Mb);W=L.offset().left;if(xa)wa[1]=W;wa=[W];v[xa]=wa});wa[1]=W+L.outerWidth();if(p("allDaySlot")){L=ka.find("td");W=L.offset().top;o[0]=[W,W+L.outerHeight()]}for(var Fa=ma.offset().top,oa=ia.offset().top,cb=oa+ia.outerHeight(),La=0;La<n;La++)o.push([D(Fa+q*La),D(Fa+q*(La+1))])});Ca=new ob(la);Ba=new pb(function(o){return d.find("td:eq("+o+") div div")})}function Lb(){function a(d,g){M(ua=d);var n,z=d.length,e=[],q=[];for(n=0;n<z;n++)d[n].allDay?e.push(d[n]):
+q.push(d[n]);if(P("allDaySlot")){I(h(e),g);N()}j(i(q),g)}function b(d){f();a(ua,d)}function f(){X();U().empty();u().empty()}function h(d){d=$a(ab(d,m.map(d,Oa),y.visStart,y.visEnd));var g,n=d.length,z,e,q,J=[];for(g=0;g<n;g++){z=d[g];for(e=0;e<z.length;e++){q=z[e];q.row=0;q.level=g;J.push(q)}}return J}function i(d){var g=r(),n=k(),z=G(),e=sa(x(y.visStart),n),q=m.map(d,s),J,R,Q,ba,ha,da,ra=[];for(J=0;J<g;J++){R=$a(ab(d,q,e,sa(x(e),z-n)));Nb(R);for(Q=0;Q<R.length;Q++){ba=R[Q];for(ha=0;ha<ba.length;ha++){da=
+ba[ha];da.col=J;da.level=Q;ra.push(da)}}O(e,1,true)}return ra}function s(d){return d.end?x(d.end):sa(x(d.start),P("defaultEventMinutes"))}function j(d,g){var n,z=d.length,e,q,J,R,Q,ba,ha,da,ra,aa,T="",na,ja,la={},Ca={},Ba=u(),Aa;n=r();if(na=P("isRTL")){ja=-1;Aa=n-1}else{ja=1;Aa=0}for(n=0;n<z;n++){e=d[n];q=e.event;J="fc-event fc-event-vert ";if(e.isStart)J+="fc-corner-top ";if(e.isEnd)J+="fc-corner-bottom ";R=c(e.start,e.start);Q=c(e.start,e.end);ba=e.col;ha=e.level;da=e.forward||0;ra=A(ba*ja+Aa);
+aa=C(ba*ja+Aa)-ra;aa=Math.min(aa-6,aa*0.95);ba=ha?aa/(ha+da+1):da?(aa/(da+1)-6)*2:aa;ha=ra+aa/(ha+da+1)*ha*ja+(na?aa-ba:0);e.top=R;e.left=ha;e.outerWidth=ba;e.outerHeight=Q-R;T+=l(q,e,J)}Ba[0].innerHTML=T;na=Ba.children();for(n=0;n<z;n++){e=d[n];q=e.event;T=m(na[n]);ja=ea("eventRender",q,q,T);if(ja===false)T.remove();else{if(ja&&ja!==true){T.remove();T=m(ja).css({position:"absolute",top:e.top,left:e.left}).appendTo(Ba)}e.element=T;if(q._id===g)w(q,T,e);else T[0]._fci=n;Y(q,T)}}wb(Ba,d,w);for(n=0;n<
+z;n++){e=d[n];if(T=e.element){q=la[g=e.key=xb(T[0])];e.vsides=q===Z?(la[g]=Sa(T[0],true)):q;q=Ca[g];e.hsides=q===Z?(Ca[g]=db(T[0],true)):q;g=T.find("span.fc-event-title");if(g.length)e.titleTop=g[0].offsetTop}}for(n=0;n<z;n++){e=d[n];if(T=e.element){T[0].style.width=Math.max(0,e.outerWidth-e.hsides)+"px";la=Math.max(0,e.outerHeight-e.vsides);T[0].style.height=la+"px";q=e.event;if(e.titleTop!==Z&&la-e.titleTop<10){T.find("span.fc-event-time").text(ia(q.start,P("timeFormat"))+" - "+q.title);T.find("span.fc-event-title").remove()}ea("eventAfterRender",
+q,q,T)}}}function l(d,g,n){return"<div class='"+n+d.className.join(" ")+"' style='position:absolute;z-index:8;top:"+g.top+"px;left:"+g.left+"px'><a"+(d.url?" href='"+Ma(d.url)+"'":"")+"><span class='fc-event-bg'></span><span class='fc-event-time'>"+Ma(ma(d.start,d.end,P("timeFormat")))+"</span><span class='fc-event-title'>"+Ma(d.title)+"</span></a>"+((d.editable||d.editable===Z&&P("editable"))&&!P("disableResizing")&&m.fn.resizable?"<div class='ui-resizable-handle ui-resizable-s'>=</div>":"")+"</div>"}
+function t(d,g,n){ga(d,g);if(d.editable||d.editable===Z&&P("editable")){H(d,g,n.isStart);n.isEnd&&V(d,g,p())}}function w(d,g,n){ga(d,g);if(d.editable||d.editable===Z&&P("editable")){var z=g.find("span.fc-event-time");E(d,g,z);n.isEnd&&K(d,g,z)}}function H(d,g,n){if(!P("disableDragging")&&g.draggable){var z,e=true,q,J=P("isRTL")?-1:1,R=B(),Q=p(),ba=$(),ha=k();g.draggable({zIndex:9,opacity:P("dragOpacity","month"),revertDuration:P("dragRevertDuration"),start:function(ra,aa){ea("eventDragStart",g,d,
+ra,aa);pa(d,g);z=g.width();R.start(function(T,na,ja,la){g.draggable("option","revert",!T||!ja&&!la);F();if(T){q=la*J;if(T.row){if(n&&e){Qa(g.width(Q-10),ba*Math.round((d.end?(d.end-d.start)/Ob:P("defaultEventMinutes"))/P("slotMinutes")));g.draggable("option","grid",[Q,1]);e=false}}else{ya(O(x(d.start),q),O(Oa(d),q));da()}}},ra,"drag")},stop:function(ra,aa){var T=R.stop();F();ea("eventDragStop",g,d,ra,aa);if(T&&(!e||q)){g.find("a").removeAttr("href");T=0;e||(T=Math.round((g.offset().top-fa().offset().top)/
+ba)*P("slotMinutes")+ha-(d.start.getHours()*60+d.start.getMinutes()));ta(this,d,q,T,e,ra,aa)}else{da();m.browser.msie&&g.css("filter","");ca(d,g)}}});function da(){if(!e){g.width(z).height("").draggable("option","grid",null);e=true}}}}function E(d,g,n){if(!P("disableDragging")&&g.draggable){var z,e=false,q,J,R,Q=P("isRTL")?-1:1,ba=B(),ha=r(),da=p(),ra=$();g.draggable({zIndex:9,scroll:false,grid:[da,ra],axis:ha==1?"y":false,opacity:P("dragOpacity"),revertDuration:P("dragRevertDuration"),start:function(na,
+ja){ea("eventDragStart",g,d,na,ja);pa(d,g);m.browser.msie&&g.find("span.fc-event-bg").hide();z=g.position();J=R=0;ba.start(function(la,Ca,Ba,Aa){g.draggable("option","revert",!la);F();if(la){q=Aa*Q;if(P("allDaySlot")&&!la.row){if(!e){e=true;n.hide();g.draggable("option","grid",null)}ya(O(x(d.start),q),O(Oa(d),q))}else T()}},na,"drag")},drag:function(na,ja){J=Math.round((ja.position.top-z.top)/ra)*P("slotMinutes");if(J!=R){e||aa(J);R=J}},stop:function(na,ja){var la=ba.stop();F();ea("eventDragStop",
+g,d,na,ja);if(la&&(q||J||e))ta(this,d,q,e?0:J,e,na,ja);else{T();g.css(z);aa(0);m.browser.msie&&g.css("filter","").find("span.fc-event-bg").css("display","");ca(d,g)}}});function aa(na){var ja=sa(x(d.start),na),la;if(d.end)la=sa(x(d.end),na);n.text(ma(ja,la,P("timeFormat")))}function T(){if(e){n.css("display","");g.draggable("option","grid",[da,ra]);e=false}}}}function K(d,g,n){if(!P("disableResizing")&&g.resizable){var z,e,q=$();g.resizable({handles:{s:"div.ui-resizable-s"},grid:q,start:function(J,
+R){z=e=0;pa(d,g);m.browser.msie&&m.browser.version=="6.0"&&g.css("overflow","hidden");g.css("z-index",9);ea("eventResizeStart",this,d,J,R)},resize:function(J,R){z=Math.round((Math.max(q,g.height())-R.originalSize.height)/q);if(z!=e){n.text(ma(d.start,!z&&!d.end?null:sa(S(d),P("slotMinutes")*z),P("timeFormat")));e=z}},stop:function(J,R){ea("eventResizeStop",this,d,J,R);if(z)qa(this,d,0,P("slotMinutes")*z,J,R);else{g.css("z-index",8);ca(d,g)}}})}}var y=this;y.renderEvents=a;y.rerenderEvents=b;y.clearEvents=
+f;y.slotSegHtml=l;y.bindDaySeg=t;qb.call(y);var P=y.opt,ea=y.trigger,S=y.eventEnd,M=y.reportEvents,X=y.clearEventData,ga=y.eventElementHandlers,N=y.setHeight,U=y.getDaySegmentContainer,u=y.getSlotSegmentContainer,B=y.getHoverListener,G=y.getMaxMinute,k=y.getMinMinute,c=y.timePosition,A=y.colContentLeft,C=y.colContentRight,I=y.renderDaySegs,V=y.resizableDayEvent,r=y.getColCnt,p=y.getColWidth,$=y.getSlotHeight,fa=y.getBodyContent,Y=y.reportEventElement,ca=y.showEvents,pa=y.hideEvents,ta=y.eventDrop,
+qa=y.eventResize,ya=y.renderDayOverlay,F=y.clearOverlays,ka=y.calendar,ia=ka.formatDate,ma=ka.formatDates,ua=[]}function Nb(a){var b,f,h,i,s,j;for(b=a.length-1;b>0;b--){i=a[b];for(f=0;f<i.length;f++){s=i[f];for(h=0;h<a[b-1].length;h++){j=a[b-1][h];if(yb(s,j))j.forward=Math.max(j.forward||0,(s.forward||0)+1)}}}}function jb(a,b,f){function h(k,c){k=G[k];if(typeof k=="object")return Wa(k,c||f);return k}function i(k,c){return b.trigger.apply(b,[k,c||M].concat(Array.prototype.slice.call(arguments,2),[M]))}
+function s(k){U={};var c,A=k.length,C;for(c=0;c<A;c++){C=k[c];if(U[C._id])U[C._id].push(C);else U[C._id]=[C]}}function j(){u=[];B={}}function l(k){return k.end?x(k.end):X(k)}function t(k,c){u.push(c);if(B[k._id])B[k._id].push(c);else B[k._id]=[c]}function w(k,c){c.click(function(A){if(!c.hasClass("ui-draggable-dragging")&&!c.hasClass("ui-resizable-resizing"))return i("eventClick",this,k,A)}).hover(function(A){i("eventMouseover",this,k,A)},function(A){i("eventMouseout",this,k,A)})}function H(k,c){K(k,
+c,"show")}function E(k,c){K(k,c,"hide")}function K(k,c,A){k=B[k._id];var C,I=k.length;for(C=0;C<I;C++)k[C][0]!=c[0]&&k[C][A]()}function y(k,c,A,C,I,V,r){var p=c.allDay,$=c._id;ea(U[$],A,C,I);i("eventDrop",k,c,A,C,I,function(){ea(U[$],-A,-C,p);N()},V,r);M.eventsChanged=true;N($)}function P(k,c,A,C,I,V){var r=c._id;S(U[r],A,C);i("eventResize",k,c,A,C,function(){S(U[r],-A,-C);N()},I,V);M.eventsChanged=true;N(r)}function ea(k,c,A,C){A=A||0;for(var I,V=k.length,r=0;r<V;r++){I=k[r];if(C!==Z)I.allDay=C;
+sa(O(I.start,c,true),A);if(I.end)I.end=sa(O(I.end,c,true),A);ga(I,G)}}function S(k,c,A){A=A||0;for(var C,I=k.length,V=0;V<I;V++){C=k[V];C.end=sa(O(l(C),c,true),A);ga(C,G)}}var M=this;M.element=a;M.calendar=b;M.name=f;M.opt=h;M.trigger=i;M.reportEvents=s;M.clearEventData=j;M.eventEnd=l;M.reportEventElement=t;M.eventElementHandlers=w;M.showEvents=H;M.hideEvents=E;M.eventDrop=y;M.eventResize=P;var X=M.defaultEventEnd,ga=b.normalizeEvent,N=b.rerenderEvents,U={},u=[],B={},G=b.options}function qb(){function a(N,
+U){var u=h("isRTL"),B,G=N.length,k,c,A,C,I,V="",r,p={},$={},fa=[],Y=[];B=y();r=B.left;var ca=B.right,pa=w();H();var ta=M();for(B=0;B<G;B++){k=N[B];c=k.event;A="fc-event fc-event-hori ";if(u){if(k.isStart)A+="fc-corner-right ";if(k.isEnd)A+="fc-corner-left ";C=k.isEnd?P(S(k.end.getDay()-1)):r;I=k.isStart?ea(S(k.start.getDay())):ca}else{if(k.isStart)A+="fc-corner-left ";if(k.isEnd)A+="fc-corner-right ";C=k.isStart?P(S(k.start.getDay())):r;I=k.isEnd?ea(S(k.end.getDay()-1)):ca}V+="<div class='"+A+c.className.join(" ")+
+"' style='position:absolute;z-index:8;left:"+C+"px'><a"+(c.url?" href='"+Ma(c.url)+"'":"")+">"+(!c.allDay&&k.isStart?"<span class='fc-event-time'>"+Ma(ga(c.start,c.end,h("timeFormat")))+"</span>":"")+"<span class='fc-event-title'>"+Ma(c.title)+"</span></a>"+((c.editable||c.editable===Z&&h("editable"))&&!h("disableResizing")&&m.fn.resizable?"<div class='ui-resizable-handle ui-resizable-"+(u?"w":"e")+"'></div>":"")+"</div>";k.left=C;k.outerWidth=I-C}ta[0].innerHTML=V;V=ta.children();for(B=0;B<G;B++){k=
+N[B];u=m(V[B]);c=k.event;r=i("eventRender",c,c,u);if(r===false)u.remove();else{if(r&&r!==true){u.remove();u=m(r).css({position:"absolute",left:k.left}).appendTo(ta)}k.element=u;if(c._id===U)X(c,u,k);else u[0]._fci=B;s(c,u)}}wb(ta,N,X);for(B=0;B<G;B++){k=N[B];if(u=k.element){c=p[U=k.key=xb(u[0])];k.hsides=c===Z?(p[U]=db(u[0],true)):c}}for(B=0;B<G;B++){k=N[B];if(u=k.element)u[0].style.width=Math.max(0,k.outerWidth-k.hsides)+"px"}for(B=0;B<G;B++){k=N[B];if(u=k.element){c=$[U=k.key];k.outerHeight=u[0].offsetHeight+
+(c===Z?($[U]=zb(u[0])):c)}}for(p=B=0;p<pa;p++){for($=U=c=0;B<G&&(k=N[B]).row==p;){if(k.level!=U){$+=c;c=0;U++}c=Math.max(c,k.outerHeight||0);k.top=$;B++}fa[p]=K(p).find("td:first div.fc-day-content > div").height($+c)}for(p=0;p<pa;p++)Y[p]=fa[p][0].offsetTop;for(B=0;B<G;B++){k=N[B];if(u=k.element){u[0].style.top=Y[k.row]+k.top+"px";c=k.event;i("eventAfterRender",c,c,u)}}}function b(N,U){if(!h("disableResizing")&&U.resizable){var u=E();U.resizable({handles:h("isRTL")?{w:"div.ui-resizable-w"}:{e:"div.ui-resizable-e"},
+grid:u,minWidth:u/2,containment:f.element.parent().parent(),start:function(B,G){U.css("z-index",9);l(N,U);i("eventResizeStart",this,N,B,G)},stop:function(B,G){i("eventResizeStop",this,N,B,G);var k=Math.round((U.width()-G.originalSize.width)/u);if(k)t(this,N,k,0,B,G);else{U.css("z-index",8);j(N,U)}}})}}var f=this;f.renderDaySegs=a;f.resizableDayEvent=b;var h=f.opt,i=f.trigger,s=f.reportEventElement,j=f.showEvents,l=f.hideEvents,t=f.eventResize,w=f.getRowCnt,H=f.getColCnt,E=f.getColWidth,K=f.allDayTR,
+y=f.allDayBounds,P=f.colContentLeft,ea=f.colContentRight,S=f.dayOfWeekCol,M=f.getDaySegmentContainer,X=f.bindDaySeg,ga=f.calendar.formatDates}function lb(){function a(E,K,y){b();K||(K=l(E,y));t(E,K,y);f(E,K,y)}function b(E){if(H){H=false;w();j("unselect",null,E)}}function f(E,K,y,P){H=true;j("select",null,E,K,y,P)}function h(E){var K=i.cellDate,y=i.cellIsAllDay,P=i.getHoverListener();if(E.which==1&&s("selectable")){b(E);var ea=this,S;P.start(function(M,X){w();if(M&&y(M)){S=[K(X),K(M)].sort(vb);t(S[0],
+S[1],true)}else S=null},E);m(document).one("mouseup",function(M){P.stop();if(S){+S[0]==+S[1]&&j("dayClick",ea,S[0],true,M);f(S[0],S[1],true,M)}})}}var i=this;i.select=a;i.unselect=b;i.reportSelection=f;i.daySelectionMousedown=h;var s=i.opt,j=i.trigger,l=i.defaultSelectionEnd,t=i.renderSelection,w=i.clearSelection,H=false;s("selectable")&&s("unselectAuto")&&m(document).mousedown(function(E){var K=s("unselectCancel");if(K)if(m(E.target).parents(K).length)return;b(E)})}function kb(){function a(s,j){var l=
+i.shift();l||(l=m("<div class='fc-cell-overlay' style='position:absolute;z-index:3'/>"));l[0].parentNode!=j[0]&&l.appendTo(j);h.push(l.css(s).show());return l}function b(){for(var s;s=h.shift();)i.push(s.hide().unbind())}var f=this;f.renderOverlay=a;f.clearOverlays=b;var h=[],i=[]}function nb(a){var b=this,f,h;b.build=function(){f=[];h=[];a(f,h)};b.cell=function(i,s){var j=f.length,l=h.length,t,w=-1,H=-1;for(t=0;t<j;t++)if(s>=f[t][0]&&s<f[t][1]){w=t;break}for(t=0;t<l;t++)if(i>=h[t][0]&&i<h[t][1]){H=
+t;break}return w>=0&&H>=0?{row:w,col:H}:null};b.rect=function(i,s,j,l,t){t=t.offset();return{top:f[i][0]-t.top,left:h[s][0]-t.left,width:h[l][1]-h[s][0],height:f[j][1]-f[i][0]}}}function ob(a){function b(l){l=a.cell(l.pageX,l.pageY);if(!l!=!j||l&&(l.row!=j.row||l.col!=j.col)){if(l){s||(s=l);i(l,s,l.row-s.row,l.col-s.col)}else i(l,s);j=l}}var f=this,h,i,s,j;f.start=function(l,t,w){i=l;s=j=null;a.build();b(t);h=w||"mousemove";m(document).bind(h,b)};f.stop=function(){m(document).unbind(h,b);return j}}
+function pb(a){function b(j){return h[j]=h[j]||a(j)}var f=this,h={},i={},s={};f.left=function(j){return i[j]=i[j]===Z?b(j).position().left:i[j]};f.right=function(j){return s[j]=s[j]===Z?f.left(j)+b(j).width():s[j]};f.clear=function(){h={};i={};s={}}}function Ta(a,b,f){a.setFullYear(a.getFullYear()+b);f||Ia(a);return a}function Ua(a,b,f){if(+a){b=a.getMonth()+b;var h=x(a);h.setDate(1);h.setMonth(b);a.setMonth(b);for(f||Ia(a);a.getMonth()!=h.getMonth();)a.setDate(a.getDate()+(a<h?1:-1))}return a}function O(a,
+b,f){if(+a){b=a.getDate()+b;var h=x(a);h.setHours(9);h.setDate(b);a.setDate(b);f||Ia(a);eb(a,h)}return a}function eb(a,b){if(+a)for(;a.getDate()!=b.getDate();)a.setTime(+a+(a<b?1:-1)*Pb)}function sa(a,b){a.setMinutes(a.getMinutes()+b);return a}function Ia(a){a.setHours(0);a.setMinutes(0);a.setSeconds(0);a.setMilliseconds(0);return a}function x(a,b){if(b)return Ia(new Date(+a));return new Date(+a)}function tb(){var a=0,b;do b=new Date(1970,a++,1);while(b.getHours());return b}function va(a,b,f){for(b=
+b||1;!a.getDay()||f&&a.getDay()==1||!f&&a.getDay()==6;)O(a,b);return a}function Ea(a,b){return Math.round((x(a,true)-x(b,true))/ib)}function hb(a,b,f,h){if(b!==Z&&b!=a.getFullYear()){a.setDate(1);a.setMonth(0);a.setFullYear(b)}if(f!==Z&&f!=a.getMonth()){a.setDate(1);a.setMonth(f)}h!==Z&&a.setDate(h)}function Xa(a,b){if(typeof a=="object")return a;if(typeof a=="number")return new Date(a*1E3);if(typeof a=="string"){if(a.match(/^\d+$/))return new Date(parseInt(a)*1E3);if(b===Z)b=true;return Ab(a,b)||
+(a?new Date(a):null)}return null}function Ab(a,b){a=a.match(/^([0-9]{4})(-([0-9]{2})(-([0-9]{2})([T ]([0-9]{2}):([0-9]{2})(:([0-9]{2})(\.([0-9]+))?)?(Z|(([-+])([0-9]{2}):([0-9]{2})))?)?)?)?$/);if(!a)return null;var f=new Date(a[1],0,1);if(b||!a[14]){b=new Date(a[1],0,1,9,0);if(a[3]){f.setMonth(a[3]-1);b.setMonth(a[3]-1)}if(a[5]){f.setDate(a[5]);b.setDate(a[5])}eb(f,b);a[7]&&f.setHours(a[7]);a[8]&&f.setMinutes(a[8]);a[10]&&f.setSeconds(a[10]);a[12]&&f.setMilliseconds(Number("0."+a[12])*1E3);eb(f,b)}else{f.setUTCFullYear(a[1],
+a[3]?a[3]-1:0,a[5]||1);f.setUTCHours(a[7]||0,a[8]||0,a[10]||0,a[12]?Number("0."+a[12])*1E3:0);b=Number(a[16])*60+Number(a[17]);b*=a[15]=="-"?1:-1;f=new Date(+f+b*60*1E3)}return f}function bb(a){if(typeof a=="number")return a*60;if(typeof a=="object")return a.getHours()*60+a.getMinutes();if(a=a.match(/(\d+)(?::(\d+))?\s*(\w+)?/)){var b=parseInt(a[1]);if(a[3]){b%=12;if(a[3].toLowerCase().charAt(0)=="p")b+=12}return b*60+(a[2]?parseInt(a[2]):0)}}function Ja(a,b,f){return Va(a,null,b,f)}function Va(a,
+b,f,h){h=h||Pa;var i=a,s=b,j,l=f.length,t,w,H,E="";for(j=0;j<l;j++){t=f.charAt(j);if(t=="'")for(w=j+1;w<l;w++){if(f.charAt(w)=="'"){if(i){E+=w==j+1?"'":f.substring(j+1,w);j=w}break}}else if(t=="(")for(w=j+1;w<l;w++){if(f.charAt(w)==")"){j=Ja(i,f.substring(j+1,w),h);if(parseInt(j.replace(/\D/,"")))E+=j;j=w;break}}else if(t=="[")for(w=j+1;w<l;w++){if(f.charAt(w)=="]"){t=f.substring(j+1,w);j=Ja(i,t,h);if(j!=Ja(s,t,h))E+=j;j=w;break}}else if(t=="{"){i=b;s=a}else if(t=="}"){i=a;s=b}else{for(w=l;w>j;w--)if(H=
+Qb[f.substring(j,w)]){if(i)E+=H(i,h);j=w-1;break}if(w==j)if(i)E+=t}}return E}function Oa(a){return a.end?Rb(a.end,a.allDay):O(x(a.start),1)}function Rb(a,b){a=x(a);return b||a.getHours()||a.getMinutes()?O(a,1):Ia(a)}function Sb(a,b){return(b.msLength-a.msLength)*100+(a.event.start-b.event.start)}function yb(a,b){return a.end>b.start&&a.start<b.end}function ab(a,b,f,h){var i=[],s,j=a.length,l,t,w,H,E;for(s=0;s<j;s++){l=a[s];t=l.start;w=b[s];if(w>f&&t<h){if(t<f){t=x(f);H=false}else{t=t;H=true}if(w>
+h){w=x(h);E=false}else{w=w;E=true}i.push({event:l,start:t,end:w,isStart:H,isEnd:E,msLength:w-t})}}return i.sort(Sb)}function $a(a){var b=[],f,h=a.length,i,s,j,l;for(f=0;f<h;f++){i=a[f];for(s=0;;){j=false;if(b[s])for(l=0;l<b[s].length;l++)if(yb(b[s][l],i)){j=true;break}if(j)s++;else break}if(b[s])b[s].push(i);else b[s]=[i]}return b}function wb(a,b,f){a.unbind("mouseover").mouseover(function(h){for(var i=h.target,s;i!=this;){s=i;i=i.parentNode}if((i=s._fci)!==Z){s._fci=Z;s=b[i];f(s.event,s.element,
+s);m(h.target).trigger(h)}h.stopPropagation()})}function Ka(a,b,f){a.each(function(h,i){i.style.width=Math.max(0,b-db(i,f))+"px"})}function Qa(a,b,f){a.each(function(h,i){i.style.height=Math.max(0,b-Sa(i,f))+"px"})}function db(a,b){return(parseFloat(m.curCSS(a,"paddingLeft",true))||0)+(parseFloat(m.curCSS(a,"paddingRight",true))||0)+(parseFloat(m.curCSS(a,"borderLeftWidth",true))||0)+(parseFloat(m.curCSS(a,"borderRightWidth",true))||0)+(b?Tb(a):0)}function Tb(a){return(parseFloat(m.curCSS(a,"marginLeft",
+true))||0)+(parseFloat(m.curCSS(a,"marginRight",true))||0)}function Sa(a,b){return(parseFloat(m.curCSS(a,"paddingTop",true))||0)+(parseFloat(m.curCSS(a,"paddingBottom",true))||0)+(parseFloat(m.curCSS(a,"borderTopWidth",true))||0)+(parseFloat(m.curCSS(a,"borderBottomWidth",true))||0)+(b?zb(a):0)}function zb(a){return(parseFloat(m.curCSS(a,"marginTop",true))||0)+(parseFloat(m.curCSS(a,"marginBottom",true))||0)}function Ra(a,b){b=typeof b=="number"?b+"px":b;a[0].style.cssText+=";min-height:"+b+";_height:"+
+b}function gb(){}function vb(a,b){return a-b}function Na(a){return(a<10?"0":"")+a}function Wa(a,b){if(a[b]!==Z)return a[b];b=b.split(/(?=[A-Z])/);for(var f=b.length-1,h;f>=0;f--){h=a[b[f].toLowerCase()];if(h!==Z)return h}return a[""]}function Ma(a){return a.replace(/&/g,"&amp;").replace(/</g,"&lt;").replace(/>/g,"&gt;").replace(/'/g,"&#039;").replace(/"/g,"&quot;").replace(/\n/g,"<br />")}function xb(a){return a.id+"/"+a.className+"/"+a.style.cssText.replace(/(^|;)\s*(top|left|width|height)\s*:[^;]*/ig,
+"")}function mb(a){a.attr("unselectable","on").css("MozUserSelect","none").bind("selectstart.ui",function(){return false})}var Pa={defaultView:"month",aspectRatio:1.35,header:{left:"title",center:"",right:"today prev,next"},weekends:true,allDayDefault:true,ignoreTimezone:true,lazyFetching:true,startParam:"start",endParam:"end",titleFormat:{month:"MMMM yyyy",week:"MMM d[ yyyy]{ '&#8212;'[ MMM] d yyyy}",day:"dddd, MMM d, yyyy"},columnFormat:{month:"ddd",week:"ddd M/d",day:"dddd M/d"},timeFormat:{"":"h(:mm)t"},
+isRTL:false,firstDay:0,monthNames:["January","February","March","April","May","June","July","August","September","October","November","December"],monthNamesShort:["Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec"],dayNames:["Sunday","Monday","Tuesday","Wednesday","Thursday","Friday","Saturday"],dayNamesShort:["Sun","Mon","Tue","Wed","Thu","Fri","Sat"],buttonText:{prev:"&nbsp;&#9668;&nbsp;",next:"&nbsp;&#9658;&nbsp;",prevYear:"&nbsp;&lt;&lt;&nbsp;",nextYear:"&nbsp;&gt;&gt;&nbsp;",
+today:"today",month:"month",week:"week",day:"day"},theme:false,buttonIcons:{prev:"circle-triangle-w",next:"circle-triangle-e"},unselectAuto:true,dropAccept:"*"},Ub={header:{left:"next,prev today",center:"",right:"title"},buttonText:{prev:"&nbsp;&#9658;&nbsp;",next:"&nbsp;&#9668;&nbsp;",prevYear:"&nbsp;&gt;&gt;&nbsp;",nextYear:"&nbsp;&lt;&lt;&nbsp;"},buttonIcons:{prev:"circle-triangle-e",next:"circle-triangle-w"}},Ga=m.fullCalendar={version:"1.4.8"},Ha=Ga.views={};m.fn.fullCalendar=function(a){if(typeof a==
+"string"){var b=Array.prototype.slice.call(arguments,1),f;this.each(function(){var i=m.data(this,"fullCalendar");if(i&&m.isFunction(i[a])){i=i[a].apply(i,b);if(f===Z)f=i;a=="destroy"&&m.removeData(this,"fullCalendar")}});if(f!==Z)return f;return this}var h=a.eventSources||[];delete a.eventSources;if(a.events){h.push(a.events);delete a.events}a=m.extend(true,{},Pa,a.isRTL||a.isRTL===Z&&Pa.isRTL?Ub:{},a);this.each(function(i,s){i=m(s);s=new Bb(i,a,h);i.data("fullCalendar",s);s.render()});return this};
+var Eb=1;Ha.month=Fb;Ha.basicWeek=Gb;Ha.basicDay=Hb;var Za;fb({weekMode:"fixed"});Ha.agendaWeek=Jb;Ha.agendaDay=Kb;fb({allDaySlot:true,allDayText:"all-day",firstHour:6,slotMinutes:30,defaultEventMinutes:120,axisFormat:"h(:mm)tt",timeFormat:{agenda:"h:mm{ - h:mm}"},dragOpacity:{agenda:0.5},minTime:0,maxTime:24});Ga.addDays=O;Ga.cloneDate=x;Ga.parseDate=Xa;Ga.parseISO8601=Ab;Ga.parseTime=bb;Ga.formatDate=Ja;Ga.formatDates=Va;var Da=["sun","mon","tue","wed","thu","fri","sat"],ib=864E5,Pb=36E5,Ob=6E4,
+Qb={s:function(a){return a.getSeconds()},ss:function(a){return Na(a.getSeconds())},m:function(a){return a.getMinutes()},mm:function(a){return Na(a.getMinutes())},h:function(a){return a.getHours()%12||12},hh:function(a){return Na(a.getHours()%12||12)},H:function(a){return a.getHours()},HH:function(a){return Na(a.getHours())},d:function(a){return a.getDate()},dd:function(a){return Na(a.getDate())},ddd:function(a,b){return b.dayNamesShort[a.getDay()]},dddd:function(a,b){return b.dayNames[a.getDay()]},
+M:function(a){return a.getMonth()+1},MM:function(a){return Na(a.getMonth()+1)},MMM:function(a,b){return b.monthNamesShort[a.getMonth()]},MMMM:function(a,b){return b.monthNames[a.getMonth()]},yy:function(a){return(a.getFullYear()+"").substring(2)},yyyy:function(a){return a.getFullYear()},t:function(a){return a.getHours()<12?"a":"p"},tt:function(a){return a.getHours()<12?"am":"pm"},T:function(a){return a.getHours()<12?"A":"P"},TT:function(a){return a.getHours()<12?"AM":"PM"},u:function(a){return Ja(a,
+"yyyy-MM-dd'T'HH:mm:ss'Z'")},S:function(a){a=a.getDate();if(a>10&&a<20)return"th";return["st","nd","rd"][a%10-1]||"th"}}})(jQuery);
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/jquery.ui.datepicker-de.js	Wed Apr 27 09:54:22 2011 +0200
@@ -0,0 +1,23 @@
+/* German initialisation for the jQuery UI date picker plugin. */
+/* Written by Milian Wolff (mail@milianw.de). */
+jQuery(function($){
+	$.datepicker.regional['de'] = {
+		closeText: 'schließen',
+		prevText: '&#x3c;zurück',
+		nextText: 'Vor&#x3e;',
+		currentText: 'heute',
+		monthNames: ['Januar','Februar','März','April','Mai','Juni',
+		'Juli','August','September','Oktober','November','Dezember'],
+		monthNamesShort: ['Jan','Feb','Mär','Apr','Mai','Jun',
+		'Jul','Aug','Sep','Okt','Nov','Dez'],
+		dayNames: ['Sonntag','Montag','Dienstag','Mittwoch','Donnerstag','Freitag','Samstag'],
+		dayNamesShort: ['So','Mo','Di','Mi','Do','Fr','Sa'],
+		dayNamesMin: ['So','Mo','Di','Mi','Do','Fr','Sa'],
+		weekHeader: 'Wo',
+		dateFormat: 'dd.mm.yy',
+		firstDay: 1,
+		isRTL: false,
+		showMonthAfterYear: false,
+		yearSuffix: ''};
+	$.datepicker.setDefaults($.datepicker.regional['de']);
+});
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/jquery.ui.datepicker-es.js	Wed Apr 27 09:54:22 2011 +0200
@@ -0,0 +1,23 @@
+/* Spanish initialisation for the jQuery UI date picker plugin. */
+/* Translated by Vester (xvester@gmail.com). */
+jQuery(function($){
+	$.datepicker.regional['es'] = {
+		closeText: 'Cerrar',
+		prevText: '&#x3c;Ant',
+		nextText: 'Sig&#x3e;',
+		currentText: 'Hoy',
+		monthNames: ['Enero','Febrero','Marzo','Abril','Mayo','Junio',
+		'Julio','Agosto','Septiembre','Octubre','Noviembre','Diciembre'],
+		monthNamesShort: ['Ene','Feb','Mar','Abr','May','Jun',
+		'Jul','Ago','Sep','Oct','Nov','Dic'],
+		dayNames: ['Domingo','Lunes','Martes','Miércoles','Jueves','Viernes','Sábado'],
+		dayNamesShort: ['Dom','Lun','Mar','Mié','Jue','Vie','Sáb'],
+		dayNamesMin: ['Do','Lu','Ma','Mi','Ju','Vi','Sá'],
+		weekHeader: 'Sm',
+		dateFormat: 'dd/mm/yy',
+		firstDay: 1,
+		isRTL: false,
+		showMonthAfterYear: false,
+		yearSuffix: ''};
+	$.datepicker.setDefaults($.datepicker.regional['es']);
+});
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/jquery.ui.datepicker-fr.js	Wed Apr 27 09:54:22 2011 +0200
@@ -0,0 +1,23 @@
+/* French initialisation for the jQuery UI date picker plugin. */
+/* Written by Keith Wood (kbwood{at}iinet.com.au) and Stéphane Nahmani (sholby@sholby.net). */
+jQuery(function($){
+	$.datepicker.regional['fr'] = {
+		closeText: 'Fermer',
+		prevText: '&#x3c;Préc',
+		nextText: 'Suiv&#x3e;',
+		currentText: 'Courant',
+		monthNames: ['Janvier','Février','Mars','Avril','Mai','Juin',
+		'Juillet','Août','Septembre','Octobre','Novembre','Décembre'],
+		monthNamesShort: ['Jan','Fév','Mar','Avr','Mai','Jun',
+		'Jul','Aoû','Sep','Oct','Nov','Déc'],
+		dayNames: ['Dimanche','Lundi','Mardi','Mercredi','Jeudi','Vendredi','Samedi'],
+		dayNamesShort: ['Dim','Lun','Mar','Mer','Jeu','Ven','Sam'],
+		dayNamesMin: ['Di','Lu','Ma','Me','Je','Ve','Sa'],
+		weekHeader: 'Sm',
+		dateFormat: 'dd/mm/yy',
+		firstDay: 1,
+		isRTL: false,
+		showMonthAfterYear: false,
+		yearSuffix: ''};
+	$.datepicker.setDefaults($.datepicker.regional['fr']);
+});
\ No newline at end of file
--- a/web/data/uiprops.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/web/data/uiprops.py	Wed Apr 27 09:54:22 2011 +0200
@@ -75,6 +75,7 @@
 
 # header
 headerBgColor = '#ff7700'
+headerBg = lazystr('%(headerBgColor)s url("banner.png") repeat-x top left')
 
 # h
 h1FontSize = '1.5em' # 18px
@@ -93,7 +94,6 @@
 
 # links
 aColor = '#e6820e'
-aActiveColor = aVisitedColor = aLinkColor = lazystr('%(aColor)s')
 
 
 # page frame
--- a/web/facet.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/web/facet.py	Wed Apr 27 09:54:22 2011 +0200
@@ -53,7 +53,7 @@
 from logilab.mtconverter import xml_escape
 from logilab.common.graph import has_path
 from logilab.common.decorators import cached
-from logilab.common.date import datetime2ticks
+from logilab.common.date import datetime2ticks, ustrftime, ticks2datetime
 from logilab.common.compat import all
 
 from rql import parse, nodes, utils
@@ -940,29 +940,37 @@
             return None
         return self.wdgclass(self, min(values), max(values))
 
-    def infvalue(self):
-        return self._cw.form.get('%s_inf' % self.__regid__)
-
-    def supvalue(self):
-        return self._cw.form.get('%s_sup' % self.__regid__)
-
     def formatvalue(self, value):
         """format `value` before in order to insert it in the RQL query"""
         return unicode(value)
 
+    def infvalue(self, min=False):
+        if min:
+            return self._cw.form.get('min_%s_inf' % self.__regid__)
+        return self._cw.form.get('%s_inf' % self.__regid__)
+
+    def supvalue(self, max=False):
+        if max:
+            return self._cw.form.get('max_%s_sup' % self.__regid__)
+        return self._cw.form.get('%s_sup' % self.__regid__)
+
     def add_rql_restrictions(self):
         infvalue = self.infvalue()
-        if infvalue is None: # nothing sent
+        supvalue = self.supvalue()
+        if infvalue is None or supvalue is None: # nothing sent
             return
-        supvalue = self.supvalue()
-        self.rqlst.add_constant_restriction(self.filtered_variable,
-                                            self.rtype,
-                                            self.formatvalue(infvalue),
-                                            self.attrtype, '>=')
-        self.rqlst.add_constant_restriction(self.filtered_variable,
-                                            self.rtype,
-                                            self.formatvalue(supvalue),
-                                            self.attrtype, '<=')
+        # when a value is equal to one of the limits, don't add the restriction,
+        # otherwise we would implicitly filter out NULL values
+        if infvalue != self.infvalue(min=True):
+            self.rqlst.add_constant_restriction(self.filtered_variable,
+                                                self.rtype,
+                                                self.formatvalue(infvalue),
+                                                self.attrtype, '>=')
+        if supvalue != self.supvalue(max=True):
+            self.rqlst.add_constant_restriction(self.filtered_variable,
+                                                self.rtype,
+                                                self.formatvalue(supvalue),
+                                                self.attrtype, '<=')
 
 
 class DateRangeFacet(RangeFacet):
@@ -981,7 +989,11 @@
 
     def formatvalue(self, value):
         """format `value` before in order to insert it in the RQL query"""
-        return '"%s"' % date.fromtimestamp(float(value) / 1000).strftime('%Y/%m/%d')
+        try:
+            date_value = ticks2datetime(float(value))
+        except (ValueError, OverflowError):
+            return u'"date out-of-range"'
+        return '"%s"' % ustrftime(date_value, '%Y/%m/%d')
 
 
 class HasRelationFacet(AbstractFacet):
@@ -1054,12 +1066,12 @@
   <option value="AND">%s</option>
 </select>''' % (facetid + '_andor', _('and/or between different values'),
                 _('OR'), _('AND')))
-        cssclass = ''
+        cssclass = 'facetBody'
         if not self.facet.start_unfolded:
             cssclass += ' hidden'
         if len(self.items) > 6:
             cssclass += ' overflowed'
-        self.w(u'<div class="facetBody%s">\n' % cssclass)
+        self.w(u'<div class="%s">\n' % cssclass)
         for item in self.items:
             item.render(w=self.w)
         self.w(u'</div>\n')
@@ -1128,14 +1140,23 @@
         self.w(u'<div id="%s" class="facet">\n' % facetid)
         self.w(u'<div class="facetTitle" cubicweb:facetName="%s">%s</div>\n' %
                (facetid, title))
+        cssclass = 'facetBody'
+        if not self.facet.start_unfolded:
+            cssclass += ' hidden'
+        self.w(u'<div class="%s">\n' % cssclass)
         self.w(u'<span id="%s_inf"></span> - <span id="%s_sup"></span>'
                % (sliderid, sliderid))
         self.w(u'<input type="hidden" name="%s_inf" value="%s" />'
                % (facetid, self.minvalue))
         self.w(u'<input type="hidden" name="%s_sup" value="%s" />'
                % (facetid, self.maxvalue))
+        self.w(u'<input type="hidden" name="min_%s_inf" value="%s" />'
+               % (facetid, self.minvalue))
+        self.w(u'<input type="hidden" name="max_%s_sup" value="%s" />'
+               % (facetid, self.maxvalue))
         self.w(u'<div id="%s"></div>' % sliderid)
         self.w(u'</div>\n')
+        self.w(u'</div>\n')
 
 
 class DateFacetRangeWidget(FacetRangeWidget):
@@ -1167,15 +1188,15 @@
         self.selected = selected
 
     def _render(self):
+        cssclass = 'facetValue facetCheckBox'
         if self.selected:
-            cssclass = ' facetValueSelected'
-            imgsrc = self._cw.datadir_url + self.selected_img
+            cssclass += ' facetValueSelected'
+            imgsrc = self._cw.data_url(self.selected_img)
             imgalt = self._cw._('selected')
         else:
-            cssclass = ''
-            imgsrc = self._cw.datadir_url + self.unselected_img
+            imgsrc = self._cw.data_url(self.unselected_img)
             imgalt = self._cw._('not selected')
-        self.w(u'<div class="facetValue facetCheckBox%s" cubicweb:value="%s">\n'
+        self.w(u'<div class="%s" cubicweb:value="%s">\n'
                % (cssclass, xml_escape(unicode(self.value))))
         self.w(u'<img src="%s" alt="%s"/>&#160;' % (imgsrc, imgalt))
         self.w(u'<a href="javascript: {}">%s</a>' % xml_escape(self.label))
@@ -1196,15 +1217,15 @@
         title = xml_escape(self.facet.title)
         facetid = xml_escape(self.facet.__regid__)
         self.w(u'<div id="%s" class="facet">\n' % facetid)
+        cssclass = 'facetValue facetCheckBox'
         if self.selected:
-            cssclass = ' facetValueSelected'
-            imgsrc = self._cw.datadir_url + self.selected_img
+            cssclass += ' facetValueSelected'
+            imgsrc = self._cw.data_url(self.selected_img)
             imgalt = self._cw._('selected')
         else:
-            cssclass = ''
-            imgsrc = self._cw.datadir_url + self.unselected_img
+            imgsrc = self._cw.data_url(self.unselected_img)
             imgalt = self._cw._('not selected')
-        self.w(u'<div class="facetValue facetCheckBox%s" cubicweb:value="%s">\n'
+        self.w(u'<div class="%s" cubicweb:value="%s">\n'
                % (cssclass, xml_escape(unicode(self.value))))
         self.w(u'<div class="facetCheckBoxWidget">')
         self.w(u'<img src="%s" alt="%s" cubicweb:unselimg="true" />&#160;' % (imgsrc, imgalt))
--- a/web/formfields.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/web/formfields.py	Wed Apr 27 09:54:22 2011 +0200
@@ -733,7 +733,7 @@
             wdgs.append(u'<a href="%s" title="%s"><img src="%s" alt="%s"/></a>' %
                         (xml_escape(uilib.toggle_action(divid)),
                          form._cw._('show advanced fields'),
-                         xml_escape(form._cw.build_url('data/puce_down.png')),
+                         xml_escape(form._cw.data_url('puce_down.png')),
                          form._cw._('show advanced fields')))
             wdgs.append(u'<div id="%s" class="hidden">' % divid)
             if self.name_field:
--- a/web/formwidgets.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/web/formwidgets.py	Wed Apr 27 09:54:22 2011 +0200
@@ -580,6 +580,8 @@
 
     def _render(self, form, field, renderer):
         req = form._cw
+        if req.lang != 'en':
+            req.add_js('jquery.ui.datepicker-%s.js' % req.lang)
         domid = field.dom_id(form, self.suffix)
         # XXX find a way to understand every format
         fmt = req.property_value('ui.date-format')
@@ -592,8 +594,11 @@
             value = self.values(form, field)[0]
         else:
             value = self.datestr
+        attrs = {}
+        if self.settabindex:
+            attrs['tabindex'] = req.next_tabindex()
         return tags.input(id=domid, name=domid, value=value,
-                          type='text', size='10')
+                          type='text', size='10', **attrs)
 
 
 class JQueryTimePicker(FieldWidget):
@@ -618,6 +623,9 @@
             value = self.values(form, field)[0]
         else:
             value = self.timestr
+        attrs = {}
+        if self.settabindex:
+            attrs['tabindex'] = req.next_tabindex()
-        return tags.input(id=domid, name=domid, value=value,
-                          type='text', size='5')
+        return tags.input(id=domid, name=domid, value=value,
+                          type='text', size='5', **attrs)
 
@@ -768,7 +776,7 @@
             fname = entity.autocomplete_initfuncs[field.name]
         else:
             fname = self.autocomplete_initfunc
-        return entity._cw.datadir_url + fname
+        return entity._cw.data_url(fname)
 
 
 class RestrictedAutoCompletionWidget(AutoCompletionWidget):
--- a/web/request.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/web/request.py	Wed Apr 27 09:54:22 2011 +0200
@@ -420,7 +420,7 @@
         self.add_js('fckeditor/fckeditor.js')
         self.html_headers.define_var('fcklang', self.lang)
         self.html_headers.define_var('fckconfigpath',
-                                     self.build_url('data/cubicweb.fckcwconfig.js'))
+                                     self.data_url('cubicweb.fckcwconfig.js'))
     def use_fckeditor(self):
         return self.vreg.config.fckeditor_installed() and self.property_value('ui.fckeditor')
 
@@ -559,7 +559,7 @@
             jsfiles = (jsfiles,)
         for jsfile in jsfiles:
             if localfile:
-                jsfile = self.datadir_url + jsfile
+                jsfile = self.data_url(jsfile)
             self.html_headers.add_js(jsfile)
 
     def add_css(self, cssfiles, media=u'all', localfile=True, ieonly=False,
@@ -588,7 +588,7 @@
             add_css = self.html_headers.add_css
         for cssfile in cssfiles:
             if localfile:
-                cssfile = self.datadir_url + cssfile
+                cssfile = self.data_url(cssfile)
             add_css(cssfile, media, *extraargs)
 
     @deprecated('[3.9] use ajax_replace_url() instead, naming rql and vid arguments')
@@ -645,6 +645,10 @@
         """returns the absolute path of the base url"""
         return urlsplit(self.base_url())[2]
 
+    def data_url(self, relpath):
+        """returns the absolute path for a data resouce"""
+        return self.datadir_url + relpath
+
     @cached
     def from_controller(self):
         """return the id (string) of the controller issuing the request"""
--- a/web/test/data/schema.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/web/test/data/schema.py	Wed Apr 27 09:54:22 2011 +0200
@@ -91,3 +91,5 @@
     title = String(maxsize=32, required=True, fulltextindexed=True)
     concerns = SubjectRelation('Project', composite='object')
 
+# used by windmill for `test_edit_relation`
+from cubes.folder.schema import Folder
--- a/web/test/jstests/test_ajax.js	Tue Apr 05 08:39:49 2011 +0200
+++ b/web/test/jstests/test_ajax.js	Wed Apr 27 09:54:22 2011 +0200
@@ -44,7 +44,7 @@
             callback: function() {
                 var origLength = scriptsIncluded.length;
                 scriptsIncluded = jsSources();
-                // check that foo.js has been inserted in <head>
+                // check that foo.js has been *appended* to <head>
                 equals(scriptsIncluded.length, origLength + 1);
                 equals(scriptsIncluded[origLength].indexOf('http://foo.js'), 0);
                 // check that <div class="ajaxHtmlHead"> has been removed
@@ -105,7 +105,7 @@
     test('test callback after synchronous request with parameters', function() {
         var deferred = new Deferred();
         var result = jQuery.ajax({
-            url: './ajax_url0.html',
+            url: '/../ajax_url0.html',
             async: false,
             beforeSend: function(xhr) {
                 deferred._req = xhr;
--- a/web/test/test_windmill.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/web/test/test_windmill.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,8 +1,12 @@
 # Run all scenarii found in windmill directory
+from os.path import join, dirname
 from cubicweb.devtools.cwwindmill import (CubicWebWindmillUseCase,
                                           unittest_main)
 
-class CubicWebWindmillUseCase(CubicWebWindmillUseCase): pass
+class CubicWebWindmillUseCase(CubicWebWindmillUseCase):
+    #test_dir = join(dirname(__file__), "windmill/test_edit_relation.py")
+    pass
+
 
 if __name__ == '__main__':
     unittest_main()
--- a/web/test/unittest_application.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/web/test/unittest_application.py	Wed Apr 27 09:54:22 2011 +0200
@@ -24,7 +24,7 @@
 from urllib import unquote
 
 from logilab.common.testlib import TestCase, unittest_main
-from logilab.common.decorators import clear_cache
+from logilab.common.decorators import clear_cache, classproperty
 
 from cubicweb import AuthenticationError, Unauthorized
 from cubicweb.devtools.testlib import CubicWebTC
@@ -159,6 +159,15 @@
             raise
         self.app.error_handler = raise_hdlr
 
+    @classproperty
+    def config(cls):
+        try:
+            return cls.__dict__['_config']
+        except KeyError:
+            config = super(ApplicationTC, cls).config
+            config.global_set_option('allow-email-login', True)
+            return config
+
     def test_cnx_user_groups_sync(self):
         user = self.user()
         self.assertEqual(user.groups, set(('managers',)))
@@ -306,8 +315,7 @@
 
     def test_fb_login_concept(self):
         """see data/views.py"""
-        self.set_option('auth-mode', 'cookie')
-        self.set_option('anonymous-user', 'anon')
+        self.set_auth_mode('cookie', 'anon')
         self.login('anon')
         req = self.request()
         origcnx = req.cnx
@@ -316,6 +324,7 @@
         self.failIf(req.cnx is origcnx)
         self.assertEqual(req.user.login, 'turlututu')
         self.failUnless('turlututu' in page, page)
+        req.cnx.close() # avoid warning
 
     # authentication tests ####################################################
 
@@ -324,10 +333,9 @@
         self.assertAuthFailure(req)
         self.assertRaises(AuthenticationError, self.app_publish, req, 'login')
         self.assertEqual(req.cnx, None)
-        authstr = base64.encodestring('%s:%s' % (origsession.login, origsession.authinfo['password']))
-        req._headers['Authorization'] = 'basic %s' % authstr
+        authstr = base64.encodestring('%s:%s' % (self.admlogin, self.admpassword))
+        req.set_request_header('Authorization', 'basic %s' % authstr)
         self.assertAuthSuccess(req, origsession)
-        self.assertEqual(req.session.authinfo, {'password': origsession.authinfo['password']})
         self.assertRaises(LogOut, self.app_publish, req, 'logout')
         self.assertEqual(len(self.open_sessions), 0)
 
@@ -338,10 +346,9 @@
         self.failUnless('__login' in form)
         self.failUnless('__password' in form)
         self.assertEqual(req.cnx, None)
-        req.form['__login'] = origsession.login
-        req.form['__password'] = origsession.authinfo['password']
+        req.form['__login'] = self.admlogin
+        req.form['__password'] = self.admpassword
         self.assertAuthSuccess(req, origsession)
-        self.assertEqual(req.session.authinfo, {'password': origsession.authinfo['password']})
         self.assertRaises(LogOut, self.app_publish, req, 'logout')
         self.assertEqual(len(self.open_sessions), 0)
 
@@ -351,18 +358,17 @@
         self.execute('INSERT EmailAddress X: X address %(address)s, U primary_email X '
                      'WHERE U login %(login)s', {'address': address, 'login': login})
         self.commit()
-        # option allow-email-login not set
+        # # option allow-email-login not set
         req, origsession = self.init_authentication('cookie')
-        req.form['__login'] = address
-        req.form['__password'] = origsession.authinfo['password']
-        self.assertAuthFailure(req)
+        # req.form['__login'] = address
+        # req.form['__password'] = self.admpassword
+        # self.assertAuthFailure(req)
         # option allow-email-login set
         origsession.login = address
         self.set_option('allow-email-login', True)
         req.form['__login'] = address
-        req.form['__password'] = origsession.authinfo['password']
+        req.form['__password'] = self.admpassword
         self.assertAuthSuccess(req, origsession)
-        self.assertEqual(req.session.authinfo, {'password': origsession.authinfo['password']})
         self.assertRaises(LogOut, self.app_publish, req, 'logout')
         self.assertEqual(len(self.open_sessions), 0)
 
@@ -372,7 +378,8 @@
         cookie = Cookie.SimpleCookie()
         sessioncookie = self.app.session_handler.session_cookie(req)
         cookie[sessioncookie] = req.session.sessionid
-        req._headers['Cookie'] = cookie[sessioncookie].OutputString()
+        req.set_request_header('Cookie', cookie[sessioncookie].OutputString(),
+                               raw=True)
         clear_cache(req, 'get_authorization')
         # reset session as if it was a new incoming request
         req.session = req.cnx = None
@@ -382,7 +389,6 @@
         asession = req.session
         self.assertEqual(len(self.open_sessions), 1)
         self.assertEqual(asession.login, 'anon')
-        self.assertEqual(asession.authinfo['password'], 'anon')
         self.failUnless(asession.anonymous_session)
         self._reset_cookie(req)
 
@@ -398,12 +404,11 @@
         req, origsession = self.init_authentication('http', 'anon')
         self._test_auth_anon(req)
         authstr = base64.encodestring('toto:pouet')
-        req._headers['Authorization'] = 'basic %s' % authstr
+        req.set_request_header('Authorization', 'basic %s' % authstr)
         self._test_anon_auth_fail(req)
-        authstr = base64.encodestring('%s:%s' % (origsession.login, origsession.authinfo['password']))
-        req._headers['Authorization'] = 'basic %s' % authstr
+        authstr = base64.encodestring('%s:%s' % (self.admlogin, self.admpassword))
+        req.set_request_header('Authorization', 'basic %s' % authstr)
         self.assertAuthSuccess(req, origsession)
-        self.assertEqual(req.session.authinfo, {'password': origsession.authinfo['password']})
         self.assertRaises(LogOut, self.app_publish, req, 'logout')
         self.assertEqual(len(self.open_sessions), 0)
 
@@ -413,11 +418,9 @@
         req.form['__login'] = 'toto'
         req.form['__password'] = 'pouet'
         self._test_anon_auth_fail(req)
-        req.form['__login'] = origsession.login
-        req.form['__password'] = origsession.authinfo['password']
+        req.form['__login'] = self.admlogin
+        req.form['__password'] = self.admpassword
         self.assertAuthSuccess(req, origsession)
-        self.assertEqual(req.session.authinfo,
-                          {'password': origsession.authinfo['password']})
         self.assertRaises(LogOut, self.app_publish, req, 'logout')
         self.assertEqual(len(self.open_sessions), 0)
 
--- a/web/test/unittest_form.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/web/test/unittest_form.py	Wed Apr 27 09:54:22 2011 +0200
@@ -92,9 +92,15 @@
         form.content_type = 'text/html'
         pageinfo = self._check_html(form.render(), form, template=None)
         inputs = pageinfo.find_tag('select', False)
-        self.failUnless(any(attrs for t, attrs in inputs if attrs.get('name') == 'in_group-subject:A'))
+        ok = False
+        for selectnode in pageinfo.matching_nodes('select', name='from_in_group-subject:A'):
+            for optionnode in selectnode:
+                self.assertEqual(optionnode.get('value'), str(geid))
+                self.assertEqual(ok, False)
+                ok = True
+        self.assertEqual(ok, True, 'expected option not found')
         inputs = pageinfo.find_tag('input', False)
-        self.failIf(any(attrs for t, attrs in inputs if attrs.get('name') == '__linkto'))
+        self.failIf(list(pageinfo.matching_nodes('input', name='__linkto')))
 
     def test_reledit_composite_field(self):
         rset = self.execute('INSERT BlogEntry X: X title "cubicweb.org", X content "hop"')
--- a/web/test/unittest_reledit.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/web/test/unittest_reledit.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -33,9 +33,9 @@
 class ClickAndEditFormTC(ReleditMixinTC, CubicWebTC):
 
     def test_default_config(self):
-        reledit = {'title': """<div id="title-subject-%(eid)s-reledit" onmouseout="jQuery('#title-subject-%(eid)s').addClass('hidden')" onmouseover="jQuery('#title-subject-%(eid)s').removeClass('hidden')" class="releditField"><div id="title-subject-%(eid)s-value" class="editableFieldValue">cubicweb-world-domination</div><div id="title-subject-%(eid)s" class="editableField hidden"><div id="title-subject-%(eid)s-update" class="editableField" onclick="cw.reledit.loadInlineEditionForm(&#39;base&#39;, %(eid)s, &#39;title&#39;, &#39;subject&#39;, &#39;title-subject-%(eid)s&#39;, false, &#39;&#39;);" title="click to edit this field"><img title="click to edit this field" src="data/pen_icon.png" alt="click to edit this field"/></div></div></div>""",
-                   'long_desc': """<div id="long_desc-subject-%(eid)s-reledit" onmouseout="jQuery('#long_desc-subject-%(eid)s').addClass('hidden')" onmouseover="jQuery('#long_desc-subject-%(eid)s').removeClass('hidden')" class="releditField"><div id="long_desc-subject-%(eid)s-value" class="editableFieldValue">&lt;not specified&gt;</div><div id="long_desc-subject-%(eid)s" class="editableField hidden"><div id="long_desc-subject-%(eid)s-add" class="editableField" onclick="cw.reledit.loadInlineEditionForm(&#39;edition&#39;, %(eid)s, &#39;long_desc&#39;, &#39;subject&#39;, &#39;long_desc-subject-%(eid)s&#39;, false, &#39;autolimited&#39;);" title="click to add a value"><img title="click to add a value" src="data/plus.png" alt="click to add a value"/></div></div></div>""",
-                   'manager': """<div id="manager-subject-%(eid)s-reledit" onmouseout="jQuery('#manager-subject-%(eid)s').addClass('hidden')" onmouseover="jQuery('#manager-subject-%(eid)s').removeClass('hidden')" class="releditField"><div id="manager-subject-%(eid)s-value" class="editableFieldValue">&lt;not specified&gt;</div><div id="manager-subject-%(eid)s" class="editableField hidden"><div id="manager-subject-%(eid)s-update" class="editableField" onclick="cw.reledit.loadInlineEditionForm(&#39;base&#39;, %(eid)s, &#39;manager&#39;, &#39;subject&#39;, &#39;manager-subject-%(eid)s&#39;, false, &#39;autolimited&#39;);" title="click to edit this field"><img title="click to edit this field" src="data/pen_icon.png" alt="click to edit this field"/></div></div></div>""",
+        reledit = {'title': """<div id="title-subject-%(eid)s-reledit" onmouseout="jQuery('#title-subject-%(eid)s').addClass('hidden')" onmouseover="jQuery('#title-subject-%(eid)s').removeClass('hidden')" class="releditField"><div id="title-subject-%(eid)s-value" class="editableFieldValue">cubicweb-world-domination</div><div id="title-subject-%(eid)s" class="editableField hidden"><div id="title-subject-%(eid)s-update" class="editableField" onclick="cw.reledit.loadInlineEditionForm(&#39;base&#39;, %(eid)s, &#39;title&#39;, &#39;subject&#39;, &#39;title-subject-%(eid)s&#39;, false, &#39;&#39;);" title="click to edit this field"><img title="click to edit this field" src="http://testing.fr/cubicweb/data/pen_icon.png" alt="click to edit this field"/></div></div></div>""",
+                   'long_desc': """<div id="long_desc-subject-%(eid)s-reledit" onmouseout="jQuery('#long_desc-subject-%(eid)s').addClass('hidden')" onmouseover="jQuery('#long_desc-subject-%(eid)s').removeClass('hidden')" class="releditField"><div id="long_desc-subject-%(eid)s-value" class="editableFieldValue">&lt;not specified&gt;</div><div id="long_desc-subject-%(eid)s" class="editableField hidden"><div id="long_desc-subject-%(eid)s-add" class="editableField" onclick="cw.reledit.loadInlineEditionForm(&#39;edition&#39;, %(eid)s, &#39;long_desc&#39;, &#39;subject&#39;, &#39;long_desc-subject-%(eid)s&#39;, false, &#39;autolimited&#39;);" title="click to add a value"><img title="click to add a value" src="http://testing.fr/cubicweb/data/plus.png" alt="click to add a value"/></div></div></div>""",
+                   'manager': """<div id="manager-subject-%(eid)s-reledit" onmouseout="jQuery('#manager-subject-%(eid)s').addClass('hidden')" onmouseover="jQuery('#manager-subject-%(eid)s').removeClass('hidden')" class="releditField"><div id="manager-subject-%(eid)s-value" class="editableFieldValue">&lt;not specified&gt;</div><div id="manager-subject-%(eid)s" class="editableField hidden"><div id="manager-subject-%(eid)s-update" class="editableField" onclick="cw.reledit.loadInlineEditionForm(&#39;base&#39;, %(eid)s, &#39;manager&#39;, &#39;subject&#39;, &#39;manager-subject-%(eid)s&#39;, false, &#39;autolimited&#39;);" title="click to edit this field"><img title="click to edit this field" src="http://testing.fr/cubicweb/data/pen_icon.png" alt="click to edit this field"/></div></div></div>""",
                    'composite_card11_2ttypes': """&lt;not specified&gt;""",
                    'concerns': """&lt;not specified&gt;"""}
 
@@ -76,7 +76,7 @@
 <td><button class="validateButton" onclick="cw.reledit.cleanupAfterCancel(&#39;title-subject-%(eid)s&#39;)" tabindex="3" type="button" value="button_cancel"><img alt="CANCEL_ICON" src="http://testing.fr/cubicweb/data/cancel.png" />button_cancel</button></td>
 </tr></table>
 </fieldset>
-</form><div id="title-subject-%(eid)s" class="editableField hidden"><div id="title-subject-%(eid)s-update" class="editableField" onclick="cw.reledit.loadInlineEditionForm(&#39;base&#39;, %(eid)s, &#39;title&#39;, &#39;subject&#39;, &#39;title-subject-%(eid)s&#39;, false, &#39;&#39;);" title="click to edit this field"><img title="click to edit this field" src="data/pen_icon.png" alt="click to edit this field"/></div></div></div>""",
+</form><div id="title-subject-%(eid)s" class="editableField hidden"><div id="title-subject-%(eid)s-update" class="editableField" onclick="cw.reledit.loadInlineEditionForm(&#39;base&#39;, %(eid)s, &#39;title&#39;, &#39;subject&#39;, &#39;title-subject-%(eid)s&#39;, false, &#39;&#39;);" title="click to edit this field"><img title="click to edit this field" src="http://testing.fr/cubicweb/data/pen_icon.png" alt="click to edit this field"/></div></div></div>""",
 
                      'long_desc': """<div id="long_desc-subject-%(eid)s-reledit" onmouseout="jQuery('#long_desc-subject-%(eid)s').addClass('hidden')" onmouseover="jQuery('#long_desc-subject-%(eid)s').removeClass('hidden')" class="releditField"><div id="long_desc-subject-%(eid)s-value" class="editableFieldValue">&lt;not specified&gt;</div><form action="http://testing.fr/cubicweb/validateform?__onsuccess=window.parent.cw.reledit.onSuccess" method="post" enctype="application/x-www-form-urlencoded" id="long_desc-subject-%(eid)s-form" onsubmit="return freezeFormButtons(&#39;long_desc-subject-%(eid)s-form&#39;);" class="releditForm" cubicweb:target="eformframe">
 <fieldset>
@@ -120,7 +120,7 @@
 <td><button class="validateButton" onclick="cw.reledit.cleanupAfterCancel(&#39;long_desc-subject-%(eid)s&#39;)" tabindex="8" type="button" value="button_cancel"><img alt="CANCEL_ICON" src="http://testing.fr/cubicweb/data/cancel.png" />button_cancel</button></td>
 </tr></table>
 </fieldset>
-</form><div id="long_desc-subject-%(eid)s" class="editableField hidden"><div id="long_desc-subject-%(eid)s-add" class="editableField" onclick="cw.reledit.loadInlineEditionForm(&#39;edition&#39;, %(eid)s, &#39;long_desc&#39;, &#39;subject&#39;, &#39;long_desc-subject-%(eid)s&#39;, false, &#39;autolimited&#39;);" title="click to add a value"><img title="click to add a value" src="data/plus.png" alt="click to add a value"/></div></div></div>""",
+</form><div id="long_desc-subject-%(eid)s" class="editableField hidden"><div id="long_desc-subject-%(eid)s-add" class="editableField" onclick="cw.reledit.loadInlineEditionForm(&#39;edition&#39;, %(eid)s, &#39;long_desc&#39;, &#39;subject&#39;, &#39;long_desc-subject-%(eid)s&#39;, false, &#39;autolimited&#39;);" title="click to add a value"><img title="click to add a value" src="http://testing.fr/cubicweb/data/plus.png" alt="click to add a value"/></div></div></div>""",
 
                      'manager': """<div id="manager-subject-%(eid)s-reledit" onmouseout="jQuery('#manager-subject-%(eid)s').addClass('hidden')" onmouseover="jQuery('#manager-subject-%(eid)s').removeClass('hidden')" class="releditField"><div id="manager-subject-%(eid)s-value" class="editableFieldValue">&lt;not specified&gt;</div><form action="http://testing.fr/cubicweb/validateform?__onsuccess=window.parent.cw.reledit.onSuccess" method="post" enctype="application/x-www-form-urlencoded" id="manager-subject-%(eid)s-form" onsubmit="return freezeFormButtons(&#39;manager-subject-%(eid)s-form&#39;);" class="releditForm" cubicweb:target="eformframe">
 <fieldset>
@@ -156,7 +156,7 @@
 <td><button class="validateButton" onclick="cw.reledit.cleanupAfterCancel(&#39;manager-subject-%(eid)s&#39;)" tabindex="11" type="button" value="button_cancel"><img alt="CANCEL_ICON" src="http://testing.fr/cubicweb/data/cancel.png" />button_cancel</button></td>
 </tr></table>
 </fieldset>
-</form><div id="manager-subject-%(eid)s" class="editableField hidden"><div id="manager-subject-%(eid)s-update" class="editableField" onclick="cw.reledit.loadInlineEditionForm(&#39;base&#39;, %(eid)s, &#39;manager&#39;, &#39;subject&#39;, &#39;manager-subject-%(eid)s&#39;, false, &#39;autolimited&#39;);" title="click to edit this field"><img title="click to edit this field" src="data/pen_icon.png" alt="click to edit this field"/></div></div></div>""",
+</form><div id="manager-subject-%(eid)s" class="editableField hidden"><div id="manager-subject-%(eid)s-update" class="editableField" onclick="cw.reledit.loadInlineEditionForm(&#39;base&#39;, %(eid)s, &#39;manager&#39;, &#39;subject&#39;, &#39;manager-subject-%(eid)s&#39;, false, &#39;autolimited&#39;);" title="click to edit this field"><img title="click to edit this field" src="http://testing.fr/cubicweb/data/pen_icon.png" alt="click to edit this field"/></div></div></div>""",
                      'composite_card11_2ttypes': """&lt;not specified&gt;""",
                      'concerns': """&lt;not specified&gt;"""
             }
@@ -190,11 +190,11 @@
         reledit_ctrl.tag_object_of(('Ticket', 'concerns', 'Project'),
                                    {'edit_target': 'rtype'})
         reledit = {
-            'title': """<div id="title-subject-%(eid)s-reledit" onmouseout="jQuery('#title-subject-%(eid)s').addClass('hidden')" onmouseover="jQuery('#title-subject-%(eid)s').removeClass('hidden')" class="releditField"><div id="title-subject-%(eid)s-value" class="editableFieldValue">cubicweb-world-domination</div><div id="title-subject-%(eid)s" class="editableField hidden"><div id="title-subject-%(eid)s-update" class="editableField" onclick="cw.reledit.loadInlineEditionForm(&#39;base&#39;, %(eid)s, &#39;title&#39;, &#39;subject&#39;, &#39;title-subject-%(eid)s&#39;, true, &#39;&#39;);" title="click to edit this field"><img title="click to edit this field" src="data/pen_icon.png" alt="click to edit this field"/></div></div></div>""",
-            'long_desc': """<div id="long_desc-subject-%(eid)s-reledit" onmouseout="jQuery('#long_desc-subject-%(eid)s').addClass('hidden')" onmouseover="jQuery('#long_desc-subject-%(eid)s').removeClass('hidden')" class="releditField"><div id="long_desc-subject-%(eid)s-value" class="editableFieldValue">&lt;long_desc is required&gt;</div><div id="long_desc-subject-%(eid)s" class="editableField hidden"><div id="long_desc-subject-%(eid)s-update" class="editableField" onclick="cw.reledit.loadInlineEditionForm(&#39;base&#39;, %(eid)s, &#39;long_desc&#39;, &#39;subject&#39;, &#39;long_desc-subject-%(eid)s&#39;, true, &#39;autolimited&#39;);" title="click to edit this field"><img title="click to edit this field" src="data/pen_icon.png" alt="click to edit this field"/></div></div></div>""",
-            'manager': """<div id="manager-subject-%(eid)s-reledit" onmouseout="jQuery('#manager-subject-%(eid)s').addClass('hidden')" onmouseover="jQuery('#manager-subject-%(eid)s').removeClass('hidden')" class="releditField"><div id="manager-subject-%(eid)s-value" class="editableFieldValue"><a href="http://testing.fr/cubicweb/personne/%(toto)s" title="">Toto</a></div><div id="manager-subject-%(eid)s" class="editableField hidden"><div id="manager-subject-%(eid)s-update" class="editableField" onclick="cw.reledit.loadInlineEditionForm(&#39;edition&#39;, %(eid)s, &#39;manager&#39;, &#39;subject&#39;, &#39;manager-subject-%(eid)s&#39;, false, &#39;autolimited&#39;);" title="click to edit this field"><img title="click to edit this field" src="data/pen_icon.png" alt="click to edit this field"/></div><div id="manager-subject-%(eid)s-delete" class="editableField" onclick="cw.reledit.loadInlineEditionForm(&#39;deleteconf&#39;, %(eid)s, &#39;manager&#39;, &#39;subject&#39;, &#39;manager-subject-%(eid)s&#39;, false, &#39;autolimited&#39;);" title="click to delete this value"><img title="click to delete this value" src="data/cancel.png" alt="click to delete this value"/></div></div></div>""",
+            'title': """<div id="title-subject-%(eid)s-reledit" onmouseout="jQuery('#title-subject-%(eid)s').addClass('hidden')" onmouseover="jQuery('#title-subject-%(eid)s').removeClass('hidden')" class="releditField"><div id="title-subject-%(eid)s-value" class="editableFieldValue">cubicweb-world-domination</div><div id="title-subject-%(eid)s" class="editableField hidden"><div id="title-subject-%(eid)s-update" class="editableField" onclick="cw.reledit.loadInlineEditionForm(&#39;base&#39;, %(eid)s, &#39;title&#39;, &#39;subject&#39;, &#39;title-subject-%(eid)s&#39;, true, &#39;&#39;);" title="click to edit this field"><img title="click to edit this field" src="http://testing.fr/cubicweb/data/pen_icon.png" alt="click to edit this field"/></div></div></div>""",
+            'long_desc': """<div id="long_desc-subject-%(eid)s-reledit" onmouseout="jQuery('#long_desc-subject-%(eid)s').addClass('hidden')" onmouseover="jQuery('#long_desc-subject-%(eid)s').removeClass('hidden')" class="releditField"><div id="long_desc-subject-%(eid)s-value" class="editableFieldValue">&lt;long_desc is required&gt;</div><div id="long_desc-subject-%(eid)s" class="editableField hidden"><div id="long_desc-subject-%(eid)s-update" class="editableField" onclick="cw.reledit.loadInlineEditionForm(&#39;base&#39;, %(eid)s, &#39;long_desc&#39;, &#39;subject&#39;, &#39;long_desc-subject-%(eid)s&#39;, true, &#39;autolimited&#39;);" title="click to edit this field"><img title="click to edit this field" src="http://testing.fr/cubicweb/data/pen_icon.png" alt="click to edit this field"/></div></div></div>""",
+            'manager': """<div id="manager-subject-%(eid)s-reledit" onmouseout="jQuery('#manager-subject-%(eid)s').addClass('hidden')" onmouseover="jQuery('#manager-subject-%(eid)s').removeClass('hidden')" class="releditField"><div id="manager-subject-%(eid)s-value" class="editableFieldValue"><a href="http://testing.fr/cubicweb/personne/%(toto)s" title="">Toto</a></div><div id="manager-subject-%(eid)s" class="editableField hidden"><div id="manager-subject-%(eid)s-update" class="editableField" onclick="cw.reledit.loadInlineEditionForm(&#39;edition&#39;, %(eid)s, &#39;manager&#39;, &#39;subject&#39;, &#39;manager-subject-%(eid)s&#39;, false, &#39;autolimited&#39;);" title="click to edit this field"><img title="click to edit this field" src="http://testing.fr/cubicweb/data/pen_icon.png" alt="click to edit this field"/></div><div id="manager-subject-%(eid)s-delete" class="editableField" onclick="cw.reledit.loadInlineEditionForm(&#39;deleteconf&#39;, %(eid)s, &#39;manager&#39;, &#39;subject&#39;, &#39;manager-subject-%(eid)s&#39;, false, &#39;autolimited&#39;);" title="click to delete this value"><img title="click to delete this value" src="http://testing.fr/cubicweb/data/cancel.png" alt="click to delete this value"/></div></div></div>""",
             'composite_card11_2ttypes': """&lt;not specified&gt;""",
-            'concerns': """<div id="concerns-object-%(eid)s-reledit" onmouseout="jQuery('#concerns-object-%(eid)s').addClass('hidden')" onmouseover="jQuery('#concerns-object-%(eid)s').removeClass('hidden')" class="releditField"><div id="concerns-object-%(eid)s-value" class="editableFieldValue"><a href="http://testing.fr/cubicweb/ticket/%(tick)s" title="">write the code</a></div><div id="concerns-object-%(eid)s" class="editableField hidden"><div id="concerns-object-%(eid)s-update" class="editableField" onclick="cw.reledit.loadInlineEditionForm(&#39;base&#39;, %(eid)s, &#39;concerns&#39;, &#39;object&#39;, &#39;concerns-object-%(eid)s&#39;, false, &#39;autolimited&#39;);" title="click to edit this field"><img title="click to edit this field" src="data/pen_icon.png" alt="click to edit this field"/></div></div></div>"""
+            'concerns': """<div id="concerns-object-%(eid)s-reledit" onmouseout="jQuery('#concerns-object-%(eid)s').addClass('hidden')" onmouseover="jQuery('#concerns-object-%(eid)s').removeClass('hidden')" class="releditField"><div id="concerns-object-%(eid)s-value" class="editableFieldValue"><a href="http://testing.fr/cubicweb/ticket/%(tick)s" title="">write the code</a></div><div id="concerns-object-%(eid)s" class="editableField hidden"><div id="concerns-object-%(eid)s-update" class="editableField" onclick="cw.reledit.loadInlineEditionForm(&#39;base&#39;, %(eid)s, &#39;concerns&#39;, &#39;object&#39;, &#39;concerns-object-%(eid)s&#39;, false, &#39;autolimited&#39;);" title="click to edit this field"><img title="click to edit this field" src="http://testing.fr/cubicweb/data/pen_icon.png" alt="click to edit this field"/></div></div></div>"""
             }
         for rschema, ttypes, role in self.proj.e_schema.relation_definitions(includefinal=True):
             if rschema not in reledit:
--- a/web/test/unittest_urlpublisher.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/web/test/unittest_urlpublisher.py	Wed Apr 27 09:54:22 2011 +0200
@@ -56,41 +56,50 @@
         self.assertRaises(NotFound, self.process, '123/345')
         self.assertRaises(NotFound, self.process, 'not_eid')
 
-    def test_rest_path(self):
+    def test_rest_path_etype(self):
         """tests the rest path resolution"""
-        ctrl, rset = self.process('CWUser')
+        ctrl, rset = self.process('CWEType')
         self.assertEqual(ctrl, 'view')
-        self.assertEqual(rset.description[0][0], 'CWUser')
+        self.assertEqual(rset.description[0][0], 'CWEType')
         self.assertEqual(rset.printable_rql(),
-                          "Any X,AA,AB,AC,AD ORDERBY AA WHERE X is CWUser, X login AA, X firstname AB, X surname AC, X modification_date AD")
+                          "Any X,AA,AB ORDERBY AA WHERE X is CWEType, X name AA, X modification_date AB")
+
+    def test_rest_path_by_attr(self):
         ctrl, rset = self.process('CWUser/login/admin')
         self.assertEqual(ctrl, 'view')
         self.assertEqual(len(rset), 1)
         self.assertEqual(rset.description[0][0], 'CWUser')
         self.assertEqual(rset.printable_rql(), 'Any X,AA,AB,AC,AD WHERE X login "admin", X is CWUser, X login AA, X firstname AB, X surname AC, X modification_date AD')
+
+    def test_rest_path_unique_attr(self):
         ctrl, rset = self.process('cwuser/admin')
         self.assertEqual(ctrl, 'view')
         self.assertEqual(len(rset), 1)
         self.assertEqual(rset.description[0][0], 'CWUser')
         self.assertEqual(rset.printable_rql(), 'Any X,AA,AB,AC,AD WHERE X login "admin", X is CWUser, X login AA, X firstname AB, X surname AC, X modification_date AD')
-        ctrl, rset = self.process('cwuser/eid/%s'%rset[0][0])
+
+    def test_rest_path_eid(self):
+        ctrl, rset = self.process('cwuser/eid/%s' % self.user().eid)
         self.assertEqual(ctrl, 'view')
         self.assertEqual(len(rset), 1)
         self.assertEqual(rset.description[0][0], 'CWUser')
         self.assertEqual(rset.printable_rql(), 'Any X,AA,AB,AC,AD WHERE X eid %s, X is CWUser, X login AA, X firstname AB, X surname AC, X modification_date AD' % rset[0][0])
-        # test non-ascii paths
+
+    def test_rest_path_non_ascii_paths(self):
         ctrl, rset = self.process('CWUser/login/%C3%BFsa%C3%BFe')
         self.assertEqual(ctrl, 'view')
         self.assertEqual(len(rset), 1)
         self.assertEqual(rset.description[0][0], 'CWUser')
         self.assertEqual(rset.printable_rql(), u'Any X,AA,AB,AC,AD WHERE X login "\xffsa\xffe", X is CWUser, X login AA, X firstname AB, X surname AC, X modification_date AD')
-        # test quoted paths
+
+    def test_rest_path_quoted_paths(self):
         ctrl, rset = self.process('BlogEntry/title/hell%27o')
         self.assertEqual(ctrl, 'view')
         self.assertEqual(len(rset), 1)
         self.assertEqual(rset.description[0][0], 'BlogEntry')
         self.assertEqual(rset.printable_rql(), u'Any X,AA,AB,AC WHERE X title "hell\'o", X is BlogEntry, X creation_date AA, X title AB, X modification_date AC')
-        # errors
+
+    def test_rest_path_errors(self):
         self.assertRaises(NotFound, self.process, 'CWUser/eid/30000')
         self.assertRaises(NotFound, self.process, 'Workcases')
         self.assertRaises(NotFound, self.process, 'CWUser/inexistant_attribute/joe')
--- a/web/test/unittest_urlrewrite.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/web/test/unittest_urlrewrite.py	Wed Apr 27 09:54:22 2011 +0200
@@ -47,12 +47,14 @@
             ('/schema', dict(vid='schema')),
             ('/myprefs', dict(vid='propertiesform')),
             ('/siteconfig', dict(vid='systempropertiesform')),
-            ('/siteinfo', dict(vid='info')),
+            ('/siteinfo', dict(vid='siteinfo')),
             ('/manage', dict(vid='manage')),
             ('/notfound', dict(vid='404')),
             ('/error', dict(vid='error')),
             ('/sparql', dict(vid='sparql')),
             ('/processinfo', dict(vid='processinfo')),
+            ('/cwuser$', {'vid': 'cw.user-management'}),
+            ('/cwsource$', {'vid': 'cw.source-management'}),
             ('/schema/([^/]+?)/?$', {'rql': r'Any X WHERE X is CWEType, X name "\1"', 'vid': 'primary'}),
             ('/add/([^/]+?)/?$' , dict(vid='creation', etype=r'\1')),
             ('/doc/images/(.+?)/?$', dict(fid='\\1', vid='wdocimages')),
--- a/web/test/unittest_viewselector.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/web/test/unittest_viewselector.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -27,25 +27,32 @@
                                 traced_selection)
 from cubicweb.web import NoSelectableObject
 from cubicweb.web.action import Action
-from cubicweb.web.views import (primary, baseviews, tableview, editforms,
-                                calendar, management, embedding, actions,
-                                startup, cwuser, schema, xbel, vcard, owl,
-                                treeview, idownloadable, wdoc, debug,
-                                cwproperties, workflow, xmlrss, csvexport)
+from cubicweb.web.views import (
+    primary, baseviews, tableview, editforms, calendar, management, embedding,
+    actions, startup, cwuser, schema, xbel, vcard, owl, treeview, idownloadable,
+    wdoc, debug, cwproperties, cwsources, workflow, xmlrss, rdf,
+    csvexport)
 
 from cubes.folder import views as folderviews
 
 USERACTIONS = [actions.UserPreferencesAction,
                actions.UserInfoAction,
                actions.LogoutAction]
-SITEACTIONS = [actions.SiteConfigurationAction,
-               actions.ManageAction,
-               schema.ViewSchemaAction,
-               debug.SiteInfoAction]
+SITEACTIONS = [actions.ManageAction]
 FOOTERACTIONS = [wdoc.HelpAction,
                  wdoc.ChangeLogAction,
                  wdoc.AboutAction,
                  actions.PoweredByAction]
+MANAGEACTIONS = [actions.SiteConfigurationAction,
+                 schema.ViewSchemaAction,
+                 cwuser.ManageUsersAction,
+                 cwsources.ManageSourcesAction,
+                 debug.SiteInfoAction]
+
+if hasattr(rdf, 'RDFView'): # not available if the rdf lib is not installed
+    RDFVIEWS = [('rdf', rdf.RDFView)]
+else:
+    RDFVIEWS = []
 
 class ViewSelectorTC(CubicWebTC):
 
@@ -83,6 +90,8 @@
         req = self.request()
         self.assertListEqual(self.pviews(req, None),
                              [('changelog', wdoc.ChangeLogView),
+                              ('cw.source-management', cwsources.CWSourceManagementView),
+                              ('cw.user-management', cwuser.CWUserManagementView),
                               ('gc', debug.GCView),
                               ('index', startup.IndexView),
                               ('info', debug.ProcessInformationView),
@@ -91,6 +100,7 @@
                               ('propertiesform', cwproperties.CWPropertiesForm),
                               ('registry', debug.RegistryView),
                               ('schema', schema.SchemaView),
+                              ('siteinfo', debug.SiteInfoView),
                               ('systempropertiesform', cwproperties.SystemCWPropertiesForm),
                               ('tree', folderviews.FolderTreeView),
                               ])
@@ -112,7 +122,7 @@
                               ('list', baseviews.ListView),
                               ('oneline', baseviews.OneLineView),
                               ('owlabox', owl.OWLABOXView),
-                              ('primary', cwuser.CWGroupPrimaryView),
+                              ('primary', cwuser.CWGroupPrimaryView)] + RDFVIEWS + [
                               ('rsetxml', xmlrss.XMLRsetView),
                               ('rss', xmlrss.RSSView),
                               ('sameetypelist', baseviews.SameETypeListView),
@@ -136,7 +146,7 @@
                               ('list', baseviews.ListView),
                               ('oneline', baseviews.OneLineView),
                               ('owlabox', owl.OWLABOXView),
-                              ('primary', cwuser.CWGroupPrimaryView),
+                              ('primary', cwuser.CWGroupPrimaryView)] + RDFVIEWS + [
                               ('rsetxml', xmlrss.XMLRsetView),
                               ('rss', xmlrss.RSSView),
                               ('sameetypelist', baseviews.SameETypeListView),
@@ -191,7 +201,7 @@
                               ('list', baseviews.ListView),
                               ('oneline', baseviews.OneLineView),
                               ('owlabox', owl.OWLABOXView),
-                              ('primary', primary.PrimaryView),
+                              ('primary', primary.PrimaryView),] + RDFVIEWS + [
                               ('rsetxml', xmlrss.XMLRsetView),
                               ('rss', xmlrss.RSSView),
                               ('secondary', baseviews.SecondaryView),
@@ -218,6 +228,7 @@
         rset = req.execute('CWUser X')
         self.assertListEqual(self.pviews(req, rset),
                              [('csvexport', csvexport.CSVRsetView),
+                              ('cw.user-table', cwuser.CWUserTable),
                               ('ecsvexport', csvexport.CSVEntityView),
                               ('editable-table', tableview.EditableTableView),
                               ('filetree', treeview.FileTreeView),
@@ -225,7 +236,7 @@
                               ('list', baseviews.ListView),
                               ('oneline', baseviews.OneLineView),
                               ('owlabox', owl.OWLABOXView),
-                              ('primary', primary.PrimaryView),
+                              ('primary', primary.PrimaryView)] + RDFVIEWS + [
                               ('rsetxml', xmlrss.XMLRsetView),
                               ('rss', xmlrss.RSSView),
                               ('sameetypelist', baseviews.SameETypeListView),
@@ -244,6 +255,7 @@
         self.assertDictEqual(self.pactionsdict(req, None, skipcategories=()),
                              {'useractions': USERACTIONS,
                               'siteactions': SITEACTIONS,
+                              'manage': MANAGEACTIONS,
                               'footer': FOOTERACTIONS,
 
                               })
@@ -253,6 +265,7 @@
         self.assertDictEqual(self.pactionsdict(req, rset, skipcategories=()),
                              {'useractions': USERACTIONS,
                               'siteactions': SITEACTIONS,
+                              'manage': MANAGEACTIONS,
                               'footer': FOOTERACTIONS,
                               })
 
@@ -262,6 +275,7 @@
         self.assertDictEqual(self.pactionsdict(req, rset, skipcategories=()),
                              {'useractions': USERACTIONS,
                               'siteactions': SITEACTIONS,
+                              'manage': MANAGEACTIONS,
                               'footer': FOOTERACTIONS,
                               'mainactions': [actions.MultipleEditAction],
                               'moreactions': [actions.DeleteAction,
@@ -274,6 +288,7 @@
         self.assertDictEqual(self.pactionsdict(req, rset, skipcategories=()),
                              {'useractions': USERACTIONS,
                               'siteactions': SITEACTIONS,
+                              'manage': MANAGEACTIONS,
                               'footer': FOOTERACTIONS,
                               'moreactions': [actions.DeleteAction],
                               })
@@ -284,6 +299,7 @@
         self.assertDictEqual(self.pactionsdict(req, rset, skipcategories=()),
                              {'useractions': USERACTIONS,
                               'siteactions': SITEACTIONS,
+                              'manage': MANAGEACTIONS,
                               'footer': FOOTERACTIONS,
                               })
 
@@ -293,6 +309,7 @@
         self.assertDictEqual(self.pactionsdict(req, rset, skipcategories=()),
                              {'useractions': USERACTIONS,
                               'siteactions': SITEACTIONS,
+                              'manage': MANAGEACTIONS,
                               'footer': FOOTERACTIONS,
                               'mainactions': [actions.ModifyAction,
                                               actions.ViewSameCWEType],
@@ -508,6 +525,7 @@
                              {'useractions': USERACTIONS,
                               'siteactions': SITEACTIONS,
                               'footer': FOOTERACTIONS,
+                              'manage': MANAGEACTIONS,
                               'mainactions': [actions.ModifyAction, actions.ViewSameCWEType],
                               'moreactions': [actions.ManagePermissionsAction,
                                               actions.AddRelatedActions,
@@ -522,6 +540,7 @@
                              {'useractions': USERACTIONS,
                               'siteactions': SITEACTIONS,
                               'footer': FOOTERACTIONS,
+                              'manage': MANAGEACTIONS,
                               'mainactions': [actions.ModifyAction, actions.ViewSameCWEType],
                               'moreactions': [actions.ManagePermissionsAction,
                                               actions.AddRelatedActions,
--- a/web/test/windmill/test_creation.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/web/test/windmill/test_creation.py	Wed Apr 27 09:54:22 2011 +0200
@@ -26,7 +26,8 @@
     client.type(text=u'myuser', id=u'upassword-subject:A')
     client.type(text=u'myuser', name=u'upassword-subject-confirm:A')
     client.type(text=u'myuser', id=u'firstname-subject:A')
-    client.select(option=u'managers', id=u'in_group-subject:A')
+    client.select(option=u'managers', id=u'from_in_group-subject:A')
+    client.click(id=u'cwinoutadd')
     client.waits.forPageLoad(timeout=u'20000')
     client.click(id=u'adduse_email:Alink')
     client.waits.forPageLoad(timeout=u'20000')
--- a/web/test/windmill/test_edit_relation.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/web/test/windmill/test_edit_relation.py	Wed Apr 27 09:54:22 2011 +0200
@@ -22,8 +22,8 @@
     client.type(text=u'folder1', id=u'name-subject:A')
     client.click(value=u'button_ok')
     client.waits.forPageLoad(timeout=u'20000')
-    client.waits.forElement(link=u'add Folder filed_under Folder object', timeout=u'8000')
-    client.click(link=u'add Folder filed_under Folder object')
+    client.waits.forElement(link=u'add add Folder filed_under Folder object', timeout=u'8000')
+    client.click(link=u'add add Folder filed_under Folder object')
     client.waits.forPageLoad(timeout=u'20000')
     client.waits.forElement(timeout=u'8000', id=u'name-subject:A')
     client.click(id=u'name-subject:A')
@@ -44,8 +44,8 @@
     client.click(link=u'x')
     client.click(value=u'button_ok')
     client.waits.forPageLoad(timeout=u'20000')
-    client.waits.forElement(link=u'add Folder filed_under Folder object', timeout=u'8000')
-    client.click(link=u'add Folder filed_under Folder object')
+    client.waits.forElement(link=u'add add Folder filed_under Folder object', timeout=u'8000')
+    client.click(link=u'add add Folder filed_under Folder object')
     client.waits.forPageLoad(timeout=u'20000')
     client.type(text=u'subfolder2', id=u'name-subject:A')
     client.click(value=u'button_ok')
--- a/web/views/actions.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/web/views/actions.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -28,7 +28,7 @@
 from cubicweb.appobject import objectify_selector
 from cubicweb.selectors import (EntitySelector, yes,
     one_line_rset, multi_lines_rset, one_etype_rset, relation_possible,
-    nonempty_rset, non_final_entity,
+    nonempty_rset, non_final_entity, score_entity,
     authenticated_user, match_user_groups, match_search_state,
     has_permission, has_add_permission, is_instance, debug_mode,
     )
@@ -322,7 +322,7 @@
     """when displaying the schema of a CWEType, offer to list entities of that type
     """
     __regid__ = 'entitiesoftype'
-    __select__ = one_line_rset() & is_instance('CWEType')
+    __select__ = one_line_rset() & is_instance('CWEType') & score_entity(lambda x: not x.final)
     category = 'mainactions'
     order = 40
 
@@ -391,6 +391,7 @@
     __regid__ = 'siteconfig'
     title = _('site configuration')
     order = 10
+    category = 'manage'
 
 
 class ManageAction(ManagersAction):
--- a/web/views/authentication.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/web/views/authentication.py	Wed Apr 27 09:54:22 2011 +0200
@@ -100,17 +100,13 @@
             self.anoninfo = (self.anoninfo[0], {'password': self.anoninfo[1]})
 
     def validate_session(self, req, session):
-        """check session validity, reconnecting it to the repository if the
-        associated connection expired in the repository side (hence the
-        necessity for this method). Return the connected user on success.
+        """check session validity and return the connected user on success.
 
         raise :exc:`InvalidSession` if session is corrupted for a reason or
         another and should be closed
 
         also invoked while going from anonymous to logged in
         """
-        # with this authentication manager, session is actually a dbapi
-        # connection
         for retriever in self.authinforetrievers:
             if retriever.request_has_auth_info(req):
                 login = retriever.revalidate_login(req)
@@ -135,8 +131,7 @@
     def authenticate(self, req):
         """authenticate user using connection information found in the request,
         and return corresponding a :class:`~cubicweb.dbapi.Connection` instance,
-        as well as login and authentication information dictionary used to open
-        the connection.
+        as well as login used to open the connection.
 
         raise :exc:`cubicweb.AuthenticationError` if authentication failed
         (no authentication info found or wrong user/password)
@@ -152,8 +147,7 @@
                 continue # the next one may succeed
             for retriever_ in self.authinforetrievers:
                 retriever_.authenticated(retriever, req, cnx, login, authinfo)
-            return cnx, login, authinfo
-
+            return cnx, login
         # false if no authentication info found, eg this is not an
         # authentication failure
         if 'login' in locals():
@@ -162,7 +156,7 @@
         if login:
             cnx = self._authenticate(login, authinfo)
             cnx.anonymous_connection = True
-            return cnx, login, authinfo
+            return cnx, login
         raise AuthenticationError()
 
     def _authenticate(self, login, authinfo):
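
Note that authenticate() now returns a 2-tuple: the authinfo dictionary is no longer part of the return value. Code calling it has to be adjusted along these lines (function and variable names below are illustrative, not taken from this changeset):

    def open_session(authmanager, req):
        # up to 3.10: cnx, login, authinfo = authmanager.authenticate(req)
        cnx, login = authmanager.authenticate(req)  # 3.11: 2-tuple only
        return cnx, login
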
--- a/web/views/basecomponents.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/web/views/basecomponents.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -99,7 +99,9 @@
     """display the instance name"""
     __regid__ = 'appliname'
 
-    def render(self, w):
+    # XXX support kwargs for compat with other components which get the view as
+    # argument
+    def render(self, w, **kwargs):
         title = self._cw.property_value('ui.site-title')
         if title:
             w(u'<span id="appliName"><a href="%s">%s</a></span>' % (
--- a/web/views/basetemplates.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/web/views/basetemplates.py	Wed Apr 27 09:54:22 2011 +0200
@@ -156,7 +156,7 @@
         lang = self._cw.lang
         self.write_doctype()
         # explictly close the <base> tag to avoid IE 6 bugs while browsing DOM
-        w(u'<base href="%s"></base>' % xml_escape(self._cw.base_url()))
+        self._cw.html_headers.define_var('BASE_URL', self._cw.base_url())
         w(u'<meta http-equiv="content-type" content="%s; charset=%s"/>\n'
           % (content_type, self._cw.encoding))
         w(u'\n'.join(additional_headers) + u'\n')
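
The explicit <base> element is replaced by a BASE_URL JavaScript global published through html_headers.define_var, which serializes the value into the generated page <head>. The same mechanism can expose other values to client-side code; a small sketch, the variable name being hypothetical:

    def expose_page_size(req, size=20):
        # ends up in the generated <head> roughly as:  var PAGE_SIZE = 20;
        req.html_headers.define_var('PAGE_SIZE', size)
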
--- a/web/views/baseviews.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/web/views/baseviews.py	Wed Apr 27 09:54:22 2011 +0200
@@ -31,7 +31,7 @@
 
 from rql import nodes
 
-from logilab.mtconverter import TransformError, xml_escape, xml_escape
+from logilab.mtconverter import TransformError, xml_escape
 
 from cubicweb import NoSelectableObject, tags
 from cubicweb.selectors import yes, empty_rset, one_etype_rset, match_kwargs
--- a/web/views/boxes.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/web/views/boxes.py	Wed Apr 27 09:54:22 2011 +0200
@@ -208,7 +208,6 @@
             raise component.EmptyComponent()
         self.items = []
 
-
 class RsetBox(component.CtxComponent):
     """helper view class to display an rset in a sidebox"""
     __select__ = nonempty_rset() & match_kwargs('title', 'vid')
--- a/web/views/calendar.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/web/views/calendar.py	Wed Apr 27 09:54:22 2011 +0200
@@ -20,15 +20,28 @@
 __docformat__ = "restructuredtext en"
 _ = unicode
 
-from datetime import datetime, date, timedelta
+import copy
+from datetime import timedelta
 
 from logilab.mtconverter import xml_escape
-from logilab.common.date import ONEDAY, strptime, date_range, todate, todatetime
+from logilab.common.date import todatetime
 
+from cubicweb.utils import json_dumps
 from cubicweb.interfaces import ICalendarable
 from cubicweb.selectors import implements, adaptable
 from cubicweb.view import EntityView, EntityAdapter, implements_adapter_compat
 
+# useful constants & functions ################################################
+
+ONEDAY = timedelta(1)
+
+WEEKDAYS = (_("monday"), _("tuesday"), _("wednesday"), _("thursday"),
+            _("friday"), _("saturday"), _("sunday"))
+MONTHNAMES = ( _('january'), _('february'), _('march'), _('april'), _('may'),
+               _('june'), _('july'), _('august'), _('september'), _('october'),
+               _('november'), _('december')
+               )
+
 
 class ICalendarableAdapter(EntityAdapter):
     __needs_bw_compat__ = True
@@ -44,21 +57,10 @@
     @property
     @implements_adapter_compat('ICalendarable')
     def stop(self):
-        """return stop state"""
+        """return stop date"""
         raise NotImplementedError
 
 
-# useful constants & functions ################################################
-
-ONEDAY = timedelta(1)
-
-WEEKDAYS = (_("monday"), _("tuesday"), _("wednesday"), _("thursday"),
-            _("friday"), _("saturday"), _("sunday"))
-MONTHNAMES = ( _('january'), _('february'), _('march'), _('april'), _('may'),
-               _('june'), _('july'), _('august'), _('september'), _('october'),
-               _('november'), _('december')
-               )
-
 # Calendar views ##############################################################
 
 try:
@@ -146,9 +148,6 @@
                 self.w('<br/>%s'%self._cw.format_date(icalendarable.start
                                                       or icalendarable.stop))
 
-class CalendarLargeItemView(CalendarItemView):
-    __regid__ = 'calendarlargeitem'
-
 
 class _TaskEntry(object):
     def __init__(self, task, color, index=0):
@@ -170,413 +169,91 @@
         return self.start and self.stop and self.start.isocalendar() == self.stop.isocalendar()
 
 
-class OneMonthCal(EntityView):
-    """At some point, this view will probably replace ampm calendars"""
-    __regid__ = 'onemonthcal'
-    __select__ = adaptable('ICalendarable')
-
-    paginable = False
-    title = _('one month')
-
-    def call(self):
-        self._cw.add_js('cubicweb.ajax.js')
-        self._cw.add_css('cubicweb.calendar.css')
-        # XXX: restrict courses directy with RQL
-        _today =  datetime.today()
-
-        if 'year' in self._cw.form:
-            year = int(self._cw.form['year'])
-        else:
-            year = _today.year
-        if 'month' in self._cw.form:
-            month = int(self._cw.form['month'])
-        else:
-            month = _today.month
-
-        first_day_of_month = date(year, month, 1)
-        firstday = first_day_of_month - timedelta(first_day_of_month.weekday())
-        if month >= 12:
-            last_day_of_month = date(year + 1, 1, 1) - timedelta(1)
-        else:
-            last_day_of_month = date(year, month + 1, 1) - timedelta(1)
-        # date range exclude last day so we should at least add one day, hence
-        # the 7
-        lastday = last_day_of_month + timedelta(7 - last_day_of_month.weekday())
-        month_dates = list(date_range(firstday, lastday))
-        dates = {}
-        task_max = 0
-        for row in xrange(self.cw_rset.rowcount):
-            task = self.cw_rset.get_entity(row, 0)
-            if len(self.cw_rset[row]) > 1 and self.cw_rset.description[row][1] == 'CWUser':
-                user = self.cw_rset.get_entity(row, 1)
-            else:
-                user = None
-            the_dates = []
-            icalendarable = task.cw_adapt_to('ICalendarable')
-            tstart = icalendarable.start
-            if tstart:
-                tstart = todate(icalendarable.start)
-                if tstart > lastday:
-                    continue
-                the_dates = [tstart]
-            tstop = icalendarable.stop
-            if tstop:
-                tstop = todate(tstop)
-                if tstop < firstday:
-                    continue
-                the_dates = [tstop]
-            if tstart and tstop:
-                if tstart.isocalendar() == tstop.isocalendar():
-                    if firstday <= tstart <= lastday:
-                        the_dates = [tstart]
-                else:
-                    the_dates = date_range(max(tstart, firstday),
-                                           min(tstop + ONEDAY, lastday))
-            if not the_dates:
-                continue
-
-            for d in the_dates:
-                d_tasks = dates.setdefault((d.year, d.month, d.day), {})
-                t_users = d_tasks.setdefault(task, set())
-                t_users.add( user )
-                if len(d_tasks) > task_max:
-                    task_max = len(d_tasks)
-
-        days = []
-        nrows = max(3, task_max)
-        # colors here are class names defined in cubicweb.css
-        colors = [ "col%x" % i for i in range(12) ]
-        next_color_index = 0
-
-        visited_tasks = {} # holds a description of a task
-        task_colors = {}   # remember a color assigned to a task
-        for mdate in month_dates:
-            d_tasks = dates.get((mdate.year, mdate.month, mdate.day), {})
-            rows = [None] * nrows
-            # every task that is "visited" for the first time
-            # require a special treatment, so we put them in
-            # 'postpone'
-            postpone = []
-            for task in d_tasks:
-                if task in visited_tasks:
-                    task_descr = visited_tasks[ task ]
-                    rows[task_descr.index] = task_descr
-                else:
-                    postpone.append(task)
-            for task in postpone:
-                # to every 'new' task we must affect a color
-                # (which must be the same for every user concerned
-                # by the task)
-                for i, t in enumerate(rows):
-                    if t is None:
-                        if task in task_colors:
-                            color = task_colors[task]
-                        else:
-                            color = colors[next_color_index]
-                            next_color_index = (next_color_index+1)%len(colors)
-                            task_colors[task] = color
-                        task_descr = _TaskEntry(task, color, i)
-                        rows[i] = task_descr
-                        visited_tasks[task] = task_descr
-                        break
-                else:
-                    raise RuntimeError("is it possible we got it wrong?")
-
-            days.append( rows )
-
-        curdate = first_day_of_month
-        self.w(u'<div id="onemonthcalid">')
-        # build schedule
-        self.w(u'<table class="omcalendar">')
-        prevlink, nextlink = self._prevnext_links(curdate)  # XXX
-        self.w(u'<tr><th><a href="%s">&lt;&lt;</a></th><th colspan="5">%s %s</th>'
-               u'<th><a href="%s">&gt;&gt;</a></th></tr>' %
-               (xml_escape(prevlink), self._cw._(curdate.strftime('%B').lower()),
-                curdate.year, xml_escape(nextlink)))
-
-        # output header
-        self.w(u'<tr><th>%s</th><th>%s</th><th>%s</th><th>%s</th><th>%s</th><th>%s</th><th>%s</th></tr>' %
-               tuple(self._cw._(day) for day in WEEKDAYS))
-        # build calendar
-        for mdate, task_rows in zip(month_dates, days):
-            if mdate.weekday() == 0:
-                self.w(u'<tr>')
-            self._build_calendar_cell(mdate, task_rows, curdate)
-            if mdate.weekday() == 6:
-                self.w(u'</tr>')
-        self.w(u'</table></div>')
-
-    def _prevnext_links(self, curdate):
-        prevdate = curdate - timedelta(31)
-        nextdate = curdate + timedelta(31)
-        rql = self.cw_rset.printable_rql()
-        prevlink = self._cw.ajax_replace_url('onemonthcalid', rql=rql,
-                                             vid='onemonthcal',
-                                             year=prevdate.year,
-                                             month=prevdate.month)
-        nextlink = self._cw.ajax_replace_url('onemonthcalid', rql=rql,
-                                             vid='onemonthcal',
-                                             year=nextdate.year,
-                                             month=nextdate.month)
-        return prevlink, nextlink
-
-    def _build_calendar_cell(self, celldate, rows, curdate):
-        curmonth = curdate.month
-        classes = ""
-        if celldate.month != curmonth:
-            classes += " outOfRange"
-        if celldate == date.today():
-            classes += " today"
-        self.w(u'<td class="cell%s">' % classes)
-        self.w(u'<div class="calCellTitle%s">' % classes)
-        self.w(u'<div class="day">%s</div>' % celldate.day)
-
-        if len(self.cw_rset.column_types(0)) == 1:
-            etype = list(self.cw_rset.column_types(0))[0]
-            url = self._cw.build_url(vid='creation', etype=etype,
-                                     schedule=True,
-                                     start=self._cw.format_date(celldate), stop=self._cw.format_date(celldate),
-                                     __redirectrql=self.cw_rset.printable_rql(),
-                                     __redirectparams=self._cw.build_url_params(year=curdate.year, month=curmonth),
-                                     __redirectvid=self.__regid__
-                                     )
-            self.w(u'<div class="cmd"><a href="%s">%s</a></div>' % (xml_escape(url), self._cw._(u'add')))
-            self.w(u'&#160;')
-        self.w(u'</div>')
-        self.w(u'<div class="cellContent">')
-        for task_descr in rows:
-            if task_descr:
-                task = task_descr.task
-                self.w(u'<div class="task %s">' % task_descr.color)
-                task.view('calendaritem', w=self.w )
-                url = task.absolute_url(vid='edition',
-                                        __redirectrql=self.cw_rset.printable_rql(),
-                                        __redirectparams=self._cw.build_url_params(year=curdate.year, month=curmonth),
-                                        __redirectvid=self.__regid__
-                                        )
-
-                self.w(u'<div class="tooltip" ondblclick="stopPropagation(event); window.location.assign(\'%s\'); return false;">' % xml_escape(url))
-                task.view('tooltip', w=self.w )
-                self.w(u'</div>')
-            else:
-                self.w(u'<div class="task">')
-                self.w(u"&#160;")
-            self.w(u'</div>')
-        self.w(u'</div>')
-        self.w(u'</td>')
-
-
-class OneWeekCal(EntityView):
-    """At some point, this view will probably replace ampm calendars"""
-    __regid__ = 'oneweekcal'
+class CalendarView(EntityView):
+    __regid__ = 'calendar'
     __select__ = adaptable('ICalendarable')
 
     paginable = False
-    title = _('one week')
+    title = _('calendar')
+
+    fullcalendar_options = {
+        'firstDay': 1,
+        'header': {'left': 'prev,next today',
+                   'center': 'title',
+                   'right': 'month,agendaWeek,agendaDay',
+                   },
+        'editable': True,
+        'defaultView': 'month',
+        'timeFormat': {'month': '',
+                       '': 'H:mm'},
+        'firstHour': 8,
+        'axisFormat': 'H:mm',
+        'columnFormat': {'month': 'dddd',
+                         'agendaWeek': 'dddd yyyy/M/dd',
+                         'agendaDay': 'dddd yyyy/M/dd'}
+        }
+
 
     def call(self):
-        self._cw.add_js( ('cubicweb.ajax.js', 'cubicweb.calendar.js') )
-        self._cw.add_css('cubicweb.calendar.css')
-        # XXX: restrict directly with RQL
-        _today =  datetime.today()
-        if 'year' in self._cw.form:
-            year = int(self._cw.form['year'])
-        else:
-            year = _today.year
-        if 'week' in self._cw.form:
-            week = int(self._cw.form['week'])
-        else:
-            week = _today.isocalendar()[1]
-        # week - 1 since we get week number > 0 while we want it to start from 0
-        first_day_of_week = todate(strptime('%s-%s-1' % (year, week - 1), '%Y-%U-%w'))
-        lastday = first_day_of_week + timedelta(6)
-        firstday = first_day_of_week
-        dates = [[] for i in range(7)]
-        task_colors = {}   # remember a color assigned to a task
-        # colors here are class names defined in cubicweb.css
-        colors = [ "col%x" % i for i in range(12) ]
-        next_color_index = 0
-        done_tasks = set()
-        for row in xrange(self.cw_rset.rowcount):
-            task = self.cw_rset.get_entity(row, 0)
-            if task.eid in done_tasks:
-                continue
-            done_tasks.add(task.eid)
-            the_dates = []
-            icalendarable = task.cw_adapt_to('ICalendarable')
-            tstart = icalendarable.start
-            tstop = icalendarable.stop
-            if tstart:
-                tstart = todate(tstart)
-                if tstart > lastday:
-                    continue
-                the_dates = [tstart]
-            if tstop:
-                tstop = todate(tstop)
-                if tstop < firstday:
-                    continue
-                the_dates = [tstop]
-            if tstart and tstop:
-                the_dates = date_range(max(tstart, firstday),
-                                       min(tstop + ONEDAY, lastday))
-            if not the_dates:
-                continue
+        self._cw.demote_to_html()
+        self._cw.add_css(('fullcalendar.css', 'cubicweb.calendar.css'))
+        self._cw.add_js(('jquery.ui.js', 'fullcalendar.min.js', 'jquery.qtip.min.js'))
+        self.add_onload()
+        # write calendar div to load jquery fullcalendar object
+        self.w(u'<div id="calendar"></div>')
 
-            if task not in task_colors:
-                task_colors[task] = colors[next_color_index]
-                next_color_index = (next_color_index+1) % len(colors)
-
-            for d in the_dates:
-                day = d.weekday()
-                task_descr = _TaskEntry(task, task_colors[task])
-                dates[day].append(task_descr)
-
-        self.w(u'<div id="oneweekcalid">')
-        # build schedule
-        self.w(u'<table class="omcalendar" id="week">')
-        prevlink, nextlink = self._prevnext_links(first_day_of_week)  # XXX
-        self.w(u'<tr><th class="transparent"></th>')
-        self.w(u'<th><a href="%s">&lt;&lt;</a></th><th colspan="5">%s %s %s</th>'
-               u'<th><a href="%s">&gt;&gt;</a></th></tr>' %
-               (xml_escape(prevlink), first_day_of_week.year,
-                self._cw._(u'week'), first_day_of_week.isocalendar()[1],
-                xml_escape(nextlink)))
-
-        # output header
-        self.w(u'<tr>')
-        self.w(u'<th class="transparent"></th>') # column for hours
-        _today = date.today()
-        for i, day in enumerate(WEEKDAYS):
-            wdate = first_day_of_week + timedelta(i)
-            if wdate.isocalendar() == _today.isocalendar():
-                self.w(u'<th class="today">%s<br/>%s</th>' % (self._cw._(day), self._cw.format_date(wdate)))
-            else:
-                self.w(u'<th>%s<br/>%s</th>' % (self._cw._(day), self._cw.format_date(wdate)))
-        self.w(u'</tr>')
-
-        # build week calendar
-        self.w(u'<tr>')
-        self.w(u'<td style="width:5em;">') # column for hours
-        extra = ""
-        for h in range(8, 20):
-            self.w(u'<div class="hour" %s>'%extra)
-            self.w(u'%02d:00'%h)
-            self.w(u'</div>')
-        self.w(u'</td>')
 
-        for i, day in enumerate(WEEKDAYS):
-            wdate = first_day_of_week + timedelta(i)
-            classes = ""
-            if wdate.isocalendar() == _today.isocalendar():
-                classes = " today"
-            self.w(u'<td class="column %s" id="%s">' % (classes, day))
-            if len(self.cw_rset.column_types(0)) == 1:
-                etype = list(self.cw_rset.column_types(0))[0]
-                url = self._cw.build_url(vid='creation', etype=etype,
-                                         schedule=True,
-                                         __redirectrql=self.cw_rset.printable_rql(),
-                                         __redirectparams=self._cw.build_url_params(year=year, week=week),
-                                         __redirectvid=self.__regid__
-                                         )
-                extra = ' ondblclick="addCalendarItem(event, hmin=8, hmax=20, year=%s, month=%s, day=%s, duration=2, baseurl=\'%s\')"' % (
-                    wdate.year, wdate.month, wdate.day, xml_escape(url))
-            else:
-                extra = ""
-            self.w(u'<div class="columndiv"%s>'% extra)
-            for h in range(8, 20):
-                self.w(u'<div class="hourline" style="top:%sex;">'%((h-7)*8))
-                self.w(u'</div>')
-            if dates[i]:
-                self._build_calendar_cell(wdate, dates[i])
-            self.w(u'</div>')
-            self.w(u'</td>')
-        self.w(u'</tr>')
-        self.w(u'</table></div>')
-        self.w(u'<div id="coord"></div>')
-        self.w(u'<div id="debug">&#160;</div>')
-
-    def _build_calendar_cell(self, date, task_descrs):
-        inday_tasks = [t for t in task_descrs if t.is_one_day_task() and  t.in_working_hours()]
-        wholeday_tasks = [t for t in task_descrs if not t.is_one_day_task()]
-        inday_tasks.sort(key=lambda t:t.start)
-        sorted_tasks = []
-        for i, t in enumerate(wholeday_tasks):
-            t.index = i
-        ncols = len(wholeday_tasks)
-        while inday_tasks:
-            t = inday_tasks.pop(0)
-            for i, c in enumerate(sorted_tasks):
-                if not c or c[-1].stop <= t.start:
-                    c.append(t)
-                    t.index = i+ncols
-                    break
-            else:
-                t.index = len(sorted_tasks) + ncols
-                sorted_tasks.append([t])
-        ncols += len(sorted_tasks)
-        if ncols == 0:
-            return
-
-        inday_tasks = []
-        for tasklist in sorted_tasks:
-            inday_tasks += tasklist
-        width = 100.0/ncols
-        for task_desc in wholeday_tasks + inday_tasks:
-            task = task_desc.task
-            start_hour = 8
-            start_min = 0
-            stop_hour = 20
-            stop_min = 0
-            if task_desc.start:
-                if date < todate(task_desc.start) < date + ONEDAY:
-                    start_hour = max(8, task_desc.start.hour)
-                    start_min = task_desc.start.minute
-            if task_desc.stop:
-                if date < todate(task_desc.stop) < date + ONEDAY:
-                    stop_hour = min(20, task_desc.stop.hour)
-                    if stop_hour < 20:
-                        stop_min = task_desc.stop.minute
-
-            height = 100.0*(stop_hour+stop_min/60.0-start_hour-start_min/60.0)/(20-8)
-            top = 100.0*(start_hour+start_min/60.0-8)/(20-8)
-            left = width*task_desc.index
-            style = "height: %s%%; width: %s%%; top: %s%%; left: %s%%; " % \
-                (height, width, top, left)
-            self.w(u'<div class="task %s" style="%s">' % \
-                       (task_desc.color, style))
-            task.view('calendaritem', dates=False, w=self.w)
-            url = task.absolute_url(vid='edition',
-                                    __redirectrql=self.cw_rset.printable_rql(),
-                                    __redirectparams=self._cw.build_url_params(year=date.year, week=date.isocalendar()[1]),
-                                    __redirectvid=self.__regid__
-                                 )
-
-            self.w(u'<div class="tooltip" ondblclick="stopPropagation(event); window.location.assign(\'%s\'); return false;">' % xml_escape(url))
-            task.view('tooltip', w=self.w)
-            self.w(u'</div>')
-            if task_desc.start is None:
-                self.w(u'<div class="bottommarker">')
-                self.w(u'<div class="bottommarkerline" style="margin: 0px 3px 0px 3px; height: 1px;">')
-                self.w(u'</div>')
-                self.w(u'<div class="bottommarkerline" style="margin: 0px 2px 0px 2px; height: 1px;">')
-                self.w(u'</div>')
-                self.w(u'<div class="bottommarkerline" style="margin: 0px 1px 0px 1px; height: 3ex; color: white; font-size: x-small; vertical-align: center; text-align: center;">')
-                self.w(u'end')
-                self.w(u'</div>')
-                self.w(u'</div>')
-            self.w(u'</div>')
+    def add_onload(self):
+        fullcalendar_options = self.fullcalendar_options.copy()
+        fullcalendar_options['events'] = self.get_events()
+        fullcalendar_options['buttonText'] = {'today': self._cw._('today'),
+                                              'month': self._cw._('month'),
+                                              'week': self._cw._('week'),
+                                              'day': self._cw._('day')}
+        # js callback to add a tooltip and to put html in event's title
+        js = """
+        var options = %s;
+        options.eventRender = function(event, $element) {
+          // add a tooltip for each event
+          var div = '<div class="tooltip">'+ event.description+ '</div>';
+          $element.append(div);
+          // allow to have html tags in event's title
+          $element.find('span.fc-event-title').html($element.find('span.fc-event-title').text());
+        };
+        $("#calendar").fullCalendar(options);
+        """ #"
+        self._cw.add_onload(js % json_dumps(fullcalendar_options))
 
 
-    def _prevnext_links(self, curdate):
-        prevdate = curdate - timedelta(7)
-        nextdate = curdate + timedelta(7)
-        rql = self.cw_rset.printable_rql()
-        prevlink = self._cw.ajax_replace_url('oneweekcalid', rql=rql,
-                                             vid='oneweekcal',
-                                             year=prevdate.year,
-                                             week=prevdate.isocalendar()[1])
-        nextlink = self._cw.ajax_replace_url('oneweekcalid', rql=rql,
-                                             vid='oneweekcal',
-                                             year=nextdate.year,
-                                             week=nextdate.isocalendar()[1])
-        return prevlink, nextlink
+    def get_events(self):
+        events = []
+        for entity in self.cw_rset.entities():
+            icalendarable = entity.cw_adapt_to('ICalendarable')
+            if not (icalendarable.start and icalendarable.stop):
+                continue
+            start_date = icalendarable.start or icalendarable.stop
+            event = {'eid': entity.eid,
+                     'title': entity.view('calendaritem'),
+                     'url': xml_escape(entity.absolute_url()),
+                     'className': 'calevent',
+                     'description': entity.view('tooltip'),
+                     }
+            event['start'] = start_date.strftime('%Y-%m-%dT%H:%M')
+            event['allDay'] = True
+            if icalendarable.stop:
+                event['end'] = icalendarable.stop.strftime('%Y-%m-%dT%H:%M')
+                event['allDay'] = False
+            events.append(event)
+        return events
+
+class OneMonthCal(CalendarView):
+    __regid__ = 'onemonthcal'
+
+    title = _('one month')
+
+class OneWeekCal(CalendarView):
+    __regid__ = 'oneweekcal'
+
+    title = _('one week')
+    fullcalendar_options = CalendarView.fullcalendar_options.copy()
+    fullcalendar_options['defaultView'] = 'agendaWeek'
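
The new CalendarView delegates rendering to the fullcalendar jQuery plugin: fullcalendar_options is serialized with json_dumps and handed to $("#calendar").fullCalendar(...) on page load, while get_events() provides the event list from the adapted entities. OneMonthCal and OneWeekCal above illustrate the intended extension pattern; a cube could tune the plugin the same way, e.g. (regid and option values below are hypothetical):

    class OneDayCal(CalendarView):
        """hypothetical variant opening directly on the single-day view"""
        __regid__ = 'onedaycal'
        title = _('one day')
        fullcalendar_options = CalendarView.fullcalendar_options.copy()
        fullcalendar_options['defaultView'] = 'agendaDay'
        fullcalendar_options['firstHour'] = 7
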
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/cwsources.py	Wed Apr 27 09:54:22 2011 +0200
@@ -0,0 +1,234 @@
+# copyright 2010-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+"""Specific views for data sources and related entities (eg CWSource,
+CWSourceHostConfig, CWSourceSchemaConfig).
+"""
+
+__docformat__ = "restructuredtext en"
+_ = unicode
+
+from itertools import repeat, chain
+
+from cubicweb.selectors import is_instance, score_entity, match_user_groups
+from cubicweb.view import EntityView, StartupView
+from cubicweb.schema import META_RTYPES, VIRTUAL_RTYPES, display_name
+from cubicweb.web import uicfg
+from cubicweb.web.views import tabs, actions
+
+
+_abaa = uicfg.actionbox_appearsin_addmenu
+_abaa.tag_object_of(('CWSourceSchemaConfig', 'cw_schema', '*'), False)
+_abaa.tag_object_of(('CWSourceSchemaConfig', 'cw_for_source', '*'), False)
+
+# source primary views #########################################################
+
+_pvs = uicfg.primaryview_section
+_pvs.tag_object_of(('*', 'cw_for_source', 'CWSource'), 'hidden')
+
+
+class CWSourcePrimaryView(tabs.TabbedPrimaryView):
+    __select__ = is_instance('CWSource')
+    tabs = [_('cwsource-main'), _('cwsource-mapping')]
+    default_tab = 'cwsource-main'
+
+
+class CWSourceMainTab(tabs.PrimaryTab):
+    __regid__ = 'cwsource-main'
+    __select__ = tabs.PrimaryTab.__select__ & is_instance('CWSource')
+
+
+MAPPED_SOURCE_TYPES = set( ('pyrorql', 'datafeed') )
+
+class CWSourceMappingTab(EntityView):
+    __regid__ = 'cwsource-mapping'
+    __select__ = (tabs.PrimaryTab.__select__ & is_instance('CWSource')
+                  & match_user_groups('managers')
+                  & score_entity(lambda x:x.type in MAPPED_SOURCE_TYPES))
+
+    def entity_call(self, entity):
+        _ = self._cw._
+        self.w('<h3>%s</h3>' % _('Entity and relation supported by this source'))
+        eschema = self._cw.vreg.schema.eschema('CWSourceSchemaConfig')
+        if eschema.has_perm(self._cw, 'add'):
+            self.w(u'<a href="%s" class="addButton right">%s</a>' % (
+                self._cw.build_url('add/%s' % eschema),
+                self._cw._('add a CWSourceSchemaConfig')))
+            self.w(u'<div class="clear"></div>')
+        rset = self._cw.execute(
+            'Any X, SCH, XO ORDERBY ET WHERE X options XO, X cw_for_source S, S eid %(s)s, '
+            'X cw_schema SCH, SCH is ET', {'s': entity.eid})
+        self.wview('table', rset, 'noresult')
+        # self.w('<h3>%s</h3>' % _('Relations that should not be crossed'))
+        # self.w('<p>%s</p>' % _(
+        #     'By default, when a relation is not supported by a source, it is '
+        #     'supposed that a local relation may point to an entity from the '
+        #     'external source. Relations listed here won\'t have this '
+        #     '"crossing" behaviour.'))
+        # self.wview('list', entity.related('cw_dont_cross'), 'noresult')
+        # self.w('<h3>%s</h3>' % _('Relations that can be crossed'))
+        # self.w('<p>%s</p>' % _(
+        #     'By default, when a relation is supported by a source, it is '
+        #     'supposed that a local relation can\'t point to an entity from the '
+        #     'external source. Relations listed here may have this '
+        #     '"crossing" behaviour anyway.'))
+        # self.wview('list', entity.related('cw_may_cross'), 'noresult')
+        checker = MAPPING_CHECKERS.get(entity.type, MappingChecker)(entity)
+        checker.check()
+        if (checker.errors or checker.warnings or checker.infos):
+                self.w('<h2>%s</h2>' % _('Detected problems'))
+                errors = zip(repeat(_('error')), checker.errors)
+                warnings = zip(repeat(_('warning')), checker.warnings)
+                infos = zip(repeat(_('warning')), checker.infos)
+                self.wview('pyvaltable', pyvalue=chain(errors, warnings, infos))
+
+
+class MappingChecker(object):
+    def __init__(self, cwsource):
+        self.cwsource = cwsource
+        self.errors = []
+        self.warnings = []
+        self.infos = []
+        self.schema = cwsource._cw.vreg.schema
+
+    def init(self):
+        # supported entity types
+        self.sentities = set()
+        # supported relations
+        self.srelations = {}
+        # avoid duplicated messages
+        self.seen = set()
+        # first get mapping as dict/sets
+        for schemacfg in self.cwsource.reverse_cw_for_source:
+            self.init_schemacfg(schemacfg)
+
+    def init_schemacfg(self, schemacfg):
+        cwerschema = schemacfg.schema
+        if cwerschema.__regid__ == 'CWEType':
+            self.sentities.add(cwerschema.name)
+        elif cwerschema.__regid__ == 'CWRType':
+            assert not cwerschema.name in self.srelations
+            self.srelations[cwerschema.name] = None
+        else: # CWAttribute/CWRelation
+            self.srelations.setdefault(cwerschema.rtype.name, []).append(
+                (cwerschema.stype.name, cwerschema.otype.name) )
+
+    def check(self):
+        self.init()
+        error = self.errors.append
+        warning = self.warnings.append
+        info = self.infos.append
+        for etype in self.sentities:
+            eschema = self.schema[etype]
+            for rschema, ttypes, role in eschema.relation_definitions():
+                if rschema in META_RTYPES:
+                    continue
+                ttypes = [ttype for ttype in ttypes if ttype in self.sentities]
+                if not rschema in self.srelations:
+                    for ttype in ttypes:
+                        rdef = rschema.role_rdef(etype, ttype, role)
+                        self.seen.add(rdef)
+                        if rdef.role_cardinality(role) in '1+':
+                            error(_('relation %(rtype)s with %(etype)s as %(role)s '
+                                    'and target type %(target)s is mandatory but '
+                                    'not supported') %
+                                  {'rtype': rschema, 'etype': etype, 'role': role,
+                                   'target': ttype})
+                        elif ttype in self.sentities:
+                            warning(_('%s could be supported') % rdef)
+                elif not ttypes:
+                    warning(_('relation %(rtype)s with %(etype)s as %(role)s is '
+                              'supported but no target type supported') %
+                            {'rtype': rschema, 'role': role, 'etype': etype})
+        for rtype in self.srelations:
+            rschema = self.schema[rtype]
+            for subj, obj in rschema.rdefs:
+                if subj in self.sentities and obj in self.sentities:
+                    break
+            else:
+                error(_('relation %s is supported but none of its definitions '
+                        'matches supported entities') % rtype)
+        self.custom_check()
+
+    def custom_check(self):
+        pass
+
+
+class PyroRQLMappingChecker(MappingChecker):
+    """pyrorql source mapping checker"""
+
+    def init(self):
+        self.dontcross = set()
+        self.maycross = set()
+        super(PyroRQLMappingChecker, self).init()
+
+    def init_schemacfg(self, schemacfg):
+        options = schemacfg.options or ()
+        if 'dontcross' in options:
+            self.dontcross.add(schemacfg.schema.name)
+        else:
+            super(PyroRQLMappingChecker, self).init_schemacfg(schemacfg)
+            if 'maycross' in options:
+                self.maycross.add(schemacfg.schema.name)
+
+    def custom_check(self):
+        error = self.errors.append
+        info = self.infos.append
+        for etype in self.sentities:
+            eschema = self.schema[etype]
+            for rschema, ttypes, role in eschema.relation_definitions():
+                if rschema in META_RTYPES:
+                    continue
+                if not rschema in self.srelations:
+                    if rschema not in self.dontcross:
+                        if role == 'subject' and rschema.inlined:
+                            error(_('inlined relation %(rtype)s of %(etype)s '
+                                    'should be supported') %
+                                  {'rtype': rschema, 'etype': etype})
+                        elif (rschema not in self.seen and rschema not in self.maycross):
+                            info(_('you may want to specify something for %s') %
+                                 rschema)
+                            self.seen.add(rschema)
+                elif rschema in self.maycross and rschema.inlined:
+                    error(_('you should un-inline relation %s which is '
+                            'supported and may be crossed ') % rschema)
+
+MAPPING_CHECKERS = {
+    'pyrorql': PyroRQLMappingChecker,
+    }
+
+# sources management view ######################################################
+
+class ManageSourcesAction(actions.ManagersAction):
+    __regid__ = 'cwsource'
+    title = _('data sources')
+    category = 'manage'
+
+class CWSourceManagementView(StartupView):
+    __regid__ = 'cw.source-management'
+    rql = ('Any S, ST, SN ORDERBY SN WHERE S is CWSource, S name SN, S type ST')
+    title = _('data sources management')
+
+    def call(self, **kwargs):
+        self.w('<h1>%s</h1>' % self._cw._(self.title))
+        eschema = self._cw.vreg.schema.eschema('CWSource')
+        if eschema.has_perm(self._cw, 'add'):
+            self.w(u'<a href="%s" class="addButton right">%s</a>' % (
+                self._cw.build_url('add/%s' % eschema),
+                self._cw._('add a CWSource')))
+            self.w(u'<div class="clear"></div>')
+        self.wview('table', self._cw.execute(self.rql), displaycols=range(2))
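
MAPPING_CHECKERS maps a source type to the checker class used by the mapping tab, MappingChecker being the generic fallback and PyroRQLMappingChecker the pyrorql-specific one. A cube shipping its own source type could register a dedicated checker the same way; the 'mysource' type and the extra check below are assumptions:

    class MySourceMappingChecker(MappingChecker):
        """hypothetical checker for a 'mysource' source type"""
        def custom_check(self):
            if not self.sentities:
                self.warnings.append(
                    self.cwsource._cw._('no entity type is mapped for this source'))

    MAPPING_CHECKERS['mysource'] = MySourceMappingChecker
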
--- a/web/views/cwuser.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/web/views/cwuser.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -24,10 +24,11 @@
 
 from logilab.mtconverter import xml_escape
 
+from cubicweb.schema import display_name
 from cubicweb.selectors import one_line_rset, is_instance, match_user_groups
-from cubicweb.view import EntityView
+from cubicweb.view import EntityView, StartupView
 from cubicweb.web import action, uicfg, formwidgets
-from cubicweb.web.views import tabs
+from cubicweb.web.views import tabs, tableview, actions
 
 _pvs = uicfg.primaryview_section
 _pvs.tag_attribute(('CWUser', 'login'), 'hidden')
@@ -157,3 +158,50 @@
         entity = self.cw_rset.complete_entity(row, col)
         self.w(u'<a href="%s" class="%s">%s</a>' % (
             entity.absolute_url(), entity.name, entity.printable_value('name')))
+
+
+# user / groups management views ###############################################
+
+class ManageUsersAction(actions.ManagersAction):
+    __regid__ = 'cwuser' # see rewrite rule /cwuser
+    title = _('users and groups')
+    category = 'manage'
+
+
+class CWUserManagementView(StartupView):
+    __regid__ = 'cw.user-management'
+    rql = ('Any U, F, S, U, L ORDERBY L WHERE U is CWUser, U login L, U firstname F, U surname S')
+    title = _('users and groups management')
+
+    def call(self, **kwargs):
+        self.w('<h1>%s</h1>' % self._cw._(self.title))
+        for etype in ('CWUser', 'CWGroup'):
+            eschema = self._cw.vreg.schema.eschema(etype)
+            if eschema.has_perm(self._cw, 'add'):
+                self.w(u'<a href="%s" class="addButton right">%s</a>' % (
+                    self._cw.build_url('add/%s' % eschema),
+                    self._cw._('add a %s' % etype).capitalize()))
+        self.w(u'<div class="clear"></div>')
+        self.wview('cw.user-table', self._cw.execute(self.rql))
+
+
+class CWUserTable(tableview.EditableTableView):
+    __regid__ = 'cw.user-table'
+    __select__ = is_instance('CWUser')
+
+    def call(self, **kwargs):
+        headers = (display_name(self._cw, 'CWUser', 'plural'),
+                   self._cw._('firstname'), self._cw._('surname'),
+                   display_name(self._cw, 'CWGroup', 'plural'))
+        super(CWUserTable, self).call(
+            paginate=True, cellvids={3: 'cw.user-table.group-cell'},
+            headers=headers, **kwargs)
+
+
+class CWUserGroupCell(EntityView):
+    __regid__ = 'cw.user-table.group-cell'
+    __select__ = is_instance('CWUser')
+
+    def cell_call(self, row, col, **kwargs):
+        entity = self.cw_rset.get_entity(row, col)
+        self.w(entity.view('reledit', rtype='in_group', role='subject'))
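
CWUserTable uses the cellvids argument to render its fourth column (the user entity again) with the dedicated cw.user-table.group-cell view. Any column may be customized the same way; a sketch of an alternative cell view, with a hypothetical regid (EntityView, is_instance and xml_escape are already imported in this module):

    class CWUserLoginCell(EntityView):
        """hypothetical cell rendering the login as a link to the user"""
        __regid__ = 'cw.user-table.login-cell'
        __select__ = is_instance('CWUser')

        def cell_call(self, row, col, **kwargs):
            entity = self.cw_rset.get_entity(row, col)
            self.w(u'<a href="%s">%s</a>' % (xml_escape(entity.absolute_url()),
                                             xml_escape(entity.login)))

Such a view would then be selected by passing, for instance, cellvids={0: 'cw.user-table.login-cell'} when calling the table view.
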
--- a/web/views/debug.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/web/views/debug.py	Wed Apr 27 09:54:22 2011 +0200
@@ -27,7 +27,7 @@
 from cubicweb import BadConnectionId
 from cubicweb.selectors import none_rset, match_user_groups
 from cubicweb.view import StartupView
-from cubicweb.web.views import actions
+from cubicweb.web.views import actions, tabs
 
 def dict_to_html(w, dict):
     # XHTML doesn't allow emtpy <ul> nodes
@@ -39,12 +39,24 @@
         w(u'</ul>')
 
 
-
 class SiteInfoAction(actions.ManagersAction):
     __regid__ = 'siteinfo'
     __select__ = match_user_groups('users','managers')
-    title = _('info')
-    order = 30
+    title = _('siteinfo')
+    category = 'manage'
+    order = 1000
+
+
+class SiteInfoView(tabs.TabsMixin, StartupView):
+    __regid__ = 'siteinfo'
+    title = _('Site information')
+    tabs = [_('info'), _('registry'), _('gc')]
+    default_tab = 'info'
+
+    def call(self, **kwargs):
+        """The default view representing the instance's management"""
+        self.w(u'<h1>%s</h1>' % self._cw._(self.title))
+        self.render_tabs(self.tabs, self.default_tab)
 
 
 class ProcessInformationView(StartupView):
@@ -61,7 +73,7 @@
         _ = req._
         w = self.w
         # generic instance information
-        w(u'<h1>%s</h1>' % _('Instance'))
+        w(u'<h2>%s</h2>' % _('Instance'))
         w(u'<table>')
         w(u'<tr><th align="left">%s</th><td>%s</td></tr>' % (
             _('config type'), self._cw.vreg.config.name))
@@ -82,7 +94,7 @@
         w(u'</table>')
         # repository information
         repo = req.vreg.config.repository(None)
-        w(u'<h1>%s</h1>' % _('Repository'))
+        w(u'<h2>%s</h2>' % _('Repository'))
         w(u'<h3>%s</h3>' % _('resources usage'))
         w(u'<table>')
         stats = repo.stats()
@@ -107,7 +119,7 @@
             else:
                 w(u'<p>%s</p>' % _('no repository sessions found'))
         # web server information
-        w(u'<h1>%s</h1>' % _('Web server'))
+        w(u'<h2>%s</h2>' % _('Web server'))
         w(u'<table>')
         w(u'<tr><th align="left">%s</th><td>%s</td></tr>' % (
             _('base url'), req.base_url()))
@@ -146,7 +158,7 @@
     cache_max_age = 0
 
     def call(self, **kwargs):
-        self.w(u'<h1>%s</h1>' % self._cw._("Registry's content"))
+        self.w(u'<h2>%s</h2>' % self._cw._("Registry's content"))
         keys = sorted(self._cw.vreg)
         url = xml_escape(self._cw.url())
         self.w(u'<p>%s</p>\n' % ' - '.join('<a href="%s#%s">%s</a>'
@@ -154,7 +166,7 @@
         for key in keys:
             if key in ('boxes', 'contentnavigation'): # those are bw compat registries
                 continue
-            self.w(u'<h2 id="%s">%s</h2>' % (key, key))
+            self.w(u'<h3 id="%s">%s</h3>' % (key, key))
             if self._cw.vreg[key]:
                 values = sorted(self._cw.vreg[key].iteritems())
                 self.wview('pyvaltable', pyvalue=[(key, xml_escape(repr(val)))
@@ -186,7 +198,7 @@
             lookupclasses += (InternalSession, Session)
         except ImportError:
             pass # no server part installed
-        self.w(u'<h1>%s</h1>' % _('Garbage collection information'))
+        self.w(u'<h2>%s</h2>' % _('Garbage collection information'))
         counters, ocounters, garbage = gc_info(lookupclasses,
                                                viewreferrersclasses=())
         self.w(u'<h3>%s</h3>' % self._cw._('Looked up classes'))
--- a/web/views/editcontroller.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/web/views/editcontroller.py	Wed Apr 27 09:54:22 2011 +0200
@@ -182,7 +182,12 @@
         # process inlined relations at the same time as attributes
         # this will generate less rql queries and might be useful in
         # a few dark corners
-        formid = self._cw.form.get('__form_id', 'edition')
+        if is_main_entity:
+            formid = self._cw.form.get('__form_id', 'edition')
+        else:
+            # XXX inlined forms formid should be saved in a different formparams entry
+            # inbetween, use cubicweb standard formid for inlined forms
+            formid = 'edition'
         form = self._cw.vreg['forms'].select(formid, self._cw, entity=entity)
         eid = form.actual_eid(entity.eid)
         form.formvalues = {} # init fields value cache
--- a/web/views/editforms.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/web/views/editforms.py	Wed Apr 27 09:54:22 2011 +0200
@@ -99,6 +99,7 @@
     # add yes() so it takes precedence over deprecated views in baseforms,
     # though not baseforms based customized view
     __select__ = one_line_rset() & non_final_entity() & yes()
+    form_id = 'edition'
 
     title = _('modification')
 
@@ -109,7 +110,8 @@
     def render_form(self, entity):
         """fetch and render the form"""
         self.form_title(entity)
-        form = self._cw.vreg['forms'].select('edition', self._cw, entity=entity,
+        form = self._cw.vreg['forms'].select(self.form_id, self._cw,
+                                             entity=entity,
                                              submitmsg=self.submited_message())
         self.init_form(form, entity)
         form.render(w=self.w)
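
The new form_id class attribute lets a subclass of the edition view reuse the whole rendering logic with another registered form instead of the hard-coded 'edition' one, e.g. (view and form ids below are hypothetical, and the base class is assumed to be the EditionFormView defined in this module):

    class QuickEditionFormView(EditionFormView):
        """hypothetical variant rendering a lighter, custom edition form"""
        __regid__ = 'quickedit'
        form_id = 'quickedition'  # assumes a form registered under this id
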
--- a/web/views/ibreadcrumbs.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/web/views/ibreadcrumbs.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -117,7 +117,9 @@
     link_template = u'<a href="%s">%s</a>'
     first_separator = True
 
-    def render(self, w):
+    # XXX support kwargs for compat with other components which get the view as
+    # argument
+    def render(self, w, **kwargs):
         entity = self.cw_rset.get_entity(0, 0)
         adapter = ibreadcrumb_adapter(entity)
         view = self.cw_extra_kwargs.get('view')
@@ -178,7 +180,9 @@
 class BreadCrumbAnyRSetVComponent(BreadCrumbEntityVComponent):
     __select__ = basecomponents.HeaderComponent.__select__ & any_rset()
 
-    def render(self, w):
+    # XXX support kwargs for compat with other components which get the view as
+    # argument
+    def render(self, w, **kwargs):
         w(u'<span id="breadcrumbs" class="pathbar">')
         if self.first_separator:
             w(self.separator)
@@ -192,7 +196,7 @@
     def cell_call(self, row, col, **kwargs):
         entity = self.cw_rset.get_entity(row, col)
         desc = xml_escape(uilib.cut(entity.dc_description(), 50))
-        # XXX remember camember : tags.a autoescapes !
+        # NOTE remember camember: tags.a autoescapes
         self.w(tags.a(entity.view('breadcrumbtext'),
                       href=entity.absolute_url(), title=desc))
 
--- a/web/views/igeocodable.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/web/views/igeocodable.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -33,13 +33,13 @@
     @property
     @implements_adapter_compat('IGeocodable')
     def latitude(self):
-        """returns the latitude of the entity"""
+        """returns the latitude of the entity in degree (-90 < float < +90)"""
         raise NotImplementedError
 
     @property
     @implements_adapter_compat('IGeocodable')
     def longitude(self):
-        """returns the longitude of the entity"""
+        """returns the longitude of the entity in degree (-180 < float < +180)"""
         raise NotImplementedError
 
     @implements_adapter_compat('IGeocodable')
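
The updated docstrings make the expected value ranges explicit. A concrete adapter in a cube would typically map both properties to entity attributes, e.g. (the Place entity type and its attributes are hypothetical, and the base class is assumed to be the IGeocodableAdapter defined in this module):

    from cubicweb.selectors import is_instance

    class PlaceIGeocodableAdapter(IGeocodableAdapter):
        """hypothetical adapter for a Place entity holding coordinates"""
        __select__ = is_instance('Place')

        @property
        def latitude(self):
            return self.entity.latitude   # float in ]-90, +90[

        @property
        def longitude(self):
            return self.entity.longitude  # float in ]-180, +180[
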
--- a/web/views/old_calendar.py	Tue Apr 05 08:39:49 2011 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,575 +0,0 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-#
-# This file is part of CubicWeb.
-#
-# CubicWeb is free software: you can redistribute it and/or modify it under the
-# terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option)
-# any later version.
-#
-# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""html calendar views"""
-
-__docformat__ = "restructuredtext en"
-_ = unicode
-
-from datetime import date, time, timedelta
-
-from logilab.mtconverter import xml_escape
-from logilab.common.date import (ONEDAY, ONEWEEK, days_in_month, previous_month,
-                                 next_month, first_day, last_day, date_range)
-
-from cubicweb.interfaces import ICalendarViews
-from cubicweb.selectors import implements, adaptable
-from cubicweb.view import EntityView, EntityAdapter, implements_adapter_compat
-
-class ICalendarViewsAdapter(EntityAdapter):
-    """calendar views interface"""
-    __needs_bw_compat__ = True
-    __regid__ = 'ICalendarViews'
-    __select__ = implements(ICalendarViews, warn=False) # XXX for bw compat, should be abstract
-
-    @implements_adapter_compat('ICalendarViews')
-    def matching_dates(self, begin, end):
-        """
-        :param begin: day considered as begin of the range (`DateTime`)
-        :param end: day considered as end of the range (`DateTime`)
-
-        :return:
-          a list of dates (`DateTime`) in the range [`begin`, `end`] on which
-          this entity apply
-        """
-        raise NotImplementedError
-
-
-# used by i18n tools
-WEEKDAYS = [_("monday"), _("tuesday"), _("wednesday"), _("thursday"),
-            _("friday"), _("saturday"), _("sunday")]
-MONTHNAMES = [ _('january'), _('february'), _('march'), _('april'), _('may'),
-               _('june'), _('july'), _('august'), _('september'), _('october'),
-               _('november'), _('december')
-               ]
-
-class _CalendarView(EntityView):
-    """base calendar view containing helpful methods to build calendar views"""
-    __select__ = adaptable('ICalendarViews')
-    paginable = False
-
-    # Navigation building methods / views ####################################
-
-    PREV = u'<a href="%s">&lt;&lt;</a>&#160;&#160;<a href="%s">&lt;</a>'
-    NEXT = u'<a href="%s">&gt;</a>&#160;&#160;<a href="%s">&gt;&gt;</a>'
-    NAV_HEADER = u"""<table class="calendarPageHeader">
-<tr><td class="prev">%s</td><td class="next">%s</td></tr>
-</table>
-""" % (PREV, NEXT)
-
-    def nav_header(self, date, smallshift=3, bigshift=9):
-        """prints shortcut links to go to previous/next steps (month|week)"""
-        prev1 = previous_month(date, smallshift)
-        next1 = next_month(date, smallshift)
-        prev2 = previous_month(date, bigshift)
-        next2 = next_month(date, bigshift)
-        rql = self.cw_rset.printable_rql()
-        return self.NAV_HEADER % (
-            xml_escape(self._cw.build_url(rql=rql, vid=self.__regid__, year=prev2.year,
-                                          month=prev2.month)),
-            xml_escape(self._cw.build_url(rql=rql, vid=self.__regid__, year=prev1.year,
-                                          month=prev1.month)),
-            xml_escape(self._cw.build_url(rql=rql, vid=self.__regid__, year=next1.year,
-                                          month=next1.month)),
-            xml_escape(self._cw.build_url(rql=rql, vid=self.__regid__, year=next2.year,
-                                          month=next2.month)))
-
-
-    # Calendar building methods ##############################################
-
-    def build_calendars(self, schedule, begin, end):
-        """build several HTML calendars at once, one for each month
-        between begin and end
-        """
-        return [self.build_calendar(schedule, date)
-                for date in date_range(begin, end, incmonth=1)]
-
-    def build_calendar(self, schedule, first_day):
-        """method responsible for building *one* HTML calendar"""
-        # FIXME  iterates between [first_day-first_day.day_of_week ;
-        #                          last_day+6-last_day.day_of_week]
-        umonth = self._cw.format_date(first_day, '%B %Y') # localized month name
-        rows = []
-        current_row = [NO_CELL] * first_day.weekday()
-        for daynum in xrange(0, days_in_month(first_day)):
-            # build cell day
-            day = first_day + timedelta(daynum)
-            events = schedule.get(day)
-            if events:
-                events = [u'\n'.join(event) for event in events.values()]
-                current_row.append(CELL % (daynum+1, '\n'.join(events)))
-            else:
-                current_row.append(EMPTY_CELL % (daynum+1))
-            # store & reset current row on Sundays
-            if day.weekday() == 6:
-                rows.append(u'<tr>%s%s</tr>' % (WEEKNUM_CELL % day.isocalendar()[1], ''.join(current_row)))
-                current_row = []
-        current_row.extend([NO_CELL] * (6-day.weekday()))
-        rql = self.cw_rset.printable_rql()
-        if day.weekday() != 6:
-            rows.append(u'<tr>%s%s</tr>' % (WEEKNUM_CELL % day.isocalendar()[1], ''.join(current_row)))
-        url = self._cw.build_url(rql=rql, vid='calendarmonth',
-                                 year=first_day.year, month=first_day.month)
-        monthlink = u'<a href="%s">%s</a>' % (xml_escape(url), umonth)
-        return CALENDAR(self._cw) % (monthlink, '\n'.join(rows))
-
-    def _mk_schedule(self, begin, end, itemvid='calendaritem'):
-        """private method that gathers information from resultset
-        and builds calendars according to it
-
-        :param begin: begin of date range
-        :param end: end of date rangs
-        :param itemvid: which view to call to render elements in cells
-
-        returns { day1 : { hour : [views] },
-                  day2 : { hour : [views] } ... }
-        """
-        # put this here since all sub views are calling this method
-        self._cw.add_css('cubicweb.calendar.css')
-        schedule = {}
-        for row in xrange(len(self.cw_rset.rows)):
-            entity = self.cw_rset.get_entity(row, 0)
-            infos = u'<div class="event">'
-            infos += self._cw.view(itemvid, self.cw_rset, row=row)
-            infos += u'</div>'
-            for date_ in entity.cw_adapt_to('ICalendarViews').matching_dates(begin, end):
-                day = date(date_.year, date_.month, date_.day)
-                try:
-                    dt = time(date_.hour, date_.minute, date_.second)
-                except AttributeError:
-                    # date instance
-                    dt = time(0, 0, 0)
-                schedule.setdefault(day, {})
-                schedule[day].setdefault(dt, []).append(infos)
-        return schedule
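The schedule built above is a two-level mapping, day -> time of day -> list of rendered snippets. A minimal standalone sketch of that nesting, with plain datetime objects standing in for the entity dates returned by the ICalendarViews adapter:

    from datetime import date, datetime, time

    def add_to_schedule(schedule, when, snippet):
        """index a rendered snippet under its day, then its time of day"""
        day = date(when.year, when.month, when.day)
        try:
            moment = time(when.hour, when.minute, when.second)
        except AttributeError:      # plain date, no time component
            moment = time(0, 0, 0)
        schedule.setdefault(day, {}).setdefault(moment, []).append(snippet)

    schedule = {}
    add_to_schedule(schedule, datetime(2011, 4, 27, 9, 30), u'<div class="event">meeting</div>')
    add_to_schedule(schedule, date(2011, 4, 28), u'<div class="event">release</div>')
    print(schedule[date(2011, 4, 27)])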
-
-
-    @staticmethod
-    def get_date_range(day, shift=4):
-        """returns a couple (begin, end)
-
-        <begin> is the first day of current_month - shift
-        <end> is the last day of current_month + (shift+1)
-        """
-        begin = first_day(previous_month(day, shift))
-        end = last_day(next_month(day, shift))
-        return begin, end
-
-    def _build_ampm_cells(self, events):
-        """create a view without any hourly details.
-
-        :param events: dictionary with all events classified by hour
-        """
-        # split events according am/pm
-        am_events = [event for e_time, e_list in events.iteritems()
-                     if 0 <= e_time.hour < 12
-                     for event in e_list]
-        pm_events = [event for e_time, e_list in events.iteritems()
-                     if 12 <= e_time.hour < 24
-                     for event in e_list]
-        # format each am/pm cell
-        if am_events:
-            am_content = AMPM_CONTENT % ("amCell", "am", '\n'.join(am_events))
-        else:
-            am_content = AMPM_EMPTY % ("amCell", "am")
-        if pm_events:
-            pm_content = AMPM_CONTENT % ("pmCell", "pm", '\n'.join(pm_events))
-        else:
-            pm_content = AMPM_EMPTY % ("pmCell", "pm")
-        return am_content, pm_content
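The am/pm partition above only looks at the hour of each key in the per-day schedule; stripped of the HTML templates, it is a pair of comprehensions (the sample data below is made up):

    from datetime import time

    day_events = {time(9, 30): [u'standup'], time(14, 0): [u'review'], time(23, 59): [u'backup']}

    am = [ev for moment, evts in day_events.items() if 0 <= moment.hour < 12 for ev in evts]
    pm = [ev for moment, evts in day_events.items() if 12 <= moment.hour < 24 for ev in evts]

    print(sorted(am))   # morning events
    print(sorted(pm))   # afternoon/evening events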
-
-
-
-class YearCalendarView(_CalendarView):
-    __regid__ = 'calendaryear'
-    title = _('calendar (year)')
-
-    def call(self, year=None, month=None):
-        """this view renders a 3x3 calendars' table"""
-        year = year or int(self._cw.form.get('year', date.today().year))
-        month = month or int(self._cw.form.get('month', date.today().month))
-        center_date = date(year, month, 1)
-        begin, end = self.get_date_range(day=center_date)
-        schedule = self._mk_schedule(begin, end)
-        self.w(self.nav_header(center_date))
-        calendars = tuple(self.build_calendars(schedule, begin, end))
-        self.w(SMALL_CALENDARS_PAGE % calendars)
-
-
-class SemesterCalendarView(_CalendarView):
-    """this view renders three semesters as three rows of six columns,
-    one column per month
-    """
-    __regid__ = 'calendarsemester'
-    title = _('calendar (semester)')
-
-    def call(self, year=None, month=None):
-        year = year or int(self._cw.form.get('year', date.today().year))
-        month = month or int(self._cw.form.get('month', date.today().month))
-        begin = previous_month(date(year, month, 1), 2)
-        end = next_month(date(year, month, 1), 3)
-        schedule = self._mk_schedule(begin, end)
-        self.w(self.nav_header(date(year, month, 1), 1, 6))
-        self.w(u'<table class="semesterCalendar">')
-        self.build_calendars(schedule, begin, end)
-        self.w(u'</table>')
-        self.w(self.nav_header(date(year, month, 1), 1, 6))
-
-    def build_calendars(self, schedule, begin, end):
-        self.w(u'<tr>')
-        rql = self.cw_rset.printable_rql()
-        for cur_month in date_range(begin, end, incmonth=1):
-            umonth = u'%s&#160;%s' % (self._cw.format_date(cur_month, '%B'), cur_month.year)
-            url = self._cw.build_url(rql=rql, vid=self.__regid__,
-                                     year=cur_month.year, month=cur_month.month)
-            self.w(u'<th colspan="2"><a href="%s">%s</a></th>' % (xml_escape(url),
-                                                                  umonth))
-        self.w(u'</tr>')
-        _ = self._cw._
-        for day_num in xrange(31):
-            self.w(u'<tr>')
-            for cur_month in date_range(begin, end, incmonth=1):
-                if day_num >= days_in_month(cur_month):
-                    self.w(u'%s%s' % (NO_CELL, NO_CELL))
-                else:
-                    day = date(cur_month.year, cur_month.month, day_num+1)
-                    events = schedule.get(day)
-                    self.w(u'<td>%s&#160;%s</td>\n' % (_(WEEKDAYS[day.weekday()])[0].upper(), day_num+1))
-                    self.format_day_events(day, events)
-            self.w(u'</tr>')
-
-    def format_day_events(self, day, events):
-        if events:
-            events = ['\n'.join(event) for event in events.values()]
-            self.w(WEEK_CELL % '\n'.join(events))
-        else:
-            self.w(WEEK_EMPTY_CELL)
-
-
-class MonthCalendarView(_CalendarView):
-    """this view renders a 3x1 calendars' table"""
-    __regid__ = 'calendarmonth'
-    title = _('calendar (month)')
-
-    def call(self, year=None, month=None):
-        year = year or int(self._cw.form.get('year', date.today().year))
-        month = month or int(self._cw.form.get('month', date.today().month))
-        center_date = date(year, month, 1)
-        begin, end = self.get_date_range(day=center_date, shift=1)
-        schedule = self._mk_schedule(begin, end)
-        calendars = self.build_calendars(schedule, begin, end)
-        self.w(self.nav_header(center_date, 1, 3))
-        self.w(BIG_CALENDARS_PAGE % tuple(calendars))
-        self.w(self.nav_header(center_date, 1, 3))
-
-
-class WeekCalendarView(_CalendarView):
-    """this view renders a calendar for week events"""
-    __regid__ = 'calendarweek'
-    title = _('calendar (week)')
-
-    def call(self, year=None, week=None):
-        year = year or int(self._cw.form.get('year', date.today().year))
-        week = week or int(self._cw.form.get('week', date.today().isocalendar()[1]))
-        day0 = date(year, 1, 1)
-        first_day_of_week = day0 - day0.weekday()*ONEDAY + ONEWEEK
-        begin, end = first_day_of_week - ONEWEEK, first_day_of_week + 2*ONEWEEK
-        schedule = self._mk_schedule(begin, end, itemvid='calendarlargeitem')
-        self.w(self.nav_header(first_day_of_week))
-        self.w(u'<table class="weekCalendar">')
-        _weeks = [(first_day_of_week-ONEWEEK, first_day_of_week-ONEDAY),
-                  (first_day_of_week, first_day_of_week+6*ONEDAY),
-                  (first_day_of_week+ONEWEEK, first_day_of_week+13*ONEDAY)]
-        self.build_calendar(schedule, _weeks)
-        self.w(u'</table>')
-        self.w(self.nav_header(first_day_of_week))
-
-    def build_calendar(self, schedule, weeks):
-        rql = self.cw_rset.printable_rql()
-        _ = self._cw._
-        for monday, sunday in weeks:
-            umonth = self._cw.format_date(monday, '%B %Y')
-            url = self._cw.build_url(rql=rql, vid='calendarmonth',
-                                     year=monday.year, month=monday.month)
-            monthlink = '<a href="%s">%s</a>' % (xml_escape(url), umonth)
-            self.w(u'<tr><th colspan="3">%s %s (%s)</th></tr>' \
-                  % (_('week'), monday.isocalendar()[1], monthlink))
-            for day in date_range(monday, sunday+ONEDAY):
-                self.w(u'<tr>')
-                self.w(u'<td>%s</td>' % _(WEEKDAYS[day.weekday()]))
-                self.w(u'<td>%s</td>' % (day.strftime('%Y-%m-%d')))
-                events = schedule.get(day)
-                if events:
-                    events = ['\n'.join(event) for event in events.values()]
-                    self.w(WEEK_CELL % '\n'.join(events))
-                else:
-                    self.w(WEEK_EMPTY_CELL)
-                self.w(u'</tr>')
-
-    def nav_header(self, date, smallshift=1, bigshift=3):
-        """prints shortcut links to go to previous/next steps (month|week)"""
-        prev1 = date - ONEWEEK * smallshift
-        prev2 = date - ONEWEEK * bigshift
-        next1 = date + ONEWEEK * smallshift
-        next2 = date + ONEWEEK * bigshift
-        rql = self.cw_rset.printable_rql()
-        return self.NAV_HEADER % (
-            xml_escape(self._cw.build_url(rql=rql, vid=self.__regid__, year=prev2.year, week=prev2.isocalendar()[1])),
-            xml_escape(self._cw.build_url(rql=rql, vid=self.__regid__, year=prev1.year, week=prev1.isocalendar()[1])),
-            xml_escape(self._cw.build_url(rql=rql, vid=self.__regid__, year=next1.year, week=next1.isocalendar()[1])),
-            xml_escape(self._cw.build_url(rql=rql, vid=self.__regid__, year=next2.year, week=next2.isocalendar()[1])))
-
-
-
-class AMPMYearCalendarView(YearCalendarView):
-    __regid__ = 'ampmcalendaryear'
-    title = _('am/pm calendar (year)')
-
-    def build_calendar(self, schedule, first_day):
-        """method responsible for building *one* HTML calendar"""
-        umonth = self._cw.format_date(first_day, '%B %Y') # localized month name
-        rows = [] # each row is: (am,pm), (am,pm) ... week_title
-        current_row = [(NO_CELL, NO_CELL, NO_CELL)] * first_day.weekday()
-        rql = self.cw_rset.printable_rql()
-        for daynum in xrange(0, days_in_month(first_day)):
-            # build the day cells
-            day = first_day + timedelta(daynum)
-            events = schedule.get(day)
-            if events:
-                current_row.append((AMPM_DAY % (daynum+1),) + self._build_ampm_cells(events))
-            else:
-                current_row.append((AMPM_DAY % (daynum+1),
-                                    AMPM_EMPTY % ("amCell", "am"),
-                                    AMPM_EMPTY % ("pmCell", "pm")))
-            # store & reset current row on Sundays
-            if day.weekday() == 6:
-                url = self._cw.build_url(rql=rql, vid='ampmcalendarweek',
-                                     year=day.year, week=day.isocalendar()[1])
-                weeklink = '<a href="%s">%s</a>' % (xml_escape(url),
-                                                    day.isocalendar()[1])
-                current_row.append(WEEKNUM_CELL % weeklink)
-                rows.append(current_row)
-                current_row = []
-        current_row.extend([(NO_CELL, NO_CELL, NO_CELL)] * (6-day.weekday()))
-        url = self._cw.build_url(rql=rql, vid='ampmcalendarweek',
-                             year=day.year, week=day.isocalendar()[1])
-        weeklink = '<a href="%s">%s</a>' % (xml_escape(url), day.isocalendar()[1])
-        current_row.append(WEEKNUM_CELL % weeklink)
-        rows.append(current_row)
-        # build two rows for each week: am & pm
-        formatted_rows = []
-        for row in rows:
-            week_title = row.pop()
-            day_row = [day for day, am, pm in row]
-            am_row = [am for day, am, pm in row]
-            pm_row = [pm for day, am, pm in row]
-            formatted_rows.append('<tr>%s%s</tr>'% (week_title, '\n'.join(day_row)))
-            formatted_rows.append('<tr class="amRow"><td>&#160;</td>%s</tr>'% '\n'.join(am_row))
-            formatted_rows.append('<tr class="pmRow"><td>&#160;</td>%s</tr>'% '\n'.join(pm_row))
-        # tie everything together
-        url = self._cw.build_url(rql=rql, vid='ampmcalendarmonth',
-                             year=first_day.year, month=first_day.month)
-        monthlink = '<a href="%s">%s</a>' % (xml_escape(url), umonth)
-        return CALENDAR(self._cw) % (monthlink, '\n'.join(formatted_rows))
-
-
-
-class AMPMSemesterCalendarView(SemesterCalendarView):
-    """this view renders a 3x1 calendars' table"""
-    __regid__ = 'ampmcalendarsemester'
-    title = _('am/pm calendar (semester)')
-
-    def build_calendars(self, schedule, begin, end):
-        self.w(u'<tr>')
-        rql = self.cw_rset.printable_rql()
-        for cur_month in date_range(begin, end, incmonth=1):
-            umonth = u'%s&#160;%s' % (self._cw.format_date(cur_month, '%B'), cur_month.year)
-            url = self._cw.build_url(rql=rql, vid=self.__regid__,
-                                 year=cur_month.year, month=cur_month.month)
-            self.w(u'<th colspan="3"><a href="%s">%s</a></th>' % (xml_escape(url),
-                                                                  umonth))
-        self.w(u'</tr>')
-        _ = self._cw._
-        for day_num in xrange(31):
-            self.w(u'<tr>')
-            for cur_month in date_range(begin, end, incmonth=1):
-                if day_num >= days_in_month(cur_month):
-                    self.w(u'%s%s%s' % (NO_CELL, NO_CELL, NO_CELL))
-                else:
-                    day = date(cur_month.year, cur_month.month, day_num+1)
-                    events = schedule.get(day)
-                    self.w(u'<td>%s&#160;%s</td>\n' % (_(WEEKDAYS[day.weekday()])[0].upper(),
-                                                       day_num+1))
-                    self.format_day_events(day, events)
-            self.w(u'</tr>')
-
-    def format_day_events(self, day, events):
-        if events:
-            self.w(u'\n'.join(self._build_ampm_cells(events)))
-        else:
-            self.w(u'%s %s'% (AMPM_EMPTY % ("amCell", "am"),
-                              AMPM_EMPTY % ("pmCell", "pm")))
-
-
-class AMPMMonthCalendarView(MonthCalendarView):
-    """this view renders a 3x1 calendars' table"""
-    __regid__ = 'ampmcalendarmonth'
-    title = _('am/pm calendar (month)')
-
-    def build_calendar(self, schedule, first_day):
-        """method responsible for building *one* HTML calendar"""
-        umonth = self._cw.format_date(first_day, '%B %Y') # localized month name
-        rows = [] # each row is: (am,pm), (am,pm) ... week_title
-        current_row = [(NO_CELL, NO_CELL, NO_CELL)] * first_day.weekday()
-        rql = self.cw_rset.printable_rql()
-        for daynum in xrange(0, days_in_month(first_day)):
-            # build the day cells
-            day = first_day + timedelta(daynum)
-            events = schedule.get(day)
-            if events:
-                current_row.append((AMPM_DAY % (daynum+1),) + self._build_ampm_cells(events))
-            else:
-                current_row.append((AMPM_DAY % (daynum+1),
-                                    AMPM_EMPTY % ("amCell", "am"),
-                                    AMPM_EMPTY % ("pmCell", "pm")))
-            # store & reset current row on Sundays
-            if day.weekday() == 6:
-                url = self._cw.build_url(rql=rql, vid='ampmcalendarweek',
-                                         year=day.year, week=day.isocalendar()[1])
-                weeklink = '<a href="%s">%s</a>' % (xml_escape(url),
-                                                    day.isocalendar()[1])
-                current_row.append(WEEKNUM_CELL % weeklink)
-                rows.append(current_row)
-                current_row = []
-        current_row.extend([(NO_CELL, NO_CELL, NO_CELL)] * (6-day.weekday()))
-        url = self._cw.build_url(rql=rql, vid='ampmcalendarweek',
-                                 year=day.year, week=day.isocalendar()[1])
-        weeklink = '<a href="%s">%s</a>' % (xml_escape(url),
-                                            day.isocalendar()[1])
-        current_row.append(WEEKNUM_CELL % weeklink)
-        rows.append(current_row)
-        # build two rows for each week: am & pm
-        formatted_rows = []
-        for row in rows:
-            week_title = row.pop()
-            day_row = [day for day, am, pm in row]
-            am_row = [am for day, am, pm in row]
-            pm_row = [pm for day, am, pm in row]
-            formatted_rows.append('<tr>%s%s</tr>'% (week_title, '\n'.join(day_row)))
-            formatted_rows.append('<tr class="amRow"><td>&#160;</td>%s</tr>'% '\n'.join(am_row))
-            formatted_rows.append('<tr class="pmRow"><td>&#160;</td>%s</tr>'% '\n'.join(pm_row))
-        # tie everything together
-        url = self._cw.build_url(rql=rql, vid='ampmcalendarmonth',
-                                 year=first_day.year, month=first_day.month)
-        monthlink = '<a href="%s">%s</a>' % (xml_escape(url),
-                                             umonth)
-        return CALENDAR(self._cw) % (monthlink, '\n'.join(formatted_rows))
-
-
-
-class AMPMWeekCalendarView(WeekCalendarView):
-    """this view renders a 3x1 calendars' table"""
-    __regid__ = 'ampmcalendarweek'
-    title = _('am/pm calendar (week)')
-
-    def build_calendar(self, schedule, weeks):
-        rql = self.cw_rset.printable_rql()
-        w = self.w
-        _ = self._cw._
-        for monday, sunday in weeks:
-            umonth = self._cw.format_date(monday, '%B %Y')
-            url = self._cw.build_url(rql=rql, vid='ampmcalendarmonth',
-                                     year=monday.year, month=monday.month)
-            monthlink = '<a href="%s">%s</a>' % (xml_escape(url), umonth)
-            w(u'<tr>%s</tr>' % (
-                WEEK_TITLE % (_('week'), monday.isocalendar()[1], monthlink)))
-            w(u'<tr><th>%s</th><th>&#160;</th></tr>'% _(u'Date'))
-            for day in date_range(monday, sunday+ONEDAY):
-                events = schedule.get(day)
-                style = day.weekday() % 2 and "even" or "odd"
-                w(u'<tr class="%s">' % style)
-                if events:
-                    hours = events.keys()
-                    hours.sort()
-                    w(AMPM_DAYWEEK % (
-                        len(hours), _(WEEKDAYS[day.weekday()]),
-                        self._cw.format_date(day)))
-                    w(AMPM_WEEK_CELL % (
-                        hours[0].hour, hours[0].minute,
-                        '\n'.join(events[hours[0]])))
-                    w(u'</tr>')
-                    for hour in hours[1:]:
-                        w(u'<tr class="%s">%s</tr>'% (
-                            style, AMPM_WEEK_CELL % (hour.hour, hour.minute,
-                                                     '\n'.join(events[hour]))))
-                else:
-                    w(AMPM_DAYWEEK_EMPTY % (
-                        _(WEEKDAYS[day.weekday()]),
-                        self._cw.format_date(day)))
-                    w(WEEK_EMPTY_CELL)
-                    w(u'</tr>')
-
-
-SMALL_CALENDARS_PAGE = u"""<table class="smallCalendars">
-<tr><td class="calendar">%s</td><td class="calendar">%s</td><td class="calendar">%s</td></tr>
-<tr><td class="calendar">%s</td><td class="calendar">%s</td><td class="calendar">%s</td></tr>
-<tr><td class="calendar">%s</td><td class="calendar">%s</td><td class="calendar">%s</td></tr>
-</table>
-"""
-
-BIG_CALENDARS_PAGE = u"""<table class="bigCalendars">
-<tr><td class="calendar">%s</td></tr>
-<tr><td class="calendar">%s</td></tr>
-<tr><td class="calendar">%s</td></tr>
-</table>
-"""
-
-WEEKNUM_CELL = u'<td class="weeknum">%s</td>'
-
-def CALENDAR(req):
-    _ = req._
-    WEEKNUM_HEADER = u'<th class="weeknum">%s</th>' % _('week')
-    CAL_HEADER = WEEKNUM_HEADER + u' \n'.join([u'<th class="weekday">%s</th>' % _(day)[0].upper()
-                                               for day in WEEKDAYS])
-    return u"""<table>
-<tr><th class="month" colspan="8">%%s</th></tr>
-<tr>
-  %s
-</tr>
-%%s
-</table>
-""" % (CAL_HEADER,)
-
-
-DAY_TEMPLATE = """<tr><td class="weekday">%(daylabel)s</td><td>%(dmydate)s</td><td>%(dayschedule)s</td>
-"""
-
-NO_CELL = u'<td class="noday"></td>'
-EMPTY_CELL = u'<td class="cellEmpty"><span class="cellTitle">%s</span></td>'
-CELL = u'<td class="cell"><span class="cellTitle">%s</span><div class="cellContent">%s</div></td>'
-
-AMPM_DAY = u'<td class="cellDay">%d</td>'
-AMPM_EMPTY = u'<td class="%sEmpty"><span class="cellTitle">%s</span></td>'
-AMPM_CONTENT = u'<td class="%s"><span class="cellTitle">%s</span><div class="cellContent">%s</div></td>'
-
-WEEK_TITLE = u'<th class="weekTitle" colspan="2">%s %s (%s)</th>'
-WEEK_EMPTY_CELL = u'<td class="weekEmptyCell">&#160;</td>'
-WEEK_CELL = u'<td class="weekCell"><div class="cellContent">%s</div></td>'
-
-AMPM_DAYWEEK_EMPTY = u'<td>%s&#160;%s</td>'
-AMPM_DAYWEEK = u'<td rowspan="%d">%s&#160;%s</td>'
-AMPM_WEEK_CELL = u'<td class="ampmWeekCell"><div class="cellContent">%02d:%02d - %s</div></td>'
--- a/web/views/primary.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/web/views/primary.py	Wed Apr 27 09:54:22 2011 +0200
@@ -52,10 +52,8 @@
         """
         return []
 
-    def cell_call(self, row, col):
-        self.cw_row = row
-        self.cw_col = col
-        entity = self.cw_rset.complete_entity(row, col)
+    def entity_call(self, entity):
+        entity.complete()
         self.render_entity(entity)
 
     def render_entity(self, entity):
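With the cell_call/entity_call switch above, a primary-view subclass no longer juggles row/column indexes itself. A hypothetical subclass written against the new hook, only to illustrate the calling convention (the Ticket entity type is invented; the body mirrors the new default):

    from cubicweb.selectors import is_instance
    from cubicweb.web.views.primary import PrimaryView

    class TicketPrimaryView(PrimaryView):      # hypothetical, for illustration only
        __select__ = PrimaryView.__select__ & is_instance('Ticket')

        def entity_call(self, entity):
            # the entity is handed over directly; no self.cw_row/self.cw_col
            # bookkeeping as with the old cell_call(row, col) signature
            entity.complete()
            self.render_entity(entity)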
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/web/views/rdf.py	Wed Apr 27 09:54:22 2011 +0200
@@ -0,0 +1,100 @@
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+"""base xml and rss views"""
+
+__docformat__ = "restructuredtext en"
+_ = unicode
+
+from yams import xy
+
+from cubicweb.schema import VIRTUAL_RTYPES
+from cubicweb.view import EntityView
+from cubicweb.web.views.xmlrss import SERIALIZERS
+
+try:
+    import rdflib
+except ImportError:
+    rdflib = None
+
+if rdflib is not None:
+    RDF = rdflib.Namespace('http://www.w3.org/1999/02/22-rdf-syntax-ns#')
+    CW = rdflib.Namespace('http://ns.cubicweb.org/cubicweb/0.0/')
+    from rdflib import Literal, URIRef, Namespace
+
+    def urijoin(item):
+        base, ext = item
+        return URIRef(Namespace(base)[ext])
+
+    SKIP_RTYPES = VIRTUAL_RTYPES | set(['cwuri', 'is', 'is_instance_of'])
+
+    class RDFView(EntityView):
+        """rdf view for entities"""
+        __regid__ = 'rdf'
+        title = _('rdf')
+        templatable = False
+        content_type = 'text/xml' # +rdf
+
+        def call(self):
+            graph = rdflib.Graph()
+            graph.bind('cw', CW)
+            for prefix, xmlns in xy.XY.prefixes.items():
+                graph.bind(prefix, rdflib.Namespace(xmlns))
+            for i in xrange(self.cw_rset.rowcount):
+                entity = self.cw_rset.complete_entity(i, 0)
+                self.entity2graph(graph, entity)
+            self.w(graph.serialize().decode('utf-8'))
+
+        def entity2graph(self, graph, entity):
+            cwuri = URIRef(entity.cwuri)
+            add = graph.add
+            add( (cwuri, RDF.type, CW[entity.e_schema.type]) )
+            try:
+                for item in xy.xeq(entity.e_schema.type):
+                    add( (cwuri, RDF.type, urijoin(item)) )
+            except xy.UnsupportedVocabulary:
+                pass
+            for rschema, eschemas, role in entity.e_schema.relation_definitions('relation'):
+                rtype = rschema.type
+                if rtype in SKIP_RTYPES or rtype.endswith('_permission'):
+                    continue
+                for eschema in eschemas:
+                    if eschema.final:
+                        try:
+                            value = entity.cw_attr_cache[rtype]
+                        except KeyError:
+                            continue # assuming rtype is Bytes
+                        if value is not None:
+                            add( (cwuri, CW[rtype], Literal(value)) )
+                            try:
+                                for item in xy.xeq('%s %s' % (entity.e_schema.type, rtype)):
+                                    add( (cwuri, urijoin(item[1]), Literal(value)) )
+                            except xy.UnsupportedVocabulary:
+                                pass
+                    else:
+                        for related in entity.related(rtype, role, entities=True):
+                            if role == 'subject':
+                                add( (cwuri, CW[rtype], URIRef(related.cwuri)) )
+                                try:
+                                    for item in xy.xeq('%s %s' % (entity.e_schema.type, rtype)):
+                                        add( (cwuri, urijoin(item), URIRef(related.cwuri)) )
+                                except xy.UnsupportedVocabulary:
+                                    pass
+                            else:
+                                add( (URIRef(related.cwuri), CW[rtype], cwuri) )
+
+
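The new view above boils down to standard rdflib calls: build a Graph, bind the cw prefix, then add one rdf:type triple per entity plus one triple per attribute or relation. A self-contained sketch with fabricated entity data (the example.org URIs and the CWUser/login/in_group names are only illustrative):

    import rdflib
    from rdflib import Literal, URIRef, Namespace

    RDF = Namespace('http://www.w3.org/1999/02/22-rdf-syntax-ns#')
    CW = Namespace('http://ns.cubicweb.org/cubicweb/0.0/')

    graph = rdflib.Graph()
    graph.bind('cw', CW)

    user = URIRef('http://example.org/cwuser/42')       # would be entity.cwuri
    group = URIRef('http://example.org/cwgroup/3')
    graph.add((user, RDF['type'], CW['CWUser']))         # rdf:type triple
    graph.add((user, CW['login'], Literal(u'admin')))    # final attribute
    graph.add((user, CW['in_group'], group))             # subject relation

    print(graph.serialize(format='xml'))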
--- a/web/views/reledit.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/web/views/reledit.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -15,7 +15,7 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""the 'reedit' feature (eg edit attribute/relation from primary view"""
+"""the 'reledit' feature (eg edit attribute/relation from primary view)"""
 
 __docformat__ = "restructuredtext en"
 _ = unicode
@@ -56,12 +56,15 @@
     _cancelclick = "cw.reledit.cleanupAfterCancel('%s')"
 
     # ui side actions/buttons
-    _addzone = u'<img title="%(msg)s" src="data/plus.png" alt="%(msg)s"/>'
+    _addzone = u'<img title="%(msg)s" src="%(logo)s" alt="%(msg)s"/>'
     _addmsg = _('click to add a value')
-    _deletezone = u'<img title="%(msg)s" src="data/cancel.png" alt="%(msg)s"/>'
+    _addlogo = 'plus.png'
+    _deletezone = u'<img title="%(msg)s" src="%(logo)s" alt="%(msg)s"/>'
     _deletemsg = _('click to delete this value')
-    _editzone = u'<img title="%(msg)s" src="data/pen_icon.png" alt="%(msg)s"/>'
+    _deletelogo = 'cancel.png'
+    _editzone = u'<img title="%(msg)s" src="%(logo)s" alt="%(msg)s"/>'
     _editzonemsg = _('click to edit this field')
+    _editlogo = 'pen_icon.png'
 
     # renderer
     _form_renderer_id = 'base'
@@ -81,7 +84,7 @@
         self._cw.add_js(('cubicweb.reledit.js', 'cubicweb.edition.js', 'cubicweb.ajax.js'))
         entity = self.cw_rset.get_entity(row, col)
         rschema = self._cw.vreg.schema[rtype]
-        self._rules = rctrl.etype_get(entity.e_schema, rschema, role, '*')
+        self._rules = rctrl.etype_get(entity.e_schema.type, rschema.type, role, '*')
         if rvid is not None or default_value is not None:
             warn('[3.9] specifying rvid/default_value on select is deprecated, '
                  'reledit_ctrl rtag to control this' % self, DeprecationWarning)
@@ -210,14 +213,18 @@
         # NOTE: should be sufficient given a well built schema/security
         return rschema.has_perm(self._cw, 'delete', **kwargs)
 
+    def _build_zone(self, zonedef, msg, logo):
+        return zonedef % {'msg': xml_escape(self._cw._(msg)),
+                          'logo': xml_escape(self._cw.data_url(logo))}
+
     def _build_edit_zone(self):
-        return self._editzone % {'msg' : xml_escape(self._cw._(self._editzonemsg))}
+        return self._build_zone(self._editzone, self._editzonemsg, self._editlogo)
 
     def _build_delete_zone(self):
-        return self._deletezone % {'msg': xml_escape(self._cw._(self._deletemsg))}
+        return self._build_zone(self._deletezone, self._deletemsg, self._deletelogo)
 
     def _build_add_zone(self):
-        return self._addzone % {'msg': xml_escape(self._cw._(self._addmsg))}
+        return self._build_zone(self._addzone, self._addmsg, self._addlogo)
 
     def _build_divid(self, rtype, role, entity_eid):
         """ builds an id for the root div of a reledit widget """
--- a/web/views/schema.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/web/views/schema.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -145,7 +145,7 @@
     __regid__ = 'schema'
     title = _('instance schema')
     tabs = [_('schema-diagram'), _('schema-entity-types'),
-            _('schema-relation-types'), _('schema-security')]
+            _('schema-relation-types')]
     default_tab = 'schema-diagram'
 
     def call(self):
@@ -183,110 +183,6 @@
         self.wview('table', self._cw.execute(
             'Any X ORDERBY N WHERE X is CWRType, X name N, X final FALSE'))
 
-
-class SchemaPermissionsTab(SecurityViewMixIn, StartupView):
-    __regid__ = 'schema-security'
-    __select__ = StartupView.__select__ & match_user_groups('managers')
-
-    def call(self, display_relations=True):
-        skiptypes = skip_types(self._cw)
-        schema = self._cw.vreg.schema
-        # compute entities
-        entities = sorted(eschema for eschema in schema.entities()
-                          if not (eschema.final or eschema in skiptypes))
-        # compute relations
-        if display_relations:
-            relations = sorted(rschema for rschema in schema.relations()
-                               if not (rschema.final
-                                       or rschema in skiptypes
-                                       or rschema in META_RTYPES))
-        else:
-            relations = []
-        # index
-        _ = self._cw._
-        url = xml_escape(self._cw.build_url('schema'))
-        self.w(u'<div id="schema_security">')
-        self.w(u'<h2 class="schema">%s</h2>' % _('Index'))
-        self.w(u'<h3 id="entities">%s</h3>' % _('Entity types'))
-        ents = []
-        for eschema in sorted(entities):
-            ents.append(u'<a class="grey" href="%s#%s">%s</a>' % (
-                url,  eschema.type, eschema.type))
-        self.w(u', '.join(ents))
-        self.w(u'<h3 id="relations">%s</h3>' % _('Relation types'))
-        rels = []
-        for rschema in sorted(relations):
-            rels.append(u'<a class="grey" href="%s#%s">%s</a>' %  (
-                url , rschema.type, rschema.type))
-        self.w(u', '.join(rels))
-        # permissions tables
-        self.display_entities(entities)
-        if relations:
-            self.display_relations(relations)
-        self.w(u'</div>')
-
-    def has_non_default_perms(self, rdef):
-        """return true if the given *attribute* relation definition has custom
-        permission
-        """
-        for action in rdef.ACTIONS:
-            def_rqlexprs = []
-            def_groups = []
-            for perm in DEFAULT_ATTRPERMS[action]:
-                if not isinstance(perm, basestring):
-                    def_rqlexprs.append(perm.expression)
-                else:
-                    def_groups.append(perm)
-            rqlexprs = [rql.expression for rql in rdef.get_rqlexprs(action)]
-            groups = rdef.get_groups(action)
-            if groups != frozenset(def_groups) or \
-                frozenset(rqlexprs) != frozenset(def_rqlexprs):
-                return True
-        return False
-
-    def display_entities(self, entities):
-        _ = self._cw._
-        url = xml_escape(self._cw.build_url('schema'))
-        self.w(u'<h2 id="entities" class="schema">%s</h2>' % _('Permissions for entity types'))
-        for eschema in entities:
-            self.w(u'<h3 id="%s" class="schema"><a href="%s">%s (%s)</a> ' % (
-                eschema.type, self._cw.build_url('cwetype/%s' % eschema.type),
-                eschema.type, _(eschema.type)))
-            self.w(u'<a href="%s#schema_security"><img src="%s" alt="%s"/></a>' % (
-                url,  self._cw.uiprops['UP_ICON'], _('up')))
-            self.w(u'</h3>')
-            self.w(u'<div style="margin: 0px 1.5em">')
-            self.permissions_table(eschema)
-            # display entity attributes only if they have some permissions modified
-            modified_attrs = []
-            for attr, etype in  eschema.attribute_definitions():
-                rdef = eschema.rdef(attr)
-                if attr not in META_RTYPES and self.has_non_default_perms(rdef):
-                    modified_attrs.append(rdef)
-            if modified_attrs:
-                self.w(u'<h4>%s</h4>' % _('Attributes with non default permissions:'))
-                self.w(u'</div>')
-                self.w(u'<div style="margin: 0px 6em">')
-                for rdef in modified_attrs:
-                    attrtype = str(rdef.rtype)
-                    self.w(u'<h4 class="schema">%s (%s)</h4> ' % (attrtype, _(attrtype)))
-                    self.permissions_table(rdef)
-            self.w(u'</div>')
-
-    def display_relations(self, relations):
-        _ = self._cw._
-        url = xml_escape(self._cw.build_url('schema'))
-        self.w(u'<h2 id="relations" class="schema">%s</h2>' % _('Permissions for relations'))
-        for rschema in relations:
-            self.w(u'<h3 id="%s" class="schema"><a href="%s">%s (%s)</a> ' % (
-                rschema.type, self._cw.build_url('cwrtype/%s' % rschema.type),
-                rschema.type, _(rschema.type)))
-            self.w(u'<a href="%s#schema_security"><img src="%s" alt="%s"/></a>' % (
-                url,  self._cw.uiprops['UP_ICON'], _('up')))
-            self.w(u'</h3>')
-            self.grouped_permissions_table(rschema)
-
-
 # CWEType ######################################################################
 
 # register msgid generated in entity relations tables
@@ -647,8 +543,9 @@
     pass
 
 class CWSchemaDotPropsHandler(s2d.SchemaDotPropsHandler):
-    def __init__(self, visitor):
+    def __init__(self, visitor, cw):
         self.visitor = visitor
+        self.cw = cw
         self.nextcolor = cycle( ('#ff7700', '#000000',
                                  '#ebbc69', '#888888') ).next
         self.colors = {}
@@ -662,7 +559,7 @@
         label.append(r'\l}') # trailing \l ensures alignment of the last one
         return {'label' : ''.join(label), 'shape' : "record",
                 'fontname' : "Courier", 'style' : "filled",
-                'href': 'cwetype/%s' % eschema.type,
+                'href': self.cw.build_url('cwetype/%s' % eschema.type),
                 'fontsize': '10px'
                 }
 
@@ -673,11 +570,12 @@
             kwargs = {'label': rschema.type,
                       'color': '#887788', 'style': 'dashed',
                       'dir': 'both', 'arrowhead': 'normal', 'arrowtail': 'normal',
-                      'fontsize': '10px', 'href': 'cwrtype/%s' % rschema.type}
+                      'fontsize': '10px',
+                      'href': self.cw.build_url('cwrtype/%s' % rschema.type)}
         else:
             kwargs = {'label': rschema.type,
                       'color' : 'black',  'style' : 'filled', 'fontsize': '10px',
-                      'href': 'cwrtype/%s' % rschema.type}
+                      'href': self.cw.build_url('cwrtype/%s' % rschema.type)}
             rdef = rschema.rdef(subjnode, objnode)
             composite = rdef.composite
             if rdef.composite == 'subject':
@@ -729,7 +627,7 @@
             alt = self._cw._('graphical representation of %(appid)s data model')
         alt %= {'rtype': rtype, 'etype': etype,
                 'appid': self._cw.vreg.config.appid}
-        prophdlr = CWSchemaDotPropsHandler(visitor)
+        prophdlr = CWSchemaDotPropsHandler(visitor, self._cw)
         generator = GraphGenerator(DotBackend('schema', 'BT',
                                               ratio='compress',size=None,
                                               renderer='dot',
@@ -794,13 +692,14 @@
     __select__ = facet.AttributeFacet.__select__ & is_instance('CWEType', 'CWRType')
     rtype = 'final'
 
+
 class ViewSchemaAction(action.Action):
     __regid__ = 'schema'
     __select__ = yes()
 
     title = _("site schema")
-    category = 'siteactions'
     order = 30
+    category = 'manage'
 
     def url(self):
         return self._cw.build_url(self.__regid__)
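Besides the absolute-URL change, the props handler keeps a small rotating palette (the nextcolor/colors pair above). A minimal illustration of that pattern with itertools directly; keying the cache by relation type is an assumption, not something visible in this hunk:

    from itertools import cycle

    palette = cycle(('#ff7700', '#000000', '#ebbc69', '#888888'))
    colors = {}

    def color_for(key):
        # hand out the next palette entry the first time a key is seen,
        # then keep returning the same color for that key
        if key not in colors:
            colors[key] = next(palette)
        return colors[key]

    print(color_for('in_group'), color_for('owned_by'), color_for('in_group'))
    # -> #ff7700 #000000 #ff7700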
--- a/web/views/sessions.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/web/views/sessions.py	Wed Apr 27 09:54:22 2011 +0200
@@ -69,8 +69,8 @@
         raise :exc:`cubicweb.AuthenticationError` if authentication failed
         (no authentication info found or wrong user/password)
         """
-        cnx, login, authinfo = self.authmanager.authenticate(req)
-        session = DBAPISession(cnx, login, authinfo)
+        cnx, login = self.authmanager.authenticate(req)
+        session = DBAPISession(cnx, login)
         self._sessions[session.sessionid] = session
         # associate the connection to the current request
         req.set_session(session)
--- a/web/views/startup.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/web/views/startup.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -15,14 +15,15 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""Set of HTML startup views. A startup view is global, e.g. doesn't
-apply to a result set.
+"""Set of HTML startup views. A startup view is global, e.g. doesn't apply to a
+result set.
 """
 
 __docformat__ = "restructuredtext en"
 _ = unicode
 
 from logilab.common.textutils import unormalize
+from logilab.common.deprecation import deprecated
 from logilab.mtconverter import xml_escape
 
 from cubicweb.view import StartupView
@@ -35,101 +36,59 @@
     title = _('manage')
     http_cache_manager = httpcache.EtagHTTPCacheManager
     add_etype_links = ()
-
-    def display_folders(self):
-        return False
+    skip_startup_views = set( ('index', 'manage', 'schema', 'owl', 'changelog',
+                               'systempropertiesform', 'propertiesform',
+                               'cw.user-management', 'cw.source-management',
+                               'siteinfo', 'info', 'registry', 'gc',
+                               'tree') )
 
     def call(self, **kwargs):
         """The default view representing the instance's management"""
         self._cw.add_css('cubicweb.manageview.css')
         self.w(u'<h1>%s</h1>' % self._cw.property_value('ui.site-title'))
-        if not self.display_folders():
-            self._main_index()
-        else:
-            self.w(u'<table><tr>\n')
-            self.w(u'<td style="width:40%">')
-            self._main_index()
-            self.w(u'</td><td style="width:60%">')
-            self.folders()
-            self.w(u'</td>')
-            self.w(u'</tr></table>\n')
+        self.entities()
+        self.manage_actions()
+        self.startup_views()
 
-    def _main_index(self):
-        req = self._cw
-        manager = req.user.matching_groups('managers')
-        if not manager and 'Card' in self._cw.vreg.schema:
-            rset = self._cw.execute('Card X WHERE X wikiid "index"')
-        else:
-            rset = None
-        if rset:
-            self.wview('inlined', rset, row=0)
-        else:
-            self.entities()
+    def manage_actions(self):
+        allactions = self._cw.vreg['actions'].possible_actions(self._cw)
+        if allactions.get('manage'):
             self.w(u'<div class="hr">&#160;</div>')
-            self.startup_views()
-        if manager and 'Card' in self._cw.vreg.schema:
-            self.w(u'<div class="hr">&#160;</div>')
-            if rset:
-                href = rset.get_entity(0, 0).absolute_url(vid='edition')
-                label = self._cw._('edit the index page')
-            else:
-                href = req.build_url('view', vid='creation', etype='Card', wikiid='index')
-                label = self._cw._('create an index page')
-            self.w(u'<br/><a href="%s">%s</a>\n' % (xml_escape(href), label))
-
-    def folders(self):
-        self.w(u'<h2>%s</h2>\n' % self._cw._('Browse by category'))
-        self._cw.vreg['views'].select('tree', self._cw).render(w=self.w, maxlevel=1)
-
-    def create_links(self):
-        self.w(u'<ul class="createLink">')
-        for etype in self.add_etype_links:
-            eschema = self._cw.vreg.schema.eschema(etype)
-            if eschema.has_perm(self._cw, 'add'):
+            self.w(u'<h2>%s</h2>\n' % self._cw._('Manage'))
+            self.w(u'<ul class="manageActions">')
+            for action in allactions['manage']:
                 self.w(u'<li><a href="%s">%s</a></li>' % (
-                        self._cw.build_url('add/%s' % eschema),
-                        self._cw.__('add a %s' % eschema).capitalize()))
-        self.w(u'</ul>')
+                    action.url(), self._cw._(action.title)))
+            self.w(u'</ul>')
 
     def startup_views(self):
-        self.w(u'<h2>%s</h2>\n' % self._cw._('Startup views'))
-        self.startupviews_table()
-
-    def startupviews_table(self):
-        views = self._cw.vreg['views'].possible_views(self._cw, None)
+        views = [v for v in self._cw.vreg['views'].possible_views(self._cw, None)
+                 if v.category == 'startupview'
+                 and v.__regid__ not in self.skip_startup_views]
         if not views:
             return
+        self.w(u'<div class="hr">&#160;</div>')
+        self.w(u'<h2>%s</h2>\n' % self._cw._('Startup views'))
         self.w(u'<ul class="startup">')
         for v in sorted(views, key=lambda x: self._cw._(x.title)):
-            if v.category != 'startupview' or v.__regid__ in ('index', 'tree', 'manage'):
-                continue
             self.w('<li><a href="%s">%s</a></li>' % (
                 xml_escape(v.url()), xml_escape(self._cw._(v.title).capitalize())))
         self.w(u'</ul>')
 
     def entities(self):
         schema = self._cw.vreg.schema
-        self.w(u'<h2>%s</h2>\n' % self._cw._('Browse by entity type'))
-        manager = self._cw.user.matching_groups('managers')
-        self.w(u'<table class="startup">')
-        if manager:
-            self.w(u'<tr><th colspan="4">%s</th></tr>\n' % self._cw._('application entities'))
-        self.entity_types_table(eschema for eschema in schema.entities()
-                                if uicfg.indexview_etype_section.get(eschema) == 'application')
-        if manager:
-            self.w(u'<tr><th colspan="4">%s</th></tr>\n' % self._cw._('system entities'))
-            self.entity_types_table(eschema for eschema in schema.entities()
-                                if uicfg.indexview_etype_section.get(eschema) == 'system')
-            if 'CWAttribute' in schema: # check schema support
-                self.w(u'<tr><th colspan="4">%s</th></tr>\n' % self._cw._('schema entities'))
-                self.entity_types_table(eschema for eschema in schema.entities()
-                                        if uicfg.indexview_etype_section.get(eschema) == 'schema')
-        self.w(u'</table>')
+        eschemas = [eschema for eschema in schema.entities()
+                    if uicfg.indexview_etype_section.get(eschema) == 'application']
+        if eschemas:
+            self.w(u'<div class="hr">&#160;</div>')
+            self.w(u'<h2>%s</h2>\n' % self._cw._('Browse by entity type'))
+            self.w(u'<table class="startup">')
+            self.entity_types_table(eschemas)
+            self.w(u'</table>')
 
     def entity_types_table(self, eschemas):
-        newline = 0
         infos = sorted(self.entity_types(eschemas),
-                       key=lambda (l,a,e):unormalize(l))
+                       key=lambda (l,a,e): unormalize(l))
         q, r = divmod(len(infos), 2)
         if r:
             infos.append( (None, '&#160;', '&#160;') )
@@ -140,10 +99,9 @@
             self.w(u'<td class="addcol">%s</td><td>%s</td>\n' % (addlink2, etypelink2))
             self.w(u'</tr>\n')
 
-
     def entity_types(self, eschemas):
-        """return a list of formatted links to get a list of entities of
-        a each entity's types
+        """return an iterator on formatted links to get a list of entities of
+        each entity type
         """
         req = self._cw
         for eschema in eschemas:
@@ -161,6 +119,18 @@
                 xml_escape(url), label, nb)
             if eschema.has_perm(req, 'add'):
                 yield (label, etypelink, self.add_entity_link(etype))
+            else:
+                yield (label, etypelink, u'')
+
+    def create_links(self):
+        self.w(u'<ul class="createLink">')
+        for etype in self.add_etype_links:
+            eschema = self.schema.eschema(etype)
+            if eschema.has_perm(self._cw, 'add'):
+                self.w(u'<li><a href="%s">%s</a></li>' % (
+                        self._cw.build_url('add/%s' % eschema),
+                        self._cw.__('add a %s' % eschema).capitalize()))
+        self.w(u'</ul>')
 
     def add_entity_link(self, etype):
         """creates a [+] link for adding an entity"""
@@ -168,11 +138,21 @@
         return u'[<a href="%s" title="%s">+</a>]' % (
             xml_escape(url), self._cw.__('add a %s' % etype))
 
+    @deprecated('[3.11] display_folders method is deprecated, backport it if needed')
+    def display_folders(self):
+        return False
+
+    @deprecated('[3.11] folders method is deprecated, backport it if needed')
+    def folders(self):
+        self.w(u'<h2>%s</h2>\n' % self._cw._('Browse by category'))
+        self._cw.vreg['views'].select('tree', self._cw).render(w=self.w, maxlevel=1)
+
 
 class IndexView(ManageView):
     __regid__ = 'index'
     title = _('view_index')
 
+    @deprecated('[3.11] display_folders method is deprecated, backport it if needed')
     def display_folders(self):
         return 'Folder' in self._cw.vreg.schema and self._cw.execute('Any COUNT(X) WHERE X is Folder')[0][0]
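The manage page now builds its "Startup views" list by filtering the registry on category and an explicit skip set, then sorting on the translated title. The same selection logic on stand-in objects (FakeView, the identity translator and the cw.* regids are fabricated for the example):

    class FakeView(object):                    # stand-in for registered view objects
        def __init__(self, regid, title, category='startupview'):
            self.__regid__, self.title, self.category = regid, title, category

    _ = lambda msgid: msgid                    # stand-in for the request's translator

    skip_startup_views = set(('index', 'manage', 'schema', 'changelog'))
    possible = [FakeView('cw.stats', 'usage statistics'),
                FakeView('cw.downloads', 'download area'),
                FakeView('index', 'view_index'),
                FakeView('rss', 'rss feed', category='mainactions')]

    views = [v for v in possible
             if v.category == 'startupview'
             and v.__regid__ not in skip_startup_views]
    for v in sorted(views, key=lambda x: _(x.title)):
        print(v.__regid__)                     # cw.downloads, then cw.stats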
 
--- a/web/views/tabs.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/web/views/tabs.py	Wed Apr 27 09:54:22 2011 +0200
@@ -24,27 +24,27 @@
 from logilab.mtconverter import xml_escape
 
 from cubicweb import NoSelectableObject, role
+from cubicweb import tags, uilib, utils
 from cubicweb.selectors import partial_has_related_entities
 from cubicweb.view import EntityView
-from cubicweb import tags, uilib
-from cubicweb.utils import make_uid
 from cubicweb.web.views import primary
 
 class LazyViewMixin(object):
-    """provides two convenience methods for the tab machinery
-    can also be used to lazy-load arbitrary views
+    """provides two convenience methods for the tab machinery.
+
+    Can also be used to lazy-load arbitrary views.
     """
 
     def _prepare_bindings(self, vid, reloadable):
         self._cw.add_onload(u"""
   jQuery('#lazy-%(vid)s').bind('%(event)s', function(event) {
-     load_now('#lazy-%(vid)s', '#%(vid)s-hole', %(reloadable)s);
+     loadNow('#lazy-%(vid)s', '#%(vid)s-hole', %(reloadable)s);
   });""" % {'event': 'load_%s' % vid, 'vid': vid,
             'reloadable' : str(reloadable).lower()})
 
     def lazyview(self, vid, rql=None, eid=None, rset=None, tabid=None,
                  reloadable=False, show_spinbox=True, w=None):
-        """ a lazy version of wview """
+        """a lazy version of wview"""
         w = w or self.w
         self._cw.add_js('cubicweb.lazy.js')
         urlparams = self._cw.form.copy()
@@ -60,8 +60,9 @@
         w(u'<div id="lazy-%s" cubicweb:loadurl="%s">' % (
             tabid, xml_escape(self._cw.build_url('json', **urlparams))))
         if show_spinbox:
-            w(u'<img src="data/loading.gif" id="%s-hole" alt="%s"/>'
-              % (tabid, self._cw._('(loading ...)')))
+            w(u'<img src="%s" id="%s-hole" alt="%s"/>'
+              % (xml_escape(self._cw.data_url('loading.gif')),
+                 tabid, self._cw._('(loading ...)')))
         else:
             w(u'<div id="%s-hole"></div>' % tabid)
         w(u'<noscript><p><a class="style: hidden" id="seo-%s" href="%s">%s</a></p></noscript>'
@@ -71,16 +72,14 @@
         self._prepare_bindings(tabid, reloadable)
 
     def forceview(self, vid):
-        """trigger an event that will force immediate loading of the view
-        on dom readyness
+        """trigger an event that will force immediate loading of the view on dom
+        readyness
         """
-        self._cw.add_js('cubicweb.lazy.js')
-        self._cw.add_onload("trigger_load('%s');" % vid)
+        self._cw.add_onload(uilib.js.triggerLoad(vid))
 
 
 class TabsMixin(LazyViewMixin):
-    """a tab mixin
-    """
+    """a tab mixin to easily get jQuery based, lazy, ajax tabs"""
 
     @property
     def cookie_name(self):
@@ -105,11 +104,11 @@
         active_tab = uilib.domid(default_tab)
         viewsvreg = self._cw.vreg['views']
         for tab in tabs:
-            try:
+            if isinstance(tab, basestring):
+                tabid, tabkwargs = tab, {}
+            else:
                 tabid, tabkwargs = tab
                 tabkwargs = tabkwargs.copy()
-            except ValueError:
-                tabid, tabkwargs = tab, {}
             tabkwargs.setdefault('rset', self.cw_rset)
             vid = tabkwargs.get('vid', tabid)
             domid = uilib.domid(tabid)
@@ -129,20 +128,19 @@
             entity.view(default, w=self.w)
             return
         self._cw.add_css('ui.tabs.css')
-        self._cw.add_js(('ui.core.js', 'ui.tabs.js',
-                         'cubicweb.ajax.js', 'cubicweb.tabs.js', 'cubicweb.lazy.js'))
+        self._cw.add_js(('ui.core.js', 'ui.tabs.js', 'cubicweb.ajax.js'))
         # prune tabs : not all are to be shown
         tabs, active_tab = self.prune_tabs(tabs, default)
         # build the html structure
         w = self.w
-        uid = entity and entity.eid or make_uid('tab')
+        uid = entity and entity.eid or utils.make_uid('tab')
         w(u'<div id="entity-tabs-%s">' % uid)
         w(u'<ul>')
         active_tab_idx = None
         for i, (tabid, domid, tabkwargs) in enumerate(tabs):
             w(u'<li>')
             w(u'<a href="#%s">' % domid)
-            w(u'<span onclick="set_tab(\'%s\', \'%s\')">' % (domid, self.cookie_name))
+            w(u'<span onclick="%s">' % xml_escape(unicode(uilib.js.setTab(domid, self.cookie_name))))
             w(tabkwargs.pop('label', self._cw._(tabid)))
             w(u'</span>')
             w(u'</a>')
@@ -158,12 +156,12 @@
             tabkwargs.setdefault('rset', self.cw_rset)
             self.lazyview(**tabkwargs)
             w(u'</div>')
-        # call the set_tab() JS function *after* each tab is generated
+        # call the setTab() JS function *after* each tab is generated
         # because the callback binding needs to be done before
         # XXX make work history: true
         self._cw.add_onload(u"""
   jQuery('#entity-tabs-%(eeid)s > ul').tabs( { selected: %(tabindex)s });
-  set_tab('%(domid)s', '%(cookiename)s');
+  setTab('%(domid)s', '%(cookiename)s');
 """ % {'tabindex'   : active_tab_idx,
        'domid'        : active_tab,
        'eeid'       : (entity and entity.eid or uid),
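prune_tabs now normalizes each tab spec with an isinstance check instead of catching ValueError on unpacking: a tab may be given either as a bare view id or as a (view id, kwargs) pair. The same normalization in isolation (basestring is the Python 2 spelling used by the codebase):

    def normalize_tab(tab):
        """accept 'tabid' or ('tabid', {...}) and always return (tabid, kwargs)"""
        if isinstance(tab, basestring):
            return tab, {}
        tabid, tabkwargs = tab
        return tabid, tabkwargs.copy()      # copy so callers can't mutate the spec

    print(normalize_tab('schema-diagram'))
    print(normalize_tab(('schema-entity-types', {'vid': 'table'})))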
--- a/web/views/urlpublishing.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/web/views/urlpublishing.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -156,7 +156,7 @@
 
         <etype>[[/<attribute name>]/<attribute value>]*
     """
-    priority = 2
+    priority = 3
 
     def evaluate_path(self, req, parts):
         if not (0 < len(parts) < 4):
@@ -214,7 +214,8 @@
 
     URL rewrite rule definitions are stored in URLRewriter objects
     """
-    priority = 3
+    priority = 2
+
     def evaluate_path(self, req, parts):
         # uri <=> req._twreq.path or req._twreq.uri
         uri = req.url_unquote('/' + '/'.join(parts))
@@ -236,6 +237,7 @@
     <any evaluator path>/<action>
     """
     priority = 4
+
     def evaluate_path(self, req, parts):
         if len(parts) < 2:
             raise PathDontMatch()
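The priority swap above makes the rewrite evaluator (now 2) run before the generic etype evaluator (now 3), so explicit rewrite rules win over etype path matching. A toy version of that dispatch, assuming the publisher simply tries evaluators in ascending priority and moves on when PathDontMatch is raised; the evaluator bodies below are invented:

    class PathDontMatch(Exception):
        """raised by an evaluator that cannot handle the given path"""

    class RewriteEvaluator(object):
        priority = 2                               # now tried first
        def evaluate_path(self, parts):
            if parts != ['cwuser']:
                raise PathDontMatch()
            return None, {'vid': 'cw.user-management'}

    class ETypeEvaluator(object):
        priority = 3
        def evaluate_path(self, parts):
            return 'Any X WHERE X is %s' % parts[0].capitalize(), {}

    evaluators = sorted([ETypeEvaluator(), RewriteEvaluator()],
                        key=lambda e: e.priority)
    for parts in (['cwuser'], ['blogentry']):
        for evaluator in evaluators:
            try:
                print(evaluator.evaluate_path(parts))
                break
            except PathDontMatch:
                continue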
--- a/web/views/urlrewrite.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/web/views/urlrewrite.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -90,12 +90,14 @@
         ('/index', dict(vid='index')),
         ('/myprefs', dict(vid='propertiesform')),
         ('/siteconfig', dict(vid='systempropertiesform')),
-        ('/siteinfo', dict(vid='info')),
+        ('/siteinfo', dict(vid='siteinfo')),
         ('/manage', dict(vid='manage')),
         ('/notfound', dict(vid='404')),
         ('/error', dict(vid='error')),
         ('/sparql', dict(vid='sparql')),
         ('/processinfo', dict(vid='processinfo')),
+        (rgx('/cwuser', re.I), dict(vid='cw.user-management')),
+        (rgx('/cwsource', re.I), dict(vid='cw.source-management')),
         # XXX should be case insensitive as 'create', but I would like to find another way than
         # relying on the etype_selector
         (rgx('/schema/([^/]+?)/?'),  dict(vid='primary', rql=r'Any X WHERE X is CWEType, X name "\1"')),
--- a/web/views/xmlrss.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/web/views/xmlrss.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -20,6 +20,7 @@
 __docformat__ = "restructuredtext en"
 _ = unicode
 
+from base64 import b64encode
 from time import timezone
 
 from logilab.mtconverter import xml_escape
@@ -31,6 +32,18 @@
 from cubicweb.uilib import simple_sgml_tag
 from cubicweb.web import httpcache, component
 
+def encode_bytes(value):
+    return '<![CDATA[%s]]>' % b64encode(value.getvalue())
+
+# see cubicweb.sobjects.parser.DEFAULT_CONVERTERS
+SERIALIZERS = {
+    'String': xml_escape,
+    'Bytes': encode_bytes,
+    'Date': lambda x: x.strftime('%Y-%m-%d'),
+    'Datetime': lambda x: x.strftime('%Y-%m-%d %H:%M:%S'),
+    'Time': lambda x: x.strftime('%H:%M:%S'),
+    'Interval': lambda x: x.days * 60*60*24 + x.seconds,
+    }
 
 # base xml views ##############################################################
 
@@ -61,24 +74,52 @@
     def cell_call(self, row, col):
         """ element as an item for an xml feed """
         entity = self.cw_rset.complete_entity(row, col)
-        self.w(u'<%s>\n' % (entity.e_schema))
+        self.w(u'<%s eid="%s" cwuri="%s">\n'
+               % (entity.e_schema, entity.eid, xml_escape(entity.cwuri)))
         for rschema, attrschema in entity.e_schema.attribute_definitions():
             attr = rschema.type
-            if attr == 'eid':
-                value = entity.eid
+            if attr in ('eid', 'cwuri'):
+                continue
             else:
                 try:
                     value = entity.cw_attr_cache[attr]
                 except KeyError:
                     # Bytes
                     continue
-            if value is not None:
-                if attrschema == 'Bytes':
-                    from base64 import b64encode
-                    value = '<![CDATA[%s]]>' % b64encode(value.getvalue())
-                elif isinstance(value, basestring):
-                    value = xml_escape(value)
+            if value is None:
+                self.w(u'  <%s/>\n' % attr)
+            else:
+                try:
+                    value = SERIALIZERS[attrschema](value)
+                except KeyError:
+                    pass
                 self.w(u'  <%s>%s</%s>\n' % (attr, value, attr))
+        for relstr in self._cw.list_form_param('relation'):
+            try:
+                rtype, role = relstr.split('-')
+            except ValueError:
+                self.error('badly formatted relation name %r', relstr)
+                continue
+            if role == 'subject':
+                getrschema = entity.e_schema.subjrels
+            elif role == 'object':
+                getrschema = entity.e_schema.objrels
+            else:
+                self.error('badly formatted relation name %r', relstr)
+                continue
+            if rtype not in getrschema:
+                self.error('nonexistent relation %r', relstr)
+                continue
+            self.w(u'  <%s role="%s">\n' % (rtype, role))
+            for related in entity.related(rtype, role, entities=True):
+                # XXX put unique attributes as xml attributes: they are most
+                # probably used to look up existing entities in client data feeds,
+                # and putting them here may avoid an extra request to fetch those
+                # attribute values
+                self.w(u'    <%s eid="%s" cwuri="%s"/>\n'
+                       % (related.e_schema, related.eid,
+                          xml_escape(related.cwuri)))
+            self.w(u'  </%s>\n' % rtype)
         self.w(u'</%s>\n' % (entity.e_schema))
 
 
@@ -234,7 +275,6 @@
         if entity.creator:
             self._marker('dc:creator', entity.dc_creator())
 
-
     def _marker(self, marker, value):
         if value:
             self.w(u'  <%s>%s</%s>\n' % (marker, xml_escape(value), marker))
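Taken together, the xmlrss changes above make the xml item view emit eid and cwuri as XML attributes and accept a 'relation' form parameter of the form <rtype>-<role> to embed related entities. A hypothetical client-side request (instance URL, eid and relation name are made up for illustration):

    import urllib

    # fetch the xml view of one entity and embed its 'concerns' subject relation
    url = 'http://example.org/1234?vid=xml&relation=concerns-subject'
    print urllib.urlopen(url).read()

The returned element then carries eid and cwuri attributes and contains a nested <concerns role="subject"> block listing related entities by eid and cwuri only.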
--- a/web/webconfig.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/web/webconfig.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -297,7 +297,7 @@
         baseurl = self['base-url'] or self.default_base_url()
         if baseurl and baseurl[-1] != '/':
             baseurl += '/'
-        if not self.repairing:
+        if not (self.repairing or self.creating):
             self.global_set_option('base-url', baseurl)
         httpsurl = self['https-url']
         if httpsurl:
--- a/web/webctl.py	Tue Apr 05 08:39:49 2011 +0200
+++ b/web/webctl.py	Wed Apr 27 09:54:22 2011 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -17,26 +17,29 @@
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """cubicweb-ctl commands and command handlers common to twisted/modpython
 web configuration
+"""
 
-"""
 __docformat__ = "restructuredtext en"
 
+from logilab.common.shellutils import ASK
+
 from cubicweb.toolsutils import CommandHandler, underline_title
-from logilab.common.shellutils import ASK
 
 class WebCreateHandler(CommandHandler):
     cmdname = 'create'
 
-    def bootstrap(self, cubes, inputlevel=0):
+    def bootstrap(self, cubes, automatic=False, inputlevel=0):
         """bootstrap this configuration"""
-        print '\n' + underline_title('Generic web configuration')
-        config = self.config
-        if config.repo_method == 'pyro' or config.pyro_enabled():
-            print '\n' + underline_title('Pyro configuration')
-            config.input_config('pyro', inputlevel)
-        if ASK.confirm('Allow anonymous access ?', False):
-            config.global_set_option('anonymous-user', 'anon')
-            config.global_set_option('anonymous-password', 'anon')
+        if not automatic:
+            print '\n' + underline_title('Generic web configuration')
+            config = self.config
+            if config.repo_method == 'pyro' or config.pyro_enabled():
+                print '\n' + underline_title('Pyro configuration')
+                config.input_config('pyro', inputlevel)
+            config.input_config('web', inputlevel)
+            if ASK.confirm('Allow anonymous access ?', False):
+                config.global_set_option('anonymous-user', 'anon')
+                config.global_set_option('anonymous-password', 'anon')
 
-    def postcreate(self):
+    def postcreate(self, *args, **kwargs):
         """hooks called once instance's initialization has been completed"""