merge 3.20.8 in 3.21
author Rémi Cardona <remi.cardona@logilab.fr>
Mon, 22 Jun 2015 14:27:37 +0200
changeset 10411 4ee15441f2eb
parent 10410 eb681a030699 (diff)
parent 10409 843e4b3ccfbc (current diff)
child 10412 3540131a8405
merge 3.20.8 in 3.21
__pkginfo__.py
cwconfig.py
doc/book/en/annexes/faq.rst
entities/__init__.py
entities/test/unittest_base.py
hooks/test/data/bootstrap_cubes
schemas/base.py
server/schemaserial.py
server/serverctl.py
server/sources/native.py
server/sqlutils.py
server/test/unittest_querier.py
web/application.py
web/data/cubicweb.ajax.js
web/formwidgets.py
web/httpcache.py
web/test/unittest_views_editforms.py
web/test/unittest_web.py
web/views/basecontrollers.py
web/views/baseviews.py
web/views/management.py
--- a/__pkginfo__.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/__pkginfo__.py	Mon Jun 22 14:27:37 2015 +0200
@@ -115,8 +115,6 @@
         [join('share', 'cubicweb', 'cubes', 'shared', 'data'),
          [join(_data_dir, fname) for fname in listdir(_data_dir)
           if not isdir(join(_data_dir, fname))]],
-        [join('share', 'cubicweb', 'cubes', 'shared', 'data', 'timeline'),
-         [join(_data_dir, 'timeline', fname) for fname in listdir(join(_data_dir, 'timeline'))]],
         [join('share', 'cubicweb', 'cubes', 'shared', 'data', 'images'),
          [join(_data_dir, 'images', fname) for fname in listdir(join(_data_dir, 'images'))]],
         [join('share', 'cubicweb', 'cubes', 'shared', 'data', 'jquery-treeview'),
--- a/_exceptions.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/_exceptions.py	Mon Jun 22 14:27:37 2015 +0200
@@ -82,6 +82,8 @@
         self.session = session
         assert 'rtypes' in kwargs or 'cstrname' in kwargs
         self.kwargs = kwargs
+        # fill cache while the session is open
+        self.rtypes
 
     @cachedproperty
     def rtypes(self):
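
The hunk above reads the rtypes cached property once in __init__ so that its value is computed while the session is still open, instead of lazily after the session may have been closed. A minimal standalone sketch of that pattern, with a hand-rolled stand-in for the cachedproperty decorator and a hypothetical ExampleError / find_rtypes pair (not actual CubicWeb code):

class cachedproperty(object):
    """Tiny stand-in for the cachedproperty decorator used above."""
    def __init__(self, func):
        self.func = func
    def __get__(self, obj, objtype=None):
        if obj is None:
            return self
        value = self.func(obj)
        # cache the value on the instance; later reads bypass the descriptor
        obj.__dict__[self.func.__name__] = value
        return value

class ExampleError(Exception):
    """Hypothetical error whose details need a live session to resolve."""
    def __init__(self, session, **kwargs):
        super(ExampleError, self).__init__()
        self.session = session
        self.kwargs = kwargs
        # fill the cache now, while the session is still open
        self.rtypes

    @cachedproperty
    def rtypes(self):
        # hypothetical lookup; would fail once the session is closed
        return self.session.find_rtypes(**self.kwargs)
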
--- a/_gcdebug.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/_gcdebug.py	Mon Jun 22 14:27:37 2015 +0200
@@ -19,6 +19,10 @@
 import gc, types, weakref
 
 from cubicweb.schema import CubicWebRelationSchema, CubicWebEntitySchema
+try:
+    from cubicweb.web.request import _NeedAuthAccessMock
+except ImportError:
+    _NeedAuthAccessMock = None
 
 listiterator = type(iter([]))
 
@@ -30,6 +34,8 @@
     types.ModuleType, types.FunctionType, types.MethodType,
     types.MemberDescriptorType, types.GetSetDescriptorType,
     )
+if _NeedAuthAccessMock is not None:
+    IGNORE_CLASSES = IGNORE_CLASSES + (_NeedAuthAccessMock,)
 
 def _get_counted_class(obj, classes):
     for cls in classes:
@@ -63,7 +69,8 @@
                 ocounters[key] = 1
         if isinstance(obj, viewreferrersclasses):
             print '   ', obj, referrers(obj, showobjs, maxlevel)
-    return counters, ocounters, gc.garbage
+    garbage = [repr(obj) for obj in gc.garbage]
+    return counters, ocounters, garbage
 
 
 def referrers(obj, showobj=False, maxlevel=1):
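
The change above stops returning the live objects held in gc.garbage and returns their repr() instead, so callers of the debugging helper do not themselves keep references that pin the uncollectable objects in memory. A rough standalone illustration of that idea (not the CubicWeb helper itself):

import gc

def garbage_report(save_all=False):
    """Return printable descriptions of whatever sits in gc.garbage."""
    if save_all:
        # DEBUG_SAVEALL makes the collector keep every unreachable object in
        # gc.garbage, which is handy when demonstrating this function
        gc.set_debug(gc.DEBUG_SAVEALL)
    gc.collect()
    # hand back strings rather than the objects themselves, so the caller
    # does not hold the garbage alive
    return [repr(obj) for obj in gc.garbage]
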
--- a/cwconfig.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/cwconfig.py	Mon Jun 22 14:27:37 2015 +0200
@@ -279,7 +279,7 @@
     ('default-text-format',
      {'type' : 'choice',
       'choices': ('text/plain', 'text/rest', 'text/html', 'text/markdown'),
-      'default': 'text/html', # use fckeditor in the web ui
+      'default': 'text/plain',
       'help': _('default text format for rich text fields.'),
       'group': 'ui',
       }),
@@ -870,6 +870,18 @@
 the repository',
           'group': 'email', 'level': 1,
           }),
+        ('logstat-interval',
+         {'type' : 'int',
+          'default': 0,
+          'help': 'interval (in seconds) at which stats are dumped in the logstat file; set 0 to disable',
+          'group': 'main', 'level': 2,
+          }),
+        ('logstat-file',
+         {'type' : 'string',
+          'default': Method('default_stats_file'),
+          'help': 'file where stats for the instance should be written',
+          'group': 'main', 'level': 2,
+          }),
         )
 
     @classmethod
@@ -953,6 +965,13 @@
             log_path = os.path.join(_INSTALL_PREFIX, 'var', 'log', 'cubicweb', '%s-%s.log')
             return log_path % (self.appid, self.name)
 
+    def default_stats_file(self):
+        """return default path to the stats file of the instance'server"""
+        logfile = self.default_log_file()
+        if logfile.endswith('.log'):
+            logfile = logfile[:-4]
+        return logfile + '.stats'
+        
     def default_pid_file(self):
         """return default path to the pid file of the instance'server"""
         if self.mode == 'system':
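
The new logstat-file option defaults to a path derived from the instance log file: the '.log' suffix, when present, is swapped for '.stats'. A standalone sketch of that derivation (the example path is purely illustrative, not a CubicWeb default):

def stats_file_from_log_file(logfile):
    """Mirror of the suffix swap performed by default_stats_file() above."""
    if logfile.endswith('.log'):
        logfile = logfile[:-4]
    return logfile + '.stats'

print(stats_file_from_log_file('/var/log/cubicweb/myinstance-all-in-one.log'))
# -> /var/log/cubicweb/myinstance-all-in-one.stats
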
--- a/cwctl.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/cwctl.py	Mon Jun 22 14:27:37 2015 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -25,7 +25,7 @@
 # possible (for cubicweb-ctl reactivity, necessary for instance for usable bash
 # completion). So import locally in command helpers.
 import sys
-from warnings import warn
+from warnings import warn, filterwarnings
 from os import remove, listdir, system, pathsep
 from os.path import exists, join, isfile, isdir, dirname, abspath
 from urlparse import urlparse
@@ -401,7 +401,7 @@
                            if 'type' in odict
                            and odict.get('level') <= self.config.config_level)
             for section in sections:
-                if section not in ('main', 'email', 'pyro', 'web'):
+                if section not in ('main', 'email', 'web'):
                     print '\n' + underline_title('%s options' % section)
                     config.input_config(section, self.config.config_level)
         # write down configuration
@@ -900,9 +900,7 @@
         ('repo-uri',
          {'short': 'H', 'type' : 'string', 'metavar': '<protocol>://<[host][:port]>',
           'help': 'URI of the CubicWeb repository to connect to. URI can be \
-pyro://[host:port] the Pyro name server host; if the pyro nameserver is not set, \
-it will be detected by using a broadcast query, a ZMQ URL or \
-inmemory:// (default) use an in-memory repository. THIS OPTION IS DEPRECATED, \
+a ZMQ URL or inmemory:// (default) use an in-memory repository. THIS OPTION IS DEPRECATED, \
 directly give URI as instance id instead',
           'group': 'remote'
           }),
@@ -953,7 +951,7 @@
         if self.config.repo_uri:
             warn('[3.16] --repo-uri option is deprecated, directly give the URI as instance id',
                  DeprecationWarning)
-            if urlparse(self.config.repo_uri).scheme in ('pyro', 'inmemory'):
+            if urlparse(self.config.repo_uri).scheme == 'inmemory':
                 appuri = '%s/%s' % (self.config.repo_uri.rstrip('/'), appuri)
 
         from cubicweb.utils import parse_repo_uri
@@ -1135,6 +1133,7 @@
     import os
     sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
     sys.stderr = os.fdopen(sys.stderr.fileno(), 'w', 0)
+    filterwarnings('default', category=DeprecationWarning)
     cwcfg.load_cwctl_plugins()
     try:
         CWCTL.run(args)
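
The filterwarnings() call added to the cubicweb-ctl entry point switches DeprecationWarning back to the 'default' action; Python has ignored DeprecationWarning by default since 2.7, so without this call deprecation messages emitted while running commands would be invisible. A small standalone demonstration of the effect:

import warnings

def deprecated_helper():
    # hypothetical deprecated API emitting the usual warning
    warnings.warn('deprecated_helper() is deprecated', DeprecationWarning)

warnings.filterwarnings('default', category=DeprecationWarning)
deprecated_helper()   # warning now printed to stderr
deprecated_helper()   # same code location: the 'default' action shows it only once
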
--- a/dataimport.py	Mon Jun 22 12:51:28 2015 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,1173 +0,0 @@
-# -*- coding: utf-8 -*-
-# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-#
-# This file is part of CubicWeb.
-#
-# CubicWeb is free software: you can redistribute it and/or modify it under the
-# terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option)
-# any later version.
-#
-# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""This module provides tools to import tabular data.
-
-
-Example of use (run this with `cubicweb-ctl shell instance import-script.py`):
-
-.. sourcecode:: python
-
-  from cubicweb.dataimport import *
-  # define data generators
-  GENERATORS = []
-
-  USERS = [('Prenom', 'firstname', ()),
-           ('Nom', 'surname', ()),
-           ('Identifiant', 'login', ()),
-           ]
-
-  def gen_users(ctl):
-      for row in ctl.iter_and_commit('utilisateurs'):
-          entity = mk_entity(row, USERS)
-          entity['upassword'] = 'motdepasse'
-          ctl.check('login', entity['login'], None)
-          entity = ctl.store.create_entity('CWUser', **entity)
-          email = ctl.store.create_entity('EmailAddress', address=row['email'])
-          ctl.store.relate(entity.eid, 'use_email', email.eid)
-          ctl.store.rql('SET U in_group G WHERE G name "users", U eid %(x)s', {'x':entity['eid']})
-
-  CHK = [('login', check_doubles, 'Utilisateurs Login',
-          'Deux utilisateurs ne devraient pas avoir le même login.'),
-         ]
-
-  GENERATORS.append( (gen_users, CHK) )
-
-  # create controller
-  ctl = CWImportController(RQLObjectStore(cnx))
-  ctl.askerror = 1
-  ctl.generators = GENERATORS
-  ctl.data['utilisateurs'] = lazytable(ucsvreader(open('users.csv')))
-  # run
-  ctl.run()
-
-.. BUG file with one column are not parsable
-.. TODO rollback() invocation is not possible yet
-"""
-__docformat__ = "restructuredtext en"
-
-import csv
-import sys
-import threading
-import traceback
-import warnings
-import cPickle
-import os.path as osp
-import inspect
-from base64 import b64encode
-from collections import defaultdict
-from copy import copy
-from datetime import date, datetime, time
-from time import asctime
-from StringIO import StringIO
-
-from logilab.common import shellutils, attrdict
-from logilab.common.date import strptime
-from logilab.common.decorators import cached
-from logilab.common.deprecation import deprecated
-
-from cubicweb import QueryError
-from cubicweb.utils import make_uid
-from cubicweb.schema import META_RTYPES, VIRTUAL_RTYPES
-from cubicweb.server.edition import EditedEntity
-from cubicweb.server.sqlutils import SQL_PREFIX
-from cubicweb.server.utils import eschema_eid
-
-
-def count_lines(stream_or_filename):
-    if isinstance(stream_or_filename, basestring):
-        f = open(stream_or_filename)
-    else:
-        f = stream_or_filename
-        f.seek(0)
-    for i, line in enumerate(f):
-        pass
-    f.seek(0)
-    return i+1
-
-def ucsvreader_pb(stream_or_path, encoding='utf-8', delimiter=',', quotechar='"',
-                  skipfirst=False, withpb=True, skip_empty=True, separator=None,
-                  quote=None):
-    """same as :func:`ucsvreader` but a progress bar is displayed as we iter on rows"""
-    if separator is not None:
-        delimiter = separator
-        warnings.warn("[3.20] 'separator' kwarg is deprecated, use 'delimiter' instead")
-    if quote is not None:
-        quotechar = quote
-        warnings.warn("[3.20] 'quote' kwarg is deprecated, use 'quotechar' instead")
-    if isinstance(stream_or_path, basestring):
-        if not osp.exists(stream_or_path):
-            raise Exception("file doesn't exists: %s" % stream_or_path)
-        stream = open(stream_or_path)
-    else:
-        stream = stream_or_path
-    rowcount = count_lines(stream)
-    if skipfirst:
-        rowcount -= 1
-    if withpb:
-        pb = shellutils.ProgressBar(rowcount, 50)
-    for urow in ucsvreader(stream, encoding, delimiter, quotechar,
-                           skipfirst=skipfirst, skip_empty=skip_empty):
-        yield urow
-        if withpb:
-            pb.update()
-    print ' %s rows imported' % rowcount
-
-def ucsvreader(stream, encoding='utf-8', delimiter=',', quotechar='"',
-               skipfirst=False, ignore_errors=False, skip_empty=True,
-               separator=None, quote=None):
-    """A csv reader that accepts files with any encoding and outputs unicode
-    strings
-
-    if skip_empty (the default), lines without any values specified (only
-    separators) will be skipped. This is useful for Excel exports which may be
-    full of such lines.
-    """
-    if separator is not None:
-        delimiter = separator
-        warnings.warn("[3.20] 'separator' kwarg is deprecated, use 'delimiter' instead")
-    if quote is not None:
-        quotechar = quote
-        warnings.warn("[3.20] 'quote' kwarg is deprecated, use 'quotechar' instead")
-    it = iter(csv.reader(stream, delimiter=delimiter, quotechar=quotechar))
-    if not ignore_errors:
-        if skipfirst:
-            it.next()
-        for row in it:
-            decoded = [item.decode(encoding) for item in row]
-            if not skip_empty or any(decoded):
-                yield decoded
-    else:
-        if skipfirst:
-            try:
-                row = it.next()
-            except csv.Error:
-                pass
-        # Safe version, that can cope with error in CSV file
-        while True:
-            try:
-                row = it.next()
-            # End of CSV, break
-            except StopIteration:
-                break
-            # Error in CSV, ignore line and continue
-            except csv.Error:
-                continue
-            decoded = [item.decode(encoding) for item in row]
-            if not skip_empty or any(decoded):
-                yield decoded
-
-
-def callfunc_every(func, number, iterable):
-    """yield items of `iterable` one by one and call function `func`
-    every `number` iterations. Always call function `func` at the end.
-    """
-    for idx, item in enumerate(iterable):
-        yield item
-        if not idx % number:
-            func()
-    func()
-
-def lazytable(reader):
-    """The first row is taken to be the header of the table and
-    used to output a dict for each row of data.
-
-    >>> data = lazytable(ucsvreader(open(filename)))
-    """
-    header = reader.next()
-    for row in reader:
-        yield dict(zip(header, row))
-
-def lazydbtable(cu, table, headers, orderby=None):
-    """return an iterator on rows of a sql table. On each row, fetch columns
-    defined in headers and return values as a dictionary.
-
-    >>> data = lazydbtable(cu, 'experimentation', ('id', 'nickname', 'gps'))
-    """
-    sql = 'SELECT %s FROM %s' % (','.join(headers), table,)
-    if orderby:
-        sql += ' ORDER BY %s' % ','.join(orderby)
-    cu.execute(sql)
-    while True:
-        row = cu.fetchone()
-        if row is None:
-            break
-        yield dict(zip(headers, row))
-
-def mk_entity(row, map):
-    """Return a dict made from sanitized mapped values.
-
-    ValueError can be raised on unexpected values found in checkers
-
-    >>> row = {'myname': u'dupont'}
-    >>> map = [('myname', u'name', (call_transform_method('title'),))]
-    >>> mk_entity(row, map)
-    {'name': u'Dupont'}
-    >>> row = {'myname': u'dupont', 'optname': u''}
-    >>> map = [('myname', u'name', (call_transform_method('title'),)),
-    ...        ('optname', u'MARKER', (optional,))]
-    >>> mk_entity(row, map)
-    {'name': u'Dupont', 'optname': None}
-    """
-    res = {}
-    assert isinstance(row, dict)
-    assert isinstance(map, list)
-    for src, dest, funcs in map:
-        try:
-            res[dest] = row[src]
-        except KeyError:
-            continue
-        try:
-            for func in funcs:
-                res[dest] = func(res[dest])
-                if res[dest] is None:
-                    break
-        except ValueError as err:
-            raise ValueError('error with %r field: %s' % (src, err)), None, sys.exc_info()[-1]
-    return res
-
-# user interactions ############################################################
-
-def tell(msg):
-    print msg
-
-def confirm(question):
-    """A confirm function that asks for yes/no/abort and exits on abort."""
-    answer = shellutils.ASK.ask(question, ('Y', 'n', 'abort'), 'Y')
-    if answer == 'abort':
-        sys.exit(1)
-    return answer == 'Y'
-
-
-class catch_error(object):
-    """Helper for @contextmanager decorator."""
-
-    def __init__(self, ctl, key='unexpected error', msg=None):
-        self.ctl = ctl
-        self.key = key
-        self.msg = msg
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, type, value, traceback):
-        if type is not None:
-            if issubclass(type, (KeyboardInterrupt, SystemExit)):
-                return # re-raise
-            if self.ctl.catcherrors:
-                self.ctl.record_error(self.key, None, type, value, traceback)
-                return True # silent
-
-
-# base sanitizing/coercing functions ###########################################
-
-def optional(value):
-    """checker to filter optional field
-
-    If value is undefined (ex: empty string), return None that will
-    break the checkers validation chain
-
-    General use is to add 'optional' check in first condition to avoid
-    ValueError by further checkers
-
-    >>> MAPPER = [(u'value', 'value', (optional, int))]
-    >>> row = {'value': u'XXX'}
-    >>> mk_entity(row, MAPPER)
-    {'value': None}
-    >>> row = {'value': u'100'}
-    >>> mk_entity(row, MAPPER)
-    {'value': 100}
-    """
-    if value:
-        return value
-    return None
-
-def required(value):
-    """raise ValueError if value is empty
-
-    This check should be often found in last position in the chain.
-    """
-    if value:
-        return value
-    raise ValueError("required")
-
-def todatetime(format='%d/%m/%Y'):
-    """return a transformation function to turn string input value into a
-    `datetime.datetime` instance, using given format.
-
-    Follow it by `todate` or `totime` functions from `logilab.common.date` if
-    you want a `date`/`time` instance instead of `datetime`.
-    """
-    def coerce(value):
-        return strptime(value, format)
-    return coerce
-
-def call_transform_method(methodname, *args, **kwargs):
-    """return value returned by calling the given method on input"""
-    def coerce(value):
-        return getattr(value, methodname)(*args, **kwargs)
-    return coerce
-
-def call_check_method(methodname, *args, **kwargs):
-    """check value returned by calling the given method on input is true,
-    else raise ValueError
-    """
-    def check(value):
-        if getattr(value, methodname)(*args, **kwargs):
-            return value
-        raise ValueError('%s not verified on %r' % (methodname, value))
-    return check
-
-# base integrity checking functions ############################################
-
-def check_doubles(buckets):
-    """Extract the keys that have more than one item in their bucket."""
-    return [(k, len(v)) for k, v in buckets.items() if len(v) > 1]
-
-def check_doubles_not_none(buckets):
-    """Extract the keys that have more than one item in their bucket."""
-    return [(k, len(v)) for k, v in buckets.items()
-            if k is not None and len(v) > 1]
-
-# sql generator utility functions #############################################
-
-
-def _import_statements(sql_connect, statements, nb_threads=3,
-                       dump_output_dir=None,
-                       support_copy_from=True, encoding='utf-8'):
-    """
-    Import a bunch of sql statements, using different threads.
-    """
-    try:
-        chunksize = (len(statements) / nb_threads) + 1
-        threads = []
-        for i in xrange(nb_threads):
-            chunks = statements[i*chunksize:(i+1)*chunksize]
-            thread = threading.Thread(target=_execmany_thread,
-                                      args=(sql_connect, chunks,
-                                            dump_output_dir,
-                                            support_copy_from,
-                                            encoding))
-            thread.start()
-            threads.append(thread)
-        for t in threads:
-            t.join()
-    except Exception:
-        print 'Error in import statements'
-
-def _execmany_thread_not_copy_from(cu, statement, data, table=None,
-                                   columns=None, encoding='utf-8'):
-    """ Execute thread without copy from
-    """
-    cu.executemany(statement, data)
-
-def _execmany_thread_copy_from(cu, statement, data, table,
-                               columns, encoding='utf-8'):
-    """ Execute thread with copy from
-    """
-    buf = _create_copyfrom_buffer(data, columns, encoding=encoding)
-    if buf is None:
-        _execmany_thread_not_copy_from(cu, statement, data)
-    else:
-        if columns is None:
-            cu.copy_from(buf, table, null='NULL')
-        else:
-            cu.copy_from(buf, table, null='NULL', columns=columns)
-
-def _execmany_thread(sql_connect, statements, dump_output_dir=None,
-                     support_copy_from=True, encoding='utf-8'):
-    """
-    Execute sql statement. If 'INSERT INTO', try to use 'COPY FROM' command,
-    or fallback to execute_many.
-    """
-    if support_copy_from:
-        execmany_func = _execmany_thread_copy_from
-    else:
-        execmany_func = _execmany_thread_not_copy_from
-    cnx = sql_connect()
-    cu = cnx.cursor()
-    try:
-        for statement, data in statements:
-            table = None
-            columns = None
-            try:
-                if not statement.startswith('INSERT INTO'):
-                    cu.executemany(statement, data)
-                    continue
-                table = statement.split()[2]
-                if isinstance(data[0], (tuple, list)):
-                    columns = None
-                else:
-                    columns = list(data[0])
-                execmany_func(cu, statement, data, table, columns, encoding)
-            except Exception:
-                print 'unable to copy data into table %s' % table
-                # Error in import statement, save data in dump_output_dir
-                if dump_output_dir is not None:
-                    pdata = {'data': data, 'statement': statement,
-                             'time': asctime(), 'columns': columns}
-                    filename = make_uid()
-                    try:
-                        with open(osp.join(dump_output_dir,
-                                           '%s.pickle' % filename), 'w') as fobj:
-                            fobj.write(cPickle.dumps(pdata))
-                    except IOError:
-                        print 'ERROR while pickling in', dump_output_dir, filename+'.pickle'
-                        pass
-                cnx.rollback()
-                raise
-    finally:
-        cnx.commit()
-        cu.close()
-
-
-def _copyfrom_buffer_convert_None(value, **opts):
-    '''Convert None value to "NULL"'''
-    return 'NULL'
-
-def _copyfrom_buffer_convert_number(value, **opts):
-    '''Convert a number into its string representation'''
-    return str(value)
-
-def _copyfrom_buffer_convert_string(value, **opts):
-    '''Convert string value.
-
-    Recognized keywords:
-    :encoding: resulting string encoding (default: utf-8)
-    :replace_sep: character used when input contains characters
-                  that conflict with the column separator.
-    '''
-    encoding = opts.get('encoding','utf-8')
-    replace_sep = opts.get('replace_sep', None)
-    # Remove separators used in string formatting
-    for _char in (u'\t', u'\r', u'\n'):
-        if _char in value:
-            # If a replace_sep is given, replace
-            # the separator
-            # (and thus avoid empty buffer)
-            if replace_sep is None:
-                raise ValueError('conflicting separator: '
-                                 'you must provide the replace_sep option')
-            value = value.replace(_char, replace_sep)
-        value = value.replace('\\', r'\\')
-    if isinstance(value, unicode):
-        value = value.encode(encoding)
-    return value
-
-def _copyfrom_buffer_convert_date(value, **opts):
-    '''Convert date into "YYYY-MM-DD"'''
-    # Do not use strftime, as it yields issue with date < 1900
-    # (http://bugs.python.org/issue1777412)
-    return '%04d-%02d-%02d' % (value.year, value.month, value.day)
-
-def _copyfrom_buffer_convert_datetime(value, **opts):
-    '''Convert date into "YYYY-MM-DD HH:MM:SS.UUUUUU"'''
-    # Do not use strftime, as it yields issue with date < 1900
-    # (http://bugs.python.org/issue1777412)
-    return '%s %s' % (_copyfrom_buffer_convert_date(value, **opts),
-                      _copyfrom_buffer_convert_time(value, **opts))
-
-def _copyfrom_buffer_convert_time(value, **opts):
-    '''Convert time into "HH:MM:SS.UUUUUU"'''
-    return '%02d:%02d:%02d.%06d' % (value.hour, value.minute,
-                                    value.second, value.microsecond)
-
-# (types, converter) list.
-_COPYFROM_BUFFER_CONVERTERS = [
-    (type(None), _copyfrom_buffer_convert_None),
-    ((long, int, float), _copyfrom_buffer_convert_number),
-    (basestring, _copyfrom_buffer_convert_string),
-    (datetime, _copyfrom_buffer_convert_datetime),
-    (date, _copyfrom_buffer_convert_date),
-    (time, _copyfrom_buffer_convert_time),
-]
-
-def _create_copyfrom_buffer(data, columns=None, **convert_opts):
-    """
-    Create a StringIO buffer for 'COPY FROM' command.
-    Deals with Unicode, Int, Float, Date... (see ``converters``)
-
-    :data: a sequence/dict of tuples
-    :columns: list of columns to consider (default to all columns)
-    :converter_opts: keyword arguements given to converters
-    """
-    # Create a list rather than directly create a StringIO
-    # to correctly write lines separated by '\n' in a single step
-    rows = []
-    if columns is None:
-        if isinstance(data[0], (tuple, list)):
-            columns = range(len(data[0]))
-        elif isinstance(data[0], dict):
-            columns = data[0].keys()
-        else:
-            raise ValueError('Could not get columns: you must provide columns.')
-    for row in data:
-        # Iterate over the different columns and the different values
-        # and try to convert them to a correct datatype.
-        # If an error is raised, do not continue.
-        formatted_row = []
-        for col in columns:
-            try:
-                value = row[col]
-            except KeyError:
-                warnings.warn(u"Column %s is not accessible in row %s"
-                              % (col, row), RuntimeWarning)
-                # XXX 'value' set to None so that the import does not end in
-                # error.
-                # Instead, the extra keys are set to NULL from the
-                # database point of view.
-                value = None
-            for types, converter in _COPYFROM_BUFFER_CONVERTERS:
-                if isinstance(value, types):
-                    value = converter(value, **convert_opts)
-                    break
-            else:
-                raise ValueError("Unsupported value type %s" % type(value))
-            # We push the value to the new formatted row
-            # if the value is not None and could be converted to a string.
-            formatted_row.append(value)
-        rows.append('\t'.join(formatted_row))
-    return StringIO('\n'.join(rows))
-
-
-# object stores #################################################################
-
-class ObjectStore(object):
-    """Store objects in memory for *faster* validation (development mode)
-
-    But it will not enforce the constraints of the schema and hence will miss some problems
-
-    >>> store = ObjectStore()
-    >>> user = store.create_entity('CWUser', login=u'johndoe')
-    >>> group = store.create_entity('CWUser', name=u'unknown')
-    >>> store.relate(user.eid, 'in_group', group.eid)
-    """
-    def __init__(self):
-        self.items = []
-        self.eids = {}
-        self.types = {}
-        self.relations = set()
-        self.indexes = {}
-
-    def create_entity(self, etype, **data):
-        data = attrdict(data)
-        data['eid'] = eid = len(self.items)
-        self.items.append(data)
-        self.eids[eid] = data
-        self.types.setdefault(etype, []).append(eid)
-        return data
-
-    def relate(self, eid_from, rtype, eid_to, **kwargs):
-        """Add new relation"""
-        relation = eid_from, rtype, eid_to
-        self.relations.add(relation)
-        return relation
-
-    def commit(self):
-        """this commit method does nothing by default"""
-        return
-
-    def flush(self):
-        """The method is provided so that all stores share a common API"""
-        pass
-
-    @property
-    def nb_inserted_entities(self):
-        return len(self.eids)
-    @property
-    def nb_inserted_types(self):
-        return len(self.types)
-    @property
-    def nb_inserted_relations(self):
-        return len(self.relations)
-
-class RQLObjectStore(ObjectStore):
-    """ObjectStore that works with an actual RQL repository (production mode)"""
-
-    def __init__(self, cnx, commit=None):
-        if commit is not None:
-            warnings.warn('[3.19] commit argument should not be specified '
-                          'as the cnx object already provides it.',
-                          DeprecationWarning, stacklevel=2)
-        super(RQLObjectStore, self).__init__()
-        self._cnx = cnx
-        self._commit = commit or cnx.commit
-
-    def commit(self):
-        return self._commit()
-
-    def rql(self, *args):
-        return self._cnx.execute(*args)
-
-    @property
-    def session(self):
-        warnings.warn('[3.19] deprecated property.', DeprecationWarning,
-                      stacklevel=2)
-        return self._cnx.repo._get_session(self._cnx.sessionid)
-
-    def create_entity(self, *args, **kwargs):
-        entity = self._cnx.create_entity(*args, **kwargs)
-        self.eids[entity.eid] = entity
-        self.types.setdefault(args[0], []).append(entity.eid)
-        return entity
-
-    def relate(self, eid_from, rtype, eid_to, **kwargs):
-        eid_from, rtype, eid_to = super(RQLObjectStore, self).relate(
-            eid_from, rtype, eid_to, **kwargs)
-        self.rql('SET X %s Y WHERE X eid %%(x)s, Y eid %%(y)s' % rtype,
-                 {'x': int(eid_from), 'y': int(eid_to)})
-
-    @deprecated("[3.19] use cnx.find(*args, **kwargs).entities() instead")
-    def find_entities(self, *args, **kwargs):
-        return self._cnx.find(*args, **kwargs).entities()
-
-    @deprecated("[3.19] use cnx.find(*args, **kwargs).one() instead")
-    def find_one_entity(self, *args, **kwargs):
-        return self._cnx.find(*args, **kwargs).one()
-
-# the import controller ########################################################
-
-class CWImportController(object):
-    """Controller of the data import process.
-
-    >>> ctl = CWImportController(store)
-    >>> ctl.generators = list_of_data_generators
-    >>> ctl.data = dict_of_data_tables
-    >>> ctl.run()
-    """
-
-    def __init__(self, store, askerror=0, catcherrors=None, tell=tell,
-                 commitevery=50):
-        self.store = store
-        self.generators = None
-        self.data = {}
-        self.errors = None
-        self.askerror = askerror
-        if  catcherrors is None:
-            catcherrors = askerror
-        self.catcherrors = catcherrors
-        self.commitevery = commitevery # set to None to do a single commit
-        self._tell = tell
-
-    def check(self, type, key, value):
-        self._checks.setdefault(type, {}).setdefault(key, []).append(value)
-
-    def check_map(self, entity, key, map, default):
-        try:
-            entity[key] = map[entity[key]]
-        except KeyError:
-            self.check(key, entity[key], None)
-            entity[key] = default
-
-    def record_error(self, key, msg=None, type=None, value=None, tb=None):
-        tmp = StringIO()
-        if type is None:
-            traceback.print_exc(file=tmp)
-        else:
-            traceback.print_exception(type, value, tb, file=tmp)
-        # use a list to avoid counting a <nb lines> errors instead of one
-        errorlog = self.errors.setdefault(key, [])
-        if msg is None:
-            errorlog.append(tmp.getvalue().splitlines())
-        else:
-            errorlog.append( (msg, tmp.getvalue().splitlines()) )
-
-    def run(self):
-        self.errors = {}
-        if self.commitevery is None:
-            self.tell('Will commit all or nothing.')
-        else:
-            self.tell('Will commit every %s iterations' % self.commitevery)
-        for func, checks in self.generators:
-            self._checks = {}
-            func_name = func.__name__
-            self.tell("Run import function '%s'..." % func_name)
-            try:
-                func(self)
-            except Exception:
-                if self.catcherrors:
-                    self.record_error(func_name, 'While calling %s' % func.__name__)
-                else:
-                    self._print_stats()
-                    raise
-            for key, func, title, help in checks:
-                buckets = self._checks.get(key)
-                if buckets:
-                    err = func(buckets)
-                    if err:
-                        self.errors[title] = (help, err)
-        try:
-            txuuid = self.store.commit()
-            if txuuid is not None:
-                self.tell('Transaction commited (txuuid: %s)' % txuuid)
-        except QueryError as ex:
-            self.tell('Transaction aborted: %s' % ex)
-        self._print_stats()
-        if self.errors:
-            if self.askerror == 2 or (self.askerror and confirm('Display errors ?')):
-                from pprint import pformat
-                for errkey, error in self.errors.items():
-                    self.tell("\n%s (%s): %d\n" % (error[0], errkey, len(error[1])))
-                    self.tell(pformat(sorted(error[1])))
-
-    def _print_stats(self):
-        nberrors = sum(len(err) for err in self.errors.itervalues())
-        self.tell('\nImport statistics: %i entities, %i types, %i relations and %i errors'
-                  % (self.store.nb_inserted_entities,
-                     self.store.nb_inserted_types,
-                     self.store.nb_inserted_relations,
-                     nberrors))
-
-    def get_data(self, key):
-        return self.data.get(key)
-
-    def index(self, name, key, value, unique=False):
-        """create a new index
-
-        If unique is set to True, only first occurence will be kept not the following ones
-        """
-        if unique:
-            try:
-                if value in self.store.indexes[name][key]:
-                    return
-            except KeyError:
-                # we're sure that one is the first occurence; so continue...
-                pass
-        self.store.indexes.setdefault(name, {}).setdefault(key, []).append(value)
-
-    def tell(self, msg):
-        self._tell(msg)
-
-    def iter_and_commit(self, datakey):
-        """iter rows, triggering commit every self.commitevery iterations"""
-        if self.commitevery is None:
-            return self.get_data(datakey)
-        else:
-            return callfunc_every(self.store.commit,
-                                  self.commitevery,
-                                  self.get_data(datakey))
-
-
-class NoHookRQLObjectStore(RQLObjectStore):
-    """ObjectStore that works with an actual RQL repository (production mode)"""
-
-    def __init__(self, cnx, metagen=None, baseurl=None):
-        super(NoHookRQLObjectStore, self).__init__(cnx)
-        self.source = cnx.repo.system_source
-        self.rschema = cnx.repo.schema.rschema
-        self.add_relation = self.source.add_relation
-        if metagen is None:
-            metagen = MetaGenerator(cnx, baseurl)
-        self.metagen = metagen
-        self._nb_inserted_entities = 0
-        self._nb_inserted_types = 0
-        self._nb_inserted_relations = 0
-        # deactivate security
-        cnx.read_security = False
-        cnx.write_security = False
-
-    def create_entity(self, etype, **kwargs):
-        for k, v in kwargs.iteritems():
-            kwargs[k] = getattr(v, 'eid', v)
-        entity, rels = self.metagen.base_etype_dicts(etype)
-        # make a copy to keep cached entity pristine
-        entity = copy(entity)
-        entity.cw_edited = copy(entity.cw_edited)
-        entity.cw_clear_relation_cache()
-        entity.cw_edited.update(kwargs, skipsec=False)
-        entity_source, extid = self.metagen.init_entity(entity)
-        cnx = self._cnx
-        self.source.add_entity(cnx, entity)
-        self.source.add_info(cnx, entity, entity_source, extid)
-        kwargs = dict()
-        if inspect.getargspec(self.add_relation).keywords:
-            kwargs['subjtype'] = entity.cw_etype
-        for rtype, targeteids in rels.iteritems():
-            # targeteids may be a single eid or a list of eids
-            inlined = self.rschema(rtype).inlined
-            try:
-                for targeteid in targeteids:
-                    self.add_relation(cnx, entity.eid, rtype, targeteid,
-                                      inlined, **kwargs)
-            except TypeError:
-                self.add_relation(cnx, entity.eid, rtype, targeteids,
-                                  inlined, **kwargs)
-        self._nb_inserted_entities += 1
-        return entity
-
-    def relate(self, eid_from, rtype, eid_to, **kwargs):
-        assert not rtype.startswith('reverse_')
-        self.add_relation(self._cnx, eid_from, rtype, eid_to,
-                          self.rschema(rtype).inlined)
-        if self.rschema(rtype).symmetric:
-            self.add_relation(self._cnx, eid_to, rtype, eid_from,
-                              self.rschema(rtype).inlined)
-        self._nb_inserted_relations += 1
-
-    @property
-    def nb_inserted_entities(self):
-        return self._nb_inserted_entities
-    @property
-    def nb_inserted_types(self):
-        return self._nb_inserted_types
-    @property
-    def nb_inserted_relations(self):
-        return self._nb_inserted_relations
-
-
-class MetaGenerator(object):
-    META_RELATIONS = (META_RTYPES
-                      - VIRTUAL_RTYPES
-                      - set(('eid', 'cwuri',
-                             'is', 'is_instance_of', 'cw_source')))
-
-    def __init__(self, cnx, baseurl=None, source=None):
-        self._cnx = cnx
-        if baseurl is None:
-            config = cnx.vreg.config
-            baseurl = config['base-url'] or config.default_base_url()
-        if not baseurl[-1] == '/':
-            baseurl += '/'
-        self.baseurl = baseurl
-        if source is None:
-            source = cnx.repo.system_source
-        self.source = source
-        self.create_eid = cnx.repo.system_source.create_eid
-        self.time = datetime.now()
-        # attributes/relations shared by all entities of the same type
-        self.etype_attrs = []
-        self.etype_rels = []
-        # attributes/relations specific to each entity
-        self.entity_attrs = ['cwuri']
-        #self.entity_rels = [] XXX not handled (YAGNI?)
-        schema = cnx.vreg.schema
-        rschema = schema.rschema
-        for rtype in self.META_RELATIONS:
-            # skip owned_by / created_by if user is the internal manager
-            if cnx.user.eid == -1 and rtype in ('owned_by', 'created_by'):
-                continue
-            if rschema(rtype).final:
-                self.etype_attrs.append(rtype)
-            else:
-                self.etype_rels.append(rtype)
-
-    @cached
-    def base_etype_dicts(self, etype):
-        entity = self._cnx.vreg['etypes'].etype_class(etype)(self._cnx)
-        # entity are "surface" copied, avoid shared dict between copies
-        del entity.cw_extra_kwargs
-        entity.cw_edited = EditedEntity(entity)
-        for attr in self.etype_attrs:
-            genfunc = self.generate(attr)
-            if genfunc:
-                entity.cw_edited.edited_attribute(attr, genfunc(entity))
-        rels = {}
-        for rel in self.etype_rels:
-            genfunc = self.generate(rel)
-            if genfunc:
-                rels[rel] = genfunc(entity)
-        return entity, rels
-
-    def init_entity(self, entity):
-        entity.eid = self.create_eid(self._cnx)
-        extid = entity.cw_edited.get('cwuri')
-        for attr in self.entity_attrs:
-            if attr in entity.cw_edited:
-                # already set, skip this attribute
-                continue
-            genfunc = self.generate(attr)
-            if genfunc:
-                entity.cw_edited.edited_attribute(attr, genfunc(entity))
-        if isinstance(extid, unicode):
-            extid = extid.encode('utf-8')
-        return self.source, extid
-
-    def generate(self, rtype):
-        return getattr(self, 'gen_%s' % rtype, None)
-
-    def gen_cwuri(self, entity):
-        assert self.baseurl, 'baseurl is None while generating cwuri'
-        return u'%s%s' % (self.baseurl, entity.eid)
-
-    def gen_creation_date(self, entity):
-        return self.time
-
-    def gen_modification_date(self, entity):
-        return self.time
-
-    def gen_created_by(self, entity):
-        return self._cnx.user.eid
-
-    def gen_owned_by(self, entity):
-        return self._cnx.user.eid
-
-
-###########################################################################
-## SQL object store #######################################################
-###########################################################################
-class SQLGenObjectStore(NoHookRQLObjectStore):
-    """Controller of the data import process. This version is based
-    on direct insertions throught SQL command (COPY FROM or execute many).
-
-    >>> store = SQLGenObjectStore(cnx)
-    >>> store.create_entity('Person', ...)
-    >>> store.flush()
-    """
-
-    def __init__(self, cnx, dump_output_dir=None, nb_threads_statement=3):
-        """
-        Initialize a SQLGenObjectStore.
-
-        Parameters:
-
-          - cnx: connection on the cubicweb instance
-          - dump_output_dir: a directory to dump failed statements
-            for easier recovery. Default is None (no dump).
-          - nb_threads_statement: number of threads used
-            for SQL insertion (default is 3).
-        """
-        super(SQLGenObjectStore, self).__init__(cnx)
-        ### hijack default source
-        self.source = SQLGenSourceWrapper(
-            self.source, cnx.vreg.schema,
-            dump_output_dir=dump_output_dir,
-            nb_threads_statement=nb_threads_statement)
-        ### XXX This is done in super().__init__(), but should be
-        ### redone here to link to the correct source
-        self.add_relation = self.source.add_relation
-        self.indexes_etypes = {}
-
-    def flush(self):
-        """Flush data to the database"""
-        self.source.flush()
-
-    def relate(self, subj_eid, rtype, obj_eid, **kwargs):
-        if subj_eid is None or obj_eid is None:
-            return
-        # XXX Could subjtype be inferred ?
-        self.source.add_relation(self._cnx, subj_eid, rtype, obj_eid,
-                                 self.rschema(rtype).inlined, **kwargs)
-        if self.rschema(rtype).symmetric:
-            self.source.add_relation(self._cnx, obj_eid, rtype, subj_eid,
-                                     self.rschema(rtype).inlined, **kwargs)
-
-    def drop_indexes(self, etype):
-        """Drop indexes for a given entity type"""
-        if etype not in self.indexes_etypes:
-            cu = self._cnx.cnxset.cu
-            def index_to_attr(index):
-                """turn an index name to (database) attribute name"""
-                return index.replace(etype.lower(), '').replace('idx', '').strip('_')
-            indices = [(index, index_to_attr(index))
-                       for index in self.source.dbhelper.list_indices(cu, etype)
-                       # Do not consider 'cw_etype_pkey' index
-                       if not index.endswith('key')]
-            self.indexes_etypes[etype] = indices
-        for index, attr in self.indexes_etypes[etype]:
-            self._cnx.system_sql('DROP INDEX %s' % index)
-
-    def create_indexes(self, etype):
-        """Recreate indexes for a given entity type"""
-        for index, attr in self.indexes_etypes.get(etype, []):
-            sql = 'CREATE INDEX %s ON cw_%s(%s)' % (index, etype, attr)
-            self._cnx.system_sql(sql)
-
-
-###########################################################################
-## SQL Source #############################################################
-###########################################################################
-
-class SQLGenSourceWrapper(object):
-
-    def __init__(self, system_source, schema,
-                 dump_output_dir=None, nb_threads_statement=3):
-        self.system_source = system_source
-        self._sql = threading.local()
-        # Explicitely backport attributes from system source
-        self._storage_handler = self.system_source._storage_handler
-        self.preprocess_entity = self.system_source.preprocess_entity
-        self.sqlgen = self.system_source.sqlgen
-        self.uri = self.system_source.uri
-        self.eid = self.system_source.eid
-        # Directory to write temporary files
-        self.dump_output_dir = dump_output_dir
-        # Allow to execute code with SQLite backend that does
-        # not support (yet...) copy_from
-        # XXX Should be dealt with in logilab.database
-        spcfrom = system_source.dbhelper.dbapi_module.support_copy_from
-        self.support_copy_from = spcfrom
-        self.dbencoding = system_source.dbhelper.dbencoding
-        self.nb_threads_statement = nb_threads_statement
-        # initialize thread-local data for main thread
-        self.init_thread_locals()
-        self._inlined_rtypes_cache = {}
-        self._fill_inlined_rtypes_cache(schema)
-        self.schema = schema
-        self.do_fti = False
-
-    def _fill_inlined_rtypes_cache(self, schema):
-        cache = self._inlined_rtypes_cache
-        for eschema in schema.entities():
-            for rschema in eschema.ordered_relations():
-                if rschema.inlined:
-                    cache[eschema.type] = SQL_PREFIX + rschema.type
-
-    def init_thread_locals(self):
-        """initializes thread-local data"""
-        self._sql.entities = defaultdict(list)
-        self._sql.relations = {}
-        self._sql.inlined_relations = {}
-        # keep track, for each eid of the corresponding data dict
-        self._sql.eid_insertdicts = {}
-
-    def flush(self):
-        print 'starting flush'
-        _entities_sql = self._sql.entities
-        _relations_sql = self._sql.relations
-        _inlined_relations_sql = self._sql.inlined_relations
-        _insertdicts = self._sql.eid_insertdicts
-        try:
-            # try, for each inlined_relation, to find if we're also creating
-            # the host entity (i.e. the subject of the relation).
-            # In that case, simply update the insert dict and remove
-            # the need to make the
-            # UPDATE statement
-            for statement, datalist in _inlined_relations_sql.iteritems():
-                new_datalist = []
-                # for a given inlined relation,
-                # browse each couple to be inserted
-                for data in datalist:
-                    keys = list(data)
-                    # For inlined relations, it exists only two case:
-                    # (rtype, cw_eid) or (cw_eid, rtype)
-                    if keys[0] == 'cw_eid':
-                        rtype = keys[1]
-                    else:
-                        rtype = keys[0]
-                    updated_eid = data['cw_eid']
-                    if updated_eid in _insertdicts:
-                        _insertdicts[updated_eid][rtype] = data[rtype]
-                    else:
-                        # could not find corresponding insert dict, keep the
-                        # UPDATE query
-                        new_datalist.append(data)
-                _inlined_relations_sql[statement] = new_datalist
-            _import_statements(self.system_source.get_connection,
-                               _entities_sql.items()
-                               + _relations_sql.items()
-                               + _inlined_relations_sql.items(),
-                               dump_output_dir=self.dump_output_dir,
-                               nb_threads=self.nb_threads_statement,
-                               support_copy_from=self.support_copy_from,
-                               encoding=self.dbencoding)
-        finally:
-            _entities_sql.clear()
-            _relations_sql.clear()
-            _insertdicts.clear()
-            _inlined_relations_sql.clear()
-
-    def add_relation(self, cnx, subject, rtype, object,
-                     inlined=False, **kwargs):
-        if inlined:
-            _sql = self._sql.inlined_relations
-            data = {'cw_eid': subject, SQL_PREFIX + rtype: object}
-            subjtype = kwargs.get('subjtype')
-            if subjtype is None:
-                # Try to infer it
-                targets = [t.type for t in
-                           self.schema.rschema(rtype).subjects()]
-                if len(targets) == 1:
-                    subjtype = targets[0]
-                else:
-                    raise ValueError('You should give the subject etype for '
-                                     'inlined relation %s'
-                                     ', as it cannot be inferred: '
-                                     'this type is given as keyword argument '
-                                     '``subjtype``'% rtype)
-            statement = self.sqlgen.update(SQL_PREFIX + subjtype,
-                                           data, ['cw_eid'])
-        else:
-            _sql = self._sql.relations
-            data = {'eid_from': subject, 'eid_to': object}
-            statement = self.sqlgen.insert('%s_relation' % rtype, data)
-        if statement in _sql:
-            _sql[statement].append(data)
-        else:
-            _sql[statement] = [data]
-
-    def add_entity(self, cnx, entity):
-        with self._storage_handler(entity, 'added'):
-            attrs = self.preprocess_entity(entity)
-            rtypes = self._inlined_rtypes_cache.get(entity.cw_etype, ())
-            if isinstance(rtypes, str):
-                rtypes = (rtypes,)
-            for rtype in rtypes:
-                if rtype not in attrs:
-                    attrs[rtype] = None
-            sql = self.sqlgen.insert(SQL_PREFIX + entity.cw_etype, attrs)
-            self._sql.eid_insertdicts[entity.eid] = attrs
-            self._append_to_entities(sql, attrs)
-
-    def _append_to_entities(self, sql, attrs):
-        self._sql.entities[sql].append(attrs)
-
-    def _handle_insert_entity_sql(self, cnx, sql, attrs):
-        # We have to overwrite the source given in parameters
-        # as here, we directly use the system source
-        attrs['asource'] = self.system_source.uri
-        self._append_to_entities(sql, attrs)
-
-    def _handle_is_relation_sql(self, cnx, sql, attrs):
-        self._append_to_entities(sql, attrs)
-
-    def _handle_is_instance_of_sql(self, cnx, sql, attrs):
-        self._append_to_entities(sql, attrs)
-
-    def _handle_source_relation_sql(self, cnx, sql, attrs):
-        self._append_to_entities(sql, attrs)
-
-    # add_info is _copypasted_ from the one in NativeSQLSource. We want it
-    # there because it will use the _handlers of the SQLGenSourceWrapper, which
-    # are not like the ones in the native source.
-    def add_info(self, cnx, entity, source, extid):
-        """add type and source info for an eid into the system table"""
-        # begin by inserting eid/type/source/extid into the entities table
-        if extid is not None:
-            assert isinstance(extid, str)
-            extid = b64encode(extid)
-        attrs = {'type': entity.cw_etype, 'eid': entity.eid, 'extid': extid,
-                 'asource': source.uri}
-        self._handle_insert_entity_sql(cnx, self.sqlgen.insert('entities', attrs), attrs)
-        # insert core relations: is, is_instance_of and cw_source
-        try:
-            self._handle_is_relation_sql(cnx, 'INSERT INTO is_relation(eid_from,eid_to) VALUES (%s,%s)',
-                                         (entity.eid, eschema_eid(cnx, entity.e_schema)))
-        except IndexError:
-            # during schema serialization, skip
-            pass
-        else:
-            for eschema in entity.e_schema.ancestors() + [entity.e_schema]:
-                self._handle_is_relation_sql(cnx,
-                                             'INSERT INTO is_instance_of_relation(eid_from,eid_to) VALUES (%s,%s)',
-                                             (entity.eid, eschema_eid(cnx, eschema)))
-        if 'CWSource' in self.schema and source.eid is not None: # else, cw < 3.10
-            self._handle_is_relation_sql(cnx, 'INSERT INTO cw_source_relation(eid_from,eid_to) VALUES (%s,%s)',
-                                         (entity.eid, source.eid))
-        # now we can update the full text index
-        if self.do_fti and self.need_fti_indexation(entity.cw_etype):
-            self.index_entity(cnx, entity=entity)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dataimport/__init__.py	Mon Jun 22 14:27:37 2015 +0200
@@ -0,0 +1,1163 @@
+# -*- coding: utf-8 -*-
+# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+"""This module provides tools to import tabular data.
+
+
+Example of use (run this with `cubicweb-ctl shell instance import-script.py`):
+
+.. sourcecode:: python
+
+  from cubicweb.dataimport import *
+  # define data generators
+  GENERATORS = []
+
+  USERS = [('Prenom', 'firstname', ()),
+           ('Nom', 'surname', ()),
+           ('Identifiant', 'login', ()),
+           ]
+
+  def gen_users(ctl):
+      for row in ctl.iter_and_commit('utilisateurs'):
+          entity = mk_entity(row, USERS)
+          entity['upassword'] = 'motdepasse'
+          ctl.check('login', entity['login'], None)
+          entity = ctl.store.create_entity('CWUser', **entity)
+          email = ctl.store.create_entity('EmailAddress', address=row['email'])
+          ctl.store.relate(entity.eid, 'use_email', email.eid)
+          ctl.store.rql('SET U in_group G WHERE G name "users", U eid %(x)s', {'x':entity['eid']})
+
+  CHK = [('login', check_doubles, 'Utilisateurs Login',
+          'Deux utilisateurs ne devraient pas avoir le même login.'),
+         ]
+
+  GENERATORS.append( (gen_users, CHK) )
+
+  # create controller
+  ctl = CWImportController(RQLObjectStore(cnx))
+  ctl.askerror = 1
+  ctl.generators = GENERATORS
+  ctl.data['utilisateurs'] = lazytable(ucsvreader(open('users.csv')))
+  # run
+  ctl.run()
+
+.. BUG file with one column are not parsable
+.. TODO rollback() invocation is not possible yet
+"""
+__docformat__ = "restructuredtext en"
+
+import csv
+import sys
+import threading
+import traceback
+import warnings
+import cPickle
+import os.path as osp
+import inspect
+from base64 import b64encode
+from collections import defaultdict
+from copy import copy
+from datetime import date, datetime, time
+from time import asctime
+from StringIO import StringIO
+
+from logilab.common import shellutils, attrdict
+from logilab.common.date import strptime
+from logilab.common.decorators import cached
+from logilab.common.deprecation import deprecated
+
+from cubicweb import QueryError
+from cubicweb.utils import make_uid
+from cubicweb.schema import META_RTYPES, VIRTUAL_RTYPES
+from cubicweb.server.edition import EditedEntity
+from cubicweb.server.sqlutils import SQL_PREFIX
+from cubicweb.server.utils import eschema_eid
+
+
+def count_lines(stream_or_filename):
+    if isinstance(stream_or_filename, basestring):
+        f = open(stream_or_filename)
+    else:
+        f = stream_or_filename
+        f.seek(0)
+    i = -1
+    for i, line in enumerate(f):
+        pass
+    f.seek(0)
+    return i + 1
+
+def ucsvreader_pb(stream_or_path, encoding='utf-8', delimiter=',', quotechar='"',
+                  skipfirst=False, withpb=True, skip_empty=True, separator=None,
+                  quote=None):
+    """same as :func:`ucsvreader` but a progress bar is displayed as we iter on rows"""
+    if separator is not None:
+        delimiter = separator
+        warnings.warn("[3.20] 'separator' kwarg is deprecated, use 'delimiter' instead")
+    if quote is not None:
+        quotechar = quote
+        warnings.warn("[3.20] 'quote' kwarg is deprecated, use 'quotechar' instead")
+    if isinstance(stream_or_path, basestring):
+        if not osp.exists(stream_or_path):
+            raise Exception("file doesn't exist: %s" % stream_or_path)
+        stream = open(stream_or_path)
+    else:
+        stream = stream_or_path
+    rowcount = count_lines(stream)
+    if skipfirst:
+        rowcount -= 1
+    if withpb:
+        pb = shellutils.ProgressBar(rowcount, 50)
+    for urow in ucsvreader(stream, encoding, delimiter, quotechar,
+                           skipfirst=skipfirst, skip_empty=skip_empty):
+        yield urow
+        if withpb:
+            pb.update()
+    print ' %s rows imported' % rowcount
+
+def ucsvreader(stream, encoding='utf-8', delimiter=',', quotechar='"',
+               skipfirst=False, ignore_errors=False, skip_empty=True,
+               separator=None, quote=None):
+    """A csv reader that accepts files with any encoding and outputs unicode
+    strings
+
+    If skip_empty (the default), lines with no values (only separators) are
+    skipped. This is useful for Excel exports, which are often full of such
+    lines.
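+
+    A usage sketch, assuming a semicolon-separated, latin-1 encoded
+    'users.csv' file (hypothetical name):
+
+    >>> rows = list(ucsvreader(open('users.csv'), encoding='latin1',
+    ...                        delimiter=';', skipfirst=True))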
+    """
+    if separator is not None:
+        delimiter = separator
+        warnings.warn("[3.20] 'separator' kwarg is deprecated, use 'delimiter' instead")
+    if quote is not None:
+        quotechar = quote
+        warnings.warn("[3.20] 'quote' kwarg is deprecated, use 'quotechar' instead")
+    it = iter(csv.reader(stream, delimiter=delimiter, quotechar=quotechar))
+    if not ignore_errors:
+        if skipfirst:
+            it.next()
+        for row in it:
+            decoded = [item.decode(encoding) for item in row]
+            if not skip_empty or any(decoded):
+                yield decoded
+    else:
+        if skipfirst:
+            try:
+                row = it.next()
+            except csv.Error:
+                pass
+        # Safe version that can cope with errors in the CSV file
+        while True:
+            try:
+                row = it.next()
+            # End of CSV, break
+            except StopIteration:
+                break
+            # Error in CSV, ignore line and continue
+            except csv.Error:
+                continue
+            decoded = [item.decode(encoding) for item in row]
+            if not skip_empty or any(decoded):
+                yield decoded
+
+
+def callfunc_every(func, number, iterable):
+    """yield items of `iterable` one by one and call function `func`
+    every `number` iterations. Always call function `func` at the end.
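+
+    A usage sketch (`store` and `reader` are hypothetical objects):
+
+    >>> rows = callfunc_every(store.commit, 100, reader)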
+    """
+    for idx, item in enumerate(iterable):
+        yield item
+        if not idx % number:
+            func()
+    func()
+
+def lazytable(reader):
+    """The first row is taken to be the header of the table and
+    used to output a dict for each row of data.
+
+    >>> data = lazytable(ucsvreader(open(filename)))
+    """
+    header = reader.next()
+    for row in reader:
+        yield dict(zip(header, row))
+
+def lazydbtable(cu, table, headers, orderby=None):
+    """return an iterator on rows of a sql table. On each row, fetch columns
+    defined in headers and return values as a dictionary.
+
+    >>> data = lazydbtable(cu, 'experimentation', ('id', 'nickname', 'gps'))
+    """
+    sql = 'SELECT %s FROM %s' % (','.join(headers), table,)
+    if orderby:
+        sql += ' ORDER BY %s' % ','.join(orderby)
+    cu.execute(sql)
+    while True:
+        row = cu.fetchone()
+        if row is None:
+            break
+        yield dict(zip(headers, row))
+
+def mk_entity(row, map):
+    """Return a dict made from sanitized mapped values.
+
+    A ValueError may be raised when a checker encounters an unexpected value.
+
+    >>> row = {'myname': u'dupont'}
+    >>> map = [('myname', u'name', (call_transform_method('title'),))]
+    >>> mk_entity(row, map)
+    {'name': u'Dupont'}
+    >>> row = {'myname': u'dupont', 'optname': u''}
+    >>> map = [('myname', u'name', (call_transform_method('title'),)),
+    ...        ('optname', u'MARKER', (optional,))]
+    >>> mk_entity(row, map)
+    {'name': u'Dupont', 'optname': None}
+    """
+    res = {}
+    assert isinstance(row, dict)
+    assert isinstance(map, list)
+    for src, dest, funcs in map:
+        try:
+            res[dest] = row[src]
+        except KeyError:
+            continue
+        try:
+            for func in funcs:
+                res[dest] = func(res[dest])
+                if res[dest] is None:
+                    break
+        except ValueError as err:
+            raise ValueError('error with %r field: %s' % (src, err)), None, sys.exc_info()[-1]
+    return res
+
+# user interactions ############################################################
+
+def tell(msg):
+    print msg
+
+def confirm(question):
+    """A confirm function that asks for yes/no/abort and exits on abort."""
+    answer = shellutils.ASK.ask(question, ('Y', 'n', 'abort'), 'Y')
+    if answer == 'abort':
+        sys.exit(1)
+    return answer == 'Y'
+
+
+class catch_error(object):
+    """Helper for @contextmanager decorator."""
+
+    def __init__(self, ctl, key='unexpected error', msg=None):
+        self.ctl = ctl
+        self.key = key
+        self.msg = msg
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, type, value, traceback):
+        if type is not None:
+            if issubclass(type, (KeyboardInterrupt, SystemExit)):
+                return # re-raise
+            if self.ctl.catcherrors:
+                self.ctl.record_error(self.key, None, type, value, traceback)
+                return True # silent
+
+
+# base sanitizing/coercing functions ###########################################
+
+def optional(value):
+    """checker to filter optional field
+
+    If value is undefined (ex: empty string), return None that will
+    break the checkers validation chain
+
+    General use is to add 'optional' check in first condition to avoid
+    ValueError by further checkers
+
+    >>> MAPPER = [(u'value', 'value', (optional, int))]
+    >>> row = {'value': u'XXX'}
+    >>> mk_entity(row, MAPPER)
+    {'value': None}
+    >>> row = {'value': u'100'}
+    >>> mk_entity(row, MAPPER)
+    {'value': 100}
+    """
+    if value:
+        return value
+    return None
+
+def required(value):
+    """raise ValueError if value is empty
+
+    This check is usually placed last in the chain.
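+
+    For instance:
+
+    >>> required(u'foo')
+    u'foo'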
+    """
+    if value:
+        return value
+    raise ValueError("required")
+
+def todatetime(format='%d/%m/%Y'):
+    """return a transformation function to turn string input value into a
+    `datetime.datetime` instance, using given format.
+
+    Follow it by `todate` or `totime` functions from `logilab.common.date` if
+    you want a `date`/`time` instance instead of `datetime`.
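+
+    A usage sketch:
+
+    >>> parse = todatetime('%Y-%m-%d')
+    >>> dt = parse(u'2015-06-22')  # -> datetime.datetime(2015, 6, 22, 0, 0)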
+    """
+    def coerce(value):
+        return strptime(value, format)
+    return coerce
+
+def call_transform_method(methodname, *args, **kwargs):
+    """return value returned by calling the given method on input"""
+    def coerce(value):
+        return getattr(value, methodname)(*args, **kwargs)
+    return coerce
+
+def call_check_method(methodname, *args, **kwargs):
+    """check value returned by calling the given method on input is true,
+    else raise ValueError
+    """
+    def check(value):
+        if getattr(value, methodname)(*args, **kwargs):
+            return value
+        raise ValueError('%s not verified on %r' % (methodname, value))
+    return check
+
+# base integrity checking functions ############################################
+
+def check_doubles(buckets):
+    """Extract the keys that have more than one item in their bucket."""
+    return [(k, len(v)) for k, v in buckets.items() if len(v) > 1]
+
+def check_doubles_not_none(buckets):
+    """Extract the keys that have more than one item in their bucket."""
+    return [(k, len(v)) for k, v in buckets.items()
+            if k is not None and len(v) > 1]
+
+# sql generator utility functions #############################################
+
+
+def _import_statements(sql_connect, statements, nb_threads=3,
+                       dump_output_dir=None,
+                       support_copy_from=True, encoding='utf-8'):
+    """
+    Import a bunch of sql statements, using different threads.
+    """
+    try:
+        chunksize = (len(statements) / nb_threads) + 1
+        threads = []
+        for i in xrange(nb_threads):
+            chunks = statements[i*chunksize:(i+1)*chunksize]
+            thread = threading.Thread(target=_execmany_thread,
+                                      args=(sql_connect, chunks,
+                                            dump_output_dir,
+                                            support_copy_from,
+                                            encoding))
+            thread.start()
+            threads.append(thread)
+        for t in threads:
+            t.join()
+    except Exception:
+        print 'Error in import statements'
+
+def _execmany_thread_not_copy_from(cu, statement, data, table=None,
+                                   columns=None, encoding='utf-8'):
+    """ Execute thread without copy from
+    """
+    cu.executemany(statement, data)
+
+def _execmany_thread_copy_from(cu, statement, data, table,
+                               columns, encoding='utf-8'):
+    """ Execute thread with copy from
+    """
+    buf = _create_copyfrom_buffer(data, columns, encoding=encoding)
+    if buf is None:
+        _execmany_thread_not_copy_from(cu, statement, data)
+    else:
+        if columns is None:
+            cu.copy_from(buf, table, null='NULL')
+        else:
+            cu.copy_from(buf, table, null='NULL', columns=columns)
+
+def _execmany_thread(sql_connect, statements, dump_output_dir=None,
+                     support_copy_from=True, encoding='utf-8'):
+    """
+    Execute SQL statements. For 'INSERT INTO' statements, try to use the
+    'COPY FROM' command, and fall back to executemany otherwise.
+    """
+    if support_copy_from:
+        execmany_func = _execmany_thread_copy_from
+    else:
+        execmany_func = _execmany_thread_not_copy_from
+    cnx = sql_connect()
+    cu = cnx.cursor()
+    try:
+        for statement, data in statements:
+            table = None
+            columns = None
+            try:
+                if not statement.startswith('INSERT INTO'):
+                    cu.executemany(statement, data)
+                    continue
+                table = statement.split()[2]
+                if isinstance(data[0], (tuple, list)):
+                    columns = None
+                else:
+                    columns = list(data[0])
+                execmany_func(cu, statement, data, table, columns, encoding)
+            except Exception:
+                print 'unable to copy data into table %s' % table
+                # Error in import statement, save data in dump_output_dir
+                if dump_output_dir is not None:
+                    pdata = {'data': data, 'statement': statement,
+                             'time': asctime(), 'columns': columns}
+                    filename = make_uid()
+                    try:
+                        with open(osp.join(dump_output_dir,
+                                           '%s.pickle' % filename), 'w') as fobj:
+                            fobj.write(cPickle.dumps(pdata))
+                    except IOError:
+                        print 'ERROR while pickling in', dump_output_dir, filename+'.pickle'
+                cnx.rollback()
+                raise
+    finally:
+        cnx.commit()
+        cu.close()
+
+
+def _copyfrom_buffer_convert_None(value, **opts):
+    '''Convert None value to "NULL"'''
+    return 'NULL'
+
+def _copyfrom_buffer_convert_number(value, **opts):
+    '''Convert a number into its string representation'''
+    return str(value)
+
+def _copyfrom_buffer_convert_string(value, **opts):
+    '''Convert string value.
+
+    Recognized keywords:
+    :encoding: resulting string encoding (default: utf-8)
+    '''
+    encoding = opts.get('encoding','utf-8')
+    escape_chars = ((u'\\', ur'\\'), (u'\t', u'\\t'), (u'\r', u'\\r'),
+                    (u'\n', u'\\n'))
+    for char, replace in escape_chars:
+        value = value.replace(char, replace)
+    if isinstance(value, unicode):
+        value = value.encode(encoding)
+    return value
+
+def _copyfrom_buffer_convert_date(value, **opts):
+    '''Convert date into "YYYY-MM-DD"'''
+    # Do not use strftime, as it raises an error for dates before 1900
+    # (http://bugs.python.org/issue1777412)
+    return '%04d-%02d-%02d' % (value.year, value.month, value.day)
+
+def _copyfrom_buffer_convert_datetime(value, **opts):
+    '''Convert datetime into "YYYY-MM-DD HH:MM:SS.UUUUUU"'''
+    # Do not use strftime, as it raises an error for dates before 1900
+    # (http://bugs.python.org/issue1777412)
+    return '%s %s' % (_copyfrom_buffer_convert_date(value, **opts),
+                      _copyfrom_buffer_convert_time(value, **opts))
+
+def _copyfrom_buffer_convert_time(value, **opts):
+    '''Convert time into "HH:MM:SS.UUUUUU"'''
+    return '%02d:%02d:%02d.%06d' % (value.hour, value.minute,
+                                    value.second, value.microsecond)
+
+# (types, converter) list.
+_COPYFROM_BUFFER_CONVERTERS = [
+    (type(None), _copyfrom_buffer_convert_None),
+    ((long, int, float), _copyfrom_buffer_convert_number),
+    (basestring, _copyfrom_buffer_convert_string),
+    (datetime, _copyfrom_buffer_convert_datetime),
+    (date, _copyfrom_buffer_convert_date),
+    (time, _copyfrom_buffer_convert_time),
+]
+
+def _create_copyfrom_buffer(data, columns=None, **convert_opts):
+    """
+    Create a StringIO buffer for 'COPY FROM' command.
+    Deals with Unicode, Int, Float, Date... (see ``_COPYFROM_BUFFER_CONVERTERS``)
+
+    :data: a sequence of tuples or dicts
+    :columns: list of columns to consider (defaults to all columns)
+    :convert_opts: keyword arguments given to converters
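+
+    A minimal sketch::
+
+      buf = _create_copyfrom_buffer([(1, u'foo'), (2, u'bar')])
+      # buf.getvalue() == '1\tfoo\n2\tbar'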
+    """
+    # Build a list of rows first, then create the StringIO in a single step
+    # from the '\n'-joined lines
+    rows = []
+    if columns is None:
+        if isinstance(data[0], (tuple, list)):
+            columns = range(len(data[0]))
+        elif isinstance(data[0], dict):
+            columns = data[0].keys()
+        else:
+            raise ValueError('Could not get columns: you must provide columns.')
+    for row in data:
+        # Iterate over the different columns and the different values
+        # and try to convert them to a correct datatype.
+        # If an error is raised, do not continue.
+        formatted_row = []
+        for col in columns:
+            try:
+                value = row[col]
+            except KeyError:
+                warnings.warn(u"Column %s is not accessible in row %s"
+                              % (col, row), RuntimeWarning)
+                # XXX 'value' set to None so that the import does not end in
+                # error.
+                # Instead, the extra keys are set to NULL from the
+                # database point of view.
+                value = None
+            for types, converter in _COPYFROM_BUFFER_CONVERTERS:
+                if isinstance(value, types):
+                    value = converter(value, **convert_opts)
+                    break
+            else:
+                raise ValueError("Unsupported value type %s" % type(value))
+            # Push the converted value (now a string) to the formatted row.
+            formatted_row.append(value)
+        rows.append('\t'.join(formatted_row))
+    return StringIO('\n'.join(rows))
+
+
+# object stores #################################################################
+
+class ObjectStore(object):
+    """Store objects in memory for *faster* validation (development mode)
+
+    It will not enforce schema constraints, however, and hence may miss some problems.
+
+    >>> store = ObjectStore()
+    >>> user = store.create_entity('CWUser', login=u'johndoe')
+    >>> group = store.create_entity('CWGroup', name=u'unknown')
+    >>> store.relate(user.eid, 'in_group', group.eid)
+    """
+    def __init__(self):
+        self.items = []
+        self.eids = {}
+        self.types = {}
+        self.relations = set()
+        self.indexes = {}
+
+    def create_entity(self, etype, **data):
+        data = attrdict(data)
+        data['eid'] = eid = len(self.items)
+        self.items.append(data)
+        self.eids[eid] = data
+        self.types.setdefault(etype, []).append(eid)
+        return data
+
+    def relate(self, eid_from, rtype, eid_to, **kwargs):
+        """Add new relation"""
+        relation = eid_from, rtype, eid_to
+        self.relations.add(relation)
+        return relation
+
+    def commit(self):
+        """this commit method does nothing by default"""
+        return
+
+    def flush(self):
+        """The method is provided so that all stores share a common API"""
+        pass
+
+    @property
+    def nb_inserted_entities(self):
+        return len(self.eids)
+    @property
+    def nb_inserted_types(self):
+        return len(self.types)
+    @property
+    def nb_inserted_relations(self):
+        return len(self.relations)
+
+class RQLObjectStore(ObjectStore):
+    """ObjectStore that works with an actual RQL repository (production mode)"""
+
+    def __init__(self, cnx, commit=None):
+        if commit is not None:
+            warnings.warn('[3.19] commit argument should not be specified '
+                          'as the cnx object already provides it.',
+                          DeprecationWarning, stacklevel=2)
+        super(RQLObjectStore, self).__init__()
+        self._cnx = cnx
+        self._commit = commit or cnx.commit
+
+    def commit(self):
+        return self._commit()
+
+    def rql(self, *args):
+        return self._cnx.execute(*args)
+
+    @property
+    def session(self):
+        warnings.warn('[3.19] deprecated property.', DeprecationWarning,
+                      stacklevel=2)
+        return self._cnx.repo._get_session(self._cnx.sessionid)
+
+    def create_entity(self, *args, **kwargs):
+        entity = self._cnx.create_entity(*args, **kwargs)
+        self.eids[entity.eid] = entity
+        self.types.setdefault(args[0], []).append(entity.eid)
+        return entity
+
+    def relate(self, eid_from, rtype, eid_to, **kwargs):
+        eid_from, rtype, eid_to = super(RQLObjectStore, self).relate(
+            eid_from, rtype, eid_to, **kwargs)
+        self.rql('SET X %s Y WHERE X eid %%(x)s, Y eid %%(y)s' % rtype,
+                 {'x': int(eid_from), 'y': int(eid_to)})
+
+    @deprecated("[3.19] use cnx.find(*args, **kwargs).entities() instead")
+    def find_entities(self, *args, **kwargs):
+        return self._cnx.find(*args, **kwargs).entities()
+
+    @deprecated("[3.19] use cnx.find(*args, **kwargs).one() instead")
+    def find_one_entity(self, *args, **kwargs):
+        return self._cnx.find(*args, **kwargs).one()
+
+# the import controller ########################################################
+
+class CWImportController(object):
+    """Controller of the data import process.
+
+    >>> ctl = CWImportController(store)
+    >>> ctl.generators = list_of_data_generators
+    >>> ctl.data = dict_of_data_tables
+    >>> ctl.run()
+    """
+
+    def __init__(self, store, askerror=0, catcherrors=None, tell=tell,
+                 commitevery=50):
+        self.store = store
+        self.generators = None
+        self.data = {}
+        self.errors = None
+        self.askerror = askerror
+        if catcherrors is None:
+            catcherrors = askerror
+        self.catcherrors = catcherrors
+        self.commitevery = commitevery # set to None to do a single commit
+        self._tell = tell
+
+    def check(self, type, key, value):
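+        """record `value` in the bucket `key` of the check named `type`;
+        buckets are later passed to the check functions (e.g.
+        :func:`check_doubles`) registered along with the generators
+        """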
+        self._checks.setdefault(type, {}).setdefault(key, []).append(value)
+
+    def check_map(self, entity, key, map, default):
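+        """replace `entity[key]` by its mapping through `map`; if the value
+        is not found, record it under the check named `key` and fall back to
+        `default`
+        """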
+        try:
+            entity[key] = map[entity[key]]
+        except KeyError:
+            self.check(key, entity[key], None)
+            entity[key] = default
+
+    def record_error(self, key, msg=None, type=None, value=None, tb=None):
+        tmp = StringIO()
+        if type is None:
+            traceback.print_exc(file=tmp)
+        else:
+            traceback.print_exception(type, value, tb, file=tmp)
+        # use a list to avoid counting <nb lines> errors instead of one
+        errorlog = self.errors.setdefault(key, [])
+        if msg is None:
+            errorlog.append(tmp.getvalue().splitlines())
+        else:
+            errorlog.append( (msg, tmp.getvalue().splitlines()) )
+
+    def run(self):
+        self.errors = {}
+        if self.commitevery is None:
+            self.tell('Will commit all or nothing.')
+        else:
+            self.tell('Will commit every %s iterations' % self.commitevery)
+        for func, checks in self.generators:
+            self._checks = {}
+            func_name = func.__name__
+            self.tell("Run import function '%s'..." % func_name)
+            try:
+                func(self)
+            except Exception:
+                if self.catcherrors:
+                    self.record_error(func_name, 'While calling %s' % func.__name__)
+                else:
+                    self._print_stats()
+                    raise
+            for key, func, title, help in checks:
+                buckets = self._checks.get(key)
+                if buckets:
+                    err = func(buckets)
+                    if err:
+                        self.errors[title] = (help, err)
+        try:
+            txuuid = self.store.commit()
+            if txuuid is not None:
+                self.tell('Transaction committed (txuuid: %s)' % txuuid)
+        except QueryError as ex:
+            self.tell('Transaction aborted: %s' % ex)
+        self._print_stats()
+        if self.errors:
+            if self.askerror == 2 or (self.askerror and confirm('Display errors ?')):
+                from pprint import pformat
+                for errkey, error in self.errors.items():
+                    self.tell("\n%s (%s): %d\n" % (error[0], errkey, len(error[1])))
+                    self.tell(pformat(sorted(error[1])))
+
+    def _print_stats(self):
+        nberrors = sum(len(err) for err in self.errors.itervalues())
+        self.tell('\nImport statistics: %i entities, %i types, %i relations and %i errors'
+                  % (self.store.nb_inserted_entities,
+                     self.store.nb_inserted_types,
+                     self.store.nb_inserted_relations,
+                     nberrors))
+
+    def get_data(self, key):
+        return self.data.get(key)
+
+    def index(self, name, key, value, unique=False):
+        """create a new index
+
+        If unique is set to True, only the first occurrence will be kept, not the following ones.
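+
+        A hypothetical call, keeping a single eid per login:
+
+        >>> ctl.index('users', login, eid, unique=True)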
+        """
+        if unique:
+            try:
+                if value in self.store.indexes[name][key]:
+                    return
+            except KeyError:
+                # this is necessarily the first occurrence, so continue...
+                pass
+        self.store.indexes.setdefault(name, {}).setdefault(key, []).append(value)
+
+    def tell(self, msg):
+        self._tell(msg)
+
+    def iter_and_commit(self, datakey):
+        """iter rows, triggering commit every self.commitevery iterations"""
+        if self.commitevery is None:
+            return self.get_data(datakey)
+        else:
+            return callfunc_every(self.store.commit,
+                                  self.commitevery,
+                                  self.get_data(datakey))
+
+
+class NoHookRQLObjectStore(RQLObjectStore):
+    """ObjectStore that works with an actual RQL repository (production mode)"""
+
+    def __init__(self, cnx, metagen=None, baseurl=None):
+        super(NoHookRQLObjectStore, self).__init__(cnx)
+        self.source = cnx.repo.system_source
+        self.rschema = cnx.repo.schema.rschema
+        self.add_relation = self.source.add_relation
+        if metagen is None:
+            metagen = MetaGenerator(cnx, baseurl)
+        self.metagen = metagen
+        self._nb_inserted_entities = 0
+        self._nb_inserted_types = 0
+        self._nb_inserted_relations = 0
+        # deactivate security
+        cnx.read_security = False
+        cnx.write_security = False
+
+    def create_entity(self, etype, **kwargs):
+        for k, v in kwargs.iteritems():
+            kwargs[k] = getattr(v, 'eid', v)
+        entity, rels = self.metagen.base_etype_dicts(etype)
+        # make a copy to keep cached entity pristine
+        entity = copy(entity)
+        entity.cw_edited = copy(entity.cw_edited)
+        entity.cw_clear_relation_cache()
+        entity.cw_edited.update(kwargs, skipsec=False)
+        entity_source, extid = self.metagen.init_entity(entity)
+        cnx = self._cnx
+        self.source.add_entity(cnx, entity)
+        self.source.add_info(cnx, entity, entity_source, extid)
+        kwargs = dict()
+        if inspect.getargspec(self.add_relation).keywords:
+            kwargs['subjtype'] = entity.cw_etype
+        for rtype, targeteids in rels.iteritems():
+            # targeteids may be a single eid or a list of eids
+            inlined = self.rschema(rtype).inlined
+            try:
+                for targeteid in targeteids:
+                    self.add_relation(cnx, entity.eid, rtype, targeteid,
+                                      inlined, **kwargs)
+            except TypeError:
+                self.add_relation(cnx, entity.eid, rtype, targeteids,
+                                  inlined, **kwargs)
+        self._nb_inserted_entities += 1
+        return entity
+
+    def relate(self, eid_from, rtype, eid_to, **kwargs):
+        assert not rtype.startswith('reverse_')
+        self.add_relation(self._cnx, eid_from, rtype, eid_to,
+                          self.rschema(rtype).inlined)
+        if self.rschema(rtype).symmetric:
+            self.add_relation(self._cnx, eid_to, rtype, eid_from,
+                              self.rschema(rtype).inlined)
+        self._nb_inserted_relations += 1
+
+    @property
+    def nb_inserted_entities(self):
+        return self._nb_inserted_entities
+    @property
+    def nb_inserted_types(self):
+        return self._nb_inserted_types
+    @property
+    def nb_inserted_relations(self):
+        return self._nb_inserted_relations
+
+
+class MetaGenerator(object):
+    META_RELATIONS = (META_RTYPES
+                      - VIRTUAL_RTYPES
+                      - set(('eid', 'cwuri',
+                             'is', 'is_instance_of', 'cw_source')))
+
+    def __init__(self, cnx, baseurl=None, source=None):
+        self._cnx = cnx
+        if baseurl is None:
+            config = cnx.vreg.config
+            baseurl = config['base-url'] or config.default_base_url()
+        if not baseurl[-1] == '/':
+            baseurl += '/'
+        self.baseurl = baseurl
+        if source is None:
+            source = cnx.repo.system_source
+        self.source = source
+        self.create_eid = cnx.repo.system_source.create_eid
+        self.time = datetime.now()
+        # attributes/relations shared by all entities of the same type
+        self.etype_attrs = []
+        self.etype_rels = []
+        # attributes/relations specific to each entity
+        self.entity_attrs = ['cwuri']
+        #self.entity_rels = [] XXX not handled (YAGNI?)
+        schema = cnx.vreg.schema
+        rschema = schema.rschema
+        for rtype in self.META_RELATIONS:
+            # skip owned_by / created_by if user is the internal manager
+            if cnx.user.eid == -1 and rtype in ('owned_by', 'created_by'):
+                continue
+            if rschema(rtype).final:
+                self.etype_attrs.append(rtype)
+            else:
+                self.etype_rels.append(rtype)
+
+    @cached
+    def base_etype_dicts(self, etype):
+        entity = self._cnx.vreg['etypes'].etype_class(etype)(self._cnx)
+        # entity are "surface" copied, avoid shared dict between copies
+        del entity.cw_extra_kwargs
+        entity.cw_edited = EditedEntity(entity)
+        for attr in self.etype_attrs:
+            genfunc = self.generate(attr)
+            if genfunc:
+                entity.cw_edited.edited_attribute(attr, genfunc(entity))
+        rels = {}
+        for rel in self.etype_rels:
+            genfunc = self.generate(rel)
+            if genfunc:
+                rels[rel] = genfunc(entity)
+        return entity, rels
+
+    def init_entity(self, entity):
+        entity.eid = self.create_eid(self._cnx)
+        extid = entity.cw_edited.get('cwuri')
+        for attr in self.entity_attrs:
+            if attr in entity.cw_edited:
+                # already set, skip this attribute
+                continue
+            genfunc = self.generate(attr)
+            if genfunc:
+                entity.cw_edited.edited_attribute(attr, genfunc(entity))
+        if isinstance(extid, unicode):
+            extid = extid.encode('utf-8')
+        return self.source, extid
+
+    def generate(self, rtype):
+        return getattr(self, 'gen_%s' % rtype, None)
+
+    def gen_cwuri(self, entity):
+        assert self.baseurl, 'baseurl is None while generating cwuri'
+        return u'%s%s' % (self.baseurl, entity.eid)
+
+    def gen_creation_date(self, entity):
+        return self.time
+
+    def gen_modification_date(self, entity):
+        return self.time
+
+    def gen_created_by(self, entity):
+        return self._cnx.user.eid
+
+    def gen_owned_by(self, entity):
+        return self._cnx.user.eid
+
+
+###########################################################################
+## SQL object store #######################################################
+###########################################################################
+class SQLGenObjectStore(NoHookRQLObjectStore):
+    """Controller of the data import process. This version is based
+    on direct insertions throught SQL command (COPY FROM or execute many).
+
+    >>> store = SQLGenObjectStore(cnx)
+    >>> store.create_entity('Person', ...)
+    >>> store.flush()
+    """
+
+    def __init__(self, cnx, dump_output_dir=None, nb_threads_statement=3):
+        """
+        Initialize a SQLGenObjectStore.
+
+        Parameters:
+
+          - cnx: connection to the cubicweb instance
+          - dump_output_dir: a directory to dump failed statements
+            for easier recovery. Default is None (no dump).
+          - nb_threads_statement: number of threads used
+            for SQL insertion (default is 3).
+        """
+        super(SQLGenObjectStore, self).__init__(cnx)
+        ### hijack default source
+        self.source = SQLGenSourceWrapper(
+            self.source, cnx.vreg.schema,
+            dump_output_dir=dump_output_dir,
+            nb_threads_statement=nb_threads_statement)
+        ### XXX This is done in super().__init__(), but should be
+        ### redone here to link to the correct source
+        self.add_relation = self.source.add_relation
+        self.indexes_etypes = {}
+
+    def flush(self):
+        """Flush data to the database"""
+        self.source.flush()
+
+    def relate(self, subj_eid, rtype, obj_eid, **kwargs):
+        if subj_eid is None or obj_eid is None:
+            return
+        # XXX Could subjtype be inferred ?
+        self.source.add_relation(self._cnx, subj_eid, rtype, obj_eid,
+                                 self.rschema(rtype).inlined, **kwargs)
+        if self.rschema(rtype).symmetric:
+            self.source.add_relation(self._cnx, obj_eid, rtype, subj_eid,
+                                     self.rschema(rtype).inlined, **kwargs)
+
+    def drop_indexes(self, etype):
+        """Drop indexes for a given entity type"""
+        if etype not in self.indexes_etypes:
+            cu = self._cnx.cnxset.cu
+            def index_to_attr(index):
+                """turn an index name to (database) attribute name"""
+                return index.replace(etype.lower(), '').replace('idx', '').strip('_')
+            indices = [(index, index_to_attr(index))
+                       for index in self.source.dbhelper.list_indices(cu, etype)
+                       # Do not consider 'cw_etype_pkey' index
+                       if not index.endswith('key')]
+            self.indexes_etypes[etype] = indices
+        for index, attr in self.indexes_etypes[etype]:
+            self._cnx.system_sql('DROP INDEX %s' % index)
+
+    def create_indexes(self, etype):
+        """Recreate indexes for a given entity type"""
+        for index, attr in self.indexes_etypes.get(etype, []):
+            sql = 'CREATE INDEX %s ON cw_%s(%s)' % (index, etype, attr)
+            self._cnx.system_sql(sql)
+
+
+###########################################################################
+## SQL Source #############################################################
+###########################################################################
+
+class SQLGenSourceWrapper(object):
+
+    def __init__(self, system_source, schema,
+                 dump_output_dir=None, nb_threads_statement=3):
+        self.system_source = system_source
+        self._sql = threading.local()
+        # Explicitly backport attributes from the system source
+        self._storage_handler = self.system_source._storage_handler
+        self.preprocess_entity = self.system_source.preprocess_entity
+        self.sqlgen = self.system_source.sqlgen
+        self.uri = self.system_source.uri
+        self.eid = self.system_source.eid
+        # Directory to write temporary files
+        self.dump_output_dir = dump_output_dir
+        # Allow executing code with an SQLite backend that does
+        # not (yet) support copy_from
+        # XXX Should be dealt with in logilab.database
+        spcfrom = system_source.dbhelper.dbapi_module.support_copy_from
+        self.support_copy_from = spcfrom
+        self.dbencoding = system_source.dbhelper.dbencoding
+        self.nb_threads_statement = nb_threads_statement
+        # initialize thread-local data for main thread
+        self.init_thread_locals()
+        self._inlined_rtypes_cache = {}
+        self._fill_inlined_rtypes_cache(schema)
+        self.schema = schema
+        self.do_fti = False
+
+    def _fill_inlined_rtypes_cache(self, schema):
+        cache = self._inlined_rtypes_cache
+        for eschema in schema.entities():
+            for rschema in eschema.ordered_relations():
+                if rschema.inlined:
+                    cache[eschema.type] = SQL_PREFIX + rschema.type
+
+    def init_thread_locals(self):
+        """initializes thread-local data"""
+        self._sql.entities = defaultdict(list)
+        self._sql.relations = {}
+        self._sql.inlined_relations = {}
+        # keep track, for each eid, of the corresponding data dict
+        self._sql.eid_insertdicts = {}
+
+    def flush(self):
+        print 'starting flush'
+        _entities_sql = self._sql.entities
+        _relations_sql = self._sql.relations
+        _inlined_relations_sql = self._sql.inlined_relations
+        _insertdicts = self._sql.eid_insertdicts
+        try:
+            # try, for each inlined relation, to find out whether we are also
+            # creating the host entity (i.e. the subject of the relation).
+            # In that case, simply update the insert dict and drop the
+            # UPDATE statement.
+            for statement, datalist in _inlined_relations_sql.iteritems():
+                new_datalist = []
+                # for a given inlined relation, browse each
+                # (subject, object) pair to be inserted
+                for data in datalist:
+                    keys = list(data)
+                    # For inlined relations, there are only two possible cases:
+                    # (rtype, cw_eid) or (cw_eid, rtype)
+                    if keys[0] == 'cw_eid':
+                        rtype = keys[1]
+                    else:
+                        rtype = keys[0]
+                    updated_eid = data['cw_eid']
+                    if updated_eid in _insertdicts:
+                        _insertdicts[updated_eid][rtype] = data[rtype]
+                    else:
+                        # could not find corresponding insert dict, keep the
+                        # UPDATE query
+                        new_datalist.append(data)
+                _inlined_relations_sql[statement] = new_datalist
+            _import_statements(self.system_source.get_connection,
+                               _entities_sql.items()
+                               + _relations_sql.items()
+                               + _inlined_relations_sql.items(),
+                               dump_output_dir=self.dump_output_dir,
+                               nb_threads=self.nb_threads_statement,
+                               support_copy_from=self.support_copy_from,
+                               encoding=self.dbencoding)
+        finally:
+            _entities_sql.clear()
+            _relations_sql.clear()
+            _insertdicts.clear()
+            _inlined_relations_sql.clear()
+
+    def add_relation(self, cnx, subject, rtype, object,
+                     inlined=False, **kwargs):
+        if inlined:
+            _sql = self._sql.inlined_relations
+            data = {'cw_eid': subject, SQL_PREFIX + rtype: object}
+            subjtype = kwargs.get('subjtype')
+            if subjtype is None:
+                # Try to infer it
+                targets = [t.type for t in
+                           self.schema.rschema(rtype).subjects()]
+                if len(targets) == 1:
+                    subjtype = targets[0]
+                else:
+                    raise ValueError('You should give the subject etype for '
+                                     'inlined relation %s, as it cannot be '
+                                     'inferred; pass it as the ``subjtype`` '
+                                     'keyword argument' % rtype)
+            statement = self.sqlgen.update(SQL_PREFIX + subjtype,
+                                           data, ['cw_eid'])
+        else:
+            _sql = self._sql.relations
+            data = {'eid_from': subject, 'eid_to': object}
+            statement = self.sqlgen.insert('%s_relation' % rtype, data)
+        if statement in _sql:
+            _sql[statement].append(data)
+        else:
+            _sql[statement] = [data]
+
+    def add_entity(self, cnx, entity):
+        with self._storage_handler(entity, 'added'):
+            attrs = self.preprocess_entity(entity)
+            rtypes = self._inlined_rtypes_cache.get(entity.cw_etype, ())
+            if isinstance(rtypes, str):
+                rtypes = (rtypes,)
+            for rtype in rtypes:
+                if rtype not in attrs:
+                    attrs[rtype] = None
+            sql = self.sqlgen.insert(SQL_PREFIX + entity.cw_etype, attrs)
+            self._sql.eid_insertdicts[entity.eid] = attrs
+            self._append_to_entities(sql, attrs)
+
+    def _append_to_entities(self, sql, attrs):
+        self._sql.entities[sql].append(attrs)
+
+    def _handle_insert_entity_sql(self, cnx, sql, attrs):
+        # We have to overwrite the source given in parameters
+        # as here, we directly use the system source
+        attrs['asource'] = self.system_source.uri
+        self._append_to_entities(sql, attrs)
+
+    def _handle_is_relation_sql(self, cnx, sql, attrs):
+        self._append_to_entities(sql, attrs)
+
+    def _handle_is_instance_of_sql(self, cnx, sql, attrs):
+        self._append_to_entities(sql, attrs)
+
+    def _handle_source_relation_sql(self, cnx, sql, attrs):
+        self._append_to_entities(sql, attrs)
+
+    # add_info is _copypasted_ from the one in NativeSQLSource. We want it
+    # here because it will use the _handlers of the SQLGenSourceWrapper, which
+    # are not like the ones in the native source.
+    def add_info(self, cnx, entity, source, extid):
+        """add type and source info for an eid into the system table"""
+        # begin by inserting eid/type/source/extid into the entities table
+        if extid is not None:
+            assert isinstance(extid, str)
+            extid = b64encode(extid)
+        attrs = {'type': entity.cw_etype, 'eid': entity.eid, 'extid': extid,
+                 'asource': source.uri}
+        self._handle_insert_entity_sql(cnx, self.sqlgen.insert('entities', attrs), attrs)
+        # insert core relations: is, is_instance_of and cw_source
+        try:
+            self._handle_is_relation_sql(cnx, 'INSERT INTO is_relation(eid_from,eid_to) VALUES (%s,%s)',
+                                         (entity.eid, eschema_eid(cnx, entity.e_schema)))
+        except IndexError:
+            # during schema serialization, skip
+            pass
+        else:
+            for eschema in entity.e_schema.ancestors() + [entity.e_schema]:
+                self._handle_is_relation_sql(cnx,
+                                             'INSERT INTO is_instance_of_relation(eid_from,eid_to) VALUES (%s,%s)',
+                                             (entity.eid, eschema_eid(cnx, eschema)))
+        if 'CWSource' in self.schema and source.eid is not None: # else, cw < 3.10
+            self._handle_is_relation_sql(cnx, 'INSERT INTO cw_source_relation(eid_from,eid_to) VALUES (%s,%s)',
+                                         (entity.eid, source.eid))
+        # now we can update the full text index
+        if self.do_fti and self.need_fti_indexation(entity.cw_etype):
+            self.index_entity(cnx, entity=entity)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dataimport/test/unittest_dataimport.py	Mon Jun 22 14:27:37 2015 +0200
@@ -0,0 +1,168 @@
+# -*- coding: utf-8 -*-
+
+import datetime as DT
+from StringIO import StringIO
+
+from logilab.common.testlib import TestCase, unittest_main
+
+from cubicweb import dataimport
+from cubicweb.devtools.testlib import CubicWebTC
+
+
+class RQLObjectStoreTC(CubicWebTC):
+
+    def test_all(self):
+        with self.admin_access.repo_cnx() as cnx:
+            store = dataimport.RQLObjectStore(cnx)
+            group_eid = store.create_entity('CWGroup', name=u'grp').eid
+            user_eid = store.create_entity('CWUser', login=u'lgn', upassword=u'pwd').eid
+            store.relate(user_eid, 'in_group', group_eid)
+            cnx.commit()
+
+        with self.admin_access.repo_cnx() as cnx:
+            users = cnx.execute('CWUser X WHERE X login "lgn"')
+            self.assertEqual(1, len(users))
+            self.assertEqual(user_eid, users.one().eid)
+            groups = cnx.execute('CWGroup X WHERE U in_group X, U login "lgn"')
+            self.assertEqual(1, len(groups))
+            self.assertEqual(group_eid, groups.one().eid)
+
+
+class CreateCopyFromBufferTC(TestCase):
+
+    # test converters
+
+    def test_convert_none(self):
+        cnvt = dataimport._copyfrom_buffer_convert_None
+        self.assertEqual('NULL', cnvt(None))
+
+    def test_convert_number(self):
+        cnvt = dataimport._copyfrom_buffer_convert_number
+        self.assertEqual('42', cnvt(42))
+        self.assertEqual('42', cnvt(42L))
+        self.assertEqual('42.42', cnvt(42.42))
+
+    def test_convert_string(self):
+        cnvt = dataimport._copyfrom_buffer_convert_string
+        # simple
+        self.assertEqual('babar', cnvt('babar'))
+        # unicode
+        self.assertEqual('\xc3\xa9l\xc3\xa9phant', cnvt(u'éléphant'))
+        self.assertEqual('\xe9l\xe9phant', cnvt(u'éléphant', encoding='latin1'))
+        # escaping
+        self.assertEqual('babar\\tceleste\\n', cnvt('babar\tceleste\n'))
+        self.assertEqual(r'C:\\new\tC:\\test', cnvt('C:\\new\tC:\\test'))
+
+    def test_convert_date(self):
+        cnvt = dataimport._copyfrom_buffer_convert_date
+        self.assertEqual('0666-01-13', cnvt(DT.date(666, 1, 13)))
+
+    def test_convert_time(self):
+        cnvt = dataimport._copyfrom_buffer_convert_time
+        self.assertEqual('06:06:06.000100', cnvt(DT.time(6, 6, 6, 100)))
+
+    def test_convert_datetime(self):
+        cnvt = dataimport._copyfrom_buffer_convert_datetime
+        self.assertEqual('0666-06-13 06:06:06.000000', cnvt(DT.datetime(666, 6, 13, 6, 6, 6)))
+
+    # test buffer
+    def test_create_copyfrom_buffer_tuple(self):
+        cnvt = dataimport._create_copyfrom_buffer
+        data = ((42, 42L, 42.42, u'éléphant', DT.date(666, 1, 13), DT.time(6, 6, 6), DT.datetime(666, 6, 13, 6, 6, 6)),
+                (6, 6L, 6.6, u'babar', DT.date(2014, 1, 14), DT.time(4, 2, 1), DT.datetime(2014, 1, 1, 0, 0, 0)))
+        results = dataimport._create_copyfrom_buffer(data)
+        # all columns
+        expected = '''42\t42\t42.42\téléphant\t0666-01-13\t06:06:06.000000\t0666-06-13 06:06:06.000000
+6\t6\t6.6\tbabar\t2014-01-14\t04:02:01.000000\t2014-01-01 00:00:00.000000'''
+        self.assertMultiLineEqual(expected, results.getvalue())
+        # selected columns
+        results = dataimport._create_copyfrom_buffer(data, columns=(1, 3, 6))
+        expected = '''42\téléphant\t0666-06-13 06:06:06.000000
+6\tbabar\t2014-01-01 00:00:00.000000'''
+        self.assertMultiLineEqual(expected, results.getvalue())
+
+    def test_create_copyfrom_buffer_dict(self):
+        cnvt = dataimport._create_copyfrom_buffer
+        data = (dict(integer=42, double=42.42, text=u'éléphant', date=DT.datetime(666, 6, 13, 6, 6, 6)),
+                dict(integer=6, double=6.6, text=u'babar', date=DT.datetime(2014, 1, 1, 0, 0, 0)))
+        results = dataimport._create_copyfrom_buffer(data, ('integer', 'text'))
+        expected = '''42\téléphant\n6\tbabar'''
+        self.assertMultiLineEqual(expected, results.getvalue())
+
+
+class UcsvreaderTC(TestCase):
+
+    def test_empty_lines_skipped(self):
+        stream = StringIO('''a,b,c,d,
+1,2,3,4,
+,,,,
+,,,,
+''')
+        self.assertEqual([[u'a', u'b', u'c', u'd', u''],
+                          [u'1', u'2', u'3', u'4', u''],
+                          ],
+                         list(dataimport.ucsvreader(stream)))
+        stream.seek(0)
+        self.assertEqual([[u'a', u'b', u'c', u'd', u''],
+                          [u'1', u'2', u'3', u'4', u''],
+                          [u'', u'', u'', u'', u''],
+                          [u'', u'', u'', u'', u'']
+                          ],
+                         list(dataimport.ucsvreader(stream, skip_empty=False)))
+
+    def test_skip_first(self):
+        stream = StringIO('a,b,c,d,\n'
+                          '1,2,3,4,\n')
+        reader = dataimport.ucsvreader(stream, skipfirst=True,
+                                       ignore_errors=True)
+        self.assertEqual(list(reader),
+                         [[u'1', u'2', u'3', u'4', u'']])
+
+        stream.seek(0)
+        reader = dataimport.ucsvreader(stream, skipfirst=True,
+                                       ignore_errors=False)
+        self.assertEqual(list(reader),
+                         [[u'1', u'2', u'3', u'4', u'']])
+
+        stream.seek(0)
+        reader = dataimport.ucsvreader(stream, skipfirst=False,
+                                       ignore_errors=True)
+        self.assertEqual(list(reader),
+                         [[u'a', u'b', u'c', u'd', u''],
+                          [u'1', u'2', u'3', u'4', u'']])
+
+        stream.seek(0)
+        reader = dataimport.ucsvreader(stream, skipfirst=False,
+                                       ignore_errors=False)
+        self.assertEqual(list(reader),
+                         [[u'a', u'b', u'c', u'd', u''],
+                          [u'1', u'2', u'3', u'4', u'']])
+
+
+class MetaGeneratorTC(CubicWebTC):
+
+    def test_dont_generate_relation_to_internal_manager(self):
+        with self.admin_access.repo_cnx() as cnx:
+            metagen = dataimport.MetaGenerator(cnx)
+            self.assertIn('created_by', metagen.etype_rels)
+            self.assertIn('owned_by', metagen.etype_rels)
+        with self.repo.internal_cnx() as cnx:
+            metagen = dataimport.MetaGenerator(cnx)
+            self.assertNotIn('created_by', metagen.etype_rels)
+            self.assertNotIn('owned_by', metagen.etype_rels)
+
+    def test_dont_generate_specified_values(self):
+        with self.admin_access.repo_cnx() as cnx:
+            metagen = dataimport.MetaGenerator(cnx)
+            # hijack gen_modification_date to ensure we don't go through it
+            metagen.gen_modification_date = None
+            md = DT.datetime.now() - DT.timedelta(days=1)
+            entity, rels = metagen.base_etype_dicts('CWUser')
+            entity.cw_edited.update(dict(modification_date=md))
+            with cnx.ensure_cnx_set:
+                metagen.init_entity(entity)
+            self.assertEqual(entity.cw_edited['modification_date'], md)
+
+
+if __name__ == '__main__':
+    unittest_main()
--- a/dbapi.py	Mon Jun 22 12:51:28 2015 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,836 +0,0 @@
-# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-#
-# This file is part of CubicWeb.
-#
-# CubicWeb is free software: you can redistribute it and/or modify it under the
-# terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option)
-# any later version.
-#
-# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""DB-API 2.0 compliant module
-
-Take a look at http://www.python.org/peps/pep-0249.html
-
-(most parts of this document are reported here in docstrings)
-"""
-
-__docformat__ = "restructuredtext en"
-
-from threading import currentThread
-from logging import getLogger
-from time import time, clock
-from itertools import count
-from warnings import warn
-from os.path import join
-from uuid import uuid4
-from urlparse import  urlparse
-
-from logilab.common.logging_ext import set_log_methods
-from logilab.common.decorators import monkeypatch, cachedproperty
-from logilab.common.deprecation import deprecated
-
-from cubicweb import (ETYPE_NAME_MAP, AuthenticationError, ProgrammingError,
-                      cwvreg, cwconfig)
-from cubicweb.repoapi import get_repository
-from cubicweb.req import RequestSessionBase
-
-
-_MARKER = object()
-
-def _fake_property_value(self, name):
-    try:
-        return super(DBAPIRequest, self).property_value(name)
-    except KeyError:
-        return ''
-
-def fake(*args, **kwargs):
-    return None
-
-def multiple_connections_fix():
-    """some monkey patching necessary when an application has to deal with
-    several connections to different repositories. It tries to hide buggy class
-    attributes since classes are not designed to be shared among multiple
-    registries.
-    """
-    defaultcls = cwvreg.CWRegistryStore.REGISTRY_FACTORY[None]
-
-    etypescls = cwvreg.CWRegistryStore.REGISTRY_FACTORY['etypes']
-    orig_etype_class = etypescls.orig_etype_class = etypescls.etype_class
-    @monkeypatch(defaultcls)
-    def etype_class(self, etype):
-        """return an entity class for the given entity type.
-        Try to find out a specific class for this kind of entity or
-        default to a dump of the class registered for 'Any'
-        """
-        usercls = orig_etype_class(self, etype)
-        if etype == 'Any':
-            return usercls
-        usercls.e_schema = self.schema.eschema(etype)
-        return usercls
-
-def multiple_connections_unfix():
-    etypescls = cwvreg.CWRegistryStore.REGISTRY_FACTORY['etypes']
-    etypescls.etype_class = etypescls.orig_etype_class
-
-
-class ConnectionProperties(object):
-    def __init__(self, cnxtype=None, close=True, log=False):
-        if cnxtype is not None:
-            warn('[3.16] cnxtype argument is deprecated', DeprecationWarning,
-                 stacklevel=2)
-        self.cnxtype = cnxtype
-        self.log_queries = log
-        self.close_on_del = close
-
-
-@deprecated('[3.19] the dbapi is deprecated. Have a look at the new repoapi.')
-def _repo_connect(repo, login, **kwargs):
-    """Constructor to create a new connection to the given CubicWeb repository.
-
-    Returns a Connection instance.
-
-    Raises AuthenticationError if authentication failed
-    """
-    cnxid = repo.connect(unicode(login), **kwargs)
-    cnx = Connection(repo, cnxid, kwargs.get('cnxprops'))
-    if cnx.is_repo_in_memory:
-        cnx.vreg = repo.vreg
-    return cnx
-
-def connect(database, login=None,
-            cnxprops=None, setvreg=True, mulcnx=True, initlog=True, **kwargs):
-    """Constructor for creating a connection to the CubicWeb repository.
-    Returns a :class:`Connection` object.
-
-    Typical usage::
-
-      cnx = connect('myinstance', login='me', password='toto')
-
-    `database` may be:
-
-    * a simple instance id for in-memory connection
-
-    * a uri like scheme://host:port/instanceid where scheme may be one of
-      'pyro', 'inmemory' or 'zmqpickle'
-
-      * if scheme is 'pyro', <host:port> determine the name server address. If
-        not specified (e.g. 'pyro:///instanceid'), it will be detected through a
-        broadcast query. The instance id is the name of the instance in the name
-        server and may be prefixed by a group (e.g.
-        'pyro:///:cubicweb.instanceid')
-
-      * if scheme is handled by ZMQ (eg 'tcp'), you should not specify an
-        instance id
-
-    Other arguments:
-
-    :login:
-      the user login to use to authenticate.
-
-    :cnxprops:
-      a :class:`ConnectionProperties` instance, allowing to specify
-      the connection method (eg in memory or pyro). A Pyro connection will be
-      established if you don't specify that argument.
-
-    :setvreg:
-      flag telling if a registry should be initialized for the connection.
-      Don't change this unless you know what you're doing.
-
-    :mulcnx:
-      Will disappear at some point. Try to deal with connections to differents
-      instances in the same process unless specified otherwise by setting this
-      flag to False. Don't change this unless you know what you're doing.
-
-    :initlog:
-      flag telling if logging should be initialized. You usually don't want
-      logging initialization when establishing the connection from a process
-      where it's already initialized.
-
-    :kwargs:
-      there goes authentication tokens. You usually have to specify a password
-      for the given user, using a named 'password' argument.
-    """
-    if not urlparse(database).scheme:
-        warn('[3.16] give an qualified URI as database instead of using '
-             'host/cnxprops to specify the connection method',
-             DeprecationWarning, stacklevel=2)
-        if cnxprops and cnxprops.cnxtype == 'zmq':
-            database = kwargs.pop('host')
-        elif cnxprops and cnxprops.cnxtype == 'inmemory':
-            database = 'inmemory://' + database
-        else:
-            host = kwargs.pop('host', None)
-            if host is None:
-                host = ''
-            group = kwargs.pop('group', None)
-            if group is None:
-                group = 'cubicweb'
-            database = 'pyro://%s/%s.%s' % (host, group, database)
-    puri = urlparse(database)
-    method = puri.scheme.lower()
-    if method == 'inmemory':
-        config = cwconfig.instance_configuration(puri.netloc)
-    else:
-        config = cwconfig.CubicWebNoAppConfiguration()
-    repo = get_repository(database, config=config)
-    if method == 'inmemory':
-        vreg = repo.vreg
-    elif setvreg:
-        if mulcnx:
-            multiple_connections_fix()
-        vreg = cwvreg.CWRegistryStore(config, initlog=initlog)
-        schema = repo.get_schema()
-        for oldetype, newetype in ETYPE_NAME_MAP.items():
-            if oldetype in schema:
-                print 'aliasing', newetype, 'to', oldetype
-                schema._entities[newetype] = schema._entities[oldetype]
-        vreg.set_schema(schema)
-    else:
-        vreg = None
-    cnx = _repo_connect(repo, login, cnxprops=cnxprops, **kwargs)
-    cnx.vreg = vreg
-    return cnx
-
-def in_memory_repo(config):
-    """Return and in_memory Repository object from a config (or vreg)"""
-    if isinstance(config, cwvreg.CWRegistryStore):
-        vreg = config
-        config = None
-    else:
-        vreg = None
-    # get local access to the repository
-    return get_repository('inmemory://', config=config, vreg=vreg)
-
-def in_memory_repo_cnx(config, login, **kwargs):
-    """useful method for testing and scripting to get a dbapi.Connection
-    object connected to an in-memory repository instance
-    """
-    # connection to the CubicWeb repository
-    repo = in_memory_repo(config)
-    return repo, _repo_connect(repo, login, **kwargs)
-
-# XXX web only method, move to webconfig?
-def anonymous_session(vreg):
-    """return a new anonymous session
-
-    raises an AuthenticationError if anonymous usage is not allowed
-    """
-    anoninfo = vreg.config.anonymous_user()
-    if anoninfo[0] is None: # no anonymous user
-        raise AuthenticationError('anonymous access is not authorized')
-    anon_login, anon_password = anoninfo
-    # use vreg's repository cache
-    repo = vreg.config.repository(vreg)
-    anon_cnx = _repo_connect(repo, anon_login, password=anon_password)
-    anon_cnx.vreg = vreg
-    return DBAPISession(anon_cnx, anon_login)
-
-
-class _NeedAuthAccessMock(object):
-    def __getattribute__(self, attr):
-        raise AuthenticationError()
-    def __nonzero__(self):
-        return False
-
-class DBAPISession(object):
-    def __init__(self, cnx, login=None):
-        self.cnx = cnx
-        self.data = {}
-        self.login = login
-        # dbapi session identifier is the same as the first connection
-        # identifier, but may later differ in case of auto-reconnection as done
-        # by the web authentication manager (in cw.web.views.authentication)
-        if cnx is not None:
-            self.sessionid = cnx.sessionid
-        else:
-            self.sessionid = uuid4().hex
-
-    @property
-    def anonymous_session(self):
-        return not self.cnx or self.cnx.anonymous_connection
-
-    def __repr__(self):
-        return '<DBAPISession %r>' % self.sessionid
-
-
-class DBAPIRequest(RequestSessionBase):
-    #: Request language identifier eg: 'en'
-    lang = None
-
-    def __init__(self, vreg, session=None):
-        super(DBAPIRequest, self).__init__(vreg)
-        #: 'language' => translation_function() mapping
-        try:
-            # no vreg or config which doesn't handle translations
-            self.translations = vreg.config.translations
-        except AttributeError:
-            self.translations = {}
-        #: cache entities built during the request
-        self._eid_cache = {}
-        if session is not None:
-            self.set_session(session)
-        else:
-            # these args are initialized after a connection is
-            # established
-            self.session = DBAPISession(None)
-            self.cnx = self.user = _NeedAuthAccessMock()
-        self.set_default_language(vreg)
-
-    def get_option_value(self, option, foreid=None):
-        if foreid is not None:
-            warn('[3.19] foreid argument is deprecated', DeprecationWarning,
-                 stacklevel=2)
-        return self.cnx.get_option_value(option)
-
-    def set_session(self, session):
-        """method called by the session handler when the user is authenticated
-        or an anonymous connection is open
-        """
-        self.session = session
-        if session.cnx:
-            self.cnx = session.cnx
-            self.execute = session.cnx.cursor(self).execute
-            self.user = self.cnx.user(self)
-            self.set_entity_cache(self.user)
-
-    def execute(self, *args, **kwargs): # pylint: disable=E0202
-        """overriden when session is set. By default raise authentication error
-        so authentication is requested.
-        """
-        raise AuthenticationError()
-
-    def set_default_language(self, vreg):
-        try:
-            lang = vreg.property_value('ui.language')
-        except Exception: # property may not be registered
-            lang = 'en'
-        try:
-            self.set_language(lang)
-        except KeyError:
-            # this occurs usually during test execution
-            self._ = self.__ = unicode
-            self.pgettext = lambda x, y: unicode(y)
-
-    # server-side service call #################################################
-
-    def call_service(self, regid, **kwargs):
-        return self.cnx.call_service(regid, **kwargs)
-
-    # entities cache management ###############################################
-
-    def entity_cache(self, eid):
-        return self._eid_cache[eid]
-
-    def set_entity_cache(self, entity):
-        self._eid_cache[entity.eid] = entity
-
-    def cached_entities(self):
-        return self._eid_cache.values()
-
-    def drop_entity_cache(self, eid=None):
-        if eid is None:
-            self._eid_cache = {}
-        else:
-            del self._eid_cache[eid]
-
-    # low level session data management #######################################
-
-    @deprecated('[3.19] use session or transaction data')
-    def get_shared_data(self, key, default=None, pop=False, txdata=False):
-        """see :meth:`Connection.get_shared_data`"""
-        return self.cnx.get_shared_data(key, default, pop, txdata)
-
-    @deprecated('[3.19] use session or transaction data')
-    def set_shared_data(self, key, value, txdata=False, querydata=None):
-        """see :meth:`Connection.set_shared_data`"""
-        if querydata is not None:
-            txdata = querydata
-            warn('[3.10] querydata argument has been renamed to txdata',
-                 DeprecationWarning, stacklevel=2)
-        return self.cnx.set_shared_data(key, value, txdata)
-
-    # server session compat layer #############################################
-
-    def entity_metas(self, eid):
-        """return a tuple (type, sourceuri, extid) for the entity with id <eid>"""
-        return self.cnx.entity_metas(eid)
-
-    def source_defs(self):
-        """return the definition of sources used by the repository."""
-        return self.cnx.source_defs()
-
-    @deprecated('[3.19] use .entity_metas(eid) instead')
-    def describe(self, eid, asdict=False):
-        """return a tuple (type, sourceuri, extid) for the entity with id <eid>"""
-        return self.cnx.describe(eid, asdict)
-
-    # these are overridden by set_log_methods below
-    # only defining here to prevent pylint from complaining
-    info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None
-
-set_log_methods(DBAPIRequest, getLogger('cubicweb.dbapi'))
-
-
-
-# cursor / connection objects ##################################################
-
-class Cursor(object):
-    """These objects represent a database cursor, which is used to manage the
-    context of a fetch operation. Cursors created from the same connection are
-    not isolated, i.e., any changes done to the database by a cursor are
-    immediately visible by the other cursors. Cursors created from different
-    connections are isolated.
-    """
-
-    def __init__(self, connection, repo, req=None):
-        """This read-only attribute return a reference to the Connection
-        object on which the cursor was created.
-        """
-        self.connection = connection
-        """optionnal issuing request instance"""
-        self.req = req
-        self._repo = repo
-        self._sessid = connection.sessionid
-
-    def close(self):
-        """no effect"""
-        pass
-
-    def _txid(self):
-        return self.connection._txid(self)
-
-    def execute(self, rql, args=None, build_descr=True):
-        """execute a rql query, return resulting rows and their description in
-        a :class:`~cubicweb.rset.ResultSet` object
-
-        * `rql` should be a Unicode string or a plain ASCII string, containing
-          the rql query
-
-        * `args` the optional args dictionary associated to the query, with key
-          matching named substitution in `rql`
-
-        * `build_descr` is a boolean flag indicating if the description should
-          be built on select queries (if false, the description will be en empty
-          list)
-
-        on INSERT queries, there will be one row for each inserted entity,
-        containing its eid
-
-        on SET queries, XXX describe
-
-        DELETE queries return no result.
-
-        .. Note::
-          to maximize the rql parsing/analyzing cache performance, you should
-          always use substitute arguments in queries, i.e. avoid queries such as::
-
-            execute('Any X WHERE X eid 123')
-
-          use::
-
-            execute('Any X WHERE X eid %(x)s', {'x': 123})
-        """
-        rset = self._repo.execute(self._sessid, rql, args,
-                                  build_descr=build_descr, **self._txid())
-        rset.req = self.req
-        return rset
-
-
-class LogCursor(Cursor):
-    """override the standard cursor to log executed queries"""
-
-    def execute(self, operation, parameters=None, build_descr=True):
-        """override the standard cursor to log executed queries"""
-        tstart, cstart = time(), clock()
-        rset = Cursor.execute(self, operation, parameters, build_descr=build_descr)
-        self.connection.executed_queries.append((operation, parameters,
-                                                 time() - tstart, clock() - cstart))
-        return rset
-
-def check_not_closed(func):
-    def decorator(self, *args, **kwargs):
-        if self._closed is not None:
-            raise ProgrammingError('Closed connection %s' % self.sessionid)
-        return func(self, *args, **kwargs)
-    return decorator
-
-class Connection(object):
-    """DB-API 2.0 compatible Connection object for CubicWeb
-    """
-    # make exceptions available through the connection object
-    ProgrammingError = ProgrammingError
-    # attributes that may be overridden per connection instance
-    cursor_class = Cursor
-    vreg = None
-    _closed = None
-
-    def __init__(self, repo, cnxid, cnxprops=None):
-        self._repo = repo
-        self.sessionid = cnxid
-        self._close_on_del = getattr(cnxprops, 'close_on_del', True)
-        self._web_request = False
-        if cnxprops and cnxprops.log_queries:
-            self.executed_queries = []
-            self.cursor_class = LogCursor
-
-    @property
-    def is_repo_in_memory(self):
-        """return True if this is a local, aka in-memory, connection to the
-        repository
-        """
-        try:
-            from cubicweb.server.repository import Repository
-        except ImportError:
-            # code not available, no way
-            return False
-        return isinstance(self._repo, Repository)
-
-    @property # could be a cached property but we want to prevent assignment to
-              # catch potential programming errors.
-    def anonymous_connection(self):
-        login = self._repo.user_info(self.sessionid)[1]
-        anon_login = self.vreg.config.get('anonymous-user')
-        return login == anon_login
-
-    def __repr__(self):
-        if self.anonymous_connection:
-            return '<Connection %s (anonymous)>' % self.sessionid
-        return '<Connection %s>' % self.sessionid
-
-    def __enter__(self):
-        return self.cursor()
-
-    def __exit__(self, exc_type, exc_val, exc_tb):
-        if exc_type is None:
-            self.commit()
-        else:
-            self.rollback()
-            return False #propagate the exception
-
-    def __del__(self):
-        """close the remote connection if necessary"""
-        if self._closed is None and self._close_on_del:
-            try:
-                self.close()
-            except Exception:
-                pass
-
-    # server-side service call #################################################
-
-    @check_not_closed
-    def call_service(self, regid, **kwargs):
-        return self._repo.call_service(self.sessionid, regid, **kwargs)
-
-    # connection initialization methods ########################################
-
-    def load_appobjects(self, cubes=_MARKER, subpath=None, expand=True):
-        config = self.vreg.config
-        if cubes is _MARKER:
-            cubes = self._repo.get_cubes()
-        elif cubes is None:
-            cubes = ()
-        else:
-            if not isinstance(cubes, (list, tuple)):
-                cubes = (cubes,)
-            if expand:
-                cubes = config.expand_cubes(cubes)
-        if subpath is None:
-            subpath = esubpath = ('entities', 'views')
-        else:
-            esubpath = subpath
-        if 'views' in subpath:
-            esubpath = list(subpath)
-            esubpath.remove('views')
-            esubpath.append(join('web', 'views'))
-        # first load available configs, necessary for proper persistent
-        # properties initialization
-        config.load_available_configs()
-        # then init cubes
-        config.init_cubes(cubes)
-        # then load appobjects into the registry
-        vpath = config.build_appobjects_path(reversed(config.cubes_path()),
-                                             evobjpath=esubpath,
-                                             tvobjpath=subpath)
-        self.vreg.register_objects(vpath)
-
-    def use_web_compatible_requests(self, baseurl, sitetitle=None):
-        """monkey patch DBAPIRequest to fake a cw.web.request, so you should
-        able to call html views using rset from a simple dbapi connection.
-
-        You should call `load_appobjects` at some point to register those views.
-        """
-        DBAPIRequest.property_value = _fake_property_value
-        DBAPIRequest.next_tabindex = count().next
-        DBAPIRequest.relative_path = fake
-        DBAPIRequest.url = fake
-        DBAPIRequest.get_page_data = fake
-        DBAPIRequest.set_page_data = fake
-        # XXX could ask the repo for its base-url configuration
-        self.vreg.config.set_option('base-url', baseurl)
-        self.vreg.config.uiprops = {}
-        self.vreg.config.datadir_url = baseurl + '/data'
-        # XXX why is this needed? if really needed, could be fetched by a query
-        if sitetitle is not None:
-            self.vreg['propertydefs']['ui.site-title'] = {'default': sitetitle}
-        self._web_request = True
-
-    def request(self):
-        if self._web_request:
-            from cubicweb.web.request import DBAPICubicWebRequestBase
-            req = DBAPICubicWebRequestBase(self.vreg, False)
-            req.get_header = lambda x, default=None: default
-            req.set_session = lambda session: DBAPIRequest.set_session(
-                req, session)
-            req.relative_path = lambda includeparams=True: ''
-        else:
-            req = DBAPIRequest(self.vreg)
-        req.set_session(DBAPISession(self))
-        return req
-
-    @check_not_closed
-    def user(self, req=None, props=None):
-        """return the User object associated to this connection"""
-        # cnx validity is checked by the call to .user_info
-        eid, login, groups, properties = self._repo.user_info(self.sessionid,
-                                                              props)
-        if req is None:
-            req = self.request()
-        rset = req.eid_rset(eid, 'CWUser')
-        if self.vreg is not None and 'etypes' in self.vreg:
-            user = self.vreg['etypes'].etype_class('CWUser')(
-                req, rset, row=0, groups=groups, properties=properties)
-        else:
-            from cubicweb.entity import Entity
-            user = Entity(req, rset, row=0)
-        user.cw_attr_cache['login'] = login # cache login
-        return user
-
-    @check_not_closed
-    def check(self):
-        """raise `BadConnectionId` if the connection is no more valid, else
-        return its latest activity timestamp.
-        """
-        return self._repo.check_session(self.sessionid)
-
-    def _txid(self, cursor=None): # pylint: disable=E0202
-        # XXX could now handle various isolation levels!
-        # return a dict as bw compat trick
-        return {'txid': currentThread().getName()}
-
-    # session data methods #####################################################
-
-    @check_not_closed
-    def get_shared_data(self, key, default=None, pop=False, txdata=False):
-        """return value associated to key in the session's data dictionary or
-        session's transaction's data if `txdata` is true.
-
-        If pop is True, value will be removed from the dictionary.
-
-        If key isn't defined in the dictionary, value specified by the
-        `default` argument will be returned.
-        """
-        return self._repo.get_shared_data(self.sessionid, key, default, pop, txdata)
-
-    @check_not_closed
-    def set_shared_data(self, key, value, txdata=False):
-        """set value associated to `key` in shared data
-
-        if `txdata` is true, the value will be added to the repository
-        session's query data which are cleared on commit/rollback of the current
-        transaction.
-        """
-        return self._repo.set_shared_data(self.sessionid, key, value, txdata)
-
-    # meta-data accessors ######################################################
-
-    @check_not_closed
-    def source_defs(self):
-        """Return the definition of sources used by the repository."""
-        return self._repo.source_defs()
-
-    @check_not_closed
-    def get_schema(self):
-        """Return the schema currently used by the repository."""
-        return self._repo.get_schema()
-
-    @check_not_closed
-    def get_option_value(self, option, foreid=None):
-        """Return the value for `option` in the configuration.
-
-        `foreid` argument is deprecated and now useless (as of 3.19).
-        """
-        if foreid is not None:
-            warn('[3.19] foreid argument is deprecated', DeprecationWarning,
-                 stacklevel=2)
-        return self._repo.get_option_value(option)
-
-
-    @check_not_closed
-    def entity_metas(self, eid):
-        """return a tuple (type, sourceuri, extid) for the entity with id <eid>"""
-        try:
-            return self._repo.entity_metas(self.sessionid, eid, **self._txid())
-        except AttributeError:
-            # talking to pre 3.19 repository
-            metas = self._repo.describe(self.sessionid, eid, **self._txid())
-            if len(metas) == 3: # even older backward compat
-                metas = list(metas)
-                metas.append(metas[1])
-            return dict(zip(('type', 'source', 'extid', 'asource'), metas))
-
-
-    @deprecated('[3.19] use .entity_metas(eid) instead')
-    @check_not_closed
-    def describe(self, eid, asdict=False):
-        try:
-            metas = self._repo.entity_metas(self.sessionid, eid, **self._txid())
-        except AttributeError:
-            metas = self._repo.describe(self.sessionid, eid, **self._txid())
-            # talking to pre 3.19 repository
-            if len(metas) == 3: # even older backward compat
-                metas = list(metas)
-                metas.append(metas[1])
-            if asdict:
-                return dict(zip(('type', 'source', 'extid', 'asource'), metas))
-            return metas[:-1]
-        if asdict:
-            metas['asource'] = meta['source'] # XXX pre 3.19 client compat
-            return metas
-        return metas['type'], metas['source'], metas['extid']
-
-
-    # db-api like interface ####################################################
-
-    @check_not_closed
-    def commit(self):
-        """Commit pending transaction for this connection to the repository.
-
-        may raise `Unauthorized` or `ValidationError` if we attempted to do
-        something we're not allowed to for security or integrity reasons.
-
-        If the transaction is undoable, a transaction id will be returned.
-        """
-        return self._repo.commit(self.sessionid, **self._txid())
-
-    @check_not_closed
-    def rollback(self):
-        """This method is optional since not all databases provide transaction
-        support.
-
-        In case a database does provide transactions, this method causes the
-        database to roll back to the start of any pending transaction.  Closing
-        a connection without committing the changes first will cause an implicit
-        rollback to be performed.
-        """
-        self._repo.rollback(self.sessionid, **self._txid())
-
-    @check_not_closed
-    def cursor(self, req=None):
-        """Return a new Cursor Object using the connection.
-
-        On a pyro connection, you should get the cursor after calling the
-        load_appobjects method if desired (which you should call if you intend
-        to use ORM abilities).
-        """
-        if req is None:
-            req = self.request()
-        return self.cursor_class(self, self._repo, req=req)
-
-    @check_not_closed
-    def close(self):
-        """Close the connection now (rather than whenever __del__ is called).
-
-        The connection will be unusable from this point forward; an Error (or
-        subclass) exception will be raised if any operation is attempted with
-        the connection. The same applies to all cursor objects trying to use the
-        connection.  Note that closing a connection without committing the
-        changes first will cause an implicit rollback to be performed.
-        """
-        self._repo.close(self.sessionid, **self._txid())
-        del self._repo # necessary for proper garbage collection
-        self._closed = 1
-
-    # undo support ############################################################
-
-    @check_not_closed
-    def undoable_transactions(self, ueid=None, req=None, **actionfilters):
-        """Return a list of undoable transaction objects by the connection's
-        user, ordered by descending transaction time.
-
-        Managers may filter according to user (eid) who has done the transaction
-        using the `ueid` argument. Others will only see their own transactions.
-
-        Additional filtering capabilities are provided by using the following
-        named arguments:
-
-        * `etype` to get only transactions creating/updating/deleting entities
-          of the given type
-
-        * `eid` to get only transactions applied to entity of the given eid
-
-        * `action` to get only transactions doing the given action (action in
-          'C', 'U', 'D', 'A', 'R'). If `etype`, action can only be 'C', 'U' or
-          'D'.
-
-        * `public`: when additional filtering is provided, they are by default
-          only searched in 'public' actions, unless a `public` argument is given
-          and set to false.
-        """
-        actionfilters.update(self._txid())
-        txinfos = self._repo.undoable_transactions(self.sessionid, ueid,
-                                                   **actionfilters)
-        if req is None:
-            req = self.request()
-        for txinfo in txinfos:
-            txinfo.req = req
-        return txinfos
-
-    @check_not_closed
-    def transaction_info(self, txuuid, req=None):
-        """Return transaction object for the given uid.
-
-        raise `NoSuchTransaction` if not found or if session's user is not
-        allowed (eg not in managers group and the transaction doesn't belong to
-        him).
-        """
-        txinfo = self._repo.transaction_info(self.sessionid, txuuid,
-                                             **self._txid())
-        if req is None:
-            req = self.request()
-        txinfo.req = req
-        return txinfo
-
-    @check_not_closed
-    def transaction_actions(self, txuuid, public=True):
-        """Return an ordered list of action effectued during that transaction.
-
-        If public is true, return only 'public' actions, eg not ones triggered
-        under the cover by hooks, else return all actions.
-
-        raise `NoSuchTransaction` if the transaction is not found or if
-        session's user is not allowed (eg not in managers group and the
-        transaction doesn't belong to him).
-        """
-        return self._repo.transaction_actions(self.sessionid, txuuid, public,
-                                              **self._txid())
-
-    @check_not_closed
-    def undo_transaction(self, txuuid):
-        """Undo the given transaction. Return potential restoration errors.
-
-        raise `NoSuchTransaction` if not found or if session's user is not
-        allowed (eg not in managers group and the transaction doesn't belong to
-        him).
-        """
-        return self._repo.undo_transaction(self.sessionid, txuuid,
-                                           **self._txid())
-
-in_memory_cnx = deprecated('[3.16] use _repo_connect instead')(_repo_connect)
--- a/debian/control	Mon Jun 22 12:51:28 2015 +0200
+++ b/debian/control	Mon Jun 22 14:27:37 2015 +0200
@@ -58,7 +58,6 @@
  | python-pysqlite2,
  python-passlib
 Recommends:
- pyro (<< 4.0.0),
  cubicweb-documentation (= ${source:Version})
 Suggests:
  python-zmq
@@ -109,7 +108,6 @@
  cubicweb-ctl (= ${source:Version}),
  python-twisted-web
 Recommends:
- pyro (<< 4.0.0),
  cubicweb-documentation (= ${source:Version})
 Description: twisted-based web interface for the CubicWeb framework
  CubicWeb is a semantic web application framework.
@@ -137,6 +135,7 @@
 Breaks:
  cubicweb-inlinedit (<< 1.1.1),
  cubicweb-bootstrap (<< 0.6.6),
+ cubicweb-folder (<< 1.10.0),
 Description: web interface library for the CubicWeb framework
  CubicWeb is a semantic web application framework.
  .
--- a/debian/cubicweb-ctl.cubicweb.init	Mon Jun 22 12:51:28 2015 +0200
+++ b/debian/cubicweb-ctl.cubicweb.init	Mon Jun 22 14:27:37 2015 +0200
@@ -4,16 +4,14 @@
 # Provides:          cubicweb
 # Required-Start:    $remote_fs $syslog $local_fs $network
 # Required-Stop:     $remote_fs $syslog $local_fs $network
-# Should-Start:      postgresql pyro-nsd
-# Should-Stop:       postgresql pyro-nsd
+# Should-Start:      postgresql
+# Should-Stop:       postgresql
 # Default-Start:     2 3 4 5
 # Default-Stop:      0 1 6
 # Short-Description: Start cubicweb application at boot time
 ### END INIT INFO
 
 # FIXME Seems to be inadequate here
-# FIXME If related to pyro, try instead:
-# export PYRO_STORAGE="/tmp"
 cd /tmp
 
 # FIXME Work-around about the following lintian error
--- a/devtools/__init__.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/devtools/__init__.py	Mon Jun 22 14:27:37 2015 +0200
@@ -237,10 +237,6 @@
     def available_languages(self, *args):
         return self.cw_languages()
 
-    def pyro_enabled(self):
-        # but export PYRO_MULTITHREAD=0 or you get problems with sqlite and
-        # threads
-        return True
 
 # XXX merge with BaseApptestConfiguration ?
 class ApptestConfiguration(BaseApptestConfiguration):
@@ -251,7 +247,7 @@
     skip_db_create_and_restore = False
 
     def __init__(self, appid, apphome=None,
-                 log_threshold=logging.CRITICAL, sourcefile=None):
+                 log_threshold=logging.WARNING, sourcefile=None):
         BaseApptestConfiguration.__init__(self, appid, apphome,
                                           log_threshold=log_threshold)
         self.init_repository = sourcefile is None
@@ -398,9 +394,9 @@
 
     def _new_repo(self, config):
         """Factory method to create a new Repository Instance"""
-        from cubicweb.dbapi import in_memory_repo
+        from cubicweb.repoapi import _get_inmemory_repo
         config._cubes = None
-        repo = in_memory_repo(config)
+        repo = _get_inmemory_repo(config)
         # extending Repository class
         repo._has_started = False
         repo._needs_refresh = False
@@ -498,7 +494,7 @@
             repo = self.get_repo(startup=True)
             cnx = self.get_cnx()
             with cnx:
-                pre_setup_func(cnx._cnx, self.config)
+                pre_setup_func(cnx, self.config)
                 cnx.commit()
         self.backup_database(test_db_id)
 
--- a/devtools/devctl.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/devtools/devctl.py	Mon Jun 22 14:27:37 2015 +0200
@@ -77,6 +77,8 @@
         pass
     def default_log_file(self):
         return None
+    def default_stats_file(self):
+        return None
 
 
 def cleanup_sys_modules(config):
@@ -580,8 +582,8 @@
 # FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
 # details.
 #
-# You should have received a copy of the GNU Lesser General Public License along
-# with this program. If not, see <http://www.gnu.org/licenses/>.
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
 ''',
 
         'GPL': '''\
@@ -592,7 +594,8 @@
 #
 # This program is distributed in the hope that it will be useful, but WITHOUT
 # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
+# details.
 #
 # You should have received a copy of the GNU General Public License along with
 # this program. If not, see <http://www.gnu.org/licenses/>.
--- a/devtools/fake.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/devtools/fake.py	Mon Jun 22 14:27:37 2015 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -20,6 +20,8 @@
 
 __docformat__ = "restructuredtext en"
 
+from contextlib import contextmanager
+
 from logilab.database import get_db_helper
 
 from cubicweb.req import RequestSessionBase
@@ -159,6 +161,10 @@
     # for use with enabled_security context manager
     read_security = write_security = True
 
+    @contextmanager
+    def running_hooks_ops(self):
+        yield
+
 class FakeRepo(object):
     querier = None
     def __init__(self, schema, vreg=None, config=None):
@@ -173,7 +179,7 @@
     def internal_session(self):
         return FakeSession(self)
 
-    def extid2eid(self, source, extid, etype, session, insert=True):
+    def extid2eid(self, source, extid, etype, cnx, insert=True):
         try:
             return self.extids[extid]
         except KeyError:
@@ -181,10 +187,10 @@
                 return None
             self._count += 1
             eid = self._count
-            entity = source.before_entity_insertion(session, extid, etype, eid)
+            entity = source.before_entity_insertion(cnx, extid, etype, eid)
             self.extids[extid] = eid
             self.eids[eid] = extid
-            source.after_entity_insertion(session, extid, entity)
+            source.after_entity_insertion(cnx, extid, entity)
             return eid
 
 
--- a/devtools/httptest.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/devtools/httptest.py	Mon Jun 22 14:27:37 2015 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -78,8 +78,6 @@
         self.global_set_option('port', port) # force rewrite here
         return 'http://127.0.0.1:%d/' % self['port']
 
-    def pyro_enabled(self):
-        return False
 
 
 class CubicWebServerTC(CubicWebTC):
@@ -139,7 +137,6 @@
             passwd = self.admpassword
         if passwd is None:
             passwd = user
-        self.login(user)
         response = self.web_get("login?__login=%s&__password=%s" %
                                 (user, passwd))
         assert response.status == httplib.SEE_OTHER, response.status
--- a/devtools/repotest.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/devtools/repotest.py	Mon Jun 22 14:27:37 2015 +0200
@@ -259,12 +259,11 @@
 
     def qexecute(self, rql, args=None, build_descr=True):
         with self.session.new_cnx() as cnx:
-            with cnx.ensure_cnx_set:
-                try:
-                    return self.o.execute(cnx, rql, args, build_descr)
-                finally:
-                    if rql.startswith(('INSERT', 'DELETE', 'SET')):
-                        cnx.commit()
+            try:
+                return self.o.execute(cnx, rql, args, build_descr)
+            finally:
+                if rql.startswith(('INSERT', 'DELETE', 'SET')):
+                    cnx.commit()
 
 
 class BasePlannerTC(BaseQuerierTC):
--- a/devtools/test/data/cubes/i18ntestcube/views.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/devtools/test/data/cubes/i18ntestcube/views.py	Mon Jun 22 14:27:37 2015 +0200
@@ -26,9 +26,6 @@
 
 _myafs = MyAFS()
 
-# XXX useless ASA logilab.common.registry is fixed
-_myafs.__module__ = "cubes.i18ntestcube.views"
-
 _myafs.tag_object_of(('*', 'in_forum', 'Forum'), 'main', 'inlined')
 
 afs.tag_object_of(('*', 'in_forum', 'Forum'), 'main', 'inlined')
--- a/devtools/testlib.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/devtools/testlib.py	Mon Jun 22 14:27:37 2015 +0200
@@ -156,30 +156,6 @@
 cwconfig.SMTP = MockSMTP
 
 
-class TestCaseConnectionProxy(object):
-    """thin wrapper around `cubicweb.repoapi.ClientConnection` context-manager
-    used in CubicWebTC (cf. `cubicweb.devtools.testlib.CubicWebTC.login` method)
-
-    It just proxies to the default connection context manager but
-    restores the original connection on exit.
-    """
-    def __init__(self, testcase, cnx):
-        self.testcase = testcase
-        self.cnx = cnx
-
-    def __getattr__(self, attrname):
-        return getattr(self.cnx, attrname)
-
-    def __enter__(self):
-        # already open
-        return self.cnx
-
-    def __exit__(self, exctype, exc, tb):
-        try:
-            return self.cnx.__exit__(exctype, exc, tb)
-        finally:
-            self.testcase.restore_connection()
-
 # Repoaccess utility ###############################################3###########
 
 class RepoAccess(object):
@@ -189,8 +165,7 @@
 
     A repo access can create the following types of objects:
 
-    .. automethod:: cubicweb.testlib.RepoAccess.repo_cnx
-    .. automethod:: cubicweb.testlib.RepoAccess.client_cnx
+    .. automethod:: cubicweb.testlib.RepoAccess.cnx
     .. automethod:: cubicweb.testlib.RepoAccess.web_request
 
     The RepoAccess needs to be closed to destroy the associated Session.
@@ -225,16 +200,13 @@
         return session
 
     @contextmanager
-    def repo_cnx(self):
+    def cnx(self):
         """Context manager returning a server side connection for the user"""
         with self._session.new_cnx() as cnx:
             yield cnx
 
-    @contextmanager
-    def client_cnx(self):
-        """Context manager returning a client side connection for the user"""
-        with repoapi.ClientConnection(self._session) as cnx:
-            yield cnx
+    # aliases for bw compat
+    client_cnx = repo_cnx = cnx
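+    # Typical use in a test, as an illustrative sketch (assumes the
+    # ``admin_access`` attribute set up by CubicWebTC._init_repo below):
+    #
+    #   with self.admin_access.cnx() as cnx:
+    #       rset = cnx.execute('Any X WHERE X is CWUser')
+    #       cnx.commit()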
 
     @contextmanager
     def web_request(self, url=None, headers={}, method='GET', **kwargs):
@@ -247,9 +219,10 @@
         """
         req = self.requestcls(self._repo.vreg, url=url, headers=headers,
                               method=method, form=kwargs)
-        clt_cnx = repoapi.ClientConnection(self._session)
-        req.set_cnx(clt_cnx)
-        with clt_cnx:
+        with self._session.new_cnx() as cnx:
+            if 'ecache' in cnx.transaction_data:
+                del cnx.transaction_data['ecache']
+            req.set_cnx(cnx)
             yield req
 
     def close(self):
@@ -261,7 +234,7 @@
     @contextmanager
     def shell(self):
         from cubicweb.server.migractions import ServerMigrationHelper
-        with repoapi.ClientConnection(self._session) as cnx:
+        with self._session.new_cnx() as cnx:
             mih = ServerMigrationHelper(None, repo=self._repo, cnx=cnx,
                                         interactive=False,
                                         # hack so it don't try to load fs schema
@@ -294,17 +267,12 @@
     requestcls = fake.FakeRequest
     tags = TestCase.tags | Tags('cubicweb', 'cw_repo')
     test_db_id = DEFAULT_EMPTY_DB_ID
-    _cnxs = set() # establised connection
-                  # stay on connection for leak detection purpose
 
     # anonymous is logged by default in cubicweb test cases
     anonymous_allowed = True
 
     def __init__(self, *args, **kwargs):
         self._admin_session = None
-        self._admin_clt_cnx = None
-        self._current_session = None
-        self._current_clt_cnx = None
         self.repo = None
         self._open_access = set()
         super(CubicWebTC, self).__init__(*args, **kwargs)
@@ -315,6 +283,7 @@
         """provide a new RepoAccess object for a given user
 
         The access is automatically closed at the end of the test."""
+        login = unicode(login)
         access = RepoAccess(self.repo, login, self.requestcls)
         self._open_access.add(access)
         return access
@@ -326,92 +295,11 @@
             except BadConnectionId:
                 continue # already closed
 
-    @deprecated('[3.19] explicitly use RepoAccess object in test instead')
-    def set_cnx(self, cnx):
-        assert getattr(cnx, '_session', None) is not None
-        if cnx is self._admin_clt_cnx:
-            self._pop_custom_cnx()
-        else:
-            self._cnxs.add(cnx) # register the cnx to make sure it is removed
-            self._current_session = cnx._session
-            self._current_clt_cnx = cnx
-
     @property
-    @deprecated('[3.19] explicitly use RepoAccess object in test instead')
-    def cnx(self):
-        # XXX we want to deprecate this
-        clt_cnx = self._current_clt_cnx
-        if clt_cnx is None:
-            clt_cnx = self._admin_clt_cnx
-        return clt_cnx
-
-    def _close_cnx(self):
-        """ensure that all cnx used by a test have been closed"""
-        for cnx in list(self._cnxs):
-            if cnx._open and not cnx._session.closed:
-                cnx.rollback()
-                cnx.close()
-            self._cnxs.remove(cnx)
-
-    @property
-    @deprecated('[3.19] explicitly use RepoAccess object in test instead')
     def session(self):
-        """return current server side session"""
-        # XXX We want to use a srv_connection instead and deprecate this
-        # property
-        session = self._current_session
-        if session is None:
-            session = self._admin_session
-            # bypassing all sanity to use the same repo cnx in the session
-            #
-            # we can't call set_cnx as the Connection is not managed by the
-            # session.
-            session._Session__threaddata.cnx = self._admin_clt_cnx._cnx
-        else:
-            session._Session__threaddata.cnx = self.cnx._cnx
-        session.set_cnxset()
-        return session
-
-    @property
-    @deprecated('[3.19] explicitly use RepoAccess object in test instead')
-    def websession(self):
-        return self.session
-
-    @property
-    @deprecated('[3.19] explicitly use RepoAccess object in test instead')
-    def adminsession(self):
-        """return current server side session (using default manager account)"""
+        """return admin session"""
         return self._admin_session
 
-    @deprecated('[3.19] explicitly use RepoAccess object in test instead')
-    def login(self, login, **kwargs):
-        """return a connection for the given login/password"""
-        __ = kwargs.pop('autoclose', True) # not used anymore
-        if login == self.admlogin:
-            # undo any previous login, if we're not used as a context manager
-            self.restore_connection()
-            return self.cnx
-        else:
-            if not kwargs:
-                kwargs['password'] = str(login)
-            clt_cnx = repoapi.connect(self.repo, login, **kwargs)
-        self.set_cnx(clt_cnx)
-        clt_cnx.__enter__()
-        return TestCaseConnectionProxy(self, clt_cnx)
-
-    @deprecated('[3.19] explicitly use RepoAccess object in test instead')
-    def restore_connection(self):
-        self._pop_custom_cnx()
-
-    def _pop_custom_cnx(self):
-        if self._current_clt_cnx is not None:
-            if self._current_clt_cnx._open:
-                self._current_clt_cnx.close()
-            if not  self._current_session.closed:
-                self.repo.close(self._current_session.sessionid)
-            self._current_clt_cnx = None
-            self._current_session = None
-
     #XXX this doesn't need to be a classmethod anymore
     def _init_repo(self):
         """init the repository and connection to it.
@@ -425,64 +313,8 @@
         login = unicode(db_handler.config.default_admin_config['login'])
         self.admin_access = self.new_access(login)
         self._admin_session = self.admin_access._session
-        self._admin_clt_cnx = repoapi.ClientConnection(self._admin_session)
-        self._cnxs.add(self._admin_clt_cnx)
-        self._admin_clt_cnx.__enter__()
         self.config.repository = lambda x=None: self.repo
 
-    # db api ##################################################################
-
-    @nocoverage
-    @deprecated('[3.19] explicitly use RepoAccess object in test instead')
-    def cursor(self, req=None):
-        if req is not None:
-            return req.cnx
-        else:
-            return self.cnx
-
-    @nocoverage
-    @deprecated('[3.19] explicitly use RepoAccess object in test instead')
-    def execute(self, rql, args=None, req=None):
-        """executes <rql>, builds a resultset, and returns a couple (rset, req)
-        where req is a FakeRequest
-        """
-        req = req or self.request(rql=rql)
-        return req.execute(unicode(rql), args)
-
-    @nocoverage
-    @deprecated('[3.19] explicitly use RepoAccess object in test instead')
-    def commit(self):
-        try:
-            return self.cnx.commit()
-        finally:
-            self.session.set_cnxset() # ensure cnxset still set after commit
-
-    @nocoverage
-    @deprecated('[3.19] explicitly use RepoAccess object in test instead')
-    def rollback(self):
-        try:
-            self.cnx.rollback()
-        except ProgrammingError:
-            pass # connection closed
-        finally:
-            self.session.set_cnxset() # ensure cnxset still set after commit
-
-    @deprecated('[3.19] explicitly use RepoAccess object in test instead')
-    def request(self, rollbackfirst=False, url=None, headers={}, **kwargs):
-        """return a web ui request"""
-        if rollbackfirst:
-            self.cnx.rollback()
-        req = self.requestcls(self.vreg, url=url, headers=headers, form=kwargs)
-        req.set_cnx(self.cnx)
-        return req
-
-    # server side db api #######################################################
-
-    @deprecated('[3.19] explicitly use RepoAccess object in test instead')
-    def sexecute(self, rql, args=None):
-        self.session.set_cnxset()
-        return self.session.execute(rql, args)
-
 
     # config management ########################################################
 
@@ -549,15 +381,6 @@
         """return the application schema"""
         return self.vreg.schema
 
-    @deprecated('[3.19] explicitly use RepoAccess object in test instead')
-    def shell(self):
-        """return a shell session object"""
-        from cubicweb.server.migractions import ServerMigrationHelper
-        return ServerMigrationHelper(None, repo=self.repo, cnx=self.cnx,
-                                     interactive=False,
-                                     # hack so it don't try to load fs schema
-                                     schema=1)
-
     def set_option(self, optname, value):
         self.config.global_set_option(optname, value)
 
@@ -578,24 +401,17 @@
                 self.skipTest('repository is not initialised: %r' % previous_failure)
             try:
                 self._init_repo()
-                self.addCleanup(self._close_cnx)
             except Exception as ex:
                 self.__class__._repo_init_failed = ex
                 raise
             self.addCleanup(self._close_access)
         self.setup_database()
-        self._admin_clt_cnx.commit()
         MAILBOX[:] = [] # reset mailbox
 
     def tearDown(self):
         # XXX hack until logilab.common.testlib is fixed
-        if self._admin_clt_cnx is not None:
-            if self._admin_clt_cnx._open:
-                self._admin_clt_cnx.close()
-            self._admin_clt_cnx = None
         if self._admin_session is not None:
-            if not self._admin_session.closed:
-                self.repo.close(self._admin_session.sessionid)
+            self.repo.close(self._admin_session.sessionid)
             self._admin_session = None
         while self._cleanups:
             cleanup, args, kwargs = self._cleanups.pop(-1)
@@ -635,20 +451,11 @@
     def create_user(self, req, login=None, groups=('users',), password=None,
                     email=None, commit=True, **kwargs):
         """create and return a new user entity"""
-        if isinstance(req, basestring):
-            warn('[3.12] create_user arguments are now (req, login[, groups, password, commit, **kwargs])',
-                 DeprecationWarning, stacklevel=2)
-            if not isinstance(groups, (tuple, list)):
-                password = groups
-                groups = login
-            elif isinstance(login, tuple):
-                groups = login
-            login = req
-            assert not isinstance(self, type)
-            req = self._admin_clt_cnx
         if password is None:
-            password = login.encode('utf8')
-        user = req.create_entity('CWUser', login=unicode(login),
+            password = login
+        if login is not None:
+            login = unicode(login)
+        user = req.create_entity('CWUser', login=login,
                                  upassword=password, **kwargs)
         req.execute('SET X in_group G WHERE X eid %%(x)s, G name IN(%s)'
                     % ','.join(repr(str(g)) for g in groups),
@@ -1037,8 +844,8 @@
     def assertAuthSuccess(self, req, origsession, nbsessions=1):
         sh = self.app.session_handler
         session = self.app.get_session(req)
-        clt_cnx = repoapi.ClientConnection(session)
-        req.set_cnx(clt_cnx)
+        cnx = repoapi.Connection(session)
+        req.set_cnx(cnx)
         self.assertEqual(len(self.open_sessions), nbsessions, self.open_sessions)
         self.assertEqual(session.login, origsession.login)
         self.assertEqual(session.anonymous_session, False)
@@ -1324,7 +1131,7 @@
         """this method populates the database with `how_many` entities
         of each possible type. It also inserts random relations between them
         """
-        with self.admin_access.repo_cnx() as cnx:
+        with self.admin_access.cnx() as cnx:
             with cnx.security_enabled(read=False, write=False):
                 self._auto_populate(cnx, how_many)
                 cnx.commit()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/3.21.rst	Mon Jun 22 14:27:37 2015 +0200
@@ -0,0 +1,40 @@
+What's new in CubicWeb 3.21?
+============================
+
+New features
+------------
+
+* the datadir-url configuration option lets one choose where static data files
+  are served (instead of the default ${base-url}/data/)
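+
+  As an illustrative sketch only (the URL below is made up), the option can be
+  set in the instance's ``all-in-one.conf``::
+
+    datadir-url=https://static.example.com/data/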
+
+Code movement
+-------------
+
+The cubicweb.web.views.timeline module (providing the timeline-json, timeline
+and static-timeline views) has moved to a standalone cube_.
+
+.. _cube: https://www.cubicweb.org/project/cubicweb-timeline
+
+API changes
+-----------
+
+* req.set_cookie's "expires" argument, if not None, is expected to be a
+  date or a datetime in UTC.  It was previously interpreted as localtime
+  with the UTC offset the server started in, which was inconsistent (we
+  are not aware of any users of that API).
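+
+  As a minimal sketch (assuming the usual ``set_cookie(name, value, ...)``
+  signature; the cookie name and value below are arbitrary), a cookie expiring
+  in one day is now set with a UTC datetime:
+
+  .. sourcecode:: python
+
+     from datetime import datetime, timedelta
+
+     # "expires" is interpreted as UTC
+     req.set_cookie('dismissed-notice', '1',
+                    expires=datetime.utcnow() + timedelta(days=1))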
+
+Deprecated code drops
+---------------------
+
+* the user_callback api has been removed; people should use plain
+  ajax functions instead
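+
+  As an illustrative sketch (the function name and body are made up; this
+  assumes the ``ajaxfunc`` decorator from
+  ``cubicweb.web.views.ajaxcontroller``), a callback can typically be replaced
+  by a registered ajax function:
+
+  .. sourcecode:: python
+
+     from cubicweb.web.views.ajaxcontroller import ajaxfunc
+
+     @ajaxfunc(output_type='json')
+     def notify_user(self, eid):
+         # self._cw is the request; do here what the callback used to do
+         return {'eid': eid}
+
+  which may then be called from javascript, e.g. with
+  ``asyncRemoteExec('notify_user', eid)``.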
+
+* the `Pyro` and `Zmq-pickle` remote repository access methods have
+  been entirely removed (emerging alternatives such as rqlcontroller
+  and cwclientlib should be used instead).  Note that as a side effect,
+  "repository-only" instances (i.e. without a http component) are no
+  longer possible.  If you have any such instances, you will need to
+  rename the configuration file from repository.conf to all-in-one.conf
+  and run ``cubicweb-ctl upgrade`` to update it.
+
+* the old (deprecated since 3.19) `DBAPI` api is completely removed
--- a/doc/book/en/admin/config.rst	Mon Jun 22 12:51:28 2015 +0200
+++ b/doc/book/en/admin/config.rst	Mon Jun 22 14:27:37 2015 +0200
@@ -14,7 +14,6 @@
 
 For advanced features, have a look to:
 
-  - `Pyro configuration`_
   - `Cubicweb resources configuration`_
 
 .. _`configure the database`: DatabaseInstallation_
@@ -22,7 +21,6 @@
 .. _`MySql configuration`: MySqlConfiguration_
 .. _`SQLServer configuration`: SQLServerConfiguration_
 .. _`SQLite configuration`: SQLiteConfiguration_
-.. _`Pyro configuration`: PyroConfiguration_
 .. _`Cubicweb resources configuration`: RessourcesConfiguration_
 
 
@@ -43,7 +41,7 @@
 Each instance can be configured with its own database connection information,
 that will be stored in the instance's :file:`sources` file. The database to use
 will be chosen when creating the instance. CubicWeb is known to run with
-Postgresql (recommended), MySQL, SQLServer and SQLite.
+Postgresql (recommended), SQLServer and SQLite, and may run with MySQL.
 
 Other possible sources of data include CubicWeb, Subversion, LDAP and Mercurial,
 but at least one relational database is required for CubicWeb to work. You do
@@ -162,6 +160,8 @@
 
 MySql
 ~~~~~
+.. warning::
+    CubicWeb's MySQL support is not commonly used, so things may or may not work properly.
 
 You must add the following lines in ``/etc/mysql/my.cnf`` file::
 
@@ -227,29 +227,3 @@
   SQLite is great for testing and to play with cubicweb but is not suited for
   production environments.
 
-
-.. _PyroConfiguration:
-
-Pyro configuration
-------------------
-
-Pyro name server
-~~~~~~~~~~~~~~~~
-
-If you want to use Pyro to access your instance remotely, or to have multi-source
-or distributed configuration, it is required to have a Pyro name server running
-on your network. By default it is detected by a broadcast request, but you can
-specify a location in the instance's configuration file.
-
-To do so, you need to :
-
-* be sure to have installed it (see :ref:`InstallDependencies`)
-
-* launch the pyro name server with `pyro-nsd start` before starting cubicweb
-
-* under debian, edit the file :file:`/etc/default/pyro-nsd` so that the name
-  server pyro will be launched automatically when the machine fires up
-
-Note that you can use the pyro server without a running pyro nameserver.
-Refer to `pyro-ns-host` server configuration option for details.
-
--- a/doc/book/en/admin/index.rst	Mon Jun 22 12:51:28 2015 +0200
+++ b/doc/book/en/admin/index.rst	Mon Jun 22 14:27:37 2015 +0200
@@ -22,7 +22,6 @@
    site-config
    multisources
    ldap
-   pyro
    migration
    additional-tips
    rql-logs
--- a/doc/book/en/admin/instance-config.rst	Mon Jun 22 12:51:28 2015 +0200
+++ b/doc/book/en/admin/instance-config.rst	Mon Jun 22 14:27:37 2015 +0200
@@ -110,32 +110,6 @@
     file where all requests RQL executed by the server are written
 
 
-Pyro configuration for the instance
------------------------------------
-Web server side:
-
-:`pyro.pyro-instance-id`:
-    pyro identifier of RQL server (e.g. the instance name)
-
-RQL server side:
-
-:`main.pyro-server`:
-    boolean to switch on/off pyro server-side
-
-:`pyro.pyro-host`:
-    pyro host:port number. If no port is specified, it is assigned
-    automatically.
-
-RQL and web servers side:
-
-:`pyro.pyro-ns-host`:
-    hostname hosting pyro server name. If no value is
-    specified, it is located by a request from broadcast
-
-:`pyro.pyro-ns-group`:
-    pyro group in which to save the instance (will default to 'cubicweb')
-
-
 Configuring e-mail
 ------------------
 RQL and web server side:
--- a/doc/book/en/admin/migration.rst	Mon Jun 22 12:51:28 2015 +0200
+++ b/doc/book/en/admin/migration.rst	Mon Jun 22 14:27:37 2015 +0200
@@ -8,7 +8,7 @@
 
 **Aim** : do the migration for N cubicweb instances hosted on a server to another with no downtime.
 
-**Prerequisites** : have an explicit definition of the database host (not default or localhost). In our case, the database is hosted on another host. You are not migrating your pyro server. You are not using multisource (more documentation on that soon).
+**Prerequisites** : have an explicit definition of the database host (not default or localhost). In our case, the database is hosted on another host.
 
 **Steps** :
 
@@ -21,26 +21,18 @@
     scp /etc/cubicweb.d/ newmachine:/etc/cubicweb.d/
     scp /etc/apache2/sites-available/ newmachine:/etc/apache2/sites-available/
 
-3. *on new machine* : give new ids to pyro registration so the new instances can register ::
-
-     cd /etc/cubicweb.d/ ; sed -i.bck 's/^pyro-instance-id=.*$/\02/' */all-in-one.conf
-
-4. *on new machine* : start your instances ::
+3. *on new machine* : start your instances ::
 
      cubicweb start
 
-5. *on new machine* : enable sites and modules for apache and start it, test it using by modifying your /etc/host file.
+4. *on new machine* : enable sites and modules for apache and start it, test it by modifying your /etc/hosts file.
 
-6. change dns entry from your oldmachine to newmachine
+5. change dns entry from your oldmachine to newmachine
 
-7. shutdown your *old machine* (if it doesn't host other services or your database)
+6. shutdown your *old machine* (if it doesn't host other services or your database)
 
-8. That's it.
+7. That's it.
 
 **Possible enhancements** : use right from the start a pound server behind your apache, that way you can add backends and smoothly migrate by shutting down backends that pound will take into account.
 
-Migrate apache & cubicweb with pyro
------------------------------------
 
-FIXME TODO
-
--- a/doc/book/en/admin/pyro.rst	Mon Jun 22 12:51:28 2015 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,62 +0,0 @@
-.. _UsingPyro:
-
-Working with a distributed client (using Pyro)
-==============================================
-
-In some circumstances, it is practical to split the repository and
-web-client parts of the application for load-balancing reasons. Or
-one wants to access the repository from independent scripts to consult
-or update the database.
-
-Prerequisites
--------------
-
-For this to work, several steps have to be taken in order.
-
-You must first ensure that the appropriate software is installed and
-running (see :ref:`ConfigEnv`)::
-
-  pyro-nsd -x -p 6969
-
-Then you have to set appropriate options in your configuration. For
-instance::
-
-  pyro-server=yes
-  pyro-ns-host=localhost:6969
-
-  pyro-instance-id=myinstancename
-
-Connect to the CubicWeb repository from a python script
--------------------------------------------------------
-
-Assuming pyro-nsd is running and your instance is configured with ``pyro-server=yes``,
-you will be able to use :mod:`cubicweb.dbapi` api to initiate the connection.
-
-.. note::
-    Regardless of whether your instance is pyro activated or not, you can still
-    achieve this by using cubicweb-ctl shell scripts in a simpler way, as by default
-    it creates a repository 'in-memory' instead of connecting through pyro. That
-    also means you have to be on the host where the instance is running.
-
-Finally, the client (for instance a python script) must connect specifically
-as in the following example code:
-
-.. sourcecode:: python
-
-    from cubicweb import dbapi
-
-    cnx = dbapi.connect(database='instance-id', user='admin', password='admin')
-    cnx.load_appobjects()
-    cur = cnx.cursor()
-    for name in (u'Personal', u'Professional', u'Computers'):
-        cur.execute('INSERT Tag T: T name %(n)s', {'n': name})
-    cnx.commit()
-
-Calling :meth:`cubicweb.dbapi.load_appobjects`, will populate the
-cubicweb registries (see :ref:`VRegistryIntro`) with the application
-objects installed on the host where the script runs. You'll then be
-allowed to use the ORM goodies and custom entity methods and views. Of
-course this is optional, without it you can still get the repository
-data through the connection but in a roughly way: only RQL cursors
-will be available, e.g. you can't even build entity objects from the
-result set.
--- a/doc/book/en/annexes/faq.rst	Mon Jun 22 12:51:28 2015 +0200
+++ b/doc/book/en/annexes/faq.rst	Mon Jun 22 14:27:37 2015 +0200
@@ -83,11 +83,9 @@
 
 2. it offers an abstraction layer allowing your applications to run
    on multiple back-ends. That means not only various SQL backends
-   (postgresql, sqlite, mysql), but also multiple databases at the
-   same time, and also non-SQL data stores like LDAP directories and
-   subversion/mercurial repositories (see the `vcsfile`
-   component). Google App Engine is yet another supported target for
-   RQL.
+   (postgresql, sqlite, sqlserver, mysql), but also non-SQL data stores like
+   LDAP directories and subversion/mercurial repositories (see the `vcsfile`
+   component).
 
 Which ajax library is CubicWeb using ?
 --------------------------------------
--- a/doc/book/en/devrepo/datamodel/definition.rst	Mon Jun 22 12:51:28 2015 +0200
+++ b/doc/book/en/devrepo/datamodel/definition.rst	Mon Jun 22 14:27:37 2015 +0200
@@ -163,7 +163,7 @@
 
 Common properties for attributes and relations:
 
-* `description`: an unicode string describing an attribute or a
+* `description`: a unicode string describing an attribute or a
   relation. By default this string will be used in the editing form of
   the entity, which means that it is supposed to help the end-user and
   should be flagged by the function `_` to be properly
--- a/doc/book/en/devrepo/devcore/dbapi.rst	Mon Jun 22 12:51:28 2015 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,133 +0,0 @@
-.. _dbapi:
-
-Python/RQL API
-~~~~~~~~~~~~~~
-
-The Python API developped to interface with RQL is inspired from the standard db-api,
-with a Connection object having the methods cursor, rollback and commit essentially.
-The most important method is the `execute` method of a cursor.
-
-.. sourcecode:: python
-
-   execute(rqlstring, args=None, build_descr=True)
-
-:rqlstring: the RQL query to execute (unicode)
-:args: if the query contains substitutions, a dictionary containing the values to use
-
-The `Connection` object owns the methods `commit` and `rollback`. You
-*should never need to use them* during the development of the web
-interface based on the *CubicWeb* framework as it determines the end
-of the transaction depending on the query execution success. They are
-however useful in other contexts such as tests or custom controllers.
-
-.. note::
-
-  If a query generates an error related to security (:exc:`Unauthorized`) or to
-  integrity (:exc:`ValidationError`), the transaction can still continue but you
-  won't be able to commit it, a rollback will be necessary to start a new
-  transaction.
-
-  Also, a rollback is automatically done if an error occurs during commit.
-
-.. note::
-
-   A :exc:`ValidationError` has a `entity` attribute. In CubicWeb,
-   this atttribute is set to the entity's eid (not a reference to the
-   entity itself).
-
-Executing RQL queries from a view or a hook
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-When you're within code of the web interface, the db-api like connexion is
-handled by the request object. You should not have to access it directly, but
-use the `execute` method directly available on the request, eg:
-
-.. sourcecode:: python
-
-   rset = self._cw.execute(rqlstring, kwargs)
-
-Similarly, on the server side (eg in hooks), there is no db-api connexion (since
-you're directly inside the data-server), so you'll have to use the execute method
-of the session object.
-
-
-Proper usage of `.execute`
-~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Let's say you want to get T which is in configuration C, this translates to:
-
-.. sourcecode:: python
-
-   self._cw.execute('Any T WHERE T in_conf C, C eid %s' % entity.eid)
-
-But it must be written in a syntax that will benefit from the use
-of a cache on the RQL server side:
-
-.. sourcecode:: python
-
-   self._cw.execute('Any T WHERE T in_conf C, C eid %(x)s', {'x': entity.eid})
-
-The syntax tree is built once for the "generic" RQL and can be re-used
-with a number of different eids. There rql IN operator is an exception
-to this rule.
-
-.. sourcecode:: python
-
-   self._cw.execute('Any T WHERE T in_conf C, C name IN (%s)'
-                    % ','.join(['foo', 'bar']))
-
-Alternativelly, some of the common data related to an entity can be
-obtained from the `entity.related()` method (which is used under the
-hood by the orm when you use attribute access notation on an entity to
-get a relation. The initial request would then be translated to:
-
-.. sourcecode:: python
-
-   entity.related('in_conf', 'object')
-
-Additionnaly this benefits from the fetch_attrs policy (see
-:ref:`FetchAttrs`) eventually defined on the class element, which says
-which attributes must be also loaded when the entity is loaded through
-the orm.
-
-
-.. _resultset:
-
-The `ResultSet` API
-~~~~~~~~~~~~~~~~~~~
-
-ResultSet instances are a very commonly manipulated object. They have
-a rich API as seen below, but we would like to highlight a bunch of
-methods that are quite useful in day-to-day practice:
-
-* `__str__()` (applied by `print`) gives a very useful overview of both
-  the underlying RQL expression and the data inside; unavoidable for
-  debugging purposes
-
-* `printable_rql()` produces back a well formed RQL expression as a
-  string; it is very useful to build views
-
-* `entities()` returns a generator on all entities of the result set
-
-* `get_entity(row, col)` gets the entity at row, col coordinates; one
-  of the most used result set method
-
-.. autoclass:: cubicweb.rset.ResultSet
-   :members:
-
-
-The `Cursor` and `Connection` API
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-The whole cursor API is developped below.
-
-.. note::
-
-  In practice you'll usually use the `.execute` method on the _cw object of
-  appobjects. Usage of other methods is quite rare.
-
-.. autoclass:: cubicweb.dbapi.Cursor
-   :members:
-
-.. autoclass:: cubicweb.dbapi.Connection
-   :members:
--- a/doc/book/en/devrepo/devcore/index.rst	Mon Jun 22 12:51:28 2015 +0200
+++ b/doc/book/en/devrepo/devcore/index.rst	Mon Jun 22 14:27:37 2015 +0200
@@ -4,6 +4,5 @@
 .. toctree::
    :maxdepth: 1
 
-   dbapi.rst
    reqbase.rst
 
--- a/doc/book/en/devrepo/entityclasses/data-as-objects.rst	Mon Jun 22 12:51:28 2015 +0200
+++ b/doc/book/en/devrepo/entityclasses/data-as-objects.rst	Mon Jun 22 14:27:37 2015 +0200
@@ -17,7 +17,7 @@
 `Formatting and output generation`:
 
 * :meth:`view(__vid, __registry='views', **kwargs)`, applies the given view to the entity
-  (and returns an unicode string)
+  (and returns a unicode string)
 
 * :meth:`absolute_url(*args, **kwargs)`, returns an absolute URL including the base-url
 
--- a/doc/book/en/devrepo/repo/sessions.rst	Mon Jun 22 12:51:28 2015 +0200
+++ b/doc/book/en/devrepo/repo/sessions.rst	Mon Jun 22 14:27:37 2015 +0200
@@ -9,7 +9,8 @@
 Connections
 ===========
 
-Connections provide the `.execute` method to query the data sources.
+Connections provide the `.execute` method to query the data sources, along with
+`.commit` and `.rollback` methods for transaction management.
 
 Kinds of connections
 --------------------
@@ -38,6 +39,119 @@
 
 Connections should always be used as context managers, to avoid leaks.
 
+
+Python/RQL API
+~~~~~~~~~~~~~~
+
+The Python API developed to interface with RQL is inspired by the standard db-api,
+but since `execute` returns its results directly, there is no `cursor` concept.
+
+.. sourcecode:: python
+
+   execute(rqlstring, args=None, build_descr=True)
+
+:rqlstring: the RQL query to execute (unicode)
+:args: if the query contains substitutions, a dictionary containing the values to use
+
+The `Connection` object owns the `commit` and `rollback` methods. You
+*should never need to use them* when developing a web interface based on
+the *CubicWeb* framework, as the framework determines the end of the
+transaction depending on the success of query execution. They are
+however useful in other contexts such as tests or custom controllers.
+
+.. note::
+
+  If a query generates an error related to security (:exc:`Unauthorized`) or to
+  integrity (:exc:`ValidationError`), the transaction can still continue but you
+  won't be able to commit it; a rollback will be necessary to start a new
+  transaction.
+
+  Also, a rollback is automatically done if an error occurs during commit.
+
+.. note::
+
+   A :exc:`ValidationError` has an `entity` attribute. In CubicWeb,
+   this attribute is set to the entity's eid (not a reference to the
+   entity itself).
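+
+For instance, in a test (a minimal sketch; ``Tag`` is just an example entity
+type and ``admin_access.repo_cnx()`` is the test helper used in CubicWeb's
+own test suite):
+
+.. sourcecode:: python
+
+   from cubicweb import ValidationError
+
+   with self.admin_access.repo_cnx() as cnx:
+       try:
+           cnx.execute('INSERT Tag T: T name %(n)s', {'n': u'Personal'})
+           cnx.commit()
+       except ValidationError:
+           # the transaction cannot be committed anymore; roll it back
+           # before starting a new one
+           cnx.rollback()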
+
+Executing RQL queries from a view or a hook
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+When you're within code of the web interface, the Connection is handled by the
+request object. You should not have to access it directly, but use the
+`execute` method directly available on the request, eg:
+
+.. sourcecode:: python
+
+   rset = self._cw.execute(rqlstring, kwargs)
+
+Similarly, on the server side (eg in hooks), there is no request object (since
+you're directly inside the data-server), so you'll have to use the execute method
+of the Connection object.
+
+Proper usage of `.execute`
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Let's say you want to get T, which is in configuration C; this translates to:
+
+.. sourcecode:: python
+
+   self._cw.execute('Any T WHERE T in_conf C, C eid %s' % entity.eid)
+
+But it must be written in a syntax that will benefit from the use
+of a cache on the RQL server side:
+
+.. sourcecode:: python
+
+   self._cw.execute('Any T WHERE T in_conf C, C eid %(x)s', {'x': entity.eid})
+
+The syntax tree is built once for the "generic" RQL and can be re-used
+with a number of different eids.  The rql IN operator is an exception
+to this rule.
+
+.. sourcecode:: python
+
+   self._cw.execute('Any T WHERE T in_conf C, C name IN (%s)'
+                    % ','.join(['foo', 'bar']))
+
+Alternatively, some of the common data related to an entity can be
+obtained from the `entity.related()` method (which is used under the
+hood by the ORM when you use attribute access notation on an entity to
+get a relation). The initial request would then be translated to:
+
+.. sourcecode:: python
+
+   entity.related('in_conf', 'object')
+
+Additionally this benefits from the fetch_attrs policy (see :ref:`FetchAttrs`)
+optionally defined on the class element, which says which attributes must
+also be loaded when the entity is loaded through the ORM.
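+
+A minimal sketch of such a policy on an entity class (``Person`` and its
+attributes are just examples; ``fetch_config`` from ``cubicweb.entities``
+is the usual helper to build it):
+
+.. sourcecode:: python
+
+   from cubicweb.entities import AnyEntity, fetch_config
+
+   class Person(AnyEntity):
+       __regid__ = 'Person'
+       # attributes prefetched whenever Person entities are loaded by the ORM
+       fetch_attrs, cw_fetch_order = fetch_config(['firstname', 'surname'])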
+
+.. _resultset:
+
+The `ResultSet` API
+~~~~~~~~~~~~~~~~~~~
+
+ResultSet instances are very commonly manipulated objects. They have
+a rich API as seen below, but we would like to highlight a bunch of
+methods that are quite useful in day-to-day practice:
+
+* `__str__()` (applied by `print`) gives a very useful overview of both
+  the underlying RQL expression and the data inside; unavoidable for
+  debugging purposes
+
+* `printable_rql()` returns a well formed RQL expression as a
+  string; it is very useful to build views
+
+* `entities()` returns a generator on all entities of the result set
+
+* `get_entity(row, col)` gets the entity at row, col coordinates; one
+  of the most used result set methods
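+
+A quick sketch of these methods in use (assuming a context where ``self._cw``
+is available, e.g. a view or a hook):
+
+.. sourcecode:: python
+
+   rset = self._cw.execute('Any X WHERE X is CWUser')
+   print rset                     # overview of the RQL and the data
+   print rset.printable_rql()     # well formed RQL as a string
+   users = list(rset.entities())  # all entities of the result set
+   first = rset.get_entity(0, 0)  # entity at row 0, column 0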
+
+.. autoclass:: cubicweb.rset.ResultSet
+   :members:
+
+
 Authentication and management of sessions
 -----------------------------------------
 
--- a/doc/book/en/devweb/edition/dissection.rst	Mon Jun 22 12:51:28 2015 +0200
+++ b/doc/book/en/devweb/edition/dissection.rst	Mon Jun 22 14:27:37 2015 +0200
@@ -320,7 +320,7 @@
 
   * on success:
 
-    * an url (string) representing the next thing to jump to
+    * a url (string) representing the next thing to jump to
 
 Given the array structure described above, it is quite simple to
 manipulate the DOM to show the errors at appropriate places.
--- a/doc/book/en/devweb/request.rst	Mon Jun 22 12:51:28 2015 +0200
+++ b/doc/book/en/devweb/request.rst	Mon Jun 22 14:27:37 2015 +0200
@@ -89,7 +89,7 @@
     html headers
   * `add_js(jsfiles)`: adds the given list of javascript resources to the
     current html headers
-  * `add_onload(jscode)`: inject the given jscode fragment (an unicode
+  * `add_onload(jscode)`: injects the given jscode fragment (a unicode
     string) into the current html headers, wrapped inside a
     document.ready(...) or another ajax-friendly one-time trigger event
   * `add_header(header, values)`: adds the header/value pair to the
@@ -118,13 +118,13 @@
 ```
 
 The elements we gave an overview of above are built in three layers,
-from ``cubicweb.req.RequestSessionBase``, ``cubicweb.repoapi.ClientConnection`` and
+from ``cubicweb.req.RequestSessionBase``, ``cubicweb.repoapi.Connection`` and
 ``cubicweb.web.ConnectionCubicWebRequestBase``.
 
 .. autoclass:: cubicweb.req.RequestSessionBase
    :members:
 
-.. autoclass:: cubicweb.repoapi.ClientConnection
+.. autoclass:: cubicweb.repoapi.Connection
    :members:
 
 .. autoclass:: cubicweb.web.request.ConnectionCubicWebRequestBase
--- a/doc/book/en/devweb/views/basetemplates.rst	Mon Jun 22 12:51:28 2015 +0200
+++ b/doc/book/en/devweb/views/basetemplates.rst	Mon Jun 22 14:27:37 2015 +0200
@@ -116,7 +116,7 @@
 
 * `binary`: boolean flag telling if the view generates some text or a binary
   stream.  Defaults to False. When the view generates text, the argument given to `self.w`
-  **must be an unicode string**, encoded string otherwise.
+  **must be a unicode string**, an encoded string otherwise.
 
 * `content_type`, view's content type, default to 'text/xhtml'
 
@@ -132,7 +132,7 @@
 
 
 You can also modify certain aspects of the main template of a page
-when building an url or setting these parameters in the req.form:
+when building a url or setting these parameters in the req.form:
 
 * `__notemplate`, if present (whatever the value assigned), only the content view
   is returned
--- a/doc/book/en/devweb/views/reledit.rst	Mon Jun 22 12:51:28 2015 +0200
+++ b/doc/book/en/devweb/views/reledit.rst	Mon Jun 22 14:27:37 2015 +0200
@@ -105,9 +105,9 @@
 ensure edition of the `Person` entity instead (using a standard
 automatic form) of the association of Company and Person.
 
-Finally, the `reload` key accepts either a boolean, an eid or an
-unicode string representing an url. If an eid is provided, it will be
-internally transformed into an url. The eid/url case helps when one
+Finally, the `reload` key accepts either a boolean, an eid or a
+unicode string representing a url. If an eid is provided, it will be
+internally transformed into a url. The eid/url case helps when one
 needs to reload and the current url is inappropriate. A common case is
 edition of a key attribute, which is part of the current url. If one
 user changed the Company's name from `lozilab` to `logilab`, reloading
--- a/doc/book/en/tutorials/base/customizing-the-application.rst	Mon Jun 22 12:51:28 2015 +0200
+++ b/doc/book/en/tutorials/base/customizing-the-application.rst	Mon Jun 22 14:27:37 2015 +0200
@@ -268,7 +268,7 @@
   but this is not mandatory.
 
 * When we want to write something to the output stream, we simply call `self.w`,
-  with *must be passed an unicode string*.
+  which *must be passed a unicode string*.
 
 * The latest function is the most exotic stuff. The point is that without it, you
   would get an error at display time because the framework wouldn't be able to
--- a/doc/features_list.rst	Mon Jun 22 12:51:28 2015 +0200
+++ b/doc/features_list.rst	Mon Jun 22 14:27:37 2015 +0200
@@ -45,7 +45,6 @@
 | configuration - user / groups handling                             | 3  | 1  |
 | configuration - site configuration                                 | 3  | 1  |
 | configuration - distributed configuration                          | 2  | 1  |
-| configuration - pyro                                               | 2  | 2  |
 +--------------------------------------------------------------------+----+----+
 | multi-sources - capabilities                                       | NA | 0  |
 | multi-sources - configuration                                      | 2  | 0  |
--- a/doc/tools/pyjsrest.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/doc/tools/pyjsrest.py	Mon Jun 22 14:27:37 2015 +0200
@@ -134,7 +134,6 @@
     'cubicweb.preferences',
     'cubicweb.edition',
     'cubicweb.reledit',
-    'cubicweb.timeline-ext',
 ]
 
 FILES_TO_IGNORE = set([
@@ -152,7 +151,6 @@
     'cubicweb.fckcwconfig-full.js',
     'cubicweb.goa.js',
     'cubicweb.compat.js',
-    'cubicweb.timeline-bundle.js',
     ])
 
 if __name__ == '__main__':
--- a/entities/__init__.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/entities/__init__.py	Mon Jun 22 14:27:37 2015 +0200
@@ -53,7 +53,7 @@
         """
         restrictions = ['X is %s' % cls.__regid__]
         selected = ['X']
-        for attrschema in cls.e_schema.indexable_attributes():
+        for attrschema in sorted(cls.e_schema.indexable_attributes()):
             varname = attrschema.type.upper()
             restrictions.append('X %s %s' % (attrschema, varname))
             selected.append(varname)
--- a/entities/adapters.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/entities/adapters.py	Mon Jun 22 14:27:37 2015 +0200
@@ -79,6 +79,8 @@
         itree = self.entity.cw_adapt_to('ITree')
         if itree is not None:
             return itree.path()[:-1]
+        if view.msgid_timestamp:
+            return (self.entity.eid,)
         return ()
 
 
--- a/entities/test/unittest_base.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/entities/test/unittest_base.py	Mon Jun 22 14:27:37 2015 +0200
@@ -66,8 +66,8 @@
     def test_fti_rql_method(self):
         with self.admin_access.web_request() as req:
             eclass = self.vreg['etypes'].etype_class('EmailAddress')
-            self.assertEqual(['Any X, ALIAS, ADDRESS WHERE X is EmailAddress, '
-                              'X alias ALIAS, X address ADDRESS'],
+            self.assertEqual(['Any X, ADDRESS, ALIAS WHERE X is EmailAddress, '
+                              'X address ADDRESS, X alias ALIAS'],
                              eclass.cw_fti_index_rql_queries(req))
 
 
--- a/entities/test/unittest_wfobjs.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/entities/test/unittest_wfobjs.py	Mon Jun 22 14:27:37 2015 +0200
@@ -87,12 +87,12 @@
             shell.rollback()
             # no pb if not in the same workflow
             wf2 = add_wf(shell, 'Company')
-            foo = wf.add_state(u'foo', initial=True)
-            bar = wf.add_state(u'bar')
-            wf.add_transition(u'baz', (foo,), bar, ('managers',))
+            foo = wf2.add_state(u'foo', initial=True)
+            bar = wf2.add_state(u'bar')
+            wf2.add_transition(u'baz', (foo,), bar, ('managers',))
             shell.commit()
             # gnark gnark
-            biz = wf.add_transition(u'biz', (bar,), foo)
+            biz = wf2.add_transition(u'biz', (bar,), foo)
             shell.commit()
             with self.assertRaises(ValidationError) as cm:
                 biz.cw_set(name=u'baz')
@@ -416,6 +416,32 @@
                 group.cw_clear_all_caches()
                 self.assertEqual(iworkflowable.state, nextstate)
 
+    def test_replace_state(self):
+        with self.admin_access.shell() as shell:
+            wf = add_wf(shell, 'CWGroup', name='groupwf', default=True)
+            s_new = wf.add_state('new', initial=True)
+            s_state1 = wf.add_state('state1')
+            wf.add_transition('tr', (s_new,), s_state1)
+            shell.commit()
+
+        with self.admin_access.repo_cnx() as cnx:
+            group = cnx.create_entity('CWGroup', name=u'grp1')
+            cnx.commit()
+
+            iwf = group.cw_adapt_to('IWorkflowable')
+            iwf.fire_transition('tr')
+            cnx.commit()
+            group.cw_clear_all_caches()
+
+            wf = cnx.entity_from_eid(wf.eid)
+            wf.add_state('state2')
+            with cnx.security_enabled(write=False):
+                wf.replace_state('state1', 'state2')
+            cnx.commit()
+
+            self.assertEqual(iwf.state, 'state2')
+            self.assertEqual(iwf.latest_trinfo().to_state[0].name, 'state2')
+
 
 class CustomWorkflowTC(CubicWebTC):
 
@@ -569,8 +595,9 @@
         with self.admin_access.web_request() as req:
             user = self.create_user(req, 'member', surname=u'toto')
             req.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s',
-                     {'wf': wf.eid, 'x': user.eid})
+                        {'wf': wf.eid, 'x': user.eid})
             req.cnx.commit()
+            user.cw_clear_all_caches()
             iworkflowable = user.cw_adapt_to('IWorkflowable')
             self.assertEqual(iworkflowable.state, 'dead')
 
--- a/entities/wfobjs.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/entities/wfobjs.py	Mon Jun 22 14:27:37 2015 +0200
@@ -174,12 +174,14 @@
             todelstate = self.state_by_name(todelstate)
         if not hasattr(replacement, 'eid'):
             replacement = self.state_by_name(replacement)
+        args = {'os': todelstate.eid, 'ns': replacement.eid}
         execute = self._cw.execute
-        execute('SET X in_state S WHERE S eid %(s)s', {'s': todelstate.eid})
-        execute('SET X from_state NS WHERE X to_state OS, OS eid %(os)s, NS eid %(ns)s',
-                {'os': todelstate.eid, 'ns': replacement.eid})
-        execute('SET X to_state NS WHERE X to_state OS, OS eid %(os)s, NS eid %(ns)s',
-                {'os': todelstate.eid, 'ns': replacement.eid})
+        execute('SET X in_state NS WHERE X in_state OS, '
+                'NS eid %(ns)s, OS eid %(os)s', args)
+        execute('SET X from_state NS WHERE X from_state OS, '
+                'OS eid %(os)s, NS eid %(ns)s', args)
+        execute('SET X to_state NS WHERE X to_state OS, '
+                'OS eid %(os)s, NS eid %(ns)s', args)
         todelstate.cw_delete()
 
 
--- a/entity.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/entity.py	Mon Jun 22 14:27:37 2015 +0200
@@ -425,7 +425,7 @@
         else:
             for rschema in cls.e_schema.subject_relations():
                 if (rschema.final
-                    and rschema != 'eid'
+                    and rschema not in ('eid', 'cwuri')
                     and cls.e_schema.has_unique_values(rschema)
                     and cls.e_schema.rdef(rschema.type).cardinality[0] == '1'):
                     mainattr = str(rschema)
@@ -514,7 +514,7 @@
         prefixing the relation name by 'reverse_'. Also, relation values may be
         an entity or eid, a list of entities or eids.
         """
-        rql, qargs, pendingrels, attrcache = cls._cw_build_entity_query(kwargs)
+        rql, qargs, pendingrels, _attrcache = cls._cw_build_entity_query(kwargs)
         if rql:
             rql = 'INSERT %s X: %s' % (cls.__regid__, rql)
         else:
@@ -524,7 +524,6 @@
         except IndexError:
             raise Exception('could not create a %r with %r (%r)' %
                             (cls.__regid__, rql, qargs))
-        created._cw_update_attr_cache(attrcache)
         cls._cw_handle_pending_relations(created.eid, pendingrels, execute)
         return created
 
@@ -555,43 +554,6 @@
             return self.eid
         return super(Entity, self).__hash__()
 
-    def _cw_update_attr_cache(self, attrcache):
-        # if context is a repository session, don't consider dont-cache-attrs as
-        # the instance already holds modified values and loosing them could
-        # introduce severe problems
-        trdata = self._cw.transaction_data
-        uncached_attrs = trdata.get('%s.storage-special-process-attrs' % self.eid, set())
-        if self._cw.is_request:
-            uncached_attrs.update(trdata.get('%s.dont-cache-attrs' % self.eid, set()))
-        for attr in uncached_attrs:
-            attrcache.pop(attr, None)
-            self.cw_attr_cache.pop(attr, None)
-        self.cw_attr_cache.update(attrcache)
-
-    def _cw_dont_cache_attribute(self, attr, repo_side=False):
-        """Repository side method called when some attribute has been
-        transformed by a hook, hence original value should not be cached by
-        the client.
-
-        If repo_side is True, this means that the attribute has been
-        transformed by a *storage*, hence the original value should
-        not be cached **by anyone**.
-
-        This only applies to a storage special case where the value
-        specified in creation or update is **not** the value that will
-        be transparently exposed later.
-
-        For example we have a special "fs_importing" mode in BFSS
-        where a file path is given as attribute value and stored as is
-        in the data base. Later access to the attribute will provide
-        the content of the file at the specified path. We do not want
-        the "filepath" value to be cached.
-        """
-        self._cw.transaction_data.setdefault('%s.dont-cache-attrs' % self.eid, set()).add(attr)
-        if repo_side:
-            trdata = self._cw.transaction_data
-            trdata.setdefault('%s.storage-special-process-attrs' % self.eid, set()).add(attr)
-
     def __json_encode__(self):
         """custom json dumps hook to dump the entity's eid
         which is not part of dict structure itself
@@ -836,7 +798,6 @@
 
     # data fetching methods ###################################################
 
-    @cached
     def as_rset(self): # XXX .cw_as_rset
         """returns a resultset containing `self` information"""
         rset = ResultSet([(self.eid,)], 'Any X WHERE X eid %(x)s',
@@ -1329,10 +1290,6 @@
             else:
                 rql += ' WHERE X eid %(x)s'
             self._cw.execute(rql, qargs)
-        # update current local object _after_ the rql query to avoid
-        # interferences between the query execution itself and the cw_edited /
-        # skip_security machinery
-        self._cw_update_attr_cache(attrcache)
         self._cw_handle_pending_relations(self.eid, pendingrels, self._cw.execute)
         # XXX update relation cache
 
--- a/etwist/server.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/etwist/server.py	Mon Jun 22 14:27:37 2015 +0200
@@ -65,14 +65,6 @@
         # when we have an in-memory repository, clean unused sessions every XX
         # seconds and properly shutdown the server
         if config['repository-uri'] == 'inmemory://':
-            if config.pyro_enabled():
-                # if pyro is enabled, we have to register to the pyro name
-                # server, create a pyro daemon, and create a task to handle pyro
-                # requests
-                self.appli.repo.warning('remote repository access through pyro is deprecated')
-                self.pyro_daemon = self.appli.repo.pyro_register()
-                self.pyro_listen_timeout = 0.02
-                self.appli.repo.looping_task(1, self.pyro_loop_event)
             if config.mode != 'test':
                 reactor.addSystemEventTrigger('before', 'shutdown',
                                               self.shutdown_event)
@@ -93,13 +85,6 @@
         """
         self.appli.repo.shutdown()
 
-    def pyro_loop_event(self):
-        """listen for pyro events"""
-        try:
-            self.pyro_daemon.handleRequests(self.pyro_listen_timeout)
-        except select.error:
-            return
-
     def getChild(self, path, request):
         """Indicate which resource to use to process down the URL's path"""
         return self
--- a/etwist/twconfig.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/etwist/twconfig.py	Mon Jun 22 14:27:37 2015 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -17,9 +17,6 @@
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """twisted server configurations:
 
-* the "twisted" configuration to get a web instance running in a standalone
-  twisted web server which talk to a repository server using Pyro
-
 * the "all-in-one" configuration to get a web instance running in a twisted
   web server integrating a repository server in the same process (only available
   if the repository part of the software is installed
@@ -82,13 +79,6 @@
 the repository rather than the user running the command',
           'group': 'main', 'level': WebConfiguration.mode == 'system'
           }),
-        ('pyro-server',
-         {'type' : 'yn',
-          # pyro is only a recommends by default, so don't activate it here
-          'default': False,
-          'help': 'run a pyro server',
-          'group': 'main', 'level': 1,
-          }),
         ('webserver-threadpool-size',
          {'type': 'int',
           'default': 4,
@@ -117,9 +107,6 @@
 
         cubicweb_appobject_path = WebConfigurationBase.cubicweb_appobject_path | ServerConfiguration.cubicweb_appobject_path
         cube_appobject_path = WebConfigurationBase.cube_appobject_path | ServerConfiguration.cube_appobject_path
-        def pyro_enabled(self):
-            """tell if pyro is activated for the in memory repository"""
-            return self['pyro-server']
 
 
     CONFIGURATIONS.append(AllInOneConfiguration)
--- a/etwist/twctl.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/etwist/twctl.py	Mon Jun 22 14:27:37 2015 +0200
@@ -71,11 +71,14 @@
         cfgname = 'all-in-one'
         subcommand = 'cubicweb-twisted'
 
-    class AllInOneStopHandler(serverctl.RepositoryStopHandler):
+    class AllInOneStopHandler(CommandHandler):
         cmdname = 'stop'
         cfgname = 'all-in-one'
         subcommand = 'cubicweb-twisted'
 
+        def poststop(self):
+            pass
+
     class AllInOneUpgradeHandler(TWUpgradeHandler):
         cfgname = 'all-in-one'
 
--- a/ext/rest.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/ext/rest.py	Mon Jun 22 14:27:37 2015 +0200
@@ -34,7 +34,6 @@
 """
 __docformat__ = "restructuredtext en"
 
-from cStringIO import StringIO
 from itertools import chain
 from logging import getLogger
 from os.path import join
@@ -405,7 +404,7 @@
         # remove unprintable characters unauthorized in xml
         data = data.translate(ESC_CAR_TABLE)
     settings = {'input_encoding': encoding, 'output_encoding': 'unicode',
-                'warning_stream': StringIO(),
+                'warning_stream': False,
                 'traceback': True, # don't sys.exit
                 'stylesheet': None, # don't try to embed stylesheet (may cause
                                     # obscure bug due to docutils computing
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/hooks/logstats.py	Mon Jun 22 14:27:37 2015 +0200
@@ -0,0 +1,59 @@
+# copyright 2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+
+"""looping task for dumping instance's stats in a file
+"""
+
+__docformat__ = "restructuredtext en"
+
+from datetime import datetime
+import json
+
+from cubicweb.server import hook
+
+class LogStatsStartHook(hook.Hook):
+    """register task to regularly dump instance's stats in a file
+
+    data are stored as one json entry per row
+    """
+    __regid__ = 'cubicweb.hook.logstats.start'
+    events = ('server_startup',)
+
+    def __call__(self):
+        interval = self.repo.config.get('logstat-interval', 0)
+        if interval <= 0:
+            return            
+
+        def dump_stats(repo):
+            statsfile = repo.config.get('logstat-file')
+            with repo.internal_cnx() as cnx:
+                stats = cnx.call_service('repo_stats')
+                gcstats = cnx.call_service('repo_gc_stats', nmax=5)
+                
+            allstats = {'resources': stats,
+                        'memory': gcstats,
+                        'timestamp': datetime.utcnow().isoformat(),
+                       }
+            try:
+                with open(statsfile, 'ab') as ofile:
+                    json.dump(allstats, ofile)
+                    ofile.write('\n')
+            except IOError:
+                repo.warning('Cannot open stats file for writing: %s', statsfile)
+                    
+        self.repo.looping_task(interval, dump_stats, self.repo)
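
This looping task only runs when the instance configuration sets a non-zero
``logstat-interval``; the dump target is read from the ``logstat-file`` option
used above. A minimal sketch of the relevant all-in-one.conf lines (the path
is only an example):

  logstat-interval=60
  logstat-file=/var/log/cubicweb/myinstance-stats.log
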
--- a/hooks/metadata.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/hooks/metadata.py	Mon Jun 22 14:27:37 2015 +0200
@@ -199,17 +199,15 @@
             oldsource = self._cw.entity_from_eid(schange[self.eidfrom])
             entity = self._cw.entity_from_eid(self.eidfrom)
             # we don't want the moved entity to be reimported later.  To
-            # distinguish this state, the trick is to change the associated
-            # record in the 'entities' system table with eid=-eid while leaving
-            # other fields unchanged, and to add a new record with eid=eid,
-            # source='system'. External source will then have consider case
-            # where `extid2eid` return a negative eid as 'this entity was known
-            # but has been moved, ignore it'.
-            self._cw.system_sql('UPDATE entities SET eid=-eid WHERE eid=%(eid)s',
-                                {'eid': self.eidfrom})
+            # distinguish this state, move the record from the 'entities' table
+            # to 'moved_entities'.  External sources will then treat the case
+            # where `extid2eid` returns a negative eid as 'this entity was
+            # known but has been moved, ignore it'.
+            attrs = {'eid': entity.eid, 'extid': self._cw.entity_metas(entity.eid)['extid']}
+            self._cw.system_sql(syssource.sqlgen.insert('moved_entities', attrs), attrs)
             attrs = {'type': entity.cw_etype, 'eid': entity.eid, 'extid': None,
                      'asource': 'system'}
-            self._cw.system_sql(syssource.sqlgen.insert('entities', attrs), attrs)
+            self._cw.system_sql(syssource.sqlgen.update('entities', attrs, ['eid']), attrs)
             # register an operation to update repository/sources caches
             ChangeEntitySourceUpdateCaches(self._cw, entity=entity,
                                            oldsource=oldsource.repo_source,
--- a/hooks/notification.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/hooks/notification.py	Mon Jun 22 14:27:37 2015 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -165,7 +165,7 @@
 class EntityUpdateHook(NotificationHook):
     __regid__ = 'notifentityupdated'
     __abstract__ = True # do not register by default
-    __select__ = NotificationHook.__select__ & hook.from_dbapi_query()
+    __select__ = NotificationHook.__select__ & hook.issued_from_user_query()
     events = ('before_update_entity',)
     skip_attrs = set()
 
@@ -200,7 +200,7 @@
 
 class SomethingChangedHook(NotificationHook):
     __regid__ = 'supervising'
-    __select__ = NotificationHook.__select__ & hook.from_dbapi_query()
+    __select__ = NotificationHook.__select__ & hook.issued_from_user_query()
     events = ('before_add_relation', 'before_delete_relation',
               'after_add_entity', 'before_update_entity')
 
--- a/hooks/syncschema.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/hooks/syncschema.py	Mon Jun 22 14:27:37 2015 +0200
@@ -29,7 +29,7 @@
 from copy import copy
 from yams.schema import (BASE_TYPES, BadSchemaDefinition,
                          RelationSchema, RelationDefinitionSchema)
-from yams import buildobjs as ybo, schema2sql as y2sql, convert_default_value
+from yams import buildobjs as ybo, convert_default_value
 
 from logilab.common.decorators import clear_cache
 
@@ -37,7 +37,7 @@
 from cubicweb.predicates import is_instance
 from cubicweb.schema import (SCHEMA_TYPES, META_RTYPES, VIRTUAL_RTYPES,
                              CONSTRAINTS, ETYPE_NAME_MAP, display_name)
-from cubicweb.server import hook, schemaserial as ss
+from cubicweb.server import hook, schemaserial as ss, schema2sql as y2sql
 from cubicweb.server.sqlutils import SQL_PREFIX
 from cubicweb.hooks.synccomputed import RecomputeAttributeOperation
 
@@ -72,7 +72,7 @@
     table = SQL_PREFIX + etype
     column = SQL_PREFIX + rtype
     try:
-        cnx.system_sql(str('ALTER TABLE %s ADD %s integer' % (table, column)),
+        cnx.system_sql(str('ALTER TABLE %s ADD %s integer REFERENCES entities (eid)' % (table, column)),
                        rollback_on_failure=False)
         cnx.info('added column %s to table %s', column, table)
     except Exception:
@@ -319,8 +319,12 @@
         if 'fulltext_container' in self.values:
             op = UpdateFTIndexOp.get_instance(cnx)
             for subjtype, objtype in rschema.rdefs:
-                op.add_data(subjtype)
-                op.add_data(objtype)
+                if self.values['fulltext_container'] == 'subject':
+                    op.add_data(subjtype)
+                    op.add_data(objtype)
+                else:
+                    op.add_data(objtype)
+                    op.add_data(subjtype)
         # update the in-memory schema first
         self.oldvalues = dict( (attr, getattr(rschema, attr)) for attr in self.values)
         self.rschema.__dict__.update(self.values)
@@ -1313,6 +1317,7 @@
     We wait until after the commit as the schema in memory is only updated
     after the commit.
     """
+    containercls = list
 
     def postcommit_event(self):
         cnx = self.cnx
--- a/hooks/test/unittest_hooks.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/hooks/test/unittest_hooks.py	Mon Jun 22 14:27:37 2015 +0200
@@ -146,20 +146,6 @@
 
 class UserGroupHooksTC(CubicWebTC):
 
-    def test_user_synchronization(self):
-        with self.admin_access.repo_cnx() as cnx:
-            self.create_user(cnx, 'toto', password='hop', commit=False)
-            self.assertRaises(AuthenticationError,
-                              self.repo.connect, u'toto', password='hop')
-            cnx.commit()
-            cnxid = self.repo.connect(u'toto', password='hop')
-            self.assertNotEqual(cnxid, cnx.sessionid)
-            cnx.execute('DELETE CWUser X WHERE X login "toto"')
-            self.repo.execute(cnxid, 'State X')
-            cnx.commit()
-            self.assertRaises(BadConnectionId,
-                              self.repo.execute, cnxid, 'State X')
-
     def test_user_group_synchronization(self):
         with self.admin_access.repo_cnx() as cnx:
             user = cnx.user
--- a/hooks/test/unittest_syncschema.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/hooks/test/unittest_syncschema.py	Mon Jun 22 14:27:37 2015 +0200
@@ -38,12 +38,11 @@
 
     def index_exists(self, cnx, etype, attr, unique=False):
         dbhelper = self.repo.system_source.dbhelper
-        with cnx.ensure_cnx_set:
-            sqlcursor = cnx.cnxset.cu
-            return dbhelper.index_exists(sqlcursor,
-                                         SQL_PREFIX + etype,
-                                         SQL_PREFIX + attr,
-                                         unique=unique)
+        sqlcursor = cnx.cnxset.cu
+        return dbhelper.index_exists(sqlcursor,
+                                     SQL_PREFIX + etype,
+                                     SQL_PREFIX + attr,
+                                     unique=unique)
 
     def _set_perms(self, cnx, eid):
         cnx.execute('SET X read_permission G WHERE X eid %(x)s, G is CWGroup',
--- a/hooks/zmq.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/hooks/zmq.py	Mon Jun 22 14:27:37 2015 +0200
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# copyright 2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -50,30 +50,3 @@
         self.repo.app_instances_bus.start()
 
 
-class ZMQRepositoryServerStopHook(hook.Hook):
-    __regid__ = 'zmqrepositoryserverstop'
-    events = ('server_shutdown',)
-
-    def __call__(self):
-        server = getattr(self.repo, 'zmq_repo_server', None)
-        if server:
-            self.repo.zmq_repo_server.quit()
-
-class ZMQRepositoryServerStartHook(hook.Hook):
-    __regid__ = 'zmqrepositoryserverstart'
-    events = ('server_startup',)
-
-    def __call__(self):
-        config = self.repo.config
-        if config.name == 'repository':
-            # start-repository command already starts a zmq repo
-            return
-        address = config.get('zmq-repository-address')
-        if not address:
-            return
-        self.repo.warning('remote access to the repository via zmq/pickle is deprecated')
-        from cubicweb.server import cwzmq
-        self.repo.zmq_repo_server = server = cwzmq.ZMQRepositoryServer(self.repo)
-        server.connect(address)
-        self.repo.threaded_task(server.run)
-
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/misc/migration/3.21.0_Any.py	Mon Jun 22 14:27:37 2015 +0200
@@ -0,0 +1,23 @@
+
+helper = repo.system_source.dbhelper
+sql('DROP INDEX entities_extid_idx')
+sql(helper.sql_create_index('entities', 'extid', True))
+
+sql('''
+CREATE TABLE moved_entities (
+  eid INTEGER PRIMARY KEY NOT NULL,
+  extid VARCHAR(256) UNIQUE
+)
+''')
+
+moved_entities = sql('SELECT -eid, extid FROM entities WHERE eid < 0')
+cu = session.cnxset.cu
+cu.executemany('INSERT INTO moved_entities (eid, extid) VALUES (%s, %s)',
+               moved_entities)
+sql('DELETE FROM entities WHERE eid < 0')
+
+commit()
+
+sync_schema_props_perms('CWEType')
+
+sync_schema_props_perms('cwuri')
--- a/misc/migration/bootstrapmigration_repository.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/misc/migration/bootstrapmigration_repository.py	Mon Jun 22 14:27:37 2015 +0200
@@ -57,7 +57,7 @@
     commit()
 
 if applcubicwebversion <= (3, 14, 4) and cubicwebversion >= (3, 14, 4):
-    from yams import schema2sql as y2sql
+    from cubicweb.server import schema2sql as y2sql
     dbhelper = repo.system_source.dbhelper
     rdefdef = schema['CWSource'].rdef('name')
     attrtype = y2sql.type_from_constraints(dbhelper, rdefdef.object, rdefdef.constraints).split()[0]
--- a/misc/scripts/ldapuser2ldapfeed.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/misc/scripts/ldapuser2ldapfeed.py	Mon Jun 22 14:27:37 2015 +0200
@@ -31,8 +31,6 @@
 from cubicweb.server.edition import EditedEntity
 
 
-session.mode = 'write' # hold on the connections set
-
 print '******************** backport entity content ***************************'
 
 todelete = defaultdict(list)
--- a/misc/scripts/pyroforge2datafeed.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/misc/scripts/pyroforge2datafeed.py	Mon Jun 22 14:27:37 2015 +0200
@@ -39,8 +39,6 @@
         ))
 
 
-session.mode = 'write' # hold on the connections set
-
 print '******************** backport entity content ***************************'
 
 from cubicweb.server import debugged
--- a/repoapi.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/repoapi.py	Mon Jun 22 14:27:37 2015 +0200
@@ -1,4 +1,4 @@
-# copyright 2013-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2013-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -17,14 +17,12 @@
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """Official API to access the content of a repository
 """
-from logilab.common.deprecation import deprecated
+from logilab.common.deprecation import class_deprecated
 
 from cubicweb.utils import parse_repo_uri
-from cubicweb import ConnectionError, ProgrammingError, AuthenticationError
-from uuid import uuid4
-from contextlib import contextmanager
-from cubicweb.req import RequestSessionBase
-from functools import wraps
+from cubicweb import ConnectionError, AuthenticationError
+from cubicweb.server.session import Connection
+
 
 ### private function for specific method ############################
 
@@ -41,7 +39,7 @@
     loading the repository for a client, eg web server, configuration).
 
     The returned repository may be an in-memory repository or a proxy object
-    using a specific RPC method, depending on the given URI (pyro or zmq).
+    using a specific RPC method, depending on the given URI.
     """
     if uri is None:
         return _get_inmemory_repo(config, vreg)
@@ -52,49 +50,20 @@
         # me may have been called with a dummy 'inmemory://' uri ...
         return _get_inmemory_repo(config, vreg)
 
-    if protocol == 'pyroloc':  # direct connection to the instance
-        from logilab.common.pyro_ext import get_proxy
-        uri = uri.replace('pyroloc', 'PYRO')
-        return get_proxy(uri)
-
-    if protocol == 'pyro':  # connection mediated through the pyro ns
-        from logilab.common.pyro_ext import ns_get_proxy
-        path = appid.strip('/')
-        if not path:
-            raise ConnectionError(
-                "can't find instance name in %s (expected to be the path component)"
-                % uri)
-        if '.' in path:
-            nsgroup, nsid = path.rsplit('.', 1)
-        else:
-            nsgroup = 'cubicweb'
-            nsid = path
-        return ns_get_proxy(nsid, defaultnsgroup=nsgroup, nshost=hostport)
-
-    if protocol.startswith('zmqpickle-'):
-        from cubicweb.zmqclient import ZMQRepositoryClient
-        return ZMQRepositoryClient(uri)
-    else:
-        raise ConnectionError('unknown protocol: `%s`' % protocol)
+    raise ConnectionError('unknown protocol: `%s`' % protocol)
 
 def connect(repo, login, **kwargs):
-    """Take credential and return associated ClientConnection.
-
-    The ClientConnection is associated to a new Session object that will be
-    closed when the ClientConnection is closed.
+    """Take credential and return associated Connection.
 
     raise AuthenticationError if the credential are invalid."""
     sessionid = repo.connect(login, **kwargs)
     session = repo._get_session(sessionid)
     # XXX the autoclose_session should probably be handle on the session directly
     # this is something to consider once we have proper server side Connection.
-    return ClientConnection(session, autoclose_session=True)
+    return Connection(session)
 
 def anonymous_cnx(repo):
-    """return a ClientConnection for Anonymous user.
-
-    The ClientConnection is associated to a new Session object that will be
-    closed when the ClientConnection is closed.
+    """return a Connection for Anonymous user.
 
     raises an AuthenticationError if anonymous usage is not allowed
     """
@@ -105,292 +74,7 @@
     # use vreg's repository cache
     return connect(repo, anon_login, password=anon_password)
 
-def _srv_cnx_func(name):
-    """Decorate ClientConnection method blindly forward to Connection
-    THIS TRANSITIONAL PURPOSE
 
-    will be dropped when we have standalone connection"""
-    def proxy(clt_cnx, *args, **kwargs):
-        # the ``with`` dance is transitional. We do not have Standalone
-        # Connection yet so we use this trick to unsure the session have the
-        # proper cnx loaded. This can be simplified one we have Standalone
-        # Connection object
-        if not clt_cnx._open:
-            raise ProgrammingError('Closed client connection')
-        return getattr(clt_cnx._cnx, name)(*args, **kwargs)
-    return proxy
-
-def _open_only(func):
-    """decorator for ClientConnection method that check it is open"""
-    @wraps(func)
-    def check_open(clt_cnx, *args, **kwargs):
-        if not clt_cnx._open:
-            raise ProgrammingError('Closed client connection')
-        return func(clt_cnx, *args, **kwargs)
-    return check_open
-
-
-class ClientConnection(RequestSessionBase):
-    """A Connection object to be used Client side.
-
-    This object is aimed to be used client side (so potential communication
-    with the repo through RPC) and aims to offer some compatibility with the
-    cubicweb.dbapi.Connection interface.
-
-    The autoclose_session parameter informs the connection that this session
-    has been opened explicitly and only for this client connection. The
-    connection will close the session on exit.
-    """
-    # make exceptions available through the connection object
-    ProgrammingError = ProgrammingError
-    # attributes that may be overriden per connection instance
-    anonymous_connection = False # XXX really needed ?
-    is_repo_in_memory = True # BC, always true
-
-    def __init__(self, session, autoclose_session=False):
-        super(ClientConnection, self).__init__(session.vreg)
-        self._session = session # XXX there is no real reason to keep the
-                                # session around function still using it should
-                                # be rewritten and migrated.
-        self._cnx = None
-        self._open = None
-        self._web_request = False
-        #: cache entities built during the connection
-        self._eid_cache = {}
-        self._set_user(session.user)
-        self._autoclose_session = autoclose_session
-
-    def __enter__(self):
-        assert self._open is None
-        self._open = True
-        self._cnx = self._session.new_cnx()
-        self._cnx.__enter__()
-        self._cnx.ctx_count += 1
-        return self
-
-    def __exit__(self, exc_type, exc_val, exc_tb):
-        self._open = False
-        self._cnx.ctx_count -= 1
-        self._cnx.__exit__(exc_type, exc_val, exc_tb)
-        self._cnx = None
-        if self._autoclose_session:
-            # we have to call repo.close to ensure the repo properly forgets the
-            # session; calling session.close() is not enough :-(
-            self._session.repo.close(self._session.sessionid)
-
-
-    # begin silly BC
-    @property
-    def _closed(self):
-        return not self._open
-
-    def close(self):
-        if self._open:
-            self.__exit__(None, None, None)
-
-    def __repr__(self):
-        # XXX we probably want to reference the user of the session here
-        if self._open is None:
-            return '<ClientConnection (not open yet)>'
-        elif not self._open:
-            return '<ClientConnection (closed)>'
-        elif self.anonymous_connection:
-            return '<ClientConnection %s (anonymous)>' % self._cnx.connectionid
-        else:
-            return '<ClientConnection %s>' % self._cnx.connectionid
-    # end silly BC
-
-    # Main Connection purpose in life #########################################
-
-    call_service = _srv_cnx_func('call_service')
-
-    @_open_only
-    def execute(self, *args, **kwargs):
-        # the ``with`` dance is transitional. We do not have Standalone
-        # Connection yet so we use this trick to unsure the session have the
-        # proper cnx loaded. This can be simplified one we have Standalone
-        # Connection object
-        rset = self._cnx.execute(*args, **kwargs)
-        rset.req = self
-        return rset
-
-    @_open_only
-    def commit(self, *args, **kwargs):
-        try:
-            return self._cnx.commit(*args, **kwargs)
-        finally:
-            self.drop_entity_cache()
-
-    @_open_only
-    def rollback(self, *args, **kwargs):
-        try:
-            return self._cnx.rollback(*args, **kwargs)
-        finally:
-            self.drop_entity_cache()
-
-    # security #################################################################
-
-    allow_all_hooks_but = _srv_cnx_func('allow_all_hooks_but')
-    deny_all_hooks_but = _srv_cnx_func('deny_all_hooks_but')
-    security_enabled = _srv_cnx_func('security_enabled')
-
-    # direct sql ###############################################################
-
-    system_sql = _srv_cnx_func('system_sql')
-
-    # session data methods #####################################################
-
-    get_shared_data = _srv_cnx_func('get_shared_data')
-    set_shared_data = _srv_cnx_func('set_shared_data')
-
-    @property
-    def transaction_data(self):
-        return self._cnx.transaction_data
-
-    # meta-data accessors ######################################################
-
-    @_open_only
-    def source_defs(self):
-        """Return the definition of sources used by the repository."""
-        return self._session.repo.source_defs()
-
-    @_open_only
-    def get_schema(self):
-        """Return the schema currently used by the repository."""
-        return self._session.repo.source_defs()
-
-    @_open_only
-    def get_option_value(self, option):
-        """Return the value for `option` in the configuration."""
-        return self._session.repo.get_option_value(option)
-
-    entity_metas = _srv_cnx_func('entity_metas')
-    describe = _srv_cnx_func('describe') # XXX deprecated in 3.19
-
-    # undo support ############################################################
-
-    @_open_only
-    def undoable_transactions(self, ueid=None, req=None, **actionfilters):
-        """Return a list of undoable transaction objects by the connection's
-        user, ordered by descendant transaction time.
-
-        Managers may filter according to user (eid) who has done the transaction
-        using the `ueid` argument. Others will only see their own transactions.
-
-        Additional filtering capabilities is provided by using the following
-        named arguments:
-
-        * `etype` to get only transactions creating/updating/deleting entities
-          of the given type
-
-        * `eid` to get only transactions applied to entity of the given eid
-
-        * `action` to get only transactions doing the given action (action in
-          'C', 'U', 'D', 'A', 'R'). If `etype`, action can only be 'C', 'U' or
-          'D'.
-
-        * `public`: when additional filtering is provided, their are by default
-          only searched in 'public' actions, unless a `public` argument is given
-          and set to false.
-        """
-        # the ``with`` dance is transitional. We do not have Standalone
-        # Connection yet so we use this trick to unsure the session have the
-        # proper cnx loaded. This can be simplified one we have Standalone
-        # Connection object
-        source = self._cnx.repo.system_source
-        txinfos = source.undoable_transactions(self._cnx, ueid, **actionfilters)
-        for txinfo in txinfos:
-            txinfo.req = req or self  # XXX mostly wrong
-        return txinfos
-
-    @_open_only
-    def transaction_info(self, txuuid, req=None):
-        """Return transaction object for the given uid.
-
-        raise `NoSuchTransaction` if not found or if session's user is not
-        allowed (eg not in managers group and the transaction doesn't belong to
-        him).
-        """
-        # the ``with`` dance is transitional. We do not have Standalone
-        # Connection yet so we use this trick to unsure the session have the
-        # proper cnx loaded. This can be simplified one we have Standalone
-        # Connection object
-        txinfo = self._cnx.repo.system_source.tx_info(self._cnx, txuuid)
-        if req:
-            txinfo.req = req
-        else:
-            txinfo.cnx = self
-        return txinfo
-
-    @_open_only
-    def transaction_actions(self, txuuid, public=True):
-        """Return an ordered list of action effectued during that transaction.
-
-        If public is true, return only 'public' actions, eg not ones triggered
-        under the cover by hooks, else return all actions.
-
-        raise `NoSuchTransaction` if the transaction is not found or if
-        session's user is not allowed (eg not in managers group and the
-        transaction doesn't belong to him).
-        """
-        # the ``with`` dance is transitional. We do not have a Standalone
-        # Connection yet so we use this trick to ensure the session has the
-        # proper cnx loaded. This can be simplified once we have a Standalone
-        # Connection object
-        return self._cnx.repo.system_source.tx_actions(self._cnx, txuuid, public)
-
-    @_open_only
-    def undo_transaction(self, txuuid):
-        """Undo the given transaction. Return potential restoration errors.
-
-        raise `NoSuchTransaction` if not found or if session's user is not
-        allowed (eg not in managers group and the transaction doesn't belong to
-        him).
-        """
-        # the ``with`` dance is transitional. We do not have a Standalone
-        # Connection yet so we use this trick to ensure the session has the
-        # proper cnx loaded. This can be simplified once we have a Standalone
-        # Connection object
-        return self._cnx.repo.system_source.undo_transaction(self._cnx, txuuid)
-
-    # cache management
-
-    def entity_cache(self, eid):
-        return self._eid_cache[eid]
-
-    def set_entity_cache(self, entity):
-        self._eid_cache[entity.eid] = entity
-
-    def cached_entities(self):
-        return self._eid_cache.values()
-
-    def drop_entity_cache(self, eid=None):
-        if eid is None:
-            self._eid_cache = {}
-        else:
-            del self._eid_cache[eid]
-
-    # deprecated stuff
-
-    @deprecated('[3.19] This is a repoapi.ClientConnection object not a dbapi one')
-    def request(self):
-        return self
-
-    @deprecated('[3.19] This is a repoapi.ClientConnection object not a dbapi one')
-    def cursor(self):
-        return self
-
-    @property
-    @deprecated('[3.19] This is a repoapi.ClientConnection object not a dbapi one')
-    def sessionid(self):
-        return self._session.sessionid
-
-    @property
-    @deprecated('[3.19] This is a repoapi.ClientConnection object not a dbapi one')
-    def connection(self):
-        return self
-
-    @property
-    @deprecated('[3.19] This is a repoapi.ClientConnection object not a dbapi one')
-    def _repo(self):
-        return self._session.repo
+class ClientConnection(Connection):
+    __metaclass__ = class_deprecated
+    __deprecation_warning__ = '[3.20] %(cls)s is deprecated, use Connection instead'
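The replacement class relies on ``class_deprecated`` (from ``logilab.common.deprecation``) so that code still instantiating ``ClientConnection`` keeps working while being warned with the ``[3.20]`` message above. A minimal sketch of a comparable warn-on-instantiation pattern, using only the standard library and hypothetical names rather than Logilab's helper:

    # Illustration only: a stand-in for the metaclass-based deprecation shim used above.
    import warnings

    class _DeprecatedMeta(type):
        def __call__(cls, *args, **kwargs):
            # warn each time the deprecated class is instantiated
            warnings.warn(cls.__deprecation_warning__ % {'cls': cls.__name__},
                          DeprecationWarning, stacklevel=2)
            return super(_DeprecatedMeta, cls).__call__(*args, **kwargs)

    class Connection(object):
        pass

    class ClientConnection(Connection):
        __metaclass__ = _DeprecatedMeta   # Python 2 metaclass syntax, as in the diff
        __deprecation_warning__ = '[3.20] %(cls)s is deprecated, use Connection instead'

    ClientConnection()  # issues the DeprecationWarning, then behaves as a plain Connection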
--- a/req.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/req.py	Mon Jun 22 14:27:37 2015 +0200
@@ -81,7 +81,6 @@
         A special method is needed to ensure the linked user is linked to the
         connection too.
         """
-        # cnx validity is checked by the call to .user_info
         rset = self.eid_rset(orig_user.eid, 'CWUser')
         user_cls = self.vreg['etypes'].etype_class('CWUser')
         user = user_cls(self, rset, row=0, groups=orig_user.groups,
@@ -357,7 +356,7 @@
         for key, val in sorted(newparams.iteritems()):
             query[key] = (self.url_quote(val),)
         query = '&'.join(u'%s=%s' % (param, value)
-                         for param, values in query.items()
+                         for param, values in sorted(query.items())
                          for value in values)
         return urlunsplit((schema, netloc, path, query, fragment))
 
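The ``sorted(query.items())`` change above makes the rebuilt query string deterministic instead of depending on dict iteration order. A minimal standalone sketch of the same joining logic, with a hypothetical helper name and a plain dict standing in for the request's parsed query:

    def build_query_string(params):
        """Join {name: (value, ...)} pairs into a stable query string."""
        return '&'.join('%s=%s' % (name, value)
                        for name, values in sorted(params.items())
                        for value in values)

    assert build_query_string({'b': ('2',), 'a': ('1', '3')}) == 'a=1&a=3&b=2'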
--- a/rqlrewrite.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/rqlrewrite.py	Mon Jun 22 14:27:37 2015 +0200
@@ -89,7 +89,7 @@
                 mytyperel.r_type = 'is'
                 if len(possibletypes) > 1:
                     node = n.Function('IN')
-                    for etype in possibletypes:
+                    for etype in sorted(possibletypes):
                         node.append(n.Constant(etype, 'etype'))
                 else:
                     etype = iter(possibletypes).next()
--- a/rset.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/rset.py	Mon Jun 22 14:27:37 2015 +0200
@@ -21,13 +21,16 @@
 
 from warnings import warn
 
+from logilab.common import nullobject
 from logilab.common.decorators import cached, clear_cache, copy_cache
-
 from rql import nodes, stmts
 
 from cubicweb import NotAnEntity, NoResultError, MultipleResultsError
 
 
+_MARKER = nullobject()
+
+
 class ResultSet(object):
     """A result set wraps a RQL query result. This object implements
     partially the list protocol to allow direct use as a list of
@@ -362,12 +365,14 @@
         rset.limited = (limit, offset)
         return rset
 
-    def printable_rql(self, encoded=False):
+    def printable_rql(self, encoded=_MARKER):
         """return the result set's origin rql as a string, with arguments
         substituted
         """
+        if encoded is not _MARKER:
+            warn('[3.21] the "encoded" argument is deprecated', DeprecationWarning)
         encoding = self.req.encoding
-        rqlstr = self.syntax_tree().as_string(encoding, self.args)
+        rqlstr = self.syntax_tree().as_string(kwargs=self.args)
         # sounds like we get encoded or unicode string due to a bug in as_string
         if not encoded:
             if isinstance(rqlstr, unicode):
@@ -478,6 +483,7 @@
         #     new attributes found in this resultset ?
         try:
             entity = req.entity_cache(eid)
+            entity._cw = req
         except KeyError:
             pass
         else:
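The ``_MARKER = nullobject()`` sentinel introduced at the top of rset.py lets ``printable_rql`` tell "argument not passed" apart from any real value, so the deprecation warning fires only for callers that still pass ``encoded``. A standalone sketch of that sentinel-default pattern, using a plain ``object()`` and a placeholder return value:

    import warnings

    _MARKER = object()  # unique sentinel: no caller-supplied value can be identical to it

    def printable_rql(encoded=_MARKER):
        if encoded is not _MARKER:
            # only reached when the caller explicitly passed the deprecated argument
            warnings.warn('[3.21] the "encoded" argument is deprecated',
                          DeprecationWarning, stacklevel=2)
        return u'Any X WHERE X is CWUser'  # placeholder for the real RQL string

    printable_rql()              # silent
    printable_rql(encoded=True)  # triggers the DeprecationWarning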
--- a/schemas/base.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/schemas/base.py	Mon Jun 22 14:27:37 2015 +0200
@@ -23,7 +23,7 @@
 from yams.buildobjs import (EntityType, RelationType, RelationDefinition,
                             SubjectRelation,
                             String, TZDatetime, Datetime, Password, Interval,
-                            Boolean)
+                            Boolean, UniqueConstraint)
 from cubicweb.schema import (
     RQLConstraint, WorkflowableEntityType, ERQLExpression, RRQLExpression,
     PUB_SYSTEM_ENTITY_PERMS, PUB_SYSTEM_REL_PERMS, PUB_SYSTEM_ATTR_PERMS,
@@ -165,6 +165,7 @@
     cardinality = '11'
     subject = '*'
     object = 'String'
+    constraints = [UniqueConstraint()]
 
 
 # XXX find a better relation name
@@ -184,7 +185,6 @@
     cardinality = '?*'
 
 
-
 class ExternalUri(EntityType):
     """a URI representing an object in external data store"""
     uri = String(required=True, unique=True, maxsize=256,
--- a/schemas/bootstrap.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/schemas/bootstrap.py	Mon Jun 22 14:27:37 2015 +0200
@@ -38,7 +38,7 @@
     description = RichString(internationalizable=True,
                              description=_('semantic description of this entity type'))
     # necessary to filter using RQL
-    final = Boolean(description=_('automatic'))
+    final = Boolean(default=False, description=_('automatic'))
 
 
 class CWRType(EntityType):
--- a/server/checkintegrity.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/server/checkintegrity.py	Mon Jun 22 14:27:37 2015 +0200
@@ -88,11 +88,10 @@
     # to be updated due to the reindexation
     repo = cnx.repo
     dbhelper = repo.system_source.dbhelper
-    with cnx.ensure_cnx_set:
-        cursor = cnx.cnxset.cu
-        if not dbhelper.has_fti_table(cursor):
-            print 'no text index table'
-            dbhelper.init_fti(cursor)
+    cursor = cnx.cnxset.cu
+    if not dbhelper.has_fti_table(cursor):
+        print 'no text index table'
+        dbhelper.init_fti(cursor)
     repo.system_source.do_fti = True  # ensure full-text indexation is activated
     if etypes is None:
         print 'Reindexing entities'
@@ -400,8 +399,7 @@
         with cnx.security_enabled(read=False, write=False): # ensure no read security
             for check in checks:
                 check_func = globals()['check_%s' % check]
-                with cnx.ensure_cnx_set:
-                    check_func(repo.schema, cnx, eids_cache, fix=fix)
+                check_func(repo.schema, cnx, eids_cache, fix=fix)
         if fix:
             cnx.commit()
         else:
@@ -410,6 +408,5 @@
             print 'WARNING: Diagnostic run, nothing has been corrected'
     if reindex:
         cnx.rollback()
-        with cnx.ensure_cnx_set:
-            reindex_entities(repo.schema, cnx, withpb=withpb)
+        reindex_entities(repo.schema, cnx, withpb=withpb)
         cnx.commit()
--- a/server/cwzmq.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/server/cwzmq.py	Mon Jun 22 14:27:37 2015 +0200
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# copyright 2012-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2012-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -17,8 +17,6 @@
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 
-import cPickle
-import traceback
 from threading import Thread
 from logging import getLogger
 
@@ -27,16 +25,10 @@
 import zmq.eventloop.zmqstream
 
 from cubicweb import set_log_methods
-from cubicweb.server.server import QuitEvent, Finished
+
 
 ctx = zmq.Context()
 
-def cwproto_to_zmqaddr(address):
-    """ converts a cw-zmq address (like zmqpickle-tcp://<ip>:<port>)
-    into a proper zmq address (tcp://<ip>:<port>)
-    """
-    assert address.startswith('zmqpickle-'), 'bad protocol string %s' % address
-    return address.split('-', 1)[1] # chop the `zmqpickle-` prefix
 
 class ZMQComm(object):
     """
@@ -134,132 +126,5 @@
         self.ioloop.add_callback(lambda: self.stream.setsockopt(zmq.SUBSCRIBE, topic))
 
 
-class ZMQRepositoryServer(object):
-
-    def __init__(self, repository):
-        """make the repository available as a PyRO object"""
-        self.address = None
-        self.repo = repository
-        self.socket = None
-        self.stream = None
-        self.loop = ioloop.IOLoop()
-
-        # event queue
-        self.events = []
-
-    def connect(self, address):
-        self.address = cwproto_to_zmqaddr(address)
-
-    def run(self):
-        """enter the service loop"""
-        # start repository looping tasks
-        self.socket = ctx.socket(zmq.REP)
-        self.stream = zmq.eventloop.zmqstream.ZMQStream(self.socket, io_loop=self.loop)
-        self.stream.bind(self.address)
-        self.info('ZMQ server bound on: %s', self.address)
-
-        self.stream.on_recv(self.process_cmds)
-
-        try:
-            self.loop.start()
-        except zmq.ZMQError:
-            self.warning('ZMQ event loop killed')
-        self.quit()
-
-    def trigger_events(self):
-        """trigger ready events"""
-        for event in self.events[:]:
-            if event.is_ready():
-                self.info('starting event %s', event)
-                event.fire(self)
-                try:
-                    event.update()
-                except Finished:
-                    self.events.remove(event)
-
-    def process_cmd(self, cmd):
-        """Delegate the given command to the repository.
-
-        ``cmd`` is a list of (method_name, args, kwargs)
-        where ``args`` is a list of positional arguments
-        and ``kwargs`` is a dictionary of named arguments.
-
-        >>> rset = delegate_to_repo(["execute", [sessionid], {'rql': rql}])
-
-        :note1: ``kwargs`` may be omitted
-
-            >>> rset = delegate_to_repo(["execute", [sessionid, rql]])
-
-        :note2: both ``args`` and ``kwargs`` may be omitted
-
-            >>> schema = delegate_to_repo(["get_schema"])
-            >>> schema = delegate_to_repo("get_schema") # also allowed
-
-        """
-        cmd = cPickle.loads(cmd)
-        if not cmd:
-            raise AttributeError('function name required')
-        if isinstance(cmd, basestring):
-            cmd = [cmd]
-        if len(cmd) < 2:
-            cmd.append(())
-        if len(cmd) < 3:
-            cmd.append({})
-        cmd  = list(cmd) + [(), {}]
-        funcname, args, kwargs = cmd[:3]
-        result = getattr(self.repo, funcname)(*args, **kwargs)
-        return result
-
-    def process_cmds(self, cmds):
-        """Callback intended to be used with ``on_recv``.
-
-        Call ``delegate_to_repo`` on each command and send a pickled version of
-        each result recursively.
-
-        Any exceptions are caught, pickled and sent.
-        """
-        try:
-            for cmd in cmds:
-                result = self.process_cmd(cmd)
-                self.send_data(result)
-        except Exception as exc:
-            traceback.print_exc()
-            self.send_data(exc)
-
-    def send_data(self, data):
-        self.socket.send_pyobj(data)
-
-    def quit(self, shutdown_repo=False):
-        """stop the server"""
-        self.info('Quitting ZMQ server')
-        try:
-            self.loop.add_callback(self.loop.stop)
-            self.stream.on_recv(None)
-            self.stream.close()
-        except Exception as e:
-            print e
-            pass
-        if shutdown_repo and not self.repo.shutting_down:
-            event = QuitEvent()
-            event.fire(self)
-
-    # server utilities ######################################################
-
-    def install_sig_handlers(self):
-        """install signal handlers"""
-        import signal
-        self.info('installing signal handlers')
-        signal.signal(signal.SIGINT, lambda x, y, s=self: s.quit(shutdown_repo=True))
-        signal.signal(signal.SIGTERM, lambda x, y, s=self: s.quit(shutdown_repo=True))
-
-
-    # these are overridden by set_log_methods below
-    # only defining here to prevent pylint from complaining
-    @classmethod
-    def info(cls, msg, *a, **kw):
-        pass
-
-
 set_log_methods(Publisher, getLogger('cubicweb.zmq.pub'))
 set_log_methods(Subscriber, getLogger('cubicweb.zmq.sub'))
-set_log_methods(ZMQRepositoryServer, getLogger('cubicweb.zmq.repo'))
--- a/server/edition.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/server/edition.py	Mon Jun 22 14:27:37 2015 +0200
@@ -103,8 +103,6 @@
         assert not self.saved, 'too late to modify edited attributes'
         super(EditedEntity, self).__setitem__(attr, value)
         self.entity.cw_attr_cache[attr] = value
-        # mark attribute as needing purge by the client
-        self.entity._cw_dont_cache_attribute(attr)
 
     def oldnewvalue(self, attr):
         """returns the couple (old attr value, new attr value)
--- a/server/hook.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/server/hook.py	Mon Jun 22 14:27:37 2015 +0200
@@ -320,6 +320,7 @@
                 eids_from_to = []
             pruned = self.get_pruned_hooks(cnx, event,
                                            entities, eids_from_to, kwargs)
+
             # by default, hooks are executed with security turned off
             with cnx.security_enabled(read=False):
                 for _kwargs in _iter_kwargs(entities, eids_from_to, kwargs):
@@ -327,10 +328,11 @@
                                    key=lambda x: x.order)
                     debug = server.DEBUG & server.DBG_HOOKS
                     with cnx.security_enabled(write=False):
-                        for hook in hooks:
-                            if debug:
-                                print event, _kwargs, hook
-                            hook()
+                        with cnx.running_hooks_ops():
+                            for hook in hooks:
+                                if debug:
+                                    print event, _kwargs, hook
+                                hook()
 
     def get_pruned_hooks(self, cnx, event, entities, eids_from_to, kwargs):
         """return a set of hooks that should not be considered by filtered_possible objects
@@ -425,10 +427,13 @@
     return req.is_hook_activated(cls)
 
 @objectify_predicate
-def from_dbapi_query(cls, req, **kwargs):
-    if req.running_dbapi_query:
-        return 1
-    return 0
+def issued_from_user_query(cls, req, **kwargs):
+    return 0 if req.hooks_in_progress else 1
+
+from_dbapi_query = class_renamed('from_dbapi_query',
+                                 issued_from_user_query,
+                                 message='[3.21] ')
+
 
 class rechain(object):
     def __init__(self, *iterators):
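With hook execution now wrapped in ``cnx.running_hooks_ops()``, the renamed ``issued_from_user_query`` predicate simply scores 1 when no hook is in progress and 0 otherwise, while ``class_renamed`` keeps the old ``from_dbapi_query`` name working with a deprecation message. A sketch of the scoring logic in isolation, with a hypothetical request stand-in:

    class FakeRequest(object):
        """Stand-in exposing only the hooks_in_progress counter used by the predicate."""
        def __init__(self, hooks_in_progress):
            self.hooks_in_progress = hooks_in_progress

    def issued_from_user_query_score(req):
        # mirrors the predicate body added above: select only queries issued
        # directly by user code, not queries triggered from within running hooks
        return 0 if req.hooks_in_progress else 1

    assert issued_from_user_query_score(FakeRequest(0)) == 1
    assert issued_from_user_query_score(FakeRequest(2)) == 0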
--- a/server/migractions.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/server/migractions.py	Mon Jun 22 14:27:37 2015 +0200
@@ -44,7 +44,6 @@
 from logilab.common.decorators import cached, clear_cache
 
 from yams.constraints import SizeConstraint
-from yams.schema2sql import eschema2sql, rschema2sql, unique_index_name
 from yams.schema import RelationDefinitionSchema
 
 from cubicweb import CW_SOFTWARE_ROOT, AuthenticationError, ExecutionError
@@ -56,6 +55,7 @@
 from cubicweb import repoapi
 from cubicweb.migration import MigrationHelper, yes
 from cubicweb.server import hook, schemaserial as ss
+from cubicweb.server.schema2sql import eschema2sql, rschema2sql, unique_index_name
 from cubicweb.server.utils import manager_userpasswd
 from cubicweb.server.sqlutils import sqlexec, SQL_PREFIX
 
@@ -94,10 +94,10 @@
             assert repo
             self.cnx = cnx
             self.repo = repo
-            self.session = cnx._session
+            self.session = cnx.session
         elif connect:
             self.repo_connect()
-            self.set_session()
+            self.set_cnx()
         else:
             self.session = None
         # no config on shell to a remote instance
@@ -125,7 +125,9 @@
         self.fs_schema = schema
         self._synchronized = set()
 
-    def set_session(self):
+    # overridden from base MigrationHelper ######################################
+
+    def set_cnx(self):
         try:
             login = self.repo.config.default_admin_config['login']
             pwd = self.repo.config.default_admin_config['password']
@@ -149,9 +151,7 @@
                 print 'aborting...'
                 sys.exit(0)
         self.session = self.repo._get_session(self.cnx.sessionid)
-        self.session.keep_cnxset_mode('transaction')
 
-    # overridden from base MigrationHelper ######################################
 
     @cached
     def repo_connect(self):
@@ -178,15 +178,14 @@
             super(ServerMigrationHelper, self).migrate(vcconf, toupgrade, options)
 
     def cmd_process_script(self, migrscript, funcname=None, *args, **kwargs):
-        with self.cnx._cnx.ensure_cnx_set:
-            try:
-                return super(ServerMigrationHelper, self).cmd_process_script(
-                      migrscript, funcname, *args, **kwargs)
-            except ExecutionError as err:
-                sys.stderr.write("-> %s\n" % err)
-            except BaseException:
-                self.rollback()
-                raise
+        try:
+            return super(ServerMigrationHelper, self).cmd_process_script(
+                  migrscript, funcname, *args, **kwargs)
+        except ExecutionError as err:
+            sys.stderr.write("-> %s\n" % err)
+        except BaseException:
+            self.rollback()
+            raise
 
     # Adjust docstring
     cmd_process_script.__doc__ = MigrationHelper.cmd_process_script.__doc__
@@ -287,12 +286,10 @@
         print '-> database restored.'
 
     def commit(self):
-        if hasattr(self, 'cnx'):
-            self.cnx.commit(free_cnxset=False)
+        self.cnx.commit()
 
     def rollback(self):
-        if hasattr(self, 'cnx'):
-            self.cnx.rollback(free_cnxset=False)
+        self.cnx.rollback()
 
     def rqlexecall(self, rqliter, ask_confirm=False):
         for rql, kwargs in rqliter:
@@ -310,7 +307,7 @@
                         'schema': self.repo.get_schema(),
                         'cnx': self.cnx,
                         'fsschema': self.fs_schema,
-                        'session' : self.cnx._cnx,
+                        'session' : self.cnx,
                         'repo' : self.repo,
                         })
         return context
@@ -958,7 +955,6 @@
                              % (rtype, new.eid, oldeid), ask_confirm=False)
             # delete relations using SQL to avoid relations content removal
             # triggered by schema synchronization hooks.
-            session = self.session
             for rdeftype in ('CWRelation', 'CWAttribute'):
                 thispending = set( (eid for eid, in self.sqlexec(
                     'SELECT cw_eid FROM cw_%s WHERE cw_from_entity=%%(eid)s OR '
@@ -968,10 +964,10 @@
                 # get some validation error on commit since integrity hooks
                 # may think some required relation is missing... This also ensure
                 # repository caches are properly cleanup
-                hook.CleanupDeletedEidsCacheOp.get_instance(session).union(thispending)
+                hook.CleanupDeletedEidsCacheOp.get_instance(self.cnx).union(thispending)
                 # and don't forget to remove record from system tables
                 entities = [self.cnx.entity_from_eid(eid, rdeftype) for eid in thispending]
-                self.repo.system_source.delete_info_multi(self.cnx._cnx, entities)
+                self.repo.system_source.delete_info_multi(self.cnx, entities)
                 self.sqlexec('DELETE FROM cw_%s WHERE cw_from_entity=%%(eid)s OR '
                              'cw_to_entity=%%(eid)s' % rdeftype,
                              {'eid': oldeid}, ask_confirm=False)
@@ -1392,7 +1388,7 @@
         indexable entity types
         """
         from cubicweb.server.checkintegrity import reindex_entities
-        reindex_entities(self.repo.schema, self.cnx._cnx, etypes=etypes)
+        reindex_entities(self.repo.schema, self.cnx, etypes=etypes)
 
     @contextmanager
     def cmd_dropped_constraints(self, etype, attrname, cstrtype=None,
@@ -1491,7 +1487,7 @@
         self.sqlexec(sql, ask_confirm=False)
         dbhelper = self.repo.system_source.dbhelper
         sqltype = dbhelper.TYPE_MAPPING[newtype]
-        cursor = self.cnx._cnx.cnxset.cu
+        cursor = self.cnx.cnxset.cu
         dbhelper.change_col_type(cursor, 'cw_%s'  % etype, 'cw_%s' % attr, sqltype, allownull)
         if commit:
             self.commit()
--- a/server/querier.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/server/querier.py	Mon Jun 22 14:27:37 2015 +0200
@@ -570,7 +570,7 @@
             except UnknownEid:
                 # we want queries such as "Any X WHERE X eid 9999" to return an
                 # empty result instead of raising UnknownEid
-                return empty_rset(rql, args, rqlst)
+                return empty_rset(rql, args)
             if args and rql not in self._rql_ck_cache:
                 self._rql_ck_cache[rql] = eidkeys
                 if eidkeys:
@@ -580,9 +580,6 @@
         if rqlst.TYPE != 'select':
             if cnx.read_security:
                 check_no_password_selected(rqlst)
-            # write query, ensure connection's mode is 'write' so connections
-            # won't be released until commit/rollback
-            cnx.mode = 'write'
             cachekey = None
         else:
             if cnx.read_security:
--- a/server/repository.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/server/repository.py	Mon Jun 22 14:27:37 2015 +0200
@@ -24,34 +24,29 @@
 * brings these classes all together to provide a single access
   point to a cubicweb instance.
 * handles session management
-* provides method for pyro registration, to call if pyro is enabled
 """
 __docformat__ = "restructuredtext en"
 
-import sys
 import threading
 import Queue
 from warnings import warn
 from itertools import chain
 from time import time, localtime, strftime
 from contextlib import contextmanager
-from warnings import warn
 
 from logilab.common.decorators import cached, clear_cache
 from logilab.common.deprecation import deprecated
 
 from yams import BadSchemaDefinition
-from rql import RQLSyntaxError
 from rql.utils import rqlvar_maker
 
 from cubicweb import (CW_MIGRATION_MAP, QueryError,
                       UnknownEid, AuthenticationError, ExecutionError,
-                      BadConnectionId, Unauthorized, ValidationError,
+                      BadConnectionId, ValidationError,
                       UniqueTogetherError, onevent)
 from cubicweb import cwvreg, schema, server
 from cubicweb.server import ShuttingDown, utils, hook, querier, sources
-from cubicweb.server.session import Session, InternalSession, InternalManager
-from cubicweb.server.ssplanner import EditedEntity
+from cubicweb.server.session import Session, InternalManager
 
 NO_CACHE_RELATIONS = set( [('owned_by', 'object'),
                            ('created_by', 'object'),
@@ -59,7 +54,7 @@
                            ])
 
 def prefill_entity_caches(entity):
-    session = entity._cw
+    cnx = entity._cw
     # prefill entity relation caches
     for rschema in entity.e_schema.subject_relations():
         rtype = str(rschema)
@@ -69,14 +64,14 @@
             entity.cw_attr_cache.setdefault(rtype, None)
         else:
             entity.cw_set_relation_cache(rtype, 'subject',
-                                         session.empty_rset())
+                                         cnx.empty_rset())
     for rschema in entity.e_schema.object_relations():
         rtype = str(rschema)
         if rtype in schema.VIRTUAL_RTYPES or (rtype, 'object') in NO_CACHE_RELATIONS:
             continue
-        entity.cw_set_relation_cache(rtype, 'object', session.empty_rset())
+        entity.cw_set_relation_cache(rtype, 'object', cnx.empty_rset())
 
-def del_existing_rel_if_needed(session, eidfrom, rtype, eidto):
+def del_existing_rel_if_needed(cnx, eidfrom, rtype, eidto):
     """delete existing relation when adding a new one if card is 1 or ?
 
     have to be done once the new relation has been inserted to avoid having
@@ -86,9 +81,9 @@
     hooks order hazardness
     """
     # skip that if integrity explicitly disabled
-    if not session.is_hook_category_activated('activeintegrity'):
+    if not cnx.is_hook_category_activated('activeintegrity'):
         return
-    rdef = session.rtype_eids_rdef(rtype, eidfrom, eidto)
+    rdef = cnx.rtype_eids_rdef(rtype, eidfrom, eidto)
     card = rdef.cardinality
     # one may be tempted to check for neweids but this may cause more than one
     # relation even with '1?' cardinality if those relations are added in the
@@ -102,34 +97,34 @@
     # * we don't want read permissions to be applied but we want delete
     #   permission to be checked
     if card[0] in '1?':
-        with session.security_enabled(read=False):
-            session.execute('DELETE X %s Y WHERE X eid %%(x)s, '
-                            'NOT Y eid %%(y)s' % rtype,
-                                {'x': eidfrom, 'y': eidto})
+        with cnx.security_enabled(read=False):
+            cnx.execute('DELETE X %s Y WHERE X eid %%(x)s, '
+                        'NOT Y eid %%(y)s' % rtype,
+                        {'x': eidfrom, 'y': eidto})
     if card[1] in '1?':
-        with session.security_enabled(read=False):
-            session.execute('DELETE X %s Y WHERE Y eid %%(y)s, '
-                            'NOT X eid %%(x)s' % rtype,
-                            {'x': eidfrom, 'y': eidto})
+        with cnx.security_enabled(read=False):
+            cnx.execute('DELETE X %s Y WHERE Y eid %%(y)s, '
+                        'NOT X eid %%(x)s' % rtype,
+                        {'x': eidfrom, 'y': eidto})
 
 
-def preprocess_inlined_relations(session, entity):
+def preprocess_inlined_relations(cnx, entity):
     """when an entity is added, check if it has some inlined relation which
     needs to be extracted for proper hook calls
     """
     relations = []
-    activeintegrity = session.is_hook_category_activated('activeintegrity')
+    activeintegrity = cnx.is_hook_category_activated('activeintegrity')
     eschema = entity.e_schema
     for attr in entity.cw_edited:
         rschema = eschema.subjrels[attr]
         if not rschema.final: # inlined relation
             value = entity.cw_edited[attr]
             relations.append((attr, value))
-            session.update_rel_cache_add(entity.eid, attr, value)
-            rdef = session.rtype_eids_rdef(attr, entity.eid, value)
+            cnx.update_rel_cache_add(entity.eid, attr, value)
+            rdef = cnx.rtype_eids_rdef(attr, entity.eid, value)
             if rdef.cardinality[1] in '1?' and activeintegrity:
-                with session.security_enabled(read=False):
-                    session.execute('DELETE X %s Y WHERE Y eid %%(y)s' % attr,
+                with cnx.security_enabled(read=False):
+                    cnx.execute('DELETE X %s Y WHERE Y eid %%(y)s' % attr,
                                     {'x': entity.eid, 'y': value})
     return relations
 
@@ -151,8 +146,6 @@
 class Repository(object):
     """a repository provides access to a set of persistent storages for
     entities and relations
-
-    XXX protect pyro access
     """
 
     def __init__(self, config, tasks_manager=None, vreg=None):
@@ -162,17 +155,11 @@
         self.vreg = vreg
         self._tasks_manager = tasks_manager
 
-        self.pyro_registered = False
-        self.pyro_uri = None
-        # every pyro client is handled in its own thread; map these threads to
-        # the session we opened for them so we can clean up when they go away
-        self._pyro_sessions = {}
         self.app_instances_bus = NullEventBus()
         self.info('starting repository from %s', self.config.apphome)
         # dictionary of opened sessions
         self._sessions = {}
 
-
         # list of functions to be called at regular interval
         # list of running threads
         self._running_threads = []
@@ -435,10 +422,6 @@
             except Exception:
                 self.exception('error while closing %s' % cnxset)
                 continue
-        if self.pyro_registered:
-            if self._use_pyrons():
-                pyro_unregister(self.config)
-            self.pyro_uri = None
         hits, misses = self.querier.cache_hit, self.querier.cache_miss
         try:
             self.info('rql st cache hit/miss: %s/%s (%s%% hits)', hits, misses,
@@ -461,8 +444,7 @@
         for source in self.sources_by_uri.itervalues():
             if self.config.source_enabled(source) and source.support_entity('CWUser'):
                 try:
-                    with cnx.ensure_cnx_set:
-                        return source.authenticate(cnx, login, **authinfo)
+                    return source.authenticate(cnx, login, **authinfo)
                 except AuthenticationError:
                     continue
         else:
@@ -481,19 +463,18 @@
 
     def _build_user(self, cnx, eid):
         """return a CWUser entity for user with the given eid"""
-        with cnx.ensure_cnx_set:
-            cls = self.vreg['etypes'].etype_class('CWUser')
-            st = cls.fetch_rqlst(cnx.user, ordermethod=None)
-            st.add_eid_restriction(st.get_variable('X'), 'x', 'Substitute')
-            rset = cnx.execute(st.as_string(), {'x': eid})
-            assert len(rset) == 1, rset
-            cwuser = rset.get_entity(0, 0)
-            # pylint: disable=W0104
-            # prefetch / cache cwuser's groups and properties. This is especially
-            # useful for internal sessions to avoid security insertions
-            cwuser.groups
-            cwuser.properties
-            return cwuser
+        cls = self.vreg['etypes'].etype_class('CWUser')
+        st = cls.fetch_rqlst(cnx.user, ordermethod=None)
+        st.add_eid_restriction(st.get_variable('X'), 'x', 'Substitute')
+        rset = cnx.execute(st.as_string(), {'x': eid})
+        assert len(rset) == 1, rset
+        cwuser = rset.get_entity(0, 0)
+        # pylint: disable=W0104
+        # prefetch / cache cwuser's groups and properties. This is especially
+        # useful for internal sessions to avoid security insertions
+        cwuser.groups
+        cwuser.properties
+        return cwuser
 
     # public (dbapi) interface ################################################
 
@@ -640,14 +621,14 @@
         for k in chain(fetch_attrs, query_attrs):
             if k not in cwuserattrs:
                 raise Exception('bad input for find_user')
-        with self.internal_session() as session:
+        with self.internal_cnx() as cnx:
             varmaker = rqlvar_maker()
             vars = [(attr, varmaker.next()) for attr in fetch_attrs]
             rql = 'Any %s WHERE X is CWUser, ' % ','.join(var[1] for var in vars)
             rql += ','.join('X %s %s' % (var[0], var[1]) for var in vars) + ','
-            rset = session.execute(rql + ','.join('X %s %%(%s)s' % (attr, attr)
-                                                  for attr in query_attrs),
-                                   query_attrs)
+            rset = cnx.execute(rql + ','.join('X %s %%(%s)s' % (attr, attr)
+                                              for attr in query_attrs),
+                               query_attrs)
             return rset.rows
 
     def new_session(self, login, **kwargs):
@@ -662,12 +643,6 @@
             # try to get a user object
             user = self.authenticate_user(cnx, login, **kwargs)
         session = Session(user, self, cnxprops)
-        if threading.currentThread() in self._pyro_sessions:
-            # assume no pyro client does one get_repository followed by
-            # multiple repo.connect
-            assert self._pyro_sessions[threading.currentThread()] == None
-            self.debug('record session %s', session)
-            self._pyro_sessions[threading.currentThread()] = session
         user._cw = user.cw_rset.req = session
         user.cw_clear_relation_cache()
         self._sessions[session.sessionid] = session
@@ -683,190 +658,26 @@
         """open a new session for a given user and return its sessionid """
         return self.new_session(login, **kwargs).sessionid
 
-    def execute(self, sessionid, rqlstring, args=None, build_descr=True,
-                txid=None):
-        """execute a RQL query
-
-        * rqlstring should be a unicode string or a plain ascii string
-        * args the optional parameters used in the query
-        * build_descr is a flag indicating if the description should be
-          built on select queries
-        """
-        session = self._get_session(sessionid, setcnxset=True, txid=txid)
-        try:
-            try:
-                rset = self.querier.execute(session, rqlstring, args,
-                                            build_descr)
-                # NOTE: the web front will (re)build it when needed
-                #       e.g in facets
-                #       Zeroed to avoid useless overhead with pyro
-                rset._rqlst = None
-                return rset
-            except (ValidationError, Unauthorized, RQLSyntaxError):
-                raise
-            except Exception:
-                # FIXME: check error to catch internal errors
-                self.exception('unexpected error while executing %s with %s', rqlstring, args)
-                raise
-        finally:
-            session.free_cnxset()
-
-    @deprecated('[3.19] use .entity_metas(sessionid, eid, txid) instead')
-    def describe(self, sessionid, eid, txid=None):
-        """return a tuple `(type, physical source uri, extid, actual source
-        uri)` for the entity of the given `eid`
-
-        As of 3.19, physical source uri is always the system source.
-        """
-        session = self._get_session(sessionid, setcnxset=True, txid=txid)
-        try:
-            etype, extid, source = self.type_and_source_from_eid(eid, session)
-            return etype, source, extid, source
-        finally:
-            session.free_cnxset()
-
-    def entity_metas(self, sessionid, eid, txid=None):
-        """return a dictionary containing metadata for the entity of the given
-        `eid`. Available keys are:
-
-        * 'type', the entity's type name,
-
-        * 'source', the name of the source this entity comes from,
-
-        * 'extid', the identifier for this entity in its originating source, as
-          an encoded string or `None` for entities from the 'system' source.
-        """
-        session = self._get_session(sessionid, setcnxset=True, txid=txid)
-        try:
-            etype, extid, source = self.type_and_source_from_eid(eid, session)
-            return {'type': etype, 'source': source, 'extid': extid}
-        finally:
-            session.free_cnxset()
-
     def check_session(self, sessionid):
         """raise `BadConnectionId` if the connection is no more valid, else
         return its latest activity timestamp.
         """
-        return self._get_session(sessionid, setcnxset=False).timestamp
-
-    @deprecated('[3.19] use session or transaction data')
-    def get_shared_data(self, sessionid, key, default=None, pop=False, txdata=False):
-        """return value associated to key in the session's data dictionary or
-        session's transaction's data if `txdata` is true.
-
-        If pop is True, value will be removed from the dictionary.
-
-        If key isn't defined in the dictionary, value specified by the
-        `default` argument will be returned.
-        """
-        session = self._get_session(sessionid, setcnxset=False)
-        return session.get_shared_data(key, default, pop, txdata)
-
-    @deprecated('[3.19] use session or transaction data')
-    def set_shared_data(self, sessionid, key, value, txdata=False):
-        """set value associated to `key` in shared data
-
-        if `txdata` is true, the value will be added to the repository session's
-        transaction's data which are cleared on commit/rollback of the current
-        transaction.
-        """
-        session = self._get_session(sessionid, setcnxset=False)
-        session.set_shared_data(key, value, txdata)
-
-    def commit(self, sessionid, txid=None):
-        """commit transaction for the session with the given id"""
-        self.debug('begin commit for session %s', sessionid)
-        try:
-            session = self._get_session(sessionid)
-            session.set_cnx(txid)
-            return session.commit()
-        except (ValidationError, Unauthorized):
-            raise
-        except Exception:
-            self.exception('unexpected error')
-            raise
-
-    def rollback(self, sessionid, txid=None):
-        """commit transaction for the session with the given id"""
-        self.debug('begin rollback for session %s', sessionid)
-        try:
-            session = self._get_session(sessionid)
-            session.set_cnx(txid)
-            session.rollback()
-        except Exception:
-            self.exception('unexpected error')
-            raise
+        return self._get_session(sessionid).timestamp
 
     def close(self, sessionid, txid=None, checkshuttingdown=True):
         """close the session with the given id"""
         session = self._get_session(sessionid, txid=txid,
                                     checkshuttingdown=checkshuttingdown)
         # operation uncommited before close are rolled back before hook is called
-        if session._cnx._session_handled:
-            session._cnx.rollback(free_cnxset=False)
         with session.new_cnx() as cnx:
             self.hm.call_hooks('session_close', cnx)
             # commit connection at this point in case write operation has been
             # done during `session_close` hooks
             cnx.commit()
         session.close()
-        if threading.currentThread() in self._pyro_sessions:
-            self._pyro_sessions[threading.currentThread()] = None
         del self._sessions[sessionid]
         self.info('closed session %s for user %s', sessionid, session.user.login)
 
-    def call_service(self, sessionid, regid, **kwargs):
-        """
-        See :class:`cubicweb.dbapi.Connection.call_service`
-        and :class:`cubicweb.server.Service`
-        """
-        # XXX lack a txid
-        session = self._get_session(sessionid)
-        return session._cnx.call_service(regid, **kwargs)
-
-    def user_info(self, sessionid, props=None):
-        """this method should be used by client to:
-        * check session id validity
-        * update user information on each user's request (i.e. groups and
-          custom properties)
-        """
-        user = self._get_session(sessionid, setcnxset=False).user
-        return user.eid, user.login, user.groups, user.properties
-
-    def undoable_transactions(self, sessionid, ueid=None, txid=None,
-                              **actionfilters):
-        """See :class:`cubicweb.dbapi.Connection.undoable_transactions`"""
-        session = self._get_session(sessionid, setcnxset=True, txid=txid)
-        try:
-            return self.system_source.undoable_transactions(session, ueid,
-                                                            **actionfilters)
-        finally:
-            session.free_cnxset()
-
-    def transaction_info(self, sessionid, txuuid, txid=None):
-        """See :class:`cubicweb.dbapi.Connection.transaction_info`"""
-        session = self._get_session(sessionid, setcnxset=True, txid=txid)
-        try:
-            return self.system_source.tx_info(session, txuuid)
-        finally:
-            session.free_cnxset()
-
-    def transaction_actions(self, sessionid, txuuid, public=True, txid=None):
-        """See :class:`cubicweb.dbapi.Connection.transaction_actions`"""
-        session = self._get_session(sessionid, setcnxset=True, txid=txid)
-        try:
-            return self.system_source.tx_actions(session, txuuid, public)
-        finally:
-            session.free_cnxset()
-
-    def undo_transaction(self, sessionid, txuuid, txid=None):
-        """See :class:`cubicweb.dbapi.Connection.undo_transaction`"""
-        session = self._get_session(sessionid, setcnxset=True, txid=txid)
-        try:
-            return self.system_source.undo_transaction(session, txuuid)
-        finally:
-            session.free_cnxset()
-
     # session handling ########################################################
 
     def close_sessions(self):
@@ -891,27 +702,6 @@
                 nbclosed += 1
         return nbclosed
 
-    @deprecated("[3.19] use internal_cnx now\n"
-                "(Beware that integrity hook are now enabled by default)")
-    def internal_session(self, cnxprops=None, safe=False):
-        """return a dbapi-like connection/cursor using the internal user, which
-        has all rights on the repository. The `safe` argument is a boolean flag
-        telling if integrity hooks should be activated or not.
-
-        /!\ the safe argument is False by default.
-
-        *YOU HAVE TO* commit/rollback or close (rollback implicitly) the
-        session once the job's done, else you'll leak connection sets until none
-        are available, causing an irremediable freeze...
-        """
-        session = InternalSession(self, cnxprops)
-        if not safe:
-            session.disable_hook_categories('integrity')
-        session.disable_hook_categories('security')
-        session._cnx.ctx_count += 1
-        session.set_cnxset()
-        return session
-
     @contextmanager
     def internal_cnx(self):
         """Context manager returning a Connection using the internal user, which
         has all rights on the repository.

         Beware that unlike the older :meth:`internal_session`, internal
         connections have all hooks besides security enabled.
         connections have all hooks beside security enabled.
         """
-        with InternalSession(self) as session:
+        with Session(InternalManager(), self) as session:
             with session.new_cnx() as cnx:
                 with cnx.security_enabled(read=False, write=False):
-                    with cnx.ensure_cnx_set:
-                        yield cnx
+                    yield cnx
 
-    def _get_session(self, sessionid, setcnxset=False, txid=None,
-                     checkshuttingdown=True):
+    def _get_session(self, sessionid, txid=None, checkshuttingdown=True):
         """return the session associated with the given session identifier"""
         if checkshuttingdown and self.shutting_down:
             raise ShuttingDown('Repository is shutting down')
@@ -935,9 +723,6 @@
             session = self._sessions[sessionid]
         except KeyError:
             raise BadConnectionId('No such session %s' % sessionid)
-        if setcnxset:
-            session.set_cnx(txid) # must be done before set_cnxset
-            session.set_cnxset()
         return session
 
     # data sources handling ###################################################
@@ -977,11 +762,11 @@
         """return the type of the entity with id <eid>"""
         return self.type_and_source_from_eid(eid, cnx)[0]
 
-    def querier_cache_key(self, session, rql, args, eidkeys):
+    def querier_cache_key(self, cnx, rql, args, eidkeys):
         cachekey = [rql]
         for key in sorted(eidkeys):
             try:
-                etype = self.type_from_eid(args[key], session)
+                etype = self.type_from_eid(args[key], cnx)
             except KeyError:
                 raise QueryError('bad cache key %s (no value)' % key)
             except TypeError:
@@ -1020,13 +805,7 @@
             return self._extid_cache[extid]
         except KeyError:
             pass
-        try:
-            # bw compat: cnx may be a session, get at the Connection
-            cnx = cnx._cnx
-        except AttributeError:
-            pass
-        with cnx.ensure_cnx_set:
-            eid = self.system_source.extid2eid(cnx, extid)
+        eid = self.system_source.extid2eid(cnx, extid)
         if eid is not None:
             self._extid_cache[extid] = eid
             self._type_source_cache[eid] = (etype, extid, source.uri)
@@ -1034,123 +813,80 @@
         if not insert:
             return
         # no link between extid and eid, create one
-        with cnx.ensure_cnx_set:
-            # write query, ensure connection's mode is 'write' so connections
-            # won't be released until commit/rollback
-            cnx.mode = 'write'
-            try:
-                eid = self.system_source.create_eid(cnx)
-                self._extid_cache[extid] = eid
-                self._type_source_cache[eid] = (etype, extid, source.uri)
-                entity = source.before_entity_insertion(
-                    cnx, extid, etype, eid, sourceparams)
+        # write query, ensure connection's mode is 'write' so connections
+        # won't be released until commit/rollback
+        try:
+            eid = self.system_source.create_eid(cnx)
+            self._extid_cache[extid] = eid
+            self._type_source_cache[eid] = (etype, extid, source.uri)
+            entity = source.before_entity_insertion(
+                cnx, extid, etype, eid, sourceparams)
+            if source.should_call_hooks:
+                # get back a copy of operation for later restore if
+                # necessary, see below
+                pending_operations = cnx.pending_operations[:]
+                self.hm.call_hooks('before_add_entity', cnx, entity=entity)
+            self.add_info(cnx, entity, source, extid)
+            source.after_entity_insertion(cnx, extid, entity, sourceparams)
+            if source.should_call_hooks:
+                self.hm.call_hooks('after_add_entity', cnx, entity=entity)
+            return eid
+        except Exception:
+            # XXX do some cleanup manually so that the transaction has a
+            # chance to be committed, with simply this entity discarded
+            self._extid_cache.pop(extid, None)
+            self._type_source_cache.pop(eid, None)
+            if 'entity' in locals():
+                hook.CleanupDeletedEidsCacheOp.get_instance(cnx).add_data(entity.eid)
+                self.system_source.delete_info_multi(cnx, [entity])
                 if source.should_call_hooks:
-                    # get back a copy of operation for later restore if
-                    # necessary, see below
-                    pending_operations = cnx.pending_operations[:]
-                    self.hm.call_hooks('before_add_entity', cnx, entity=entity)
-                self.add_info(cnx, entity, source, extid)
-                source.after_entity_insertion(cnx, extid, entity, sourceparams)
-                if source.should_call_hooks:
-                    self.hm.call_hooks('after_add_entity', cnx, entity=entity)
-                return eid
-            except Exception:
-                # XXX do some cleanup manually so that the transaction has a
-                # chance to be committed, with simply this entity discarded
-                self._extid_cache.pop(extid, None)
-                self._type_source_cache.pop(eid, None)
-                if 'entity' in locals():
-                    hook.CleanupDeletedEidsCacheOp.get_instance(cnx).add_data(entity.eid)
-                    self.system_source.delete_info_multi(cnx, [entity])
-                    if source.should_call_hooks:
-                        cnx.pending_operations = pending_operations
-                raise
+                    cnx.pending_operations = pending_operations
+            raise
 
-    def add_info(self, session, entity, source, extid=None):
+    def add_info(self, cnx, entity, source, extid=None):
         """add type and source info for an eid into the system table,
         and index the entity with the full text index
         """
         # begin by inserting eid/type/source/extid into the entities table
-        hook.CleanupNewEidsCacheOp.get_instance(session).add_data(entity.eid)
-        self.system_source.add_info(session, entity, source, extid)
+        hook.CleanupNewEidsCacheOp.get_instance(cnx).add_data(entity.eid)
+        self.system_source.add_info(cnx, entity, source, extid)
 
-    def delete_info(self, session, entity, sourceuri):
-        """called by external source when some entity known by the system source
-        has been deleted in the external source
+    def _delete_cascade_multi(self, cnx, entities):
+        """same as _delete_cascade but accepts a list of entities with
+        the same etype and belonging to the same source.
         """
-        # mark eid as being deleted in session info and setup cache update
-        # operation
-        hook.CleanupDeletedEidsCacheOp.get_instance(session).add_data(entity.eid)
-        self._delete_info(session, entity, sourceuri)
-
-    def _delete_info(self, session, entity, sourceuri):
-        """delete system information on deletion of an entity:
-
-        * delete all remaining relations from/to this entity
-        * call delete info on the system source
-        """
-        pendingrtypes = session.transaction_data.get('pendingrtypes', ())
+        pendingrtypes = cnx.transaction_data.get('pendingrtypes', ())
         # delete remaining relations: if user can delete the entity, he can
         # delete all its relations without security checking
-        with session.security_enabled(read=False, write=False):
-            eid = entity.eid
-            for rschema, _, role in entity.e_schema.relation_definitions():
-                if rschema.rule:
-                    continue # computed relation
-                rtype = rschema.type
-                if rtype in schema.VIRTUAL_RTYPES or rtype in pendingrtypes:
-                    continue
-                if role == 'subject':
-                    # don't skip inlined relation so they are regularly
-                    # deleted and so hooks are correctly called
-                    rql = 'DELETE X %s Y WHERE X eid %%(x)s' % rtype
-                else:
-                    rql = 'DELETE Y %s X WHERE X eid %%(x)s' % rtype
-                try:
-                    session.execute(rql, {'x': eid}, build_descr=False)
-                except Exception:
-                    if self.config.mode == 'test':
+        with cnx.security_enabled(read=False, write=False):
+            in_eids = ','.join([str(_e.eid) for _e in entities])
+            with cnx.running_hooks_ops():
+                for rschema, _, role in entities[0].e_schema.relation_definitions():
+                    if rschema.rule:
+                        continue # computed relation
+                    rtype = rschema.type
+                    if rtype in schema.VIRTUAL_RTYPES or rtype in pendingrtypes:
+                        continue
+                    if role == 'subject':
+                        # don't skip inlined relation so they are regularly
+                        # deleted and so hooks are correctly called
+                        rql = 'DELETE X %s Y WHERE X eid IN (%s)' % (rtype, in_eids)
+                    else:
+                        rql = 'DELETE Y %s X WHERE X eid IN (%s)' % (rtype, in_eids)
+                    try:
+                        cnx.execute(rql, build_descr=False)
+                    except ValidationError:
                         raise
-                    self.exception('error while cascading delete for entity %s '
-                                   'from %s. RQL: %s', entity, sourceuri, rql)
-        self.system_source.delete_info_multi(session, [entity])
-
-    def _delete_info_multi(self, session, entities):
-        """same as _delete_info but accepts a list of entities with
-        the same etype and belonging to the same source.
-        """
-        pendingrtypes = session.transaction_data.get('pendingrtypes', ())
-        # delete remaining relations: if user can delete the entity, he can
-        # delete all its relations without security checking
-        with session.security_enabled(read=False, write=False):
-            in_eids = ','.join([str(_e.eid) for _e in entities])
-            for rschema, _, role in entities[0].e_schema.relation_definitions():
-                if rschema.rule:
-                    continue # computed relation
-                rtype = rschema.type
-                if rtype in schema.VIRTUAL_RTYPES or rtype in pendingrtypes:
-                    continue
-                if role == 'subject':
-                    # don't skip inlined relation so they are regularly
-                    # deleted and so hooks are correctly called
-                    rql = 'DELETE X %s Y WHERE X eid IN (%s)' % (rtype, in_eids)
-                else:
-                    rql = 'DELETE Y %s X WHERE X eid IN (%s)' % (rtype, in_eids)
-                try:
-                    session.execute(rql, build_descr=False)
-                except ValidationError:
-                    raise
-                except Unauthorized:
-                    self.exception('Unauthorized exception while cascading delete for entity %s. '
-                                   'RQL: %s.\nThis should not happen since security is disabled here.',
-                                   entities, rql)
-                    raise
-                except Exception:
-                    if self.config.mode == 'test':
+                    except Unauthorized:
+                        self.exception('Unauthorized exception while cascading delete for entity %s. '
+                                       'RQL: %s.\nThis should not happen since security is disabled here.',
+                                       entities, rql)
                         raise
-                    self.exception('error while cascading delete for entity %s. RQL: %s',
-                                   entities, rql)
-        self.system_source.delete_info_multi(session, entities)
+                    except Exception:
+                        if self.config.mode == 'test':
+                            raise
+                        self.exception('error while cascading delete for entity %s. RQL: %s',
+                                       entities, rql)
 
     def init_entity_caches(self, cnx, entity, source):
         """add entity to connection entities cache and repo's extid cache.
@@ -1188,13 +924,13 @@
         edited.set_defaults()
         if cnx.is_hook_category_activated('integrity'):
             edited.check(creation=True)
+        self.add_info(cnx, entity, source, extid)
         try:
             source.add_entity(cnx, entity)
         except UniqueTogetherError as exc:
             userhdlr = cnx.vreg['adapters'].select(
                 'IUserFriendlyError', cnx, entity=entity, exc=exc)
             userhdlr.raise_user_exception()
-        self.add_info(cnx, entity, source, extid)
         edited.saved = entity._cw_is_saved = True
         # trigger after_add_entity after after_add_relation
         self.hm.call_hooks('after_add_entity', cnx, entity=entity)
@@ -1309,8 +1045,9 @@
             if server.DEBUG & server.DBG_REPO:
                 print 'DELETE entities', etype, [entity.eid for entity in entities]
             self.hm.call_hooks('before_delete_entity', cnx, entities=entities)
-            self._delete_info_multi(cnx, entities)
+            self._delete_cascade_multi(cnx, entities)
             source.delete_entities(cnx, entities)
+            source.delete_info_multi(cnx, entities)
             self.hm.call_hooks('after_delete_entity', cnx, entities=entities)
         # don't clear cache here, it is done in a hook on commit
 
@@ -1392,79 +1129,12 @@
                            eidfrom=subject, rtype=rtype, eidto=object)
 
 
-    # pyro handling ###########################################################
-
-    @property
-    @cached
-    def pyro_appid(self):
-        from logilab.common import pyro_ext as pyro
-        config = self.config
-        appid = '%s.%s' % pyro.ns_group_and_id(
-            config['pyro-instance-id'] or config.appid,
-            config['pyro-ns-group'])
-        # ensure config['pyro-instance-id'] is a full qualified pyro name
-        config['pyro-instance-id'] = appid
-        return appid
-
-    def _use_pyrons(self):
-        """return True if the pyro-ns-host is set to something else
-        than NO_PYRONS, meaning we want to go through a pyro
-        nameserver"""
-        return self.config['pyro-ns-host'] != 'NO_PYRONS'
-
-    def pyro_register(self, host=''):
-        """register the repository as a pyro object"""
-        from logilab.common import pyro_ext as pyro
-        daemon = pyro.register_object(self, self.pyro_appid,
-                                      daemonhost=self.config['pyro-host'],
-                                      nshost=self.config['pyro-ns-host'],
-                                      use_pyrons=self._use_pyrons())
-        self.info('repository registered as a pyro object %s', self.pyro_appid)
-        self.pyro_uri =  pyro.get_object_uri(self.pyro_appid)
-        self.info('pyro uri is: %s', self.pyro_uri)
-        self.pyro_registered = True
-        # register a looping task to regularly ensure we're still registered
-        # into the pyro name server
-        if self._use_pyrons():
-            self.looping_task(60*10, self._ensure_pyro_ns)
-        pyro_sessions = self._pyro_sessions
-        # install hacky function to free cnxset
-        def handleConnection(conn, tcpserver, sessions=pyro_sessions):
-            sessions[threading.currentThread()] = None
-            return tcpserver.getAdapter().__class__.handleConnection(tcpserver.getAdapter(), conn, tcpserver)
-        daemon.getAdapter().handleConnection = handleConnection
-        def removeConnection(conn, sessions=pyro_sessions):
-            daemon.__class__.removeConnection(daemon, conn)
-            session = sessions.pop(threading.currentThread(), None)
-            if session is None:
-                # client was not yet connected to the repo
-                return
-            if not session.closed:
-                self.close(session.sessionid)
-        daemon.removeConnection = removeConnection
-        return daemon
-
-    def _ensure_pyro_ns(self):
-        if not self._use_pyrons():
-            return
-        from logilab.common import pyro_ext as pyro
-        pyro.ns_reregister(self.pyro_appid, nshost=self.config['pyro-ns-host'])
-        self.info('repository re-registered as a pyro object %s',
-                  self.pyro_appid)
 
 
     # these are overridden by set_log_methods below
     # only defining here to prevent pylint from complaining
     info = warning = error = critical = exception = debug = lambda msg, *a, **kw: None
 
-
-def pyro_unregister(config):
-    """unregister the repository from the pyro name server"""
-    from logilab.common.pyro_ext import ns_unregister
-    appid = config['pyro-instance-id'] or config.appid
-    ns_unregister(appid, config['pyro-ns-group'], config['pyro-ns-host'])
-
-
 from logging import getLogger
 from cubicweb import set_log_methods
 set_log_methods(Repository, getLogger('cubicweb.repository'))
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/schema2sql.py	Mon Jun 22 14:27:37 2015 +0200
@@ -0,0 +1,245 @@
+# copyright 2004-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of yams.
+#
+# yams is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# yams is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with yams. If not, see <http://www.gnu.org/licenses/>.
+"""write a schema as sql"""
+
+__docformat__ = "restructuredtext en"
+
+from hashlib import md5
+
+from six.moves import range
+
+from yams.constraints import SizeConstraint, UniqueConstraint
+
+# defaults are usually not handled at the SQL level. If you want them, set
+# SET_DEFAULT to True
+SET_DEFAULT = False
+
+
+def schema2sql(dbhelper, schema, skip_entities=(), skip_relations=(), prefix=''):
+    """return a string containing the SQL statements creating the tables
+    that store the objects of the given schema
+    """
+    output = []
+    w = output.append
+    for etype in sorted(schema.entities()):
+        eschema = schema.eschema(etype)
+        if eschema.final or eschema.type in skip_entities:
+            continue
+        w(eschema2sql(dbhelper, eschema, skip_relations, prefix=prefix))
+    for rtype in sorted(schema.relations()):
+        rschema = schema.rschema(rtype)
+        if rschema.final or rschema.inlined or rschema.rule:
+            continue
+        w(rschema2sql(rschema))
+    return '\n'.join(output)
+
+
+def dropschema2sql(dbhelper, schema, skip_entities=(), skip_relations=(), prefix=''):
+    """return a string containing the SQL statements dropping the tables
+    that store the objects of the given schema
+    """
+    output = []
+    w = output.append
+    for etype in sorted(schema.entities()):
+        eschema = schema.eschema(etype)
+        if eschema.final or eschema.type in skip_entities:
+            continue
+        stmts = dropeschema2sql(dbhelper, eschema, skip_relations, prefix=prefix)
+        for stmt in stmts:
+            w(stmt)
+    for rtype in sorted(schema.relations()):
+        rschema = schema.rschema(rtype)
+        if rschema.final or rschema.inlined:
+            continue
+        w(droprschema2sql(rschema))
+    return '\n'.join(output)
+
+
+def eschema_attrs(eschema, skip_relations):
+    attrs = [attrdef for attrdef in eschema.attribute_definitions()
+             if not attrdef[0].type in skip_relations]
+    attrs += [(rschema, None)
+              for rschema in eschema.subject_relations()
+              if not rschema.final and rschema.inlined]
+    return attrs
+
+def unique_index_name(eschema, columns):
+    return u'unique_%s' % md5((eschema.type +
+                              ',' +
+                              ','.join(sorted(columns))).encode('ascii')).hexdigest()
+
+def iter_unique_index_names(eschema):
+    for columns in eschema._unique_together or ():
+        yield columns, unique_index_name(eschema, columns)
+
+def dropeschema2sql(dbhelper, eschema, skip_relations=(), prefix=''):
+    """return sql to drop an entity type's table"""
+    # not necessary to drop indexes, that's implicitly done when
+    # dropping the table, but we need to drop SQLServer views used to
+    # create multicol unique indices
+    statements = []
+    tablename = prefix + eschema.type
+    if eschema._unique_together is not None:
+        for columns, index_name in iter_unique_index_names(eschema):
+            cols  = ['%s%s' % (prefix, col) for col in columns]
+            sqls = dbhelper.sqls_drop_multicol_unique_index(tablename, cols, index_name)
+            statements += sqls
+    statements += ['DROP TABLE %s;' % (tablename)]
+    return statements
+
+
+def eschema2sql(dbhelper, eschema, skip_relations=(), prefix=''):
+    """return the SQL statements creating an entity type's table and indexes"""
+    output = []
+    w = output.append
+    table = prefix + eschema.type
+    w('CREATE TABLE %s(' % (table))
+    attrs = eschema_attrs(eschema, skip_relations)
+    # XXX handle objectinline physical mode
+    for i in range(len(attrs)):
+        rschema, attrschema = attrs[i]
+        if attrschema is not None:
+            sqltype = aschema2sql(dbhelper, eschema, rschema, attrschema,
+                                  indent=' ')
+        else: # inline relation
+            sqltype = 'integer REFERENCES entities (eid)'
+        if i == len(attrs) - 1:
+            w(' %s%s %s' % (prefix, rschema.type, sqltype))
+        else:
+            w(' %s%s %s,' % (prefix, rschema.type, sqltype))
+    w(');')
+    # create indexes
+    for i in range(len(attrs)):
+        rschema, attrschema = attrs[i]
+        if attrschema is None or eschema.rdef(rschema).indexed:
+            w(dbhelper.sql_create_index(table, prefix + rschema.type))
+    for columns, index_name in iter_unique_index_names(eschema):
+        cols  = ['%s%s' % (prefix, col) for col in columns]
+        sqls = dbhelper.sqls_create_multicol_unique_index(table, cols, index_name)
+        for sql in sqls:
+            w(sql)
+    w('')
+    return '\n'.join(output)
+
+
+def aschema2sql(dbhelper, eschema, rschema, aschema, creating=True, indent=''):
+    """return the SQL column type declaration for an attribute schema"""
+    attr = rschema.type
+    rdef = rschema.rdef(eschema.type, aschema.type)
+    sqltype = type_from_constraints(dbhelper, aschema.type, rdef.constraints,
+                                    creating)
+    if SET_DEFAULT:
+        default = eschema.default(attr)
+        if default is not None:
+            if aschema.type == 'Boolean':
+                sqltype += ' DEFAULT %s' % dbhelper.boolean_value(default)
+            elif aschema.type == 'String':
+                sqltype += ' DEFAULT %r' % str(default)
+            elif aschema.type in ('Int', 'BigInt', 'Float'):
+                sqltype += ' DEFAULT %s' % default
+            # XXX ignore default for other types
+            # this is expected for NOW / TODAY
+    if creating:
+        if rdef.uid:
+            sqltype += ' PRIMARY KEY REFERENCES entities (eid)'
+        elif rdef.cardinality[0] == '1':
+            # don't set NOT NULL if backend isn't able to change it later
+            if dbhelper.alter_column_support:
+                sqltype += ' NOT NULL'
+    # else we're getting sql type to alter a column, we don't want key / indexes
+    # / null modifiers
+    return sqltype
+
+
+def type_from_constraints(dbhelper, etype, constraints, creating=True):
+    """return a sql type string corresponding to the constraints"""
+    constraints = list(constraints)
+    unique, sqltype = False, None
+    size_constrained_string = dbhelper.TYPE_MAPPING.get('SizeConstrainedString', 'varchar(%s)')
+    if etype == 'String':
+        for constraint in constraints:
+            if isinstance(constraint, SizeConstraint):
+                if constraint.max is not None:
+                    sqltype = size_constrained_string % constraint.max
+            elif isinstance(constraint, UniqueConstraint):
+                unique = True
+    if sqltype is None:
+        sqltype = dbhelper.TYPE_MAPPING[etype]
+    if creating and unique:
+        sqltype += ' UNIQUE'
+    return sqltype
+
+
+_SQL_SCHEMA = """
+CREATE TABLE %(table)s (
+  eid_from INTEGER NOT NULL REFERENCES entities (eid),
+  eid_to INTEGER NOT NULL REFERENCES entities (eid),
+  CONSTRAINT %(table)s_p_key PRIMARY KEY(eid_from, eid_to)
+);
+
+CREATE INDEX %(table)s_from_idx ON %(table)s(eid_from);
+CREATE INDEX %(table)s_to_idx ON %(table)s(eid_to);"""
+
+
+def rschema2sql(rschema):
+    assert not rschema.rule
+    return _SQL_SCHEMA % {'table': '%s_relation' % rschema.type}
+
+
+def droprschema2sql(rschema):
+    """return sql to drop a relation type's table"""
+    # not necessary to drop indexes, that's implicitly done when dropping
+    # the table
+    return 'DROP TABLE %s_relation;' % rschema.type
+
+
+def grant_schema(schema, user, set_owner=True, skip_entities=(), prefix=''):
+    """return a string containing the SQL statements granting privileges
+    (and optionally ownership) on the schema's tables to the given user
+    """
+    output = []
+    w = output.append
+    for etype in sorted(schema.entities()):
+        eschema = schema.eschema(etype)
+        if eschema.final or etype in skip_entities:
+            continue
+        w(grant_eschema(eschema, user, set_owner, prefix=prefix))
+    for rtype in sorted(schema.relations()):
+        rschema = schema.rschema(rtype)
+        if rschema.final or rschema.inlined:
+            continue
+        w(grant_rschema(rschema, user, set_owner))
+    return '\n'.join(output)
+
+
+def grant_eschema(eschema, user, set_owner=True, prefix=''):
+    output = []
+    w = output.append
+    etype = eschema.type
+    if set_owner:
+        w('ALTER TABLE %s%s OWNER TO %s;' % (prefix, etype, user))
+    w('GRANT ALL ON %s%s TO %s;' % (prefix, etype, user))
+    return '\n'.join(output)
+
+
+def grant_rschema(rschema, user, set_owner=True):
+    output = []
+    if set_owner:
+        output.append('ALTER TABLE %s_relation OWNER TO %s;' % (rschema.type, user))
+    output.append('GRANT ALL ON %s_relation TO %s;' % (rschema.type, user))
+    return '\n'.join(output)
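The module above is copied from yams' schema2sql into CubicWeb itself (hence the yams
licence header). As a rough, non-authoritative sketch of how it may be driven — the
logilab.database get_db_helper factory and the 'cw_' prefix are assumptions drawn from
this changeset, not something it documents — generating the DDL for a loaded schema
could look like::

    # sketch only: dump DDL for a loaded CubicWeb schema
    from logilab.database import get_db_helper      # assumed helper factory
    from cubicweb.server import schema2sql

    def dump_ddl(schema, driver='postgres'):
        """Return the CREATE TABLE/INDEX statements for persisted types."""
        dbhelper = get_db_helper(driver)
        # 'cw_' mirrors the prefix used for CubicWeb system tables/columns
        return schema2sql.schema2sql(dbhelper, schema, prefix='cw_')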
--- a/server/schemaserial.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/server/schemaserial.py	Mon Jun 22 14:27:37 2015 +0200
@@ -25,13 +25,12 @@
 
 from logilab.common.shellutils import ProgressBar, DummyProgressBar
 
-from yams import (BadSchemaDefinition, schema as schemamod, buildobjs as ybo,
-                  schema2sql as y2sql)
+from yams import BadSchemaDefinition, schema as schemamod, buildobjs as ybo
 
 from cubicweb import Binary
 from cubicweb.schema import (KNOWN_RPROPERTIES, CONSTRAINTS, ETYPE_NAME_MAP,
                              VIRTUAL_RTYPES)
-from cubicweb.server import sqlutils
+from cubicweb.server import sqlutils, schema2sql as y2sql
 
 
 def group_mapping(cnx, interactive=True):
@@ -110,14 +109,13 @@
         has_computed_attributes = False
 
     # XXX bw compat (3.6 migration)
-    with cnx.ensure_cnx_set:
-        sqlcu = cnx.system_sql("SELECT * FROM cw_CWRType WHERE cw_name='symetric'")
-        if sqlcu.fetchall():
-            sql = dbhelper.sql_rename_col('cw_CWRType', 'cw_symetric', 'cw_symmetric',
-                                          dbhelper.TYPE_MAPPING['Boolean'], True)
-            sqlcu.execute(sql)
-            sqlcu.execute("UPDATE cw_CWRType SET cw_name='symmetric' WHERE cw_name='symetric'")
-            cnx.commit(False)
+    sqlcu = cnx.system_sql("SELECT * FROM cw_CWRType WHERE cw_name='symetric'")
+    if sqlcu.fetchall():
+        sql = dbhelper.sql_rename_col('cw_CWRType', 'cw_symetric', 'cw_symmetric',
+                                      dbhelper.TYPE_MAPPING['Boolean'], True)
+        sqlcu.execute(sql)
+        sqlcu.execute("UPDATE cw_CWRType SET cw_name='symmetric' WHERE cw_name='symetric'")
+        cnx.commit()
     ertidx = {}
     copiedeids = set()
     permsidx = deserialize_ertype_permissions(cnx)
--- a/server/server.py	Mon Jun 22 12:51:28 2015 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,140 +0,0 @@
-# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-#
-# This file is part of CubicWeb.
-#
-# CubicWeb is free software: you can redistribute it and/or modify it under the
-# terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option)
-# any later version.
-#
-# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""Pyro RQL server"""
-
-__docformat__ = "restructuredtext en"
-
-import select
-from time import localtime, mktime
-
-from cubicweb.server.utils import TasksManager
-from cubicweb.server.repository import Repository
-
-class Finished(Exception):
-    """raise to remove an event from the event loop"""
-
-class TimeEvent:
-    """base event"""
-    # timefunc = staticmethod(localtime)
-    timefunc = localtime
-
-    def __init__(self, absolute=None, period=None):
-        # local time tuple
-        if absolute is None:
-            absolute = self.timefunc()
-        self.absolute = absolute
-        # optional period in seconds
-        self.period = period
-
-    def is_ready(self):
-        """return  true if the event is ready to be fired"""
-        now = self.timefunc()
-        if self.absolute <= now:
-            return True
-        return False
-
-    def fire(self, server):
-        """fire the event
-        must be overridden by concrete events
-        """
-        raise NotImplementedError()
-
-    def update(self):
-        """update the absolute date for the event or raise a finished exception
-        """
-        if self.period is None:
-            raise Finished
-        self.absolute = localtime(mktime(self.absolute) + self.period)
-
-
-class QuitEvent(TimeEvent):
-    """stop the server"""
-    def fire(self, server):
-        server.repo.shutdown()
-        server.quiting = True
-
-
-class RepositoryServer(object):
-
-    def __init__(self, config):
-        """make the repository available as a PyRO object"""
-        self.config = config
-        self.repo = Repository(config, TasksManager())
-        self.ns = None
-        self.quiting = None
-        # event queue
-        self.events = []
-
-    def add_event(self, event):
-        """add an event to the loop"""
-        self.info('adding event %s', event)
-        self.events.append(event)
-
-    def trigger_events(self):
-        """trigger ready events"""
-        for event in self.events[:]:
-            if event.is_ready():
-                self.info('starting event %s', event)
-                event.fire(self)
-                try:
-                    event.update()
-                except Finished:
-                    self.events.remove(event)
-
-    def run(self, req_timeout=5.0):
-        """enter the service loop"""
-        # start repository looping tasks
-        self.repo.start_looping_tasks()
-        while self.quiting is None:
-            try:
-                self.daemon.handleRequests(req_timeout)
-            except select.error:
-                continue
-            finally:
-                self.trigger_events()
-
-    def quit(self):
-        """stop the server"""
-        self.add_event(QuitEvent())
-
-    def connect(self, host='', port=0):
-        """the connect method on the repository only register to pyro if
-        necessary
-        """
-        self.daemon = self.repo.pyro_register(host)
-
-    # server utilitities ######################################################
-
-    def install_sig_handlers(self):
-        """install signal handlers"""
-        import signal
-        self.info('installing signal handlers')
-        signal.signal(signal.SIGINT, lambda x, y, s=self: s.quit())
-        signal.signal(signal.SIGTERM, lambda x, y, s=self: s.quit())
-
-
-    # these are overridden by set_log_methods below
-    # only defining here to prevent pylint from complaining
-    @classmethod
-    def info(cls, msg, *a, **kw):
-        pass
-
-from logging import getLogger
-from cubicweb import set_log_methods
-LOGGER = getLogger('cubicweb.reposerver')
-set_log_methods(RepositoryServer, LOGGER)
--- a/server/serverconfig.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/server/serverconfig.py	Mon Jun 22 14:27:37 2015 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -197,44 +197,6 @@
 notified of every changes.',
           'group': 'email', 'level': 2,
           }),
-        # pyro services config
-        ('pyro-host',
-         {'type' : 'string',
-          'default': None,
-          'help': 'Pyro server host, if not detectable correctly through \
-gethostname(). It may contains port information using <host>:<port> notation, \
-and if not set, it will be choosen randomly',
-          'group': 'pyro', 'level': 3,
-          }),
-        ('pyro-instance-id',
-         {'type' : 'string',
-          'default': lgconfig.Method('default_instance_id'),
-          'help': 'identifier of the CubicWeb instance in the Pyro name server',
-          'group': 'pyro', 'level': 1,
-          }),
-        ('pyro-ns-host',
-         {'type' : 'string',
-          'default': '',
-          'help': 'Pyro name server\'s host. If not set, will be detected by a \
-broadcast query. It may contains port information using <host>:<port> notation. \
-Use "NO_PYRONS" to create a Pyro server but not register to a pyro nameserver',
-          'group': 'pyro', 'level': 1,
-          }),
-        ('pyro-ns-group',
-         {'type' : 'string',
-          'default': 'cubicweb',
-          'help': 'Pyro name server\'s group where the repository will be \
-registered.',
-          'group': 'pyro', 'level': 1,
-          }),
-        # zmq services config
-        ('zmq-repository-address',
-         {'type' : 'string',
-          'default': None,
-          'help': ('ZMQ URI on which the repository will be bound '
-                   'to (of the form `zmqpickle-tcp://<ipaddr>:<port>`).'),
-          'group': 'zmq', 'level': 3,
-          }),
          ('zmq-address-sub',
           {'type' : 'csv',
            'default' : (),
@@ -350,10 +312,6 @@
             stream.write('[%s]\n%s\n' % (section, generate_source_config(sconfig)))
         restrict_perms_to_user(sourcesfile)
 
-    def pyro_enabled(self):
-        """pyro is always enabled in standalone repository configuration"""
-        return True
-
     def load_schema(self, expand_cubes=False, **kwargs):
         from cubicweb.schema import CubicWebSchemaLoader
         if expand_cubes:
@@ -387,6 +345,3 @@
         return ServerMigrationHelper(self, schema, interactive=interactive,
                                      cnx=cnx, repo=repo, connect=connect,
                                      verbosity=verbosity)
-
-
-CONFIGURATIONS.append(ServerConfiguration)
--- a/server/serverctl.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/server/serverctl.py	Mon Jun 22 14:27:37 2015 +0200
@@ -38,7 +38,6 @@
 from cubicweb.toolsutils import Command, CommandHandler, underline_title
 from cubicweb.cwctl import CWCTL, check_options_consistency, ConfigureInstanceCommand
 from cubicweb.server import SOURCE_TYPES
-from cubicweb.server.repository import Repository
 from cubicweb.server.serverconfig import (
     USER_OPTIONS, ServerConfiguration, SourceConfiguration,
     ask_source_config, generate_source_config)
@@ -167,11 +166,6 @@
         if not automatic:
             print underline_title('Configuring the repository')
             config.input_config('email', inputlevel)
-            # ask for pyro configuration if pyro is activated and we're not
-            # using a all-in-one config, in which case this is done by the web
-            # side command handler
-            if config.pyro_enabled() and config.name != 'all-in-one':
-                config.input_config('pyro', inputlevel)
             print '\n'+underline_title('Configuring the sources')
         sourcesfile = config.sources_file()
         # hack to make Method('default_instance_id') usable in db option defs
@@ -301,33 +295,6 @@
                     raise ExecutionError(str(exc))
 
 
-class RepositoryStartHandler(CommandHandler):
-    cmdname = 'start'
-    cfgname = 'repository'
-
-    def start_server(self, config):
-        command = ['cubicweb-ctl', 'start-repository']
-        if config.debugmode:
-            command.append('--debug')
-        command.append('--loglevel')
-        command.append(config['log-threshold'].lower())
-        command.append(config.appid)
-        subprocess.call(command)
-        return 1
-
-
-class RepositoryStopHandler(CommandHandler):
-    cmdname = 'stop'
-    cfgname = 'repository'
-
-    def poststop(self):
-        """if pyro is enabled, ensure the repository is correctly unregistered
-        """
-        if self.config.pyro_enabled():
-            from cubicweb.server.repository import pyro_unregister
-            pyro_unregister(self.config)
-
-
 # repository specific commands ################################################
 
 def createdb(helper, source, dbcnx, cursor, **kwargs):
@@ -686,77 +653,6 @@
         cnx.close()
 
 
-class StartRepositoryCommand(Command):
-    """Start a CubicWeb RQL server for a given instance.
-
-    The server will be remotely accessible through pyro or ZMQ
-
-    <instance>
-      the identifier of the instance to initialize.
-    """
-    name = 'start-repository'
-    arguments = '<instance>'
-    min_args = max_args = 1
-    options = (
-        ('debug',
-         {'short': 'D', 'action' : 'store_true',
-          'help': 'start server in debug mode.'}),
-        ('loglevel',
-         {'short': 'l', 'type' : 'choice', 'metavar': '<log level>',
-          'default': None, 'choices': ('debug', 'info', 'warning', 'error'),
-          'help': 'debug if -D is set, error otherwise',
-          }),
-        ('address',
-         {'short': 'a', 'type': 'string', 'metavar': '<protocol>://<host>:<port>',
-          'default': '',
-          'help': ('specify a ZMQ URI on which to bind, or use "pyro://"'
-                   'to create a pyro-based repository'),
-          }),
-        )
-
-    def create_repo(self, config):
-        address = self['address']
-        if not address:
-            address = config.get('zmq-repository-address') or 'pyro://'
-        if address.startswith('pyro://'):
-            from cubicweb.server.server import RepositoryServer
-            return RepositoryServer(config), config['host']
-        else:
-            from cubicweb.server.utils import TasksManager
-            from cubicweb.server.cwzmq import ZMQRepositoryServer
-            repo = Repository(config, TasksManager())
-            return ZMQRepositoryServer(repo), address
-
-    def run(self, args):
-        from logilab.common.daemon import daemonize, setugid
-        from cubicweb.cwctl import init_cmdline_log_threshold
-        print 'WARNING: Standalone repository with pyro or zmq access is deprecated'
-        appid = args[0]
-        debug = self['debug']
-        if sys.platform == 'win32' and not debug:
-            logger = logging.getLogger('cubicweb.ctl')
-            logger.info('Forcing debug mode on win32 platform')
-            debug = True
-        config = ServerConfiguration.config_for(appid, debugmode=debug)
-        init_cmdline_log_threshold(config, self['loglevel'])
-        # create the server
-        server, address = self.create_repo(config)
-        # ensure the directory where the pid-file should be set exists (for
-        # instance /var/run/cubicweb may be deleted on computer restart)
-        pidfile = config['pid-file']
-        piddir = os.path.dirname(pidfile)
-        # go ! (don't daemonize in debug mode)
-        if not os.path.exists(piddir):
-            os.makedirs(piddir)
-        if not debug and daemonize(pidfile, umask=config['umask']):
-            return
-        uid = config['uid']
-        if uid is not None:
-            setugid(uid)
-        server.install_sig_handlers()
-        server.connect(address)
-        server.run()
-
 
 def _remote_dump(host, appid, output, sudo=False):
     # XXX generate unique/portable file name
@@ -1061,7 +957,7 @@
         config = ServerConfiguration.config_for(appid)
         repo, cnx = repo_cnx(config)
         with cnx:
-            reindex_entities(repo.schema, cnx._cnx, etypes=etypes)
+            reindex_entities(repo.schema, cnx, etypes=etypes)
             cnx.commit()
 
 
@@ -1146,7 +1042,6 @@
 
 for cmdclass in (CreateInstanceDBCommand, InitInstanceCommand,
                  GrantUserOnInstanceCommand, ResetAdminPasswordCommand,
-                 StartRepositoryCommand,
                  DBDumpCommand, DBRestoreCommand, DBCopyCommand,
                  AddSourceCommand, CheckRepositoryCommand, RebuildFTICommand,
                  SynchronizeSourceCommand, SchemaDiffCommand,
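Since the reindex_entities hunk above now passes the repoapi connection directly
(instead of the wrapped cnx._cnx), rebuilding the full-text index from a maintenance
script can look roughly like the following sketch; 'myapp' is a placeholder instance
id and the checkintegrity import path is an assumption::

    # sketch: rebuild the full-text index for a given entity type
    from cubicweb.server.serverconfig import ServerConfiguration
    from cubicweb.server.serverctl import repo_cnx
    from cubicweb.server.checkintegrity import reindex_entities   # assumed location

    config = ServerConfiguration.config_for('myapp')   # placeholder appid
    repo, cnx = repo_cnx(config)
    with cnx:
        reindex_entities(repo.schema, cnx, etypes=('CWUser',))
        cnx.commit()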
--- a/server/session.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/server/session.py	Mon Jun 22 14:27:37 2015 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -19,7 +19,6 @@
 __docformat__ = "restructuredtext en"
 
 import sys
-import threading
 from time import time
 from uuid import uuid4
 from warnings import warn
@@ -34,7 +33,6 @@
 from cubicweb.req import RequestSessionBase
 from cubicweb.utils import make_uid
 from cubicweb.rqlrewrite import RQLRewriter
-from cubicweb.server import ShuttingDown
 from cubicweb.server.edition import EditedEntity
 
 
@@ -68,27 +66,6 @@
     return req.vreg.config.repairing
 
 
-class transaction(object):
-    """Ensure that the transaction is either commited or rolled back at exit
-
-    Context manager to enter a transaction for a session: when exiting the
-    `with` block on exception, call `session.rollback()`, else call
-    `session.commit()` on normal exit
-    """
-    def __init__(self, session, free_cnxset=True):
-        self.session = session
-        self.free_cnxset = free_cnxset
-
-    def __enter__(self):
-        # ensure session has a cnxset
-        self.session.set_cnxset()
-
-    def __exit__(self, exctype, exc, traceback):
-        if exctype:
-            self.session.rollback(free_cnxset=self.free_cnxset)
-        else:
-            self.session.commit(free_cnxset=self.free_cnxset)
-
 @deprecated('[3.17] use <object>.allow/deny_all_hooks_but instead')
 def hooks_control(obj, mode, *categories):
     assert mode in  (HOOKS_ALLOW_ALL, HOOKS_DENY_ALL)
@@ -98,8 +75,7 @@
         return obj.deny_all_hooks_but(*categories)
 
 
-class _hooks_control(object): # XXX repoapi: remove me when
-                              # session stop being connection
+class _hooks_control(object):
     """context manager to control activated hooks categories.
 
     If mode is `HOOKS_DENY_ALL`, given hooks categories will
@@ -149,24 +125,6 @@
         finally:
             self.cnx.hooks_mode = self.oldmode
 
-class _session_hooks_control(_hooks_control): # XXX repoapi: remove me when
-                                              # session stop being connection
-    """hook control context manager for session
-
-    Necessary to handle some unholy transaction scope logic."""
-
-
-    def __init__(self, session, mode, *categories):
-        self.session = session
-        super_init = super(_session_hooks_control, self).__init__
-        super_init(session._cnx, mode, *categories)
-
-    def __exit__(self, exctype, exc, traceback):
-        super_exit = super(_session_hooks_control, self).__exit__
-        ret = super_exit(exctype, exc, traceback)
-        if self.cnx.ctx_count == 0:
-            self.session._close_cnx(self.cnx)
-        return ret
 
 @deprecated('[3.17] use <object>.security_enabled instead')
 def security_enabled(obj, *args, **kwargs):
@@ -205,24 +163,6 @@
         if self.oldwrite is not None:
             self.cnx.write_security = self.oldwrite
 
-class _session_security_enabled(_security_enabled):
-    """hook security context manager for session
-
-    Necessary To handle some unholy transaction scope logic."""
-
-
-    def __init__(self, session, read=None, write=None):
-        self.session = session
-        super_init = super(_session_security_enabled, self).__init__
-        super_init(session._cnx, read=read, write=write)
-
-    def __exit__(self, exctype, exc, traceback):
-        super_exit = super(_session_security_enabled, self).__exit__
-        ret = super_exit(exctype, exc, traceback)
-        if self.cnx.ctx_count == 0:
-            self.session._close_cnx(self.cnx)
-        return ret
-
 HOOKS_ALLOW_ALL = object()
 HOOKS_DENY_ALL = object()
 DEFAULT_SECURITY = object() # evaluated to true by design
@@ -230,146 +170,6 @@
 class SessionClosedError(RuntimeError):
     pass
 
-class CnxSetTracker(object):
-    """Keep track of which connection use which cnxset.
-
-    There should be one of these objects per session (including internal sessions).
-
-    Session objects are responsible for creating their CnxSetTracker object.
-
-    Connections should use the :meth:`record` and :meth:`forget` to inform the
-    tracker of cnxsets they have acquired.
-
-    .. automethod:: cubicweb.server.session.CnxSetTracker.record
-    .. automethod:: cubicweb.server.session.CnxSetTracker.forget
-
-    Sessions use the :meth:`close` and :meth:`wait` methods when closing.
-
-    .. automethod:: cubicweb.server.session.CnxSetTracker.close
-    .. automethod:: cubicweb.server.session.CnxSetTracker.wait
-
-    This object itself is threadsafe. It also requires caller to acquired its
-    lock in some situation.
-    """
-
-    def __init__(self):
-        self._active = True
-        self._condition = threading.Condition()
-        self._record = {}
-
-    def __enter__(self):
-        return self._condition.__enter__()
-
-    def __exit__(self, *args):
-        return self._condition.__exit__(*args)
-
-    def record(self, cnxid, cnxset):
-        """Inform the tracker that a cnxid has acquired a cnxset
-
-        This method is to be used by Connection objects.
-
-        This method fails when:
-        - The cnxid already has a recorded cnxset.
-        - The tracker is not active anymore.
-
-        Notes about the caller:
-        (1) It is responsible for retrieving a cnxset.
-        (2) It must be prepared to release the cnxset if the
-            `cnxsettracker.forget` call fails.
-        (3) It should acquire the tracker lock until the very end of the operation.
-        (4) However it must only lock the CnxSetTracker object after having
-            retrieved the cnxset to prevent deadlock.
-
-        A typical usage look like::
-
-        cnxset = repo._get_cnxset() # (1)
-        try:
-            with cnxset_tracker: # (3) and (4)
-                cnxset_tracker.record(caller.id, cnxset)
-                # (3') operation ends when caller is in expected state only
-                caller.cnxset = cnxset
-        except Exception:
-            repo._free_cnxset(cnxset) # (2)
-            raise
-        """
-        # dubious since the caller is supposed to have acquired it anyway.
-        with self._condition:
-            if not self._active:
-                raise SessionClosedError('Closed')
-            old = self._record.get(cnxid)
-            if old is not None:
-                raise ValueError('connection "%s" already has a cnx_set (%r)'
-                                 % (cnxid, old))
-            self._record[cnxid] = cnxset
-
-    def forget(self, cnxid, cnxset):
-        """Inform the tracker that a cnxid have release a cnxset
-
-        This methode is to be used by Connection object.
-
-        This method fails when:
-        - The cnxset for the cnxid does not match the recorded one.
-
-        Notes about the caller:
-        (1) It is responsible for releasing the cnxset.
-        (2) It should acquire the tracker lock during the operation to ensure
-            the internal tracker state is always accurate regarding its own state.
-
-        A typical usage look like::
-
-        cnxset = caller.cnxset
-        try:
-            with cnxset_tracker:
-                # (2) you can not have caller.cnxset out of sync with
-                #     cnxset_tracker state while unlocked
-                caller.cnxset = None
-                cnxset_tracker.forget(caller.id, cnxset)
-        finally:
-            cnxset = repo._free_cnxset(cnxset) # (1)
-        """
-        with self._condition:
-            old = self._record.get(cnxid, None)
-            if old is not cnxset:
-                raise ValueError('recorded cnxset for "%s" mismatch: %r != %r'
-                                 % (cnxid, old, cnxset))
-            self._record.pop(cnxid)
-            self._condition.notify_all()
-
-    def close(self):
-        """Marks the tracker as inactive.
-
-        This method is to be used by Session objects.
-
-        An inactive tracker does not accept new records anymore.
-        """
-        with self._condition:
-            self._active = False
-
-    def wait(self, timeout=10):
-        """Wait for all recorded cnxsets to be released
-
-        This method is to be used by Session objects.
-
-        Returns a tuple of connection ids that remain open.
-        """
-        with self._condition:
-            if  self._active:
-                raise RuntimeError('Cannot wait on active tracker.'
-                                   ' Call tracker.close() first')
-            while self._record and timeout > 0:
-                start = time()
-                self._condition.wait(timeout)
-                timeout -= time() - start
-            return tuple(self._record)
-
-
-def _with_cnx_set(func):
-    """decorator for Connection method that ensure they run with a cnxset """
-    @functools.wraps(func)
-    def wrapper(cnx, *args, **kwargs):
-        with cnx.ensure_cnx_set:
-            return func(cnx, *args, **kwargs)
-    return wrapper
 
 def _open_only(func):
     """decorator for Connection method that check it is open"""
@@ -389,8 +189,9 @@
 
     Database connection resources:
 
-      :attr:`running_dbapi_query`, boolean flag telling if the executing query
-      is coming from a dbapi connection or is a query from within the repository
+      :attr:`hooks_in_progress`, boolean flag telling if the executing
+      query is coming from a repoapi connection or is a query from
+      within the repository (e.g. started by hooks)
 
       :attr:`cnxset`, the connections set to use to execute queries on sources.
       If the transaction is read only, the connection set may be freed between
@@ -406,12 +207,18 @@
       'transaction' (we want to keep the connections set during all the
       transaction, with or without writing)
 
-    Internal transaction data:
+    Shared data:
 
-      :attr:`data` is a dictionary containing some shared data
-      cleared at the end of the transaction. Hooks and operations may put
-      arbitrary data in there, and this may also be used as a communication
-      channel between the client and the repository.
+      :attr:`data` is a dictionary bound to the underlying session,
+      which will be present for the lifetime of the session. This may
+      be useful for web clients that rely on the server for managing
+      bits of session-scoped data.
+
+      :attr:`transaction_data` is a dictionary cleared at the end of
+      the transaction. Hooks and operations may put arbitrary data in
+      there.
+
+    Internal state:
 
       :attr:`pending_operations`, ordered list of operations to be processed on
       commit/rollback
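A tiny sketch of the data/transaction_data distinction documented above (the key
names are made up for illustration)::

    def remember_touched(cnx, eid):
        # survives commit/rollback, lives as long as the underlying session
        cnx.data.setdefault('ui.language', 'en')
        # cleared when the current transaction ends (commit or rollback)
        cnx.transaction_data.setdefault('touched-eids', set()).add(eid)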
@@ -438,33 +245,20 @@
       read/write security is currently activated.
 
     """
+    is_request = False
+    hooks_in_progress = False
+    is_repo_in_memory = True # bw compat
 
-    is_request = False
-
-    def __init__(self, session, cnxid=None, session_handled=False):
+    def __init__(self, session):
         # using super(Connection, self) confuse some test hack
         RequestSessionBase.__init__(self, session.vreg)
-        # only the session provide explicite
-        if cnxid is not None:
-            assert session_handled # only session profive explicite cnxid
         #: connection unique id
         self._open = None
-        if cnxid is None:
-            cnxid = '%s-%s' % (session.sessionid, uuid4().hex)
-        self.connectionid = cnxid
+        self.connectionid = '%s-%s' % (session.sessionid, uuid4().hex)
+        self.session = session
         self.sessionid = session.sessionid
-        #: self._session_handled
-        #: are the life cycle of this Connection automatically controlled by the
-        #: Session This is the old backward compatibility mode
-        self._session_handled = session_handled
         #: reentrance handling
         self.ctx_count = 0
-        #: count the number of entry in a context needing a cnxset
-        self._cnxset_count = 0
-        #: Boolean for compat with the older explicite set_cnxset/free_cnx API
-        #: When a call set_cnxset is done, no automatic freeing will be done
-        #: until free_cnx is called.
-        self._auto_free_cnx_set = True
 
         #: server.Repository object
         self.repo = session.repo
@@ -474,16 +268,8 @@
         # other session utility
         self._session_timestamp = session._timestamp
 
-        #: connection handling mode
-        self.mode = session.default_mode
-        #: connection set used to execute queries on sources
-        self._cnxset = None
-        #: CnxSetTracker used to report cnxset usage
-        self._cnxset_tracker = session._cnxset_tracker
-        #: is this connection from a client or internal to the repo
-        self.running_dbapi_query = True
         # internal (root) session
-        self.is_internal_session = session.is_internal_session
+        self.is_internal_session = isinstance(session.user, InternalManager)
 
         #: dict containing arbitrary data cleared at the end of the transaction
         self.transaction_data = {}
@@ -506,7 +292,7 @@
 
         # undo control
         config = session.repo.config
-        if config.creating or config.repairing or session.is_internal_session:
+        if config.creating or config.repairing or self.is_internal_session:
             self.undo_actions = False
         else:
             self.undo_actions = config['undo-enabled']
@@ -521,21 +307,109 @@
         else:
             self._set_user(session.user)
 
+    @_open_only
+    def source_defs(self):
+        """Return the definition of sources used by the repository."""
+        return self.session.repo.source_defs()
 
-    # live cycle handling ####################################################
+    @_open_only
+    def get_schema(self):
+        """Return the schema currently used by the repository."""
+        return self.session.repo.schema
+
+    @_open_only
+    def get_option_value(self, option):
+        """Return the value for `option` in the configuration."""
+        return self.session.repo.get_option_value(option)
+
+    # transaction api
+
+    @_open_only
+    def undoable_transactions(self, ueid=None, **actionfilters):
+        """Return a list of undoable transaction objects for the connection's
+        user, ordered by descending transaction time.
+
+        Managers may filter on the user (eid) who performed the transaction
+        using the `ueid` argument. Others will only see their own transactions.
+
+        Additional filtering capabilities are provided by the following
+        named arguments:
+
+        * `etype` to get only transactions creating/updating/deleting entities
+          of the given type
+
+        * `eid` to get only transactions applied to entity of the given eid
+
+        * `action` to get only transactions doing the given action (action in
+          'C', 'U', 'D', 'A', 'R'). If `etype`, action can only be 'C', 'U' or
+          'D'.
+
+        * `public`: when additional filters are provided, they are by default
+          only matched against 'public' actions, unless a `public` argument is
+          given and set to false.
+        """
+        return self.repo.system_source.undoable_transactions(self, ueid,
+                                                             **actionfilters)
+
+    @_open_only
+    def transaction_info(self, txuuid):
+        """Return transaction object for the given uid.
+
+        raise `NoSuchTransaction` if the transaction is not found or if the
+        session's user is not allowed to see it (e.g. not in the managers
+        group and the transaction does not belong to them).
+        """
+        return self.repo.system_source.tx_info(self, txuuid)
+
+    @_open_only
+    def transaction_actions(self, txuuid, public=True):
+        """Return an ordered list of actions performed during that transaction.
+
+        If public is true, return only 'public' actions, i.e. not the ones
+        triggered behind the scenes by hooks, else return all actions.
+
+        raise `NoSuchTransaction` if the transaction is not found or
+        if the user is not allowed (eg not in managers group).
+        """
+        return self.repo.system_source.tx_actions(self, txuuid, public)
+
+    @_open_only
+    def undo_transaction(self, txuuid):
+        """Undo the given transaction. Return potential restoration errors.
+
+        raise `NoSuchTransaction` if not found or if user is not
+        allowed (eg not in managers group).
+        """
+        return self.repo.system_source.undo_transaction(self, txuuid)
+
+    # life cycle handling ####################################################
 
     def __enter__(self):
         assert self._open is None # first opening
         self._open = True
+        self.cnxset = self.repo._get_cnxset()
         return self
 
     def __exit__(self, exctype=None, excvalue=None, tb=None):
         assert self._open # actually already open
-        assert self._cnxset_count == 0
-        self.rollback()
+        self.clear()
         self._open = False
+        self.cnxset.cnxset_freed()
+        self.repo._free_cnxset(self.cnxset)
+        self.cnxset = None
 
+    @contextmanager
+    def running_hooks_ops(self):
+        """this context manager should be called whenever hooks or operations
+        are about to be run (but after hook selection)
 
+        It helps the undo logic record pertinent metadata, and lets some
+        hooks decide whether to run depending on who/what issued the query.
+        """
+        prevmode = self.hooks_in_progress
+        self.hooks_in_progress = True
+        try:
+            yield
+        finally:
+            # restore the previous mode even if a hook or operation raises
+            self.hooks_in_progress = prevmode
 
     # shared data handling ###################################################
 
@@ -580,83 +454,23 @@
         self.local_perm_cache.clear()
         self.rewriter = RQLRewriter(self)
 
-    # Connection Set Management ###############################################
-    @property
-    @_open_only
-    def cnxset(self):
-        return self._cnxset
-
-    @cnxset.setter
-    @_open_only
-    def cnxset(self, new_cnxset):
-        with self._cnxset_tracker:
-            old_cnxset = self._cnxset
-            if new_cnxset is old_cnxset:
-                return #nothing to do
-            if old_cnxset is not None:
-                old_cnxset.rollback()
-                self._cnxset = None
-                self.ctx_count -= 1
-                self._cnxset_tracker.forget(self.connectionid, old_cnxset)
-            if new_cnxset is not None:
-                self._cnxset_tracker.record(self.connectionid, new_cnxset)
-                self._cnxset = new_cnxset
-                self.ctx_count += 1
-
-    @_open_only
-    def _set_cnxset(self):
-        """the connection need a connections set to execute some queries"""
-        if self.cnxset is None:
-            cnxset = self.repo._get_cnxset()
-            try:
-                self.cnxset = cnxset
-            except:
-                self.repo._free_cnxset(cnxset)
-                raise
-        return self.cnxset
-
-    @_open_only
-    def _free_cnxset(self, ignoremode=False):
-        """the connection is no longer using its connections set, at least for some time"""
-        # cnxset may be none if no operation has been done since last commit
-        # or rollback
-        cnxset = self.cnxset
-        if cnxset is not None and (ignoremode or self.mode == 'read'):
-            assert self._cnxset_count == 0
-            try:
-                self.cnxset = None
-            finally:
-                cnxset.cnxset_freed()
-                self.repo._free_cnxset(cnxset)
-
     @deprecated('[3.19] cnxset are automatically managed now.'
                 ' stop using explicit set and free.')
     def set_cnxset(self):
-        self._auto_free_cnx_set = False
-        return self._set_cnxset()
+        pass
 
     @deprecated('[3.19] cnxset are automatically managed now.'
                 ' stop using explicit set and free.')
     def free_cnxset(self, ignoremode=False):
-        self._auto_free_cnx_set = True
-        return self._free_cnxset(ignoremode=ignoremode)
-
+        pass
 
     @property
     @contextmanager
     @_open_only
+    @deprecated('[3.21] a cnxset is automatically set on __enter__ call now.'
+                ' stop using .ensure_cnx_set')
     def ensure_cnx_set(self):
-        assert self._cnxset_count >= 0
-        if self._cnxset_count == 0:
-            self._set_cnxset()
-        try:
-            self._cnxset_count += 1
-            yield
-        finally:
-            self._cnxset_count = max(self._cnxset_count - 1, 0)
-            if self._cnxset_count == 0 and self._auto_free_cnx_set:
-                self._free_cnxset()
-
+        yield
 
     # Entity cache management #################################################
     #
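With set_cnxset/free_cnxset reduced to no-ops and ensure_cnx_set deprecated, callers
no longer manage the cnxset themselves; a minimal migration sketch, assuming
repo.internal_cnx() as the way to obtain a connection::

    # before (<= 3.20):
    #     with cnx.ensure_cnx_set:
    #         cnx.system_sql('SELECT 1')
    # from 3.21 on, the cnxset is acquired when the connection is entered:
    with repo.internal_cnx() as cnx:
        cnx.system_sql('SELECT 1')
        cnx.commit()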
@@ -939,27 +753,7 @@
     @read_security.setter
     @_open_only
     def read_security(self, activated):
-        oldmode = self._read_security
         self._read_security = activated
-        # running_dbapi_query used to detect hooks triggered by a 'dbapi' query
-        # (eg not issued on the session). This is tricky since we the execution
-        # model of a (write) user query is:
-        #
-        # repository.execute (security enabled)
-        #  \-> querier.execute
-        #       \-> repo.glob_xxx (add/update/delete entity/relation)
-        #            \-> deactivate security before calling hooks
-        #                 \-> WE WANT TO CHECK QUERY NATURE HERE
-        #                      \-> potentially, other calls to querier.execute
-        #
-        # so we can't rely on simply checking session.read_security, but
-        # recalling the first transition from DEFAULT_SECURITY to something
-        # else (False actually) is not perfect but should be enough
-        #
-        # also reset running_dbapi_query to true when we go back to
-        # DEFAULT_SECURITY
-        self.running_dbapi_query = (oldmode is DEFAULT_SECURITY
-                                    or activated is DEFAULT_SECURITY)
 
     # undo support ############################################################
 
@@ -971,7 +765,7 @@
     def transaction_uuid(self, set=True):
         uuid = self.transaction_data.get('tx_uuid')
         if set and uuid is None:
-            self.transaction_data['tx_uuid'] = uuid = uuid4().hex
+            self.transaction_data['tx_uuid'] = uuid = unicode(uuid4().hex)
             self.repo.system_source.start_undoable_transaction(self, uuid)
         return uuid
 
@@ -988,7 +782,6 @@
         return self.repo.source_defs()
 
     @deprecated('[3.19] use .entity_metas(eid) instead')
-    @_with_cnx_set
     @_open_only
     def describe(self, eid, asdict=False):
         """return a tuple (type, sourceuri, extid) for the entity with id <eid>"""
@@ -999,7 +792,6 @@
             return metas
         return etype, source, extid
 
-    @_with_cnx_set
     @_open_only
     def entity_metas(self, eid):
         """return a tuple (type, sourceuri, extid) for the entity with id <eid>"""
@@ -1008,7 +800,6 @@
 
     # core method #############################################################
 
-    @_with_cnx_set
     @_open_only
     def execute(self, rql, kwargs=None, build_descr=True):
         """db-api like method directly linked to the querier execute method.
@@ -1022,21 +813,16 @@
         return rset
 
     @_open_only
-    def rollback(self, free_cnxset=True, reset_pool=None):
+    def rollback(self, free_cnxset=None, reset_pool=None):
         """rollback the current transaction"""
-        if reset_pool is not None:
-            warn('[3.13] use free_cnxset argument instead for reset_pool',
+        if free_cnxset is not None:
+            warn('[3.21] free_cnxset is now unneeded',
                  DeprecationWarning, stacklevel=2)
-            free_cnxset = reset_pool
-        if self._cnxset_count != 0:
-            # we are inside ensure_cnx_set, don't lose it
-            free_cnxset = False
+        if reset_pool is not None:
+            warn('[3.13] reset_pool is now unneeded',
+                 DeprecationWarning, stacklevel=2)
         cnxset = self.cnxset
-        if cnxset is None:
-            self.clear()
-            self._session_timestamp.touch()
-            self.debug('rollback transaction %s done (no db activity)', self.connectionid)
-            return
+        assert cnxset is not None
         try:
             # by default, operations are executed with security turned off
             with self.security_enabled(False, False):
@@ -1051,26 +837,18 @@
                 self.debug('rollback for transaction %s done', self.connectionid)
         finally:
             self._session_timestamp.touch()
-            if free_cnxset:
-                self._free_cnxset(ignoremode=True)
             self.clear()
 
     @_open_only
-    def commit(self, free_cnxset=True, reset_pool=None):
+    def commit(self, free_cnxset=None, reset_pool=None):
         """commit the current session's transaction"""
-        if reset_pool is not None:
-            warn('[3.13] use free_cnxset argument instead for reset_pool',
+        if free_cnxset is not None:
+            warn('[3.21] free_cnxset is now unneeded',
                  DeprecationWarning, stacklevel=2)
-            free_cnxset = reset_pool
-        if self.cnxset is None:
-            assert not self.pending_operations
-            self.clear()
-            self._session_timestamp.touch()
-            self.debug('commit transaction %s done (no db activity)', self.connectionid)
-            return
-        if self._cnxset_count != 0:
-            # we are inside ensure_cnx_set, don't lose it
-            free_cnxset = False
+        if reset_pool is not None:
+            warn('[3.13] reset_pool is now unneeded',
+                 DeprecationWarning, stacklevel=2)
+        assert self.cnxset is not None
         cstate = self.commit_state
         if cstate == 'uncommitable':
             raise QueryError('transaction must be rolled back')
@@ -1094,13 +872,14 @@
                 if debug:
                     print self.commit_state, '*' * 20
                 try:
-                    while self.pending_operations:
-                        operation = self.pending_operations.pop(0)
-                        operation.processed = 'precommit'
-                        processed.append(operation)
-                        if debug:
-                            print operation
-                        operation.handle_event('precommit_event')
+                    with self.running_hooks_ops():
+                        while self.pending_operations:
+                            operation = self.pending_operations.pop(0)
+                            operation.processed = 'precommit'
+                            processed.append(operation)
+                            if debug:
+                                print operation
+                            operation.handle_event('precommit_event')
                     self.pending_operations[:] = processed
                     self.debug('precommit transaction %s done', self.connectionid)
                 except BaseException:
@@ -1117,56 +896,52 @@
                     operation.failed = True
                     if debug:
                         print self.commit_state, '*' * 20
-                    for operation in reversed(processed):
-                        if debug:
-                            print operation
-                        try:
-                            operation.handle_event('revertprecommit_event')
-                        except BaseException:
-                            self.critical('error while reverting precommit',
-                                          exc_info=True)
+                    with self.running_hooks_ops():
+                        for operation in reversed(processed):
+                            if debug:
+                                print operation
+                            try:
+                                operation.handle_event('revertprecommit_event')
+                            except BaseException:
+                                self.critical('error while reverting precommit',
+                                              exc_info=True)
                     # XXX use slice notation since self.pending_operations is a
                     # read-only property.
                     self.pending_operations[:] = processed + self.pending_operations
-                    self.rollback(free_cnxset)
+                    self.rollback()
                     raise
                 self.cnxset.commit()
                 self.commit_state = 'postcommit'
                 if debug:
                     print self.commit_state, '*' * 20
-                while self.pending_operations:
-                    operation = self.pending_operations.pop(0)
-                    if debug:
-                        print operation
-                    operation.processed = 'postcommit'
-                    try:
-                        operation.handle_event('postcommit_event')
-                    except BaseException:
-                        self.critical('error while postcommit',
-                                      exc_info=sys.exc_info())
+                with self.running_hooks_ops():
+                    while self.pending_operations:
+                        operation = self.pending_operations.pop(0)
+                        if debug:
+                            print operation
+                        operation.processed = 'postcommit'
+                        try:
+                            operation.handle_event('postcommit_event')
+                        except BaseException:
+                            self.critical('error while postcommit',
+                                          exc_info=sys.exc_info())
                 self.debug('postcommit transaction %s done', self.connectionid)
                 return self.transaction_uuid(set=False)
         finally:
             self._session_timestamp.touch()
-            if free_cnxset:
-                self._free_cnxset(ignoremode=True)
             self.clear()
 
     # resource accessors ######################################################
 
-    @_with_cnx_set
     @_open_only
     def call_service(self, regid, **kwargs):
         self.debug('calling service %s', regid)
         service = self.vreg['services'].select(regid, self, **kwargs)
         return service.call(**kwargs)
 
-    @_with_cnx_set
     @_open_only
     def system_sql(self, sql, args=None, rollback_on_failure=True):
         """return a sql cursor on the system database"""
-        if sql.split(None, 1)[0].upper() != 'SELECT':
-            self.mode = 'write'
         source = self.repo.system_source
         try:
             return source.doexec(self, sql, args, rollback=rollback_on_failure)
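
The precommit, revertprecommit and postcommit loops are now wrapped in `self.running_hooks_ops()`, and a later hunk in native.py flags undo records with `not cnx.hooks_in_progress` instead of `running_dbapi_query`. The real implementation of that guard is not part of this excerpt; the counter-based context manager below is only an assumed sketch of what such bookkeeping could look like (class and attribute names are illustrative):

    from contextlib import contextmanager

    class HooksTrackingMixin(object):
        """assumed sketch: track whether hook/operation code is running"""
        _hooks_nesting = 0

        @property
        def hooks_in_progress(self):
            return self._hooks_nesting > 0

        @contextmanager
        def running_hooks_ops(self):
            # work done inside this block (e.g. undo records written by
            # operations) can then be marked as non-public
            self._hooks_nesting += 1
            try:
                yield
            finally:
                self._hooks_nesting -= 1
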
@@ -1227,149 +1002,37 @@
         return float(self.value)
 
 
-class Session(RequestSessionBase): # XXX repoapi: stop being a
-                                   # RequestSessionBase at some point
+class Session(object):
     """Repository user session
 
     This ties all together:
      * session id,
      * user,
-     * connections set,
      * other session data.
-
-    **About session storage / transactions**
-
-    Here is a description of internal session attributes. Besides :attr:`data`
-    and :attr:`transaction_data`, you should not have to use attributes
-    described here but higher level APIs.
-
-      :attr:`data` is a dictionary containing shared data, used to communicate
-      extra information between the client and the repository
-
-      :attr:`_cnxs` is a dictionary of :class:`Connection` instance, one
-      for each running connection. The key is the connection id. By default
-      the connection id is the thread name but it can be otherwise (per dbapi
-      cursor for instance, or per thread name *from another process*).
-
-      :attr:`__threaddata` is a thread local storage whose `cnx` attribute
-      refers to the proper instance of :class:`Connection` according to the
-      connection.
-
-    You should not have to use neither :attr:`_cnx` nor :attr:`__threaddata`,
-    simply access connection data transparently through the :attr:`_cnx`
-    property. Also, you usually don't have to access it directly since current
-    connection's data may be accessed/modified through properties / methods:
-
-      :attr:`connection_data`, similarly to :attr:`data`, is a dictionary
-      containing some shared data that should be cleared at the end of the
-      connection. Hooks and operations may put arbitrary data in there, and
-      this may also be used as a communication channel between the client and
-      the repository.
-
-    .. automethod:: cubicweb.server.session.Session.get_shared_data
-    .. automethod:: cubicweb.server.session.Session.set_shared_data
-    .. automethod:: cubicweb.server.session.Session.added_in_transaction
-    .. automethod:: cubicweb.server.session.Session.deleted_in_transaction
-
-    Connection state information:
-
-      :attr:`running_dbapi_query`, boolean flag telling if the executing query
-      is coming from a dbapi connection or is a query from within the repository
-
-      :attr:`cnxset`, the connections set to use to execute queries on sources.
-      During a transaction, the connection set may be freed so that is may be
-      used by another session as long as no writing is done. This means we can
-      have multiple sessions with a reasonably low connections set pool size.
-
-      .. automethod:: cubicweb.server.session.Session.set_cnxset
-      .. automethod:: cubicweb.server.session.Session.free_cnxset
-
-      :attr:`mode`, string telling the connections set handling mode, may be one
-      of 'read' (connections set may be freed), 'write' (some write was done in
-      the connections set, it can't be freed before end of the transaction),
-      'transaction' (we want to keep the connections set during all the
-      transaction, with or without writing)
-
-      :attr:`pending_operations`, ordered list of operations to be processed on
-      commit/rollback
-
-      :attr:`commit_state`, describing the transaction commit state, may be one
-      of None (not yet committing), 'precommit' (calling precommit event on
-      operations), 'postcommit' (calling postcommit event on operations),
-      'uncommitable' (some :exc:`ValidationError` or :exc:`Unauthorized` error
-      has been raised during the transaction and so it must be rolled back).
-
-    .. automethod:: cubicweb.server.session.Session.commit
-    .. automethod:: cubicweb.server.session.Session.rollback
-    .. automethod:: cubicweb.server.session.Session.close
-    .. automethod:: cubicweb.server.session.Session.closed
-
-    Security level Management:
-
-      :attr:`read_security` and :attr:`write_security`, boolean flags telling if
-      read/write security is currently activated.
-
-    .. automethod:: cubicweb.server.session.Session.security_enabled
-
-    Hooks Management:
-
-      :attr:`hooks_mode`, may be either `HOOKS_ALLOW_ALL` or `HOOKS_DENY_ALL`.
-
-      :attr:`enabled_hook_categories`, when :attr:`hooks_mode` is
-      `HOOKS_DENY_ALL`, this set contains hooks categories that are enabled.
-
-      :attr:`disabled_hook_categories`, when :attr:`hooks_mode` is
-      `HOOKS_ALLOW_ALL`, this set contains hooks categories that are disabled.
-
-    .. automethod:: cubicweb.server.session.Session.deny_all_hooks_but
-    .. automethod:: cubicweb.server.session.Session.allow_all_hooks_but
-    .. automethod:: cubicweb.server.session.Session.is_hook_category_activated
-    .. automethod:: cubicweb.server.session.Session.is_hook_activated
-
-    Data manipulation:
-
-    .. automethod:: cubicweb.server.session.Session.add_relation
-    .. automethod:: cubicweb.server.session.Session.add_relations
-    .. automethod:: cubicweb.server.session.Session.delete_relation
-
-    Other:
-
-    .. automethod:: cubicweb.server.session.Session.call_service
-
-
-
     """
-    is_request = False
-    is_internal_session = False
 
     def __init__(self, user, repo, cnxprops=None, _id=None):
-        super(Session, self).__init__(repo.vreg)
         self.sessionid = _id or make_uid(unormalize(user.login).encode('UTF8'))
         self.user = user # XXX repoapi: deprecated and store only a login.
         self.repo = repo
+        self.vreg = repo.vreg
         self._timestamp = Timestamp()
-        self.default_mode = 'read'
-        # short cut to querier .execute method
-        self._execute = repo.querier.execute
-        # shared data, used to communicate extra information between the client
-        # and the rql server
         self.data = {}
-        # i18n initialization
-        self.set_language(user.prefered_language())
-        ### internals
-        # Connection of this section
-        self._cnxs = {} # XXX repoapi: remove this when nobody use the session
-                        # as a Connection
-        # Data local to the thread
-        self.__threaddata = threading.local() # XXX repoapi: remove this when
-                                              # nobody use the session as a Connection
-        self._cnxset_tracker = CnxSetTracker()
-        self._closed = False
-        self._lock = threading.RLock()
+        self.closed = False
+
+    def close(self):
+        self.closed = True
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, *args):
+        pass
 
     def __unicode__(self):
         return '<session %s (%s 0x%x)>' % (
             unicode(self.user.login), self.sessionid, id(self))
+
     @property
     def timestamp(self):
         return float(self._timestamp)
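
With the hunk above, `Session` stops being a `RequestSessionBase` and becomes a small container holding the user, the repository and a data dict; all querying goes through explicit `Connection` objects returned by `new_cnx()` (kept just below). A usage sketch under that model; `user` and `repo` are assumed to come from the caller:

    session = Session(user, repo)
    with session.new_cnx() as cnx:          # the Connection does the real work
        cnx.execute('Any X WHERE X is CWUser')
        cnx.commit()
    session.close()                         # merely flips session.closed
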
@@ -1390,55 +1053,6 @@
         """
         return Connection(self)
 
-    def _get_cnx(self, cnxid):
-        """return the <cnxid> connection attached to this session
-
-        Connection is created if necessary"""
-        with self._lock: # no connection exist with the same id
-            try:
-                if self.closed:
-                    raise SessionClosedError('try to access connections set on'
-                                             ' a closed session %s' % self.id)
-                cnx = self._cnxs[cnxid]
-                assert cnx._session_handled
-            except KeyError:
-                cnx = Connection(self, cnxid=cnxid, session_handled=True)
-                self._cnxs[cnxid] = cnx
-                cnx.__enter__()
-        return cnx
-
-    def _close_cnx(self, cnx):
-        """Close a Connection related to a session"""
-        assert cnx._session_handled
-        cnx.__exit__()
-        self._cnxs.pop(cnx.connectionid, None)
-        try:
-            if self.__threaddata.cnx is cnx:
-                del self.__threaddata.cnx
-        except AttributeError:
-            pass
-
-    def set_cnx(self, cnxid=None):
-        # XXX repoapi: remove this when nobody use the session as a Connection
-        """set the default connection of the current thread to <cnxid>
-
-        Connection is created if necessary"""
-        if cnxid is None:
-            cnxid = threading.currentThread().getName()
-        cnx = self._get_cnx(cnxid)
-        # New style session should not be accesed through the session.
-        assert cnx._session_handled
-        self.__threaddata.cnx = cnx
-
-    @property
-    def _cnx(self):
-        """default connection for current session in current thread"""
-        try:
-            return self.__threaddata.cnx
-        except AttributeError:
-            self.set_cnx()
-            return self.__threaddata.cnx
-
     @deprecated('[3.19] use a Connection object instead')
     def get_option_value(self, option, foreid=None):
         if foreid is not None:
@@ -1446,108 +1060,6 @@
                  stacklevel=2)
         return self.repo.get_option_value(option)
 
-    @deprecated('[3.19] use a Connection object instead')
-    def transaction(self, free_cnxset=True):
-        """return context manager to enter a transaction for the session: when
-        exiting the `with` block on exception, call `session.rollback()`, else
-        call `session.commit()` on normal exit.
-
-        The `free_cnxset` will be given to rollback/commit methods to indicate
-        whether the connections set should be freed or not.
-        """
-        return transaction(self, free_cnxset)
-
-    add_relation = cnx_meth('add_relation')
-    add_relations = cnx_meth('add_relations')
-    delete_relation = cnx_meth('delete_relation')
-
-    # relations cache handling #################################################
-
-    update_rel_cache_add = cnx_meth('update_rel_cache_add')
-    update_rel_cache_del = cnx_meth('update_rel_cache_del')
-
-    # resource accessors ######################################################
-
-    system_sql = cnx_meth('system_sql')
-    deleted_in_transaction = cnx_meth('deleted_in_transaction')
-    added_in_transaction = cnx_meth('added_in_transaction')
-    rtype_eids_rdef = cnx_meth('rtype_eids_rdef')
-
-    # security control #########################################################
-
-    @deprecated('[3.19] use a Connection object instead')
-    def security_enabled(self, read=None, write=None):
-        return _session_security_enabled(self, read=read, write=write)
-
-    read_security = cnx_attr('read_security', writable=True)
-    write_security = cnx_attr('write_security', writable=True)
-    running_dbapi_query = cnx_attr('running_dbapi_query')
-
-    # hooks activation control #################################################
-    # all hooks should be activated during normal execution
-
-
-    @deprecated('[3.19] use a Connection object instead')
-    def allow_all_hooks_but(self, *categories):
-        return _session_hooks_control(self, HOOKS_ALLOW_ALL, *categories)
-    @deprecated('[3.19] use a Connection object instead')
-    def deny_all_hooks_but(self, *categories):
-        return _session_hooks_control(self, HOOKS_DENY_ALL, *categories)
-
-    hooks_mode = cnx_attr('hooks_mode')
-
-    disabled_hook_categories = cnx_attr('disabled_hook_cats')
-    enabled_hook_categories = cnx_attr('enabled_hook_cats')
-    disable_hook_categories = cnx_meth('disable_hook_categories')
-    enable_hook_categories = cnx_meth('enable_hook_categories')
-    is_hook_category_activated = cnx_meth('is_hook_category_activated')
-    is_hook_activated = cnx_meth('is_hook_activated')
-
-    # connection management ###################################################
-
-    @deprecated('[3.19] use a Connection object instead')
-    def keep_cnxset_mode(self, mode):
-        """set `mode`, e.g. how the session will keep its connections set:
-
-        * if mode == 'write', the connections set is freed after each read
-          query, but kept until the transaction's end (eg commit or rollback)
-          when a write query is detected (eg INSERT/SET/DELETE queries)
-
-        * if mode == 'transaction', the connections set is only freed after the
-          transaction's end
-
-        notice that a repository has a limited set of connections sets, and a
-        session has to wait for a free connections set to run any rql query
-        (unless it already has one set).
-        """
-        assert mode in ('transaction', 'write')
-        if mode == 'transaction':
-            self.default_mode = 'transaction'
-        else: # mode == 'write'
-            self.default_mode = 'read'
-
-    mode = cnx_attr('mode', writable=True)
-    commit_state = cnx_attr('commit_state', writable=True)
-
-    @property
-    @deprecated('[3.19] use a Connection object instead')
-    def cnxset(self):
-        """connections set, set according to transaction mode for each query"""
-        if self._closed:
-            self.free_cnxset(True)
-            raise SessionClosedError('try to access connections set on a closed session %s' % self.id)
-        return self._cnx.cnxset
-
-    def set_cnxset(self):
-        """the session need a connections set to execute some queries"""
-        with self._lock: # can probably be removed
-            if self._closed:
-                self.free_cnxset(True)
-                raise SessionClosedError('try to set connections set on a closed session %s' % self.id)
-            return self._cnx.set_cnxset()
-    free_cnxset = cnx_meth('free_cnxset')
-    ensure_cnx_set = cnx_attr('ensure_cnx_set')
-
     def _touch(self):
         """update latest session usage timestamp and reset mode to read"""
         self._timestamp.touch()
@@ -1559,156 +1071,6 @@
         assert value == {}
         pass
 
-    # shared data handling ###################################################
-
-    @deprecated('[3.19] use session or transaction data')
-    def get_shared_data(self, key, default=None, pop=False, txdata=False):
-        """return value associated to `key` in session data"""
-        if txdata:
-            return self._cnx.get_shared_data(key, default, pop, txdata=True)
-        else:
-            data = self.data
-        if pop:
-            return data.pop(key, default)
-        else:
-            return data.get(key, default)
-
-    @deprecated('[3.19] use session or transaction data')
-    def set_shared_data(self, key, value, txdata=False):
-        """set value associated to `key` in session data"""
-        if txdata:
-            return self._cnx.set_shared_data(key, value, txdata=True)
-        else:
-            self.data[key] = value
-
-    # server-side service call #################################################
-
-    call_service = cnx_meth('call_service')
-
-    # request interface #######################################################
-
-    @property
-    @deprecated('[3.19] use a Connection object instead')
-    def cursor(self):
-        """return a rql cursor"""
-        return self
-
-    set_entity_cache  = cnx_meth('set_entity_cache')
-    entity_cache      = cnx_meth('entity_cache')
-    cache_entities    = cnx_meth('cached_entities')
-    drop_entity_cache = cnx_meth('drop_entity_cache')
-
-    source_defs = cnx_meth('source_defs')
-    entity_metas = cnx_meth('entity_metas')
-    describe = cnx_meth('describe') # XXX deprecated in 3.19
-
-
-    @deprecated('[3.19] use a Connection object instead')
-    def execute(self, *args, **kwargs):
-        """db-api like method directly linked to the querier execute method.
-
-        See :meth:`cubicweb.dbapi.Cursor.execute` documentation.
-        """
-        rset = self._cnx.execute(*args, **kwargs)
-        rset.req = self
-        return rset
-
-    def _clear_thread_data(self, free_cnxset=True):
-        """remove everything from the thread local storage, except connections set
-        which is explicitly removed by free_cnxset, and mode which is set anyway
-        by _touch
-        """
-        try:
-            cnx = self.__threaddata.cnx
-        except AttributeError:
-            pass
-        else:
-            if free_cnxset:
-                cnx._free_cnxset()
-                if cnx.ctx_count == 0:
-                    self._close_cnx(cnx)
-                else:
-                    cnx.clear()
-            else:
-                cnx.clear()
-
-    @deprecated('[3.19] use a Connection object instead')
-    def commit(self, free_cnxset=True, reset_pool=None):
-        """commit the current session's transaction"""
-        cstate = self._cnx.commit_state
-        if cstate == 'uncommitable':
-            raise QueryError('transaction must be rolled back')
-        try:
-            return self._cnx.commit(free_cnxset, reset_pool)
-        finally:
-            self._clear_thread_data(free_cnxset)
-
-    @deprecated('[3.19] use a Connection object instead')
-    def rollback(self, *args, **kwargs):
-        """rollback the current session's transaction"""
-        return self._rollback(*args, **kwargs)
-
-    def _rollback(self, free_cnxset=True, **kwargs):
-        try:
-            return self._cnx.rollback(free_cnxset, **kwargs)
-        finally:
-            self._clear_thread_data(free_cnxset)
-
-    def close(self):
-        # do not close connections set on session close, since they are shared now
-        tracker = self._cnxset_tracker
-        with self._lock:
-            self._closed = True
-        tracker.close()
-        if self._cnx._session_handled:
-            self._rollback()
-        self.debug('waiting for open connection of session: %s', self)
-        timeout = 10
-        pendings = tracker.wait(timeout)
-        if pendings:
-            self.error('%i connection still alive after 10 seconds, will close '
-                       'session anyway', len(pendings))
-            for cnxid in pendings:
-                cnx = self._cnxs.get(cnxid)
-                if cnx is not None:
-                    # drop cnx.cnxset
-                    with tracker:
-                        try:
-                            cnxset = cnx.cnxset
-                            if cnxset is None:
-                                continue
-                            cnx.cnxset = None
-                        except RuntimeError:
-                            msg = 'issue while force free of cnxset in %s'
-                            self.error(msg, cnx)
-                    # cnxset.reconnect() do an hard reset of the cnxset
-                    # it force it to be freed
-                    cnxset.reconnect()
-                    self.repo._free_cnxset(cnxset)
-        del self.__threaddata
-        del self._cnxs
-
-    @property
-    def closed(self):
-        return not hasattr(self, '_cnxs')
-
-    # transaction data/operations management ##################################
-
-    transaction_data = cnx_attr('transaction_data')
-    pending_operations = cnx_attr('pending_operations')
-    pruned_hooks_cache = cnx_attr('pruned_hooks_cache')
-    add_operation      = cnx_meth('add_operation')
-
-    # undo support ############################################################
-
-    ertype_supports_undo = cnx_meth('ertype_supports_undo')
-    transaction_inc_action_counter = cnx_meth('transaction_inc_action_counter')
-    transaction_uuid = cnx_meth('transaction_uuid')
-
-    # querier helpers #########################################################
-
-    rql_rewriter = cnx_attr('_rewriter')
-
     # deprecated ###############################################################
 
     @property
@@ -1725,52 +1087,10 @@
     def schema_rproperty(self, rtype, eidfrom, eidto, rprop):
         return getattr(self.rtype_eids_rdef(rtype, eidfrom, eidto), rprop)
 
-    @property
-    @deprecated("[3.13] use .cnxset attribute instead of .pool")
-    def pool(self):
-        return self.cnxset
-
-    @deprecated("[3.13] use .set_cnxset() method instead of .set_pool()")
-    def set_pool(self):
-        return self.set_cnxset()
-
-    @deprecated("[3.13] use .free_cnxset() method instead of .reset_pool()")
-    def reset_pool(self):
-        return self.free_cnxset()
-
     # these are overridden by set_log_methods below
     # only defining here to prevent pylint from complaining
     info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None
 
-Session.HOOKS_ALLOW_ALL = HOOKS_ALLOW_ALL
-Session.HOOKS_DENY_ALL = HOOKS_DENY_ALL
-Session.DEFAULT_SECURITY = DEFAULT_SECURITY
-
-
-
-class InternalSession(Session):
-    """special session created internally by the repository"""
-    is_internal_session = True
-    running_dbapi_query = False
-
-    def __init__(self, repo, cnxprops=None, safe=False):
-        super(InternalSession, self).__init__(InternalManager(), repo, cnxprops,
-                                              _id='internal')
-        self.user._cw = self # XXX remove when "vreg = user._cw.vreg" hack in entity.py is gone
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, exctype, excvalue, tb):
-        self.close()
-
-    @property
-    def cnxset(self):
-        """connections set, set according to transaction mode for each query"""
-        if self.repo.shutting_down:
-            self.free_cnxset(True)
-            raise ShuttingDown('repository is shutting down')
-        return self._cnx.cnxset
 
 
 class InternalManager(object):
@@ -1778,6 +1098,7 @@
     bootstrapping the repository or creating regular users according to
     repository content
     """
+
     def __init__(self, lang='en'):
         self.eid = -1
         self.login = u'__internal_manager__'
--- a/server/sources/native.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/server/sources/native.py	Mon Jun 22 14:27:37 2015 +0200
@@ -43,7 +43,6 @@
 from logilab.common.shellutils import getlogin
 from logilab.database import get_db_helper, sqlgen
 
-from yams import schema2sql as y2sql
 from yams.schema import role_name
 
 from cubicweb import (UnknownEid, AuthenticationError, ValidationError, Binary,
@@ -53,6 +52,7 @@
 from cubicweb.schema import VIRTUAL_RTYPES
 from cubicweb.cwconfig import CubicWebNoAppConfiguration
 from cubicweb.server import hook
+from cubicweb.server import schema2sql as y2sql
 from cubicweb.server.utils import crypt_password, eschema_eid, verify_and_update
 from cubicweb.server.sqlutils import SQL_PREFIX, SQLAdapterMixIn
 from cubicweb.server.rqlannotation import set_qdata
@@ -272,7 +272,7 @@
          {'type' : 'string',
           'default': 'postgres',
           # XXX use choice type
-          'help': 'database driver (postgres, mysql, sqlite, sqlserver2005)',
+          'help': 'database driver (postgres, sqlite, sqlserver2005)',
           'group': 'native-source', 'level': 0,
           }),
         ('db-host',
@@ -553,25 +553,7 @@
                 self._cache[cachekey] = sql, qargs, cbs
         args = self.merge_args(args, qargs)
         assert isinstance(sql, basestring), repr(sql)
-        try:
-            cursor = self.doexec(cnx, sql, args)
-        except (self.OperationalError, self.InterfaceError):
-            if cnx.mode == 'write':
-                # do not attempt to reconnect if there has been some write
-                # during the transaction
-                raise
-            # FIXME: better detection of deconnection pb
-            self.warning("trying to reconnect")
-            cnx.cnxset.reconnect()
-            cursor = self.doexec(cnx, sql, args)
-        except self.DbapiError as exc:
-            # We get this one with pyodbc and SQL Server when connection was reset
-            if exc.args[0] == '08S01' and cnx.mode != 'write':
-                self.warning("trying to reconnect")
-                cnx.cnxset.reconnect()
-                cursor = self.doexec(cnx, sql, args)
-            else:
-                raise
+        cursor = self.doexec(cnx, sql, args)
         results = self.process_result(cursor, cnx, cbs)
         assert dbg_results(results)
         return results
@@ -614,8 +596,8 @@
             sql = self.sqlgen.insert(SQL_PREFIX + entity.cw_etype, attrs)
             self.doexec(cnx, sql, attrs)
             if cnx.ertype_supports_undo(entity.cw_etype):
-                self._record_tx_action(cnx, 'tx_entity_actions', 'C',
-                                       etype=entity.cw_etype, eid=entity.eid)
+                self._record_tx_action(cnx, 'tx_entity_actions', u'C',
+                                       etype=unicode(entity.cw_etype), eid=entity.eid)
 
     def update_entity(self, cnx, entity):
         """replace an entity in the source"""
@@ -623,8 +605,8 @@
             attrs = self.preprocess_entity(entity)
             if cnx.ertype_supports_undo(entity.cw_etype):
                 changes = self._save_attrs(cnx, entity, attrs)
-                self._record_tx_action(cnx, 'tx_entity_actions', 'U',
-                                       etype=entity.cw_etype, eid=entity.eid,
+                self._record_tx_action(cnx, 'tx_entity_actions', u'U',
+                                       etype=unicode(entity.cw_etype), eid=entity.eid,
                                        changes=self._binary(dumps(changes)))
             sql = self.sqlgen.update(SQL_PREFIX + entity.cw_etype, attrs,
                                      ['cw_eid'])
@@ -638,8 +620,8 @@
                          for r in entity.e_schema.subject_relations()
                          if (r.final or r.inlined) and not r in VIRTUAL_RTYPES]
                 changes = self._save_attrs(cnx, entity, attrs)
-                self._record_tx_action(cnx, 'tx_entity_actions', 'D',
-                                       etype=entity.cw_etype, eid=entity.eid,
+                self._record_tx_action(cnx, 'tx_entity_actions', u'D',
+                                       etype=unicode(entity.cw_etype), eid=entity.eid,
                                        changes=self._binary(dumps(changes)))
             attrs = {'cw_eid': entity.eid}
             sql = self.sqlgen.delete(SQL_PREFIX + entity.cw_etype, attrs)
@@ -649,16 +631,16 @@
         """add a relation to the source"""
         self._add_relations(cnx,  rtype, [(subject, object)], inlined)
         if cnx.ertype_supports_undo(rtype):
-            self._record_tx_action(cnx, 'tx_relation_actions', 'A',
-                                   eid_from=subject, rtype=rtype, eid_to=object)
+            self._record_tx_action(cnx, 'tx_relation_actions', u'A',
+                                   eid_from=subject, rtype=unicode(rtype), eid_to=object)
 
     def add_relations(self, cnx,  rtype, subj_obj_list, inlined=False):
         """add a relations to the source"""
         self._add_relations(cnx, rtype, subj_obj_list, inlined)
         if cnx.ertype_supports_undo(rtype):
             for subject, object in subj_obj_list:
-                self._record_tx_action(cnx, 'tx_relation_actions', 'A',
-                                       eid_from=subject, rtype=rtype, eid_to=object)
+                self._record_tx_action(cnx, 'tx_relation_actions', u'A',
+                                       eid_from=subject, rtype=unicode(rtype), eid_to=object)
 
     def _add_relations(self, cnx, rtype, subj_obj_list, inlined=False):
         """add a relation to the source"""
@@ -689,8 +671,8 @@
         rschema = self.schema.rschema(rtype)
         self._delete_relation(cnx, subject, rtype, object, rschema.inlined)
         if cnx.ertype_supports_undo(rtype):
-            self._record_tx_action(cnx, 'tx_relation_actions', 'R',
-                                   eid_from=subject, rtype=rtype, eid_to=object)
+            self._record_tx_action(cnx, 'tx_relation_actions', u'R',
+                                   eid_from=subject, rtype=unicode(rtype), eid_to=object)
 
     def _delete_relation(self, cnx, subject, rtype, object, inlined=False):
         """delete a relation from the source"""
@@ -757,7 +739,7 @@
         it's a function just so that it shows up in profiling
         """
         if server.DEBUG & server.DBG_SQL:
-            print 'execmany', query, 'with', len(args), 'arguments'
+            print 'execmany', query, 'with', len(args), 'arguments', cnx.cnxset.cnx
         cursor = cnx.cnxset.cu
         try:
             # str(query) to avoid error if it's a unicode string
@@ -829,18 +811,12 @@
 
     # system source interface #################################################
 
-    def _eid_type_source(self, cnx, eid, sql, _retry=True):
+    def _eid_type_source(self, cnx, eid, sql):
         try:
             res = self.doexec(cnx, sql).fetchone()
             if res is not None:
                 return res
-        except (self.OperationalError, self.InterfaceError):
-            if cnx.mode == 'read' and _retry:
-                self.warning("trying to reconnect (eid_type_source())")
-                cnx.cnxset.reconnect()
-                return self._eid_type_source(cnx, eid, sql, _retry=False)
         except Exception:
-            assert cnx.cnxset, 'connection has no connections set'
             self.exception('failed to query entities table for eid %s', eid)
         raise UnknownEid(eid)
 
@@ -868,9 +844,10 @@
     def extid2eid(self, cnx, extid):
         """get eid from an external id. Return None if no record found."""
         assert isinstance(extid, str)
+        args = {'x': b64encode(extid)}
         cursor = self.doexec(cnx,
                              'SELECT eid FROM entities WHERE extid=%(x)s',
-                             {'x': b64encode(extid)})
+                             args)
         # XXX testing rowcount cause strange bug with sqlite, results are there
         #     but rowcount is 0
         #if cursor.rowcount > 0:
@@ -880,6 +857,17 @@
                 return result[0]
         except Exception:
             pass
+        cursor = self.doexec(cnx,
+                             'SELECT eid FROM moved_entities WHERE extid=%(x)s',
+                             args)
+        try:
+            result = cursor.fetchone()
+            if result:
+                # entity was moved to the system source, return negative
+                # number to tell the external source to ignore it
+                return -result[0]
+        except Exception:
+            pass
         return None
 
     def _handle_is_relation_sql(self, cnx, sql, attrs):
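
`extid2eid` now performs a two-step lookup: first the `entities` table, then the new `moved_entities` table, returning a negative eid for entities that were moved into the system source so the external source knows to drop them. A condensed sketch of that logic (the helper name is illustrative; `doexec` and the table layout are taken from the patch):

    def lookup_extid(doexec, cnx, extid_b64):
        row = doexec(cnx, 'SELECT eid FROM entities WHERE extid=%(x)s',
                     {'x': extid_b64}).fetchone()
        if row:
            return row[0]
        row = doexec(cnx, 'SELECT eid FROM moved_entities WHERE extid=%(x)s',
                     {'x': extid_b64}).fetchone()
        if row:
            # entity was moved to the system source: a negative eid tells
            # the external source to ignore it from now on
            return -row[0]
        return None
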
@@ -940,7 +928,7 @@
     # undo support #############################################################
 
     def undoable_transactions(self, cnx, ueid=None, **actionfilters):
-        """See :class:`cubicweb.repoapi.ClientConnection.undoable_transactions`"""
+        """See :class:`cubicweb.repoapi.Connection.undoable_transactions`"""
         # force filtering to connection's user if not a manager
         if not cnx.user.is_in_group('managers'):
             ueid = cnx.user.eid
@@ -965,7 +953,7 @@
                     # only, and with no eid specified
                     assert actionfilters.get('action', 'C') in 'CUD'
                     assert not 'eid' in actionfilters
-                    tearestr['etype'] = val
+                    tearestr['etype'] = unicode(val)
                 elif key == 'eid':
                     # eid filter may apply to 'eid' of tx_entity_actions or to
                     # 'eid_from' OR 'eid_to' of tx_relation_actions
@@ -976,10 +964,10 @@
                         trarestr['eid_to'] = val
                 elif key == 'action':
                     if val in 'CUD':
-                        tearestr['txa_action'] = val
+                        tearestr['txa_action'] = unicode(val)
                     else:
                         assert val in 'AR'
-                        trarestr['txa_action'] = val
+                        trarestr['txa_action'] = unicode(val)
                 else:
                     raise AssertionError('unknow filter %s' % key)
             assert trarestr or tearestr, "can't only filter on 'public'"
@@ -1007,17 +995,17 @@
             restr.update(tearestr)
         # we want results ordered by transaction's time descendant
         sql += ' ORDER BY tx_time DESC'
-        with cnx.ensure_cnx_set:
-            cu = self.doexec(cnx, sql, restr)
-            # turn results into transaction objects
-            return [tx.Transaction(*args) for args in cu.fetchall()]
+        cu = self.doexec(cnx, sql, restr)
+        # turn results into transaction objects
+        return [tx.Transaction(cnx, *args) for args in cu.fetchall()]
 
     def tx_info(self, cnx, txuuid):
-        """See :class:`cubicweb.repoapi.ClientConnection.transaction_info`"""
-        return tx.Transaction(txuuid, *self._tx_info(cnx, txuuid))
+        """See :class:`cubicweb.repoapi.Connection.transaction_info`"""
+        return tx.Transaction(cnx, txuuid, *self._tx_info(cnx, unicode(txuuid)))
 
     def tx_actions(self, cnx, txuuid, public):
-        """See :class:`cubicweb.repoapi.ClientConnection.transaction_actions`"""
+        """See :class:`cubicweb.repoapi.Connection.transaction_actions`"""
+        txuuid = unicode(txuuid)
         self._tx_info(cnx, txuuid)
         restr = {'tx_uuid': txuuid}
         if public:
@@ -1039,13 +1027,11 @@
         return sorted(actions, key=lambda x: x.order)
 
     def undo_transaction(self, cnx, txuuid):
-        """See :class:`cubicweb.repoapi.ClientConnection.undo_transaction`
+        """See :class:`cubicweb.repoapi.Connection.undo_transaction`
 
         important note: while undoing of a transaction, only hooks in the
         'integrity', 'activeintegrity' and 'undo' categories are called.
         """
-        # set mode so connections set isn't released subsquently until commit/rollback
-        cnx.mode = 'write'
         errors = []
         cnx.transaction_data['undoing_uuid'] = txuuid
         with cnx.deny_all_hooks_but('integrity', 'activeintegrity', 'undo'):
@@ -1097,7 +1083,7 @@
         kwargs['tx_uuid'] = cnx.transaction_uuid()
         kwargs['txa_action'] = action
         kwargs['txa_order'] = cnx.transaction_inc_action_counter()
-        kwargs['txa_public'] = cnx.running_dbapi_query
+        kwargs['txa_public'] = not cnx.hooks_in_progress
         self.doexec(cnx, self.sqlgen.insert(table, kwargs), kwargs)
 
     def _tx_info(self, cnx, txuuid):
@@ -1106,19 +1092,18 @@
         raise `NoSuchTransaction` if there is no such transaction of if the
         connection's user isn't allowed to see it.
         """
-        with cnx.ensure_cnx_set:
-            restr = {'tx_uuid': txuuid}
-            sql = self.sqlgen.select('transactions', restr,
-                                     ('tx_time', 'tx_user'))
-            cu = self.doexec(cnx, sql, restr)
-            try:
-                time, ueid = cu.fetchone()
-            except TypeError:
-                raise tx.NoSuchTransaction(txuuid)
-            if not (cnx.user.is_in_group('managers')
-                    or cnx.user.eid == ueid):
-                raise tx.NoSuchTransaction(txuuid)
-            return time, ueid
+        restr = {'tx_uuid': txuuid}
+        sql = self.sqlgen.select('transactions', restr,
+                                 ('tx_time', 'tx_user'))
+        cu = self.doexec(cnx, sql, restr)
+        try:
+            time, ueid = cu.fetchone()
+        except TypeError:
+            raise tx.NoSuchTransaction(txuuid)
+        if not (cnx.user.is_in_group('managers')
+                or cnx.user.eid == ueid):
+            raise tx.NoSuchTransaction(txuuid)
+        return time, ueid
 
     def _reedit_entity(self, entity, changes, err):
         cnx = entity._cw
@@ -1151,6 +1136,7 @@
                         err(cnx._("can't restore entity %(eid)s of type %(eschema)s, "
                                       "target of %(rtype)s (eid %(value)s) does not exist any longer")
                             % locals())
+                        changes[column] = None
             elif eschema.destination(rtype) in ('Bytes', 'Password'):
                 changes[column] = self._binary(value)
                 edited[rtype] = Binary(value)
@@ -1182,10 +1168,10 @@
         self.repo.hm.call_hooks('before_add_entity', cnx, entity=entity)
         # restore the entity
         action.changes['cw_eid'] = eid
+        # restore record in entities (will update fti if needed)
+        self.add_info(cnx, entity, self, None)
         sql = self.sqlgen.insert(SQL_PREFIX + etype, action.changes)
         self.doexec(cnx, sql, action.changes)
-        # restore record in entities (will update fti if needed)
-        self.add_info(cnx, entity, self, None)
         self.repo.hm.call_hooks('after_add_entity', cnx, entity=entity)
         return errors
 
@@ -1386,10 +1372,13 @@
   eid INTEGER PRIMARY KEY NOT NULL,
   type VARCHAR(64) NOT NULL,
   asource VARCHAR(128) NOT NULL,
-  extid VARCHAR(256)
+  extid VARCHAR(256) UNIQUE
 );;
 CREATE INDEX entities_type_idx ON entities(type);;
-CREATE INDEX entities_extid_idx ON entities(extid);;
+CREATE TABLE moved_entities (
+  eid INTEGER PRIMARY KEY NOT NULL,
+  extid VARCHAR(256) UNIQUE
+);;
 
 CREATE TABLE transactions (
   tx_uuid CHAR(32) PRIMARY KEY NOT NULL,
@@ -1524,7 +1513,7 @@
                                         SQL_PREFIX + 'login'),
                                        {'newhash': self.source._binary(newhash),
                                         'login': login})
-                    cnx.commit(free_cnxset=False)
+                    cnx.commit()
             return user
         except IndexError:
             raise AuthenticationError('bad password')
--- a/server/sources/storages.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/server/sources/storages.py	Mon Jun 22 14:27:37 2015 +0200
@@ -152,7 +152,6 @@
         """an entity using this storage for attr has been added"""
         if entity._cw.transaction_data.get('fs_importing'):
             binary = Binary.from_file(entity.cw_edited[attr].getvalue())
-            entity._cw_dont_cache_attribute(attr, repo_side=True)
         else:
             binary = entity.cw_edited.pop(attr)
             fpath = self.new_fs_path(entity, attr)
@@ -171,7 +170,6 @@
             # We do not need to create it but we need to fetch the content of
             # the file as the actual content of the attribute
             fpath = entity.cw_edited[attr].getvalue()
-            entity._cw_dont_cache_attribute(attr, repo_side=True)
             assert fpath is not None
             binary = Binary.from_file(fpath)
         else:
--- a/server/sqlutils.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/server/sqlutils.py	Mon Jun 22 14:27:37 2015 +0200
@@ -106,7 +106,7 @@
     """return sql to give all access privileges to the given user on the system
     schema
     """
-    from yams.schema2sql import grant_schema
+    from cubicweb.server.schema2sql import grant_schema
     from cubicweb.server.sources import native
     output = []
     w = output.append
@@ -124,7 +124,7 @@
               user=None, set_owner=False,
               skip_relations=PURE_VIRTUAL_RTYPES, skip_entities=()):
     """return the system sql schema, according to the given parameters"""
-    from yams.schema2sql import schema2sql
+    from cubicweb.server.schema2sql import schema2sql
     from cubicweb.server.sources import native
     if set_owner:
         assert user, 'user is argument required when set_owner is true'
@@ -149,7 +149,7 @@
 def sqldropschema(schema, driver, text_index=True,
                   skip_relations=PURE_VIRTUAL_RTYPES, skip_entities=()):
     """return the sql to drop the schema, according to the given parameters"""
-    from yams.schema2sql import dropschema2sql
+    from cubicweb.server.schema2sql import dropschema2sql
     from cubicweb.server.sources import native
     output = []
     w = output.append
@@ -503,6 +503,8 @@
         return (dt.weekday() + 1) % 7
     cnx.create_function("WEEKDAY", 1, weekday)
 
+    cnx.cursor().execute("pragma foreign_keys = on")
+
     import yams.constraints
     yams.constraints.patch_sqlite_decimal()
 
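
The extra `pragma foreign_keys = on` matters because SQLite only enforces foreign key constraints when that pragma is enabled, and it must be set on every new connection. A self-contained sketch using the standard sqlite3 module to illustrate the behaviour the hunk above relies on:

    import sqlite3

    cnx = sqlite3.connect(':memory:')
    # off by default; must be (re)enabled per connection
    cnx.execute('pragma foreign_keys = on')
    assert cnx.execute('pragma foreign_keys').fetchone()[0] == 1
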
--- a/server/ssplanner.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/server/ssplanner.py	Mon Jun 22 14:27:37 2015 +0200
@@ -249,7 +249,7 @@
                 raise QueryError('can not assign to %r relation'
                                  % relation.r_type)
             lhs, rhs = relation.get_variable_parts()
-            lhskey = lhs.as_string('utf-8')
+            lhskey = lhs.as_string()
             if not lhskey in selectedidx:
                 if lhs.variable in eidconsts:
                     eid = eidconsts[lhs.variable]
@@ -262,7 +262,7 @@
                 selectedidx[lhskey] = lhsinfo
             else:
                 lhsinfo = selectedidx[lhskey][:-1] + (None,)
-            rhskey = rhs.as_string('utf-8')
+            rhskey = rhs.as_string()
             if not rhskey in selectedidx:
                 if isinstance(rhs, Constant):
                     rhsinfo = (_CONSTANT, rhs.eval(plan.args), residx)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/data-schema2sql/schema/Company.py	Mon Jun 22 14:27:37 2015 +0200
@@ -0,0 +1,67 @@
+# copyright 2004-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of yams.
+#
+# yams is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# yams is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with yams. If not, see <http://www.gnu.org/licenses/>.
+from yams.buildobjs import EntityType, RelationType, RelationDefinition, \
+     SubjectRelation, String
+
+class Company(EntityType):
+    name = String()
+
+class Subcompany(Company):
+    __specializes_schema__ = True
+    subcompany_of = SubjectRelation('Company')
+
+class Division(Company):
+    __specializes_schema__ = True
+    division_of = SubjectRelation('Company')
+
+class Subdivision(Division):
+    __specializes_schema__ = True
+    subdivision_of = SubjectRelation('Company')
+
+class Employee(EntityType):
+    works_for = SubjectRelation('Company')
+
+class require_permission(RelationType):
+    """link a permission to the entity. This permission should be used in the
+    security definition of the entity's type to be useful.
+    """
+    fulltext_container = 'subject'
+    __permissions__ = {
+        'read':   ('managers', 'users', 'guests'),
+        'add':    ('managers',),
+        'delete': ('managers',),
+        }
+
+
+class missing_require_permission(RelationDefinition):
+    name = 'require_permission'
+    subject = 'Company'
+    object = 'EPermission'
+
+class EPermission(EntityType):
+    """entity type that may be used to construct some advanced security configuration
+    """
+    __permissions__ = {
+        'read':   ('managers', 'users', 'guests',),
+        'add':    ('managers',),
+        'delete': ('managers',),
+        'update': ('managers', 'owners',),
+        }
+    name = String(required=True, indexed=True, internationalizable=True,
+                  fulltextindexed=True, maxsize=100,
+                  description=_('name or identifier of the permission'))
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/data-schema2sql/schema/Dates.py	Mon Jun 22 14:27:37 2015 +0200
@@ -0,0 +1,28 @@
+# copyright 2004-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of yams.
+#
+# yams is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# yams is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with yams. If not, see <http://www.gnu.org/licenses/>.
+from datetime import time, date
+from yams.buildobjs import EntityType, Datetime, Date, Time
+
+class Datetest(EntityType):
+    dt1 = Datetime(default=u'now')
+    dt2 = Datetime(default=u'today')
+    d1  = Date(default=u'today')
+    d2  = Date(default=date(2007, 12, 11))
+    t1  = Time(default=time(8, 40))
+    t2  = Time(default=time(9, 45))
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/data-schema2sql/schema/State.py	Mon Jun 22 14:27:37 2015 +0200
@@ -0,0 +1,81 @@
+# copyright 2004-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of yams.
+#
+# yams is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# yams is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with yams. If not, see <http://www.gnu.org/licenses/>.
+from yams.buildobjs import (EntityType, RelationType, RelationDefinition,
+                            SubjectRelation, Int, String,  Boolean)
+from yams.constraints import SizeConstraint, UniqueConstraint
+
+from __init__ import RESTRICTED_RTYPE_PERMS
+
+class State(EntityType):
+    """used to associate simple states to an entity
+    type and/or to define workflows
+    """
+    __permissions__ = {
+        'read':   ('managers', 'users', 'guests',),
+        'add':    ('managers', 'users',),
+        'delete': ('managers', 'owners',),
+        'update': ('managers', 'owners',),
+        }
+
+    # attributes
+    eid = Int(required=True, uid=True)
+    name = String(required=True,
+                  indexed=True, internationalizable=True,
+                  constraints=[SizeConstraint(256)])
+    description = String(fulltextindexed=True)
+    # relations
+    state_of = SubjectRelation('Eetype', cardinality='+*')
+    next_state = SubjectRelation('State', cardinality='**')
+
+
+class state_of(RelationType):
+    """link a state to one or more entity type"""
+    __permissions__ = RESTRICTED_RTYPE_PERMS
+
+class next_state(RelationType):
+    """define a workflow by associating a state to possible following states
+    """
+    __permissions__ = RESTRICTED_RTYPE_PERMS
+
+class initial_state(RelationType):
+    """indicate which state should be used by default when an entity using states
+    is created
+    """
+    __permissions__ = {
+        'read':   ('managers', 'users', 'guests',),
+        'add':    ('managers', 'users',),
+        'delete': ('managers', 'users',),
+        }
+    subject = 'Eetype'
+    object = 'State'
+    cardinality = '?*'
+    inlined = True
+
+class Eetype(EntityType):
+    """define an entity type, used to build the application schema"""
+    __permissions__ = {
+        'read':   ('managers', 'users', 'guests',),
+        'add':    ('managers',),
+        'delete': ('managers',),
+        'update': ('managers', 'owners',),
+        }
+    name = String(required=True, indexed=True, internationalizable=True,
+                  constraints=[UniqueConstraint(), SizeConstraint(64)])
+    description = String(fulltextindexed=True)
+    meta = Boolean()
+    final = Boolean()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/data-schema2sql/schema/__init__.py	Mon Jun 22 14:27:37 2015 +0200
@@ -0,0 +1,23 @@
+# copyright 2004-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of yams.
+#
+# yams is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# yams is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with yams. If not, see <http://www.gnu.org/licenses/>.
+"""test schema"""
+RESTRICTED_RTYPE_PERMS = {
+    'read':   ('managers', 'users', 'guests',),
+    'add':    ('managers',),
+    'delete': ('managers',),
+    }
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/data-schema2sql/schema/schema.py	Mon Jun 22 14:27:37 2015 +0200
@@ -0,0 +1,112 @@
+# copyright 2004-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of yams.
+#
+# yams is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# yams is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with yams. If not, see <http://www.gnu.org/licenses/>.
+from yams.buildobjs import (EntityType, RelationDefinition, RelationType,
+                            SubjectRelation, String, Int, Float, Date, Boolean)
+
+class Affaire(EntityType):
+    sujet = String(maxsize=128)
+    ref = String(maxsize=12)
+
+    concerne = SubjectRelation('Societe')
+    obj_wildcard = SubjectRelation('*')
+    sym_rel = SubjectRelation('Person', symmetric=True)
+    inline_rel = SubjectRelation('Person', inlined=True, cardinality='?*')
+
+class subj_wildcard(RelationDefinition):
+    subject = '*'
+    object = 'Affaire'
+
+
+class Person(EntityType):
+    __unique_together__ = [('nom', 'prenom')]
+    nom    = String(maxsize=64, fulltextindexed=True, required=True)
+    prenom = String(maxsize=64, fulltextindexed=True)
+    sexe   = String(maxsize=1, default='M')
+    promo  = String(vocabulary=('bon','pasbon'))
+    titre  = String(maxsize=128, fulltextindexed=True)
+    adel   = String(maxsize=128)
+    ass    = String(maxsize=128)
+    web    = String(maxsize=128)
+    tel    = Int(__permissions__={'read': (),
+                                  'add': ('managers',),
+                                  'update': ('managers',)})
+    fax    = Int()
+    datenaiss = Date()
+    test   = Boolean()
+    salary = Float()
+    travaille = SubjectRelation('Societe',
+                                __permissions__={'read': (),
+                                                 'add': (),
+                                                 'delete': ('managers',),
+                                                 })
+
+    evaluee = SubjectRelation('Note')
+
+class Salaried(Person):
+    __specializes_schema__ = True
+
+class Societe(EntityType):
+    nom  = String(maxsize=64, fulltextindexed=True)
+    web = String(maxsize=128)
+    tel  = Int()
+    fax  = Int()
+    rncs = String(maxsize=32)
+    ad1  = String(maxsize=128)
+    ad2  = String(maxsize=128)
+    ad3  = String(maxsize=128)
+    cp   = String(maxsize=12)
+    ville = String(maxsize=32)
+
+    evaluee = SubjectRelation('Note')
+
+
+class Note(EntityType):
+    date = String(maxsize=10)
+    type = String(maxsize=1)
+    para = String(maxsize=512)
+
+
+class pkginfo(EntityType):
+    modname = String(maxsize=30, required=True)
+    version = String(maxsize=10, required=True, default='0.1')
+    copyright = String(required=True)
+    license = String(vocabulary=('GPL', 'ZPL'))
+    short_desc = String(maxsize=80, required=True)
+    long_desc = String(required=True, fulltextindexed=True)
+    author = String(maxsize=100, required=True)
+    author_email = String(maxsize=100, required=True)
+    mailinglist = String(maxsize=100)
+    debian_handler = String(vocabulary=('machin', 'bidule'))
+
+
+class evaluee(RelationType):
+    __permissions__ = {
+        'read': ('managers',),
+        'add': ('managers',),
+        'delete': ('managers',),
+        }
+
+class concerne(RelationDefinition):
+    subject = 'Person'
+    object = 'Affaire'
+    __permissions__ = {
+        'read': ('managers',),
+        'add': ('managers',),
+        'delete': ('managers',),
+        }
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/data-schema2sql/schema/toignore	Mon Jun 22 14:27:37 2015 +0200
@@ -0,0 +1,1 @@
+coucou
--- a/server/test/data/bootstrap_cubes	Mon Jun 22 12:51:28 2015 +0200
+++ b/server/test/data/bootstrap_cubes	Mon Jun 22 14:27:37 2015 +0200
@@ -1,1 +1,1 @@
-card,comment,folder,tag,basket,email,file,localperms
+card,comment,tag,basket,email,file,localperms
--- a/server/test/data/migratedapp/bootstrap_cubes	Mon Jun 22 12:51:28 2015 +0200
+++ b/server/test/data/migratedapp/bootstrap_cubes	Mon Jun 22 14:27:37 2015 +0200
@@ -1,1 +1,1 @@
-card,comment,folder,tag,basket,email,file
+card,comment,tag,basket,email,file
--- a/server/test/data/migratedapp/schema.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/server/test/data/migratedapp/schema.py	Mon Jun 22 14:27:37 2015 +0200
@@ -165,6 +165,15 @@
 # `login_user` rdef is gone
 # `ambiguous_inlined` rdef is gone
 
+class Folder(EntityType):
+    """folders are used to classify entities. They may be defined as a tree.
+    """
+    name = String(required=True, indexed=True, internationalizable=True,
+                  maxsize=64)
+    description = RichString(fulltextindexed=True)
+    filed_under = SubjectRelation('Folder', description=_('parent folder'))
+
+
 # New
 class Text(Para):
     __specializes_schema__ = True
--- a/server/test/data/schema.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/server/test/data/schema.py	Mon Jun 22 14:27:37 2015 +0200
@@ -240,6 +240,14 @@
     subject = 'Tag'
     object = ('CWUser', 'CWGroup', 'State', 'Note', 'Card', 'Affaire')
 
+class Folder(EntityType):
+    """folders are used to classify entities. They may be defined as a tree.
+    """
+    name = String(required=True, indexed=True, internationalizable=True,
+                  maxsize=64)
+    description = RichString(fulltextindexed=True)
+    filed_under = SubjectRelation('Folder', description=_('parent folder'))
+
 class filed_under(RelationDefinition):
     subject = ('Note', 'Affaire')
     object = 'Folder'
--- a/server/test/unittest_migractions.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/server/test/unittest_migractions.py	Mon Jun 22 14:27:37 2015 +0200
@@ -72,7 +72,7 @@
 
     @contextmanager
     def mh(self):
-        with self.admin_access.client_cnx() as cnx:
+        with self.admin_access.repo_cnx() as cnx:
             yield cnx, ServerMigrationHelper(self.repo.config, migrschema,
                                              repo=self.repo, cnx=cnx,
                                              interactive=False)
@@ -259,8 +259,8 @@
                                'filed_under2', 'has_text',
                                'identity', 'in_basket', 'is', 'is_instance_of',
                                'modification_date', 'name', 'owned_by'])
-            self.assertEqual([str(rs) for rs in self.schema['Folder2'].object_relations()],
-                              ['filed_under2', 'identity'])
+            self.assertCountEqual([str(rs) for rs in self.schema['Folder2'].object_relations()],
+                                  ['filed_under2', 'identity'])
             # Old will be missing as it has been renamed into 'New' in the migrated
             # schema while New hasn't been added here.
             self.assertEqual(sorted(str(e) for e in self.schema['filed_under2'].subjects()),
--- a/server/test/unittest_querier.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/server/test/unittest_querier.py	Mon Jun 22 14:27:37 2015 +0200
@@ -69,7 +69,6 @@
 
 def tearDownClass(cls, *args):
     global repo, cnx
-    cnx.close()
     repo.shutdown()
     del repo, cnx
 
@@ -1173,11 +1172,10 @@
     def test_delete_3(self):
         s = self.user_groups_session('users')
         with s.new_cnx() as cnx:
-            with cnx.ensure_cnx_set:
-                peid, = self.o.execute(cnx, "INSERT Personne P: P nom 'toto'")[0]
-                seid, = self.o.execute(cnx, "INSERT Societe S: S nom 'logilab'")[0]
-                self.o.execute(cnx, "SET P travaille S")
-                cnx.commit()
+            peid, = self.o.execute(cnx, "INSERT Personne P: P nom 'toto'")[0]
+            seid, = self.o.execute(cnx, "INSERT Societe S: S nom 'logilab'")[0]
+            self.o.execute(cnx, "SET P travaille S")
+            cnx.commit()
         rset = self.qexecute('Personne P WHERE P travaille S')
         self.assertEqual(len(rset.rows), 1)
         self.qexecute("DELETE X travaille Y WHERE X eid %s, Y eid %s" % (peid, seid))
@@ -1212,12 +1210,11 @@
                               'X sender Y, X recipients Y WHERE Y is EmailAddress')[0]
         self.qexecute("DELETE Email X")
         with self.session.new_cnx() as cnx:
-            with cnx.ensure_cnx_set:
-                sqlc = cnx.cnxset.cu
-                sqlc.execute('SELECT * FROM recipients_relation')
-                self.assertEqual(len(sqlc.fetchall()), 0)
-                sqlc.execute('SELECT * FROM owned_by_relation WHERE eid_from=%s'%eeid)
-                self.assertEqual(len(sqlc.fetchall()), 0)
+            sqlc = cnx.cnxset.cu
+            sqlc.execute('SELECT * FROM recipients_relation')
+            self.assertEqual(len(sqlc.fetchall()), 0)
+            sqlc.execute('SELECT * FROM owned_by_relation WHERE eid_from=%s'%eeid)
+            self.assertEqual(len(sqlc.fetchall()), 0)
 
     def test_nonregr_delete_cache2(self):
         eid = self.qexecute("INSERT Folder T: T name 'toto'")[0][0]
@@ -1364,12 +1361,11 @@
         self.assertRaises(Unauthorized,
                           self.qexecute, "Any P WHERE X is CWUser, X login 'bob', X upassword P")
         with self.session.new_cnx() as cnx:
-            with cnx.ensure_cnx_set:
-                cursor = cnx.cnxset.cu
-                cursor.execute("SELECT %supassword from %sCWUser WHERE %slogin='bob'"
-                               % (SQL_PREFIX, SQL_PREFIX, SQL_PREFIX))
-                passwd = str(cursor.fetchone()[0])
-                self.assertEqual(passwd, crypt_password('toto', passwd))
+            cursor = cnx.cnxset.cu
+            cursor.execute("SELECT %supassword from %sCWUser WHERE %slogin='bob'"
+                           % (SQL_PREFIX, SQL_PREFIX, SQL_PREFIX))
+            passwd = str(cursor.fetchone()[0])
+            self.assertEqual(passwd, crypt_password('toto', passwd))
         rset = self.qexecute("Any X WHERE X is CWUser, X login 'bob', X upassword %(pwd)s",
                             {'pwd': Binary(passwd)})
         self.assertEqual(len(rset.rows), 1)
@@ -1377,21 +1373,20 @@
 
     def test_update_upassword(self):
         with self.session.new_cnx() as cnx:
-            with cnx.ensure_cnx_set:
-                rset = cnx.execute("INSERT CWUser X: X login 'bob', X upassword %(pwd)s",
-                                   {'pwd': 'toto'})
-                self.assertEqual(rset.description[0][0], 'CWUser')
-                rset = cnx.execute("SET X upassword %(pwd)s WHERE X is CWUser, X login 'bob'",
-                                   {'pwd': 'tutu'})
-                cursor = cnx.cnxset.cu
-                cursor.execute("SELECT %supassword from %sCWUser WHERE %slogin='bob'"
-                               % (SQL_PREFIX, SQL_PREFIX, SQL_PREFIX))
-                passwd = str(cursor.fetchone()[0])
-                self.assertEqual(passwd, crypt_password('tutu', passwd))
-                rset = cnx.execute("Any X WHERE X is CWUser, X login 'bob', X upassword %(pwd)s",
-                                   {'pwd': Binary(passwd)})
-                self.assertEqual(len(rset.rows), 1)
-                self.assertEqual(rset.description, [('CWUser',)])
+            rset = cnx.execute("INSERT CWUser X: X login 'bob', X upassword %(pwd)s",
+                               {'pwd': 'toto'})
+            self.assertEqual(rset.description[0][0], 'CWUser')
+            rset = cnx.execute("SET X upassword %(pwd)s WHERE X is CWUser, X login 'bob'",
+                               {'pwd': 'tutu'})
+            cursor = cnx.cnxset.cu
+            cursor.execute("SELECT %supassword from %sCWUser WHERE %slogin='bob'"
+                           % (SQL_PREFIX, SQL_PREFIX, SQL_PREFIX))
+            passwd = str(cursor.fetchone()[0])
+            self.assertEqual(passwd, crypt_password('tutu', passwd))
+            rset = cnx.execute("Any X WHERE X is CWUser, X login 'bob', X upassword %(pwd)s",
+                               {'pwd': Binary(passwd)})
+            self.assertEqual(len(rset.rows), 1)
+            self.assertEqual(rset.description, [('CWUser',)])
 
     # ZT datetime tests ########################################################
 
--- a/server/test/unittest_repository.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/server/test/unittest_repository.py	Mon Jun 22 14:27:37 2015 +0200
@@ -31,10 +31,9 @@
                       UnknownEid, AuthenticationError, Unauthorized, QueryError)
 from cubicweb.predicates import is_instance
 from cubicweb.schema import RQLConstraint
-from cubicweb.dbapi import connect, multiple_connections_unfix
 from cubicweb.devtools.testlib import CubicWebTC
 from cubicweb.devtools.repotest import tuplify
-from cubicweb.server import repository, hook
+from cubicweb.server import hook
 from cubicweb.server.sqlutils import SQL_PREFIX
 from cubicweb.server.hook import Hook
 from cubicweb.server.sources import native
@@ -93,35 +92,17 @@
         self.assertRaises(AuthenticationError,
                           self.repo.connect, None)
 
-    def test_execute(self):
+    def test_login_upassword_accent(self):
+        with self.admin_access.repo_cnx() as cnx:
+            cnx.execute('INSERT CWUser X: X login %(login)s, X upassword %(passwd)s, '
+                        'X in_group G WHERE G name "users"',
+                        {'login': u"barnabé", 'passwd': u"héhéhé".encode('UTF8')})
+            cnx.commit()
         repo = self.repo
-        cnxid = repo.connect(self.admlogin, password=self.admpassword)
-        repo.execute(cnxid, 'Any X')
-        repo.execute(cnxid, 'Any X where X is Personne')
-        repo.execute(cnxid, 'Any X where X is Personne, X nom ~= "to"')
-        repo.execute(cnxid, 'Any X WHERE X has_text %(text)s', {'text': u'\xe7a'})
-        repo.close(cnxid)
-
-    def test_login_upassword_accent(self):
-        repo = self.repo
-        cnxid = repo.connect(self.admlogin, password=self.admpassword)
-        repo.execute(cnxid, 'INSERT CWUser X: X login %(login)s, X upassword %(passwd)s, X in_group G WHERE G name "users"',
-                     {'login': u"barnabé", 'passwd': u"héhéhé".encode('UTF8')})
-        repo.commit(cnxid)
-        repo.close(cnxid)
         cnxid = repo.connect(u"barnabé", password=u"héhéhé".encode('UTF8'))
         self.assert_(cnxid)
         repo.close(cnxid)
 
-    def test_rollback_on_commit_error(self):
-        cnxid = self.repo.connect(self.admlogin, password=self.admpassword)
-        self.repo.execute(cnxid,
-                          'INSERT CWUser X: X login %(login)s, X upassword %(passwd)s',
-                          {'login': u"tutetute", 'passwd': 'tutetute'})
-        self.assertRaises(ValidationError, self.repo.commit, cnxid)
-        self.assertFalse(self.repo.execute(cnxid, 'CWUser X WHERE X login "tutetute"'))
-        self.repo.close(cnxid)
-
     def test_rollback_on_execute_validation_error(self):
         class ValidationErrorAfterHook(Hook):
             __regid__ = 'valerror-after-hook'
@@ -166,31 +147,6 @@
         cnxid = repo.connect(self.admlogin, password=self.admpassword)
         self.assert_(cnxid)
         repo.close(cnxid)
-        self.assertRaises(BadConnectionId, repo.execute, cnxid, 'Any X')
-
-    def test_invalid_cnxid(self):
-        self.assertRaises(BadConnectionId, self.repo.execute, 0, 'Any X')
-        self.assertRaises(BadConnectionId, self.repo.close, None)
-
-    def test_shared_data(self):
-        repo = self.repo
-        cnxid = repo.connect(self.admlogin, password=self.admpassword)
-        repo.set_shared_data(cnxid, 'data', 4)
-        cnxid2 = repo.connect(self.admlogin, password=self.admpassword)
-        self.assertEqual(repo.get_shared_data(cnxid, 'data'), 4)
-        self.assertEqual(repo.get_shared_data(cnxid2, 'data'), None)
-        repo.set_shared_data(cnxid2, 'data', 5)
-        self.assertEqual(repo.get_shared_data(cnxid, 'data'), 4)
-        self.assertEqual(repo.get_shared_data(cnxid2, 'data'), 5)
-        repo.get_shared_data(cnxid2, 'data', pop=True)
-        self.assertEqual(repo.get_shared_data(cnxid, 'data'), 4)
-        self.assertEqual(repo.get_shared_data(cnxid2, 'data'), None)
-        repo.close(cnxid)
-        repo.close(cnxid2)
-        self.assertRaises(BadConnectionId, repo.get_shared_data, cnxid, 'data')
-        self.assertRaises(BadConnectionId, repo.get_shared_data, cnxid2, 'data')
-        self.assertRaises(BadConnectionId, repo.set_shared_data, cnxid, 'data', 1)
-        self.assertRaises(BadConnectionId, repo.set_shared_data, cnxid2, 'data', 1)
 
     def test_check_session(self):
         repo = self.repo
@@ -199,76 +155,6 @@
         repo.close(cnxid)
         self.assertRaises(BadConnectionId, repo.check_session, cnxid)
 
-    def test_transaction_base(self):
-        repo = self.repo
-        cnxid = repo.connect(self.admlogin, password=self.admpassword)
-        # check db state
-        result = repo.execute(cnxid, 'Personne X')
-        self.assertEqual(result.rowcount, 0)
-        # rollback entity insertion
-        repo.execute(cnxid, "INSERT Personne X: X nom 'bidule'")
-        result = repo.execute(cnxid, 'Personne X')
-        self.assertEqual(result.rowcount, 1)
-        repo.rollback(cnxid)
-        result = repo.execute(cnxid, 'Personne X')
-        self.assertEqual(result.rowcount, 0, result.rows)
-        # commit
-        repo.execute(cnxid, "INSERT Personne X: X nom 'bidule'")
-        repo.commit(cnxid)
-        result = repo.execute(cnxid, 'Personne X')
-        self.assertEqual(result.rowcount, 1)
-        repo.close(cnxid)
-
-    def test_transaction_base2(self):
-        repo = self.repo
-        cnxid = repo.connect(self.admlogin, password=self.admpassword)
-        # rollback relation insertion
-        repo.execute(cnxid, "SET U in_group G WHERE U login 'admin', G name 'guests'")
-        result = repo.execute(cnxid, "Any U WHERE U in_group G, U login 'admin', G name 'guests'")
-        self.assertEqual(result.rowcount, 1)
-        repo.rollback(cnxid)
-        result = repo.execute(cnxid, "Any U WHERE U in_group G, U login 'admin', G name 'guests'")
-        self.assertEqual(result.rowcount, 0, result.rows)
-        repo.close(cnxid)
-
-    def test_transaction_base3(self):
-        repo = self.repo
-        cnxid = repo.connect(self.admlogin, password=self.admpassword)
-        # rollback state change which trigger TrInfo insertion
-        session = repo._get_session(cnxid)
-        user = session.user
-        user.cw_adapt_to('IWorkflowable').fire_transition('deactivate')
-        rset = repo.execute(cnxid, 'TrInfo T WHERE T wf_info_for X, X eid %(x)s', {'x': user.eid})
-        self.assertEqual(len(rset), 1)
-        repo.rollback(cnxid)
-        rset = repo.execute(cnxid, 'TrInfo T WHERE T wf_info_for X, X eid %(x)s', {'x': user.eid})
-        self.assertEqual(len(rset), 0)
-        repo.close(cnxid)
-
-    def test_close_kill_processing_request(self):
-        repo = self.repo
-        cnxid = repo.connect(self.admlogin, password=self.admpassword)
-        repo.execute(cnxid, 'INSERT CWUser X: X login "toto", X upassword "tutu", X in_group G WHERE G name "users"')
-        repo.commit(cnxid)
-        lock = threading.Lock()
-        lock.acquire()
-        # close has to be in the thread due to sqlite limitations
-        def close_in_a_few_moment():
-            lock.acquire()
-            repo.close(cnxid)
-        t = threading.Thread(target=close_in_a_few_moment)
-        t.start()
-        def run_transaction():
-            lock.release()
-            repo.execute(cnxid, 'DELETE CWUser X WHERE X login "toto"')
-            repo.commit(cnxid)
-        try:
-            with self.assertRaises(SessionClosedError) as cm:
-                run_transaction()
-            self.assertEqual(str(cm.exception), 'try to access connections set on a closed session %s' % cnxid)
-        finally:
-            t.join()
-
     def test_initial_schema(self):
         schema = self.repo.schema
         # check order of attributes is respected
@@ -312,118 +198,14 @@
         ownedby = schema.rschema('owned_by')
         self.assertEqual(ownedby.objects('CWEType'), ('CWUser',))
 
-    def test_pyro(self):
-        import Pyro
-        Pyro.config.PYRO_MULTITHREADED = 0
-        done = []
-        self.repo.config.global_set_option('pyro-ns-host', 'NO_PYRONS')
-        daemon = self.repo.pyro_register()
-        try:
-            uri = self.repo.pyro_uri.replace('PYRO', 'pyroloc')
-            # the client part has to be in the thread due to sqlite limitations
-            t = threading.Thread(target=self._pyro_client, args=(uri, done))
-            t.start()
-            while not done:
-                daemon.handleRequests(1.0)
-            t.join(1)
-            if t.isAlive():
-                self.fail('something went wrong, thread still alive')
-        finally:
-            repository.pyro_unregister(self.repo.config)
-            from logilab.common import pyro_ext
-            pyro_ext._DAEMONS.clear()
-
-
-    def _pyro_client(self, uri, done):
-        cnx = connect(uri,
-                      u'admin', password='gingkow',
-                      initlog=False) # don't reset logging configuration
-        try:
-            cnx.load_appobjects(subpath=('entities',))
-            # check we can get the schema
-            schema = cnx.get_schema()
-            self.assertTrue(cnx.vreg)
-            self.assertTrue('etypes'in cnx.vreg)
-            cu = cnx.cursor()
-            rset = cu.execute('Any U,G WHERE U in_group G')
-            user = iter(rset.entities()).next()
-            self.assertTrue(user._cw)
-            self.assertTrue(user._cw.vreg)
-            from cubicweb.entities import authobjs
-            self.assertIsInstance(user._cw.user, authobjs.CWUser)
-            # make sure the tcp connection is closed properly; yes, it's disgusting.
-            adapter = cnx._repo.adapter
-            cnx.close()
-            adapter.release()
-            done.append(True)
-        finally:
-            # connect monkey patch some method by default, remove them
-            multiple_connections_unfix()
-
-
-    def test_zmq(self):
-        try:
-            import zmq
-        except ImportError:
-            self.skipTest("zmq in not available")
-        done = []
-        from cubicweb.devtools import TestServerConfiguration as ServerConfiguration
-        from cubicweb.server.cwzmq import ZMQRepositoryServer
-        # the client part has to be in a thread due to sqlite limitations
-        t = threading.Thread(target=self._zmq_client, args=(done,))
-        t.start()
-
-        zmq_server = ZMQRepositoryServer(self.repo)
-        zmq_server.connect('zmqpickle-tcp://127.0.0.1:41415')
-
-        t2 = threading.Thread(target=self._zmq_quit, args=(done, zmq_server,))
-        t2.start()
-
-        zmq_server.run()
-
-        t2.join(1)
-        t.join(1)
-
-        if t.isAlive():
-            self.fail('something went wrong, thread still alive')
-
-    def _zmq_quit(self, done, srv):
-        while not done:
-            time.sleep(0.1)
-        srv.quit()
-
-    def _zmq_client(self, done):
-        try:
-            cnx = connect('zmqpickle-tcp://127.0.0.1:41415', u'admin', password=u'gingkow',
-                          initlog=False) # don't reset logging configuration
-            try:
-                cnx.load_appobjects(subpath=('entities',))
-                # check we can get the schema
-                schema = cnx.get_schema()
-                self.assertTrue(cnx.vreg)
-                self.assertTrue('etypes'in cnx.vreg)
-                cu = cnx.cursor()
-                rset = cu.execute('Any U,G WHERE U in_group G')
-                user = iter(rset.entities()).next()
-                self.assertTrue(user._cw)
-                self.assertTrue(user._cw.vreg)
-                from cubicweb.entities import authobjs
-                self.assertIsInstance(user._cw.user, authobjs.CWUser)
-                cnx.close()
-                done.append(True)
-            finally:
-                # connect monkey patch some method by default, remove them
-                multiple_connections_unfix()
-        finally:
-            done.append(False)
-
     def test_internal_api(self):
         repo = self.repo
         cnxid = repo.connect(self.admlogin, password=self.admpassword)
-        session = repo._get_session(cnxid, setcnxset=True)
-        self.assertEqual(repo.type_and_source_from_eid(2, session),
-                         ('CWGroup', None, 'system'))
-        self.assertEqual(repo.type_from_eid(2, session), 'CWGroup')
+        session = repo._get_session(cnxid)
+        with session.new_cnx() as cnx:
+            self.assertEqual(repo.type_and_source_from_eid(2, cnx),
+                             ('CWGroup', None, 'system'))
+            self.assertEqual(repo.type_from_eid(2, cnx), 'CWGroup')
         repo.close(cnxid)
 
     def test_public_api(self):
@@ -435,45 +217,11 @@
         # .properties() return a result set
         self.assertEqual(self.repo.properties().rql, 'Any K,V WHERE P is CWProperty,P pkey K, P value V, NOT P for_user U')
 
-    def test_session_api(self):
-        repo = self.repo
-        cnxid = repo.connect(self.admlogin, password=self.admpassword)
-        self.assertEqual(repo.user_info(cnxid), (6, 'admin', set([u'managers']), {}))
-        self.assertEqual({'type': u'CWGroup', 'extid': None, 'source': 'system'},
-                         repo.entity_metas(cnxid, 2))
-        self.assertEqual(repo.describe(cnxid, 2), (u'CWGroup', 'system', None, 'system'))
-        repo.close(cnxid)
-        self.assertRaises(BadConnectionId, repo.user_info, cnxid)
-        self.assertRaises(BadConnectionId, repo.describe, cnxid, 1)
-
-    def test_shared_data_api(self):
-        repo = self.repo
-        cnxid = repo.connect(self.admlogin, password=self.admpassword)
-        self.assertEqual(repo.get_shared_data(cnxid, 'data'), None)
-        repo.set_shared_data(cnxid, 'data', 4)
-        self.assertEqual(repo.get_shared_data(cnxid, 'data'), 4)
-        repo.get_shared_data(cnxid, 'data', pop=True)
-        repo.get_shared_data(cnxid, 'whatever', pop=True)
-        self.assertEqual(repo.get_shared_data(cnxid, 'data'), None)
-        repo.close(cnxid)
-        self.assertRaises(BadConnectionId, repo.set_shared_data, cnxid, 'data', 0)
-        self.assertRaises(BadConnectionId, repo.get_shared_data, cnxid, 'data')
-
     def test_schema_is_relation(self):
         with self.admin_access.repo_cnx() as cnx:
             no_is_rset = cnx.execute('Any X WHERE NOT X is ET')
             self.assertFalse(no_is_rset, no_is_rset.description)
 
-#     def test_perfo(self):
-#         self.set_debug(True)
-#         from time import time, clock
-#         t, c = time(), clock()
-#         try:
-#             self.create_user('toto')
-#         finally:
-#             self.set_debug(False)
-#         print 'test time: %.3f (time) %.3f (cpu)' % ((time() - t), clock() - c)
-
     def test_delete_if_singlecard1(self):
         with self.admin_access.repo_cnx() as cnx:
             note = cnx.create_entity('Affaire')
@@ -626,38 +374,37 @@
             namecol = SQL_PREFIX + 'name'
             finalcol = SQL_PREFIX + 'final'
             with self.admin_access.repo_cnx() as cnx:
-                with cnx.ensure_cnx_set:
-                    cu = cnx.system_sql('SELECT %s FROM %s WHERE %s is NULL'
-                                        % (namecol, table, finalcol))
-                    self.assertEqual(cu.fetchall(), [])
-                    cu = cnx.system_sql('SELECT %s FROM %s '
-                                        'WHERE %s=%%(final)s ORDER BY %s'
-                                        % (namecol, table, finalcol, namecol),
-                                        {'final': True})
-                    self.assertEqual(cu.fetchall(),
-                                     [(u'BabarTestType',),
-                                      (u'BigInt',), (u'Boolean',), (u'Bytes',),
-                                      (u'Date',), (u'Datetime',),
-                                      (u'Decimal',),(u'Float',),
-                                      (u'Int',),
-                                      (u'Interval',), (u'Password',),
-                                      (u'String',),
-                                      (u'TZDatetime',), (u'TZTime',), (u'Time',)])
-                    sql = ("SELECT etype.cw_eid, etype.cw_name, cstr.cw_eid, rel.eid_to "
-                           "FROM cw_CWUniqueTogetherConstraint as cstr, "
-                           "     relations_relation as rel, "
-                           "     cw_CWEType as etype "
-                           "WHERE cstr.cw_eid = rel.eid_from "
-                           "  AND cstr.cw_constraint_of = etype.cw_eid "
-                           "  AND etype.cw_name = 'Personne' "
-                           ";")
-                    cu = cnx.system_sql(sql)
-                    rows = cu.fetchall()
-                    self.assertEqual(len(rows), 3)
-                    person = self.repo.schema.eschema('Personne')
-                    self.assertEqual(len(person._unique_together), 1)
-                    self.assertItemsEqual(person._unique_together[0],
-                                          ('nom', 'prenom', 'inline2'))
+                cu = cnx.system_sql('SELECT %s FROM %s WHERE %s is NULL'
+                                    % (namecol, table, finalcol))
+                self.assertEqual(cu.fetchall(), [])
+                cu = cnx.system_sql('SELECT %s FROM %s '
+                                    'WHERE %s=%%(final)s ORDER BY %s'
+                                    % (namecol, table, finalcol, namecol),
+                                    {'final': True})
+                self.assertEqual(cu.fetchall(),
+                                 [(u'BabarTestType',),
+                                  (u'BigInt',), (u'Boolean',), (u'Bytes',),
+                                  (u'Date',), (u'Datetime',),
+                                  (u'Decimal',),(u'Float',),
+                                  (u'Int',),
+                                  (u'Interval',), (u'Password',),
+                                  (u'String',),
+                                  (u'TZDatetime',), (u'TZTime',), (u'Time',)])
+                sql = ("SELECT etype.cw_eid, etype.cw_name, cstr.cw_eid, rel.eid_to "
+                       "FROM cw_CWUniqueTogetherConstraint as cstr, "
+                       "     relations_relation as rel, "
+                       "     cw_CWEType as etype "
+                       "WHERE cstr.cw_eid = rel.eid_from "
+                       "  AND cstr.cw_constraint_of = etype.cw_eid "
+                       "  AND etype.cw_name = 'Personne' "
+                       ";")
+                cu = cnx.system_sql(sql)
+                rows = cu.fetchall()
+                self.assertEqual(len(rows), 3)
+                person = self.repo.schema.eschema('Personne')
+                self.assertEqual(len(person._unique_together), 1)
+                self.assertItemsEqual(person._unique_together[0],
+                                      ('nom', 'prenom', 'inline2'))
 
         finally:
             self.repo.set_schema(origshema)
@@ -680,30 +427,26 @@
 
     def test_type_from_eid(self):
         with self.admin_access.repo_cnx() as cnx:
-            with cnx.ensure_cnx_set:
-                self.assertEqual(self.repo.type_from_eid(2, cnx), 'CWGroup')
+            self.assertEqual(self.repo.type_from_eid(2, cnx), 'CWGroup')
 
     def test_type_from_eid_raise(self):
         with self.admin_access.repo_cnx() as cnx:
-            with cnx.ensure_cnx_set:
-                self.assertRaises(UnknownEid, self.repo.type_from_eid, -2, cnx)
+            self.assertRaises(UnknownEid, self.repo.type_from_eid, -2, cnx)
 
     def test_add_delete_info(self):
         with self.admin_access.repo_cnx() as cnx:
-            with cnx.ensure_cnx_set:
-                cnx.mode = 'write'
-                entity = self.repo.vreg['etypes'].etype_class('Personne')(cnx)
-                entity.eid = -1
-                entity.complete = lambda x: None
-                self.repo.add_info(cnx, entity, self.repo.system_source)
-                cu = cnx.system_sql('SELECT * FROM entities WHERE eid = -1')
-                data = cu.fetchall()
-                self.assertEqual(tuplify(data), [(-1, 'Personne', 'system', None)])
-                self.repo.delete_info(cnx, entity, 'system')
-                #self.repo.commit()
-                cu = cnx.system_sql('SELECT * FROM entities WHERE eid = -1')
-                data = cu.fetchall()
-                self.assertEqual(data, [])
+            entity = self.repo.vreg['etypes'].etype_class('Personne')(cnx)
+            entity.eid = -1
+            entity.complete = lambda x: None
+            self.repo.add_info(cnx, entity, self.repo.system_source)
+            cu = cnx.system_sql('SELECT * FROM entities WHERE eid = -1')
+            data = cu.fetchall()
+            self.assertEqual(tuplify(data), [(-1, 'Personne', 'system', None)])
+            self.repo._delete_cascade_multi(cnx, [entity])
+            self.repo.system_source.delete_info_multi(cnx, [entity])
+            cu = cnx.system_sql('SELECT * FROM entities WHERE eid = -1')
+            data = cu.fetchall()
+            self.assertEqual(data, [])
 
 
 class FTITC(CubicWebTC):
@@ -759,7 +502,6 @@
                               u'system.version.cubicweb',
                               u'system.version.email',
                               u'system.version.file',
-                              u'system.version.folder',
                               u'system.version.localperms',
                               u'system.version.tag'])
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/unittest_schema2sql.py	Mon Jun 22 14:27:37 2015 +0200
@@ -0,0 +1,288 @@
+# copyright 2004-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+"""unit tests for module cubicweb.server.schema2sql
+"""
+
+import os.path as osp
+
+from logilab.common.testlib import TestCase, unittest_main
+from logilab.database import get_db_helper
+
+from yams.reader import SchemaLoader
+from cubicweb.server import schema2sql
+
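+# have schema2sql emit DEFAULT clauses so the expected DDL below can check default values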
+schema2sql.SET_DEFAULT = True
+
+DATADIR = osp.abspath(osp.join(osp.dirname(__file__), 'data-schema2sql'))
+
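+# load the test schema from the data-schema2sql directory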
+schema = SchemaLoader().load([DATADIR])
+
+
+EXPECTED_DATA_NO_DROP = """
+CREATE TABLE Affaire(
+ sujet varchar(128),
+ ref varchar(12),
+ inline_rel integer REFERENCES entities (eid)
+);
+CREATE INDEX affaire_inline_rel_idx ON Affaire(inline_rel);
+
+CREATE TABLE Company(
+ name text
+);
+
+CREATE TABLE Datetest(
+ dt1 timestamp,
+ dt2 timestamp,
+ d1 date,
+ d2 date,
+ t1 time,
+ t2 time
+);
+
+CREATE TABLE Division(
+ name text
+);
+
+CREATE TABLE EPermission(
+ name varchar(100) NOT NULL
+);
+CREATE INDEX epermission_name_idx ON EPermission(name);
+
+CREATE TABLE Eetype(
+ name varchar(64) UNIQUE NOT NULL,
+ description text,
+ meta boolean,
+ final boolean,
+ initial_state integer REFERENCES entities (eid)
+);
+CREATE INDEX eetype_name_idx ON Eetype(name);
+CREATE INDEX eetype_initial_state_idx ON Eetype(initial_state);
+
+CREATE TABLE Employee(
+);
+
+CREATE TABLE Note(
+ date varchar(10),
+ type varchar(1),
+ para varchar(512)
+);
+
+CREATE TABLE Person(
+ nom varchar(64) NOT NULL,
+ prenom varchar(64),
+ sexe varchar(1) DEFAULT 'M',
+ promo varchar(6),
+ titre varchar(128),
+ adel varchar(128),
+ ass varchar(128),
+ web varchar(128),
+ tel integer,
+ fax integer,
+ datenaiss date,
+ test boolean,
+ salary float
+);
+CREATE UNIQUE INDEX unique_e6c2d219772dbf1715597f7d9a6b3892 ON Person(nom,prenom);
+
+CREATE TABLE Salaried(
+ nom varchar(64) NOT NULL,
+ prenom varchar(64),
+ sexe varchar(1) DEFAULT 'M',
+ promo varchar(6),
+ titre varchar(128),
+ adel varchar(128),
+ ass varchar(128),
+ web varchar(128),
+ tel integer,
+ fax integer,
+ datenaiss date,
+ test boolean,
+ salary float
+);
+CREATE UNIQUE INDEX unique_98da0f9de8588baa8966f0b1a6f850a3 ON Salaried(nom,prenom);
+
+CREATE TABLE Societe(
+ nom varchar(64),
+ web varchar(128),
+ tel integer,
+ fax integer,
+ rncs varchar(32),
+ ad1 varchar(128),
+ ad2 varchar(128),
+ ad3 varchar(128),
+ cp varchar(12),
+ ville varchar(32)
+);
+
+CREATE TABLE State(
+ eid integer PRIMARY KEY REFERENCES entities (eid),
+ name varchar(256) NOT NULL,
+ description text
+);
+CREATE INDEX state_name_idx ON State(name);
+
+CREATE TABLE Subcompany(
+ name text
+);
+
+CREATE TABLE Subdivision(
+ name text
+);
+
+CREATE TABLE pkginfo(
+ modname varchar(30) NOT NULL,
+ version varchar(10) DEFAULT '0.1' NOT NULL,
+ copyright text NOT NULL,
+ license varchar(3),
+ short_desc varchar(80) NOT NULL,
+ long_desc text NOT NULL,
+ author varchar(100) NOT NULL,
+ author_email varchar(100) NOT NULL,
+ mailinglist varchar(100),
+ debian_handler varchar(6)
+);
+
+
+CREATE TABLE concerne_relation (
+  eid_from INTEGER NOT NULL REFERENCES entities (eid),
+  eid_to INTEGER NOT NULL REFERENCES entities (eid),
+  CONSTRAINT concerne_relation_p_key PRIMARY KEY(eid_from, eid_to)
+);
+
+CREATE INDEX concerne_relation_from_idx ON concerne_relation(eid_from);
+CREATE INDEX concerne_relation_to_idx ON concerne_relation(eid_to);
+
+CREATE TABLE division_of_relation (
+  eid_from INTEGER NOT NULL REFERENCES entities (eid),
+  eid_to INTEGER NOT NULL REFERENCES entities (eid),
+  CONSTRAINT division_of_relation_p_key PRIMARY KEY(eid_from, eid_to)
+);
+
+CREATE INDEX division_of_relation_from_idx ON division_of_relation(eid_from);
+CREATE INDEX division_of_relation_to_idx ON division_of_relation(eid_to);
+
+CREATE TABLE evaluee_relation (
+  eid_from INTEGER NOT NULL REFERENCES entities (eid),
+  eid_to INTEGER NOT NULL REFERENCES entities (eid),
+  CONSTRAINT evaluee_relation_p_key PRIMARY KEY(eid_from, eid_to)
+);
+
+CREATE INDEX evaluee_relation_from_idx ON evaluee_relation(eid_from);
+CREATE INDEX evaluee_relation_to_idx ON evaluee_relation(eid_to);
+
+CREATE TABLE next_state_relation (
+  eid_from INTEGER NOT NULL REFERENCES entities (eid),
+  eid_to INTEGER NOT NULL REFERENCES entities (eid),
+  CONSTRAINT next_state_relation_p_key PRIMARY KEY(eid_from, eid_to)
+);
+
+CREATE INDEX next_state_relation_from_idx ON next_state_relation(eid_from);
+CREATE INDEX next_state_relation_to_idx ON next_state_relation(eid_to);
+
+CREATE TABLE obj_wildcard_relation (
+  eid_from INTEGER NOT NULL REFERENCES entities (eid),
+  eid_to INTEGER NOT NULL REFERENCES entities (eid),
+  CONSTRAINT obj_wildcard_relation_p_key PRIMARY KEY(eid_from, eid_to)
+);
+
+CREATE INDEX obj_wildcard_relation_from_idx ON obj_wildcard_relation(eid_from);
+CREATE INDEX obj_wildcard_relation_to_idx ON obj_wildcard_relation(eid_to);
+
+CREATE TABLE require_permission_relation (
+  eid_from INTEGER NOT NULL REFERENCES entities (eid),
+  eid_to INTEGER NOT NULL REFERENCES entities (eid),
+  CONSTRAINT require_permission_relation_p_key PRIMARY KEY(eid_from, eid_to)
+);
+
+CREATE INDEX require_permission_relation_from_idx ON require_permission_relation(eid_from);
+CREATE INDEX require_permission_relation_to_idx ON require_permission_relation(eid_to);
+
+CREATE TABLE state_of_relation (
+  eid_from INTEGER NOT NULL REFERENCES entities (eid),
+  eid_to INTEGER NOT NULL REFERENCES entities (eid),
+  CONSTRAINT state_of_relation_p_key PRIMARY KEY(eid_from, eid_to)
+);
+
+CREATE INDEX state_of_relation_from_idx ON state_of_relation(eid_from);
+CREATE INDEX state_of_relation_to_idx ON state_of_relation(eid_to);
+
+CREATE TABLE subcompany_of_relation (
+  eid_from INTEGER NOT NULL REFERENCES entities (eid),
+  eid_to INTEGER NOT NULL REFERENCES entities (eid),
+  CONSTRAINT subcompany_of_relation_p_key PRIMARY KEY(eid_from, eid_to)
+);
+
+CREATE INDEX subcompany_of_relation_from_idx ON subcompany_of_relation(eid_from);
+CREATE INDEX subcompany_of_relation_to_idx ON subcompany_of_relation(eid_to);
+
+CREATE TABLE subdivision_of_relation (
+  eid_from INTEGER NOT NULL REFERENCES entities (eid),
+  eid_to INTEGER NOT NULL REFERENCES entities (eid),
+  CONSTRAINT subdivision_of_relation_p_key PRIMARY KEY(eid_from, eid_to)
+);
+
+CREATE INDEX subdivision_of_relation_from_idx ON subdivision_of_relation(eid_from);
+CREATE INDEX subdivision_of_relation_to_idx ON subdivision_of_relation(eid_to);
+
+CREATE TABLE subj_wildcard_relation (
+  eid_from INTEGER NOT NULL REFERENCES entities (eid),
+  eid_to INTEGER NOT NULL REFERENCES entities (eid),
+  CONSTRAINT subj_wildcard_relation_p_key PRIMARY KEY(eid_from, eid_to)
+);
+
+CREATE INDEX subj_wildcard_relation_from_idx ON subj_wildcard_relation(eid_from);
+CREATE INDEX subj_wildcard_relation_to_idx ON subj_wildcard_relation(eid_to);
+
+CREATE TABLE sym_rel_relation (
+  eid_from INTEGER NOT NULL REFERENCES entities (eid),
+  eid_to INTEGER NOT NULL REFERENCES entities (eid),
+  CONSTRAINT sym_rel_relation_p_key PRIMARY KEY(eid_from, eid_to)
+);
+
+CREATE INDEX sym_rel_relation_from_idx ON sym_rel_relation(eid_from);
+CREATE INDEX sym_rel_relation_to_idx ON sym_rel_relation(eid_to);
+
+CREATE TABLE travaille_relation (
+  eid_from INTEGER NOT NULL REFERENCES entities (eid),
+  eid_to INTEGER NOT NULL REFERENCES entities (eid),
+  CONSTRAINT travaille_relation_p_key PRIMARY KEY(eid_from, eid_to)
+);
+
+CREATE INDEX travaille_relation_from_idx ON travaille_relation(eid_from);
+CREATE INDEX travaille_relation_to_idx ON travaille_relation(eid_to);
+
+CREATE TABLE works_for_relation (
+  eid_from INTEGER NOT NULL REFERENCES entities (eid),
+  eid_to INTEGER NOT NULL REFERENCES entities (eid),
+  CONSTRAINT works_for_relation_p_key PRIMARY KEY(eid_from, eid_to)
+);
+
+CREATE INDEX works_for_relation_from_idx ON works_for_relation(eid_from);
+CREATE INDEX works_for_relation_to_idx ON works_for_relation(eid_to);
+"""
+
+class SQLSchemaTC(TestCase):
+
+    def test_known_values(self):
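+        # generate DDL for the postgres backend and compare it to the expected dump above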
+        dbhelper = get_db_helper('postgres')
+        output = schema2sql.schema2sql(dbhelper, schema)
+        self.assertMultiLineEqual(EXPECTED_DATA_NO_DROP.strip(), output.strip())
+
+
+if __name__ == '__main__':
+    unittest_main()
--- a/server/test/unittest_schemaserial.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/server/test/unittest_schemaserial.py	Mon Jun 22 14:27:37 2015 +0200
@@ -17,9 +17,6 @@
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """unit tests for schema rql (de)serialization"""
 
-import sys
-from cStringIO import StringIO
-
 from logilab.common.testlib import TestCase, unittest_main
 
 from cubicweb import Binary
--- a/server/test/unittest_security.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/server/test/unittest_security.py	Mon Jun 22 14:27:37 2015 +0200
@@ -31,9 +31,9 @@
     def setup_database(self):
         super(BaseSecurityTC, self).setup_database()
         with self.admin_access.client_cnx() as cnx:
-            self.create_user(cnx, 'iaminusersgrouponly')
+            self.create_user(cnx, u'iaminusersgrouponly')
             hash = _CRYPTO_CTX.encrypt('oldpassword', scheme='des_crypt')
-            self.create_user(cnx, 'oldpassword', password=Binary(hash))
+            self.create_user(cnx, u'oldpassword', password=Binary(hash))
 
 class LowLevelSecurityFunctionTC(BaseSecurityTC):
 
@@ -45,7 +45,7 @@
             with self.admin_access.repo_cnx() as cnx:
                 self.repo.vreg.solutions(cnx, rqlst, None)
                 check_relations_read_access(cnx, rqlst, {})
-            with self.new_access('anon').repo_cnx() as cnx:
+            with self.new_access(u'anon').repo_cnx() as cnx:
                 self.assertRaises(Unauthorized,
                                   check_relations_read_access,
                                   cnx, rqlst, {})
@@ -60,7 +60,7 @@
                 solution = rqlst.solutions[0]
                 localchecks = get_local_checks(cnx, rqlst, solution)
                 self.assertEqual({}, localchecks)
-            with self.new_access('anon').repo_cnx() as cnx:
+            with self.new_access(u'anon').repo_cnx() as cnx:
                 self.assertRaises(Unauthorized,
                                   get_local_checks,
                                   cnx, rqlst, solution)
@@ -70,7 +70,7 @@
         with self.admin_access.repo_cnx() as cnx:
             self.assertRaises(Unauthorized,
                               cnx.execute, 'Any X,P WHERE X is CWUser, X upassword P')
-        with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+        with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx:
             self.assertRaises(Unauthorized,
                               cnx.execute, 'Any X,P WHERE X is CWUser, X upassword P')
 
@@ -104,7 +104,7 @@
         super(SecurityRewritingTC, self).tearDown()
 
     def test_not_relation_read_security(self):
-        with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+        with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx:
             self.hijack_source_execute()
             cnx.execute('Any U WHERE NOT A todo_by U, A is Affaire')
             self.assertEqual(self.query[0][1].as_string(),
@@ -126,13 +126,13 @@
             cnx.commit()
 
     def test_insert_security(self):
-        with self.new_access('anon').repo_cnx() as cnx:
+        with self.new_access(u'anon').repo_cnx() as cnx:
             cnx.execute("INSERT Personne X: X nom 'bidule'")
             self.assertRaises(Unauthorized, cnx.commit)
             self.assertEqual(cnx.execute('Personne X').rowcount, 1)
 
     def test_insert_security_2(self):
-        with self.new_access('anon').repo_cnx() as cnx:
+        with self.new_access(u'anon').repo_cnx() as cnx:
             cnx.execute("INSERT Affaire X")
             self.assertRaises(Unauthorized, cnx.commit)
             # anon has no read permission on Affaire entities, so
@@ -141,20 +141,20 @@
 
     def test_insert_rql_permission(self):
         # test user can only add an Affaire related to a Societe he owns
-        with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+        with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx:
             cnx.execute("INSERT Affaire X: X sujet 'cool'")
             self.assertRaises(Unauthorized, cnx.commit)
         # test nothing has actually been inserted
         with self.admin_access.repo_cnx() as cnx:
             self.assertEqual(cnx.execute('Affaire X').rowcount, 1)
-        with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+        with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx:
             cnx.execute("INSERT Affaire X: X sujet 'cool'")
             cnx.execute("INSERT Societe X: X nom 'chouette'")
             cnx.execute("SET A concerne S WHERE A sujet 'cool', S nom 'chouette'")
             cnx.commit()
 
     def test_update_security_1(self):
-        with self.new_access('anon').repo_cnx() as cnx:
+        with self.new_access(u'anon').repo_cnx() as cnx:
             # local security check
             cnx.execute( "SET X nom 'bidulechouette' WHERE X is Personne")
             self.assertRaises(Unauthorized, cnx.commit)
@@ -164,7 +164,7 @@
     def test_update_security_2(self):
         with self.temporary_permissions(Personne={'read': ('users', 'managers'),
                                                   'add': ('guests', 'users', 'managers')}):
-            with self.new_access('anon').repo_cnx() as cnx:
+            with self.new_access(u'anon').repo_cnx() as cnx:
                 self.assertRaises(Unauthorized, cnx.execute,
                                   "SET X nom 'bidulechouette' WHERE X is Personne")
         # test nothing has actually been inserted
@@ -172,7 +172,7 @@
             self.assertEqual(cnx.execute('Personne X WHERE X nom "bidulechouette"').rowcount, 0)
 
     def test_update_security_3(self):
-        with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+        with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx:
             cnx.execute("INSERT Personne X: X nom 'biduuule'")
             cnx.execute("INSERT Societe X: X nom 'looogilab'")
             cnx.execute("SET X travaille S WHERE X nom 'biduuule', S nom 'looogilab'")
@@ -191,7 +191,7 @@
             cnx.execute("SET A concerne S WHERE A is Affaire, S is Societe")
             cnx.commit()
         # test user can only update an Affaire related to a Societe he owns
-        with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+        with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx:
             cnx.execute("SET X sujet 'pascool' WHERE X is Affaire")
             # this won't actually do anything since the selection query won't return anything
             cnx.commit()
@@ -212,7 +212,7 @@
         #self.assertRaises(Unauthorized,
         #                  self.o.execute, user, "DELETE CWUser X WHERE X login 'bidule'")
         # check local security
-        with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+        with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx:
             self.assertRaises(Unauthorized, cnx.execute, "DELETE CWGroup Y WHERE Y name 'staff'")
 
     def test_delete_rql_permission(self):
@@ -220,7 +220,7 @@
             cnx.execute("SET A concerne S WHERE A is Affaire, S is Societe")
             cnx.commit()
         # test user can only delete an Affaire related to a Societe he owns
-        with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+        with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx:
             # this won't actually do anything since the selection query won't return anything
             cnx.execute("DELETE Affaire X")
             cnx.commit()
@@ -239,7 +239,7 @@
             cnx.commit()
 
     def test_insert_relation_rql_permission(self):
-        with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+        with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx:
             cnx.execute("SET A concerne S WHERE A is Affaire, S is Societe")
             # should raise Unauthorized since the user doesn't own S though this won't
             # actually do anything since the selection query won't return
@@ -266,7 +266,7 @@
         with self.admin_access.repo_cnx() as cnx:
             cnx.execute("SET A concerne S WHERE A is Affaire, S is Societe")
             cnx.commit()
-        with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+        with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx:
             # this won't actually do anything since the selection query won't return anything
             cnx.execute("DELETE A concerne S")
             cnx.commit()
@@ -277,7 +277,7 @@
                          {'x': eid})
             cnx.execute("SET A concerne S WHERE A sujet 'pascool', S is Societe")
             cnx.commit()
-        with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+        with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx:
             self.assertRaises(Unauthorized, cnx.execute, "DELETE A concerne S")
             self.assertRaises(QueryError, cnx.commit) # can't commit anymore
             cnx.rollback()
@@ -290,8 +290,8 @@
 
     def test_user_can_change_its_upassword(self):
         with self.admin_access.repo_cnx() as cnx:
-            ueid = self.create_user(cnx, 'user').eid
-        with self.new_access('user').repo_cnx() as cnx:
+            ueid = self.create_user(cnx, u'user').eid
+        with self.new_access(u'user').repo_cnx() as cnx:
             cnx.execute('SET X upassword %(passwd)s WHERE X eid %(x)s',
                        {'x': ueid, 'passwd': 'newpwd'})
             cnx.commit()
@@ -299,8 +299,8 @@
 
     def test_user_cant_change_other_upassword(self):
         with self.admin_access.repo_cnx() as cnx:
-            ueid = self.create_user(cnx, 'otheruser').eid
-        with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+            ueid = self.create_user(cnx, u'otheruser').eid
+        with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx:
             cnx.execute('SET X upassword %(passwd)s WHERE X eid %(x)s',
                        {'x': ueid, 'passwd': 'newpwd'})
             self.assertRaises(Unauthorized, cnx.commit)
@@ -309,7 +309,7 @@
 
     def test_read_base(self):
         with self.temporary_permissions(Personne={'read': ('users', 'managers')}):
-            with self.new_access('anon').repo_cnx() as cnx:
+            with self.new_access(u'anon').repo_cnx() as cnx:
                 self.assertRaises(Unauthorized,
                                   cnx.execute, 'Personne U where U nom "managers"')
 
@@ -317,7 +317,7 @@
         with self.admin_access.repo_cnx() as cnx:
             eid = cnx.execute("INSERT Affaire X: X sujet 'cool'")[0][0]
             cnx.commit()
-        with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+        with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx:
             rset = cnx.execute('Affaire X')
             self.assertEqual(rset.rows, [])
             self.assertRaises(Unauthorized, cnx.execute, 'Any X WHERE X eid %(x)s', {'x': eid})
@@ -342,7 +342,7 @@
     def test_entity_created_in_transaction(self):
         affschema = self.schema['Affaire']
         with self.temporary_permissions(Affaire={'read': affschema.permissions['add']}):
-            with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+            with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx:
                 aff2 = cnx.execute("INSERT Affaire X: X sujet 'cool'")[0][0]
                 # entities created in the transaction are readable *by eid*
                 self.assertTrue(cnx.execute('Any X WHERE X eid %(x)s', {'x':aff2}))
@@ -358,7 +358,7 @@
             cnx.execute('SET X owned_by U WHERE X eid %(x)s, U login "iaminusersgrouponly"',
                         {'x': card1})
             cnx.commit()
-        with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+        with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx:
             aff2 = cnx.execute("INSERT Affaire X: X sujet 'cool'")[0][0]
             soc1 = cnx.execute("INSERT Societe X: X nom 'chouette'")[0][0]
             cnx.execute("SET A concerne S WHERE A eid %(a)s, S eid %(s)s", {'a': aff2, 's': soc1})
@@ -376,7 +376,7 @@
             cnx.execute("INSERT Societe X: X nom 'bidule'")
             cnx.commit()
         with self.temporary_permissions(Personne={'read': ('managers',)}):
-            with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+            with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx:
                 rset = cnx.execute('Any N WHERE N has_text "bidule"')
                 self.assertEqual(len(rset.rows), 1, rset.rows)
                 rset = cnx.execute('Any N WITH N BEING (Any N WHERE N has_text "bidule")')
@@ -388,7 +388,7 @@
             cnx.execute("INSERT Societe X: X nom 'bidule'")
             cnx.commit()
         with self.temporary_permissions(Personne={'read': ('managers',)}):
-            with self.new_access('anon').repo_cnx() as cnx:
+            with self.new_access(u'anon').repo_cnx() as cnx:
                 rset = cnx.execute('Any N,U WHERE N has_text "bidule", N owned_by U?')
                 self.assertEqual(len(rset.rows), 1, rset.rows)
 
@@ -396,7 +396,7 @@
         with self.admin_access.repo_cnx() as cnx:
             cnx.execute("INSERT Affaire X: X sujet 'cool'")[0][0]
             cnx.commit()
-        with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+        with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx:
             rset = cnx.execute('Any COUNT(X) WHERE X is Affaire')
             self.assertEqual(rset.rows, [[0]])
             aff2 = cnx.execute("INSERT Affaire X: X sujet 'cool'")[0][0]
@@ -424,7 +424,7 @@
                                "X web 'http://www.debian.org', X test TRUE")[0][0]
             cnx.execute('SET X test FALSE WHERE X eid %(x)s', {'x': eid})
             cnx.commit()
-        with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+        with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx:
             cnx.execute("INSERT Personne X: X nom 'bidule', "
                        "X web 'http://www.debian.org', X test TRUE")
             self.assertRaises(Unauthorized, cnx.commit)
@@ -440,7 +440,7 @@
             self.assertRaises(Unauthorized, cnx.commit)
             cnx.execute('SET X web "http://www.logilab.org" WHERE X eid %(x)s', {'x': eid})
             cnx.commit()
-        with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+        with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx:
             cnx.execute('INSERT Frozable F: F name "Foo"')
             cnx.commit()
             cnx.execute('SET F name "Bar" WHERE F is Frozable')
@@ -464,7 +464,7 @@
             note.cw_adapt_to('IWorkflowable').fire_transition('markasdone')
             cnx.execute('SET X para "truc" WHERE X eid %(x)s', {'x': note.eid})
             cnx.commit()
-        with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+        with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx:
             cnx.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note.eid})
             self.assertRaises(Unauthorized, cnx.commit)
             note2 = cnx.execute("INSERT Note X: X para 'bidule'").get_entity(0, 0)
@@ -496,7 +496,7 @@
         login_rdef = self.repo.schema['CWUser'].rdef('login')
         with self.temporary_permissions((login_rdef, {'read': ('users', 'managers')}),
                                         CWUser={'read': ('guests', 'users', 'managers')}):
-            with self.new_access('anon').repo_cnx() as cnx:
+            with self.new_access(u'anon').repo_cnx() as cnx:
                 rset = cnx.execute('CWUser X')
                 self.assertTrue(rset)
                 x = rset.get_entity(0, 0)
@@ -510,7 +510,7 @@
     def test_yams_inheritance_and_security_bug(self):
         with self.temporary_permissions(Division={'read': ('managers',
                                                            ERQLExpression('X owned_by U'))}):
-            with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+            with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx:
                 querier = cnx.repo.querier
                 rqlst = querier.parse('Any X WHERE X is_instance_of Societe')
                 querier.solutions(cnx, rqlst, {})
@@ -519,7 +519,7 @@
                 plan.preprocess(rqlst)
                 self.assertEqual(
                     rqlst.as_string(),
-                    '(Any X WHERE X is IN(SubDivision, Societe)) UNION '
+                    '(Any X WHERE X is IN(Societe, SubDivision)) UNION '
                     '(Any X WHERE X is Division, EXISTS(X owned_by %(B)s))')
 
 
@@ -528,7 +528,7 @@
 
     def test_user_can_delete_object_he_created(self):
         # even if some other user has changed the object's state
-        with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+        with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx:
             # due to the security test, the Affaire has to concerne a Societe the user owns
             cnx.execute('INSERT Societe X: X nom "ARCTIA"')
             cnx.execute('INSERT Affaire X: X ref "ARCT01", X concerne S WHERE S nom "ARCTIA"')
@@ -542,7 +542,7 @@
             self.assertEqual(len(cnx.execute('TrInfo X WHERE X wf_info_for A, A ref "ARCT01",'
                                               'X owned_by U, U login "admin"')),
                              1) # TrInfo at the above state change
-        with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+        with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx:
             cnx.execute('DELETE Affaire X WHERE X ref "ARCT01"')
             cnx.commit()
             self.assertFalse(cnx.execute('Affaire X'))
@@ -550,7 +550,7 @@
     def test_users_and_groups_non_readable_by_guests(self):
         with self.repo.internal_cnx() as cnx:
             admineid = cnx.execute('CWUser U WHERE U login "admin"').rows[0][0]
-        with self.new_access('anon').repo_cnx() as cnx:
+        with self.new_access(u'anon').repo_cnx() as cnx:
             anon = cnx.user
             # anonymous user can only read itself
             rset = cnx.execute('Any L WHERE X owned_by U, U login L')
@@ -569,7 +569,7 @@
             self.assertRaises(Unauthorized, cnx.commit)
 
     def test_in_group_relation(self):
-        with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+        with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx:
             rql = u"DELETE U in_group G WHERE U login 'admin'"
             self.assertRaises(Unauthorized, cnx.execute, rql)
             rql = u"SET U in_group G WHERE U login 'admin', G name 'users'"
@@ -579,7 +579,7 @@
         with self.admin_access.repo_cnx() as cnx:
             cnx.execute("INSERT Personne X: X nom 'bidule'")
             cnx.commit()
-        with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+        with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx:
             rql = u"SET X owned_by U WHERE U login 'iaminusersgrouponly', X is Personne"
             self.assertRaises(Unauthorized, cnx.execute, rql)
 
@@ -589,7 +589,7 @@
             beid2 = cnx.execute('INSERT Bookmark B: B path "?vid=index", B title "index", '
                                 'B bookmarked_by U WHERE U login "anon"')[0][0]
             cnx.commit()
-        with self.new_access('anon').repo_cnx() as cnx:
+        with self.new_access(u'anon').repo_cnx() as cnx:
             anoneid = cnx.user.eid
             self.assertEqual(cnx.execute('Any T,P ORDERBY lower(T) WHERE B is Bookmark,B title T,B path P,'
                                          'B bookmarked_by U, U eid %s' % anoneid).rows,
@@ -606,7 +606,7 @@
                               {'x': anoneid, 'b': beid1})
 
     def test_ambigous_ordered(self):
-        with self.new_access('anon').repo_cnx() as cnx:
+        with self.new_access(u'anon').repo_cnx() as cnx:
             names = [t for t, in cnx.execute('Any N ORDERBY lower(N) WHERE X name N')]
             self.assertEqual(names, sorted(names, key=lambda x: x.lower()))
 
@@ -617,7 +617,7 @@
         with self.admin_access.repo_cnx() as cnx:
             eid = cnx.execute('INSERT Affaire X: X ref "ARCT01"')[0][0]
             cnx.commit()
-        with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+        with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx:
             # needed to remove rql expr granting update perm to the user
             affschema = self.schema['Affaire']
             with self.temporary_permissions(Affaire={'update': affschema.get_groups('update'),
@@ -675,7 +675,7 @@
                          'U use_email X WHERE U login "anon"').get_entity(0, 0)
             cnx.commit()
             self.assertEqual(len(cnx.execute('Any X WHERE X is EmailAddress')), 2)
-        with self.new_access('anon').repo_cnx() as cnx:
+        with self.new_access(u'anon').repo_cnx() as cnx:
             self.assertEqual(len(cnx.execute('Any X WHERE X is EmailAddress')), 1)
 
 if __name__ == '__main__':
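A minimal sketch of the new_access / temporary_permissions pattern exercised in the hunks above, assuming the CubicWebTC helpers used throughout this test file (note that logins are now passed as unicode literals):

    from cubicweb.devtools.testlib import CubicWebTC

    class PermsSketchTC(CubicWebTC):
        def test_anon_can_read_users_when_granted(self):
            # override entity-level read permissions for the duration of the block
            with self.temporary_permissions(CWUser={'read': ('guests', 'users', 'managers')}):
                # logins are unicode strings (u'...'), as in the tests above
                with self.new_access(u'anon').repo_cnx() as cnx:
                    self.assertTrue(cnx.execute('CWUser X'))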
--- a/server/test/unittest_session.py	Mon Jun 22 12:51:28 2015 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,124 +0,0 @@
-# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-#
-# This file is part of CubicWeb.
-#
-# CubicWeb is free software: you can redistribute it and/or modify it under the
-# terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option)
-# any later version.
-#
-# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-
-from cubicweb.devtools.testlib import CubicWebTC
-from cubicweb.server.session import HOOKS_ALLOW_ALL, HOOKS_DENY_ALL
-from cubicweb.server import hook
-from cubicweb.predicates import is_instance
-
-class InternalSessionTC(CubicWebTC):
-    def test_dbapi_query(self):
-        session = self.repo.internal_session()
-        self.assertFalse(session.running_dbapi_query)
-        session.close()
-
-    def test_integrity_hooks(self):
-        with self.repo.internal_session() as session:
-            self.assertEqual(HOOKS_ALLOW_ALL, session.hooks_mode)
-            self.assertEqual(set(('integrity', 'security')), session.disabled_hook_categories)
-            self.assertEqual(set(), session.enabled_hook_categories)
-            session.commit()
-            self.assertEqual(HOOKS_ALLOW_ALL, session.hooks_mode)
-            self.assertEqual(set(('integrity', 'security')), session.disabled_hook_categories)
-            self.assertEqual(set(), session.enabled_hook_categories)
-
-class SessionTC(CubicWebTC):
-
-    def test_hooks_control(self):
-        session = self.session
-        # this test check the "old" behavior of session with automatic connection management
-        # close the default cnx, we do nto want it to interfer with the test
-        self.cnx.close()
-        # open a dedicated one
-        session.set_cnx('Some-random-cnx-unrelated-to-the-default-one')
-        # go test go
-        self.assertEqual(HOOKS_ALLOW_ALL, session.hooks_mode)
-        self.assertEqual(set(), session.disabled_hook_categories)
-        self.assertEqual(set(), session.enabled_hook_categories)
-        self.assertEqual(1, len(session._cnxs))
-        with session.deny_all_hooks_but('metadata'):
-            self.assertEqual(HOOKS_DENY_ALL, session.hooks_mode)
-            self.assertEqual(set(), session.disabled_hook_categories)
-            self.assertEqual(set(('metadata',)), session.enabled_hook_categories)
-            session.commit()
-            self.assertEqual(HOOKS_DENY_ALL, session.hooks_mode)
-            self.assertEqual(set(), session.disabled_hook_categories)
-            self.assertEqual(set(('metadata',)), session.enabled_hook_categories)
-            session.rollback()
-            self.assertEqual(HOOKS_DENY_ALL, session.hooks_mode)
-            self.assertEqual(set(), session.disabled_hook_categories)
-            self.assertEqual(set(('metadata',)), session.enabled_hook_categories)
-            with session.allow_all_hooks_but('integrity'):
-                self.assertEqual(HOOKS_ALLOW_ALL, session.hooks_mode)
-                self.assertEqual(set(('integrity',)), session.disabled_hook_categories)
-                self.assertEqual(set(('metadata',)), session.enabled_hook_categories) # not changed in such case
-            self.assertEqual(HOOKS_DENY_ALL, session.hooks_mode)
-            self.assertEqual(set(), session.disabled_hook_categories)
-            self.assertEqual(set(('metadata',)), session.enabled_hook_categories)
-        # leaving context manager with no transaction running should reset the
-        # transaction local storage (and associated cnxset)
-        self.assertEqual({}, session._cnxs)
-        self.assertEqual(None, session.cnxset)
-        self.assertEqual(HOOKS_ALLOW_ALL, session.hooks_mode, session.HOOKS_ALLOW_ALL)
-        self.assertEqual(set(), session.disabled_hook_categories)
-        self.assertEqual(set(), session.enabled_hook_categories)
-
-    def test_explicit_connection(self):
-        with self.session.new_cnx() as cnx:
-            rset = cnx.execute('Any X LIMIT 1 WHERE X is CWUser')
-            self.assertEqual(1, len(rset))
-            user = rset.get_entity(0, 0)
-            user.cw_delete()
-            cnx.rollback()
-            new_user = cnx.entity_from_eid(user.eid)
-            self.assertIsNotNone(new_user.login)
-        self.assertFalse(cnx._open)
-
-    def test_internal_cnx(self):
-        with self.repo.internal_cnx() as cnx:
-            rset = cnx.execute('Any X LIMIT 1 WHERE X is CWUser')
-            self.assertEqual(1, len(rset))
-            user = rset.get_entity(0, 0)
-            user.cw_delete()
-            cnx.rollback()
-            new_user = cnx.entity_from_eid(user.eid)
-            self.assertIsNotNone(new_user.login)
-        self.assertFalse(cnx._open)
-
-    def test_connection_exit(self):
-        """exiting a connection should roll back the transaction, including any
-        pending operations"""
-        self.rollbacked = False
-        class RollbackOp(hook.Operation):
-            _test = self
-            def rollback_event(self):
-                self._test.rollbacked = True
-        class RollbackHook(hook.Hook):
-            __regid__ = 'rollback'
-            events = ('after_update_entity',)
-            __select__ = hook.Hook.__select__ & is_instance('CWGroup')
-            def __call__(self):
-                RollbackOp(self._cw)
-        with self.temporary_appobjects(RollbackHook):
-            with self.admin_access.client_cnx() as cnx:
-                cnx.execute('SET G name "foo" WHERE G is CWGroup, G name "managers"')
-            self.assertTrue(self.rollbacked)
-
-if __name__ == '__main__':
-    from logilab.common.testlib import unittest_main
-    unittest_main()
--- a/server/test/unittest_tools.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/server/test/unittest_tools.py	Mon Jun 22 14:27:37 2015 +0200
@@ -23,7 +23,6 @@
 class ImportTC(TestCase):
     def test(self):
         # the minimal test: module is importable...
-        import cubicweb.server.server
         import cubicweb.server.checkintegrity
         import cubicweb.server.serverctl
 
--- a/server/test/unittest_undo.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/server/test/unittest_undo.py	Mon Jun 22 14:27:37 2015 +0200
@@ -48,7 +48,6 @@
 
     def tearDown(self):
         cubicweb.server.session.Connection = OldConnection
-        self.restore_connection()
         super(UndoableTransactionTC, self).tearDown()
 
     def check_transaction_deleted(self, cnx, txuuid):
@@ -210,13 +209,12 @@
                               ['CWUser'])
             # undoing shouldn't be visible in undoable transaction, and the undone
             # transaction should be removed
-            txs = self.cnx.undoable_transactions()
+            txs = cnx.undoable_transactions()
             self.assertEqual(len(txs), 2)
             self.assertRaises(NoSuchTransaction,
-                              self.cnx.transaction_info, txuuid)
+                              cnx.transaction_info, txuuid)
         with self.admin_access.repo_cnx() as cnx:
-            with cnx.ensure_cnx_set:
-                self.check_transaction_deleted(cnx, txuuid)
+            self.check_transaction_deleted(cnx, txuuid)
             # the final test: check we can login with the previously deleted user
         with self.new_access('toto').client_cnx():
             pass
@@ -238,6 +236,8 @@
             cnx.commit()
             p.cw_clear_all_caches()
             self.assertEqual(p.fiche[0].eid, c2.eid)
+            # we restored the card
+            self.assertTrue(cnx.entity_from_eid(c.eid))
 
     def test_undo_deletion_integrity_2(self):
         with self.admin_access.client_cnx() as cnx:
@@ -272,18 +272,17 @@
             self.assertFalse(cnx.execute('Any X WHERE X eid %(x)s', {'x': p.eid}))
             self.assertFalse(cnx.execute('Any X,Y WHERE X fiche Y'))
         with self.admin_access.repo_cnx() as cnx:
-            with cnx.ensure_cnx_set:
-                for eid in (p.eid, c.eid):
-                    self.assertFalse(cnx.system_sql(
-                        'SELECT * FROM entities WHERE eid=%s' % eid).fetchall())
-                    self.assertFalse(cnx.system_sql(
-                        'SELECT 1 FROM owned_by_relation WHERE eid_from=%s' % eid).fetchall())
-                    # added by sql in hooks (except when using dataimport)
-                    self.assertFalse(cnx.system_sql(
-                        'SELECT 1 FROM is_relation WHERE eid_from=%s' % eid).fetchall())
-                    self.assertFalse(cnx.system_sql(
-                        'SELECT 1 FROM is_instance_of_relation WHERE eid_from=%s' % eid).fetchall())
-                self.check_transaction_deleted(cnx, txuuid)
+            for eid in (p.eid, c.eid):
+                self.assertFalse(cnx.system_sql(
+                    'SELECT * FROM entities WHERE eid=%s' % eid).fetchall())
+                self.assertFalse(cnx.system_sql(
+                    'SELECT 1 FROM owned_by_relation WHERE eid_from=%s' % eid).fetchall())
+                # added by sql in hooks (except when using dataimport)
+                self.assertFalse(cnx.system_sql(
+                    'SELECT 1 FROM is_relation WHERE eid_from=%s' % eid).fetchall())
+                self.assertFalse(cnx.system_sql(
+                    'SELECT 1 FROM is_instance_of_relation WHERE eid_from=%s' % eid).fetchall())
+            self.check_transaction_deleted(cnx, txuuid)
 
     def test_undo_creation_integrity_1(self):
         with self.admin_access.client_cnx() as cnx:
@@ -356,9 +355,8 @@
             p.cw_clear_all_caches()
             self.assertFalse(p.fiche)
         with self.admin_access.repo_cnx() as cnx:
-            with cnx.ensure_cnx_set:
-                self.assertIsNone(cnx.system_sql(
-                    'SELECT cw_fiche FROM cw_Personne WHERE cw_eid=%s' % p.eid).fetchall()[0][0])
+            self.assertIsNone(cnx.system_sql(
+                'SELECT cw_fiche FROM cw_Personne WHERE cw_eid=%s' % p.eid).fetchall()[0][0])
 
     def test_undo_inline_rel_add_ok(self):
         """Undo add relation  Personne (?) fiche (?) Card
@@ -375,6 +373,17 @@
             p.cw_clear_all_caches()
             self.assertFalse(p.fiche)
 
+    def test_undo_inline_rel_delete_ko(self):
+        with self.admin_access.client_cnx() as cnx:
+            c = cnx.create_entity('Card', title=u'hop', content=u'hop')
+            txuuid = cnx.commit()
+            p = cnx.create_entity('Personne', nom=u'louis', fiche=c)
+            cnx.commit()
+            integrityerror = self.repo.sources_by_uri['system'].dbhelper.dbapi_module.IntegrityError
+            with self.assertRaises(integrityerror):
+                cnx.undo_transaction(txuuid)
+
+
     def test_undo_inline_rel_add_ko(self):
         """Undo add relation  Personne (?) fiche (?) Card
 
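A rough sketch of the undo API these tests drive, assuming the client_cnx behaviour shown above (commit() returns the transaction uuid, undo_transaction() replays it backwards and removes it from the undo log):

    with self.admin_access.client_cnx() as cnx:
        c = cnx.create_entity('Card', title=u'hop', content=u'hop')
        txuuid = cnx.commit()                # uuid of the transaction to undo
        cnx.undo_transaction(txuuid)         # undoes the Card creation
        cnx.commit()
        # the undone transaction no longer shows up as undoable
        self.assertNotIn(txuuid, [tx.uuid for tx in cnx.undoable_transactions()])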
--- a/skeleton/__pkginfo__.py.tmpl	Mon Jun 22 12:51:28 2015 +0200
+++ b/skeleton/__pkginfo__.py.tmpl	Mon Jun 22 14:27:37 2015 +0200
@@ -13,7 +13,7 @@
 description = '%(shortdesc)s'
 web = 'http://www.cubicweb.org/project/%%s' %% distname
 
-__depends__ =  %(dependencies)s
+__depends__ = %(dependencies)s
 __recommends__ = {}
 
 classifiers = [
@@ -29,6 +29,7 @@
 
 THIS_CUBE_DIR = join('share', 'cubicweb', 'cubes', modname)
 
+
 def listdir(dirpath):
     return [join(dirpath, fname) for fname in _listdir(dirpath)
             if fname[0] != '.' and not fname.endswith('.pyc')
@@ -40,9 +41,9 @@
     [THIS_CUBE_DIR, [fname for fname in glob('*.py') if fname != 'setup.py']],
     ]
 # check for possible extended cube layout
-for dname in ('entities', 'views', 'sobjects', 'hooks', 'schema', 'data', 'wdoc', 'i18n', 'migration'):
+for dname in ('entities', 'views', 'sobjects', 'hooks', 'schema', 'data',
+              'wdoc', 'i18n', 'migration'):
     if isdir(dname):
         data_files.append([join(THIS_CUBE_DIR, dname), listdir(dname)])
 # Note: here, you'll need to add subdirectories if you want
 # them to be included in the debian package
-
--- a/skeleton/migration/postcreate.py.tmpl	Mon Jun 22 12:51:28 2015 +0200
+++ b/skeleton/migration/postcreate.py.tmpl	Mon Jun 22 14:27:37 2015 +0200
@@ -11,4 +11,3 @@
 
 # Example of site property change
 #set_property('ui.site-title', "<sitename>")
-
--- a/skeleton/setup.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/skeleton/setup.py	Mon Jun 22 14:27:37 2015 +0200
@@ -16,8 +16,8 @@
 # FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
 # details.
 #
-# You should have received a copy of the GNU Lesser General Public License along
-# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+# You should have received a copy of the GNU Lesser General Public License
+# along with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """Generic Setup script, takes package info from __pkginfo__.py file
 """
 __docformat__ = "restructuredtext en"
@@ -25,11 +25,11 @@
 import os
 import sys
 import shutil
-from os.path import isdir, exists, join, walk
+from os.path import exists, join, walk
 
 try:
     if os.environ.get('NO_SETUPTOOLS'):
-        raise ImportError() # do as there is no setuptools
+        raise ImportError()  # act as if there is no setuptools
     from setuptools import setup
     from setuptools.command import install_lib
     USE_SETUPTOOLS = True
@@ -41,7 +41,7 @@
 
 # import required features
 from __pkginfo__ import modname, version, license, description, web, \
-     author, author_email, classifiers
+    author, author_email, classifiers
 
 if exists('README'):
     long_description = file('README').read()
@@ -52,10 +52,10 @@
 import __pkginfo__
 if USE_SETUPTOOLS:
     requires = {}
-    for entry in ("__depends__",): # "__recommends__"):
+    for entry in ("__depends__",):  # "__recommends__"):
         requires.update(getattr(__pkginfo__, entry, {}))
     install_requires = [("%s %s" % (d, v and v or "")).strip()
-                       for d, v in requires.iteritems()]
+                        for d, v in requires.iteritems()]
 else:
     install_requires = []
 
@@ -82,6 +82,7 @@
         scripts_ = linux_scripts
     return scripts_
 
+
 def export(from_dir, to_dir,
            blacklist=BASE_BLACKLIST,
            ignore_ext=IGNORED_EXTENSIONS,
@@ -150,13 +151,15 @@
             old_install_data.run(self)
             self.install_dir = _old_install_dir
     try:
-        import setuptools.command.easy_install # only if easy_install available
+        # only if easy_install available
+        import setuptools.command.easy_install  # noqa
         # monkey patch: Crack SandboxViolation verification
         from setuptools.sandbox import DirectorySandbox as DS
         old_ok = DS._ok
+
         def _ok(self, path):
             """Return True if ``path`` can be written during installation."""
-            out = old_ok(self, path) # here for side effect from setuptools
+            out = old_ok(self, path)  # here for side effect from setuptools
             realpath = os.path.normcase(os.path.realpath(path))
             allowed_path = os.path.normcase(sys.prefix)
             if realpath.startswith(allowed_path):
@@ -166,6 +169,7 @@
     except ImportError:
         pass
 
+
 def install(**kwargs):
     """setup entry point"""
     if USE_SETUPTOOLS:
@@ -181,21 +185,22 @@
         kwargs['zip_safe'] = False
         cmdclass['install_data'] = MyInstallData
 
-    return setup(name = distname,
-                 version = version,
-                 license = license,
-                 description = description,
-                 long_description = long_description,
-                 author = author,
-                 author_email = author_email,
-                 url = web,
-                 scripts = ensure_scripts(scripts),
-                 data_files = data_files,
-                 ext_modules = ext_modules,
-                 cmdclass = cmdclass,
-                 classifiers = classifiers,
+    return setup(name=distname,
+                 version=version,
+                 license=license,
+                 description=description,
+                 long_description=long_description,
+                 author=author,
+                 author_email=author_email,
+                 url=web,
+                 scripts=ensure_scripts(scripts),
+                 data_files=data_files,
+                 ext_modules=ext_modules,
+                 cmdclass=cmdclass,
+                 classifiers=classifiers,
                  **kwargs
                  )
 
-if __name__ == '__main__' :
+
+if __name__ == '__main__':
     install()
--- a/skeleton/test/pytestconf.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/skeleton/test/pytestconf.py	Mon Jun 22 14:27:37 2015 +0200
@@ -13,8 +13,8 @@
 # FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
 # details.
 #
-# You should have received a copy of the GNU Lesser General Public License along
-# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+# You should have received a copy of the GNU Lesser General Public License
+# along with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """
 
 """
@@ -23,6 +23,7 @@
 
 from logilab.common.pytest import PyTester
 
+
 def getlogin():
     """avoid using os.getlogin() because of strange tty / stdin problems
     (man 3 getlogin)
--- a/skeleton/test/realdb_test_CUBENAME.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/skeleton/test/realdb_test_CUBENAME.py	Mon Jun 22 14:27:37 2015 +0200
@@ -13,14 +13,15 @@
 # FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
 # details.
 #
-# You should have received a copy of the GNU Lesser General Public License along
-# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
+# You should have received a copy of the GNU Lesser General Public License
+# along with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 """
 
 """
 from cubicweb.devtools.testlib import CubicWebTC
 from cubicweb.devtools.realdbtest import buildconfig, loadconfig
 
+
 def setUpModule(options):
     if options.source:
         configcls = loadconfig(options.source)
@@ -28,13 +29,13 @@
         raise Exception('either <sourcefile> or <dbname> options are required')
     else:
         configcls = buildconfig(options.dbuser, options.dbpassword,
-                                               options.dbname, options.euser,
-                                               options.epassword)
+                                options.dbname,
+                                options.euser, options.epassword)
     RealDatabaseTC.configcls = configcls
 
 
 class RealDatabaseTC(CubicWebTC):
-    configcls = None # set by setUpModule()
+    configcls = None  # set by setUpModule()
 
     def test_all_primaries(self):
         for rset in self.iter_individual_rsets(limit=50):
--- a/skeleton/test/test_CUBENAME.py.tmpl	Mon Jun 22 12:51:28 2015 +0200
+++ b/skeleton/test/test_CUBENAME.py.tmpl	Mon Jun 22 14:27:37 2015 +0200
@@ -27,6 +27,7 @@
 
 from cubicweb.devtools import testlib
 
+
 class DefaultTC(testlib.CubicWebTC):
     def test_something(self):
         self.skipTest('this cube has no test')
--- a/sobjects/notification.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/sobjects/notification.py	Mon Jun 22 14:27:37 2015 +0200
@@ -270,7 +270,7 @@
     """
     __abstract__ = True
     __regid__ = 'notif_entity_updated'
-    msgid_timestamp = False
+    msgid_timestamp = True
     message = _('updated')
     no_detailed_change_attrs = ()
     content = """
--- a/sobjects/services.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/sobjects/services.py	Mon Jun 22 14:27:37 2015 +0200
@@ -43,7 +43,7 @@
             (len(source._cache), repo.config['rql-cache-size'],
             source.cache_hit, source.cache_miss, 'sql'),
             ):
-            results['%s_cache_size' % title] = '%s / %s' % (size, maxsize)
+            results['%s_cache_size' % title] = {'size': size, 'maxsize': maxsize}
             results['%s_cache_hit' % title] = hits
             results['%s_cache_miss' % title] = misses
             results['%s_cache_hit_percent' % title] = (hits * 100) / (hits + misses)
@@ -53,9 +53,9 @@
         results['nb_open_sessions'] = len(repo._sessions)
         results['nb_active_threads'] = threading.activeCount()
         looping_tasks = repo._tasks_manager._looping_tasks
-        results['looping_tasks'] = ', '.join(str(t) for t in looping_tasks)
+        results['looping_tasks'] = [(t.name, t.interval) for t in looping_tasks]
         results['available_cnxsets'] = repo._cnxsets_pool.qsize()
-        results['threads'] = ', '.join(sorted(str(t) for t in threading.enumerate()))
+        results['threads'] = [t.name for t in threading.enumerate()]
         return results
 
 class GcStatsService(Service):
@@ -79,13 +79,11 @@
         from cubicweb._gcdebug import gc_info
         from cubicweb.appobject import AppObject
         from cubicweb.rset import ResultSet
-        from cubicweb.dbapi import Connection, Cursor
         from cubicweb.web.request import CubicWebRequestBase
         from rql.stmts import Union
 
         lookupclasses = (AppObject,
                          Union, ResultSet,
-                         Connection, Cursor,
                          CubicWebRequestBase)
         try:
             from cubicweb.server.session import Session, InternalSession
@@ -100,7 +98,7 @@
         results['lookupclasses'] = values
         values = sorted(ocounters.iteritems(), key=lambda x: x[1], reverse=True)[:nmax]
         results['referenced'] = values
-        results['unreachable'] = len(garbage)
+        results['unreachable'] = garbage
         return results
 
 
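The hunks above switch several stats fields from pre-joined strings to structured Python values; a hypothetical consumer (the service regid and the exact cache-key prefix are assumptions, they are not shown in this diff):

    results = cnx.call_service('repo_stats')          # service regid assumed
    for name, interval in results['looping_tasks']:   # now a list of (name, interval)
        print 'looping task %s runs every %ss' % (name, interval)
    print 'threads:', ', '.join(results['threads'])   # now a plain list of names
    sql_cache = results['sql_cache_size']             # now {'size': ..., 'maxsize': ...}
    print '%s / %s' % (sql_cache['size'], sql_cache['maxsize'])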
--- a/sobjects/test/unittest_cwxmlparser.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/sobjects/test/unittest_cwxmlparser.py	Mon Jun 22 14:27:37 2015 +0200
@@ -17,6 +17,7 @@
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 
 from datetime import datetime
+from urlparse import urlsplit, parse_qsl
 
 from cubicweb.devtools.testlib import CubicWebTC
 from cubicweb.sobjects.cwxmlparser import CWEntityXMLParser
@@ -133,6 +134,16 @@
     """
     test_db_id = 'xmlparser'
 
+    def assertURLEquiv(self, first, second):
+        # ignore ordering differences in query params
+        parsed_first = urlsplit(first)
+        parsed_second = urlsplit(second)
+        self.assertEqual(parsed_first.scheme, parsed_second.scheme)
+        self.assertEqual(parsed_first.netloc, parsed_second.netloc)
+        self.assertEqual(parsed_first.path, parsed_second.path)
+        self.assertEqual(parsed_first.fragment, parsed_second.fragment)
+        self.assertCountEqual(parse_qsl(parsed_first.query), parse_qsl(parsed_second.query))
+
     @classmethod
     def pre_setup_database(cls, cnx, config):
         myfeed = cnx.create_entity('CWSource', name=u'myfeed', type=u'datafeed',
@@ -161,16 +172,16 @@
         dfsource = self.repo.sources_by_uri['myfeed']
         with self.admin_access.repo_cnx() as cnx:
             parser = dfsource._get_parser(cnx)
-            self.assertEqual(parser.complete_url('http://www.cubicweb.org/CWUser'),
-                             'http://www.cubicweb.org/CWUser?relation=tags-object&relation=in_group-subject&relation=in_state-subject&relation=use_email-subject')
-            self.assertEqual(parser.complete_url('http://www.cubicweb.org/cwuser'),
-                             'http://www.cubicweb.org/cwuser?relation=tags-object&relation=in_group-subject&relation=in_state-subject&relation=use_email-subject')
-            self.assertEqual(parser.complete_url('http://www.cubicweb.org/cwuser?vid=rdf&relation=hop'),
-                             'http://www.cubicweb.org/cwuser?relation=hop&relation=tags-object&relation=in_group-subject&relation=in_state-subject&relation=use_email-subject&vid=rdf')
-            self.assertEqual(parser.complete_url('http://www.cubicweb.org/?rql=cwuser&vid=rdf&relation=hop'),
-                             'http://www.cubicweb.org/?rql=cwuser&relation=hop&vid=rdf')
-            self.assertEqual(parser.complete_url('http://www.cubicweb.org/?rql=cwuser&relation=hop'),
-                             'http://www.cubicweb.org/?rql=cwuser&relation=hop')
+            self.assertURLEquiv(parser.complete_url('http://www.cubicweb.org/CWUser'),
+                                'http://www.cubicweb.org/CWUser?relation=tags-object&relation=in_group-subject&relation=in_state-subject&relation=use_email-subject')
+            self.assertURLEquiv(parser.complete_url('http://www.cubicweb.org/cwuser'),
+                                'http://www.cubicweb.org/cwuser?relation=tags-object&relation=in_group-subject&relation=in_state-subject&relation=use_email-subject')
+            self.assertURLEquiv(parser.complete_url('http://www.cubicweb.org/cwuser?vid=rdf&relation=hop'),
+                                'http://www.cubicweb.org/cwuser?relation=hop&relation=tags-object&relation=in_group-subject&relation=in_state-subject&relation=use_email-subject&vid=rdf')
+            self.assertURLEquiv(parser.complete_url('http://www.cubicweb.org/?rql=cwuser&vid=rdf&relation=hop'),
+                                'http://www.cubicweb.org/?rql=cwuser&relation=hop&vid=rdf')
+            self.assertURLEquiv(parser.complete_url('http://www.cubicweb.org/?rql=cwuser&relation=hop'),
+                                'http://www.cubicweb.org/?rql=cwuser&relation=hop')
 
 
     def test_actions(self):
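The new assertURLEquiv helper above compares URLs while ignoring query-parameter order; the same technique as a standalone sketch, using only the stdlib pieces it imports:

    from urlparse import urlsplit, parse_qsl

    def urls_equivalent(first, second):
        """True if the two URLs differ at most in query-parameter order."""
        p1, p2 = urlsplit(first), urlsplit(second)
        return (p1.scheme == p2.scheme and p1.netloc == p2.netloc
                and p1.path == p2.path and p1.fragment == p2.fragment
                and sorted(parse_qsl(p1.query)) == sorted(parse_qsl(p2.query)))

    assert urls_equivalent('http://www.cubicweb.org/cwuser?vid=rdf&relation=hop',
                           'http://www.cubicweb.org/cwuser?relation=hop&vid=rdf')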
--- a/test/unittest_cwconfig.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/test/unittest_cwconfig.py	Mon Jun 22 14:27:37 2015 +0200
@@ -104,11 +104,14 @@
     def test_appobjects_path(self):
         self.config.__class__.CUBES_PATH = [CUSTOM_CUBES_DIR]
         self.config.adjust_sys_path()
-        self.assertEqual([unabsolutize(p) for p in self.config.appobjects_path()],
-                          ['entities', 'web/views', 'sobjects', 'hooks',
-                           'file/entities', 'file/views.py', 'file/hooks',
-                           'email/entities.py', 'email/views', 'email/hooks.py',
-                           'test/data/entities.py', 'test/data/views.py'])
+        path = [unabsolutize(p) for p in self.config.appobjects_path()]
+        self.assertEqual(path[0], 'entities')
+        self.assertCountEqual(path[1:4], ['web/views', 'sobjects', 'hooks'])
+        self.assertEqual(path[4], 'file/entities')
+        self.assertCountEqual(path[5:7], ['file/views.py', 'file/hooks'])
+        self.assertEqual(path[7], 'email/entities.py')
+        self.assertCountEqual(path[8:10], ['email/views', 'email/hooks.py'])
+        self.assertEqual(path[10:], ['test/data/entities.py', 'test/data/views.py'])
 
     def test_cubes_path(self):
         # make sure we don't import the email cube, but the stdlib email package
--- a/test/unittest_cwctl.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/test/unittest_cwctl.py	Mon Jun 22 14:27:37 2015 +0200
@@ -44,7 +44,7 @@
 
     def test_process_script_args_context(self):
         repo = self.repo
-        with self.admin_access.client_cnx() as cnx:
+        with self.admin_access.repo_cnx() as cnx:
             mih = ServerMigrationHelper(None, repo=repo, cnx=cnx,
                                         interactive=False,
                                         # hack so it doesn't try to load fs schema
--- a/test/unittest_dataimport.py	Mon Jun 22 12:51:28 2015 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,167 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import datetime as DT
-from StringIO import StringIO
-
-from logilab.common.testlib import TestCase, unittest_main
-
-from cubicweb import dataimport
-from cubicweb.devtools.testlib import CubicWebTC
-
-
-class RQLObjectStoreTC(CubicWebTC):
-
-    def test_all(self):
-        with self.admin_access.repo_cnx() as cnx:
-            store = dataimport.RQLObjectStore(cnx)
-            group_eid = store.create_entity('CWGroup', name=u'grp').eid
-            user_eid = store.create_entity('CWUser', login=u'lgn', upassword=u'pwd').eid
-            store.relate(user_eid, 'in_group', group_eid)
-            cnx.commit()
-
-        with self.admin_access.repo_cnx() as cnx:
-            users = cnx.execute('CWUser X WHERE X login "lgn"')
-            self.assertEqual(1, len(users))
-            self.assertEqual(user_eid, users.one().eid)
-            groups = cnx.execute('CWGroup X WHERE U in_group X, U login "lgn"')
-            self.assertEqual(1, len(users))
-            self.assertEqual(group_eid, groups.one().eid)
-
-
-class CreateCopyFromBufferTC(TestCase):
-
-    # test converters
-
-    def test_convert_none(self):
-        cnvt = dataimport._copyfrom_buffer_convert_None
-        self.assertEqual('NULL', cnvt(None))
-
-    def test_convert_number(self):
-        cnvt = dataimport._copyfrom_buffer_convert_number
-        self.assertEqual('42', cnvt(42))
-        self.assertEqual('42', cnvt(42L))
-        self.assertEqual('42.42', cnvt(42.42))
-
-    def test_convert_string(self):
-        cnvt = dataimport._copyfrom_buffer_convert_string
-        # simple
-        self.assertEqual('babar', cnvt('babar'))
-        # unicode
-        self.assertEqual('\xc3\xa9l\xc3\xa9phant', cnvt(u'éléphant'))
-        self.assertEqual('\xe9l\xe9phant', cnvt(u'éléphant', encoding='latin1'))
-        self.assertEqual('babar#', cnvt('babar\t', replace_sep='#'))
-        self.assertRaises(ValueError, cnvt, 'babar\t')
-
-    def test_convert_date(self):
-        cnvt = dataimport._copyfrom_buffer_convert_date
-        self.assertEqual('0666-01-13', cnvt(DT.date(666, 1, 13)))
-
-    def test_convert_time(self):
-        cnvt = dataimport._copyfrom_buffer_convert_time
-        self.assertEqual('06:06:06.000100', cnvt(DT.time(6, 6, 6, 100)))
-
-    def test_convert_datetime(self):
-        cnvt = dataimport._copyfrom_buffer_convert_datetime
-        self.assertEqual('0666-06-13 06:06:06.000000', cnvt(DT.datetime(666, 6, 13, 6, 6, 6)))
-
-    # test buffer
-    def test_create_copyfrom_buffer_tuple(self):
-        cnvt = dataimport._create_copyfrom_buffer
-        data = ((42, 42L, 42.42, u'éléphant', DT.date(666, 1, 13), DT.time(6, 6, 6), DT.datetime(666, 6, 13, 6, 6, 6)),
-                (6, 6L, 6.6, u'babar', DT.date(2014, 1, 14), DT.time(4, 2, 1), DT.datetime(2014, 1, 1, 0, 0, 0)))
-        results = dataimport._create_copyfrom_buffer(data)
-        # all columns
-        expected = '''42\t42\t42.42\téléphant\t0666-01-13\t06:06:06.000000\t0666-06-13 06:06:06.000000
-6\t6\t6.6\tbabar\t2014-01-14\t04:02:01.000000\t2014-01-01 00:00:00.000000'''
-        self.assertMultiLineEqual(expected, results.getvalue())
-        # selected columns
-        results = dataimport._create_copyfrom_buffer(data, columns=(1, 3, 6))
-        expected = '''42\téléphant\t0666-06-13 06:06:06.000000
-6\tbabar\t2014-01-01 00:00:00.000000'''
-        self.assertMultiLineEqual(expected, results.getvalue())
-
-    def test_create_copyfrom_buffer_dict(self):
-        cnvt = dataimport._create_copyfrom_buffer
-        data = (dict(integer=42, double=42.42, text=u'éléphant', date=DT.datetime(666, 6, 13, 6, 6, 6)),
-                dict(integer=6, double=6.6, text=u'babar', date=DT.datetime(2014, 1, 1, 0, 0, 0)))
-        results = dataimport._create_copyfrom_buffer(data, ('integer', 'text'))
-        expected = '''42\téléphant\n6\tbabar'''
-        self.assertMultiLineEqual(expected, results.getvalue())
-
-
-class UcsvreaderTC(TestCase):
-
-    def test_empty_lines_skipped(self):
-        stream = StringIO('''a,b,c,d,
-1,2,3,4,
-,,,,
-,,,,
-''')
-        self.assertEqual([[u'a', u'b', u'c', u'd', u''],
-                          [u'1', u'2', u'3', u'4', u''],
-                          ],
-                         list(dataimport.ucsvreader(stream)))
-        stream.seek(0)
-        self.assertEqual([[u'a', u'b', u'c', u'd', u''],
-                          [u'1', u'2', u'3', u'4', u''],
-                          [u'', u'', u'', u'', u''],
-                          [u'', u'', u'', u'', u'']
-                          ],
-                         list(dataimport.ucsvreader(stream, skip_empty=False)))
-
-    def test_skip_first(self):
-        stream = StringIO('a,b,c,d,\n'
-                          '1,2,3,4,\n')
-        reader = dataimport.ucsvreader(stream, skipfirst=True,
-                                       ignore_errors=True)
-        self.assertEqual(list(reader),
-                         [[u'1', u'2', u'3', u'4', u'']])
-
-        stream.seek(0)
-        reader = dataimport.ucsvreader(stream, skipfirst=True,
-                                       ignore_errors=False)
-        self.assertEqual(list(reader),
-                         [[u'1', u'2', u'3', u'4', u'']])
-
-        stream.seek(0)
-        reader = dataimport.ucsvreader(stream, skipfirst=False,
-                                       ignore_errors=True)
-        self.assertEqual(list(reader),
-                         [[u'a', u'b', u'c', u'd', u''],
-                          [u'1', u'2', u'3', u'4', u'']])
-
-        stream.seek(0)
-        reader = dataimport.ucsvreader(stream, skipfirst=False,
-                                       ignore_errors=False)
-        self.assertEqual(list(reader),
-                         [[u'a', u'b', u'c', u'd', u''],
-                          [u'1', u'2', u'3', u'4', u'']])
-
-
-class MetaGeneratorTC(CubicWebTC):
-
-    def test_dont_generate_relation_to_internal_manager(self):
-        with self.admin_access.repo_cnx() as cnx:
-            metagen = dataimport.MetaGenerator(cnx)
-            self.assertIn('created_by', metagen.etype_rels)
-            self.assertIn('owned_by', metagen.etype_rels)
-        with self.repo.internal_cnx() as cnx:
-            metagen = dataimport.MetaGenerator(cnx)
-            self.assertNotIn('created_by', metagen.etype_rels)
-            self.assertNotIn('owned_by', metagen.etype_rels)
-
-    def test_dont_generate_specified_values(self):
-        with self.admin_access.repo_cnx() as cnx:
-            metagen = dataimport.MetaGenerator(cnx)
-            # hijack gen_modification_date to ensure we don't go through it
-            metagen.gen_modification_date = None
-            md = DT.datetime.now() - DT.timedelta(days=1)
-            entity, rels = metagen.base_etype_dicts('CWUser')
-            entity.cw_edited.update(dict(modification_date=md))
-            with cnx.ensure_cnx_set:
-                metagen.init_entity(entity)
-            self.assertEqual(entity.cw_edited['modification_date'], md)
-
-
-if __name__ == '__main__':
-    unittest_main()
--- a/test/unittest_dbapi.py	Mon Jun 22 12:51:28 2015 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,103 +0,0 @@
-# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-#
-# This file is part of CubicWeb.
-#
-# CubicWeb is free software: you can redistribute it and/or modify it under the
-# terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option)
-# any later version.
-#
-# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""unittest for cubicweb.dbapi"""
-
-from copy import copy
-
-from logilab.common import tempattr
-
-from cubicweb import ConnectionError, cwconfig, NoSelectableObject
-from cubicweb.dbapi import ProgrammingError, _repo_connect
-from cubicweb.devtools.testlib import CubicWebTC
-
-
-class DBAPITC(CubicWebTC):
-
-    def test_public_repo_api(self):
-        cnx = _repo_connect(self.repo, login='anon', password='anon')
-        self.assertEqual(cnx.get_schema(), self.repo.schema)
-        self.assertEqual(cnx.source_defs(), {'system': {'type': 'native', 'uri': 'system',
-                                                        'use-cwuri-as-url': False}})
-        cnx.close()
-        self.assertRaises(ProgrammingError, cnx.get_schema)
-        self.assertRaises(ProgrammingError, cnx.source_defs)
-
-    def test_db_api(self):
-        cnx = _repo_connect(self.repo, login='anon', password='anon')
-        self.assertEqual(cnx.rollback(), None)
-        self.assertEqual(cnx.commit(), None)
-        cnx.close()
-        self.assertRaises(ProgrammingError, cnx.rollback)
-        self.assertRaises(ProgrammingError, cnx.commit)
-        self.assertRaises(ProgrammingError, cnx.close)
-
-    def test_api(self):
-        cnx = _repo_connect(self.repo, login='anon', password='anon')
-        self.assertEqual(cnx.user(None).login, 'anon')
-        self.assertEqual({'type': u'CWSource', 'source': u'system', 'extid': None},
-                         cnx.entity_metas(1))
-        self.assertEqual(cnx.describe(1), (u'CWSource', u'system', None))
-        cnx.close()
-        self.assertRaises(ProgrammingError, cnx.user, None)
-        self.assertRaises(ProgrammingError, cnx.entity_metas, 1)
-        self.assertRaises(ProgrammingError, cnx.describe, 1)
-
-    def test_shared_data_api(self):
-        cnx = _repo_connect(self.repo, login='anon', password='anon')
-        self.assertEqual(cnx.get_shared_data('data'), None)
-        cnx.set_shared_data('data', 4)
-        self.assertEqual(cnx.get_shared_data('data'), 4)
-        cnx.get_shared_data('data', pop=True)
-        cnx.get_shared_data('whatever', pop=True)
-        self.assertEqual(cnx.get_shared_data('data'), None)
-        cnx.set_shared_data('data', 4)
-        self.assertEqual(cnx.get_shared_data('data'), 4)
-        cnx.close()
-        self.assertRaises(ProgrammingError, cnx.check)
-        self.assertRaises(ProgrammingError, cnx.set_shared_data, 'data', 0)
-        self.assertRaises(ProgrammingError, cnx.get_shared_data, 'data')
-
-    def test_web_compatible_request(self):
-        config = cwconfig.CubicWebNoAppConfiguration()
-        cnx = _repo_connect(self.repo, login='admin', password='gingkow')
-        with tempattr(cnx.vreg, 'config', config):
-            cnx.use_web_compatible_requests('http://perdu.com')
-            req = cnx.request()
-            self.assertEqual(req.base_url(), 'http://perdu.com/')
-            self.assertEqual(req.from_controller(), 'view')
-            self.assertEqual(req.relative_path(), '')
-            req.ajax_replace_url('domid') # don't crash
-            req.user.cw_adapt_to('IBreadCrumbs') # don't crash
-
-    def test_call_service(self):
-        ServiceClass = self.vreg['services']['test_service'][0]
-        for _cw in (self.request(), self.session):
-            ret_value = _cw.call_service('test_service', msg='coucou')
-            self.assertEqual('coucou', ServiceClass.passed_here.pop())
-            self.assertEqual('babar', ret_value)
-        with self.login('anon') as ctm:
-            for _cw in (self.request(), self.session):
-                with self.assertRaises(NoSelectableObject):
-                    _cw.call_service('test_service', msg='toto')
-                self.rollback()
-                self.assertEqual([], ServiceClass.passed_here)
-
-
-if __name__ == '__main__':
-    from logilab.common.testlib import unittest_main
-    unittest_main()
--- a/test/unittest_entity.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/test/unittest_entity.py	Mon Jun 22 14:27:37 2015 +0200
@@ -140,13 +140,24 @@
         with self.admin_access.web_request() as req:
             user = req.execute('Any X WHERE X eid %(x)s', {'x':req.user.eid}).get_entity(0, 0)
             adeleid = req.execute('INSERT EmailAddress X: X address "toto@logilab.org", U use_email X WHERE U login "admin"')[0][0]
+            self.assertEqual({}, user._cw_related_cache)
             req.cnx.commit()
-            self.assertEqual(user._cw_related_cache, {})
+            self.assertEqual(['primary_email_subject', 'use_email_subject', 'wf_info_for_object'],
+                             sorted(user._cw_related_cache))
             email = user.primary_email[0]
-            self.assertEqual(sorted(user._cw_related_cache), ['primary_email_subject'])
-            self.assertEqual(list(email._cw_related_cache), ['primary_email_object'])
+            self.assertEqual(u'toto@logilab.org', email.address)
+            self.assertEqual(['created_by_subject',
+                              'cw_source_subject',
+                              'is_instance_of_subject',
+                              'is_subject',
+                              'owned_by_subject',
+                              'prefered_form_object',
+                              'prefered_form_subject',
+                              'primary_email_object',
+                              'use_email_object'],
+                             sorted(email._cw_related_cache))
+            self.assertEqual('admin', email._cw_related_cache['primary_email_object'][1][0].login)
             groups = user.in_group
-            self.assertEqual(sorted(user._cw_related_cache), ['in_group_subject', 'primary_email_subject'])
             for group in groups:
                 self.assertNotIn('in_group_subject', group._cw_related_cache)
             user.cw_clear_all_caches()
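The reworked assertions above show that the relation cache is keyed '<rtype>_<role>' and is filled both on commit and when a relation is traversed; a tiny sketch of inspecting and resetting it, assuming (as in the test) that the user has a primary e-mail set:

    user = req.user
    email = user.primary_email[0]          # traversing the relation fills the cache
    assert 'primary_email_subject' in user._cw_related_cache
    user.cw_clear_all_caches()             # drops attribute and relation caches
    assert not user._cw_related_cache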
--- a/test/unittest_repoapi.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/test/unittest_repoapi.py	Mon Jun 22 14:27:37 2015 +0200
@@ -15,18 +15,18 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""unittest for cubicweb.dbapi"""
+"""unittest for cubicweb.repoapi"""
 
 
 from cubicweb.devtools.testlib import CubicWebTC
 
 from cubicweb import ProgrammingError
-from cubicweb.repoapi import ClientConnection, connect, anonymous_cnx
+from cubicweb.repoapi import Connection, connect, anonymous_cnx
 
 
 class REPOAPITC(CubicWebTC):
 
-    def test_clt_cnx_basic_usage(self):
+    def test_cnx_basic_usage(self):
         """Test that a client connection can be used to access the database"""
         with self.admin_access.client_cnx() as cltcnx:
             # (1) some RQL request
@@ -52,11 +52,11 @@
                                   ''')
             self.assertTrue(rset)
 
-    def test_clt_cnx_life_cycle(self):
+    def test_cnx_life_cycle(self):
         """Check that Connection requires explicit open and close
         """
         access = self.admin_access
-        cltcnx = ClientConnection(access._session)
+        cltcnx = Connection(access._session)
         # connection not open yet
         with self.assertRaises(ProgrammingError):
             cltcnx.execute('Any X WHERE X is CWUser')
@@ -69,18 +69,18 @@
 
     def test_connect(self):
         """check that repoapi.connect works and returns a usable connection"""
-        clt_cnx = connect(self.repo, login='admin', password='gingkow')
-        self.assertEqual('admin', clt_cnx.user.login)
-        with clt_cnx:
-            rset = clt_cnx.execute('Any X WHERE X is CWUser')
+        cnx = connect(self.repo, login='admin', password='gingkow')
+        self.assertEqual('admin', cnx.user.login)
+        with cnx:
+            rset = cnx.execute('Any X WHERE X is CWUser')
             self.assertTrue(rset)
 
     def test_anonymous_connect(self):
         """check that you can get anonymous connection when the data exist"""
-        clt_cnx = anonymous_cnx(self.repo)
-        self.assertEqual('anon', clt_cnx.user.login)
-        with clt_cnx:
-            rset = clt_cnx.execute('Any X WHERE X is CWUser')
+        cnx = anonymous_cnx(self.repo)
+        self.assertEqual('anon', cnx.user.login)
+        with cnx:
+            rset = cnx.execute('Any X WHERE X is CWUser')
             self.assertTrue(rset)
 
 
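A condensed sketch of the repoapi entry points exercised above, using the test fixtures' credentials:

    from cubicweb.repoapi import connect, anonymous_cnx

    cnx = connect(self.repo, login='admin', password='gingkow')
    with cnx:                              # connections must be entered before use
        assert cnx.execute('Any X WHERE X is CWUser')

    anon = anonymous_cnx(self.repo)
    with anon:
        assert anon.user.login == 'anon'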
--- a/test/unittest_schema.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/test/unittest_schema.py	Mon Jun 22 14:27:37 2015 +0200
@@ -360,8 +360,8 @@
                          schema['produces_and_buys'].rdefs.keys())
         self.assertEqual([('Person','Service')],
                          schema['produces_and_buys2'].rdefs.keys())
-        self.assertEqual([('Company', 'Service'), ('Person', 'Service')],
-                         schema['reproduce'].rdefs.keys())
+        self.assertCountEqual([('Company', 'Service'), ('Person', 'Service')],
+                              schema['reproduce'].rdefs.keys())
         # check relation definitions are marked infered
         rdef = schema['produces_and_buys'].rdefs[('Person','Service')]
         self.assertTrue(rdef.infered)
--- a/test/unittest_utils.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/test/unittest_utils.py	Mon Jun 22 14:27:37 2015 +0200
@@ -58,12 +58,6 @@
                          parse_repo_uri('myapp'))
         self.assertEqual(('inmemory', None, 'myapp'),
                          parse_repo_uri('inmemory://myapp'))
-        self.assertEqual(('pyro', 'pyro-ns-host:pyro-ns-port', '/myapp'),
-                         parse_repo_uri('pyro://pyro-ns-host:pyro-ns-port/myapp'))
-        self.assertEqual(('pyroloc', 'host:port', '/appkey'),
-                         parse_repo_uri('pyroloc://host:port/appkey'))
-        self.assertEqual(('zmqpickle-tcp', '127.0.0.1:666', ''),
-                         parse_repo_uri('zmqpickle-tcp://127.0.0.1:666'))
         with self.assertRaises(NotImplementedError):
             parse_repo_uri('foo://bar')
 
--- a/toolsutils.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/toolsutils.py	Mon Jun 22 14:27:37 2015 +0200
@@ -257,18 +257,6 @@
       }),
     )
 
-def config_connect(appid, optconfig):
-    from cubicweb.dbapi import connect
-    from getpass import getpass
-    user = optconfig.user
-    if not user:
-        user = raw_input('login: ')
-    password = optconfig.password
-    if not password:
-        password = getpass('password: ')
-    return connect(login=user, password=password, host=optconfig.host, database=appid)
-
-
 ## cwshell helpers #############################################################
 
 class AbstractMatcher(object):
--- a/transaction.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/transaction.py	Mon Jun 22 14:27:37 2015 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -15,13 +15,7 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""undoable transaction objects.
-
-
-This module is in the cubicweb package and not in cubicweb.server because those
-objects should be accessible to client through pyro, where the cubicweb.server
-package may not be installed.
-"""
+""" undoable transaction objects. """
 __docformat__ = "restructuredtext en"
 _ = unicode
 
@@ -42,27 +36,21 @@
     msg = _("there is no transaction #%s")
 
     def __init__(self, txuuid):
-        super(RepositoryError, self).__init__(txuuid)
+        super(NoSuchTransaction, self).__init__(txuuid)
         self.txuuid = txuuid
 
 class Transaction(object):
     """an undoable transaction"""
 
-    def __init__(self, uuid, time, ueid):
+    def __init__(self, cnx, uuid, time, ueid):
+        self.cnx = cnx
         self.uuid = uuid
         self.datetime = time
         self.user_eid = ueid
-        # should be set by the dbapi connection
-        self.req = None  # old style
-        self.cnx = None  # new style
 
     def _execute(self, *args, **kwargs):
         """execute a query through the transaction's connection"""
-        if self.req is None:
-            execute = self.cnx.execute
-        else:
-            execute = self.req
-        return execute(*args, **kwargs)
+        return self.cnx.execute(*args, **kwargs)
 
 
     def __repr__(self):
@@ -73,8 +61,7 @@
         """return the user entity which has done the transaction,
         none if not found.
         """
-        return self._execute('Any X WHERE X eid %(x)s',
-                             {'x': self.user_eid}).get_entity(0, 0)
+        return self.cnx.find('CWUser', eid=self.user_eid).one()
 
     def actions_list(self, public=True):
         """return an ordered list of actions performed during that transaction
 
         if public is true, return only 'public' actions, i.e. not ones triggered
         under the cover by hooks.
         """
-        if self.req is not None:
-            cnx = self.req.cnx
-        else:
-            cnx = self.cnx
-        return cnx.transaction_actions(self.uuid, public)
+        return self.cnx.transaction_actions(self.uuid, public)
 
 
 class AbstractAction(object):
+
     def __init__(self, action, public, order):
         self.action = action
         self.public = public
@@ -106,8 +90,9 @@
 
 
 class EntityAction(AbstractAction):
+
     def __init__(self, action, public, order, etype, eid, changes):
-        AbstractAction.__init__(self, action, public, order)
+        super(EntityAction, self).__init__(action, public, order)
         self.etype = etype
         self.eid = eid
         self.changes = changes
@@ -124,8 +109,9 @@
 
 
 class RelationAction(AbstractAction):
+
     def __init__(self, action, public, order, rtype, eidfrom, eidto):
-        AbstractAction.__init__(self, action, public, order)
+        super(RelationAction, self).__init__(action, public, order)
         self.rtype = rtype
         self.eid_from = eidfrom
         self.eid_to = eidto
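Transaction objects now carry their connection, so user() and actions_list() resolve through it directly; a rough sketch of walking the undo log with the attributes defined in this file's hunks, assuming a cnx exposing undoable_transactions() as used in the server tests above:

    for tx in cnx.undoable_transactions():
        print tx.uuid, tx.datetime, tx.user().login
        for action in tx.actions_list(public=True):
            print '   ', action.order, action.action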
--- a/utils.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/utils.py	Mon Jun 22 14:27:37 2015 +0200
@@ -21,7 +21,6 @@
 
 __docformat__ = "restructuredtext en"
 
-import sys
 import decimal
 import datetime
 import random
@@ -553,8 +552,12 @@
     """
 
 def _dict2js(d, predictable=False):
+    if predictable:
+        it = sorted(d.iteritems())
+    else:
+        it = d.iteritems()
     res = [key + ': ' + js_dumps(val, predictable)
-           for key, val in d.iteritems()]
+           for key, val in it]
     return '{%s}' % ', '.join(res)
 
 def _list2js(l, predictable=False):
@@ -578,7 +581,7 @@
         return _list2js(something, predictable)
     if isinstance(something, JSString):
         return something
-    return json_dumps(something)
+    return json_dumps(something, sort_keys=predictable)
 
 PERCENT_IN_URLQUOTE_RE = re.compile(r'%(?=[0-9a-fA-F]{2})')
 def js_href(javascript_code):
@@ -608,8 +611,6 @@
     """ transform a command line uri into a (protocol, hostport, appid), e.g:
     <myapp>                      -> 'inmemory', None, '<myapp>'
     inmemory://<myapp>           -> 'inmemory', None, '<myapp>'
-    pyro://[host][:port]         -> 'pyro', 'host:port', None
-    zmqpickle://[host][:port]    -> 'zmqpickle', 'host:port', None
     """
     parseduri = urlparse(uri)
     scheme = parseduri.scheme
@@ -617,8 +618,6 @@
         return ('inmemory', None, parseduri.path)
     if scheme == 'inmemory':
         return (scheme, None, parseduri.netloc)
-    if scheme in ('pyro', 'pyroloc') or scheme.startswith('zmqpickle-'):
-        return (scheme, parseduri.netloc, parseduri.path)
     raise NotImplementedError('URI protocol not implemented for `%s`' % uri)
 
 
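With the _dict2js/js_dumps change earlier in this file, predictable=True makes the generated JavaScript deterministic by sorting mapping keys, both in the hand-built dict path and through json_dumps; a small sketch, assuming js_dumps keeps the signature shown here:

    from cubicweb.utils import js_dumps

    # key order of the input dict no longer leaks into the generated JavaScript
    assert (js_dumps({'b': 1, 'a': 2}, predictable=True)
            == js_dumps({'a': 2, 'b': 1}, predictable=True))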
--- a/view.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/view.py	Mon Jun 22 14:27:37 2015 +0200
@@ -20,7 +20,7 @@
 __docformat__ = "restructuredtext en"
 _ = unicode
 
-from cStringIO import StringIO
+from io import BytesIO
 from warnings import warn
 from functools import partial
 
@@ -101,7 +101,7 @@
             return
         if w is None:
             if self.binary:
-                self._stream = stream = StringIO()
+                self._stream = stream = BytesIO()
             else:
                 self._stream = stream = UStringIO()
             w = stream.write
@@ -471,7 +471,7 @@
             return
         if w is None:
             if self.binary:
-                self._stream = stream = StringIO()
+                self._stream = stream = BytesIO()
             else:
                 self._stream = stream = HTMLStream(self._cw)
             w = stream.write
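view.py now allocates a BytesIO for binary views while other views keep a unicode-aware stream; a tiny illustration of that selection, assuming UStringIO lives in cubicweb.utils as elsewhere in this codebase:

    from io import BytesIO
    from cubicweb.utils import UStringIO   # assumed import path

    def make_view_stream(binary):
        # binary views are written byte strings, the rest accumulate unicode
        return BytesIO() if binary else UStringIO()

    make_view_stream(True).write(b'%PDF-1.4 ...')
    make_view_stream(False).write(u'<div>hello</div>')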
--- a/web/application.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/web/application.py	Mon Jun 22 14:27:37 2015 +0200
@@ -52,103 +52,14 @@
 @contextmanager
 def anonymized_request(req):
     orig_cnx = req.cnx
-    anon_clt_cnx = anonymous_cnx(orig_cnx._session.repo)
-    req.set_cnx(anon_clt_cnx)
+    anon_cnx = anonymous_cnx(orig_cnx.session.repo)
+    req.set_cnx(anon_cnx)
     try:
-        with anon_clt_cnx:
+        with anon_cnx:
             yield req
     finally:
         req.set_cnx(orig_cnx)
 
-class AbstractSessionManager(component.Component):
-    """manage session data associated to a session identifier"""
-    __regid__ = 'sessionmanager'
-
-    def __init__(self, repo):
-        vreg = repo.vreg
-        self.session_time = vreg.config['http-session-time'] or None
-        self.authmanager = vreg['components'].select('authmanager', repo=repo)
-        interval = (self.session_time or 0) / 2.
-        if vreg.config.anonymous_user()[0] is not None:
-            self.cleanup_anon_session_time = vreg.config['cleanup-anonymous-session-time'] or 5 * 60
-            assert self.cleanup_anon_session_time > 0
-            if self.session_time is not None:
-                self.cleanup_anon_session_time = min(self.session_time,
-                                                     self.cleanup_anon_session_time)
-            interval = self.cleanup_anon_session_time / 2.
-        # we don't want to check session more than once every 5 minutes
-        self.clean_sessions_interval = max(5 * 60, interval)
-
-    def clean_sessions(self):
-        """cleanup sessions which has not been unused since a given amount of
-        time. Return the number of sessions which have been closed.
-        """
-        self.debug('cleaning http sessions')
-        session_time = self.session_time
-        closed, total = 0, 0
-        for session in self.current_sessions():
-            total += 1
-            last_usage_time = session.mtime
-            no_use_time = (time() - last_usage_time)
-            if session.anonymous_session:
-                if no_use_time >= self.cleanup_anon_session_time:
-                    self.close_session(session)
-                    closed += 1
-            elif session_time is not None and no_use_time >= session_time:
-                self.close_session(session)
-                closed += 1
-        return closed, total - closed
-
-    def current_sessions(self):
-        """return currently open sessions"""
-        raise NotImplementedError()
-
-    def get_session(self, req, sessionid):
-        """return existing session for the given session identifier"""
-        raise NotImplementedError()
-
-    def open_session(self, req):
-        """open and return a new session for the given request.
-
-        raise :exc:`cubicweb.AuthenticationError` if authentication failed
-        (no authentication info found or wrong user/password)
-        """
-        raise NotImplementedError()
-
-    def close_session(self, session):
-        """close session on logout or on invalid session detected (expired out,
-        corrupted...)
-        """
-        raise NotImplementedError()
-
-
-class AbstractAuthenticationManager(component.Component):
-    """authenticate user associated to a request and check session validity"""
-    __regid__ = 'authmanager'
-
-    def __init__(self, repo):
-        self.vreg = repo.vreg
-
-    def validate_session(self, req, session):
-        """check session validity, reconnecting it to the repository if the
-        associated connection expired in the repository side (hence the
-        necessity for this method).
-
-        raise :exc:`InvalidSession` if session is corrupted for a reason or
-        another and should be closed
-        """
-        raise NotImplementedError()
-
-    def authenticate(self, req):
-        """authenticate user using connection information found in the request,
-        and return a corresponding :class:`~cubicweb.dbapi.Connection` instance,
-        as well as login and authentication information dictionary used to open
-        the connection.
-
-        raise :exc:`cubicweb.AuthenticationError` if authentication failed
-        (no authentication info found or wrong user/password)
-        """
-        raise NotImplementedError()
 
 
 class CookieSessionHandler(object):
@@ -350,7 +261,7 @@
             try:
                 session = self.get_session(req)
                 from  cubicweb import repoapi
-                cnx = repoapi.ClientConnection(session)
+                cnx = repoapi.Connection(session)
                 req.set_cnx(cnx)
             except AuthenticationError:
                 # Keep the dummy session set at initialisation.
@@ -365,12 +276,6 @@
                 # several cubes like registration or forgotten password rely on
                 # this principle.
 
-            # DENY https access for anonymous_user
-            if (req.https
-                and req.session.anonymous_session
-                and self.vreg.config['https-deny-anonymous']):
-                # don't allow anonymous on https connection
-                raise AuthenticationError()
             # nested try to allow LogOut to delegate logic to AuthenticationError
             # handler
             try:
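
After this change anonymized_request builds its anonymous connection directly from orig_cnx.session.repo, the repoapi Connection class replaces ClientConnection, and the old session/authentication manager base classes are dropped from this module. A hedged usage sketch of the context manager as it now reads (req stands for an incoming cubicweb web request, render_public_page and the 'index' view id are arbitrary examples):

    from cubicweb.web.application import anonymized_request

    def render_public_page(req):
        # temporarily swap the request's connection for an anonymous one,
        # restoring the original connection when the block exits
        with anonymized_request(req):
            return req.view('index')
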
--- a/web/data/cubicweb.ajax.js	Mon Jun 22 12:51:28 2015 +0200
+++ b/web/data/cubicweb.ajax.js	Mon Jun 22 14:27:37 2015 +0200
@@ -518,59 +518,6 @@
     });
 }
 
-userCallback = cw.utils.deprecatedFunction(
-    '[3.19] use a plain ajaxfunc instead of user callbacks',
-    function userCallback(cbname) {
-    setProgressCursor();
-    var d = loadRemote(AJAX_BASE_URL, ajaxFuncArgs('user_callback', null, cbname));
-    d.addCallback(resetCursor);
-    d.addErrback(resetCursor);
-    d.addErrback(remoteCallFailed);
-    return d;
-});
-
-userCallbackThenUpdateUI = cw.utils.deprecatedFunction(
-    '[3.19] use a plain ajaxfunc instead of user callbacks',
-    function userCallbackThenUpdateUI(cbname, compid, rql, msg, registry, nodeid) {
-    var d = userCallback(cbname);
-    d.addCallback(function() {
-        $('#' + nodeid).loadxhtml(AJAX_BASE_URL, ajaxFuncArgs('render', {'rql': rql},
-                                                       registry, compid), null, 'swap');
-        if (msg) {
-            updateMessage(msg);
-        }
-    });
-});
-
-userCallbackThenReloadPage = cw.utils.deprecatedFunction(
-    '[3.19] use a plain ajaxfunc instead of user callbacks',
-    function userCallbackThenReloadPage(cbname, msg) {
-    var d = userCallback(cbname);
-    d.addCallback(function() {
-        window.location.reload();
-        if (msg) {
-            updateMessage(msg);
-        }
-    });
-});
-
-/**
- * .. function:: unregisterUserCallback(cbname)
- *
- * unregisters the python function registered on the server's side
- * while the page was generated.
- */
-unregisterUserCallback = cw.utils.deprecatedFunction(
-    '[3.19] use a plain ajaxfunc instead of user callbacks',
-    function unregisterUserCallback(cbname) {
-    setProgressCursor();
-    var d = loadRemote(AJAX_BASE_URL, ajaxFuncArgs('unregister_user_callback',
-                                            null, cbname));
-    d.addCallback(resetCursor);
-    d.addErrback(resetCursor);
-    d.addErrback(remoteCallFailed);
-});
-
 
 //============= XXX move those functions? ====================================//
 function openHash() {
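
The deprecation messages removed above already point to the replacement: plain ajax functions instead of registered user callbacks. A minimal server-side sketch, assuming the ajaxfunc decorator from cubicweb.web.views.ajaxcontroller (say_hello is an invented example name; the JS side would then reach it through loadRemote/ajaxFuncArgs as in the code above):

    from cubicweb.web.views.ajaxcontroller import ajaxfunc

    @ajaxfunc(output_type='json')
    def say_hello(self, name):
        # self is the ajax function appobject; the request is self._cw
        return {'msg': u'hello %s' % name}
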
--- a/web/data/cubicweb.timeline-bundle.js	Mon Jun 22 12:51:28 2015 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,10129 +0,0 @@
-/**
- *  This file contains timeline utilities
- *  :organization: Logilab
- */
-
-var SimileAjax_urlPrefix = BASE_URL + 'data/';
-var Timeline_urlPrefix = BASE_URL + 'data/';
-
-/*
- *  Simile Ajax API
- *
- *  Include this file in your HTML file as follows::
- *
- *    <script src="http://simile.mit.edu/ajax/api/simile-ajax-api.js" type="text/javascript"></script>
- *
- *
- */
-
-if (typeof SimileAjax == "undefined") {
-    var SimileAjax = {
-        loaded:                 false,
-        loadingScriptsCount:    0,
-        error:                  null,
-        params:                 { bundle:"true" }
-    };
-
-    SimileAjax.Platform = new Object();
-        /*
-            HACK: We need these 2 things here because we cannot simply append
-            a <script> element containing code that accesses SimileAjax.Platform
-            to initialize it because IE executes that <script> code first
-            before it loads ajax.js and platform.js.
-        */
-
-    var getHead = function(doc) {
-        return doc.getElementsByTagName("head")[0];
-    };
-
-    SimileAjax.findScript = function(doc, substring) {
-        var heads = doc.documentElement.getElementsByTagName("head");
-        for (var h = 0; h < heads.length; h++) {
-            var node = heads[h].firstChild;
-            while (node != null) {
-                if (node.nodeType == 1 && node.tagName.toLowerCase() == "script") {
-                    var url = node.src;
-                    var i = url.indexOf(substring);
-                    if (i >= 0) {
-                        return url;
-                    }
-                }
-                node = node.nextSibling;
-            }
-        }
-        return null;
-    };
-    SimileAjax.includeJavascriptFile = function(doc, url, onerror, charset) {
-        onerror = onerror || "";
-        if (doc.body == null) {
-            try {
-                var q = "'" + onerror.replace( /'/g, '&apos' ) + "'"; // "
-                doc.write("<script src='" + url + "' onerror="+ q +
-                          (charset ? " charset='"+ charset +"'" : "") +
-                          " type='text/javascript'>"+ onerror + "</script>");
-                return;
-            } catch (e) {
-                // fall through
-            }
-        }
-
-        var script = doc.createElement("script");
-        if (onerror) {
-            try { script.innerHTML = onerror; } catch(e) {}
-            script.setAttribute("onerror", onerror);
-        }
-        if (charset) {
-            script.setAttribute("charset", charset);
-        }
-        script.type = "text/javascript";
-        script.language = "JavaScript";
-        script.src = url;
-        return getHead(doc).appendChild(script);
-    };
-    SimileAjax.includeJavascriptFiles = function(doc, urlPrefix, filenames) {
-        for (var i = 0; i < filenames.length; i++) {
-            SimileAjax.includeJavascriptFile(doc, urlPrefix + filenames[i]);
-        }
-        SimileAjax.loadingScriptsCount += filenames.length;
-        // XXX adim SimileAjax.includeJavascriptFile(doc, SimileAjax.urlPrefix + "scripts/signal.js?" + filenames.length);
-    };
-    SimileAjax.includeCssFile = function(doc, url) {
-        if (doc.body == null) {
-            try {
-                doc.write("<link rel='stylesheet' href='" + url + "' type='text/css'/>");
-                return;
-            } catch (e) {
-                // fall through
-            }
-        }
-
-        var link = doc.createElement("link");
-        link.setAttribute("rel", "stylesheet");
-        link.setAttribute("type", "text/css");
-        link.setAttribute("href", url);
-        getHead(doc).appendChild(link);
-    };
-    SimileAjax.includeCssFiles = function(doc, urlPrefix, filenames) {
-        for (var i = 0; i < filenames.length; i++) {
-            SimileAjax.includeCssFile(doc, urlPrefix + filenames[i]);
-        }
-    };
-
-    /**
-     * Append into urls each string in suffixes after prefixing it with urlPrefix.
-     * @param {Array} urls
-     * @param {String} urlPrefix
-     * @param {Array} suffixes
-     */
-    SimileAjax.prefixURLs = function(urls, urlPrefix, suffixes) {
-        for (var i = 0; i < suffixes.length; i++) {
-            urls.push(urlPrefix + suffixes[i]);
-        }
-    };
-
-    /**
-     * Parse out the query parameters from a URL
-     * @param {String} url    the url to parse, or location.href if undefined
-     * @param {Object} to     optional object to extend with the parameters
-     * @param {Object} types  optional object mapping keys to value types
-     *        (String, Number, Boolean or Array, String by default)
-     * @return a key/value Object whose keys are the query parameter names
-     * @type Object
-     */
-    SimileAjax.parseURLParameters = function(url, to, types) {
-        to = to || {};
-        types = types || {};
-
-        if (typeof url == "undefined") {
-            url = location.href;
-        }
-        var q = url.indexOf("?");
-        if (q < 0) {
-            return to;
-        }
-        url = (url+"#").slice(q+1, url.indexOf("#")); // toss the URL fragment
-
-        var params = url.split("&"), param, parsed = {};
-        var decode = window.decodeURIComponent || unescape;
-        for (var i = 0; param = params[i]; i++) {
-            var eq = param.indexOf("=");
-            var name = decode(param.slice(0,eq));
-            var old = parsed[name];
-            if (typeof old == "undefined") {
-                old = [];
-            } else if (!(old instanceof Array)) {
-                old = [old];
-            }
-            parsed[name] = old.concat(decode(param.slice(eq+1)));
-        }
-        for (var i in parsed) {
-            if (!parsed.hasOwnProperty(i)) continue;
-            var type = types[i] || String;
-            var data = parsed[i];
-            if (!(data instanceof Array)) {
-                data = [data];
-            }
-            if (type === Boolean && data[0] == "false") {
-                to[i] = false; // because Boolean("false") === true
-            } else {
-                to[i] = type.apply(this, data);
-            }
-        }
-        return to;
-    };
-
-    (function() {
-        var javascriptFiles = [
-            "jquery-1.2.6.js",
-            "platform.js",
-            "debug.js",
-            "xmlhttp.js",
-            "json.js",
-            "dom.js",
-            "graphics.js",
-            "date-time.js",
-            "string.js",
-            "html.js",
-            "data-structure.js",
-            "units.js",
-
-            "ajax.js",
-            "history.js",
-            "window-manager.js"
-        ];
-        var cssFiles = [
-            "graphics.css"
-        ];
-
-        if (typeof SimileAjax_urlPrefix == "string") {
-            SimileAjax.urlPrefix = SimileAjax_urlPrefix;
-        } else {
-            var url = SimileAjax.findScript(document, "simile-ajax-api.js");
-            if (url == null) {
-                SimileAjax.error = new Error("Failed to derive URL prefix for Simile Ajax API code files");
-                return;
-            }
-
-            SimileAjax.urlPrefix = url.substr(0, url.indexOf("simile-ajax-api.js"));
-        }
-
-        SimileAjax.parseURLParameters(url, SimileAjax.params, {bundle:Boolean});
-//         if (SimileAjax.params.bundle) {
-//             SimileAjax.includeJavascriptFiles(document, SimileAjax.urlPrefix, [ "simile-ajax-bundle.js" ]);
-//         } else {
-//             SimileAjax.includeJavascriptFiles(document, SimileAjax.urlPrefix + "scripts/", javascriptFiles);
-//         }
-//         SimileAjax.includeCssFiles(document, SimileAjax.urlPrefix + "styles/", cssFiles);
-
-        SimileAjax.loaded = true;
-    })();
-}
-/*
- *  Platform Utility Functions and Constants
- *
- */
-
-/*  This must be called after our jQuery has been loaded
-    but before control returns to user-code.
-*/
-SimileAjax.jQuery = jQuery;
-// SimileAjax.jQuery = jQuery.noConflict(true);
-if (typeof window["$"] == "undefined") {
-    window.$ = SimileAjax.jQuery;
-}
-
-SimileAjax.Platform.os = {
-    isMac:   false,
-    isWin:   false,
-    isWin32: false,
-    isUnix:  false
-};
-SimileAjax.Platform.browser = {
-    isIE:           false,
-    isNetscape:     false,
-    isMozilla:      false,
-    isFirefox:      false,
-    isOpera:        false,
-    isSafari:       false,
-
-    majorVersion:   0,
-    minorVersion:   0
-};
-
-(function() {
-    var an = navigator.appName.toLowerCase();
-	var ua = navigator.userAgent.toLowerCase();
-
-    /*
-     *  Operating system
-     */
-	SimileAjax.Platform.os.isMac = (ua.indexOf('mac') != -1);
-	SimileAjax.Platform.os.isWin = (ua.indexOf('win') != -1);
-	SimileAjax.Platform.os.isWin32 = SimileAjax.Platform.isWin && (
-        ua.indexOf('95') != -1 ||
-        ua.indexOf('98') != -1 ||
-        ua.indexOf('nt') != -1 ||
-        ua.indexOf('win32') != -1 ||
-        ua.indexOf('32bit') != -1
-    );
-	SimileAjax.Platform.os.isUnix = (ua.indexOf('x11') != -1);
-
-    /*
-     *  Browser
-     */
-    SimileAjax.Platform.browser.isIE = (an.indexOf("microsoft") != -1);
-    SimileAjax.Platform.browser.isNetscape = (an.indexOf("netscape") != -1);
-    SimileAjax.Platform.browser.isMozilla = (ua.indexOf("mozilla") != -1);
-    SimileAjax.Platform.browser.isFirefox = (ua.indexOf("firefox") != -1);
-    SimileAjax.Platform.browser.isOpera = (an.indexOf("opera") != -1);
-    SimileAjax.Platform.browser.isSafari = (an.indexOf("safari") != -1);
-
-    var parseVersionString = function(s) {
-        var a = s.split(".");
-        SimileAjax.Platform.browser.majorVersion = parseInt(a[0]);
-        SimileAjax.Platform.browser.minorVersion = parseInt(a[1]);
-    };
-    var indexOf = function(s, sub, start) {
-        var i = s.indexOf(sub, start);
-        return i >= 0 ? i : s.length;
-    };
-
-    if (SimileAjax.Platform.browser.isMozilla) {
-        var offset = ua.indexOf("mozilla/");
-        if (offset >= 0) {
-            parseVersionString(ua.substring(offset + 8, indexOf(ua, " ", offset)));
-        }
-    }
-    if (SimileAjax.Platform.browser.isIE) {
-        var offset = ua.indexOf("msie ");
-        if (offset >= 0) {
-            parseVersionString(ua.substring(offset + 5, indexOf(ua, ";", offset)));
-        }
-    }
-    if (SimileAjax.Platform.browser.isNetscape) {
-        var offset = ua.indexOf("rv:");
-        if (offset >= 0) {
-            parseVersionString(ua.substring(offset + 3, indexOf(ua, ")", offset)));
-        }
-    }
-    if (SimileAjax.Platform.browser.isFirefox) {
-        var offset = ua.indexOf("firefox/");
-        if (offset >= 0) {
-            parseVersionString(ua.substring(offset + 8, indexOf(ua, " ", offset)));
-        }
-    }
-
-    if (!("localeCompare" in String.prototype)) {
-        String.prototype.localeCompare = function (s) {
-            if (this < s) return -1;
-            else if (this > s) return 1;
-            else return 0;
-        };
-    }
-})();
-
-SimileAjax.Platform.getDefaultLocale = function() {
-    return SimileAjax.Platform.clientLocale;
-};
-/*
- *  Debug Utility Functions
- *
- */
-
-SimileAjax.Debug = {
-    silent: false
-};
-
-SimileAjax.Debug.log = function(msg) {
-    var f;
-    if ("console" in window && "log" in window.console) { // FireBug installed
-        f = function(msg2) {
-            console.log(msg2);
-        }
-    } else {
-        f = function(msg2) {
-            if (!SimileAjax.Debug.silent) {
-                alert(msg2);
-            }
-        }
-    }
-    SimileAjax.Debug.log = f;
-    f(msg);
-};
-
-SimileAjax.Debug.warn = function(msg) {
-    var f;
-    if ("console" in window && "warn" in window.console) { // FireBug installed
-        f = function(msg2) {
-            console.warn(msg2);
-        }
-    } else {
-        f = function(msg2) {
-            if (!SimileAjax.Debug.silent) {
-                alert(msg2);
-            }
-        }
-    }
-    SimileAjax.Debug.warn = f;
-    f(msg);
-};
-
-SimileAjax.Debug.exception = function(e, msg) {
-    var f, params = SimileAjax.parseURLParameters();
-    if (params.errors == "throw" || SimileAjax.params.errors == "throw") {
-        f = function(e2, msg2) {
-            throw(e2); // do not hide from browser's native debugging features
-        };
-    } else if ("console" in window && "error" in window.console) { // FireBug installed
-        f = function(e2, msg2) {
-            if (msg2 != null) {
-                console.error(msg2 + " %o", e2);
-            } else {
-                console.error(e2);
-            }
-            throw(e2); // do not hide from browser's native debugging features
-        };
-    } else {
-        f = function(e2, msg2) {
-            if (!SimileAjax.Debug.silent) {
-                alert("Caught exception: " + msg2 + "\n\nDetails: " + ("description" in e2 ? e2.description : e2));
-            }
-            throw(e2); // do not hide from browser's native debugging features
-        };
-    }
-    SimileAjax.Debug.exception = f;
-    f(e, msg);
-};
-
-SimileAjax.Debug.objectToString = function(o) {
-    return SimileAjax.Debug._objectToString(o, "");
-};
-
-SimileAjax.Debug._objectToString = function(o, indent) {
-    var indent2 = indent + " ";
-    if (typeof o == "object") {
-        var s = "{";
-        for (n in o) {
-            s += indent2 + n + ": " + SimileAjax.Debug._objectToString(o[n], indent2) + "\n";
-        }
-        s += indent + "}";
-        return s;
-    } else if (typeof o == "array") {
-        var s = "[";
-        for (var n = 0; n < o.length; n++) {
-            s += SimileAjax.Debug._objectToString(o[n], indent2) + "\n";
-        }
-        s += indent + "]";
-        return s;
-    } else {
-        return o;
-    }
-};
-/**
- * @fileOverview XmlHttp utility functions
- * @name SimileAjax.XmlHttp
- */
-
-SimileAjax.XmlHttp = new Object();
-
-/**
- *  Callback for XMLHttp onRequestStateChange.
- */
-SimileAjax.XmlHttp._onReadyStateChange = function(xmlhttp, fError, fDone) {
-    switch (xmlhttp.readyState) {
-    // 1: Request not yet made
-    // 2: Contact established with server but nothing downloaded yet
-    // 3: Called multiple while downloading in progress
-
-    // Download complete
-    case 4:
-        try {
-            if (xmlhttp.status == 0     // file:// urls, works on Firefox
-             || xmlhttp.status == 200   // http:// urls
-            ) {
-                if (fDone) {
-                    fDone(xmlhttp);
-                }
-            } else {
-                if (fError) {
-                    fError(
-                        xmlhttp.statusText,
-                        xmlhttp.status,
-                        xmlhttp
-                    );
-                }
-            }
-        } catch (e) {
-            SimileAjax.Debug.exception("XmlHttp: Error handling onReadyStateChange", e);
-        }
-        break;
-    }
-};
-
-/**
- *  Creates an XMLHttpRequest object. On the first run, this
- *  function creates a platform-specific function for
- *  instantiating an XMLHttpRequest object and then replaces
- *  itself with that function.
- */
-SimileAjax.XmlHttp._createRequest = function() {
-    if (SimileAjax.Platform.browser.isIE) {
-        var programIDs = [
-        "Msxml2.XMLHTTP",
-        "Microsoft.XMLHTTP",
-        "Msxml2.XMLHTTP.4.0"
-        ];
-        for (var i = 0; i < programIDs.length; i++) {
-            try {
-                var programID = programIDs[i];
-                var f = function() {
-                    return new ActiveXObject(programID);
-                };
-                var o = f();
-
-                // We are replacing the SimileAjax._createXmlHttpRequest
-                // function with this inner function as we've
-                // found out that it works. This is so that we
-                // don't have to do all the testing over again
-                // on subsequent calls.
-                SimileAjax.XmlHttp._createRequest = f;
-
-                return o;
-            } catch (e) {
-                // silent
-            }
-        }
-        // fall through to try new XMLHttpRequest();
-    }
-
-    try {
-        var f = function() {
-            return new XMLHttpRequest();
-        };
-        var o = f();
-
-        // We are replacing the SimileAjax._createXmlHttpRequest
-        // function with this inner function as we've
-        // found out that it works. This is so that we
-        // don't have to do all the testing over again
-        // on subsequent calls.
-        SimileAjax.XmlHttp._createRequest = f;
-
-        return o;
-    } catch (e) {
-        throw new Error("Failed to create an XMLHttpRequest object");
-    }
-};
-
-/**
- * Performs an asynchronous HTTP GET.
- *
- * @param {Function} fError a function of the form
-     function(statusText, statusCode, xmlhttp)
- * @param {Function} fDone a function of the form function(xmlhttp)
- */
-SimileAjax.XmlHttp.get = function(url, fError, fDone) {
-    var xmlhttp = SimileAjax.XmlHttp._createRequest();
-
-    xmlhttp.open("GET", url, true);
-    xmlhttp.onreadystatechange = function() {
-        SimileAjax.XmlHttp._onReadyStateChange(xmlhttp, fError, fDone);
-    };
-    xmlhttp.send(null);
-};
-
-/**
- * Performs an asynchronous HTTP POST.
- *
- * @param {Function} fError a function of the form
-     function(statusText, statusCode, xmlhttp)
- * @param {Function} fDone a function of the form function(xmlhttp)
- */
-SimileAjax.XmlHttp.post = function(url, body, fError, fDone) {
-    var xmlhttp = SimileAjax.XmlHttp._createRequest();
-
-    xmlhttp.open("POST", url, true);
-    xmlhttp.onreadystatechange = function() {
-        SimileAjax.XmlHttp._onReadyStateChange(xmlhttp, fError, fDone);
-    };
-    xmlhttp.send(body);
-};
-
-SimileAjax.XmlHttp._forceXML = function(xmlhttp) {
-    try {
-        xmlhttp.overrideMimeType("text/xml");
-    } catch (e) {
-        xmlhttp.setrequestheader("Content-Type", "text/xml");
-    }
-};/*
- *  Copied directly from http://www.json.org/json.js.
- */
-
-/*
-    json.js
-    2006-04-28
-
-    This file adds these methods to JavaScript:
-
-        object.toJSONString()
-
-            This method produces a JSON text from an object. The
-            object must not contain any cyclical references.
-
-        array.toJSONString()
-
-            This method produces a JSON text from an array. The
-            array must not contain any cyclical references.
-
-        string.parseJSON()
-
-            This method parses a JSON text to produce an object or
-            array. It will return false if there is an error.
-*/
-
-SimileAjax.JSON = new Object();
-
-(function () {
-    var m = {
-        '\b': '\\b',
-        '\t': '\\t',
-        '\n': '\\n',
-        '\f': '\\f',
-        '\r': '\\r',
-        '"' : '\\"',
-        '\\': '\\\\'
-    };
-    var s = {
-        array: function (x) {
-            var a = ['['], b, f, i, l = x.length, v;
-            for (i = 0; i < l; i += 1) {
-                v = x[i];
-                f = s[typeof v];
-                if (f) {
-                    v = f(v);
-                    if (typeof v == 'string') {
-                        if (b) {
-                            a[a.length] = ',';
-                        }
-                        a[a.length] = v;
-                        b = true;
-                    }
-                }
-            }
-            a[a.length] = ']';
-            return a.join('');
-        },
-        'boolean': function (x) {
-            return String(x);
-        },
-        'null': function (x) {
-            return "null";
-        },
-        number: function (x) {
-            return isFinite(x) ? String(x) : 'null';
-        },
-        object: function (x) {
-            if (x) {
-                if (x instanceof Array) {
-                    return s.array(x);
-                }
-                var a = ['{'], b, f, i, v;
-                for (i in x) {
-                    v = x[i];
-                    f = s[typeof v];
-                    if (f) {
-                        v = f(v);
-                        if (typeof v == 'string') {
-                            if (b) {
-                                a[a.length] = ',';
-                            }
-                            a.push(s.string(i), ':', v);
-                            b = true;
-                        }
-                    }
-                }
-                a[a.length] = '}';
-                return a.join('');
-            }
-            return 'null';
-        },
-        string: function (x) {
-            if (/["\\\x00-\x1f]/.test(x)) {
-                x = x.replace(/([\x00-\x1f\\"])/g, function(a, b) {
-                    var c = m[b];
-                    if (c) {
-                        return c;
-                    }
-                    c = b.charCodeAt();
-                    return '\\u00' +
-                        Math.floor(c / 16).toString(16) +
-                        (c % 16).toString(16);
-                });
-            }
-            return '"' + x + '"';
-        }
-    };
-
-    SimileAjax.JSON.toJSONString = function(o) {
-        if (o instanceof Object) {
-            return s.object(o);
-        } else if (o instanceof Array) {
-            return s.array(o);
-        } else {
-            return o.toString();
-        }
-    };
-
-    SimileAjax.JSON.parseJSON = function () {
-        try {
-            return !(/[^,:{}\[\]0-9.\-+Eaeflnr-u \n\r\t]/.test(
-                    this.replace(/"(\\.|[^"\\])*"/g, ''))) &&
-                eval('(' + this + ')');
-        } catch (e) {
-            return false;
-        }
-    };
-})();
-/*
- *  DOM Utility Functions
- *
- */
-
-SimileAjax.DOM = new Object();
-
-SimileAjax.DOM.registerEventWithObject = function(elmt, eventName, obj, handlerName) {
-    SimileAjax.DOM.registerEvent(elmt, eventName, function(elmt2, evt, target) {
-        return obj[handlerName].call(obj, elmt2, evt, target);
-    });
-};
-
-SimileAjax.DOM.registerEvent = function(elmt, eventName, handler) {
-    var handler2 = function(evt) {
-        evt = (evt) ? evt : ((event) ? event : null);
-        if (evt) {
-            var target = (evt.target) ?
-                evt.target : ((evt.srcElement) ? evt.srcElement : null);
-            if (target) {
-                target = (target.nodeType == 1 || target.nodeType == 9) ?
-                    target : target.parentNode;
-            }
-
-            return handler(elmt, evt, target);
-        }
-        return true;
-    }
-
-    if (SimileAjax.Platform.browser.isIE) {
-        elmt.attachEvent("on" + eventName, handler2);
-    } else {
-        elmt.addEventListener(eventName, handler2, false);
-    }
-};
-
-SimileAjax.DOM.getPageCoordinates = function(elmt) {
-    var left = 0;
-    var top = 0;
-
-    if (elmt.nodeType != 1) {
-        elmt = elmt.parentNode;
-    }
-
-    var elmt2 = elmt;
-    while (elmt2 != null) {
-        left += elmt2.offsetLeft;
-        top += elmt2.offsetTop;
-        elmt2 = elmt2.offsetParent;
-    }
-
-    var body = document.body;
-    while (elmt != null && elmt != body) {
-        if ("scrollLeft" in elmt) {
-            left -= elmt.scrollLeft;
-            top -= elmt.scrollTop;
-        }
-        elmt = elmt.parentNode;
-    }
-
-    return { left: left, top: top };
-};
-
-SimileAjax.DOM.getSize = function(elmt) {
-	var w = this.getStyle(elmt,"width");
-	var h = this.getStyle(elmt,"height");
-	if (w.indexOf("px") > -1) w = w.replace("px","");
-	if (h.indexOf("px") > -1) h = h.replace("px","");
-	return {
-		w: w,
-		h: h
-	}
-}
-
-SimileAjax.DOM.getStyle = function(elmt, styleProp) {
-    if (elmt.currentStyle) { // IE
-        var style = elmt.currentStyle[styleProp];
-    } else if (window.getComputedStyle) { // standard DOM
-        var style = document.defaultView.getComputedStyle(elmt, null).getPropertyValue(styleProp);
-    } else {
-    	var style = "";
-    }
-    return style;
-}
-
-SimileAjax.DOM.getEventRelativeCoordinates = function(evt, elmt) {
-    if (SimileAjax.Platform.browser.isIE) {
-      if (evt.type == "mousewheel") {
-        var coords = SimileAjax.DOM.getPageCoordinates(elmt);
-        return {
-          x: evt.clientX - coords.left,
-          y: evt.clientY - coords.top
-        };
-      } else {
-        return {
-          x: evt.offsetX,
-          y: evt.offsetY
-        };
-      }
-    } else {
-        var coords = SimileAjax.DOM.getPageCoordinates(elmt);
-
-        if ((evt.type == "DOMMouseScroll") &&
-          SimileAjax.Platform.browser.isFirefox &&
-          (SimileAjax.Platform.browser.majorVersion == 2)) {
-          // Due to: https://bugzilla.mozilla.org/show_bug.cgi?id=352179
-
-          return {
-            x: evt.screenX - coords.left,
-            y: evt.screenY - coords.top
-          };
-        } else {
-          return {
-              x: evt.pageX - coords.left,
-              y: evt.pageY - coords.top
-          };
-        }
-    }
-};
-
-SimileAjax.DOM.getEventPageCoordinates = function(evt) {
-    if (SimileAjax.Platform.browser.isIE) {
-        return {
-            x: evt.clientX + document.body.scrollLeft,
-            y: evt.clientY + document.body.scrollTop
-        };
-    } else {
-        return {
-            x: evt.pageX,
-            y: evt.pageY
-        };
-    }
-};
-
-SimileAjax.DOM.hittest = function(x, y, except) {
-    return SimileAjax.DOM._hittest(document.body, x, y, except);
-};
-
-SimileAjax.DOM._hittest = function(elmt, x, y, except) {
-    var childNodes = elmt.childNodes;
-    outer: for (var i = 0; i < childNodes.length; i++) {
-        var childNode = childNodes[i];
-        for (var j = 0; j < except.length; j++) {
-            if (childNode == except[j]) {
-                continue outer;
-            }
-        }
-
-        if (childNode.offsetWidth == 0 && childNode.offsetHeight == 0) {
-            /*
-             *  Sometimes SPAN elements have zero width and height but
-             *  they have children like DIVs that cover non-zero areas.
-             */
-            var hitNode = SimileAjax.DOM._hittest(childNode, x, y, except);
-            if (hitNode != childNode) {
-                return hitNode;
-            }
-        } else {
-            var top = 0;
-            var left = 0;
-
-            var node = childNode;
-            while (node) {
-                top += node.offsetTop;
-                left += node.offsetLeft;
-                node = node.offsetParent;
-            }
-
-            if (left <= x && top <= y && (x - left) < childNode.offsetWidth && (y - top) < childNode.offsetHeight) {
-                return SimileAjax.DOM._hittest(childNode, x, y, except);
-            } else if (childNode.nodeType == 1 && childNode.tagName == "TR") {
-                /*
-                 *  Table row might have cells that span several rows.
-                 */
-                var childNode2 = SimileAjax.DOM._hittest(childNode, x, y, except);
-                if (childNode2 != childNode) {
-                    return childNode2;
-                }
-            }
-        }
-    }
-    return elmt;
-};
-
-SimileAjax.DOM.cancelEvent = function(evt) {
-    evt.returnValue = false;
-    evt.cancelBubble = true;
-    if ("preventDefault" in evt) {
-        evt.preventDefault();
-    }
-};
-
-SimileAjax.DOM.appendClassName = function(elmt, className) {
-    var classes = elmt.className.split(" ");
-    for (var i = 0; i < classes.length; i++) {
-        if (classes[i] == className) {
-            return;
-        }
-    }
-    classes.push(className);
-    elmt.className = classes.join(" ");
-};
-
-SimileAjax.DOM.createInputElement = function(type) {
-    var div = document.createElement("div");
-    div.innerHTML = "<input type='" + type + "' />";
-
-    return div.firstChild;
-};
-
-SimileAjax.DOM.createDOMFromTemplate = function(template) {
-    var result = {};
-    result.elmt = SimileAjax.DOM._createDOMFromTemplate(template, result, null);
-
-    return result;
-};
-
-SimileAjax.DOM._createDOMFromTemplate = function(templateNode, result, parentElmt) {
-    if (templateNode == null) {
-        /*
-        var node = doc.createTextNode("--null--");
-        if (parentElmt != null) {
-            parentElmt.appendChild(node);
-        }
-        return node;
-        */
-        return null;
-    } else if (typeof templateNode != "object") {
-        var node = document.createTextNode(templateNode);
-        if (parentElmt != null) {
-            parentElmt.appendChild(node);
-        }
-        return node;
-    } else {
-        var elmt = null;
-        if ("tag" in templateNode) {
-            var tag = templateNode.tag;
-            if (parentElmt != null) {
-                if (tag == "tr") {
-                    elmt = parentElmt.insertRow(parentElmt.rows.length);
-                } else if (tag == "td") {
-                    elmt = parentElmt.insertCell(parentElmt.cells.length);
-                }
-            }
-            if (elmt == null) {
-                elmt = tag == "input" ?
-                    SimileAjax.DOM.createInputElement(templateNode.type) :
-                    document.createElement(tag);
-
-                if (parentElmt != null) {
-                    parentElmt.appendChild(elmt);
-                }
-            }
-        } else {
-            elmt = templateNode.elmt;
-            if (parentElmt != null) {
-                parentElmt.appendChild(elmt);
-            }
-        }
-
-        for (var attribute in templateNode) {
-            var value = templateNode[attribute];
-
-            if (attribute == "field") {
-                result[value] = elmt;
-
-            } else if (attribute == "className") {
-                elmt.className = value;
-            } else if (attribute == "id") {
-                elmt.id = value;
-            } else if (attribute == "title") {
-                elmt.title = value;
-            } else if (attribute == "type" && elmt.tagName == "input") {
-                // do nothing
-            } else if (attribute == "style") {
-                for (n in value) {
-                    var v = value[n];
-                    if (n == "float") {
-                        n = SimileAjax.Platform.browser.isIE ? "styleFloat" : "cssFloat";
-                    }
-                    elmt.style[n] = v;
-                }
-            } else if (attribute == "children") {
-                for (var i = 0; i < value.length; i++) {
-                    SimileAjax.DOM._createDOMFromTemplate(value[i], result, elmt);
-                }
-            } else if (attribute != "tag" && attribute != "elmt") {
-                elmt.setAttribute(attribute, value);
-            }
-        }
-        return elmt;
-    }
-}
-
-SimileAjax.DOM._cachedParent = null;
-SimileAjax.DOM.createElementFromString = function(s) {
-    if (SimileAjax.DOM._cachedParent == null) {
-        SimileAjax.DOM._cachedParent = document.createElement("div");
-    }
-    SimileAjax.DOM._cachedParent.innerHTML = s;
-    return SimileAjax.DOM._cachedParent.firstChild;
-};
-
-SimileAjax.DOM.createDOMFromString = function(root, s, fieldElmts) {
-    var elmt = typeof root == "string" ? document.createElement(root) : root;
-    elmt.innerHTML = s;
-
-    var dom = { elmt: elmt };
-    SimileAjax.DOM._processDOMChildrenConstructedFromString(dom, elmt, fieldElmts != null ? fieldElmts : {} );
-
-    return dom;
-};
-
-SimileAjax.DOM._processDOMConstructedFromString = function(dom, elmt, fieldElmts) {
-    var id = elmt.id;
-    if (id != null && id.length > 0) {
-        elmt.removeAttribute("id");
-        if (id in fieldElmts) {
-            var parentElmt = elmt.parentNode;
-            parentElmt.insertBefore(fieldElmts[id], elmt);
-            parentElmt.removeChild(elmt);
-
-            dom[id] = fieldElmts[id];
-            return;
-        } else {
-            dom[id] = elmt;
-        }
-    }
-
-    if (elmt.hasChildNodes()) {
-        SimileAjax.DOM._processDOMChildrenConstructedFromString(dom, elmt, fieldElmts);
-    }
-};
-
-SimileAjax.DOM._processDOMChildrenConstructedFromString = function(dom, elmt, fieldElmts) {
-    var node = elmt.firstChild;
-    while (node != null) {
-        var node2 = node.nextSibling;
-        if (node.nodeType == 1) {
-            SimileAjax.DOM._processDOMConstructedFromString(dom, node, fieldElmts);
-        }
-        node = node2;
-    }
-};
-/**
- * @fileOverview Graphics utility functions and constants
- * @name SimileAjax.Graphics
- */
-
-SimileAjax.Graphics = new Object();
-
-/**
- * A boolean value indicating whether PNG translucency is supported on the
- * user's browser or not.
- *
- * @type Boolean
- */
-SimileAjax.Graphics.pngIsTranslucent = (!SimileAjax.Platform.browser.isIE) || (SimileAjax.Platform.browser.majorVersion > 6);
-if (!SimileAjax.Graphics.pngIsTranslucent) {
-    SimileAjax.includeCssFile(document, SimileAjax.urlPrefix + "styles/graphics-ie6.css");
-}
-
-/*
- *  Opacity, translucency
- *
- */
-SimileAjax.Graphics._createTranslucentImage1 = function(url, verticalAlign) {
-    var elmt = document.createElement("img");
-    elmt.setAttribute("src", url);
-    if (verticalAlign != null) {
-        elmt.style.verticalAlign = verticalAlign;
-    }
-    return elmt;
-};
-SimileAjax.Graphics._createTranslucentImage2 = function(url, verticalAlign) {
-    var elmt = document.createElement("img");
-    elmt.style.width = "1px";  // just so that IE will calculate the size property
-    elmt.style.height = "1px";
-    elmt.style.filter = "progid:DXImageTransform.Microsoft.AlphaImageLoader(src='" + url +"', sizingMethod='image')";
-    elmt.style.verticalAlign = (verticalAlign != null) ? verticalAlign : "middle";
-    return elmt;
-};
-
-/**
- * Creates a DOM element for an <code>img</code> tag using the URL given. This
- * is a convenience method that automatically includes the necessary CSS to
- * allow for translucency, even on IE.
- *
- * @function
- * @param {String} url the URL to the image
- * @param {String} verticalAlign the CSS value for the image's vertical-align
- * @return {Element} a DOM element containing the <code>img</code> tag
- */
-SimileAjax.Graphics.createTranslucentImage = SimileAjax.Graphics.pngIsTranslucent ?
-    SimileAjax.Graphics._createTranslucentImage1 :
-    SimileAjax.Graphics._createTranslucentImage2;
-
-SimileAjax.Graphics._createTranslucentImageHTML1 = function(url, verticalAlign) {
-    return "<img src=\"" + url + "\"" +
-        (verticalAlign != null ? " style=\"vertical-align: " + verticalAlign + ";\"" : "") +
-        " />";
-};
-SimileAjax.Graphics._createTranslucentImageHTML2 = function(url, verticalAlign) {
-    var style =
-        "width: 1px; height: 1px; " +
-        "filter:progid:DXImageTransform.Microsoft.AlphaImageLoader(src='" + url +"', sizingMethod='image');" +
-        (verticalAlign != null ? " vertical-align: " + verticalAlign + ";" : "");
-
-    return "<img src='" + url + "' style=\"" + style + "\" />";
-};
-
-/**
- * Creates an HTML string for an <code>img</code> tag using the URL given.
- * This is a convenience method that automatically includes the necessary CSS
- * to allow for translucency, even on IE.
- *
- * @function
- * @param {String} url the URL to the image
- * @param {String} verticalAlign the CSS value for the image's vertical-align
- * @return {String} a string containing the <code>img</code> tag
- */
-SimileAjax.Graphics.createTranslucentImageHTML = SimileAjax.Graphics.pngIsTranslucent ?
-    SimileAjax.Graphics._createTranslucentImageHTML1 :
-    SimileAjax.Graphics._createTranslucentImageHTML2;
-
-/**
- * Sets the opacity on the given DOM element.
- *
- * @param {Element} elmt the DOM element to set the opacity on
- * @param {Number} opacity an integer from 0 to 100 specifying the opacity
- */
-SimileAjax.Graphics.setOpacity = function(elmt, opacity) {
-    if (SimileAjax.Platform.browser.isIE) {
-        elmt.style.filter = "progid:DXImageTransform.Microsoft.Alpha(Style=0,Opacity=" + opacity + ")";
-    } else {
-        var o = (opacity / 100).toString();
-        elmt.style.opacity = o;
-        elmt.style.MozOpacity = o;
-    }
-};
-
-/*
- *  Bubble
- *
- */
-
-SimileAjax.Graphics.bubbleConfig = {
-    containerCSSClass:              "simileAjax-bubble-container",
-    innerContainerCSSClass:         "simileAjax-bubble-innerContainer",
-    contentContainerCSSClass:       "simileAjax-bubble-contentContainer",
-
-    borderGraphicSize:              50,
-    borderGraphicCSSClassPrefix:    "simileAjax-bubble-border-",
-
-    arrowGraphicTargetOffset:       33,  // from tip of arrow to the side of the graphic that touches the content of the bubble
-    arrowGraphicLength:             100, // dimension of arrow graphic along the direction that the arrow points
-    arrowGraphicWidth:              49,  // dimension of arrow graphic perpendicular to the direction that the arrow points
-    arrowGraphicCSSClassPrefix:     "simileAjax-bubble-arrow-",
-
-    closeGraphicCSSClass:           "simileAjax-bubble-close",
-
-    extraPadding:                   20
-};
-
-/**
- * Creates a nice, rounded bubble popup with the given content in a div,
- * page coordinates and a suggested width. The bubble will point to the
- * location on the page as described by pageX and pageY.  All measurements
- * should be given in pixels.
- *
- * @param {Element} the content div
- * @param {Number} pageX the x coordinate of the point to point to
- * @param {Number} pageY the y coordinate of the point to point to
- * @param {Number} contentWidth a suggested width of the content
- * @param {String} orientation a string ("top", "bottom", "left", or "right")
- *   that describes the orientation of the arrow on the bubble
- * @param {Number} maxHeight. Add a scrollbar div if bubble would be too tall.
- *   Default of 0 or null means no maximum
- */
-SimileAjax.Graphics.createBubbleForContentAndPoint = function(
-       div, pageX, pageY, contentWidth, orientation, maxHeight) {
-    if (typeof contentWidth != "number") {
-        contentWidth = 300;
-    }
-    if (typeof maxHeight != "number") {
-        maxHeight = 0;
-    }
-
-    div.style.position = "absolute";
-    div.style.left = "-5000px";
-    div.style.top = "0px";
-    div.style.width = contentWidth + "px";
-    document.body.appendChild(div);
-
-    window.setTimeout(function() {
-        var width = div.scrollWidth + 10;
-        var height = div.scrollHeight + 10;
-        var scrollDivW = 0; // width of the possible inner container when we want vertical scrolling
-        if (maxHeight > 0 && height > maxHeight) {
-          height = maxHeight;
-          scrollDivW = width - 25;
-        }
-
-        var bubble = SimileAjax.Graphics.createBubbleForPoint(pageX, pageY, width, height, orientation);
-
-        document.body.removeChild(div);
-        div.style.position = "static";
-        div.style.left = "";
-        div.style.top = "";
-
-        // create a scroll div if needed
-        if (scrollDivW > 0) {
-          var scrollDiv = document.createElement("div");
-          div.style.width = "";
-          scrollDiv.style.width = scrollDivW + "px";
-          scrollDiv.appendChild(div);
-          bubble.content.appendChild(scrollDiv);
-        } else {
-          div.style.width = width + "px";
-          bubble.content.appendChild(div);
-        }
-    }, 200);
-};
-
-/**
- * Creates a nice, rounded bubble popup with the given page coordinates and
- * content dimensions.  The bubble will point to the location on the page
- * as described by pageX and pageY.  All measurements should be given in
- * pixels.
- *
- * @param {Number} pageX the x coordinate of the point to point to
- * @param {Number} pageY the y coordinate of the point to point to
- * @param {Number} contentWidth the width of the content box in the bubble
- * @param {Number} contentHeight the height of the content box in the bubble
- * @param {String} orientation a string ("top", "bottom", "left", or "right")
- *   that describes the orientation of the arrow on the bubble
- * @return {Element} a DOM element for the newly created bubble
- */
-SimileAjax.Graphics.createBubbleForPoint = function(pageX, pageY, contentWidth, contentHeight, orientation) {
-    contentWidth = parseInt(contentWidth, 10); // harden against bad input bugs
-    contentHeight = parseInt(contentHeight, 10); // getting numbers-as-strings
-
-    var bubbleConfig = SimileAjax.Graphics.bubbleConfig;
-    var pngTransparencyClassSuffix =
-        SimileAjax.Graphics.pngIsTranslucent ? "pngTranslucent" : "pngNotTranslucent";
-
-    var bubbleWidth = contentWidth + 2 * bubbleConfig.borderGraphicSize;
-    var bubbleHeight = contentHeight + 2 * bubbleConfig.borderGraphicSize;
-
-    var generatePngSensitiveClass = function(className) {
-        return className + " " + className + "-" + pngTransparencyClassSuffix;
-    };
-
-    /*
-     *  Render container divs
-     */
-    var div = document.createElement("div");
-    div.className = generatePngSensitiveClass(bubbleConfig.containerCSSClass);
-    div.style.width = contentWidth + "px";
-    div.style.height = contentHeight + "px";
-
-    var divInnerContainer = document.createElement("div");
-    divInnerContainer.className = generatePngSensitiveClass(bubbleConfig.innerContainerCSSClass);
-    div.appendChild(divInnerContainer);
-
-    /*
-     *  Create layer for bubble
-     */
-    var close = function() {
-        if (!bubble._closed) {
-            document.body.removeChild(bubble._div);
-            bubble._doc = null;
-            bubble._div = null;
-            bubble._content = null;
-            bubble._closed = true;
-        }
-    }
-    var bubble = { _closed: false };
-    var layer = SimileAjax.WindowManager.pushLayer(close, true, div);
-    bubble._div = div;
-    bubble.close = function() { SimileAjax.WindowManager.popLayer(layer); }
-
-    /*
-     *  Render border graphics
-     */
-    var createBorder = function(classNameSuffix) {
-        var divBorderGraphic = document.createElement("div");
-        divBorderGraphic.className = generatePngSensitiveClass(bubbleConfig.borderGraphicCSSClassPrefix + classNameSuffix);
-        divInnerContainer.appendChild(divBorderGraphic);
-    };
-    createBorder("top-left");
-    createBorder("top-right");
-    createBorder("bottom-left");
-    createBorder("bottom-right");
-    createBorder("left");
-    createBorder("right");
-    createBorder("top");
-    createBorder("bottom");
-
-    /*
-     *  Render content
-     */
-    var divContentContainer = document.createElement("div");
-    divContentContainer.className = generatePngSensitiveClass(bubbleConfig.contentContainerCSSClass);
-    divInnerContainer.appendChild(divContentContainer);
-    bubble.content = divContentContainer;
-
-    /*
-     *  Render close button
-     */
-    var divClose = document.createElement("div");
-    divClose.className = generatePngSensitiveClass(bubbleConfig.closeGraphicCSSClass);
-    divInnerContainer.appendChild(divClose);
-    SimileAjax.WindowManager.registerEventWithObject(divClose, "click", bubble, "close");
-
-    (function() {
-        var dims = SimileAjax.Graphics.getWindowDimensions();
-        var docWidth = dims.w;
-        var docHeight = dims.h;
-
-        var halfArrowGraphicWidth = Math.ceil(bubbleConfig.arrowGraphicWidth / 2);
-
-        var createArrow = function(classNameSuffix) {
-            var divArrowGraphic = document.createElement("div");
-            divArrowGraphic.className = generatePngSensitiveClass(bubbleConfig.arrowGraphicCSSClassPrefix + "point-" + classNameSuffix);
-            divInnerContainer.appendChild(divArrowGraphic);
-            return divArrowGraphic;
-        };
-
-        if (pageX - halfArrowGraphicWidth - bubbleConfig.borderGraphicSize - bubbleConfig.extraPadding > 0 &&
-            pageX + halfArrowGraphicWidth + bubbleConfig.borderGraphicSize + bubbleConfig.extraPadding < docWidth) {
-
-            /*
-             *  Bubble can be positioned above or below the target point.
-             */
-
-            var left = pageX - Math.round(contentWidth / 2);
-            left = pageX < (docWidth / 2) ?
-                Math.max(left, bubbleConfig.extraPadding + bubbleConfig.borderGraphicSize) :
-                Math.min(left, docWidth - bubbleConfig.extraPadding - bubbleConfig.borderGraphicSize - contentWidth);
-
-            if ((orientation && orientation == "top") ||
-                (!orientation &&
-                    (pageY
-                        - bubbleConfig.arrowGraphicTargetOffset
-                        - contentHeight
-                        - bubbleConfig.borderGraphicSize
-                        - bubbleConfig.extraPadding > 0))) {
-
-                /*
-                 *  Position bubble above the target point.
-                 */
-
-                var divArrow = createArrow("down");
-                divArrow.style.left = (pageX - halfArrowGraphicWidth - left) + "px";
-
-                div.style.left = left + "px";
-                div.style.top = (pageY - bubbleConfig.arrowGraphicTargetOffset - contentHeight) + "px";
-
-                return;
-            } else if ((orientation && orientation == "bottom") ||
-                (!orientation &&
-                    (pageY
-                        + bubbleConfig.arrowGraphicTargetOffset
-                        + contentHeight
-                        + bubbleConfig.borderGraphicSize
-                        + bubbleConfig.extraPadding < docHeight))) {
-
-                /*
-                 *  Position bubble below the target point.
-                 */
-
-                var divArrow = createArrow("up");
-                divArrow.style.left = (pageX - halfArrowGraphicWidth - left) + "px";
-
-                div.style.left = left + "px";
-                div.style.top = (pageY + bubbleConfig.arrowGraphicTargetOffset) + "px";
-
-                return;
-            }
-        }
-
-        var top = pageY - Math.round(contentHeight / 2);
-        top = pageY < (docHeight / 2) ?
-            Math.max(top, bubbleConfig.extraPadding + bubbleConfig.borderGraphicSize) :
-            Math.min(top, docHeight - bubbleConfig.extraPadding - bubbleConfig.borderGraphicSize - contentHeight);
-
-        if ((orientation && orientation == "left") ||
-            (!orientation &&
-                (pageX
-                    - bubbleConfig.arrowGraphicTargetOffset
-                    - contentWidth
-                    - bubbleConfig.borderGraphicSize
-                    - bubbleConfig.extraPadding > 0))) {
-
-            /*
-             *  Position bubble left of the target point.
-             */
-
-            var divArrow = createArrow("right");
-            divArrow.style.top = (pageY - halfArrowGraphicWidth - top) + "px";
-
-            div.style.top = top + "px";
-            div.style.left = (pageX - bubbleConfig.arrowGraphicTargetOffset - contentWidth) + "px";
-        } else {
-
-            /*
-             *  Position bubble right of the target point, as the last resort.
-             */
-
-            var divArrow = createArrow("left");
-            divArrow.style.top = (pageY - halfArrowGraphicWidth - top) + "px";
-
-            div.style.top = top + "px";
-            div.style.left = (pageX + bubbleConfig.arrowGraphicTargetOffset) + "px";
-        }
-    })();
-
-    document.body.appendChild(div);
-
-    return bubble;
-};
-
-SimileAjax.Graphics.getWindowDimensions = function() {
-    if (typeof window.innerHeight == 'number') {
-        return { w:window.innerWidth, h:window.innerHeight }; // Non-IE
-    } else if (document.documentElement && document.documentElement.clientHeight) {
-        return { // IE6+, in "standards compliant mode"
-            w:document.documentElement.clientWidth,
-            h:document.documentElement.clientHeight
-        };
-    } else if (document.body && document.body.clientHeight) {
-        return { // IE 4 compatible
-            w:document.body.clientWidth,
-            h:document.body.clientHeight
-        };
-    }
-};
-
-
-/**
- * Creates a floating, rounded message bubble in the center of the window for
- * displaying modal information, e.g. "Loading..."
- *
- * @param {Document} doc the root document for the page to render on
- * @param {Object} an object with two properties, contentDiv and containerDiv,
- *   consisting of the newly created DOM elements
- */
-SimileAjax.Graphics.createMessageBubble = function(doc) {
-    var containerDiv = doc.createElement("div");
-    if (SimileAjax.Graphics.pngIsTranslucent) {
-        var topDiv = doc.createElement("div");
-        topDiv.style.height = "33px";
-        topDiv.style.background = "url(" + SimileAjax.urlPrefix + "images/message-top-left.png) top left no-repeat";
-        topDiv.style.paddingLeft = "44px";
-        containerDiv.appendChild(topDiv);
-
-        var topRightDiv = doc.createElement("div");
-        topRightDiv.style.height = "33px";
-        topRightDiv.style.background = "url(" + SimileAjax.urlPrefix + "images/message-top-right.png) top right no-repeat";
-        topDiv.appendChild(topRightDiv);
-
-        var middleDiv = doc.createElement("div");
-        middleDiv.style.background = "url(" + SimileAjax.urlPrefix + "images/message-left.png) top left repeat-y";
-        middleDiv.style.paddingLeft = "44px";
-        containerDiv.appendChild(middleDiv);
-
-        var middleRightDiv = doc.createElement("div");
-        middleRightDiv.style.background = "url(" + SimileAjax.urlPrefix + "images/message-right.png) top right repeat-y";
-        middleRightDiv.style.paddingRight = "44px";
-        middleDiv.appendChild(middleRightDiv);
-
-        var contentDiv = doc.createElement("div");
-        middleRightDiv.appendChild(contentDiv);
-
-        var bottomDiv = doc.createElement("div");
-        bottomDiv.style.height = "55px";
-        bottomDiv.style.background = "url(" + SimileAjax.urlPrefix + "images/message-bottom-left.png) bottom left no-repeat";
-        bottomDiv.style.paddingLeft = "44px";
-        containerDiv.appendChild(bottomDiv);
-
-        var bottomRightDiv = doc.createElement("div");
-        bottomRightDiv.style.height = "55px";
-        bottomRightDiv.style.background = "url(" + SimileAjax.urlPrefix + "images/message-bottom-right.png) bottom right no-repeat";
-        bottomDiv.appendChild(bottomRightDiv);
-    } else {
-        containerDiv.style.border = "2px solid #7777AA";
-        containerDiv.style.padding = "20px";
-        containerDiv.style.background = "white";
-        SimileAjax.Graphics.setOpacity(containerDiv, 90);
-
-        var contentDiv = doc.createElement("div");
-        containerDiv.appendChild(contentDiv);
-    }
-
-    return {
-        containerDiv:   containerDiv,
-        contentDiv:     contentDiv
-    };
-};
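// Illustrative sketch, assuming the SimileAjax bundle is loaded: typical use of the
// message-bubble helper above. Positioning/centering of containerDiv is left to the
// caller and is an assumption in this example.
var bubble = SimileAjax.Graphics.createMessageBubble(document);
bubble.contentDiv.appendChild(document.createTextNode("Loading..."));
bubble.containerDiv.style.position = "absolute";   // placement chosen by the caller
document.body.appendChild(bubble.containerDiv);
// ... and remove it again once loading is done:
// document.body.removeChild(bubble.containerDiv);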
-
-/*
- *  Animation
- *
- */
-
-/**
- * Creates an animation for a function, and an interval of values.  The word
- * "animation" here is used in the sense of repeatedly calling a function with
- * a current value from within an interval, and a delta value.
- *
- * @param {Function} f a function to be called every 50 milliseconds throughout
- *   the animation duration, of the form f(current, delta), where current is
- *   the current value within the range and delta is the current change.
- * @param {Number} from a starting value
- * @param {Number} to an ending value
- * @param {Number} duration the duration of the animation in milliseconds
- * @param {Function} [cont] an optional function that is called at the end of
- *   the animation, i.e. a continuation.
- * @return {SimileAjax.Graphics._Animation} a new animation object
- */
-SimileAjax.Graphics.createAnimation = function(f, from, to, duration, cont) {
-    return new SimileAjax.Graphics._Animation(f, from, to, duration, cont);
-};
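// Illustrative sketch: fading an element in over 500 ms with the animation helper
// above. The element id is hypothetical; setOpacity takes a 0-100 percentage, as
// used elsewhere in this file.
var panel = document.getElementById("some-panel");          // hypothetical element
var fadeIn = SimileAjax.Graphics.createAnimation(
    function(current, delta) {
        SimileAjax.Graphics.setOpacity(panel, Math.round(current * 100));
    },
    0, 1, 500,
    function() { /* continuation: runs once the animation has finished */ }
);
fadeIn.run();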
-
-SimileAjax.Graphics._Animation = function(f, from, to, duration, cont) {
-    this.f = f;
-    this.cont = (typeof cont == "function") ? cont : function() {};
-
-    this.from = from;
-    this.to = to;
-    this.current = from;
-
-    this.duration = duration;
-    this.start = new Date().getTime();
-    this.timePassed = 0;
-};
-
-/**
- * Runs this animation.
- */
-SimileAjax.Graphics._Animation.prototype.run = function() {
-    var a = this;
-    window.setTimeout(function() { a.step(); }, 50);
-};
-
-/**
- * Increments this animation by one step, and then continues the animation with
- * <code>run()</code>.
- */
-SimileAjax.Graphics._Animation.prototype.step = function() {
-    this.timePassed += 50;
-
-    var timePassedFraction = this.timePassed / this.duration;
-    var parameterFraction = -Math.cos(timePassedFraction * Math.PI) / 2 + 0.5;
-    var current = parameterFraction * (this.to - this.from) + this.from;
-
-    try {
-        this.f(current, current - this.current);
-    } catch (e) {
-    }
-    this.current = current;
-
-    if (this.timePassed < this.duration) {
-        this.run();
-    } else {
-        this.f(this.to, 0);
-        this["cont"]();
-    }
-};
-
-/*
- *  CopyPasteButton
- *
- *  Adapted from http://spaces.live.com/editorial/rayozzie/demo/liveclip/liveclipsample/techPreview.html.
- *
- */
-
-/**
- * Creates a button and textarea for displaying structured data and copying it
- * to the clipboard.  The data is dynamically generated by the given
- * createDataFunction parameter.
- *
- * @param {String} image an image URL to use as the background for the
- *   generated box
- * @param {Number} width the width in pixels of the generated box
- * @param {Number} height the height in pixels of the generated box
- * @param {Function} createDataFunction a function that is called with no
- *   arguments to generate the structured data
- * @return a new DOM element
- */
-SimileAjax.Graphics.createStructuredDataCopyButton = function(image, width, height, createDataFunction) {
-    var div = document.createElement("div");
-    div.style.position = "relative";
-    div.style.display = "inline";
-    div.style.width = width + "px";
-    div.style.height = height + "px";
-    div.style.overflow = "hidden";
-    div.style.margin = "2px";
-
-    if (SimileAjax.Graphics.pngIsTranslucent) {
-        div.style.background = "url(" + image + ") no-repeat";
-    } else {
-        div.style.filter = "progid:DXImageTransform.Microsoft.AlphaImageLoader(src='" + image +"', sizingMethod='image')";
-    }
-
-    var style;
-    if (SimileAjax.Platform.browser.isIE) {
-        style = "filter:alpha(opacity=0)";
-    } else {
-        style = "opacity: 0";
-    }
-    div.innerHTML = "<textarea rows='1' autocomplete='off' value='none' style='" + style + "' />";
-
-    var textarea = div.firstChild;
-    textarea.style.width = width + "px";
-    textarea.style.height = height + "px";
-    textarea.onmousedown = function(evt) {
-        evt = (evt) ? evt : ((event) ? event : null);
-        if (evt.button == 2) {
-            textarea.value = createDataFunction();
-            textarea.select();
-        }
-    };
-
-    return div;
-};
-
-/*
- *  getWidthHeight
- *
- */
-SimileAjax.Graphics.getWidthHeight = function(el) {
-    // RETURNS hash {width:  w, height: h} in pixels
-
-    var w, h;
-    // offsetWidth rounds on FF, so doesn't work for us.
-    // See https://bugzilla.mozilla.org/show_bug.cgi?id=458617
-    if (el.getBoundingClientRect == null) {
-        // use offsetWidth
-        w = el.offsetWidth;
-        h = el.offsetHeight;
-    } else {
-        // use getBoundingClientRect
-        var rect = el.getBoundingClientRect();
-        w = Math.ceil(rect.right - rect.left);
-        h = Math.ceil(rect.bottom - rect.top);
-    }
-    return {
-        width:  w,
-        height: h
-    };
-};
-
-
-/*
- *  FontRenderingContext
- *
- */
-SimileAjax.Graphics.getFontRenderingContext = function(elmt, width) {
-    return new SimileAjax.Graphics._FontRenderingContext(elmt, width);
-};
-
-SimileAjax.Graphics._FontRenderingContext = function(elmt, width) {
-    this._elmt = elmt;
-    this._elmt.style.visibility = "hidden";
-    if (typeof width == "string") {
-        this._elmt.style.width = width;
-    } else if (typeof width == "number") {
-        this._elmt.style.width = width + "px";
-    }
-};
-
-SimileAjax.Graphics._FontRenderingContext.prototype.dispose = function() {
-    this._elmt = null;
-};
-
-SimileAjax.Graphics._FontRenderingContext.prototype.update = function() {
-    this._elmt.innerHTML = "A";
-    this._lineHeight = this._elmt.offsetHeight;
-};
-
-SimileAjax.Graphics._FontRenderingContext.prototype.computeSize = function(text, className) {
-    // className arg is optional
-    var el = this._elmt;
-    el.innerHTML = text;
-    el.className = className === undefined ? '' : className;
-    var wh = SimileAjax.Graphics.getWidthHeight(el);
-    el.className = ''; // reset for the next guy
-
-    return wh;
-};
-
-SimileAjax.Graphics._FontRenderingContext.prototype.getLineHeight = function() {
-    return this._lineHeight;
-};
-
-/**
- * @fileOverview A collection of date/time utility functions
- * @name SimileAjax.DateTime
- */
-
-SimileAjax.DateTime = new Object();
-
-SimileAjax.DateTime.MILLISECOND    = 0;
-SimileAjax.DateTime.SECOND         = 1;
-SimileAjax.DateTime.MINUTE         = 2;
-SimileAjax.DateTime.HOUR           = 3;
-SimileAjax.DateTime.DAY            = 4;
-SimileAjax.DateTime.WEEK           = 5;
-SimileAjax.DateTime.MONTH          = 6;
-SimileAjax.DateTime.YEAR           = 7;
-SimileAjax.DateTime.DECADE         = 8;
-SimileAjax.DateTime.CENTURY        = 9;
-SimileAjax.DateTime.MILLENNIUM     = 10;
-
-SimileAjax.DateTime.EPOCH          = -1;
-SimileAjax.DateTime.ERA            = -2;
-
-/**
- * An array of unit lengths, expressed in milliseconds, for the various units of
- * time.  The array indices are predefined and stored as properties of the
- * SimileAjax.DateTime object, e.g. SimileAjax.DateTime.YEAR.
- * @type Array
- */
-SimileAjax.DateTime.gregorianUnitLengths = [];
-    (function() {
-        var d = SimileAjax.DateTime;
-        var a = d.gregorianUnitLengths;
-
-        a[d.MILLISECOND] = 1;
-        a[d.SECOND]      = 1000;
-        a[d.MINUTE]      = a[d.SECOND] * 60;
-        a[d.HOUR]        = a[d.MINUTE] * 60;
-        a[d.DAY]         = a[d.HOUR] * 24;
-        a[d.WEEK]        = a[d.DAY] * 7;
-        a[d.MONTH]       = a[d.DAY] * 31;
-        a[d.YEAR]        = a[d.DAY] * 365;
-        a[d.DECADE]      = a[d.YEAR] * 10;
-        a[d.CENTURY]     = a[d.YEAR] * 100;
-        a[d.MILLENNIUM]  = a[d.YEAR] * 1000;
-    })();
-
-SimileAjax.DateTime._dateRegexp = new RegExp(
-    "^(-?)([0-9]{4})(" + [
-        "(-?([0-9]{2})(-?([0-9]{2}))?)", // -month-dayOfMonth
-        "(-?([0-9]{3}))",                // -dayOfYear
-        "(-?W([0-9]{2})(-?([1-7]))?)"    // -Wweek-dayOfWeek
-    ].join("|") + ")?$"
-);
-SimileAjax.DateTime._timezoneRegexp = new RegExp(
-    "Z|(([-+])([0-9]{2})(:?([0-9]{2}))?)$"
-);
-SimileAjax.DateTime._timeRegexp = new RegExp(
-    "^([0-9]{2})(:?([0-9]{2})(:?([0-9]{2})(\.([0-9]+))?)?)?$"
-);
-
-/**
- * Takes a date object and a string containing an ISO 8601 date and sets the
- * date using information parsed from the string.  Note that this method
- * does not parse any time information.
- *
- * @param {Date} dateObject the date object to modify
- * @param {String} string an ISO 8601 string to parse
- * @return {Date} the modified date object
- */
-SimileAjax.DateTime.setIso8601Date = function(dateObject, string) {
-    /*
-     *  This function has been adapted from dojo.date, v.0.3.0
-     *  http://dojotoolkit.org/.
-     */
-
-    var d = string.match(SimileAjax.DateTime._dateRegexp);
-    if(!d) {
-        throw new Error("Invalid date string: " + string);
-    }
-
-    var sign = (d[1] == "-") ? -1 : 1; // BC or AD
-    var year = sign * d[2];
-    var month = d[5];
-    var date = d[7];
-    var dayofyear = d[9];
-    var week = d[11];
-    var dayofweek = (d[13]) ? d[13] : 1;
-
-    dateObject.setUTCFullYear(year);
-    if (dayofyear) {
-        dateObject.setUTCMonth(0);
-        dateObject.setUTCDate(Number(dayofyear));
-    } else if (week) {
-        dateObject.setUTCMonth(0);
-        dateObject.setUTCDate(1);
-        var gd = dateObject.getUTCDay();
-        var day =  (gd) ? gd : 7;
-        var offset = Number(dayofweek) + (7 * Number(week));
-
-        if (day <= 4) {
-            dateObject.setUTCDate(offset + 1 - day);
-        } else {
-            dateObject.setUTCDate(offset + 8 - day);
-        }
-    } else {
-        if (month) {
-            dateObject.setUTCDate(1);
-            dateObject.setUTCMonth(month - 1);
-        }
-        if (date) {
-            dateObject.setUTCDate(date);
-        }
-    }
-
-    return dateObject;
-};
-
-/**
- * Takes a date object and a string containing an ISO 8601 time and sets the
- * time using information parsed from the string.  Note that this method
- * does not parse any date information.
- *
- * @param {Date} dateObject the date object to modify
- * @param {String} string an ISO 8601 string to parse
- * @return {Date} the modified date object
- */
-SimileAjax.DateTime.setIso8601Time = function (dateObject, string) {
-    /*
-     *  This function has been adapted from dojo.date, v.0.3.0
-     *  http://dojotoolkit.org/.
-     */
-
-    var d = string.match(SimileAjax.DateTime._timeRegexp);
-    if(!d) {
-        SimileAjax.Debug.warn("Invalid time string: " + string);
-        return false;
-    }
-    var hours = d[1];
-    var mins = Number((d[3]) ? d[3] : 0);
-    var secs = (d[5]) ? d[5] : 0;
-    var ms = d[7] ? (Number("0." + d[7]) * 1000) : 0;
-
-    dateObject.setUTCHours(hours);
-    dateObject.setUTCMinutes(mins);
-    dateObject.setUTCSeconds(secs);
-    dateObject.setUTCMilliseconds(ms);
-
-    return dateObject;
-};
-
-/**
- * The timezone offset in minutes in the user's browser.
- * @type Number
- */
-SimileAjax.DateTime.timezoneOffset = new Date().getTimezoneOffset();
-
-/**
- * Takes a date object and a string containing an ISO 8601 date and time and
- * sets the date object using information parsed from the string.
- *
- * @param {Date} dateObject the date object to modify
- * @param {String} string an ISO 8601 string to parse
- * @return {Date} the modified date object
- */
-SimileAjax.DateTime.setIso8601 = function (dateObject, string){
-    /*
-     *  This function has been adapted from dojo.date, v.0.3.0
-     *  http://dojotoolkit.org/.
-     */
-
-    var offset = null;
-    var comps = (string.indexOf("T") == -1) ? string.split(" ") : string.split("T");
-
-    SimileAjax.DateTime.setIso8601Date(dateObject, comps[0]);
-    if (comps.length == 2) {
-        // first strip timezone info from the end
-        var d = comps[1].match(SimileAjax.DateTime._timezoneRegexp);
-        if (d) {
-            if (d[0] == 'Z') {
-                offset = 0;
-            } else {
-                offset = (Number(d[3]) * 60) + Number(d[5]);
-                offset *= ((d[2] == '-') ? 1 : -1);
-            }
-            comps[1] = comps[1].substr(0, comps[1].length - d[0].length);
-        }
-
-        SimileAjax.DateTime.setIso8601Time(dateObject, comps[1]);
-    }
-    if (offset == null) {
-        offset = dateObject.getTimezoneOffset(); // local time zone if no tz info
-    }
-    dateObject.setTime(dateObject.getTime() + offset * 60000);
-
-    return dateObject;
-};
-
-/**
- * Takes a string containing an ISO 8601 date and returns a newly instantiated
- * date object with the parsed date and time information from the string.
- *
- * @param {String} string an ISO 8601 string to parse
- * @return {Date} a new date object created from the string
- */
-SimileAjax.DateTime.parseIso8601DateTime = function (string) {
-    try {
-        return SimileAjax.DateTime.setIso8601(new Date(0), string);
-    } catch (e) {
-        return null;
-    }
-};
-
-/**
- * Takes a string containing a Gregorian date and time and returns a newly
- * instantiated date object with the parsed date and time information from the
- * string.  If the param is already an instance of Date rather than a string,
- * the given date is simply returned.
- *
- * @param {Object} o an object, to either return or parse as a string
- * @return {Date} the date object
- */
-SimileAjax.DateTime.parseGregorianDateTime = function(o) {
-    if (o == null) {
-        return null;
-    } else if (o instanceof Date) {
-        return o;
-    }
-
-    var s = o.toString();
-    if (s.length > 0 && s.length < 8) {
-        var space = s.indexOf(" ");
-        if (space > 0) {
-            var year = parseInt(s.substr(0, space));
-            var suffix = s.substr(space + 1);
-            if (suffix.toLowerCase() == "bc") {
-                year = 1 - year;
-            }
-        } else {
-            var year = parseInt(s);
-        }
-
-        var d = new Date(0);
-        d.setUTCFullYear(year);
-
-        return d;
-    }
-
-    try {
-        return new Date(Date.parse(s));
-    } catch (e) {
-        return null;
-    }
-};
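// Illustrative sketch: the two parsers above in use. Each returns a plain Date,
// or null when parsing fails.
var d1 = SimileAjax.DateTime.parseIso8601DateTime("2015-06-22T14:27:37Z");
var d2 = SimileAjax.DateTime.parseGregorianDateTime("1000 BC");   // Date with UTC year -999
var d3 = SimileAjax.DateTime.parseGregorianDateTime(new Date());  // Dates are returned as-is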
-
-/**
- * Rounds date objects down to the nearest interval or multiple of an interval.
- * This method modifies the given date object, converting it to the given
- * timezone if specified.
- *
- * @param {Date} date the date object to round
- * @param {Number} intervalUnit a constant, integer index specifying an
- *   interval, e.g. SimileAjax.DateTime.HOUR
- * @param {Number} timeZone a timezone shift, given in hours
- * @param {Number} multiple a multiple of the interval to round by
- * @param {Number} firstDayOfWeek an integer specifying the first day of the
- *   week, 0 corresponds to Sunday, 1 to Monday, etc.
- */
-SimileAjax.DateTime.roundDownToInterval = function(date, intervalUnit, timeZone, multiple, firstDayOfWeek) {
-    var timeShift = timeZone *
-        SimileAjax.DateTime.gregorianUnitLengths[SimileAjax.DateTime.HOUR];
-
-    var date2 = new Date(date.getTime() + timeShift);
-    var clearInDay = function(d) {
-        d.setUTCMilliseconds(0);
-        d.setUTCSeconds(0);
-        d.setUTCMinutes(0);
-        d.setUTCHours(0);
-    };
-    var clearInYear = function(d) {
-        clearInDay(d);
-        d.setUTCDate(1);
-        d.setUTCMonth(0);
-    };
-
-    switch(intervalUnit) {
-    case SimileAjax.DateTime.MILLISECOND:
-        var x = date2.getUTCMilliseconds();
-        date2.setUTCMilliseconds(x - (x % multiple));
-        break;
-    case SimileAjax.DateTime.SECOND:
-        date2.setUTCMilliseconds(0);
-
-        var x = date2.getUTCSeconds();
-        date2.setUTCSeconds(x - (x % multiple));
-        break;
-    case SimileAjax.DateTime.MINUTE:
-        date2.setUTCMilliseconds(0);
-        date2.setUTCSeconds(0);
-
-        var x = date2.getUTCMinutes();
-        date2.setTime(date2.getTime() -
-            (x % multiple) * SimileAjax.DateTime.gregorianUnitLengths[SimileAjax.DateTime.MINUTE]);
-        break;
-    case SimileAjax.DateTime.HOUR:
-        date2.setUTCMilliseconds(0);
-        date2.setUTCSeconds(0);
-        date2.setUTCMinutes(0);
-
-        var x = date2.getUTCHours();
-        date2.setUTCHours(x - (x % multiple));
-        break;
-    case SimileAjax.DateTime.DAY:
-        clearInDay(date2);
-        break;
-    case SimileAjax.DateTime.WEEK:
-        clearInDay(date2);
-        var d = (date2.getUTCDay() + 7 - firstDayOfWeek) % 7;
-        date2.setTime(date2.getTime() -
-            d * SimileAjax.DateTime.gregorianUnitLengths[SimileAjax.DateTime.DAY]);
-        break;
-    case SimileAjax.DateTime.MONTH:
-        clearInDay(date2);
-        date2.setUTCDate(1);
-
-        var x = date2.getUTCMonth();
-        date2.setUTCMonth(x - (x % multiple));
-        break;
-    case SimileAjax.DateTime.YEAR:
-        clearInYear(date2);
-
-        var x = date2.getUTCFullYear();
-        date2.setUTCFullYear(x - (x % multiple));
-        break;
-    case SimileAjax.DateTime.DECADE:
-        clearInYear(date2);
-        date2.setUTCFullYear(Math.floor(date2.getUTCFullYear() / 10) * 10);
-        break;
-    case SimileAjax.DateTime.CENTURY:
-        clearInYear(date2);
-        date2.setUTCFullYear(Math.floor(date2.getUTCFullYear() / 100) * 100);
-        break;
-    case SimileAjax.DateTime.MILLENNIUM:
-        clearInYear(date2);
-        date2.setUTCFullYear(Math.floor(date2.getUTCFullYear() / 1000) * 1000);
-        break;
-    }
-
-    date.setTime(date2.getTime() - timeShift);
-};
-
-/**
- * Rounds date objects up to the nearest interval or multiple of an interval.
- * This method modifies the given date object, converting it to the given
- * timezone if specified.
- *
- * @param {Date} date the date object to round
- * @param {Number} intervalUnit a constant, integer index specifying an
- *   interval, e.g. SimileAjax.DateTime.HOUR
- * @param {Number} timeZone a timezone shift, given in hours
- * @param {Number} multiple a multiple of the interval to round by
- * @param {Number} firstDayOfWeek an integer specifying the first day of the
- *   week, 0 corresponds to Sunday, 1 to Monday, etc.
- * @see SimileAjax.DateTime.roundDownToInterval
- */
-SimileAjax.DateTime.roundUpToInterval = function(date, intervalUnit, timeZone, multiple, firstDayOfWeek) {
-    var originalTime = date.getTime();
-    SimileAjax.DateTime.roundDownToInterval(date, intervalUnit, timeZone, multiple, firstDayOfWeek);
-    if (date.getTime() < originalTime) {
-        date.setTime(date.getTime() +
-            SimileAjax.DateTime.gregorianUnitLengths[intervalUnit] * multiple);
-    }
-};
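// Illustrative sketch: snapping a date to interval boundaries with the two rounding
// helpers above (timeZone 0, multiple 1, week starting on Sunday).
var d = new Date(Date.UTC(2015, 5, 22, 14, 27, 37));
SimileAjax.DateTime.roundDownToInterval(d, SimileAjax.DateTime.HOUR, 0, 1, 0);
// d is now 2015-06-22T14:00:00Z
SimileAjax.DateTime.roundUpToInterval(d, SimileAjax.DateTime.DAY, 0, 1, 0);
// d is now 2015-06-23T00:00:00Z; a date already on the boundary is left unchanged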
-
-/**
- * Increments a date object by a specified interval, taking into
- * consideration the timezone.
- *
- * @param {Date} date the date object to increment
- * @param {Number} intervalUnit a constant, integer index specifying an
- *   interval, e.g. SimileAjax.DateTime.HOUR
- * @param {Number} timeZone the timezone offset in hours
- */
-SimileAjax.DateTime.incrementByInterval = function(date, intervalUnit, timeZone) {
-    timeZone = (typeof timeZone == 'undefined') ? 0 : timeZone;
-
-    var timeShift = timeZone *
-        SimileAjax.DateTime.gregorianUnitLengths[SimileAjax.DateTime.HOUR];
-
-    var date2 = new Date(date.getTime() + timeShift);
-
-    switch(intervalUnit) {
-    case SimileAjax.DateTime.MILLISECOND:
-        date2.setTime(date2.getTime() + 1)
-        break;
-    case SimileAjax.DateTime.SECOND:
-        date2.setTime(date2.getTime() + 1000);
-        break;
-    case SimileAjax.DateTime.MINUTE:
-        date2.setTime(date2.getTime() +
-            SimileAjax.DateTime.gregorianUnitLengths[SimileAjax.DateTime.MINUTE]);
-        break;
-    case SimileAjax.DateTime.HOUR:
-        date2.setTime(date2.getTime() +
-            SimileAjax.DateTime.gregorianUnitLengths[SimileAjax.DateTime.HOUR]);
-        break;
-    case SimileAjax.DateTime.DAY:
-        date2.setUTCDate(date2.getUTCDate() + 1);
-        break;
-    case SimileAjax.DateTime.WEEK:
-        date2.setUTCDate(date2.getUTCDate() + 7);
-        break;
-    case SimileAjax.DateTime.MONTH:
-        date2.setUTCMonth(date2.getUTCMonth() + 1);
-        break;
-    case SimileAjax.DateTime.YEAR:
-        date2.setUTCFullYear(date2.getUTCFullYear() + 1);
-        break;
-    case SimileAjax.DateTime.DECADE:
-        date2.setUTCFullYear(date2.getUTCFullYear() + 10);
-        break;
-    case SimileAjax.DateTime.CENTURY:
-        date2.setUTCFullYear(date2.getUTCFullYear() + 100);
-        break;
-    case SimileAjax.DateTime.MILLENNIUM:
-        date2.setUTCFullYear(date2.getUTCFullYear() + 1000);
-        break;
-    }
-
-    date.setTime(date2.getTime() - timeShift);
-};
-
-/**
- * Returns a new date object with the given time offset removed.
- *
- * @param {Date} date the starting date
- * @param {Number} timeZone a timezone specified in an hour offset to remove
- * @return {Date} a new date object with the offset removed
- */
-SimileAjax.DateTime.removeTimeZoneOffset = function(date, timeZone) {
-    return new Date(date.getTime() +
-        timeZone * SimileAjax.DateTime.gregorianUnitLengths[SimileAjax.DateTime.HOUR]);
-};
-
-/**
- * Returns the timezone of the user's browser.
- *
- * @return {Number} the timezone in the user's locale in hours
- */
-SimileAjax.DateTime.getTimezone = function() {
-    var d = new Date().getTimezoneOffset();
-    return d / -60;
-};
-/*
- *  String Utility Functions and Constants
- *
- */
-
-String.prototype.trim = function() {
-    return this.replace(/^\s+|\s+$/g, '');
-};
-
-String.prototype.startsWith = function(prefix) {
-    return this.length >= prefix.length && this.substr(0, prefix.length) == prefix;
-};
-
-String.prototype.endsWith = function(suffix) {
-    return this.length >= suffix.length && this.substr(this.length - suffix.length) == suffix;
-};
-
-String.substitute = function(s, objects) {
-    var result = "";
-    var start = 0;
-    while (start < s.length - 1) {
-        var percent = s.indexOf("%", start);
-        if (percent < 0 || percent == s.length - 1) {
-            break;
-        } else if (percent > start && s.charAt(percent - 1) == "\\") {
-            result += s.substring(start, percent - 1) + "%";
-            start = percent + 1;
-        } else {
-            var n = parseInt(s.charAt(percent + 1));
-            if (isNaN(n) || n >= objects.length) {
-                result += s.substring(start, percent + 2);
-            } else {
-                result += s.substring(start, percent) + objects[n].toString();
-            }
-            start = percent + 2;
-        }
-    }
-
-    if (start < s.length) {
-        result += s.substring(start);
-    }
-    return result;
-};
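// Illustrative sketch: the %-placeholder substitution helper above in action.
var msg = String.substitute("Deleted %0 of %1 events", [3, 10]);
// msg == "Deleted 3 of 10 events"; a backslash before % ("\\%") keeps a literal percent sign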
-/*
- *  HTML Utility Functions
- *
- */
-
-SimileAjax.HTML = new Object();
-
-SimileAjax.HTML._e2uHash = {};
-(function() {
-    var e2uHash = SimileAjax.HTML._e2uHash;
-    e2uHash['nbsp']= '\u00A0';
-    e2uHash['iexcl']= '\u00A1';
-    e2uHash['cent']= '\u00A2';
-    e2uHash['pound']= '\u00A3';
-    e2uHash['curren']= '\u00A4';
-    e2uHash['yen']= '\u00A5';
-    e2uHash['brvbar']= '\u00A6';
-    e2uHash['sect']= '\u00A7';
-    e2uHash['uml']= '\u00A8';
-    e2uHash['copy']= '\u00A9';
-    e2uHash['ordf']= '\u00AA';
-    e2uHash['laquo']= '\u00AB';
-    e2uHash['not']= '\u00AC';
-    e2uHash['shy']= '\u00AD';
-    e2uHash['reg']= '\u00AE';
-    e2uHash['macr']= '\u00AF';
-    e2uHash['deg']= '\u00B0';
-    e2uHash['plusmn']= '\u00B1';
-    e2uHash['sup2']= '\u00B2';
-    e2uHash['sup3']= '\u00B3';
-    e2uHash['acute']= '\u00B4';
-    e2uHash['micro']= '\u00B5';
-    e2uHash['para']= '\u00B6';
-    e2uHash['middot']= '\u00B7';
-    e2uHash['cedil']= '\u00B8';
-    e2uHash['sup1']= '\u00B9';
-    e2uHash['ordm']= '\u00BA';
-    e2uHash['raquo']= '\u00BB';
-    e2uHash['frac14']= '\u00BC';
-    e2uHash['frac12']= '\u00BD';
-    e2uHash['frac34']= '\u00BE';
-    e2uHash['iquest']= '\u00BF';
-    e2uHash['Agrave']= '\u00C0';
-    e2uHash['Aacute']= '\u00C1';
-    e2uHash['Acirc']= '\u00C2';
-    e2uHash['Atilde']= '\u00C3';
-    e2uHash['Auml']= '\u00C4';
-    e2uHash['Aring']= '\u00C5';
-    e2uHash['AElig']= '\u00C6';
-    e2uHash['Ccedil']= '\u00C7';
-    e2uHash['Egrave']= '\u00C8';
-    e2uHash['Eacute']= '\u00C9';
-    e2uHash['Ecirc']= '\u00CA';
-    e2uHash['Euml']= '\u00CB';
-    e2uHash['Igrave']= '\u00CC';
-    e2uHash['Iacute']= '\u00CD';
-    e2uHash['Icirc']= '\u00CE';
-    e2uHash['Iuml']= '\u00CF';
-    e2uHash['ETH']= '\u00D0';
-    e2uHash['Ntilde']= '\u00D1';
-    e2uHash['Ograve']= '\u00D2';
-    e2uHash['Oacute']= '\u00D3';
-    e2uHash['Ocirc']= '\u00D4';
-    e2uHash['Otilde']= '\u00D5';
-    e2uHash['Ouml']= '\u00D6';
-    e2uHash['times']= '\u00D7';
-    e2uHash['Oslash']= '\u00D8';
-    e2uHash['Ugrave']= '\u00D9';
-    e2uHash['Uacute']= '\u00DA';
-    e2uHash['Ucirc']= '\u00DB';
-    e2uHash['Uuml']= '\u00DC';
-    e2uHash['Yacute']= '\u00DD';
-    e2uHash['THORN']= '\u00DE';
-    e2uHash['szlig']= '\u00DF';
-    e2uHash['agrave']= '\u00E0';
-    e2uHash['aacute']= '\u00E1';
-    e2uHash['acirc']= '\u00E2';
-    e2uHash['atilde']= '\u00E3';
-    e2uHash['auml']= '\u00E4';
-    e2uHash['aring']= '\u00E5';
-    e2uHash['aelig']= '\u00E6';
-    e2uHash['ccedil']= '\u00E7';
-    e2uHash['egrave']= '\u00E8';
-    e2uHash['eacute']= '\u00E9';
-    e2uHash['ecirc']= '\u00EA';
-    e2uHash['euml']= '\u00EB';
-    e2uHash['igrave']= '\u00EC';
-    e2uHash['iacute']= '\u00ED';
-    e2uHash['icirc']= '\u00EE';
-    e2uHash['iuml']= '\u00EF';
-    e2uHash['eth']= '\u00F0';
-    e2uHash['ntilde']= '\u00F1';
-    e2uHash['ograve']= '\u00F2';
-    e2uHash['oacute']= '\u00F3';
-    e2uHash['ocirc']= '\u00F4';
-    e2uHash['otilde']= '\u00F5';
-    e2uHash['ouml']= '\u00F6';
-    e2uHash['divide']= '\u00F7';
-    e2uHash['oslash']= '\u00F8';
-    e2uHash['ugrave']= '\u00F9';
-    e2uHash['uacute']= '\u00FA';
-    e2uHash['ucirc']= '\u00FB';
-    e2uHash['uuml']= '\u00FC';
-    e2uHash['yacute']= '\u00FD';
-    e2uHash['thorn']= '\u00FE';
-    e2uHash['yuml']= '\u00FF';
-    e2uHash['quot']= '\u0022';
-    e2uHash['amp']= '\u0026';
-    e2uHash['lt']= '\u003C';
-    e2uHash['gt']= '\u003E';
-    e2uHash['OElig']= '\u0152';
-    e2uHash['oelig']= '\u0153';
-    e2uHash['Scaron']= '\u0160';
-    e2uHash['scaron']= '\u0161';
-    e2uHash['Yuml']= '\u0178';
-    e2uHash['circ']= '\u02C6';
-    e2uHash['tilde']= '\u02DC';
-    e2uHash['ensp']= '\u2002';
-    e2uHash['emsp']= '\u2003';
-    e2uHash['thinsp']= '\u2009';
-    e2uHash['zwnj']= '\u200C';
-    e2uHash['zwj']= '\u200D';
-    e2uHash['lrm']= '\u200E';
-    e2uHash['rlm']= '\u200F';
-    e2uHash['ndash']= '\u2013';
-    e2uHash['mdash']= '\u2014';
-    e2uHash['lsquo']= '\u2018';
-    e2uHash['rsquo']= '\u2019';
-    e2uHash['sbquo']= '\u201A';
-    e2uHash['ldquo']= '\u201C';
-    e2uHash['rdquo']= '\u201D';
-    e2uHash['bdquo']= '\u201E';
-    e2uHash['dagger']= '\u2020';
-    e2uHash['Dagger']= '\u2021';
-    e2uHash['permil']= '\u2030';
-    e2uHash['lsaquo']= '\u2039';
-    e2uHash['rsaquo']= '\u203A';
-    e2uHash['euro']= '\u20AC';
-    e2uHash['fnof']= '\u0192';
-    e2uHash['Alpha']= '\u0391';
-    e2uHash['Beta']= '\u0392';
-    e2uHash['Gamma']= '\u0393';
-    e2uHash['Delta']= '\u0394';
-    e2uHash['Epsilon']= '\u0395';
-    e2uHash['Zeta']= '\u0396';
-    e2uHash['Eta']= '\u0397';
-    e2uHash['Theta']= '\u0398';
-    e2uHash['Iota']= '\u0399';
-    e2uHash['Kappa']= '\u039A';
-    e2uHash['Lambda']= '\u039B';
-    e2uHash['Mu']= '\u039C';
-    e2uHash['Nu']= '\u039D';
-    e2uHash['Xi']= '\u039E';
-    e2uHash['Omicron']= '\u039F';
-    e2uHash['Pi']= '\u03A0';
-    e2uHash['Rho']= '\u03A1';
-    e2uHash['Sigma']= '\u03A3';
-    e2uHash['Tau']= '\u03A4';
-    e2uHash['Upsilon']= '\u03A5';
-    e2uHash['Phi']= '\u03A6';
-    e2uHash['Chi']= '\u03A7';
-    e2uHash['Psi']= '\u03A8';
-    e2uHash['Omega']= '\u03A9';
-    e2uHash['alpha']= '\u03B1';
-    e2uHash['beta']= '\u03B2';
-    e2uHash['gamma']= '\u03B3';
-    e2uHash['delta']= '\u03B4';
-    e2uHash['epsilon']= '\u03B5';
-    e2uHash['zeta']= '\u03B6';
-    e2uHash['eta']= '\u03B7';
-    e2uHash['theta']= '\u03B8';
-    e2uHash['iota']= '\u03B9';
-    e2uHash['kappa']= '\u03BA';
-    e2uHash['lambda']= '\u03BB';
-    e2uHash['mu']= '\u03BC';
-    e2uHash['nu']= '\u03BD';
-    e2uHash['xi']= '\u03BE';
-    e2uHash['omicron']= '\u03BF';
-    e2uHash['pi']= '\u03C0';
-    e2uHash['rho']= '\u03C1';
-    e2uHash['sigmaf']= '\u03C2';
-    e2uHash['sigma']= '\u03C3';
-    e2uHash['tau']= '\u03C4';
-    e2uHash['upsilon']= '\u03C5';
-    e2uHash['phi']= '\u03C6';
-    e2uHash['chi']= '\u03C7';
-    e2uHash['psi']= '\u03C8';
-    e2uHash['omega']= '\u03C9';
-    e2uHash['thetasym']= '\u03D1';
-    e2uHash['upsih']= '\u03D2';
-    e2uHash['piv']= '\u03D6';
-    e2uHash['bull']= '\u2022';
-    e2uHash['hellip']= '\u2026';
-    e2uHash['prime']= '\u2032';
-    e2uHash['Prime']= '\u2033';
-    e2uHash['oline']= '\u203E';
-    e2uHash['frasl']= '\u2044';
-    e2uHash['weierp']= '\u2118';
-    e2uHash['image']= '\u2111';
-    e2uHash['real']= '\u211C';
-    e2uHash['trade']= '\u2122';
-    e2uHash['alefsym']= '\u2135';
-    e2uHash['larr']= '\u2190';
-    e2uHash['uarr']= '\u2191';
-    e2uHash['rarr']= '\u2192';
-    e2uHash['darr']= '\u2193';
-    e2uHash['harr']= '\u2194';
-    e2uHash['crarr']= '\u21B5';
-    e2uHash['lArr']= '\u21D0';
-    e2uHash['uArr']= '\u21D1';
-    e2uHash['rArr']= '\u21D2';
-    e2uHash['dArr']= '\u21D3';
-    e2uHash['hArr']= '\u21D4';
-    e2uHash['forall']= '\u2200';
-    e2uHash['part']= '\u2202';
-    e2uHash['exist']= '\u2203';
-    e2uHash['empty']= '\u2205';
-    e2uHash['nabla']= '\u2207';
-    e2uHash['isin']= '\u2208';
-    e2uHash['notin']= '\u2209';
-    e2uHash['ni']= '\u220B';
-    e2uHash['prod']= '\u220F';
-    e2uHash['sum']= '\u2211';
-    e2uHash['minus']= '\u2212';
-    e2uHash['lowast']= '\u2217';
-    e2uHash['radic']= '\u221A';
-    e2uHash['prop']= '\u221D';
-    e2uHash['infin']= '\u221E';
-    e2uHash['ang']= '\u2220';
-    e2uHash['and']= '\u2227';
-    e2uHash['or']= '\u2228';
-    e2uHash['cap']= '\u2229';
-    e2uHash['cup']= '\u222A';
-    e2uHash['int']= '\u222B';
-    e2uHash['there4']= '\u2234';
-    e2uHash['sim']= '\u223C';
-    e2uHash['cong']= '\u2245';
-    e2uHash['asymp']= '\u2248';
-    e2uHash['ne']= '\u2260';
-    e2uHash['equiv']= '\u2261';
-    e2uHash['le']= '\u2264';
-    e2uHash['ge']= '\u2265';
-    e2uHash['sub']= '\u2282';
-    e2uHash['sup']= '\u2283';
-    e2uHash['nsub']= '\u2284';
-    e2uHash['sube']= '\u2286';
-    e2uHash['supe']= '\u2287';
-    e2uHash['oplus']= '\u2295';
-    e2uHash['otimes']= '\u2297';
-    e2uHash['perp']= '\u22A5';
-    e2uHash['sdot']= '\u22C5';
-    e2uHash['lceil']= '\u2308';
-    e2uHash['rceil']= '\u2309';
-    e2uHash['lfloor']= '\u230A';
-    e2uHash['rfloor']= '\u230B';
-    e2uHash['lang']= '\u2329';
-    e2uHash['rang']= '\u232A';
-    e2uHash['loz']= '\u25CA';
-    e2uHash['spades']= '\u2660';
-    e2uHash['clubs']= '\u2663';
-    e2uHash['hearts']= '\u2665';
-    e2uHash['diams']= '\u2666';
-})();
-
-SimileAjax.HTML.deEntify = function(s) {
-    var e2uHash = SimileAjax.HTML._e2uHash;
-
-    var re = /&(\w+?);/;
-    while (re.test(s)) {
-        var m = s.match(re);
-        s = s.replace(re, e2uHash[m[1]]);
-    }
-    return s;
-};
-/**
- * A basic set (in the mathematical sense) data structure
- *
- * @constructor
- * @param {Array|SimileAjax.Set} [a] an initial collection
- */
-SimileAjax.Set = function(a) {
-    this._hash = {};
-    this._count = 0;
-
-    if (a instanceof Array) {
-        for (var i = 0; i < a.length; i++) {
-            this.add(a[i]);
-        }
-    } else if (a instanceof SimileAjax.Set) {
-        this.addSet(a);
-    }
-}
-
-/**
- * Adds the given object to this set, assuming it does not already exist
- *
- * @param {Object} o the object to add
- * @return {Boolean} true if the object was added, false if not
- */
-SimileAjax.Set.prototype.add = function(o) {
-    if (!(o in this._hash)) {
-        this._hash[o] = true;
-        this._count++;
-        return true;
-    }
-    return false;
-}
-
-/**
- * Adds each element in the given set to this set
- *
- * @param {SimileAjax.Set} set the set of elements to add
- */
-SimileAjax.Set.prototype.addSet = function(set) {
-    for (var o in set._hash) {
-        this.add(o);
-    }
-}
-
-/**
- * Removes the given element from this set
- *
- * @param {Object} o the object to remove
- * @return {Boolean} true if the object was successfully removed,
- *   false otherwise
- */
-SimileAjax.Set.prototype.remove = function(o) {
-    if (o in this._hash) {
-        delete this._hash[o];
-        this._count--;
-        return true;
-    }
-    return false;
-}
-
-/**
- * Removes the elements in this set that correspond to the elements in the
- * given set
- *
- * @param {SimileAjax.Set} set the set of elements to remove
- */
-SimileAjax.Set.prototype.removeSet = function(set) {
-    for (var o in set._hash) {
-        this.remove(o);
-    }
-}
-
-/**
- * Removes all elements in this set that are not present in the given set, i.e.
- * modifies this set to the intersection of the two sets
- *
- * @param {SimileAjax.Set} set the set to intersect
- */
-SimileAjax.Set.prototype.retainSet = function(set) {
-    for (var o in this._hash) {
-        if (!set.contains(o)) {
-            delete this._hash[o];
-            this._count--;
-        }
-    }
-}
-
-/**
- * Returns whether or not the given element exists in this set
- *
- * @param {Object} o the object to test for
- * @return {Boolean} true if the object is present, false otherwise
- */
-SimileAjax.Set.prototype.contains = function(o) {
-    return (o in this._hash);
-}
-
-/**
- * Returns the number of elements in this set
- *
- * @return {Number} the number of elements in this set
- */
-SimileAjax.Set.prototype.size = function() {
-    return this._count;
-}
-
-/**
- * Returns the elements of this set as an array
- *
- * @return {Array} a new array containing the elements of this set
- */
-SimileAjax.Set.prototype.toArray = function() {
-    var a = [];
-    for (var o in this._hash) {
-        a.push(o);
-    }
-    return a;
-}
-
-/**
- * Iterates through the elements of this set, order unspecified, executing the
- * given function on each element until the function returns true
- *
- * @param {Function} f a function of form f(element)
- */
-SimileAjax.Set.prototype.visit = function(f) {
-    for (var o in this._hash) {
-        if (f(o) == true) {
-            break;
-        }
-    }
-}
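// Illustrative sketch: the set type above in action. Elements are stored as object
// keys, so they are coerced to strings.
var seen = new SimileAjax.Set(["a", "b"]);
seen.add("c");        // true  (newly added)
seen.add("a");        // false (already present)
seen.contains("b");   // true
seen.size();          // 3
seen.toArray();       // ["a", "b", "c"] (order unspecified)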
-
-/**
- * A sorted array data structure
- *
- * @constructor
- */
-SimileAjax.SortedArray = function(compare, initialArray) {
-    this._a = (initialArray instanceof Array) ? initialArray : [];
-    this._compare = compare;
-};
-
-SimileAjax.SortedArray.prototype.add = function(elmt) {
-    var sa = this;
-    var index = this.find(function(elmt2) {
-        return sa._compare(elmt2, elmt);
-    });
-
-    if (index < this._a.length) {
-        this._a.splice(index, 0, elmt);
-    } else {
-        this._a.push(elmt);
-    }
-};
-
-SimileAjax.SortedArray.prototype.remove = function(elmt) {
-    var sa = this;
-    var index = this.find(function(elmt2) {
-        return sa._compare(elmt2, elmt);
-    });
-
-    while (index < this._a.length && this._compare(this._a[index], elmt) == 0) {
-        if (this._a[index] == elmt) {
-            this._a.splice(index, 1);
-            return true;
-        } else {
-            index++;
-        }
-    }
-    return false;
-};
-
-SimileAjax.SortedArray.prototype.removeAll = function() {
-    this._a = [];
-};
-
-SimileAjax.SortedArray.prototype.elementAt = function(index) {
-    return this._a[index];
-};
-
-SimileAjax.SortedArray.prototype.length = function() {
-    return this._a.length;
-};
-
-SimileAjax.SortedArray.prototype.find = function(compare) {
-    var a = 0;
-    var b = this._a.length;
-
-    while (a < b) {
-        var mid = Math.floor((a + b) / 2);
-        var c = compare(this._a[mid]);
-        if (mid == a) {
-            return c < 0 ? a+1 : a;
-        } else if (c < 0) {
-            a = mid;
-        } else {
-            b = mid;
-        }
-    }
-    return a;
-};
-
-SimileAjax.SortedArray.prototype.getFirst = function() {
-    return (this._a.length > 0) ? this._a[0] : null;
-};
-
-SimileAjax.SortedArray.prototype.getLast = function() {
-    return (this._a.length > 0) ? this._a[this._a.length - 1] : null;
-};
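// Illustrative sketch: keeping numbers ordered with the sorted-array type above; the
// compare function follows the usual (a - b) convention.
var numbers = new SimileAjax.SortedArray(function(a, b) { return a - b; });
numbers.add(5);
numbers.add(1);
numbers.add(3);       // internal array is now [1, 3, 5]
numbers.getFirst();   // 1
numbers.getLast();    // 5
numbers.remove(3);    // true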
-
-/*
- *  Event Index
- *
- */
-
-SimileAjax.EventIndex = function(unit) {
-    var eventIndex = this;
-
-    this._unit = (unit != null) ? unit : SimileAjax.NativeDateUnit;
-    this._events = new SimileAjax.SortedArray(
-        function(event1, event2) {
-            return eventIndex._unit.compare(event1.getStart(), event2.getStart());
-        }
-    );
-    this._idToEvent = {};
-    this._indexed = true;
-};
-
-SimileAjax.EventIndex.prototype.getUnit = function() {
-    return this._unit;
-};
-
-SimileAjax.EventIndex.prototype.getEvent = function(id) {
-    return this._idToEvent[id];
-};
-
-SimileAjax.EventIndex.prototype.add = function(evt) {
-    this._events.add(evt);
-    this._idToEvent[evt.getID()] = evt;
-    this._indexed = false;
-};
-
-SimileAjax.EventIndex.prototype.removeAll = function() {
-    this._events.removeAll();
-    this._idToEvent = {};
-    this._indexed = false;
-};
-
-SimileAjax.EventIndex.prototype.getCount = function() {
-    return this._events.length();
-};
-
-SimileAjax.EventIndex.prototype.getIterator = function(startDate, endDate) {
-    if (!this._indexed) {
-        this._index();
-    }
-    return new SimileAjax.EventIndex._Iterator(this._events, startDate, endDate, this._unit);
-};
-
-SimileAjax.EventIndex.prototype.getReverseIterator = function(startDate, endDate) {
-    if (!this._indexed) {
-        this._index();
-    }
-    return new SimileAjax.EventIndex._ReverseIterator(this._events, startDate, endDate, this._unit);
-};
-
-SimileAjax.EventIndex.prototype.getAllIterator = function() {
-    return new SimileAjax.EventIndex._AllIterator(this._events);
-};
-
-SimileAjax.EventIndex.prototype.getEarliestDate = function() {
-    var evt = this._events.getFirst();
-    return (evt == null) ? null : evt.getStart();
-};
-
-SimileAjax.EventIndex.prototype.getLatestDate = function() {
-    var evt = this._events.getLast();
-    if (evt == null) {
-        return null;
-    }
-
-    if (!this._indexed) {
-        this._index();
-    }
-
-    var index = evt._earliestOverlapIndex;
-    var date = this._events.elementAt(index).getEnd();
-    for (var i = index + 1; i < this._events.length(); i++) {
-        date = this._unit.later(date, this._events.elementAt(i).getEnd());
-    }
-
-    return date;
-};
-
-SimileAjax.EventIndex.prototype._index = function() {
-    /*
-     *  For each event, we want to find the earliest preceding
-     *  event that overlaps with it, if any.
-     */
-
-    var l = this._events.length();
-    for (var i = 0; i < l; i++) {
-        var evt = this._events.elementAt(i);
-        evt._earliestOverlapIndex = i;
-    }
-
-    var toIndex = 1;
-    for (var i = 0; i < l; i++) {
-        var evt = this._events.elementAt(i);
-        var end = evt.getEnd();
-
-        toIndex = Math.max(toIndex, i + 1);
-        while (toIndex < l) {
-            var evt2 = this._events.elementAt(toIndex);
-            var start2 = evt2.getStart();
-
-            if (this._unit.compare(start2, end) < 0) {
-                evt2._earliestOverlapIndex = i;
-                toIndex++;
-            } else {
-                break;
-            }
-        }
-    }
-    this._indexed = true;
-};
-
-SimileAjax.EventIndex._Iterator = function(events, startDate, endDate, unit) {
-    this._events = events;
-    this._startDate = startDate;
-    this._endDate = endDate;
-    this._unit = unit;
-
-    this._currentIndex = events.find(function(evt) {
-        return unit.compare(evt.getStart(), startDate);
-    });
-    if (this._currentIndex - 1 >= 0) {
-        this._currentIndex = this._events.elementAt(this._currentIndex - 1)._earliestOverlapIndex;
-    }
-    this._currentIndex--;
-
-    this._maxIndex = events.find(function(evt) {
-        return unit.compare(evt.getStart(), endDate);
-    });
-
-    this._hasNext = false;
-    this._next = null;
-    this._findNext();
-};
-
-SimileAjax.EventIndex._Iterator.prototype = {
-    hasNext: function() { return this._hasNext; },
-    next: function() {
-        if (this._hasNext) {
-            var next = this._next;
-            this._findNext();
-
-            return next;
-        } else {
-            return null;
-        }
-    },
-    _findNext: function() {
-        var unit = this._unit;
-        while ((++this._currentIndex) < this._maxIndex) {
-            var evt = this._events.elementAt(this._currentIndex);
-            if (unit.compare(evt.getStart(), this._endDate) < 0 &&
-                unit.compare(evt.getEnd(), this._startDate) > 0) {
-
-                this._next = evt;
-                this._hasNext = true;
-                return;
-            }
-        }
-        this._next = null;
-        this._hasNext = false;
-    }
-};
-
-SimileAjax.EventIndex._ReverseIterator = function(events, startDate, endDate, unit) {
-    this._events = events;
-    this._startDate = startDate;
-    this._endDate = endDate;
-    this._unit = unit;
-
-    this._minIndex = events.find(function(evt) {
-        return unit.compare(evt.getStart(), startDate);
-    });
-    if (this._minIndex - 1 >= 0) {
-        this._minIndex = this._events.elementAt(this._minIndex - 1)._earliestOverlapIndex;
-    }
-
-    this._maxIndex = events.find(function(evt) {
-        return unit.compare(evt.getStart(), endDate);
-    });
-
-    this._currentIndex = this._maxIndex;
-    this._hasNext = false;
-    this._next = null;
-    this._findNext();
-};
-
-SimileAjax.EventIndex._ReverseIterator.prototype = {
-    hasNext: function() { return this._hasNext; },
-    next: function() {
-        if (this._hasNext) {
-            var next = this._next;
-            this._findNext();
-
-            return next;
-        } else {
-            return null;
-        }
-    },
-    _findNext: function() {
-        var unit = this._unit;
-        while ((--this._currentIndex) >= this._minIndex) {
-            var evt = this._events.elementAt(this._currentIndex);
-            if (unit.compare(evt.getStart(), this._endDate) < 0 &&
-                unit.compare(evt.getEnd(), this._startDate) > 0) {
-
-                this._next = evt;
-                this._hasNext = true;
-                return;
-            }
-        }
-        this._next = null;
-        this._hasNext = false;
-    }
-};
-
-SimileAjax.EventIndex._AllIterator = function(events) {
-    this._events = events;
-    this._index = 0;
-};
-
-SimileAjax.EventIndex._AllIterator.prototype = {
-    hasNext: function() {
-        return this._index < this._events.length();
-    },
-    next: function() {
-        return this._index < this._events.length() ?
-            this._events.elementAt(this._index++) : null;
-    }
-};
-/*
- *  Default Unit
- *
- */
-
-SimileAjax.NativeDateUnit = new Object();
-
-SimileAjax.NativeDateUnit.makeDefaultValue = function() {
-    return new Date();
-};
-
-SimileAjax.NativeDateUnit.cloneValue = function(v) {
-    return new Date(v.getTime());
-};
-
-SimileAjax.NativeDateUnit.getParser = function(format) {
-    if (typeof format == "string") {
-        format = format.toLowerCase();
-    }
-    return (format == "iso8601" || format == "iso 8601") ?
-        SimileAjax.DateTime.parseIso8601DateTime : 
-        SimileAjax.DateTime.parseGregorianDateTime;
-};
-
-SimileAjax.NativeDateUnit.parseFromObject = function(o) {
-    return SimileAjax.DateTime.parseGregorianDateTime(o);
-};
-
-SimileAjax.NativeDateUnit.toNumber = function(v) {
-    return v.getTime();
-};
-
-SimileAjax.NativeDateUnit.fromNumber = function(n) {
-    return new Date(n);
-};
-
-SimileAjax.NativeDateUnit.compare = function(v1, v2) {
-    var n1, n2;
-    if (typeof v1 == "object") {
-        n1 = v1.getTime();
-    } else {
-        n1 = Number(v1);
-    }
-    if (typeof v2 == "object") {
-        n2 = v2.getTime();
-    } else {
-        n2 = Number(v2);
-    }
-    
-    return n1 - n2;
-};
-
-SimileAjax.NativeDateUnit.earlier = function(v1, v2) {
-    return SimileAjax.NativeDateUnit.compare(v1, v2) < 0 ? v1 : v2;
-};
-
-SimileAjax.NativeDateUnit.later = function(v1, v2) {
-    return SimileAjax.NativeDateUnit.compare(v1, v2) > 0 ? v1 : v2;
-};
-
-SimileAjax.NativeDateUnit.change = function(v, n) {
-    return new Date(v.getTime() + n);
-};
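// Illustrative sketch: the default date "unit" above compares and shifts plain Date values.
var unit = SimileAjax.NativeDateUnit;
var a = unit.getParser("iso8601")("2015-06-22T12:00:00Z");
var b = unit.change(a, 60 * 60 * 1000);   // one hour later, as a new Date
unit.compare(a, b) < 0;                   // true
unit.earlier(a, b) === a;                 // true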
-
-/*
- *  General, miscellaneous SimileAjax stuff
- *
- */
-
-SimileAjax.ListenerQueue = function(wildcardHandlerName) {
-    this._listeners = [];
-    this._wildcardHandlerName = wildcardHandlerName;
-};
-
-SimileAjax.ListenerQueue.prototype.add = function(listener) {
-    this._listeners.push(listener);
-};
-
-SimileAjax.ListenerQueue.prototype.remove = function(listener) {
-    var listeners = this._listeners;
-    for (var i = 0; i < listeners.length; i++) {
-        if (listeners[i] == listener) {
-            listeners.splice(i, 1);
-            break;
-        }
-    }
-};
-
-SimileAjax.ListenerQueue.prototype.fire = function(handlerName, args) {
-    var listeners = [].concat(this._listeners);
-    for (var i = 0; i < listeners.length; i++) {
-        var listener = listeners[i];
-        if (handlerName in listener) {
-            try {
-                listener[handlerName].apply(listener, args);
-            } catch (e) {
-                SimileAjax.Debug.exception("Error firing event of name " + handlerName, e);
-            }
-        } else if (this._wildcardHandlerName != null &&
-            this._wildcardHandlerName in listener) {
-            try {
-                listener[this._wildcardHandlerName].apply(listener, [ handlerName ]);
-            } catch (e) {
-                SimileAjax.Debug.exception("Error firing event of name " + handlerName + " to wildcard handler", e);
-            }
-        }
-    }
-};
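// Illustrative sketch: dispatching to listeners with the queue above; the handler
// names used here are made up for the example.
var queue = new SimileAjax.ListenerQueue("onAnyEvent");
queue.add({
    onDataLoaded: function(count) { /* called with the arguments passed to fire() */ },
    onAnyEvent:   function(handlerName) { /* wildcard fallback for other handler names */ }
});
queue.fire("onDataLoaded", [42]);   // calls onDataLoaded(42) on every listener that defines it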
-
-/*
- *  History
- *
- *  This is a singleton that keeps track of undoable user actions and
- *  performs undos and redos in response to the browser's Back and
- *  Forward buttons.
- *
- *  Call addAction(action) to register an undoable user action. action
- *  must have 4 fields:
- *
- *      perform: an argument-less function that carries out the action
- *      undo:    an argument-less function that undos the action
- *      label:   a short, user-friendly string describing the action
- *      uiLayer: the UI layer on which the action takes place
- *
- *  By default, the history keeps track of up to 10 actions. You can
- *  configure this behavior by setting
- *      SimileAjax.History.maxHistoryLength
- *  to a different number.
- *
- *  An iframe is inserted into the document's body element to track
- *  onload events.
- *
- */
-
-SimileAjax.History = {
-    maxHistoryLength:       10,
-    historyFile:            "__history__.html",
-    enabled:               true,
-
-    _initialized:           false,
-    _listeners:             new SimileAjax.ListenerQueue(),
-
-    _actions:               [],
-    _baseIndex:             0,
-    _currentIndex:          0,
-
-    _plainDocumentTitle:    document.title
-};
-
-SimileAjax.History.formatHistoryEntryTitle = function(actionLabel) {
-    return SimileAjax.History._plainDocumentTitle + " {" + actionLabel + "}";
-};
-
-SimileAjax.History.initialize = function() {
-    if (SimileAjax.History._initialized) {
-        return;
-    }
-
-    if (SimileAjax.History.enabled) {
-        var iframe = document.createElement("iframe");
-        iframe.id = "simile-ajax-history";
-        iframe.style.position = "absolute";
-        iframe.style.width = "10px";
-        iframe.style.height = "10px";
-        iframe.style.top = "0px";
-        iframe.style.left = "0px";
-        iframe.style.visibility = "hidden";
-        iframe.src = SimileAjax.History.historyFile + "?0";
-
-        document.body.appendChild(iframe);
-        SimileAjax.DOM.registerEvent(iframe, "load", SimileAjax.History._handleIFrameOnLoad);
-
-        SimileAjax.History._iframe = iframe;
-    }
-    SimileAjax.History._initialized = true;
-};
-
-SimileAjax.History.addListener = function(listener) {
-    SimileAjax.History.initialize();
-
-    SimileAjax.History._listeners.add(listener);
-};
-
-SimileAjax.History.removeListener = function(listener) {
-    SimileAjax.History.initialize();
-
-    SimileAjax.History._listeners.remove(listener);
-};
-
-SimileAjax.History.addAction = function(action) {
-    SimileAjax.History.initialize();
-
-    SimileAjax.History._listeners.fire("onBeforePerform", [ action ]);
-    window.setTimeout(function() {
-        try {
-            action.perform();
-            SimileAjax.History._listeners.fire("onAfterPerform", [ action ]);
-
-            if (SimileAjax.History.enabled) {
-                SimileAjax.History._actions = SimileAjax.History._actions.slice(
-                    0, SimileAjax.History._currentIndex - SimileAjax.History._baseIndex);
-
-                SimileAjax.History._actions.push(action);
-                SimileAjax.History._currentIndex++;
-
-                var diff = SimileAjax.History._actions.length - SimileAjax.History.maxHistoryLength;
-                if (diff > 0) {
-                    SimileAjax.History._actions = SimileAjax.History._actions.slice(diff);
-                    SimileAjax.History._baseIndex += diff;
-                }
-
-                try {
-                    SimileAjax.History._iframe.contentWindow.location.search =
-                        "?" + SimileAjax.History._currentIndex;
-                } catch (e) {
-                    /*
-                     *  We most probably can't modify location.search because it's a file:// URL.
-                     *  We'll just modify the document's title instead.
-                     */
-                    var title = SimileAjax.History.formatHistoryEntryTitle(action.label);
-                    document.title = title;
-                }
-            }
-        } catch (e) {
-            SimileAjax.Debug.exception(e, "Error adding action {" + action.label + "} to history");
-        }
-    }, 0);
-};
-
-SimileAjax.History.addLengthyAction = function(perform, undo, label) {
-    SimileAjax.History.addAction({
-        perform:    perform,
-        undo:       undo,
-        label:      label,
-        uiLayer:    SimileAjax.WindowManager.getBaseLayer(),
-        lengthy:    true
-    });
-};
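// Illustrative sketch: registering an undoable action with the history singleton
// above; the piece of UI state being toggled is hypothetical.
var panelVisible = true;   // hypothetical UI state
SimileAjax.History.addAction({
    perform: function() { panelVisible = false; },
    undo:    function() { panelVisible = true; },
    label:   "Hide panel",
    uiLayer: SimileAjax.WindowManager.getBaseLayer()
});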
-
-SimileAjax.History._handleIFrameOnLoad = function() {
-    /*
-     *  This function is invoked when the user herself
-     *  navigates backward or forward. We need to adjust
-     *  the application's state accordingly.
-     */
-
-    try {
-        var q = SimileAjax.History._iframe.contentWindow.location.search;
-        var c = (q.length == 0) ? 0 : Math.max(0, parseInt(q.substr(1)));
-
-        var finishUp = function() {
-            var diff = c - SimileAjax.History._currentIndex;
-            SimileAjax.History._currentIndex += diff;
-            SimileAjax.History._baseIndex += diff;
-
-            SimileAjax.History._iframe.contentWindow.location.search = "?" + c;
-        };
-
-        if (c < SimileAjax.History._currentIndex) { // need to undo
-            SimileAjax.History._listeners.fire("onBeforeUndoSeveral", []);
-            window.setTimeout(function() {
-                while (SimileAjax.History._currentIndex > c &&
-                       SimileAjax.History._currentIndex > SimileAjax.History._baseIndex) {
-
-                    SimileAjax.History._currentIndex--;
-
-                    var action = SimileAjax.History._actions[SimileAjax.History._currentIndex - SimileAjax.History._baseIndex];
-
-                    try {
-                        action.undo();
-                    } catch (e) {
-                        SimileAjax.Debug.exception(e, "History: Failed to undo action {" + action.label + "}");
-                    }
-                }
-
-                SimileAjax.History._listeners.fire("onAfterUndoSeveral", []);
-                finishUp();
-            }, 0);
-        } else if (c > SimileAjax.History._currentIndex) { // need to redo
-            SimileAjax.History._listeners.fire("onBeforeRedoSeveral", []);
-            window.setTimeout(function() {
-                while (SimileAjax.History._currentIndex < c &&
-                       SimileAjax.History._currentIndex - SimileAjax.History._baseIndex < SimileAjax.History._actions.length) {
-
-                    var action = SimileAjax.History._actions[SimileAjax.History._currentIndex - SimileAjax.History._baseIndex];
-
-                    try {
-                        action.perform();
-                    } catch (e) {
-                        SimileAjax.Debug.exception(e, "History: Failed to redo action {" + action.label + "}");
-                    }
-
-                    SimileAjax.History._currentIndex++;
-                }
-
-                SimileAjax.History._listeners.fire("onAfterRedoSeveral", []);
-                finishUp();
-            }, 0);
-        } else {
-            var index = SimileAjax.History._currentIndex - SimileAjax.History._baseIndex - 1;
-            var title = (index >= 0 && index < SimileAjax.History._actions.length) ?
-                SimileAjax.History.formatHistoryEntryTitle(SimileAjax.History._actions[index].label) :
-                SimileAjax.History._plainDocumentTitle;
-
-            SimileAjax.History._iframe.contentWindow.document.title = title;
-            document.title = title;
-        }
-    } catch (e) {
-        // silent
-    }
-};
-
-SimileAjax.History.getNextUndoAction = function() {
-    try {
-        var index = SimileAjax.History._currentIndex - SimileAjax.History._baseIndex - 1;
-        return SimileAjax.History._actions[index];
-    } catch (e) {
-        return null;
-    }
-};
-
-SimileAjax.History.getNextRedoAction = function() {
-    try {
-        var index = SimileAjax.History._currentIndex - SimileAjax.History._baseIndex;
-        return SimileAjax.History._actions[index];
-    } catch (e) {
-        return null;
-    }
-};
-/**
- * @fileOverview UI layers and window-wide dragging
- * @name SimileAjax.WindowManager
- */
-
-/**
- *  This is a singleton that keeps track of UI layers (modal and
- *  modeless) and enables/disables UI elements based on which layers
- *  they belong to. It also provides window-wide dragging
- *  implementation.
- */
-SimileAjax.WindowManager = {
-    _initialized:       false,
-    _listeners:         [],
-
-    _draggedElement:                null,
-    _draggedElementCallback:        null,
-    _dropTargetHighlightElement:    null,
-    _lastCoords:                    null,
-    _ghostCoords:                   null,
-    _draggingMode:                  "",
-    _dragging:                      false,
-
-    _layers:            []
-};
-
-SimileAjax.WindowManager.initialize = function() {
-    if (SimileAjax.WindowManager._initialized) {
-        return;
-    }
-
-    SimileAjax.DOM.registerEvent(document.body, "mousedown", SimileAjax.WindowManager._onBodyMouseDown);
-    SimileAjax.DOM.registerEvent(document.body, "mousemove", SimileAjax.WindowManager._onBodyMouseMove);
-    SimileAjax.DOM.registerEvent(document.body, "mouseup",   SimileAjax.WindowManager._onBodyMouseUp);
-    SimileAjax.DOM.registerEvent(document, "keydown",       SimileAjax.WindowManager._onBodyKeyDown);
-    SimileAjax.DOM.registerEvent(document, "keyup",         SimileAjax.WindowManager._onBodyKeyUp);
-
-    SimileAjax.WindowManager._layers.push({index: 0});
-
-    SimileAjax.WindowManager._historyListener = {
-        onBeforeUndoSeveral:    function() {},
-        onAfterUndoSeveral:     function() {},
-        onBeforeUndo:           function() {},
-        onAfterUndo:            function() {},
-
-        onBeforeRedoSeveral:    function() {},
-        onAfterRedoSeveral:     function() {},
-        onBeforeRedo:           function() {},
-        onAfterRedo:            function() {}
-    };
-    SimileAjax.History.addListener(SimileAjax.WindowManager._historyListener);
-
-    SimileAjax.WindowManager._initialized = true;
-};
-
-SimileAjax.WindowManager.getBaseLayer = function() {
-    SimileAjax.WindowManager.initialize();
-    return SimileAjax.WindowManager._layers[0];
-};
-
-SimileAjax.WindowManager.getHighestLayer = function() {
-    SimileAjax.WindowManager.initialize();
-    return SimileAjax.WindowManager._layers[SimileAjax.WindowManager._layers.length - 1];
-};
-
-SimileAjax.WindowManager.registerEventWithObject = function(elmt, eventName, obj, handlerName, layer) {
-    SimileAjax.WindowManager.registerEvent(
-        elmt,
-        eventName,
-        function(elmt2, evt, target) {
-            return obj[handlerName].call(obj, elmt2, evt, target);
-        },
-        layer
-    );
-};
-
-SimileAjax.WindowManager.registerEvent = function(elmt, eventName, handler, layer) {
-    if (layer == null) {
-        layer = SimileAjax.WindowManager.getHighestLayer();
-    }
-
-    var handler2 = function(elmt, evt, target) {
-        if (SimileAjax.WindowManager._canProcessEventAtLayer(layer)) {
-            SimileAjax.WindowManager._popToLayer(layer.index);
-            try {
-                handler(elmt, evt, target);
-            } catch (e) {
-                SimileAjax.Debug.exception(e);
-            }
-        }
-        SimileAjax.DOM.cancelEvent(evt);
-        return false;
-    }
-
-    SimileAjax.DOM.registerEvent(elmt, eventName, handler2);
-};
-
-SimileAjax.WindowManager.pushLayer = function(f, ephemeral, elmt) {
-    var layer = { onPop: f, index: SimileAjax.WindowManager._layers.length, ephemeral: (ephemeral), elmt: elmt };
-    SimileAjax.WindowManager._layers.push(layer);
-
-    return layer;
-};
-
-SimileAjax.WindowManager.popLayer = function(layer) {
-    for (var i = 1; i < SimileAjax.WindowManager._layers.length; i++) {
-        if (SimileAjax.WindowManager._layers[i] == layer) {
-            SimileAjax.WindowManager._popToLayer(i - 1);
-            break;
-        }
-    }
-};
-
-SimileAjax.WindowManager.popAllLayers = function() {
-    SimileAjax.WindowManager._popToLayer(0);
-};
-
-SimileAjax.WindowManager.registerForDragging = function(elmt, callback, layer) {
-    SimileAjax.WindowManager.registerEvent(
-        elmt,
-        "mousedown",
-        function(elmt, evt, target) {
-            SimileAjax.WindowManager._handleMouseDown(elmt, evt, callback);
-        },
-        layer
-    );
-};
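A minimal usage sketch of the layer and dragging API defined above, assuming SimileAjax is already loaded and the DOM is ready; the element ids ("popup", "card", "trash") and the drop rule are hypothetical:

    // Sketch only: "popup", "card" and "trash" are hypothetical element ids.
    SimileAjax.WindowManager.initialize();

    // Ephemeral layers are closed by cancelPopups() on clicks elsewhere in the page;
    // the onPop callback passed to pushLayer() does the actual hiding.
    var popup = document.getElementById("popup");
    var layer = SimileAjax.WindowManager.pushLayer(
        function() { popup.style.display = "none"; },   // onPop
        true,                                            // ephemeral
        popup                                            // clicks inside this element keep it open
    );
    // Later, to close it programmatically: SimileAjax.WindowManager.popLayer(layer);

    // Dragging: the mouse-move/mouse-up handlers above look for these optional
    // callback members, and for an ondrop() function on the drop target.
    SimileAjax.WindowManager.registerForDragging(document.getElementById("card"), {
        ghost:     true,    // drag a translucent clone of the element
        droppable: true,    // probe elements under the cursor for ondrop()
        onDragStart: function() {},
        onDragBy:    function(diffX, diffY) {},
        onDragEnd:   function() {},
        canDropOn:   function(target) { return target.id == "trash"; }
    });
    document.getElementById("trash").ondrop = function(draggedElmt, draggingMode) {
        draggedElmt.parentNode.removeChild(draggedElmt);
    };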
-
-SimileAjax.WindowManager._popToLayer = function(level) {
-    while (level+1 < SimileAjax.WindowManager._layers.length) {
-        try {
-            var layer = SimileAjax.WindowManager._layers.pop();
-            if (layer.onPop != null) {
-                layer.onPop();
-            }
-        } catch (e) {
-        }
-    }
-};
-
-SimileAjax.WindowManager._canProcessEventAtLayer = function(layer) {
-    if (layer.index == (SimileAjax.WindowManager._layers.length - 1)) {
-        return true;
-    }
-    for (var i = layer.index + 1; i < SimileAjax.WindowManager._layers.length; i++) {
-        if (!SimileAjax.WindowManager._layers[i].ephemeral) {
-            return false;
-        }
-    }
-    return true;
-};
-
-SimileAjax.WindowManager.cancelPopups = function(evt) {
-    var evtCoords = (evt) ? SimileAjax.DOM.getEventPageCoordinates(evt) : { x: -1, y: -1 };
-
-    var i = SimileAjax.WindowManager._layers.length - 1;
-    while (i > 0 && SimileAjax.WindowManager._layers[i].ephemeral) {
-        var layer = SimileAjax.WindowManager._layers[i];
-        if (layer.elmt != null) { // if event falls within main element of layer then don't cancel
-            var elmt = layer.elmt;
-            var elmtCoords = SimileAjax.DOM.getPageCoordinates(elmt);
-            if (evtCoords.x >= elmtCoords.left && evtCoords.x < (elmtCoords.left + elmt.offsetWidth) &&
-                evtCoords.y >= elmtCoords.top && evtCoords.y < (elmtCoords.top + elmt.offsetHeight)) {
-                break;
-            }
-        }
-        i--;
-    }
-    SimileAjax.WindowManager._popToLayer(i);
-};
-
-SimileAjax.WindowManager._onBodyMouseDown = function(elmt, evt, target) {
-    if (!("eventPhase" in evt) || evt.eventPhase == evt.BUBBLING_PHASE) {
-        SimileAjax.WindowManager.cancelPopups(evt);
-    }
-};
-
-SimileAjax.WindowManager._handleMouseDown = function(elmt, evt, callback) {
-    SimileAjax.WindowManager._draggedElement = elmt;
-    SimileAjax.WindowManager._draggedElementCallback = callback;
-    SimileAjax.WindowManager._lastCoords = { x: evt.clientX, y: evt.clientY };
-
-    SimileAjax.DOM.cancelEvent(evt);
-    return false;
-};
-
-SimileAjax.WindowManager._onBodyKeyDown = function(elmt, evt, target) {
-    if (SimileAjax.WindowManager._dragging) {
-        if (evt.keyCode == 27) { // esc
-            SimileAjax.WindowManager._cancelDragging();
-        } else if ((evt.keyCode == 17 || evt.keyCode == 16) && SimileAjax.WindowManager._draggingMode != "copy") {
-            SimileAjax.WindowManager._draggingMode = "copy";
-
-            var img = SimileAjax.Graphics.createTranslucentImage(SimileAjax.urlPrefix + "images/copy.png");
-            img.style.position = "absolute";
-            img.style.left = (SimileAjax.WindowManager._ghostCoords.left - 16) + "px";
-            img.style.top = (SimileAjax.WindowManager._ghostCoords.top) + "px";
-            document.body.appendChild(img);
-
-            SimileAjax.WindowManager._draggingModeIndicatorElmt = img;
-        }
-    }
-};
-
-SimileAjax.WindowManager._onBodyKeyUp = function(elmt, evt, target) {
-    if (SimileAjax.WindowManager._dragging) {
-        if (evt.keyCode == 17 || evt.keyCode == 16) {
-            SimileAjax.WindowManager._draggingMode = "";
-            if (SimileAjax.WindowManager._draggingModeIndicatorElmt != null) {
-                document.body.removeChild(SimileAjax.WindowManager._draggingModeIndicatorElmt);
-                SimileAjax.WindowManager._draggingModeIndicatorElmt = null;
-            }
-        }
-    }
-};
-
-SimileAjax.WindowManager._onBodyMouseMove = function(elmt, evt, target) {
-    if (SimileAjax.WindowManager._draggedElement != null) {
-        var callback = SimileAjax.WindowManager._draggedElementCallback;
-
-        var lastCoords = SimileAjax.WindowManager._lastCoords;
-        var diffX = evt.clientX - lastCoords.x;
-        var diffY = evt.clientY - lastCoords.y;
-
-        if (!SimileAjax.WindowManager._dragging) {
-            if (Math.abs(diffX) > 5 || Math.abs(diffY) > 5) {
-                try {
-                    if ("onDragStart" in callback) {
-                        callback.onDragStart();
-                    }
-
-                    if ("ghost" in callback && callback.ghost) {
-                        var draggedElmt = SimileAjax.WindowManager._draggedElement;
-
-                        SimileAjax.WindowManager._ghostCoords = SimileAjax.DOM.getPageCoordinates(draggedElmt);
-                        SimileAjax.WindowManager._ghostCoords.left += diffX;
-                        SimileAjax.WindowManager._ghostCoords.top += diffY;
-
-                        var ghostElmt = draggedElmt.cloneNode(true);
-                        ghostElmt.style.position = "absolute";
-                        ghostElmt.style.left = SimileAjax.WindowManager._ghostCoords.left + "px";
-                        ghostElmt.style.top = SimileAjax.WindowManager._ghostCoords.top + "px";
-                        ghostElmt.style.zIndex = 1000;
-                        SimileAjax.Graphics.setOpacity(ghostElmt, 50);
-
-                        document.body.appendChild(ghostElmt);
-                        callback._ghostElmt = ghostElmt;
-                    }
-
-                    SimileAjax.WindowManager._dragging = true;
-                    SimileAjax.WindowManager._lastCoords = { x: evt.clientX, y: evt.clientY };
-
-                    document.body.focus();
-                } catch (e) {
-                    SimileAjax.Debug.exception("WindowManager: Error handling mouse down", e);
-                    SimileAjax.WindowManager._cancelDragging();
-                }
-            }
-        } else {
-            try {
-                SimileAjax.WindowManager._lastCoords = { x: evt.clientX, y: evt.clientY };
-
-                if ("onDragBy" in callback) {
-                    callback.onDragBy(diffX, diffY);
-                }
-
-                if ("_ghostElmt" in callback) {
-                    var ghostElmt = callback._ghostElmt;
-
-                    SimileAjax.WindowManager._ghostCoords.left += diffX;
-                    SimileAjax.WindowManager._ghostCoords.top += diffY;
-
-                    ghostElmt.style.left = SimileAjax.WindowManager._ghostCoords.left + "px";
-                    ghostElmt.style.top = SimileAjax.WindowManager._ghostCoords.top + "px";
-                    if (SimileAjax.WindowManager._draggingModeIndicatorElmt != null) {
-                        var indicatorElmt = SimileAjax.WindowManager._draggingModeIndicatorElmt;
-
-                        indicatorElmt.style.left = (SimileAjax.WindowManager._ghostCoords.left - 16) + "px";
-                        indicatorElmt.style.top = SimileAjax.WindowManager._ghostCoords.top + "px";
-                    }
-
-                    if ("droppable" in callback && callback.droppable) {
-                        var coords = SimileAjax.DOM.getEventPageCoordinates(evt);
-                        var target = SimileAjax.DOM.hittest(
-                            coords.x, coords.y,
-                            [   SimileAjax.WindowManager._ghostElmt,
-                                SimileAjax.WindowManager._dropTargetHighlightElement
-                            ]
-                        );
-                        target = SimileAjax.WindowManager._findDropTarget(target);
-
-                        if (target != SimileAjax.WindowManager._potentialDropTarget) {
-                            if (SimileAjax.WindowManager._dropTargetHighlightElement != null) {
-                                document.body.removeChild(SimileAjax.WindowManager._dropTargetHighlightElement);
-
-                                SimileAjax.WindowManager._dropTargetHighlightElement = null;
-                                SimileAjax.WindowManager._potentialDropTarget = null;
-                            }
-
-                            var droppable = false;
-                            if (target != null) {
-                                if ((!("canDropOn" in callback) || callback.canDropOn(target)) &&
-                                    (!("canDrop" in target) || target.canDrop(SimileAjax.WindowManager._draggedElement))) {
-
-                                    droppable = true;
-                                }
-                            }
-
-                            if (droppable) {
-                                var border = 4;
-                                var targetCoords = SimileAjax.DOM.getPageCoordinates(target);
-                                var highlight = document.createElement("div");
-                                highlight.style.border = border + "px solid yellow";
-                                highlight.style.backgroundColor = "yellow";
-                                highlight.style.position = "absolute";
-                                highlight.style.left = targetCoords.left + "px";
-                                highlight.style.top = targetCoords.top + "px";
-                                highlight.style.width = (target.offsetWidth - border * 2) + "px";
-                                highlight.style.height = (target.offsetHeight - border * 2) + "px";
-                                SimileAjax.Graphics.setOpacity(highlight, 30);
-                                document.body.appendChild(highlight);
-
-                                SimileAjax.WindowManager._potentialDropTarget = target;
-                                SimileAjax.WindowManager._dropTargetHighlightElement = highlight;
-                            }
-                        }
-                    }
-                }
-            } catch (e) {
-                SimileAjax.Debug.exception("WindowManager: Error handling mouse move", e);
-                SimileAjax.WindowManager._cancelDragging();
-            }
-        }
-
-        SimileAjax.DOM.cancelEvent(evt);
-        return false;
-    }
-};
-
-SimileAjax.WindowManager._onBodyMouseUp = function(elmt, evt, target) {
-    if (SimileAjax.WindowManager._draggedElement != null) {
-        try {
-            if (SimileAjax.WindowManager._dragging) {
-                var callback = SimileAjax.WindowManager._draggedElementCallback;
-                if ("onDragEnd" in callback) {
-                    callback.onDragEnd();
-                }
-                if ("droppable" in callback && callback.droppable) {
-                    var dropped = false;
-
-                    var target = SimileAjax.WindowManager._potentialDropTarget;
-                    if (target != null) {
-                        if ((!("canDropOn" in callback) || callback.canDropOn(target)) &&
-                            (!("canDrop" in target) || target.canDrop(SimileAjax.WindowManager._draggedElement))) {
-
-                            if ("onDropOn" in callback) {
-                                callback.onDropOn(target);
-                            }
-                            target.ondrop(SimileAjax.WindowManager._draggedElement, SimileAjax.WindowManager._draggingMode);
-
-                            dropped = true;
-                        }
-                    }
-
-                    if (!dropped) {
-                        // TODO: do Hollywood explosion here
-                    }
-                }
-            }
-        } finally {
-            SimileAjax.WindowManager._cancelDragging();
-        }
-
-        SimileAjax.DOM.cancelEvent(evt);
-        return false;
-    }
-};
-
-SimileAjax.WindowManager._cancelDragging = function() {
-    var callback = SimileAjax.WindowManager._draggedElementCallback;
-    if ("_ghostElmt" in callback) {
-        var ghostElmt = callback._ghostElmt;
-        document.body.removeChild(ghostElmt);
-
-        delete callback._ghostElmt;
-    }
-    if (SimileAjax.WindowManager._dropTargetHighlightElement != null) {
-        document.body.removeChild(SimileAjax.WindowManager._dropTargetHighlightElement);
-        SimileAjax.WindowManager._dropTargetHighlightElement = null;
-    }
-    if (SimileAjax.WindowManager._draggingModeIndicatorElmt != null) {
-        document.body.removeChild(SimileAjax.WindowManager._draggingModeIndicatorElmt);
-        SimileAjax.WindowManager._draggingModeIndicatorElmt = null;
-    }
-
-    SimileAjax.WindowManager._draggedElement = null;
-    SimileAjax.WindowManager._draggedElementCallback = null;
-    SimileAjax.WindowManager._potentialDropTarget = null;
-    SimileAjax.WindowManager._dropTargetHighlightElement = null;
-    SimileAjax.WindowManager._lastCoords = null;
-    SimileAjax.WindowManager._ghostCoords = null;
-    SimileAjax.WindowManager._draggingMode = "";
-    SimileAjax.WindowManager._dragging = false;
-};
-
-SimileAjax.WindowManager._findDropTarget = function(elmt) {
-    while (elmt != null) {
-        if ("ondrop" in elmt && (typeof elmt.ondrop) == "function") {
-            break;
-        }
-        elmt = elmt.parentNode;
-    }
-    return elmt;
-};
-/*
- *  Timeline API
- *
- *  This file will load all the Javascript files
- *  necessary to make the standard timeline work.
- *  It also detects the default locale.
- *
- *  To run from the MIT copy of Timeline:
- *  Include this file in your HTML file as follows:
- *
- *    <script src="http://api.simile-widgets.org/timeline/2.3.1/timeline-api.js"
- *     type="text/javascript"></script>
- *
- *
- * To host the Timeline files on your own server:
- *   1) Install the Timeline and Simile-Ajax files onto your webserver using
- *      timeline_libraries.zip or timeline_source.zip
- *
- *   2) Set global js variables used to send parameters to this script:
- *        var Timeline_ajax_url -- url for simile-ajax-api.js
- *        var Timeline_urlPrefix -- url for the *directory* that contains timeline-api.js
- *            Include trailing slash
- *        var Timeline_parameters='bundle=true'; // you must set bundle to true if you are using
- *                                               // timeline_libraries.zip since only the
- *                                               // bundled libraries are included
- *
- * eg your html page would include
- *
- *   <script>
- *     var Timeline_ajax_url="http://YOUR_SERVER/javascripts/timeline/timeline_ajax/simile-ajax-api.js";
- *     var Timeline_urlPrefix='http://YOUR_SERVER/javascripts/timeline/timeline_js/';
- *     var Timeline_parameters='bundle=true';
- *   </script>
- *   <script src="http://YOUR_SERVER/javascripts/timeline/timeline_js/timeline-api.js"
- *     type="text/javascript">
- *   </script>
- *
- * SCRIPT PARAMETERS
- * This script auto-magically figures out the locale and has defaults for other parameters.
- * To set parameters explicitly, set the js global variable Timeline_parameters or include them as
- * parameters on the url using GET style. E.g. the next two lines pass the same parameters:
- *     Timeline_parameters='bundle=true';                    // pass parameter via js variable
- *     <script src="http://....timeline-api.js?bundle=true"  // pass parameter via url
- *
- * Parameters
- *   timeline-use-local-resources --
- *   bundle -- true: use the single js bundle file; false: load individual files (for debugging)
- *   locales --
- *   defaultLocale --
- *   forceLocale -- force locale to be a particular value--used for debugging. Normally locale is determined
- *                  by browser's and server's locale settings.
- *
- * DEBUGGING
- * If you have a problem with Timeline, the first step is to use the unbundled Javascript files.
- * To use the unbundled Timeline and Ajax libraries, change
- *   <script src="http://api.simile-widgets.org/timeline/2.3.1/api/timeline-api.js?bundle=true" type="text/javascript"></script>
- * To
- *   <script>var Timeline_ajax_url = "http://api.simile-widgets.org/ajax/2.2.1/simile-ajax-api.js?bundle=false"</script>
- *   <script src="http://api.simile-widgets.org/timeline/2.3.1/api/timeline-api.js?bundle=false" type="text/javascript"></script>
- *
- * Note that the Ajax version is usually NOT the same as the Timeline version.
- * See variable simile_ajax_ver below for the current version
- *
- *
- */
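A short sketch of the two equivalent ways of passing the parameters parsed by the loader below (bundle, locales, defaultLocale, forceLocale); the server URL is a placeholder:

    // Sketch only: either set the global before timeline-api.js is loaded ...
    var Timeline_parameters = "bundle=true&locales=fr,de&forceLocale=fr";
    // ... or append the same query string to the script URL instead:
    //   <script src="http://YOUR_SERVER/timeline_js/timeline-api.js?bundle=true&locales=fr,de&forceLocale=fr"
    //           type="text/javascript"></script>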
-
-(function() {
-
-    var simile_ajax_ver = "2.2.1"; // ===========>>>  current Simile-Ajax version
-
-    var useLocalResources = false;
-    if (document.location.search.length > 0) {
-        var params = document.location.search.substr(1).split("&");
-        for (var i = 0; i < params.length; i++) {
-            if (params[i] == "timeline-use-local-resources") {
-                useLocalResources = true;
-            }
-        }
-    };
-
-    var loadMe = function() {
-        if ("Timeline" in window) {
-            return;
-        }
-
-        window.Timeline = new Object();
-        window.Timeline.DateTime = window.SimileAjax.DateTime; // for backward compatibility
-
-        var bundle = false;
-        var javascriptFiles = [
-            "timeline.js",
-            "band.js",
-            "themes.js",
-            "ethers.js",
-            "ether-painters.js",
-            "event-utils.js",
-            "labellers.js",
-            "sources.js",
-            "original-painter.js",
-            "detailed-painter.js",
-            "overview-painter.js",
-            "compact-painter.js",
-            "decorators.js",
-            "units.js"
-        ];
-        var cssFiles = [
-            "timeline.css",
-            "ethers.css",
-            "events.css"
-        ];
-
-        var localizedJavascriptFiles = [
-            "timeline.js",
-            "labellers.js"
-        ];
-        var localizedCssFiles = [
-        ];
-
-        // ISO-639 language codes, ISO-3166 country codes (2 characters)
-        var supportedLocales = [
-            "cs",       // Czech
-            "de",       // German
-            "en",       // English
-            "es",       // Spanish
-            "fr",       // French
-            "it",       // Italian
-            "nl",       // Dutch (The Netherlands)
-            "ru",       // Russian
-            "se",       // Swedish
-            "tr",       // Turkish
-            "vi",       // Vietnamese
-            "zh"        // Chinese
-        ];
-
-        try {
-            var desiredLocales = [ "en" ],
-                defaultServerLocale = "en",
-                forceLocale = null;
-
-            var parseURLParameters = function(parameters) {
-                var params = parameters.split("&");
-                for (var p = 0; p < params.length; p++) {
-                    var pair = params[p].split("=");
-                    if (pair[0] == "locales") {
-                        desiredLocales = desiredLocales.concat(pair[1].split(","));
-                    } else if (pair[0] == "defaultLocale") {
-                        defaultServerLocale = pair[1];
-                    } else if (pair[0] == "forceLocale") {
-                        forceLocale = pair[1];
-                        desiredLocales = desiredLocales.concat(pair[1].split(","));
-                    } else if (pair[0] == "bundle") {
-                        bundle = pair[1] != "false";
-                    }
-                }
-            };
-
-            (function() {
-                if (typeof Timeline_urlPrefix == "string") {
-                    Timeline.urlPrefix = Timeline_urlPrefix;
-                    if (typeof Timeline_parameters == "string") {
-                        parseURLParameters(Timeline_parameters);
-                    }
-                } else {
-                    var heads = document.documentElement.getElementsByTagName("head");
-                    for (var h = 0; h < heads.length; h++) {
-                        var scripts = heads[h].getElementsByTagName("script");
-                        for (var s = 0; s < scripts.length; s++) {
-                            var url = scripts[s].src;
-                            var i = url.indexOf("timeline-api.js");
-                            if (i >= 0) {
-                                Timeline.urlPrefix = url.substr(0, i);
-                                var q = url.indexOf("?");
-                                if (q > 0) {
-                                    parseURLParameters(url.substr(q + 1));
-                                }
-                                return;
-                            }
-                        }
-                    }
-                    throw new Error("Failed to derive URL prefix for Timeline API code files");
-                }
-            })();
-
-            var includeJavascriptFiles = function(urlPrefix, filenames) {
-                SimileAjax.includeJavascriptFiles(document, urlPrefix, filenames);
-            }
-            var includeCssFiles = function(urlPrefix, filenames) {
-                SimileAjax.includeCssFiles(document, urlPrefix, filenames);
-            }
-
-            /*
-             *  Include non-localized files
-             */
-            if (bundle) {
-                includeJavascriptFiles(Timeline.urlPrefix, [ "timeline-bundle.js" ]);
-                includeCssFiles(Timeline.urlPrefix, [ "timeline-bundle.css" ]);
-            } else {
-                // XXX adim includeJavascriptFiles(Timeline.urlPrefix + "scripts/", javascriptFiles);
-                // XXX adim includeCssFiles(Timeline.urlPrefix + "styles/", cssFiles);
-            }
-
-            /*
-             *  Include localized files
-             */
-            var loadLocale = [];
-            loadLocale[defaultServerLocale] = true;
-
-            var tryExactLocale = function(locale) {
-                for (var l = 0; l < supportedLocales.length; l++) {
-                    if (locale == supportedLocales[l]) {
-                        loadLocale[locale] = true;
-                        return true;
-                    }
-                }
-                return false;
-            }
-            var tryLocale = function(locale) {
-                if (tryExactLocale(locale)) {
-                    return locale;
-                }
-
-                var dash = locale.indexOf("-");
-                if (dash > 0 && tryExactLocale(locale.substr(0, dash))) {
-                    return locale.substr(0, dash);
-                }
-
-                return null;
-            }
-
-            for (var l = 0; l < desiredLocales.length; l++) {
-                tryLocale(desiredLocales[l]);
-            }
-
-            var defaultClientLocale = defaultServerLocale;
-            var defaultClientLocales = ("language" in navigator ? navigator.language : navigator.browserLanguage).split(";");
-            for (var l = 0; l < defaultClientLocales.length; l++) {
-                var locale = tryLocale(defaultClientLocales[l]);
-                if (locale != null) {
-                    defaultClientLocale = locale;
-                    break;
-                }
-            }
-
-            for (var l = 0; l < supportedLocales.length; l++) {
-                var locale = supportedLocales[l];
-                if (loadLocale[locale]) {
-                    // XXX adim includeJavascriptFiles(Timeline.urlPrefix + "scripts/l10n/" + locale + "/", localizedJavascriptFiles);
-                    // XXX adim includeCssFiles(Timeline.urlPrefix + "styles/l10n/" + locale + "/", localizedCssFiles);
-                }
-            }
-
-            if (forceLocale == null) {
-              Timeline.serverLocale = defaultServerLocale;
-              Timeline.clientLocale = defaultClientLocale;
-            } else {
-              Timeline.serverLocale = forceLocale;
-              Timeline.clientLocale = forceLocale;
-            }
-        } catch (e) {
-            alert(e);
-        }
-    };
-
-    /*
-     *  Load SimileAjax if it's not already loaded
-     */
-    if (typeof SimileAjax == "undefined") {
-        window.SimileAjax_onLoad = loadMe;
-
-        var url = useLocalResources ?
-            "http://127.0.0.1:9999/ajax/api/simile-ajax-api.js?bundle=false" :
-            "http://api.simile-widgets.org/ajax/" + simile_ajax_ver + "/simile-ajax-api.js";
-        if (typeof Timeline_ajax_url == "string") {
-           url = Timeline_ajax_url;
-        }
-        var createScriptElement = function() {
-            var script = document.createElement("script");
-            script.type = "text/javascript";
-            script.language = "JavaScript";
-            script.src = url;
-            document.getElementsByTagName("head")[0].appendChild(script);
-        }
-        if (document.body == null) {
-            try {
-                document.write("<script src='" + url + "' type='text/javascript'></script>");
-            } catch (e) {
-                createScriptElement();
-            }
-        } else {
-            createScriptElement();
-        }
-    } else {
-        loadMe();
-    }
-})();
-/*
- *
- * Coding standards:
- *
- * We aim towards Douglas Crockford's Javascript conventions.
- * See:  http://javascript.crockford.com/code.html
- * See also: http://www.crockford.com/javascript/javascript.html
- *
- * That said, this JS code was written before some recent JS
- * support libraries became widely used or available.
- * In particular, the _ character is used to indicate a class function or
- * variable that should be considered private to the class.
- *
- * The code mostly uses accessor methods for getting/setting the private
- * class variables.
- *
- * Over time, we'd like to formalize the convention by using support libraries
- * which enforce privacy in objects.
- *
- * We also want to use jslint:  http://www.jslint.com/
- *
- *
- *
- */
-
-
-
-/*
- *  Timeline VERSION
- *
- */
-// Note: version is also stored in the build.xml file
-Timeline.version = 'pre 2.4.0';  // use format 'pre 1.2.3' for trunk versions
-Timeline.ajax_lib_version = SimileAjax.version;
-Timeline.display_version = Timeline.version + ' (with Ajax lib ' + Timeline.ajax_lib_version + ')';
- // cf method Timeline.writeVersion
-
-/*
- *  Timeline
- *
- */
-Timeline.strings = {}; // localization string tables
-Timeline.HORIZONTAL = 0;
-Timeline.VERTICAL = 1;
-Timeline._defaultTheme = null;
-
-Timeline.getDefaultLocale = function() {
-    return Timeline.clientLocale;
-};
-
-Timeline.create = function(elmt, bandInfos, orientation, unit) {
-    if (Timeline.timelines == null) {
-        Timeline.timelines = [];
-        // Timeline.timelines array can have null members--Timelines that
-        // once existed on the page, but were later disposed of.
-    }
-
-    var timelineID = Timeline.timelines.length;
-    Timeline.timelines[timelineID] = null; // placeholder until we have the object
-    var new_tl = new Timeline._Impl(elmt, bandInfos, orientation, unit,
-      timelineID);
-    Timeline.timelines[timelineID] = new_tl;
-    return new_tl;
-};
-
-Timeline.createBandInfo = function(params) {
-    var theme = ("theme" in params) ? params.theme : Timeline.getDefaultTheme();
-
-    var eventSource = ("eventSource" in params) ? params.eventSource : null;
-
-    var etherParams = {
-        interval:           SimileAjax.DateTime.gregorianUnitLengths[params.intervalUnit],
-        pixelsPerInterval: params.intervalPixels,
-	theme: theme
-    };
-    if ('startsOn' in params || 'endsOn' in params) {
-	if ('startsOn' in params) {
-	    etherParams.startsOn = params.startsOn;
-	}
-	if ('endsOn' in params) {
-	    etherParams.endsOn = params.endsOn;
-	}
-    } else {
-	etherParams.centersOn = ("date" in params) ? params.date : new Date();
-    }
-    var ether = new Timeline.LinearEther(etherParams);
-
-    var etherPainter = new Timeline.GregorianEtherPainter({
-        unit:       params.intervalUnit,
-        multiple:   ("multiple" in params) ? params.multiple : 1,
-        theme:      theme,
-        align:      ("align" in params) ? params.align : undefined
-    });
-
-    var eventPainterParams = {
-        showText:   ("showEventText" in params) ? params.showEventText : true,
-        theme:      theme
-    };
-    // pass in custom parameters for the event painter
-    if ("eventPainterParams" in params) {
-        for (var prop in params.eventPainterParams) {
-            eventPainterParams[prop] = params.eventPainterParams[prop];
-        }
-    }
-
-    if ("trackHeight" in params) {
-        eventPainterParams.trackHeight = params.trackHeight;
-    }
-    if ("trackGap" in params) {
-        eventPainterParams.trackGap = params.trackGap;
-    }
-
-    var layout = ("overview" in params && params.overview) ? "overview" : ("layout" in params ? params.layout : "original");
-    var eventPainter;
-    if ("eventPainter" in params) {
-        eventPainter = new params.eventPainter(eventPainterParams);
-    } else {
-        switch (layout) {
-            case "overview" :
-                eventPainter = new Timeline.OverviewEventPainter(eventPainterParams);
-                break;
-            case "detailed" :
-                eventPainter = new Timeline.DetailedEventPainter(eventPainterParams);
-                break;
-            default:
-                eventPainter = new Timeline.OriginalEventPainter(eventPainterParams);
-        }
-    }
-
-    return {
-        width:          params.width,
-        eventSource:    eventSource,
-        timeZone:       ("timeZone" in params) ? params.timeZone : 0,
-        ether:          ether,
-        etherPainter:   etherPainter,
-        eventPainter:   eventPainter,
-        theme:          theme,
-        zoomIndex:      ("zoomIndex" in params) ? params.zoomIndex : 0,
-        zoomSteps:      ("zoomSteps" in params) ? params.zoomSteps : null
-    };
-};
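A usage sketch for the factory functions above, assuming the pieces provided by the other bundled files (Timeline.DefaultEventSource from sources.js, Timeline.ClassicTheme from themes.js, the SimileAjax.DateTime unit constants) plus a hypothetical container element and data URL:

    // Sketch only: "my-timeline" and "events.xml" are hypothetical.
    var eventSource = new Timeline.DefaultEventSource();   // from sources.js (assumed)

    var bandInfos = [
        Timeline.createBandInfo({
            eventSource:    eventSource,
            date:           new Date(),
            width:          "70%",
            intervalUnit:   SimileAjax.DateTime.MONTH,
            intervalPixels: 100
        }),
        Timeline.createBandInfo({
            eventSource:    eventSource,
            date:           new Date(),
            width:          "30%",
            intervalUnit:   SimileAjax.DateTime.YEAR,
            intervalPixels: 200,
            layout:         "overview"       // selects Timeline.OverviewEventPainter
        })
    ];
    bandInfos[1].syncWith = 0;       // keep the overview band in sync with band 0
    bandInfos[1].highlight = true;

    var tl = Timeline.create(document.getElementById("my-timeline"),
                             bandInfos, Timeline.HORIZONTAL);
    tl.loadXML("events.xml", function(xml, url) {
        eventSource.loadXML(xml, url);   // DefaultEventSource API (assumed)
    });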
-
-Timeline.createHotZoneBandInfo = function(params) {
-    var theme = ("theme" in params) ? params.theme : Timeline.getDefaultTheme();
-
-    var eventSource = ("eventSource" in params) ? params.eventSource : null;
-
-    var ether = new Timeline.HotZoneEther({
-        centersOn:          ("date" in params) ? params.date : new Date(),
-        interval:           SimileAjax.DateTime.gregorianUnitLengths[params.intervalUnit],
-        pixelsPerInterval:  params.intervalPixels,
-        zones:              params.zones,
-        theme:              theme
-    });
-
-    var etherPainter = new Timeline.HotZoneGregorianEtherPainter({
-        unit:       params.intervalUnit,
-        zones:      params.zones,
-        theme:      theme,
-        align:      ("align" in params) ? params.align : undefined
-    });
-
-    var eventPainterParams = {
-        showText:   ("showEventText" in params) ? params.showEventText : true,
-        theme:      theme
-    };
-    // pass in custom parameters for the event painter
-    if ("eventPainterParams" in params) {
-        for (var prop in params.eventPainterParams) {
-            eventPainterParams[prop] = params.eventPainterParams[prop];
-        }
-    }
-    if ("trackHeight" in params) {
-        eventPainterParams.trackHeight = params.trackHeight;
-    }
-    if ("trackGap" in params) {
-        eventPainterParams.trackGap = params.trackGap;
-    }
-
-    var layout = ("overview" in params && params.overview) ? "overview" : ("layout" in params ? params.layout : "original");
-    var eventPainter;
-    if ("eventPainter" in params) {
-        eventPainter = new params.eventPainter(eventPainterParams);
-    } else {
-        switch (layout) {
-            case "overview" :
-                eventPainter = new Timeline.OverviewEventPainter(eventPainterParams);
-                break;
-            case "detailed" :
-                eventPainter = new Timeline.DetailedEventPainter(eventPainterParams);
-                break;
-            default:
-                eventPainter = new Timeline.OriginalEventPainter(eventPainterParams);
-        }
-    }
-    return {
-        width:          params.width,
-        eventSource:    eventSource,
-        timeZone:       ("timeZone" in params) ? params.timeZone : 0,
-        ether:          ether,
-        etherPainter:   etherPainter,
-        eventPainter:   eventPainter,
-        theme:          theme,
-        zoomIndex:      ("zoomIndex" in params) ? params.zoomIndex : 0,
-        zoomSteps:      ("zoomSteps" in params) ? params.zoomSteps : null
-    };
-};
-
-Timeline.getDefaultTheme = function() {
-    if (Timeline._defaultTheme == null) {
-        Timeline._defaultTheme = Timeline.ClassicTheme.create(Timeline.getDefaultLocale());
-    }
-    return Timeline._defaultTheme;
-};
-
-Timeline.setDefaultTheme = function(theme) {
-    Timeline._defaultTheme = theme;
-};
-
-Timeline.loadXML = function(url, f) {
-    var fError = function(statusText, status, xmlhttp) {
-        alert("Failed to load data xml from " + url + "\n" + statusText);
-    };
-    var fDone = function(xmlhttp) {
-        var xml = xmlhttp.responseXML;
-        if (!xml.documentElement && xmlhttp.responseStream) {
-            xml.load(xmlhttp.responseStream);
-        }
-        f(xml, url);
-    };
-    SimileAjax.XmlHttp.get(url, fError, fDone);
-};
-
-
-Timeline.loadJSON = function(url, f) {
-    var fError = function(statusText, status, xmlhttp) {
-        alert("Failed to load json data from " + url + "\n" + statusText);
-    };
-    var fDone = function(xmlhttp) {
-        f(eval('(' + xmlhttp.responseText + ')'), url);
-    };
-    SimileAjax.XmlHttp.get(url, fError, fDone);
-};
-
-Timeline.getTimelineFromID = function(timelineID) {
-    return Timeline.timelines[timelineID];
-};
-
-// Write the current Timeline version as the contents of element with id el_id
-Timeline.writeVersion = function(el_id) {
-  document.getElementById(el_id).innerHTML = this.display_version;
-};
-
-
-
-/*
- *  Timeline Implementation object
- *
- */
-Timeline._Impl = function(elmt, bandInfos, orientation, unit, timelineID) {
-    SimileAjax.WindowManager.initialize();
-
-    this._containerDiv = elmt;
-
-    this._bandInfos = bandInfos;
-    this._orientation = orientation == null ? Timeline.HORIZONTAL : orientation;
-    this._unit = (unit != null) ? unit : SimileAjax.NativeDateUnit;
-    this._starting = true; // is the Timeline being created? Used by autoWidth
-                           // functions
-    this._autoResizing = false;
-
-    // autoWidth is a "public" property of the Timeline object
-    this.autoWidth = bandInfos && bandInfos[0] && bandInfos[0].theme &&
-                     bandInfos[0].theme.autoWidth;
-    this.autoWidthAnimationTime = bandInfos && bandInfos[0] && bandInfos[0].theme &&
-                     bandInfos[0].theme.autoWidthAnimationTime;
-    this.timelineID = timelineID; // also public attribute
-    this.timeline_start = bandInfos && bandInfos[0] && bandInfos[0].theme &&
-                     bandInfos[0].theme.timeline_start;
-    this.timeline_stop  = bandInfos && bandInfos[0] && bandInfos[0].theme &&
-                     bandInfos[0].theme.timeline_stop;
-    this.timeline_at_start = false; // already at start or stop? Then won't
-    this.timeline_at_stop = false;  // try to move further in the wrong direction
-
-    this._initialize();
-};
-
-//
-// Public functions used by client sw
-//
-Timeline._Impl.prototype.dispose = function() {
-    for (var i = 0; i < this._bands.length; i++) {
-        this._bands[i].dispose();
-    }
-    this._bands = null;
-    this._bandInfos = null;
-    this._containerDiv.innerHTML = "";
-    // remove from array of Timelines
-    Timeline.timelines[this.timelineID] = null;
-};
-
-Timeline._Impl.prototype.getBandCount = function() {
-    return this._bands.length;
-};
-
-Timeline._Impl.prototype.getBand = function(index) {
-    return this._bands[index];
-};
-
-Timeline._Impl.prototype.finishedEventLoading = function() {
-    // Called by client after events have been loaded into Timeline
-    // Only used if the client has set autoWidth
-    // Sets width to Timeline's requested amount and will shrink down the div if
-    // need be.
-    this._autoWidthCheck(true);
-    this._starting = false;
-};
-
-Timeline._Impl.prototype.layout = function() {
-    // called by client when browser is resized
-    this._autoWidthCheck(true);
-    this._distributeWidths();
-};
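A sketch of the autoWidth workflow that finishedEventLoading() and layout() exist for, assuming Timeline.ClassicTheme from themes.js and a hypothetical container element:

    // Sketch only: autoWidth is driven entirely by band 0's theme.
    var theme = Timeline.ClassicTheme.create();
    theme.autoWidth = true;                  // let the Timeline grow to fit its event tracks
    theme.autoWidthAnimationTime = 500;      // duration handed to SimileAjax.jQuery(...).animate()

    var bandInfos = [Timeline.createBandInfo({
        theme:          theme,
        date:           new Date(),
        width:          200,                 // keep widths as integers when autoWidth is on
        intervalUnit:   SimileAjax.DateTime.MONTH,
        intervalPixels: 100
    })];
    var tl = Timeline.create(document.getElementById("my-timeline"), bandInfos);

    // After loading events, let the Timeline resize itself, and redistribute
    // band widths when the browser window is resized:
    tl.finishedEventLoading();
    window.onresize = function() { tl.layout(); };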
-
-Timeline._Impl.prototype.paint = function() {
-    for (var i = 0; i < this._bands.length; i++) {
-        this._bands[i].paint();
-    }
-};
-
-Timeline._Impl.prototype.getDocument = function() {
-    return this._containerDiv.ownerDocument;
-};
-
-Timeline._Impl.prototype.addDiv = function(div) {
-    this._containerDiv.appendChild(div);
-};
-
-Timeline._Impl.prototype.removeDiv = function(div) {
-    this._containerDiv.removeChild(div);
-};
-
-Timeline._Impl.prototype.isHorizontal = function() {
-    return this._orientation == Timeline.HORIZONTAL;
-};
-
-Timeline._Impl.prototype.isVertical = function() {
-    return this._orientation == Timeline.VERTICAL;
-};
-
-Timeline._Impl.prototype.getPixelLength = function() {
-    return this._orientation == Timeline.HORIZONTAL ?
-        this._containerDiv.offsetWidth : this._containerDiv.offsetHeight;
-};
-
-Timeline._Impl.prototype.getPixelWidth = function() {
-    return this._orientation == Timeline.VERTICAL ?
-        this._containerDiv.offsetWidth : this._containerDiv.offsetHeight;
-};
-
-Timeline._Impl.prototype.getUnit = function() {
-    return this._unit;
-};
-
-Timeline._Impl.prototype.getWidthStyle = function() {
-    // which element.style attribute should be changed to affect Timeline's "width"
-    return this._orientation == Timeline.HORIZONTAL ? 'height' : 'width';
-};
-
-Timeline._Impl.prototype.loadXML = function(url, f) {
-    var tl = this;
-
-
-    var fError = function(statusText, status, xmlhttp) {
-        alert("Failed to load data xml from " + url + "\n" + statusText);
-        tl.hideLoadingMessage();
-    };
-    var fDone = function(xmlhttp) {
-        try {
-            var xml = xmlhttp.responseXML;
-            if (!xml.documentElement && xmlhttp.responseStream) {
-                xml.load(xmlhttp.responseStream);
-            }
-            f(xml, url);
-        } finally {
-            tl.hideLoadingMessage();
-        }
-    };
-
-    this.showLoadingMessage();
-    window.setTimeout(function() { SimileAjax.XmlHttp.get(url, fError, fDone); }, 0);
-};
-
-Timeline._Impl.prototype.loadJSON = function(url, f) {
-    var tl = this;
-
-    var fError = function(statusText, status, xmlhttp) {
-        alert("Failed to load json data from " + url + "\n" + statusText);
-        tl.hideLoadingMessage();
-    };
-    var fDone = function(xmlhttp) {
-        try {
-            f(eval('(' + xmlhttp.responseText + ')'), url);
-        } finally {
-            tl.hideLoadingMessage();
-        }
-    };
-
-    this.showLoadingMessage();
-    window.setTimeout(function() { SimileAjax.XmlHttp.get(url, fError, fDone); }, 0);
-};
-
-
-//
-// Private functions used by Timeline object functions
-//
-
-Timeline._Impl.prototype._autoWidthScrollListener = function(band) {
-    band.getTimeline()._autoWidthCheck(false);
-};
-
-// called to re-calculate auto width and adjust the overall Timeline div if needed
-Timeline._Impl.prototype._autoWidthCheck = function(okToShrink) {
-    var timeline = this; // this Timeline
-    var immediateChange = timeline._starting;
-    var newWidth = 0;
-
-    function changeTimelineWidth() {
-        var widthStyle = timeline.getWidthStyle();
-        if (immediateChange) {
-            timeline._containerDiv.style[widthStyle] = newWidth + 'px';
-        } else {
-        	  // animate change
-        	  timeline._autoResizing = true;
-        	  var animateParam ={};
-        	  animateParam[widthStyle] = newWidth + 'px';
-
-        	  SimileAjax.jQuery(timeline._containerDiv).animate(
-        	      animateParam, timeline.autoWidthAnimationTime,
-        	      'linear', function(){timeline._autoResizing = false;});
-        }
-    }
-
-    function checkTimelineWidth() {
-        var targetWidth = 0; // the new desired width
-        var currentWidth = timeline.getPixelWidth();
-
-        if (timeline._autoResizing) {
-        	return; // early return
-        }
-
-        // compute targetWidth
-        for (var i = 0; i < timeline._bands.length; i++) {
-            timeline._bands[i].checkAutoWidth();
-            targetWidth += timeline._bandInfos[i].width;
-        }
-
-        if (targetWidth > currentWidth || okToShrink) {
-            // yes, let's change the size
-            newWidth = targetWidth;
-            changeTimelineWidth();
-            timeline._distributeWidths();
-        }
-    }
-
-    // function's mainline
-    if (!timeline.autoWidth) {
-        return; // early return
-    }
-
-    checkTimelineWidth();
-};
-
-Timeline._Impl.prototype._initialize = function() {
-    var containerDiv = this._containerDiv;
-    var doc = containerDiv.ownerDocument;
-
-    containerDiv.className =
-        containerDiv.className.split(" ").concat("timeline-container").join(" ");
-
-	/*
-	 * Set css-class on container div that will define orientation
-	 */
-	var orientation = (this.isHorizontal()) ? 'horizontal' : 'vertical'
-	containerDiv.className +=' timeline-'+orientation;
-
-
-    while (containerDiv.firstChild) {
-        containerDiv.removeChild(containerDiv.firstChild);
-    }
-
-    /*
-     *  inserting copyright and link to simile
-     */
-    var elmtCopyright = SimileAjax.Graphics.createTranslucentImage(Timeline.urlPrefix + (this.isHorizontal() ? "images/copyright-vertical.png" : "images/copyright.png"));
-    elmtCopyright.className = "timeline-copyright";
-    elmtCopyright.title = "Timeline copyright SIMILE - www.code.google.com/p/simile-widgets/";
-    SimileAjax.DOM.registerEvent(elmtCopyright, "click", function() { window.location = "http://www.simile-widgets.org/"; });
-    containerDiv.appendChild(elmtCopyright);
-
-    /*
-     *  creating bands
-     */
-    this._bands = [];
-    for (var i = 0; i < this._bandInfos.length; i++) {
-        var bandInfo = this._bandInfos[i];
-        var bandClass = bandInfo.bandClass || Timeline._Band;
-        var band = new bandClass(this, this._bandInfos[i], i);
-        this._bands.push(band);
-    }
-    this._distributeWidths();
-
-    /*
-     *  sync'ing bands
-     */
-    for (var i = 0; i < this._bandInfos.length; i++) {
-        var bandInfo = this._bandInfos[i];
-        if ("syncWith" in bandInfo) {
-            this._bands[i].setSyncWithBand(
-                this._bands[bandInfo.syncWith],
-                ("highlight" in bandInfo) ? bandInfo.highlight : false
-            );
-        }
-    }
-
-
-    if (this.autoWidth) {
-        for (var i = 0; i < this._bands.length; i++) {
-            this._bands[i].addOnScrollListener(this._autoWidthScrollListener);
-        }
-    }
-
-
-    /*
-     *  creating loading UI
-     */
-    var message = SimileAjax.Graphics.createMessageBubble(doc);
-    message.containerDiv.className = "timeline-message-container";
-    containerDiv.appendChild(message.containerDiv);
-
-    message.contentDiv.className = "timeline-message";
-    message.contentDiv.innerHTML = "<img src='" + Timeline.urlPrefix + "images/progress-running.gif' /> Loading...";
-
-    this.showLoadingMessage = function() { message.containerDiv.style.display = "block"; };
-    this.hideLoadingMessage = function() { message.containerDiv.style.display = "none"; };
-};
-
-Timeline._Impl.prototype._distributeWidths = function() {
-    var length = this.getPixelLength();
-    var width = this.getPixelWidth();
-    var cumulativeWidth = 0;
-
-    for (var i = 0; i < this._bands.length; i++) {
-        var band = this._bands[i];
-        var bandInfos = this._bandInfos[i];
-        var widthString = bandInfos.width;
-        var bandWidth;
-
-        if (typeof widthString == 'string') {
-          var x =  widthString.indexOf("%");
-          if (x > 0) {
-              var percent = parseInt(widthString.substr(0, x));
-              bandWidth = Math.round(percent * width / 100);
-          } else {
-              bandWidth = parseInt(widthString);
-          }
-        } else {
-        	// was given an integer
-        	bandWidth = widthString;
-        }
-
-        band.setBandShiftAndWidth(cumulativeWidth, bandWidth);
-        band.setViewLength(length);
-
-        cumulativeWidth += bandWidth;
-    }
-};
-
-Timeline._Impl.prototype.shiftOK = function(index, shift) {
-    // Returns true if the proposed shift is ok
-    //
-    // Positive shift means going back in time
-    var going_back = shift > 0,
-        going_forward = shift < 0;
-
-    // Is there an edge?
-    if ((going_back    && this.timeline_start == null) ||
-        (going_forward && this.timeline_stop  == null) ||
-        (shift == 0)) {
-        return (true);  // early return
-    }
-
-    // If any of the bands has noted that it is changing the others,
-    // then this shift is a secondary shift in reaction to the real shift,
-    // which already happened. In such cases, ignore it. (The issue is
-    // that a positive original shift can cause a negative secondary shift,
-    // as the bands adjust.)
-    var secondary_shift = false;
-    for (var i = 0; i < this._bands.length && !secondary_shift; i++) {
-       secondary_shift = this._bands[i].busy();
-    }
-    if (secondary_shift) {
-        return(true); // early return
-    }
-
-    // If we are already at an edge, then don't even think about going any further
-    if ((going_back    && this.timeline_at_start) ||
-        (going_forward && this.timeline_at_stop)) {
-        return (false);  // early return
-    }
-
-    // Need to check all the bands
-    var ok = false; // return value
-    // If any of the bands will be or are showing an ok date, then let the shift proceed.
-    for (var i = 0; i < this._bands.length && !ok; i++) {
-       var band = this._bands[i];
-       if (going_back) {
-           ok = (i == index ? band.getMinVisibleDateAfterDelta(shift) : band.getMinVisibleDate())
-                >= this.timeline_start;
-       } else {
-           ok = (i == index ? band.getMaxVisibleDateAfterDelta(shift) : band.getMaxVisibleDate())
-                <= this.timeline_stop;
-       }
-    }
-
-    // process results
-    if (going_back) {
-       this.timeline_at_start = !ok;
-       this.timeline_at_stop = false;
-    } else {
-       this.timeline_at_stop = !ok;
-       this.timeline_at_start = false;
-    }
-    // This is where you could have an effect once per hitting an
-    // edge of the Timeline. Eg jitter the Timeline
-    //if (!ok) {
-        //alert(going_back ? "At beginning" : "At end");
-    //}
-    return (ok);
-};
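A sketch of how the panning limits checked by shiftOK() are set up, assuming Timeline.ClassicTheme from themes.js; the dates are hypothetical:

    // Sketch only: the constructor above reads these limits from bandInfos[0].theme,
    // and shiftOK() then refuses shifts that would scroll past either edge.
    var theme = Timeline.ClassicTheme.create();
    theme.timeline_start = new Date(Date.UTC(2000, 0, 1));   // earliest reachable date
    theme.timeline_stop  = new Date(Date.UTC(2010, 0, 1));   // latest reachable date
    // Pass this theme in Timeline.createBandInfo({theme: theme, ...}) for the first band.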
-
-Timeline._Impl.prototype.zoom = function (zoomIn, x, y, target) {
-  var matcher = new RegExp("^timeline-band-([0-9]+)$");
-  var bandIndex = null;
-
-  var result = matcher.exec(target.id);
-  if (result) {
-    bandIndex = parseInt(result[1]);
-  }
-
-  if (bandIndex != null) {
-    this._bands[bandIndex].zoom(zoomIn, x, y, target);
-  }
-
-  this.paint();
-};
-
-/*
- *
- * Coding standards:
- *
- * We aim towards Douglas Crockford's Javascript conventions.
- * See:  http://javascript.crockford.com/code.html
- * See also: http://www.crockford.com/javascript/javascript.html
- *
- * That said, this JS code was written before some recent JS
- * support libraries became widely used or available.
- * In particular, the _ character is used to indicate a class function or
- * variable that should be considered private to the class.
- *
- * The code mostly uses accessor methods for getting/setting the private
- * class variables.
- *
- * Over time, we'd like to formalize the convention by using support libraries
- * which enforce privacy in objects.
- *
- * We also want to use jslint:  http://www.jslint.com/
- *
- *
- *
- */
-
-
-
-/*
- *  Band
- *
- */
-Timeline._Band = function(timeline, bandInfo, index) {
-    // hack for easier subclassing
-    if (timeline !== undefined) {
-        this.initialize(timeline, bandInfo, index);
-    }
-};
-
-Timeline._Band.prototype.initialize = function(timeline, bandInfo, index) {
-    // Set up the band's object
-
-    // Munge params: If autoWidth is on for the Timeline, then ensure that
-    // bandInfo.width is an integer
-    if (timeline.autoWidth && typeof bandInfo.width == 'string') {
-        bandInfo.width = bandInfo.width.indexOf("%") > -1 ? 0 : parseInt(bandInfo.width);
-    }
-
-    this._timeline = timeline;
-    this._bandInfo = bandInfo;
-
-    this._index = index;
-
-    this._locale = ("locale" in bandInfo) ? bandInfo.locale : Timeline.getDefaultLocale();
-    this._timeZone = ("timeZone" in bandInfo) ? bandInfo.timeZone : 0;
-    this._labeller = ("labeller" in bandInfo) ? bandInfo.labeller :
-        (("createLabeller" in timeline.getUnit()) ?
-            timeline.getUnit().createLabeller(this._locale, this._timeZone) :
-            new Timeline.GregorianDateLabeller(this._locale, this._timeZone));
-    this._theme = bandInfo.theme;
-    this._zoomIndex = ("zoomIndex" in bandInfo) ? bandInfo.zoomIndex : 0;
-    this._zoomSteps = ("zoomSteps" in bandInfo) ? bandInfo.zoomSteps : null;
-
-    this._dragging = false;
-    this._changing = false;
-    this._originalScrollSpeed = 5; // pixels
-    this._scrollSpeed = this._originalScrollSpeed;
-    this._viewOrthogonalOffset= 0; // vertical offset if the timeline is horizontal, and vice versa
-    this._onScrollListeners = [];
-
-    var b = this;
-    this._syncWithBand = null;
-    this._syncWithBandHandler = function(band) {
-        b._onHighlightBandScroll();
-    };
-    this._selectorListener = function(band) {
-        b._onHighlightBandScroll();
-    };
-
-    /*
-     *  Install a textbox to capture keyboard events
-     */
-    var inputDiv = this._timeline.getDocument().createElement("div");
-    inputDiv.className = "timeline-band-input";
-    this._timeline.addDiv(inputDiv);
-
-    this._keyboardInput = document.createElement("input");
-    this._keyboardInput.type = "text";
-    inputDiv.appendChild(this._keyboardInput);
-    SimileAjax.DOM.registerEventWithObject(this._keyboardInput, "keydown", this, "_onKeyDown");
-    SimileAjax.DOM.registerEventWithObject(this._keyboardInput, "keyup", this, "_onKeyUp");
-
-    /*
-     *  The band's outermost div that slides with respect to the timeline's div
-     */
-    this._div = this._timeline.getDocument().createElement("div");
-    this._div.id = "timeline-band-" + index;
-    this._div.className = "timeline-band timeline-band-" + index;
-    this._timeline.addDiv(this._div);
-
-    SimileAjax.DOM.registerEventWithObject(this._div, "mousedown", this, "_onMouseDown");
-    SimileAjax.DOM.registerEventWithObject(this._div, "mousemove", this, "_onMouseMove");
-    SimileAjax.DOM.registerEventWithObject(this._div, "mouseup", this, "_onMouseUp");
-    SimileAjax.DOM.registerEventWithObject(this._div, "mouseout", this, "_onMouseOut");
-    SimileAjax.DOM.registerEventWithObject(this._div, "dblclick", this, "_onDblClick");
-
-    var mouseWheel = this._theme!= null ? this._theme.mouseWheel : 'scroll'; // theme is not always defined
-    if (mouseWheel === 'zoom' || mouseWheel === 'scroll' || this._zoomSteps) {
-        // capture mouse scroll
-        if (SimileAjax.Platform.browser.isFirefox) {
-            SimileAjax.DOM.registerEventWithObject(this._div, "DOMMouseScroll", this, "_onMouseScroll");
-        } else {
-            SimileAjax.DOM.registerEventWithObject(this._div, "mousewheel", this, "_onMouseScroll");
-        }
-    }
-
-    /*
-     *  The inner div that contains layers
-     */
-    this._innerDiv = this._timeline.getDocument().createElement("div");
-    this._innerDiv.className = "timeline-band-inner";
-    this._div.appendChild(this._innerDiv);
-
-    /*
-     *  Initialize parts of the band
-     */
-    this._ether = bandInfo.ether;
-    bandInfo.ether.initialize(this, timeline);
-
-    this._etherPainter = bandInfo.etherPainter;
-    bandInfo.etherPainter.initialize(this, timeline);
-
-    this._eventSource = bandInfo.eventSource;
-    if (this._eventSource) {
-        this._eventListener = {
-            onAddMany: function() { b._onAddMany(); },
-            onClear:   function() { b._onClear(); }
-        }
-        this._eventSource.addListener(this._eventListener);
-    }
-
-    this._eventPainter = bandInfo.eventPainter;
-    this._eventTracksNeeded = 0;   // set by painter via updateEventTrackInfo
-    this._eventTrackIncrement = 0;
-    bandInfo.eventPainter.initialize(this, timeline);
-
-    this._decorators = ("decorators" in bandInfo) ? bandInfo.decorators : [];
-    for (var i = 0; i < this._decorators.length; i++) {
-        this._decorators[i].initialize(this, timeline);
-    }
-};
-
-Timeline._Band.SCROLL_MULTIPLES = 5;
-
-Timeline._Band.prototype.dispose = function() {
-    this.closeBubble();
-
-    if (this._eventSource) {
-        this._eventSource.removeListener(this._eventListener);
-        this._eventListener = null;
-        this._eventSource = null;
-    }
-
-    this._timeline = null;
-    this._bandInfo = null;
-
-    this._labeller = null;
-    this._ether = null;
-    this._etherPainter = null;
-    this._eventPainter = null;
-    this._decorators = null;
-
-    this._onScrollListeners = null;
-    this._syncWithBandHandler = null;
-    this._selectorListener = null;
-
-    this._div = null;
-    this._innerDiv = null;
-    this._keyboardInput = null;
-};
-
-Timeline._Band.prototype.addOnScrollListener = function(listener) {
-    this._onScrollListeners.push(listener);
-};
-
-Timeline._Band.prototype.removeOnScrollListener = function(listener) {
-    for (var i = 0; i < this._onScrollListeners.length; i++) {
-        if (this._onScrollListeners[i] == listener) {
-            this._onScrollListeners.splice(i, 1);
-            break;
-        }
-    }
-};
-
-Timeline._Band.prototype.setSyncWithBand = function(band, highlight) {
-    if (this._syncWithBand) {
-        this._syncWithBand.removeOnScrollListener(this._syncWithBandHandler);
-    }
-
-    this._syncWithBand = band;
-    this._syncWithBand.addOnScrollListener(this._syncWithBandHandler);
-    this._highlight = highlight;
-    this._positionHighlight();
-};
-
-Timeline._Band.prototype.getLocale = function() {
-    return this._locale;
-};
-
-Timeline._Band.prototype.getTimeZone = function() {
-    return this._timeZone;
-};
-
-Timeline._Band.prototype.getLabeller = function() {
-    return this._labeller;
-};
-
-Timeline._Band.prototype.getIndex = function() {
-    return this._index;
-};
-
-Timeline._Band.prototype.getEther = function() {
-    return this._ether;
-};
-
-Timeline._Band.prototype.getEtherPainter = function() {
-    return this._etherPainter;
-};
-
-Timeline._Band.prototype.getEventSource = function() {
-    return this._eventSource;
-};
-
-Timeline._Band.prototype.getEventPainter = function() {
-    return this._eventPainter;
-};
-
-Timeline._Band.prototype.getTimeline = function() {
-    return this._timeline;
-};
-
-// Autowidth support
-Timeline._Band.prototype.updateEventTrackInfo = function(tracks, increment) {
-    this._eventTrackIncrement = increment; // doesn't vary for a specific band
-
-    if (tracks > this._eventTracksNeeded) {
-        this._eventTracksNeeded = tracks;
-    }
-};
-
-// Autowidth support
-Timeline._Band.prototype.checkAutoWidth = function() {
-    // If the band needs a new (larger) width, update the band's bandInfo.width.
-    //
-    // The desired width for the band is
-    //   (number of tracks + margin) * track increment, plus the track offset
-    if (! this._timeline.autoWidth) {
-      return; // early return
-    }
-
-    var overviewBand = this._eventPainter.getType() == 'overview';
-    var margin = overviewBand ?
-       this._theme.event.overviewTrack.autoWidthMargin :
-       this._theme.event.track.autoWidthMargin;
-    var desiredWidth = Math.ceil((this._eventTracksNeeded + margin) *
-                       this._eventTrackIncrement);
-    // add offset amount (additional margin)
-    desiredWidth += overviewBand ? this._theme.event.overviewTrack.offset :
-                                   this._theme.event.track.offset;
-    var bandInfo = this._bandInfo;
-
-    if (desiredWidth != bandInfo.width) {
-        bandInfo.width = desiredWidth;
-    }
-};
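/*
 * Worked example for checkAutoWidth above (illustrative numbers): with the
 * default ClassicTheme track settings defined later in this file
 * (height 10px, gap 2px, offset 2px, autoWidthMargin 1.5), and assuming the
 * event painter reported a 12px track increment and 4 needed tracks:
 *
 *   desiredWidth = ceil((4 + 1.5) * 12) + 2 = 68px
 *
 * so bandInfo.width is bumped to 68 if it was smaller.
 */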
-
-Timeline._Band.prototype.layout = function() {
-    this.paint();
-};
-
-Timeline._Band.prototype.paint = function() {
-    this._etherPainter.paint();
-    this._paintDecorators();
-    this._paintEvents();
-};
-
-Timeline._Band.prototype.softLayout = function() {
-    this.softPaint();
-};
-
-Timeline._Band.prototype.softPaint = function() {
-    this._etherPainter.softPaint();
-    this._softPaintDecorators();
-    this._softPaintEvents();
-};
-
-Timeline._Band.prototype.setBandShiftAndWidth = function(shift, width) {
-    var inputDiv = this._keyboardInput.parentNode;
-    var middle = shift + Math.floor(width / 2);
-    if (this._timeline.isHorizontal()) {
-        this._div.style.top = shift + "px";
-        this._div.style.height = width + "px";
-
-        inputDiv.style.top = middle + "px";
-        inputDiv.style.left = "-1em";
-    } else {
-        this._div.style.left = shift + "px";
-        this._div.style.width = width + "px";
-
-        inputDiv.style.left = middle + "px";
-        inputDiv.style.top = "-1em";
-    }
-};
-
-Timeline._Band.prototype.getViewWidth = function() {
-    if (this._timeline.isHorizontal()) {
-        return this._div.offsetHeight;
-    } else {
-        return this._div.offsetWidth;
-    }
-};
-
-Timeline._Band.prototype.setViewLength = function(length) {
-    this._viewLength = length;
-    this._recenterDiv();
-    this._onChanging();
-};
-
-Timeline._Band.prototype.getViewLength = function() {
-    return this._viewLength;
-};
-
-Timeline._Band.prototype.getTotalViewLength = function() {
-    return Timeline._Band.SCROLL_MULTIPLES * this._viewLength;
-};
-
-Timeline._Band.prototype.getViewOffset = function() {
-    return this._viewOffset;
-};
-
-Timeline._Band.prototype.getMinDate = function() {
-    return this._ether.pixelOffsetToDate(this._viewOffset);
-};
-
-Timeline._Band.prototype.getMaxDate = function() {
-    return this._ether.pixelOffsetToDate(this._viewOffset + Timeline._Band.SCROLL_MULTIPLES * this._viewLength);
-};
-
-Timeline._Band.prototype.getMinVisibleDate = function() {
-    return this._ether.pixelOffsetToDate(0);
-};
-
-Timeline._Band.prototype.getMinVisibleDateAfterDelta = function(delta) {
-    return this._ether.pixelOffsetToDate(delta);
-};
-
-Timeline._Band.prototype.getMaxVisibleDate = function() {
-    // Max date currently visible on band
-    return this._ether.pixelOffsetToDate(this._viewLength);
-};
-
-Timeline._Band.prototype.getMaxVisibleDateAfterDelta = function(delta) {
-    // Max date visible on band after delta px view change is applied
-    return this._ether.pixelOffsetToDate(this._viewLength + delta);
-};
-
-Timeline._Band.prototype.getCenterVisibleDate = function() {
-    return this._ether.pixelOffsetToDate(this._viewLength / 2);
-};
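/*
 * Illustrative relationship between the getters above, for a horizontal band
 * whose visible length is 300px.  SCROLL_MULTIPLES is 5, so the inner div
 * spans 1500px, and after _recenterDiv the view offset is -600px:
 *
 *   getMinDate()        == ether.pixelOffsetToDate(-600)  // left edge of the scrollable div
 *   getMinVisibleDate() == ether.pixelOffsetToDate(0)     // left edge of the viewport
 *   getMaxVisibleDate() == ether.pixelOffsetToDate(300)   // right edge of the viewport
 *   getMaxDate()        == ether.pixelOffsetToDate(900)   // right edge of the scrollable div
 *
 * where "ether" is the band's ether object.
 */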
-
-Timeline._Band.prototype.setMinVisibleDate = function(date) {
-    if (!this._changing) {
-        this._moveEther(Math.round(-this._ether.dateToPixelOffset(date)));
-    }
-};
-
-Timeline._Band.prototype.setMaxVisibleDate = function(date) {
-    if (!this._changing) {
-        this._moveEther(Math.round(this._viewLength - this._ether.dateToPixelOffset(date)));
-    }
-};
-
-Timeline._Band.prototype.setCenterVisibleDate = function(date) {
-    if (!this._changing) {
-        this._moveEther(Math.round(this._viewLength / 2 - this._ether.dateToPixelOffset(date)));
-    }
-};
-
-Timeline._Band.prototype.dateToPixelOffset = function(date) {
-    return this._ether.dateToPixelOffset(date) - this._viewOffset;
-};
-
-Timeline._Band.prototype.pixelOffsetToDate = function(pixels) {
-    return this._ether.pixelOffsetToDate(pixels + this._viewOffset);
-};
-
-Timeline._Band.prototype.getViewOrthogonalOffset = function() {
-    return this._viewOrthogonalOffset;
-};
-
-Timeline._Band.prototype.setViewOrthogonalOffset = function(offset) {
-    this._viewOrthogonalOffset = Math.max(0, offset);
-};
-
-Timeline._Band.prototype.createLayerDiv = function(zIndex, className) {
-    var div = this._timeline.getDocument().createElement("div");
-    div.className = "timeline-band-layer" + (typeof className == "string" ? (" " + className) : "");
-    div.style.zIndex = zIndex;
-    this._innerDiv.appendChild(div);
-
-    var innerDiv = this._timeline.getDocument().createElement("div");
-    innerDiv.className = "timeline-band-layer-inner";
-    if (SimileAjax.Platform.browser.isIE) {
-        innerDiv.style.cursor = "move";
-    } else {
-        innerDiv.style.cursor = "-moz-grab";
-    }
-    div.appendChild(innerDiv);
-
-    return innerDiv;
-};
-
-Timeline._Band.prototype.removeLayerDiv = function(div) {
-    this._innerDiv.removeChild(div.parentNode);
-};
-
-Timeline._Band.prototype.scrollToCenter = function(date, f) {
-    var pixelOffset = this._ether.dateToPixelOffset(date);
-    if (pixelOffset < -this._viewLength / 2) {
-        this.setCenterVisibleDate(this.pixelOffsetToDate(pixelOffset + this._viewLength));
-    } else if (pixelOffset > 3 * this._viewLength / 2) {
-        this.setCenterVisibleDate(this.pixelOffsetToDate(pixelOffset - this._viewLength));
-    }
-    this._autoScroll(Math.round(this._viewLength / 2 - this._ether.dateToPixelOffset(date)), f);
-};
-
-Timeline._Band.prototype.showBubbleForEvent = function(eventID) {
-    var evt = this.getEventSource().getEvent(eventID);
-    if (evt) {
-        var self = this;
-        this.scrollToCenter(evt.getStart(), function() {
-            self._eventPainter.showBubble(evt);
-        });
-    }
-};
-
-Timeline._Band.prototype.zoom = function(zoomIn, x, y, target) {
-  if (!this._zoomSteps) {
-    // zoom disabled
-    return;
-  }
-
-  // shift the x value by our offset
-  x += this._viewOffset;
-
-  var zoomDate = this._ether.pixelOffsetToDate(x);
-  var netIntervalChange = this._ether.zoom(zoomIn);
-  this._etherPainter.zoom(netIntervalChange);
-
-  // shift our zoom date to the far left
-  this._moveEther(Math.round(-this._ether.dateToPixelOffset(zoomDate)));
-  // then shift it back to where the mouse was
-  this._moveEther(x);
-};
-
-Timeline._Band.prototype._onMouseDown = function(innerFrame, evt, target) {
-    this.closeBubble();
-
-    this._dragging = true;
-    this._dragX = evt.clientX;
-    this._dragY = evt.clientY;
-};
-
-Timeline._Band.prototype._onMouseMove = function(innerFrame, evt, target) {
-    if (this._dragging) {
-        var diffX = evt.clientX - this._dragX;
-        var diffY = evt.clientY - this._dragY;
-
-        this._dragX = evt.clientX;
-        this._dragY = evt.clientY;
-
-        if (this._timeline.isHorizontal()) {
-            this._moveEther(diffX, diffY);
-        } else {
-            this._moveEther(diffY, diffX);
-        }
-        this._positionHighlight();
-    }
-};
-
-Timeline._Band.prototype._onMouseUp = function(innerFrame, evt, target) {
-    this._dragging = false;
-    this._keyboardInput.focus();
-};
-
-Timeline._Band.prototype._onMouseOut = function(innerFrame, evt, target) {
-    var coords = SimileAjax.DOM.getEventRelativeCoordinates(evt, innerFrame);
-    coords.x += this._viewOffset;
-    if (coords.x < 0 || coords.x > innerFrame.offsetWidth ||
-        coords.y < 0 || coords.y > innerFrame.offsetHeight) {
-        this._dragging = false;
-    }
-};
-
-Timeline._Band.prototype._onMouseScroll = function(innerFrame, evt, target) {
-  var now = new Date();
-  now = now.getTime();
-
-  if (!this._lastScrollTime || ((now - this._lastScrollTime) > 50)) {
-    // throttle to one scroll per 50ms because FF3 sends multiple events back to back
-    this._lastScrollTime = now;
-
-    var delta = 0;
-    if (evt.wheelDelta) {
-      delta = evt.wheelDelta/120;
-    } else if (evt.detail) {
-      delta = -evt.detail/3;
-    }
-
-    // either scroll or zoom
-    var mouseWheel = this._theme.mouseWheel;
-
-    if (this._zoomSteps || mouseWheel === 'zoom') {
-      var loc = SimileAjax.DOM.getEventRelativeCoordinates(evt, innerFrame);
-      if (delta != 0) {
-        var zoomIn;
-        if (delta > 0)
-          zoomIn = true;
-        if (delta < 0)
-          zoomIn = false;
-        // call zoom on the timeline so we could zoom multiple bands if desired
-        this._timeline.zoom(zoomIn, loc.x, loc.y, innerFrame);
-      }
-    }
-    else if (mouseWheel === 'scroll') {
-      var move_amt = 50 * (delta < 0 ? -1 : 1);
-      this._moveEther(move_amt);
-    }
-  }
-
-  // prevent bubble
-  if (evt.stopPropagation) {
-    evt.stopPropagation();
-  }
-  evt.cancelBubble = true;
-
-  // prevent the default action
-  if (evt.preventDefault) {
-    evt.preventDefault();
-  }
-  evt.returnValue = false;
-};
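/*
 * Illustrative wheel-delta normalization for the handler above: browsers that
 * set evt.wheelDelta report +/-120 per notch (one notch up gives delta = 1),
 * while Gecko's DOMMouseScroll reports evt.detail = +/-3 with the opposite
 * sign (detail = 3 for one notch down also gives delta = -1).  In 'scroll'
 * mode one notch therefore moves the ether by 50px in the matching direction.
 */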
-
-Timeline._Band.prototype._onDblClick = function(innerFrame, evt, target) {
-    var coords = SimileAjax.DOM.getEventRelativeCoordinates(evt, innerFrame);
-    var distance = coords.x - (this._viewLength / 2 - this._viewOffset);
-
-    this._autoScroll(-distance);
-};
-
-Timeline._Band.prototype._onKeyDown = function(keyboardInput, evt, target) {
-    if (!this._dragging) {
-        switch (evt.keyCode) {
-        case 27: // ESC
-            break;
-        case 37: // left arrow
-        case 38: // up arrow
-            this._scrollSpeed = Math.min(50, Math.abs(this._scrollSpeed * 1.05));
-            this._moveEther(this._scrollSpeed);
-            break;
-        case 39: // right arrow
-        case 40: // down arrow
-            this._scrollSpeed = -Math.min(50, Math.abs(this._scrollSpeed * 1.05));
-            this._moveEther(this._scrollSpeed);
-            break;
-        default:
-            return true;
-        }
-        this.closeBubble();
-
-        SimileAjax.DOM.cancelEvent(evt);
-        return false;
-    }
-    return true;
-};
-
-Timeline._Band.prototype._onKeyUp = function(keyboardInput, evt, target) {
-    if (!this._dragging) {
-        this._scrollSpeed = this._originalScrollSpeed;
-
-        switch (evt.keyCode) {
-        case 35: // end
-            this.setCenterVisibleDate(this._eventSource.getLatestDate());
-            break;
-        case 36: // home
-            this.setCenterVisibleDate(this._eventSource.getEarliestDate());
-            break;
-        case 33: // page up
-            this._autoScroll(this._timeline.getPixelLength());
-            break;
-        case 34: // page down
-            this._autoScroll(-this._timeline.getPixelLength());
-            break;
-        default:
-            return true;
-        }
-
-        this.closeBubble();
-
-        SimileAjax.DOM.cancelEvent(evt);
-        return false;
-    }
-    return true;
-};
-
-Timeline._Band.prototype._autoScroll = function(distance, f) {
-    var b = this;
-    var a = SimileAjax.Graphics.createAnimation(
-        function(abs, diff) {
-            b._moveEther(diff);
-        },
-        0,
-        distance,
-        1000,
-        f
-    );
-    a.run();
-};
-
-Timeline._Band.prototype._moveEther = function(shift, orthogonalShift) {
-    if (orthogonalShift === undefined) {
-        orthogonalShift = 0;
-    }
-
-    this.closeBubble();
-
-    // A positive shift means back in time
-    // Check that we're not moving beyond Timeline's limits
-    if (!this._timeline.shiftOK(this._index, shift)) {
-        return; // early return
-    }
-
-    this._viewOffset += shift;
-    this._viewOrthogonalOffset = Math.min(0, this._viewOrthogonalOffset + orthogonalShift);
-
-    this._ether.shiftPixels(-shift);
-    if (this._timeline.isHorizontal()) {
-        this._div.style.left = this._viewOffset + "px";
-    } else {
-        this._div.style.top = this._viewOffset + "px";
-    }
-
-    if (this._viewOffset > -this._viewLength * 0.5 ||
-        this._viewOffset < -this._viewLength * (Timeline._Band.SCROLL_MULTIPLES - 1.5)) {
-
-        this._recenterDiv();
-    } else {
-        this.softLayout();
-    }
-
-    this._onChanging();
-}
-
-Timeline._Band.prototype._onChanging = function() {
-    this._changing = true;
-
-    this._fireOnScroll();
-    this._setSyncWithBandDate();
-
-    this._changing = false;
-};
-
-Timeline._Band.prototype.busy = function() {
-    // Is this band busy changing other bands?
-    return(this._changing);
-};
-
-Timeline._Band.prototype._fireOnScroll = function() {
-    for (var i = 0; i < this._onScrollListeners.length; i++) {
-        this._onScrollListeners[i](this);
-    }
-};
-
-Timeline._Band.prototype._setSyncWithBandDate = function() {
-    if (this._syncWithBand) {
-        var centerDate = this._ether.pixelOffsetToDate(this.getViewLength() / 2);
-        this._syncWithBand.setCenterVisibleDate(centerDate);
-    }
-};
-
-Timeline._Band.prototype._onHighlightBandScroll = function() {
-    if (this._syncWithBand) {
-        var centerDate = this._syncWithBand.getCenterVisibleDate();
-        var centerPixelOffset = this._ether.dateToPixelOffset(centerDate);
-
-        this._moveEther(Math.round(this._viewLength / 2 - centerPixelOffset));
-
-        if (this._highlight) {
-            this._etherPainter.setHighlight(
-                this._syncWithBand.getMinVisibleDate(),
-                this._syncWithBand.getMaxVisibleDate());
-        }
-    }
-};
-
-Timeline._Band.prototype._onAddMany = function() {
-    this._paintEvents();
-};
-
-Timeline._Band.prototype._onClear = function() {
-    this._paintEvents();
-};
-
-Timeline._Band.prototype._positionHighlight = function() {
-    if (this._syncWithBand) {
-        var startDate = this._syncWithBand.getMinVisibleDate();
-        var endDate = this._syncWithBand.getMaxVisibleDate();
-
-        if (this._highlight) {
-            this._etherPainter.setHighlight(startDate, endDate);
-        }
-    }
-};
-
-Timeline._Band.prototype._recenterDiv = function() {
-    this._viewOffset = -this._viewLength * (Timeline._Band.SCROLL_MULTIPLES - 1) / 2;
-    if (this._timeline.isHorizontal()) {
-        this._div.style.left = this._viewOffset + "px";
-        this._div.style.width = (Timeline._Band.SCROLL_MULTIPLES * this._viewLength) + "px";
-    } else {
-        this._div.style.top = this._viewOffset + "px";
-        this._div.style.height = (Timeline._Band.SCROLL_MULTIPLES * this._viewLength) + "px";
-    }
-    this.layout();
-};
-
-Timeline._Band.prototype._paintEvents = function() {
-    this._eventPainter.paint();
-};
-
-Timeline._Band.prototype._softPaintEvents = function() {
-    this._eventPainter.softPaint();
-};
-
-Timeline._Band.prototype._paintDecorators = function() {
-    for (var i = 0; i < this._decorators.length; i++) {
-        this._decorators[i].paint();
-    }
-};
-
-Timeline._Band.prototype._softPaintDecorators = function() {
-    for (var i = 0; i < this._decorators.length; i++) {
-        this._decorators[i].softPaint();
-    }
-};
-
-Timeline._Band.prototype.closeBubble = function() {
-    SimileAjax.WindowManager.cancelPopups();
-};
-/*
- *  Classic Theme
- *
- */
-
-
-
-Timeline.ClassicTheme = new Object();
-
-Timeline.ClassicTheme.implementations = [];
-
-Timeline.ClassicTheme.create = function(locale) {
-    if (locale == null) {
-        locale = Timeline.getDefaultLocale();
-    }
-
-    var f = Timeline.ClassicTheme.implementations[locale];
-    if (f == null) {
-        f = Timeline.ClassicTheme._Impl;
-    }
-    return new f();
-};
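/*
 * Usage sketch (illustrative): create a theme and adjust the knobs documented
 * in the implementation below before handing it to the band configuration.
 * The `theme` option of Timeline.createBandInfo is assumed here; that helper
 * is defined elsewhere in the library, not in this part of the file.
 */
var theme = Timeline.ClassicTheme.create();             // default locale
theme.autoWidth = true;                                 // let bands grow when tracks overflow
theme.timeline_start = new Date(Date.UTC(2010, 0, 1));  // no scrolling before this date
theme.mouseWheel = 'zoom';                              // 'default', 'zoom' or 'scroll'
// var bandInfo = Timeline.createBandInfo({ theme: theme, /* ... */ });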
-
-Timeline.ClassicTheme._Impl = function() {
-    this.firstDayOfWeek = 0; // Sunday
-
-    // Note: Many styles previously set here are now set using CSS
-    //       The comments indicate settings controlled by CSS, not
-    //       lines to be un-commented.
-    //
-    //
-    // Attributes autoWidth, autoWidthAnimationTime, timeline_start
-    // and timeline_stop must be set on the first band's theme.
-    // The other attributes can be set differently for each
-    // band by using different themes for the bands.
-    this.autoWidth = false; // Should the Timeline automatically grow itself, as
-                            // needed when too many events for the available width
-                            // are painted on the visible part of the Timeline?
-    this.autoWidthAnimationTime = 500; // mSec
-    this.timeline_start = null; // Setting a date, e.g. new Date(Date.UTC(2010,0,17,20,00,00,0)), will prevent
-                                // the Timeline from being moved to any time before that date.
-    this.timeline_stop = null;  // Use for setting a maximum date. The Timeline cannot be
-                                // moved to any time after this date.
-    this.ether = {
-        backgroundColors: [
-        //    "#EEE",
-        //    "#DDD",
-        //    "#CCC",
-        //    "#AAA"
-        ],
-     //   highlightColor:     "white",
-        highlightOpacity:   50,
-        interval: {
-            line: {
-                show:       true,
-                opacity:    25
-               // color:      "#aaa",
-            },
-            weekend: {
-                opacity:    30
-              //  color:      "#FFFFE0",
-            },
-            marker: {
-                hAlign:     "Bottom",
-                vAlign:     "Right"
-                                        /*
-                hBottomStyler: function(elmt) {
-                    elmt.className = "timeline-ether-marker-bottom";
-                },
-                hBottomEmphasizedStyler: function(elmt) {
-                    elmt.className = "timeline-ether-marker-bottom-emphasized";
-                },
-                hTopStyler: function(elmt) {
-                    elmt.className = "timeline-ether-marker-top";
-                },
-                hTopEmphasizedStyler: function(elmt) {
-                    elmt.className = "timeline-ether-marker-top-emphasized";
-                },
-                */
-
-
-               /*
-                                  vRightStyler: function(elmt) {
-                    elmt.className = "timeline-ether-marker-right";
-                },
-                vRightEmphasizedStyler: function(elmt) {
-                    elmt.className = "timeline-ether-marker-right-emphasized";
-                },
-                vLeftStyler: function(elmt) {
-                    elmt.className = "timeline-ether-marker-left";
-                },
-                vLeftEmphasizedStyler:function(elmt) {
-                    elmt.className = "timeline-ether-marker-left-emphasized";
-                }
-                */
-            }
-        }
-    };
-
-    this.event = {
-        track: {
-                   height: 10, // px. You will need to change the track
-                               //     height if you change the tape height.
-                      gap:  2, // px. Gap between tracks
-                   offset:  2, // px. top margin above tapes
-          autoWidthMargin:  1.5
-          /* autoWidthMargin is only used if autoWidth (see above) is true.
-             The autoWidthMargin setting is used to set how close the bottom of the
-             lowest track is to the edge of the band's div. The units are total track
-             width (tape + label + gap). A min of 0.5 is suggested. Use this setting to
-             move the bottom track's tapes above the axis markers, if needed for your
-             Timeline.
-          */
-        },
-        overviewTrack: {
-                  offset: 20, // px -- top margin above tapes
-              tickHeight:  6, // px
-                  height:  2, // px
-                     gap:  1, // px
-         autoWidthMargin:  5 // This attribute is only used if autoWidth (see above) is true.
-        },
-        tape: {
-            height:         4 // px. For thicker tapes, remember to change track height too.
-        },
-        instant: {
-                           icon: Timeline.urlPrefix + "images/dull-blue-circle.png",
-                                 // default icon. Icon can also be specified per event
-                      iconWidth: 10,
-                     iconHeight: 10,
-               impreciseOpacity: 20, // opacity of the tape when durationEvent is false
-            impreciseIconMargin: 3   // A tape and an icon are painted for imprecise instant
-                                     // events. This attribute is the margin between the
-                                     // bottom of the tape and the top of the icon in that
-                                     // case.
-    //        color:             "#58A0DC",
-    //        impreciseColor:    "#58A0DC",
-        },
-        duration: {
-            impreciseOpacity: 20 // tape opacity for imprecise part of duration events
-      //      color:            "#58A0DC",
-      //      impreciseColor:   "#58A0DC",
-        },
-        label: {
-            backgroundOpacity: 50,// only used in detailed painter
-               offsetFromLine:  3 // px left margin amount from icon's right edge
-      //      backgroundColor:   "white",
-      //      lineColor:         "#58A0DC",
-        },
-        highlightColors: [  // Use with getEventPainter().setHighlightMatcher
-                            // See webapp/examples/examples.js
-            "#FFFF00",
-            "#FFC000",
-            "#FF0000",
-            "#0000FF"
-        ],
-        highlightLabelBackground: false, // When highlighting an event, also change the event's label background?
-        bubble: {
-            width:          250, // px
-            maxHeight:        0, // px Maximum height of bubbles. 0 means no max height.
-                                 // scrollbar will be added for taller bubbles
-            titleStyler: function(elmt) {
-                elmt.className = "timeline-event-bubble-title";
-            },
-            bodyStyler: function(elmt) {
-                elmt.className = "timeline-event-bubble-body";
-            },
-            imageStyler: function(elmt) {
-                elmt.className = "timeline-event-bubble-image";
-            },
-            wikiStyler: function(elmt) {
-                elmt.className = "timeline-event-bubble-wiki";
-            },
-            timeStyler: function(elmt) {
-                elmt.className = "timeline-event-bubble-time";
-            }
-        }
-    };
-
-    this.mouseWheel = 'scroll'; // 'default', 'zoom', 'scroll'
-};/*
- *  An "ether" is a object that maps date/time to pixel coordinates.
- *
- */
-
-/*
- *  Linear Ether
- *
- */
-
-Timeline.LinearEther = function(params) {
-    this._params = params;
-    this._interval = params.interval;
-    this._pixelsPerInterval = params.pixelsPerInterval;
-};
-
-Timeline.LinearEther.prototype.initialize = function(band, timeline) {
-    this._band = band;
-    this._timeline = timeline;
-    this._unit = timeline.getUnit();
-
-    if ("startsOn" in this._params) {
-        this._start = this._unit.parseFromObject(this._params.startsOn);
-    } else if ("endsOn" in this._params) {
-        this._start = this._unit.parseFromObject(this._params.endsOn);
-        this.shiftPixels(-this._timeline.getPixelLength());
-    } else if ("centersOn" in this._params) {
-        this._start = this._unit.parseFromObject(this._params.centersOn);
-        this.shiftPixels(-this._timeline.getPixelLength() / 2);
-    } else {
-        this._start = this._unit.makeDefaultValue();
-        this.shiftPixels(-this._timeline.getPixelLength() / 2);
-    }
-};
-
-Timeline.LinearEther.prototype.setDate = function(date) {
-    this._start = this._unit.cloneValue(date);
-};
-
-Timeline.LinearEther.prototype.shiftPixels = function(pixels) {
-    var numeric = this._interval * pixels / this._pixelsPerInterval;
-    this._start = this._unit.change(this._start, numeric);
-};
-
-Timeline.LinearEther.prototype.dateToPixelOffset = function(date) {
-    var numeric = this._unit.compare(date, this._start);
-    return this._pixelsPerInterval * numeric / this._interval;
-};
-
-Timeline.LinearEther.prototype.pixelOffsetToDate = function(pixels) {
-    var numeric = pixels * this._interval / this._pixelsPerInterval;
-    return this._unit.change(this._start, numeric);
-};
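/*
 * Sketch of the linear mapping above (illustrative): with one day per 100px,
 * a date 36 hours after the ether's start maps to 150px, and 150px maps back
 * to the same date; dateToPixelOffset and pixelOffsetToDate are inverses.
 * `band` and `timeline` are assumed to come from an existing Timeline.
 */
var DAY = SimileAjax.DateTime.gregorianUnitLengths[SimileAjax.DateTime.DAY];
var linearEther = new Timeline.LinearEther({ interval: DAY, pixelsPerInterval: 100 });
// linearEther.initialize(band, timeline);  // binds the ether to a band and its time unit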
-
-Timeline.LinearEther.prototype.zoom = function(zoomIn) {
-  var netIntervalChange = 0;
-  var currentZoomIndex = this._band._zoomIndex;
-  var newZoomIndex = currentZoomIndex;
-
-  if (zoomIn && (currentZoomIndex > 0)) {
-    newZoomIndex = currentZoomIndex - 1;
-  }
-
-  if (!zoomIn && (currentZoomIndex < (this._band._zoomSteps.length - 1))) {
-    newZoomIndex = currentZoomIndex + 1;
-  }
-
-  this._band._zoomIndex = newZoomIndex;
-  this._interval =
-    SimileAjax.DateTime.gregorianUnitLengths[this._band._zoomSteps[newZoomIndex].unit];
-  this._pixelsPerInterval = this._band._zoomSteps[newZoomIndex].pixelsPerInterval;
-  netIntervalChange = this._band._zoomSteps[newZoomIndex].unit -
-    this._band._zoomSteps[currentZoomIndex].unit;
-
-  return netIntervalChange;
-};
-
-
-/*
- *  Hot Zone Ether
- *
- */
-
-Timeline.HotZoneEther = function(params) {
-    this._params = params;
-    this._interval = params.interval;
-    this._pixelsPerInterval = params.pixelsPerInterval;
-    this._theme = params.theme;
-};
-
-Timeline.HotZoneEther.prototype.initialize = function(band, timeline) {
-    this._band = band;
-    this._timeline = timeline;
-    this._unit = timeline.getUnit();
-
-    this._zones = [{
-        startTime:  Number.NEGATIVE_INFINITY,
-        endTime:    Number.POSITIVE_INFINITY,
-        magnify:    1
-    }];
-    var params = this._params;
-    for (var i = 0; i < params.zones.length; i++) {
-        var zone = params.zones[i];
-        var zoneStart = this._unit.parseFromObject(zone.start);
-        var zoneEnd =   this._unit.parseFromObject(zone.end);
-
-        for (var j = 0; j < this._zones.length && this._unit.compare(zoneEnd, zoneStart) > 0; j++) {
-            var zone2 = this._zones[j];
-
-            if (this._unit.compare(zoneStart, zone2.endTime) < 0) {
-                if (this._unit.compare(zoneStart, zone2.startTime) > 0) {
-                    this._zones.splice(j, 0, {
-                        startTime:   zone2.startTime,
-                        endTime:     zoneStart,
-                        magnify:     zone2.magnify
-                    });
-                    j++;
-
-                    zone2.startTime = zoneStart;
-                }
-
-                if (this._unit.compare(zoneEnd, zone2.endTime) < 0) {
-                    this._zones.splice(j, 0, {
-                        startTime:  zoneStart,
-                        endTime:    zoneEnd,
-                        magnify:    zone.magnify * zone2.magnify
-                    });
-                    j++;
-
-                    zone2.startTime = zoneEnd;
-                    zoneStart = zoneEnd;
-                } else {
-                    zone2.magnify *= zone.magnify;
-                    zoneStart = zone2.endTime;
-                }
-            } // else, try the next existing zone
-        }
-    }
-
-    if ("startsOn" in this._params) {
-        this._start = this._unit.parseFromObject(this._params.startsOn);
-    } else if ("endsOn" in this._params) {
-        this._start = this._unit.parseFromObject(this._params.endsOn);
-        this.shiftPixels(-this._timeline.getPixelLength());
-    } else if ("centersOn" in this._params) {
-        this._start = this._unit.parseFromObject(this._params.centersOn);
-        this.shiftPixels(-this._timeline.getPixelLength() / 2);
-    } else {
-        this._start = this._unit.makeDefaultValue();
-        this.shiftPixels(-this._timeline.getPixelLength() / 2);
-    }
-};
-
-Timeline.HotZoneEther.prototype.setDate = function(date) {
-    this._start = this._unit.cloneValue(date);
-};
-
-Timeline.HotZoneEther.prototype.shiftPixels = function(pixels) {
-    this._start = this.pixelOffsetToDate(pixels);
-};
-
-Timeline.HotZoneEther.prototype.dateToPixelOffset = function(date) {
-    return this._dateDiffToPixelOffset(this._start, date);
-};
-
-Timeline.HotZoneEther.prototype.pixelOffsetToDate = function(pixels) {
-    return this._pixelOffsetToDate(pixels, this._start);
-};
-
-Timeline.HotZoneEther.prototype.zoom = function(zoomIn) {
-  var netIntervalChange = 0;
-  var currentZoomIndex = this._band._zoomIndex;
-  var newZoomIndex = currentZoomIndex;
-
-  if (zoomIn && (currentZoomIndex > 0)) {
-    newZoomIndex = currentZoomIndex - 1;
-  }
-
-  if (!zoomIn && (currentZoomIndex < (this._band._zoomSteps.length - 1))) {
-    newZoomIndex = currentZoomIndex + 1;
-  }
-
-  this._band._zoomIndex = newZoomIndex;
-  this._interval =
-    SimileAjax.DateTime.gregorianUnitLengths[this._band._zoomSteps[newZoomIndex].unit];
-  this._pixelsPerInterval = this._band._zoomSteps[newZoomIndex].pixelsPerInterval;
-  netIntervalChange = this._band._zoomSteps[newZoomIndex].unit -
-    this._band._zoomSteps[currentZoomIndex].unit;
-
-  return netIntervalChange;
-};
-
-Timeline.HotZoneEther.prototype._dateDiffToPixelOffset = function(fromDate, toDate) {
-    var scale = this._getScale();
-    var fromTime = fromDate;
-    var toTime = toDate;
-
-    var pixels = 0;
-    if (this._unit.compare(fromTime, toTime) < 0) {
-        var z = 0;
-        while (z < this._zones.length) {
-            if (this._unit.compare(fromTime, this._zones[z].endTime) < 0) {
-                break;
-            }
-            z++;
-        }
-
-        while (this._unit.compare(fromTime, toTime) < 0) {
-            var zone = this._zones[z];
-            var toTime2 = this._unit.earlier(toTime, zone.endTime);
-
-            pixels += (this._unit.compare(toTime2, fromTime) / (scale / zone.magnify));
-
-            fromTime = toTime2;
-            z++;
-        }
-    } else {
-        var z = this._zones.length - 1;
-        while (z >= 0) {
-            if (this._unit.compare(fromTime, this._zones[z].startTime) > 0) {
-                break;
-            }
-            z--;
-        }
-
-        while (this._unit.compare(fromTime, toTime) > 0) {
-            var zone = this._zones[z];
-            var toTime2 = this._unit.later(toTime, zone.startTime);
-
-            pixels += (this._unit.compare(toTime2, fromTime) / (scale / zone.magnify));
-
-            fromTime = toTime2;
-            z--;
-        }
-    }
-    return pixels;
-};
-
-Timeline.HotZoneEther.prototype._pixelOffsetToDate = function(pixels, fromDate) {
-    var scale = this._getScale();
-    var time = fromDate;
-    if (pixels > 0) {
-        var z = 0;
-        while (z < this._zones.length) {
-            if (this._unit.compare(time, this._zones[z].endTime) < 0) {
-                break;
-            }
-            z++;
-        }
-
-        while (pixels > 0) {
-            var zone = this._zones[z];
-            var scale2 = scale / zone.magnify;
-
-            if (zone.endTime == Number.POSITIVE_INFINITY) {
-                time = this._unit.change(time, pixels * scale2);
-                pixels = 0;
-            } else {
-                var pixels2 = this._unit.compare(zone.endTime, time) / scale2;
-                if (pixels2 > pixels) {
-                    time = this._unit.change(time, pixels * scale2);
-                    pixels = 0;
-                } else {
-                    time = zone.endTime;
-                    pixels -= pixels2;
-                }
-            }
-            z++;
-        }
-    } else {
-        var z = this._zones.length - 1;
-        while (z >= 0) {
-            if (this._unit.compare(time, this._zones[z].startTime) > 0) {
-                break;
-            }
-            z--;
-        }
-
-        pixels = -pixels;
-        while (pixels > 0) {
-            var zone = this._zones[z];
-            var scale2 = scale / zone.magnify;
-
-            if (zone.startTime == Number.NEGATIVE_INFINITY) {
-                time = this._unit.change(time, -pixels * scale2);
-                pixels = 0;
-            } else {
-                var pixels2 = this._unit.compare(time, zone.startTime) / scale2;
-                if (pixels2 > pixels) {
-                    time = this._unit.change(time, -pixels * scale2);
-                    pixels = 0;
-                } else {
-                    time = zone.startTime;
-                    pixels -= pixels2;
-                }
-            }
-            z--;
-        }
-    }
-    return time;
-};
-
-Timeline.HotZoneEther.prototype._getScale = function() {
-    return this._interval / this._pixelsPerInterval;
-};
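/*
 * Illustrative hot-zone arithmetic: _getScale() above returns
 * interval / pixelsPerInterval, i.e. time per pixel.  Inside a zone with
 * magnify 3, _dateDiffToPixelOffset divides by (scale / 3), so the same time
 * span gets three times as many pixels: with one day per 100px, a two-day
 * span inside such a zone occupies 600px instead of 200px.  A matching
 * configuration (the dates are made-up examples):
 */
var hotZoneDay = SimileAjax.DateTime.gregorianUnitLengths[SimileAjax.DateTime.DAY];
var hotZoneEther = new Timeline.HotZoneEther({
    interval: hotZoneDay,
    pixelsPerInterval: 100,
    zones: [{ start: "Jun 15 2015 00:00:00 GMT", end: "Jun 17 2015 00:00:00 GMT", magnify: 3 }]
});
// hotZoneEther.initialize(band, timeline);  // splits overlapping zones and sets the start date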
-/*
- *  Gregorian Ether Painter
- *
- */
-
-Timeline.GregorianEtherPainter = function(params) {
-    this._params = params;
-    this._theme = params.theme;
-    this._unit = params.unit;
-    this._multiple = ("multiple" in params) ? params.multiple : 1;
-};
-
-Timeline.GregorianEtherPainter.prototype.initialize = function(band, timeline) {
-    this._band = band;
-    this._timeline = timeline;
-
-    this._backgroundLayer = band.createLayerDiv(0);
-    this._backgroundLayer.setAttribute("name", "ether-background"); // for debugging
-    this._backgroundLayer.className = 'timeline-ether-bg';
-  //  this._backgroundLayer.style.background = this._theme.ether.backgroundColors[band.getIndex()];
-
-
-    this._markerLayer = null;
-    this._lineLayer = null;
-
-    var align = ("align" in this._params && this._params.align != undefined) ? this._params.align :
-        this._theme.ether.interval.marker[timeline.isHorizontal() ? "hAlign" : "vAlign"];
-    var showLine = ("showLine" in this._params) ? this._params.showLine :
-        this._theme.ether.interval.line.show;
-
-    this._intervalMarkerLayout = new Timeline.EtherIntervalMarkerLayout(
-        this._timeline, this._band, this._theme, align, showLine);
-
-    this._highlight = new Timeline.EtherHighlight(
-        this._timeline, this._band, this._theme, this._backgroundLayer);
-}
-
-Timeline.GregorianEtherPainter.prototype.setHighlight = function(startDate, endDate) {
-    this._highlight.position(startDate, endDate);
-}
-
-Timeline.GregorianEtherPainter.prototype.paint = function() {
-    if (this._markerLayer) {
-        this._band.removeLayerDiv(this._markerLayer);
-    }
-    this._markerLayer = this._band.createLayerDiv(100);
-    this._markerLayer.setAttribute("name", "ether-markers"); // for debugging
-    this._markerLayer.style.display = "none";
-
-    if (this._lineLayer) {
-        this._band.removeLayerDiv(this._lineLayer);
-    }
-    this._lineLayer = this._band.createLayerDiv(1);
-    this._lineLayer.setAttribute("name", "ether-lines"); // for debugging
-    this._lineLayer.style.display = "none";
-
-    var minDate = this._band.getMinDate();
-    var maxDate = this._band.getMaxDate();
-
-    var timeZone = this._band.getTimeZone();
-    var labeller = this._band.getLabeller();
-
-    SimileAjax.DateTime.roundDownToInterval(minDate, this._unit, timeZone, this._multiple, this._theme.firstDayOfWeek);
-
-    var p = this;
-    var incrementDate = function(date) {
-        for (var i = 0; i < p._multiple; i++) {
-            SimileAjax.DateTime.incrementByInterval(date, p._unit);
-        }
-    };
-
-    while (minDate.getTime() < maxDate.getTime()) {
-        this._intervalMarkerLayout.createIntervalMarker(
-            minDate, labeller, this._unit, this._markerLayer, this._lineLayer);
-
-        incrementDate(minDate);
-    }
-    this._markerLayer.style.display = "block";
-    this._lineLayer.style.display = "block";
-};
-
-Timeline.GregorianEtherPainter.prototype.softPaint = function() {
-};
-
-Timeline.GregorianEtherPainter.prototype.zoom = function(netIntervalChange) {
-  if (netIntervalChange != 0) {
-    this._unit += netIntervalChange;
-  }
-};
-
-
-/*
- *  Hot Zone Gregorian Ether Painter
- *
- */
-
-Timeline.HotZoneGregorianEtherPainter = function(params) {
-    this._params = params;
-    this._theme = params.theme;
-
-    this._zones = [{
-        startTime:  Number.NEGATIVE_INFINITY,
-        endTime:    Number.POSITIVE_INFINITY,
-        unit:       params.unit,
-        multiple:   1
-    }];
-    for (var i = 0; i < params.zones.length; i++) {
-        var zone = params.zones[i];
-        var zoneStart = SimileAjax.DateTime.parseGregorianDateTime(zone.start).getTime();
-        var zoneEnd = SimileAjax.DateTime.parseGregorianDateTime(zone.end).getTime();
-
-        for (var j = 0; j < this._zones.length && zoneEnd > zoneStart; j++) {
-            var zone2 = this._zones[j];
-
-            if (zoneStart < zone2.endTime) {
-                if (zoneStart > zone2.startTime) {
-                    this._zones.splice(j, 0, {
-                        startTime:   zone2.startTime,
-                        endTime:     zoneStart,
-                        unit:        zone2.unit,
-                        multiple:    zone2.multiple
-                    });
-                    j++;
-
-                    zone2.startTime = zoneStart;
-                }
-
-                if (zoneEnd < zone2.endTime) {
-                    this._zones.splice(j, 0, {
-                        startTime:  zoneStart,
-                        endTime:    zoneEnd,
-                        unit:       zone.unit,
-                        multiple:   (zone.multiple) ? zone.multiple : 1
-                    });
-                    j++;
-
-                    zone2.startTime = zoneEnd;
-                    zoneStart = zoneEnd;
-                } else {
-                    zone2.multiple = zone.multiple;
-                    zone2.unit = zone.unit;
-                    zoneStart = zone2.endTime;
-                }
-            } // else, try the next existing zone
-        }
-    }
-};
-
-Timeline.HotZoneGregorianEtherPainter.prototype.initialize = function(band, timeline) {
-    this._band = band;
-    this._timeline = timeline;
-
-    this._backgroundLayer = band.createLayerDiv(0);
-    this._backgroundLayer.setAttribute("name", "ether-background"); // for debugging
-    this._backgroundLayer.className ='timeline-ether-bg';
-    //this._backgroundLayer.style.background = this._theme.ether.backgroundColors[band.getIndex()];
-
-    this._markerLayer = null;
-    this._lineLayer = null;
-
-    var align = ("align" in this._params && this._params.align != undefined) ? this._params.align :
-        this._theme.ether.interval.marker[timeline.isHorizontal() ? "hAlign" : "vAlign"];
-    var showLine = ("showLine" in this._params) ? this._params.showLine :
-        this._theme.ether.interval.line.show;
-
-    this._intervalMarkerLayout = new Timeline.EtherIntervalMarkerLayout(
-        this._timeline, this._band, this._theme, align, showLine);
-
-    this._highlight = new Timeline.EtherHighlight(
-        this._timeline, this._band, this._theme, this._backgroundLayer);
-}
-
-Timeline.HotZoneGregorianEtherPainter.prototype.setHighlight = function(startDate, endDate) {
-    this._highlight.position(startDate, endDate);
-}
-
-Timeline.HotZoneGregorianEtherPainter.prototype.paint = function() {
-    if (this._markerLayer) {
-        this._band.removeLayerDiv(this._markerLayer);
-    }
-    this._markerLayer = this._band.createLayerDiv(100);
-    this._markerLayer.setAttribute("name", "ether-markers"); // for debugging
-    this._markerLayer.style.display = "none";
-
-    if (this._lineLayer) {
-        this._band.removeLayerDiv(this._lineLayer);
-    }
-    this._lineLayer = this._band.createLayerDiv(1);
-    this._lineLayer.setAttribute("name", "ether-lines"); // for debugging
-    this._lineLayer.style.display = "none";
-
-    var minDate = this._band.getMinDate();
-    var maxDate = this._band.getMaxDate();
-
-    var timeZone = this._band.getTimeZone();
-    var labeller = this._band.getLabeller();
-
-    var p = this;
-    var incrementDate = function(date, zone) {
-        for (var i = 0; i < zone.multiple; i++) {
-            SimileAjax.DateTime.incrementByInterval(date, zone.unit);
-        }
-    };
-
-    var zStart = 0;
-    while (zStart < this._zones.length) {
-        if (minDate.getTime() < this._zones[zStart].endTime) {
-            break;
-        }
-        zStart++;
-    }
-    var zEnd = this._zones.length - 1;
-    while (zEnd >= 0) {
-        if (maxDate.getTime() > this._zones[zEnd].startTime) {
-            break;
-        }
-        zEnd--;
-    }
-
-    for (var z = zStart; z <= zEnd; z++) {
-        var zone = this._zones[z];
-
-        var minDate2 = new Date(Math.max(minDate.getTime(), zone.startTime));
-        var maxDate2 = new Date(Math.min(maxDate.getTime(), zone.endTime));
-
-        SimileAjax.DateTime.roundDownToInterval(minDate2, zone.unit, timeZone, zone.multiple, this._theme.firstDayOfWeek);
-        SimileAjax.DateTime.roundUpToInterval(maxDate2, zone.unit, timeZone, zone.multiple, this._theme.firstDayOfWeek);
-
-        while (minDate2.getTime() < maxDate2.getTime()) {
-            this._intervalMarkerLayout.createIntervalMarker(
-                minDate2, labeller, zone.unit, this._markerLayer, this._lineLayer);
-
-            incrementDate(minDate2, zone);
-        }
-    }
-    this._markerLayer.style.display = "block";
-    this._lineLayer.style.display = "block";
-};
-
-Timeline.HotZoneGregorianEtherPainter.prototype.softPaint = function() {
-};
-
-Timeline.HotZoneGregorianEtherPainter.prototype.zoom = function(netIntervalChange) {
-  if (netIntervalChange != 0) {
-    for (var i = 0; i < this._zones.length; ++i) {
-      if (this._zones[i]) {
-        this._zones[i].unit += netIntervalChange;
-      }
-    }
-  }
-};
-
-/*
- *  Year Count Ether Painter
- *
- */
-
-Timeline.YearCountEtherPainter = function(params) {
-    this._params = params;
-    this._theme = params.theme;
-    this._startDate = SimileAjax.DateTime.parseGregorianDateTime(params.startDate);
-    this._multiple = ("multiple" in params) ? params.multiple : 1;
-};
-
-Timeline.YearCountEtherPainter.prototype.initialize = function(band, timeline) {
-    this._band = band;
-    this._timeline = timeline;
-
-    this._backgroundLayer = band.createLayerDiv(0);
-    this._backgroundLayer.setAttribute("name", "ether-background"); // for debugging
-    this._backgroundLayer.className = 'timeline-ether-bg';
-   // this._backgroundLayer.style.background = this._theme.ether.backgroundColors[band.getIndex()];
-
-    this._markerLayer = null;
-    this._lineLayer = null;
-
-    var align = ("align" in this._params) ? this._params.align :
-        this._theme.ether.interval.marker[timeline.isHorizontal() ? "hAlign" : "vAlign"];
-    var showLine = ("showLine" in this._params) ? this._params.showLine :
-        this._theme.ether.interval.line.show;
-
-    this._intervalMarkerLayout = new Timeline.EtherIntervalMarkerLayout(
-        this._timeline, this._band, this._theme, align, showLine);
-
-    this._highlight = new Timeline.EtherHighlight(
-        this._timeline, this._band, this._theme, this._backgroundLayer);
-};
-
-Timeline.YearCountEtherPainter.prototype.setHighlight = function(startDate, endDate) {
-    this._highlight.position(startDate, endDate);
-};
-
-Timeline.YearCountEtherPainter.prototype.paint = function() {
-    if (this._markerLayer) {
-        this._band.removeLayerDiv(this._markerLayer);
-    }
-    this._markerLayer = this._band.createLayerDiv(100);
-    this._markerLayer.setAttribute("name", "ether-markers"); // for debugging
-    this._markerLayer.style.display = "none";
-
-    if (this._lineLayer) {
-        this._band.removeLayerDiv(this._lineLayer);
-    }
-    this._lineLayer = this._band.createLayerDiv(1);
-    this._lineLayer.setAttribute("name", "ether-lines"); // for debugging
-    this._lineLayer.style.display = "none";
-
-    var minDate = new Date(this._startDate.getTime());
-    var maxDate = this._band.getMaxDate();
-    var yearDiff = this._band.getMinDate().getUTCFullYear() - this._startDate.getUTCFullYear();
-    minDate.setUTCFullYear(this._band.getMinDate().getUTCFullYear() - yearDiff % this._multiple);
-
-    var p = this;
-    var incrementDate = function(date) {
-        for (var i = 0; i < p._multiple; i++) {
-            SimileAjax.DateTime.incrementByInterval(date, SimileAjax.DateTime.YEAR);
-        }
-    };
-    var labeller = {
-        labelInterval: function(date, intervalUnit) {
-            var diff = date.getUTCFullYear() - p._startDate.getUTCFullYear();
-            return {
-                text: diff,
-                emphasized: diff == 0
-            };
-        }
-    };
-
-    while (minDate.getTime() < maxDate.getTime()) {
-        this._intervalMarkerLayout.createIntervalMarker(
-            minDate, labeller, SimileAjax.DateTime.YEAR, this._markerLayer, this._lineLayer);
-
-        incrementDate(minDate);
-    }
-    this._markerLayer.style.display = "block";
-    this._lineLayer.style.display = "block";
-};
-
-Timeline.YearCountEtherPainter.prototype.softPaint = function() {
-};
-
-/*
- *  Quarterly Ether Painter
- *
- */
-
-Timeline.QuarterlyEtherPainter = function(params) {
-    this._params = params;
-    this._theme = params.theme;
-    this._startDate = SimileAjax.DateTime.parseGregorianDateTime(params.startDate);
-};
-
-Timeline.QuarterlyEtherPainter.prototype.initialize = function(band, timeline) {
-    this._band = band;
-    this._timeline = timeline;
-
-    this._backgroundLayer = band.createLayerDiv(0);
-    this._backgroundLayer.setAttribute("name", "ether-background"); // for debugging
-    this._backgroundLayer.className = 'timeline-ether-bg';
- //   this._backgroundLayer.style.background = this._theme.ether.backgroundColors[band.getIndex()];
-
-    this._markerLayer = null;
-    this._lineLayer = null;
-
-    var align = ("align" in this._params) ? this._params.align :
-        this._theme.ether.interval.marker[timeline.isHorizontal() ? "hAlign" : "vAlign"];
-    var showLine = ("showLine" in this._params) ? this._params.showLine :
-        this._theme.ether.interval.line.show;
-
-    this._intervalMarkerLayout = new Timeline.EtherIntervalMarkerLayout(
-        this._timeline, this._band, this._theme, align, showLine);
-
-    this._highlight = new Timeline.EtherHighlight(
-        this._timeline, this._band, this._theme, this._backgroundLayer);
-};
-
-Timeline.QuarterlyEtherPainter.prototype.setHighlight = function(startDate, endDate) {
-    this._highlight.position(startDate, endDate);
-};
-
-Timeline.QuarterlyEtherPainter.prototype.paint = function() {
-    if (this._markerLayer) {
-        this._band.removeLayerDiv(this._markerLayer);
-    }
-    this._markerLayer = this._band.createLayerDiv(100);
-    this._markerLayer.setAttribute("name", "ether-markers"); // for debugging
-    this._markerLayer.style.display = "none";
-
-    if (this._lineLayer) {
-        this._band.removeLayerDiv(this._lineLayer);
-    }
-    this._lineLayer = this._band.createLayerDiv(1);
-    this._lineLayer.setAttribute("name", "ether-lines"); // for debugging
-    this._lineLayer.style.display = "none";
-
-    var minDate = new Date(0);
-    var maxDate = this._band.getMaxDate();
-
-    minDate.setUTCFullYear(Math.max(this._startDate.getUTCFullYear(), this._band.getMinDate().getUTCFullYear()));
-    minDate.setUTCMonth(this._startDate.getUTCMonth());
-
-    var p = this;
-    var incrementDate = function(date) {
-        date.setUTCMonth(date.getUTCMonth() + 3);
-    };
-    var labeller = {
-        labelInterval: function(date, intervalUnit) {
-            var quarters = (4 + (date.getUTCMonth() - p._startDate.getUTCMonth()) / 3) % 4;
-            if (quarters != 0) {
-                return { text: "Q" + (quarters + 1), emphasized: false };
-            } else {
-                return { text: "Y" + (date.getUTCFullYear() - p._startDate.getUTCFullYear() + 1), emphasized: true };
-            }
-        }
-    };
-
-    while (minDate.getTime() < maxDate.getTime()) {
-        this._intervalMarkerLayout.createIntervalMarker(
-            minDate, labeller, SimileAjax.DateTime.YEAR, this._markerLayer, this._lineLayer);
-
-        incrementDate(minDate);
-    }
-    this._markerLayer.style.display = "block";
-    this._lineLayer.style.display = "block";
-};
-
-Timeline.QuarterlyEtherPainter.prototype.softPaint = function() {
-};
-
-/*
- *  Ether Interval Marker Layout
- *
- */
-
-Timeline.EtherIntervalMarkerLayout = function(timeline, band, theme, align, showLine) {
-    var horizontal = timeline.isHorizontal();
-    if (horizontal) {
-        if (align == "Top") {
-            this.positionDiv = function(div, offset) {
-                div.style.left = offset + "px";
-                div.style.top = "0px";
-            };
-        } else {
-            this.positionDiv = function(div, offset) {
-                div.style.left = offset + "px";
-                div.style.bottom = "0px";
-            };
-        }
-    } else {
-        if (align == "Left") {
-            this.positionDiv = function(div, offset) {
-                div.style.top = offset + "px";
-                div.style.left = "0px";
-            };
-        } else {
-            this.positionDiv = function(div, offset) {
-                div.style.top = offset + "px";
-                div.style.right = "0px";
-            };
-        }
-    }
-
-    var markerTheme = theme.ether.interval.marker;
-    var lineTheme = theme.ether.interval.line;
-    var weekendTheme = theme.ether.interval.weekend;
-
-    var stylePrefix = (horizontal ? "h" : "v") + align;
-    var labelStyler = markerTheme[stylePrefix + "Styler"];
-    var emphasizedLabelStyler = markerTheme[stylePrefix + "EmphasizedStyler"];
-    var day = SimileAjax.DateTime.gregorianUnitLengths[SimileAjax.DateTime.DAY];
-
-    this.createIntervalMarker = function(date, labeller, unit, markerDiv, lineDiv) {
-        var offset = Math.round(band.dateToPixelOffset(date));
-
-        if (showLine && unit != SimileAjax.DateTime.WEEK) {
-            var divLine = timeline.getDocument().createElement("div");
-            divLine.className = "timeline-ether-lines";
-
-            if (lineTheme.opacity < 100) {
-                SimileAjax.Graphics.setOpacity(divLine, lineTheme.opacity);
-            }
-
-            if (horizontal) {
-                //divLine.className += " timeline-ether-lines-vertical";
-                divLine.style.left = offset + "px";
-            } else {
-                //divLine.className += " timeline-ether-lines-horizontal";
-                divLine.style.top = offset + "px";
-            }
-            lineDiv.appendChild(divLine);
-        }
-        if (unit == SimileAjax.DateTime.WEEK) {
-            var firstDayOfWeek = theme.firstDayOfWeek;
-
-            var saturday = new Date(date.getTime() + (6 - firstDayOfWeek - 7) * day);
-            var monday = new Date(saturday.getTime() + 2 * day);
-
-            var saturdayPixel = Math.round(band.dateToPixelOffset(saturday));
-            var mondayPixel = Math.round(band.dateToPixelOffset(monday));
-            var length = Math.max(1, mondayPixel - saturdayPixel);
-
-            var divWeekend = timeline.getDocument().createElement("div");
-            divWeekend.className = 'timeline-ether-weekends';
-
-            if (weekendTheme.opacity < 100) {
-                SimileAjax.Graphics.setOpacity(divWeekend, weekendTheme.opacity);
-            }
-
-            if (horizontal) {
-                divWeekend.style.left = saturdayPixel + "px";
-                divWeekend.style.width = length + "px";
-            } else {
-                divWeekend.style.top = saturdayPixel + "px";
-                divWeekend.style.height = length + "px";
-            }
-            lineDiv.appendChild(divWeekend);
-        }
-
-        var label = labeller.labelInterval(date, unit);
-
-        var div = timeline.getDocument().createElement("div");
-        div.innerHTML = label.text;
-
-        div.className = 'timeline-date-label';
-        if (label.emphasized) {
-            div.className += ' timeline-date-label-em';
-        }
-
-        this.positionDiv(div, offset);
-        markerDiv.appendChild(div);
-
-        return div;
-    };
-};
-
-/*
- *  Ether Highlight Layout
- *
- */
-
-Timeline.EtherHighlight = function(timeline, band, theme, backgroundLayer) {
-    var horizontal = timeline.isHorizontal();
-
-    this._highlightDiv = null;
-    this._createHighlightDiv = function() {
-        if (this._highlightDiv == null) {
-            this._highlightDiv = timeline.getDocument().createElement("div");
-            this._highlightDiv.setAttribute("name", "ether-highlight"); // for debugging
-            this._highlightDiv.className = 'timeline-ether-highlight'
-
-            var opacity = theme.ether.highlightOpacity;
-            if (opacity < 100) {
-                SimileAjax.Graphics.setOpacity(this._highlightDiv, opacity);
-            }
-
-            backgroundLayer.appendChild(this._highlightDiv);
-        }
-    }
-
-    this.position = function(startDate, endDate) {
-        this._createHighlightDiv();
-
-        var startPixel = Math.round(band.dateToPixelOffset(startDate));
-        var endPixel = Math.round(band.dateToPixelOffset(endDate));
-        var length = Math.max(endPixel - startPixel, 3);
-        if (horizontal) {
-            this._highlightDiv.style.left = startPixel + "px";
-            this._highlightDiv.style.width = length + "px";
-            this._highlightDiv.style.height = (band.getViewWidth() - 4) + "px";
-        } else {
-            this._highlightDiv.style.top = startPixel + "px";
-            this._highlightDiv.style.height = length + "px";
-            this._highlightDiv.style.width = (band.getViewWidth() - 4) + "px";
-        }
-    }
-};
-/*
- *  Event Utils
- *
- */
-Timeline.EventUtils = {};
-
-Timeline.EventUtils.getNewEventID = function() {
-    // global across page
-    if (this._lastEventID == null) {
-        this._lastEventID = 0;
-    }
-
-    this._lastEventID += 1;
-    return "e" + this._lastEventID;
-};
-
-Timeline.EventUtils.decodeEventElID = function(elementID) {
-    /*
-     *
-     * Use this function to decode an event element's id on a band (label div,
-     * tape div or icon img).
-     *
-     * Returns {band: <bandObj>, evt: <eventObj>}
-     *
-     * To enable a single event listener to monitor everything
-     * on a Timeline, a set format is used for the id's of the
-     * elements on the Timeline--
-     *
-     * element id format for labels, icons, tapes:
-     *   labels: label-tl-<timelineID>-<band_index>-<evt.id>
-     *    icons: icon-tl-<timelineID>-<band_index>-<evt.id>
-     *    tapes: tape1-tl-<timelineID>-<band_index>-<evt.id>
-     *           tape2-tl-<timelineID>-<band_index>-<evt.id>
-     *           // some events have more than one tape
-     *    highlight: highlight1-tl-<timelineID>-<band_index>-<evt.id>
-     *               highlight2-tl-<timelineID>-<band_index>-<evt.id>
-     *           // some events have more than one highlight div (future)
-     * Note: use split('-') to get array of the format's parts
-     *
-     * You can then retrieve the timeline object and event object
-     * by using Timeline.getTimeline, Timeline.getBand, or
-     * Timeline.getEvent and passing in the element's id
-     *
-     *
-     */
-
-    var parts = elementID.split('-');
-    if (parts[1] != 'tl') {
-        alert("Internal Timeline problem 101, please consult support");
-        return {band: null, evt: null}; // early return
-    }
-
-    var timeline = Timeline.getTimelineFromID(parts[2]);
-    var band = timeline.getBand(parts[3]);
-    var evt = band.getEventSource().getEvent(parts[4]);
-
-    return {band: band, evt: evt};
-};
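/*
 * Usage sketch for the element-id scheme documented above (illustrative; the
 * timeline id "tl0", band index 2 and event id "e12" are made-up values, and
 * `timeline`, `band` and `evt` are assumed to exist).  The encoder is defined
 * just below:
 *
 *   Timeline.EventUtils.encodeEventElID(timeline, band, "icon", evt)
 *       -> "icon-tl-tl0-2-e12"
 *   Timeline.EventUtils.decodeEventElID("icon-tl-tl0-2-e12")
 *       -> { band: <the Timeline._Band at index 2>, evt: <the event "e12"> }
 */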
-
-Timeline.EventUtils.encodeEventElID = function(timeline, band, elType, evt) {
-    // elType should be one of {label | icon | tapeN | highlightN}
-    return elType + "-tl-" + timeline.timelineID +
-       "-" + band.getIndex() + "-" + evt.getID();
-};/*
- *  Gregorian Date Labeller
- *
- */
-
-Timeline.GregorianDateLabeller = function(locale, timeZone) {
-    this._locale = locale;
-    this._timeZone = timeZone;
-};
-
-Timeline.GregorianDateLabeller.monthNames = [];
-Timeline.GregorianDateLabeller.dayNames = [];
-Timeline.GregorianDateLabeller.labelIntervalFunctions = [];
-
-Timeline.GregorianDateLabeller.getMonthName = function(month, locale) {
-    return Timeline.GregorianDateLabeller.monthNames[locale][month];
-};
-
-Timeline.GregorianDateLabeller.prototype.labelInterval = function(date, intervalUnit) {
-    var f = Timeline.GregorianDateLabeller.labelIntervalFunctions[this._locale];
-    if (f == null) {
-        f = Timeline.GregorianDateLabeller.prototype.defaultLabelInterval;
-    }
-    return f.call(this, date, intervalUnit);
-};
-
-Timeline.GregorianDateLabeller.prototype.labelPrecise = function(date) {
-    return SimileAjax.DateTime.removeTimeZoneOffset(
-        date,
-        this._timeZone //+ (new Date().getTimezoneOffset() / 60)
-    ).toUTCString();
-};
-
-Timeline.GregorianDateLabeller.prototype.defaultLabelInterval = function(date, intervalUnit) {
-    var text;
-    var emphasized = false;
-
-    date = SimileAjax.DateTime.removeTimeZoneOffset(date, this._timeZone);
-
-    switch(intervalUnit) {
-    case SimileAjax.DateTime.MILLISECOND:
-        text = date.getUTCMilliseconds();
-        break;
-    case SimileAjax.DateTime.SECOND:
-        text = date.getUTCSeconds();
-        break;
-    case SimileAjax.DateTime.MINUTE:
-        var m = date.getUTCMinutes();
-        if (m == 0) {
-            text = date.getUTCHours() + ":00";
-            emphasized = true;
-        } else {
-            text = m;
-        }
-        break;
-    case SimileAjax.DateTime.HOUR:
-        text = date.getUTCHours() + "hr";
-        break;
-    case SimileAjax.DateTime.DAY:
-        text = Timeline.GregorianDateLabeller.getMonthName(date.getUTCMonth(), this._locale) + " " + date.getUTCDate();
-        break;
-    case SimileAjax.DateTime.WEEK:
-        text = Timeline.GregorianDateLabeller.getMonthName(date.getUTCMonth(), this._locale) + " " + date.getUTCDate();
-        break;
-    case SimileAjax.DateTime.MONTH:
-        var m = date.getUTCMonth();
-        if (m != 0) {
-            text = Timeline.GregorianDateLabeller.getMonthName(m, this._locale);
-            break;
-        } // else, fall through
-    case SimileAjax.DateTime.YEAR:
-    case SimileAjax.DateTime.DECADE:
-    case SimileAjax.DateTime.CENTURY:
-    case SimileAjax.DateTime.MILLENNIUM:
-        var y = date.getUTCFullYear();
-        if (y > 0) {
-            text = date.getUTCFullYear();
-        } else {
-            text = (1 - y) + "BC";
-        }
-        emphasized =
-            (intervalUnit == SimileAjax.DateTime.MONTH) ||
-            (intervalUnit == SimileAjax.DateTime.DECADE && y % 100 == 0) ||
-            (intervalUnit == SimileAjax.DateTime.CENTURY && y % 1000 == 0);
-        break;
-    default:
-        text = date.toUTCString();
-    }
-    return { text: text, emphasized: emphasized };
-}
-
-/*
- *  Default Event Source
- *
- */
-
-
-Timeline.DefaultEventSource = function(eventIndex) {
-    this._events = (eventIndex instanceof Object) ? eventIndex : new SimileAjax.EventIndex();
-    this._listeners = [];
-};
-
-Timeline.DefaultEventSource.prototype.addListener = function(listener) {
-    this._listeners.push(listener);
-};
-
-Timeline.DefaultEventSource.prototype.removeListener = function(listener) {
-    for (var i = 0; i < this._listeners.length; i++) {
-        if (this._listeners[i] == listener) {
-            this._listeners.splice(i, 1);
-            break;
-        }
-    }
-};
-
-Timeline.DefaultEventSource.prototype.loadXML = function(xml, url) {
-    var base = this._getBaseURL(url);
-
-    var wikiURL = xml.documentElement.getAttribute("wiki-url");
-    var wikiSection = xml.documentElement.getAttribute("wiki-section");
-
-    var dateTimeFormat = xml.documentElement.getAttribute("date-time-format");
-    var parseDateTimeFunction = this._events.getUnit().getParser(dateTimeFormat);
-
-    var node = xml.documentElement.firstChild;
-    var added = false;
-    while (node != null) {
-        if (node.nodeType == 1) {
-            var description = "";
-            if (node.firstChild != null && node.firstChild.nodeType == 3) {
-                description = node.firstChild.nodeValue;
-            }
-            // instant event: default is true. Or use values from isDuration or durationEvent
-            var instant = (node.getAttribute("isDuration")    === null &&
-                           node.getAttribute("durationEvent") === null) ||
-                          node.getAttribute("isDuration") == "false" ||
-                          node.getAttribute("durationEvent") == "false";
-
-            var evt = new Timeline.DefaultEventSource.Event( {
-                          id: node.getAttribute("id"),
-                       start: parseDateTimeFunction(node.getAttribute("start")),
-                         end: parseDateTimeFunction(node.getAttribute("end")),
-                 latestStart: parseDateTimeFunction(node.getAttribute("latestStart")),
-                 earliestEnd: parseDateTimeFunction(node.getAttribute("earliestEnd")),
-                     instant: instant,
-                        text: node.getAttribute("title"),
-                 description: description,
-                       image: this._resolveRelativeURL(node.getAttribute("image"), base),
-                        link: this._resolveRelativeURL(node.getAttribute("link") , base),
-                        icon: this._resolveRelativeURL(node.getAttribute("icon") , base),
-                       color: node.getAttribute("color"),
-                   textColor: node.getAttribute("textColor"),
-                   hoverText: node.getAttribute("hoverText"),
-                   classname: node.getAttribute("classname"),
-                   tapeImage: node.getAttribute("tapeImage"),
-                  tapeRepeat: node.getAttribute("tapeRepeat"),
-                     caption: node.getAttribute("caption"),
-                     eventID: node.getAttribute("eventID"),
-                    trackNum: node.getAttribute("trackNum")
-            });
-
-            evt._node = node;
-            evt.getProperty = function(name) {
-                return this._node.getAttribute(name);
-            };
-            evt.setWikiInfo(wikiURL, wikiSection);
-
-            this._events.add(evt);
-
-            added = true;
-        }
-        node = node.nextSibling;
-    }
-
-    if (added) {
-        this._fire("onAddMany", []);
-    }
-};
-
-
-Timeline.DefaultEventSource.prototype.loadJSON = function(data, url) {
-    var base = this._getBaseURL(url);
-    var added = false;
-    if (data && data.events){
-        var wikiURL = ("wikiURL" in data) ? data.wikiURL : null;
-        var wikiSection = ("wikiSection" in data) ? data.wikiSection : null;
-
-        var dateTimeFormat = ("dateTimeFormat" in data) ? data.dateTimeFormat : null;
-        var parseDateTimeFunction = this._events.getUnit().getParser(dateTimeFormat);
-
-        for (var i=0; i < data.events.length; i++){
-            var event = data.events[i];
-            // Fixing issue 33:
-            // instant event: default (for JSON only) is false. Or use values from isDuration or durationEvent
-            // isDuration was negated (see issue 33), so keep that interpretation
-            var instant = event.isDuration || (event.durationEvent != null && !event.durationEvent);
-
-            var evt = new Timeline.DefaultEventSource.Event({
-                          id: ("id" in event) ? event.id : undefined,
-                       start: parseDateTimeFunction(event.start),
-                         end: parseDateTimeFunction(event.end),
-                 latestStart: parseDateTimeFunction(event.latestStart),
-                 earliestEnd: parseDateTimeFunction(event.earliestEnd),
-                     instant: instant,
-                        text: event.title,
-                 description: event.description,
-                       image: this._resolveRelativeURL(event.image, base),
-                        link: this._resolveRelativeURL(event.link , base),
-                        icon: this._resolveRelativeURL(event.icon , base),
-                       color: event.color,
-                   textColor: event.textColor,
-                   hoverText: event.hoverText,
-                   classname: event.classname,
-                   tapeImage: event.tapeImage,
-                  tapeRepeat: event.tapeRepeat,
-                     caption: event.caption,
-                     eventID: event.eventID,
-                    trackNum: event.trackNum
-            });
-            evt._obj = event;
-            evt.getProperty = function(name) {
-                return this._obj[name];
-            };
-            evt.setWikiInfo(wikiURL, wikiSection);
-
-            this._events.add(evt);
-            added = true;
-        }
-    }
-
-    if (added) {
-        this._fire("onAddMany", []);
-    }
-};
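
For illustration, a minimal sketch of the JSON shape loadJSON reads, based on the fields accessed above; the titles, dates and the eventSource variable are placeholders:

var exampleData = {
    dateTimeFormat: "iso8601",   // optional; name passed to this._events.getUnit().getParser()
    events: [
        { title: "Example duration event",        // placeholder
          start: "2015-06-01T00:00:00Z",
          end:   "2015-06-22T00:00:00Z",
          durationEvent: true,                     // true => not an instant event (see issue 33 note)
          description: "Placeholder description" },
        { title: "Example instant event",
          start: "2015-06-15T12:00:00Z",
          durationEvent: false }                   // false => instant event
    ]
};
// eventSource.loadJSON(exampleData, document.location.href);  // eventSource: a DefaultEventSource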
-
-/*
- *  Contributed by Morten Frederiksen, http://www.wasab.dk/morten/
- */
-Timeline.DefaultEventSource.prototype.loadSPARQL = function(xml, url) {
-    var base = this._getBaseURL(url);
-
-    var dateTimeFormat = 'iso8601';
-    var parseDateTimeFunction = this._events.getUnit().getParser(dateTimeFormat);
-
-    if (xml == null) {
-        return;
-    }
-
-    /*
-     *  Find <results> tag
-     */
-    var node = xml.documentElement.firstChild;
-    while (node != null && (node.nodeType != 1 || node.nodeName != 'results')) {
-        node = node.nextSibling;
-    }
-
-    var wikiURL = null;
-    var wikiSection = null;
-    if (node != null) {
-        wikiURL = node.getAttribute("wiki-url");
-        wikiSection = node.getAttribute("wiki-section");
-
-        node = node.firstChild;
-    }
-
-    var added = false;
-    while (node != null) {
-        if (node.nodeType == 1) {
-            var bindings = { };
-            var binding = node.firstChild;
-            while (binding != null) {
-                if (binding.nodeType == 1 &&
-                    binding.firstChild != null &&
-                    binding.firstChild.nodeType == 1 &&
-                    binding.firstChild.firstChild != null &&
-                    binding.firstChild.firstChild.nodeType == 3) {
-                    bindings[binding.getAttribute('name')] = binding.firstChild.firstChild.nodeValue;
-                }
-                binding = binding.nextSibling;
-            }
-
-            if (bindings["start"] == null && bindings["date"] != null) {
-                bindings["start"] = bindings["date"];
-            }
-
-            // instant event: default is true. Or use values from isDuration or durationEvent
-            // (missing bindings are undefined rather than null, hence the loose == null checks)
-            var instant = (bindings["isDuration"]    == null &&
-                           bindings["durationEvent"] == null) ||
-                          bindings["isDuration"] == "false" ||
-                          bindings["durationEvent"] == "false";
-
-            var evt = new Timeline.DefaultEventSource.Event({
-                          id: bindings["id"],
-                       start: parseDateTimeFunction(bindings["start"]),
-                         end: parseDateTimeFunction(bindings["end"]),
-                 latestStart: parseDateTimeFunction(bindings["latestStart"]),
-                 earliestEnd: parseDateTimeFunction(bindings["earliestEnd"]),
-                     instant: instant, // instant
-                        text: bindings["title"], // text
-                 description: bindings["description"],
-                       image: this._resolveRelativeURL(bindings["image"], base),
-                        link: this._resolveRelativeURL(bindings["link"] , base),
-                        icon: this._resolveRelativeURL(bindings["icon"] , base),
-                       color: bindings["color"],
-                   textColor: bindings["textColor"],
-                   hoverText: bindings["hoverText"],
-                     caption: bindings["caption"],
-                   classname: bindings["classname"],
-                   tapeImage: bindings["tapeImage"],
-                  tapeRepeat: bindings["tapeRepeat"],
-                     eventID: bindings["eventID"],
-                    trackNum: bindings["trackNum"]
-            });
-            evt._bindings = bindings;
-            evt.getProperty = function(name) {
-                return this._bindings[name];
-            };
-            evt.setWikiInfo(wikiURL, wikiSection);
-
-            this._events.add(evt);
-            added = true;
-        }
-        node = node.nextSibling;
-    }
-
-    if (added) {
-        this._fire("onAddMany", []);
-    }
-};
-
-Timeline.DefaultEventSource.prototype.add = function(evt) {
-    this._events.add(evt);
-    this._fire("onAddOne", [evt]);
-};
-
-Timeline.DefaultEventSource.prototype.addMany = function(events) {
-    for (var i = 0; i < events.length; i++) {
-        this._events.add(events[i]);
-    }
-    this._fire("onAddMany", []);
-};
-
-Timeline.DefaultEventSource.prototype.clear = function() {
-    this._events.removeAll();
-    this._fire("onClear", []);
-};
-
-Timeline.DefaultEventSource.prototype.getEvent = function(id) {
-    return this._events.getEvent(id);
-};
-
-Timeline.DefaultEventSource.prototype.getEventIterator = function(startDate, endDate) {
-    return this._events.getIterator(startDate, endDate);
-};
-
-Timeline.DefaultEventSource.prototype.getEventReverseIterator = function(startDate, endDate) {
-    return this._events.getReverseIterator(startDate, endDate);
-};
-
-Timeline.DefaultEventSource.prototype.getAllEventIterator = function() {
-    return this._events.getAllIterator();
-};
-
-Timeline.DefaultEventSource.prototype.getCount = function() {
-    return this._events.getCount();
-};
-
-Timeline.DefaultEventSource.prototype.getEarliestDate = function() {
-    return this._events.getEarliestDate();
-};
-
-Timeline.DefaultEventSource.prototype.getLatestDate = function() {
-    return this._events.getLatestDate();
-};
-
-Timeline.DefaultEventSource.prototype._fire = function(handlerName, args) {
-    for (var i = 0; i < this._listeners.length; i++) {
-        var listener = this._listeners[i];
-        if (handlerName in listener) {
-            try {
-                listener[handlerName].apply(listener, args);
-            } catch (e) {
-                SimileAjax.Debug.exception(e);
-            }
-        }
-    }
-};
-
-Timeline.DefaultEventSource.prototype._getBaseURL = function(url) {
-    if (url.indexOf("://") < 0) {
-        var url2 = this._getBaseURL(document.location.href);
-        if (url.substr(0,1) == "/") {
-            url = url2.substr(0, url2.indexOf("/", url2.indexOf("://") + 3)) + url;
-        } else {
-            url = url2 + url;
-        }
-    }
-
-    var i = url.lastIndexOf("/");
-    if (i < 0) {
-        return "";
-    } else {
-        return url.substr(0, i+1);
-    }
-};
-
-Timeline.DefaultEventSource.prototype._resolveRelativeURL = function(url, base) {
-    if (url == null || url == "") {
-        return url;
-    } else if (url.indexOf("://") > 0) {
-        return url;
-    } else if (url.substr(0,1) == "/") {
-        return base.substr(0, base.indexOf("/", base.indexOf("://") + 3)) + url;
-    } else {
-        return base + url;
-    }
-};
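
For illustration (the URLs below are made up), the two helpers above resolve relative references roughly as follows:

// Made-up example: data file loaded from "http://example.org/data/events.json"
var source = new Timeline.DefaultEventSource();
var base = source._getBaseURL("http://example.org/data/events.json");  // "http://example.org/data/"
source._resolveRelativeURL("icons/dot.png", base);              // -> "http://example.org/data/icons/dot.png"
source._resolveRelativeURL("/img/logo.png", base);              // -> "http://example.org/img/logo.png"
source._resolveRelativeURL("http://cdn.example.org/x.png", base); // -> returned unchanged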
-
-
-Timeline.DefaultEventSource.Event = function(args) {
-  //
-  // Attention developers!
-  // If you add a new event attribute, please be sure to add it to
-  // all three load functions: loadXML, loadSPARQL, loadJSON.
-  // Thanks!
-  //
-  // args is a hash/object. It supports the following keys. Most are optional
-  //   id            -- an internal id. Really shouldn't be used by events.
-  //                    Timeline library clients should use eventID
-  //   eventID       -- For use by library client when writing custom painters or
-  //                    custom fillInfoBubble
-  //   start
-  //   end
-  //   latestStart
-  //   earliestEnd
-  //   instant      -- boolean. Controls precise/non-precise logic & duration/instant issues
-  //   text         -- event source attribute 'title' -- used as the label on Timelines and in bubbles.
-  //   description  -- used in bubbles
-  //   image        -- used in bubbles
-  //   link         -- used in bubbles
-  //   icon         -- on the Timeline
-  //   color        -- Timeline label and tape color
-  //   textColor    -- Timeline label color, overrides color attribute
-  //   hoverText    -- deprecated, here for backwards compatibility.
-  //                   Superseded by caption
-  //   caption      -- tooltip-like caption on the Timeline. Uses HTML title attribute
-  //   classname    -- used to set classname in Timeline. Enables better CSS selector rules
-  //   tapeImage    -- background image of the duration event's tape div on the Timeline
-  //   tapeRepeat   -- repeat attribute for tapeImage. {repeat | repeat-x | repeat-y }
-
-  function cleanArg(arg) {
-      // clean up an arg
-      return (args[arg] != null && args[arg] != "") ? args[arg] : null;
-  }
-
-  var id = args.id ? args.id.trim() : "";
-  this._id = id.length > 0 ? id : Timeline.EventUtils.getNewEventID();
-
-  this._instant = args.instant || (args.end == null);
-
-  this._start = args.start;
-  this._end = (args.end != null) ? args.end : args.start;
-
-  this._latestStart = (args.latestStart != null) ?
-                       args.latestStart : (args.instant ? this._end : this._start);
-  this._earliestEnd = (args.earliestEnd != null) ? args.earliestEnd : this._end;
-
-  // check sanity of dates since incorrect dates will later cause calculation errors
-  // when painting
-  var err = [];
-  if (this._start > this._latestStart) {
-      this._latestStart = this._start;
-      err.push("start is > latestStart");
-  }
-  if (this._start > this._earliestEnd) {
-      this._earliestEnd = this._latestStart;
-      err.push("start is > earliestEnd");
-  }
-  if (this._start > this._end) {
-      this._end = this._earliestEnd;
-      err.push("start is > end");
-  }
-  if (this._latestStart > this._earliestEnd) {
-      this._earliestEnd = this._latestStart;
-      err.push("latestStart is > earliestEnd");
-  }
-  if (this._latestStart > this._end) {
-      this._end = this._earliestEnd;
-      err.push("latestStart is > end");
-  }
-  if (this._earliestEnd > this._end) {
-      this._end = this._earliestEnd;
-      err.push("earliestEnd is > end");
-  }
-
-  this._eventID = cleanArg('eventID');
-  this._text = (args.text != null) ? SimileAjax.HTML.deEntify(args.text) : ""; // Change blank titles to ""
-  if (err.length > 0) {
-          this._text += " PROBLEM: " + err.join(", ");
-  }
-
-  this._description = SimileAjax.HTML.deEntify(args.description);
-  this._image = cleanArg('image');
-  this._link =  cleanArg('link');
-  this._title = cleanArg('caption');
-  if (this._title == null) {
-      this._title = cleanArg('hoverText'); // deprecated, kept for backwards compatibility
-  }
-
-  this._icon = cleanArg('icon');
-  this._color = cleanArg('color');
-  this._textColor = cleanArg('textColor');
-  this._classname = cleanArg('classname');
-  this._tapeImage = cleanArg('tapeImage');
-  this._tapeRepeat = cleanArg('tapeRepeat');
-  this._trackNum = cleanArg('trackNum');
-  if (this._trackNum != null) {
-      this._trackNum = parseInt(this._trackNum, 10);
-  }
-
-  this._wikiURL = null;
-  this._wikiSection = null;
-};
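
As an illustrative sketch of the args documented above (the dates, texts and the eventSource variable are placeholders, not taken from the library):

var exampleEvent = new Timeline.DefaultEventSource.Event({
    start: new Date(Date.UTC(2015, 5, 1)),   // June 1st 2015
    end:   new Date(Date.UTC(2015, 5, 22)),
    instant: false,                          // duration event
    text: "Example event",                   // label on the band and in the bubble
    description: "Placeholder description shown in the info bubble",
    color: "#58A0DC"
});
// eventSource.add(exampleEvent);  // eventSource: an assumed Timeline.DefaultEventSource instance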
-
-Timeline.DefaultEventSource.Event.prototype = {
-    getID:          function() { return this._id; },
-
-    isInstant:      function() { return this._instant; },
-    isImprecise:    function() { return this._start != this._latestStart || this._end != this._earliestEnd; },
-
-    getStart:       function() { return this._start; },
-    getEnd:         function() { return this._end; },
-    getLatestStart: function() { return this._latestStart; },
-    getEarliestEnd: function() { return this._earliestEnd; },
-
-    getEventID:     function() { return this._eventID; },
-    getText:        function() { return this._text; }, // title
-    getDescription: function() { return this._description; },
-    getImage:       function() { return this._image; },
-    getLink:        function() { return this._link; },
-
-    getIcon:        function() { return this._icon; },
-    getColor:       function() { return this._color; },
-    getTextColor:   function() { return this._textColor; },
-    getClassName:   function() { return this._classname; },
-    getTapeImage:   function() { return this._tapeImage; },
-    getTapeRepeat:  function() { return this._tapeRepeat; },
-    getTrackNum:    function() { return this._trackNum; },
-
-    getProperty:    function(name) { return null; },
-
-    getWikiURL:     function() { return this._wikiURL; },
-    getWikiSection: function() { return this._wikiSection; },
-    setWikiInfo: function(wikiURL, wikiSection) {
-        this._wikiURL = wikiURL;
-        this._wikiSection = wikiSection;
-    },
-
-    fillDescription: function(elmt) {
-        elmt.innerHTML = this._description;
-    },
-    fillWikiInfo: function(elmt) {
-        // Many bubbles will not support a wiki link.
-        //
-        // Strategy: assume no wiki link. If we do have
-        // enough parameters for one, then create it.
-        elmt.style.display = "none"; // default
-
-        if (this._wikiURL == null || this._wikiSection == null) {
-          return; // EARLY RETURN
-        }
-
-        // create the wikiID from the property or from the event text (the title)
-        var wikiID = this.getProperty("wikiID");
-        if (wikiID == null || wikiID.length == 0) {
-            wikiID = this.getText(); // use the title as the backup wiki id
-        }
-
-        if (wikiID == null || wikiID.length == 0) {
-          return; // No wikiID. Thus EARLY RETURN
-        }
-
-        // ready to go...
-        elmt.style.display = "inline";
-        wikiID = wikiID.replace(/\s/g, "_");
-        var url = this._wikiURL + this._wikiSection.replace(/\s/g, "_") + "/" + wikiID;
-        var a = document.createElement("a");
-        a.href = url;
-        a.target = "new";
-        a.innerHTML = Timeline.strings[Timeline.clientLocale].wikiLinkLabel;
-
-        elmt.appendChild(document.createTextNode("["));
-        elmt.appendChild(a);
-        elmt.appendChild(document.createTextNode("]"));
-    },
-
-    fillTime: function(elmt, labeller) {
-        if (this._instant) {
-            if (this.isImprecise()) {
-                elmt.appendChild(elmt.ownerDocument.createTextNode(labeller.labelPrecise(this._start)));
-                elmt.appendChild(elmt.ownerDocument.createElement("br"));
-                elmt.appendChild(elmt.ownerDocument.createTextNode(labeller.labelPrecise(this._end)));
-            } else {
-                elmt.appendChild(elmt.ownerDocument.createTextNode(labeller.labelPrecise(this._start)));
-            }
-        } else {
-            if (this.isImprecise()) {
-                elmt.appendChild(elmt.ownerDocument.createTextNode(
-                    labeller.labelPrecise(this._start) + " ~ " + labeller.labelPrecise(this._latestStart)));
-                elmt.appendChild(elmt.ownerDocument.createElement("br"));
-                elmt.appendChild(elmt.ownerDocument.createTextNode(
-                    labeller.labelPrecise(this._earliestEnd) + " ~ " + labeller.labelPrecise(this._end)));
-            } else {
-                elmt.appendChild(elmt.ownerDocument.createTextNode(labeller.labelPrecise(this._start)));
-                elmt.appendChild(elmt.ownerDocument.createElement("br"));
-                elmt.appendChild(elmt.ownerDocument.createTextNode(labeller.labelPrecise(this._end)));
-            }
-        }
-    },
-
-    fillInfoBubble: function(elmt, theme, labeller) {
-        var doc = elmt.ownerDocument;
-
-        var title = this.getText();
-        var link = this.getLink();
-        var image = this.getImage();
-
-        if (image != null) {
-            var img = doc.createElement("img");
-            img.src = image;
-
-            theme.event.bubble.imageStyler(img);
-            elmt.appendChild(img);
-        }
-
-        var divTitle = doc.createElement("div");
-        var textTitle = doc.createTextNode(title);
-        if (link != null) {
-            var a = doc.createElement("a");
-            a.href = link;
-            a.appendChild(textTitle);
-            divTitle.appendChild(a);
-        } else {
-            divTitle.appendChild(textTitle);
-        }
-        theme.event.bubble.titleStyler(divTitle);
-        elmt.appendChild(divTitle);
-
-        var divBody = doc.createElement("div");
-        this.fillDescription(divBody);
-        theme.event.bubble.bodyStyler(divBody);
-        elmt.appendChild(divBody);
-
-        var divTime = doc.createElement("div");
-        this.fillTime(divTime, labeller);
-        theme.event.bubble.timeStyler(divTime);
-        elmt.appendChild(divTime);
-
-        var divWiki = doc.createElement("div");
-        this.fillWikiInfo(divWiki);
-        theme.event.bubble.wikiStyler(divWiki);
-        elmt.appendChild(divWiki);
-    }
-};
-
-
-/*
- *  Original Event Painter
- *
- */
-
-/*
- *
- * To enable a single event listener to monitor everything
- * on a Timeline, we need a way to map from an event's icon,
- * label or tape element to the associated timeline, band and
- * specific event.
- *
- * Thus a set format is used for the id's of the
- * events' elements on the Timeline--
- *
- * element id format for labels, icons, tapes:
- *   labels: label-tl-<timelineID>-<band_index>-<evt.id>
- *    icons: icon-tl-<timelineID>-<band_index>-<evt.id>
- *    tapes: tape1-tl-<timelineID>-<band_index>-<evt.id>
- *           tape2-tl-<timelineID>-<band_index>-<evt.id>
- *           // some events have more than one tape
- *    highlight: highlight1-tl-<timelineID>-<band_index>-<evt.id>
- *               highlight2-tl-<timelineID>-<band_index>-<evt.id>
- *           // some events have more than one highlight div (future)
- * You can then retrieve the band/timeline objects and event object
- * by using Timeline.EventUtils.decodeEventElID
- *
- *
- */
-
-/*
- *    eventPaintListener functions receive calls about painting.
- *    function(band, op, evt, els)
- *       context: 'this' will be an OriginalEventPainter object.
- *                It has properties and methods for obtaining
- *                the relevant band, timeline, etc
- *       band = the band being painted
- *       op = 'paintStarting' // the painter is about to remove
- *            all previously painted events, if any. It will
- *            then start painting all of the visible events that
- *            pass the filter.
- *            evt = null, els = null
- *       op = 'paintEnded' // the painter has finished painting
- *            all of the visible events that passed the filter
- *            evt = null, els = null
- *       op = 'paintedEvent' // the painter just finished painting an event
- *            evt = event just painted
- *            els = array of painted elements' divs. Depending on the event,
- *                  the array could be just a tape or icon (if no label).
- *                  Or could include label, multiple tape divs (imprecise event),
- *                  highlight divs. The array is not ordered. The meaning of
- *                  each el is available by decoding the el's id
- *      Note that there may be no paintedEvent calls if no events were visible
- *      or passed the filter.
- */
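
For illustration, a minimal eventPaintListener following the protocol described above; the painter variable is an assumed OriginalEventPainter instance and SimileAjax.Debug.log is assumed available:

var paintLogger = function(band, op, evt, els) {
    if (op == 'paintedEvent') {
        // els holds the divs just painted for evt (label, icon, tape(s), highlight)
        SimileAjax.Debug.log("painted " + evt.getID() + " using " + els.length + " element(s)");
    } else {
        // 'paintStarting' or 'paintEnded': evt and els are null
        SimileAjax.Debug.log(op + " on band " + band.getIndex());
    }
};
// painter.addEventPaintListener(paintLogger);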
-
-Timeline.OriginalEventPainter = function(params) {
-    this._params = params;
-    this._onSelectListeners = [];
-    this._eventPaintListeners = [];
-
-    this._filterMatcher = null;
-    this._highlightMatcher = null;
-    this._frc = null;
-
-    this._eventIdToElmt = {};
-};
-
-Timeline.OriginalEventPainter.prototype.initialize = function(band, timeline) {
-    this._band = band;
-    this._timeline = timeline;
-
-    this._backLayer = null;
-    this._eventLayer = null;
-    this._lineLayer = null;
-    this._highlightLayer = null;
-
-    this._eventIdToElmt = null;
-};
-
-Timeline.OriginalEventPainter.prototype.getType = function() {
-    return 'original';
-};
-
-Timeline.OriginalEventPainter.prototype.addOnSelectListener = function(listener) {
-    this._onSelectListeners.push(listener);
-};
-
-Timeline.OriginalEventPainter.prototype.removeOnSelectListener = function(listener) {
-    for (var i = 0; i < this._onSelectListeners.length; i++) {
-        if (this._onSelectListeners[i] == listener) {
-            this._onSelectListeners.splice(i, 1);
-            break;
-        }
-    }
-};
-
-Timeline.OriginalEventPainter.prototype.addEventPaintListener = function(listener) {
-    this._eventPaintListeners.push(listener);
-};
-
-Timeline.OriginalEventPainter.prototype.removeEventPaintListener = function(listener) {
-    for (var i = 0; i < this._eventPaintListeners.length; i++) {
-        if (this._eventPaintListeners[i] == listener) {
-            this._eventPaintListeners.splice(i, 1);
-            break;
-        }
-    }
-};
-
-Timeline.OriginalEventPainter.prototype.getFilterMatcher = function() {
-    return this._filterMatcher;
-};
-
-Timeline.OriginalEventPainter.prototype.setFilterMatcher = function(filterMatcher) {
-    this._filterMatcher = filterMatcher;
-};
-
-Timeline.OriginalEventPainter.prototype.getHighlightMatcher = function() {
-    return this._highlightMatcher;
-};
-
-Timeline.OriginalEventPainter.prototype.setHighlightMatcher = function(highlightMatcher) {
-    this._highlightMatcher = highlightMatcher;
-};
-
-Timeline.OriginalEventPainter.prototype.paint = function() {
-    // Paints the events for a given section of the band--what is
-    // visible on screen and some extra.
-    var eventSource = this._band.getEventSource();
-    if (eventSource == null) {
-        return;
-    }
-
-    this._eventIdToElmt = {};
-    this._fireEventPaintListeners('paintStarting', null, null);
-    this._prepareForPainting();
-
-    var metrics = this._computeMetrics();
-    var minDate = this._band.getMinDate();
-    var maxDate = this._band.getMaxDate();
-
-    var filterMatcher = (this._filterMatcher != null) ?
-        this._filterMatcher :
-        function(evt) { return true; };
-    var highlightMatcher = (this._highlightMatcher != null) ?
-        this._highlightMatcher :
-        function(evt) { return -1; };
-
-    var iterator = eventSource.getEventReverseIterator(minDate, maxDate);
-    while (iterator.hasNext()) {
-        var evt = iterator.next();
-        if (filterMatcher(evt)) {
-            this.paintEvent(evt, metrics, this._params.theme, highlightMatcher(evt));
-        }
-    }
-
-    this._highlightLayer.style.display = "block";
-    this._lineLayer.style.display = "block";
-    this._eventLayer.style.display = "block";
-    // update the band object for max number of tracks in this section of the ether
-    this._band.updateEventTrackInfo(this._tracks.length, metrics.trackIncrement);
-    this._fireEventPaintListeners('paintEnded', null, null);
-
-    this._setOrthogonalOffset(metrics);
-};
-
-Timeline.OriginalEventPainter.prototype.softPaint = function() {
-    this._setOrthogonalOffset(this._computeMetrics());
-};
-
-Timeline.OriginalEventPainter.prototype._setOrthogonalOffset = function(metrics) {
-    var actualViewWidth = 2 * metrics.trackOffset + this._tracks.length * metrics.trackIncrement;
-    var minOrthogonalOffset = Math.min(0, this._band.getViewWidth() - actualViewWidth);
-    var orthogonalOffset = Math.max(minOrthogonalOffset, this._band.getViewOrthogonalOffset());
-
-    this._highlightLayer.style.top =
-        this._lineLayer.style.top =
-            this._eventLayer.style.top =
-                orthogonalOffset + "px";
-};
-
-Timeline.OriginalEventPainter.prototype._computeMetrics = function() {
-     var eventTheme = this._params.theme.event;
-     var trackHeight = Math.max(eventTheme.track.height, eventTheme.tape.height +
-                         this._frc.getLineHeight());
-     var metrics = {
-            trackOffset: eventTheme.track.offset,
-            trackHeight: trackHeight,
-               trackGap: eventTheme.track.gap,
-         trackIncrement: trackHeight + eventTheme.track.gap,
-                   icon: eventTheme.instant.icon,
-              iconWidth: eventTheme.instant.iconWidth,
-             iconHeight: eventTheme.instant.iconHeight,
-             labelWidth: eventTheme.label.width,
-           maxLabelChar: eventTheme.label.maxLabelChar,
-    impreciseIconMargin: eventTheme.instant.impreciseIconMargin
-     };
-
-     return metrics;
-};
-
-Timeline.OriginalEventPainter.prototype._prepareForPainting = function() {
-    // Remove everything previously painted: highlight, line and event layers.
-    // Prepare blank layers for painting.
-    var band = this._band;
-
-    if (this._backLayer == null) {
-        this._backLayer = this._band.createLayerDiv(0, "timeline-band-events");
-        this._backLayer.style.visibility = "hidden";
-
-        var eventLabelPrototype = document.createElement("span");
-        eventLabelPrototype.className = "timeline-event-label";
-        this._backLayer.appendChild(eventLabelPrototype);
-        this._frc = SimileAjax.Graphics.getFontRenderingContext(eventLabelPrototype);
-    }
-    this._frc.update();
-    this._tracks = [];
-
-    if (this._highlightLayer != null) {
-        band.removeLayerDiv(this._highlightLayer);
-    }
-    this._highlightLayer = band.createLayerDiv(105, "timeline-band-highlights");
-    this._highlightLayer.style.display = "none";
-
-    if (this._lineLayer != null) {
-        band.removeLayerDiv(this._lineLayer);
-    }
-    this._lineLayer = band.createLayerDiv(110, "timeline-band-lines");
-    this._lineLayer.style.display = "none";
-
-    if (this._eventLayer != null) {
-        band.removeLayerDiv(this._eventLayer);
-    }
-    this._eventLayer = band.createLayerDiv(115, "timeline-band-events");
-    this._eventLayer.style.display = "none";
-};
-
-Timeline.OriginalEventPainter.prototype.paintEvent = function(evt, metrics, theme, highlightIndex) {
-    if (evt.isInstant()) {
-        this.paintInstantEvent(evt, metrics, theme, highlightIndex);
-    } else {
-        this.paintDurationEvent(evt, metrics, theme, highlightIndex);
-    }
-};
-
-Timeline.OriginalEventPainter.prototype.paintInstantEvent = function(evt, metrics, theme, highlightIndex) {
-    if (evt.isImprecise()) {
-        this.paintImpreciseInstantEvent(evt, metrics, theme, highlightIndex);
-    } else {
-        this.paintPreciseInstantEvent(evt, metrics, theme, highlightIndex);
-    }
-}
-
-Timeline.OriginalEventPainter.prototype.paintDurationEvent = function(evt, metrics, theme, highlightIndex) {
-    if (evt.isImprecise()) {
-        this.paintImpreciseDurationEvent(evt, metrics, theme, highlightIndex);
-    } else {
-        this.paintPreciseDurationEvent(evt, metrics, theme, highlightIndex);
-    }
-}
-
-Timeline.OriginalEventPainter.prototype.paintPreciseInstantEvent = function(evt, metrics, theme, highlightIndex) {
-    var doc = this._timeline.getDocument();
-    var text = evt.getText();
-
-    var startDate = evt.getStart();
-    var startPixel = Math.round(this._band.dateToPixelOffset(startDate));
-    var iconRightEdge = Math.round(startPixel + metrics.iconWidth / 2);
-    var iconLeftEdge = Math.round(startPixel - metrics.iconWidth / 2);
-
-    var labelDivClassName = this._getLabelDivClassName(evt);
-    var labelSize = this._frc.computeSize(text, labelDivClassName);
-    var labelLeft = iconRightEdge + theme.event.label.offsetFromLine;
-    var labelRight = labelLeft + labelSize.width;
-
-    var rightEdge = labelRight;
-    var track = this._findFreeTrack(evt, rightEdge);
-
-    var labelTop = Math.round(
-        metrics.trackOffset + track * metrics.trackIncrement +
-        metrics.trackHeight / 2 - labelSize.height / 2);
-
-    var iconElmtData = this._paintEventIcon(evt, track, iconLeftEdge, metrics, theme, 0);
-    var labelElmtData = this._paintEventLabel(evt, text, labelLeft, labelTop, labelSize.width,
-        labelSize.height, theme, labelDivClassName, highlightIndex);
-    var els = [iconElmtData.elmt, labelElmtData.elmt];
-
-    var self = this;
-    var clickHandler = function(elmt, domEvt, target) {
-        return self._onClickInstantEvent(iconElmtData.elmt, domEvt, evt);
-    };
-    SimileAjax.DOM.registerEvent(iconElmtData.elmt, "mousedown", clickHandler);
-    SimileAjax.DOM.registerEvent(labelElmtData.elmt, "mousedown", clickHandler);
-
-    var hDiv = this._createHighlightDiv(highlightIndex, iconElmtData, theme, evt);
-    if (hDiv != null) {els.push(hDiv);}
-    this._fireEventPaintListeners('paintedEvent', evt, els);
-
-
-    this._eventIdToElmt[evt.getID()] = iconElmtData.elmt;
-    this._tracks[track] = iconLeftEdge;
-};
-
-Timeline.OriginalEventPainter.prototype.paintImpreciseInstantEvent = function(evt, metrics, theme, highlightIndex) {
-    var doc = this._timeline.getDocument();
-    var text = evt.getText();
-
-    var startDate = evt.getStart();
-    var endDate = evt.getEnd();
-    var startPixel = Math.round(this._band.dateToPixelOffset(startDate));
-    var endPixel = Math.round(this._band.dateToPixelOffset(endDate));
-
-    var iconRightEdge = Math.round(startPixel + metrics.iconWidth / 2);
-    var iconLeftEdge = Math.round(startPixel - metrics.iconWidth / 2);
-
-    var labelDivClassName = this._getLabelDivClassName(evt);
-    var labelSize = this._frc.computeSize(text, labelDivClassName);
-    var labelLeft = iconRightEdge + theme.event.label.offsetFromLine;
-    var labelRight = labelLeft + labelSize.width;
-
-    var rightEdge = Math.max(labelRight, endPixel);
-    var track = this._findFreeTrack(evt, rightEdge);
-    var tapeHeight = theme.event.tape.height;
-    var labelTop = Math.round(
-        metrics.trackOffset + track * metrics.trackIncrement + tapeHeight);
-
-    var iconElmtData = this._paintEventIcon(evt, track, iconLeftEdge, metrics, theme, tapeHeight);
-    var labelElmtData = this._paintEventLabel(evt, text, labelLeft, labelTop, labelSize.width,
-                        labelSize.height, theme, labelDivClassName, highlightIndex);
-
-    var color = evt.getColor();
-    color = color != null ? color : theme.event.instant.impreciseColor;
-
-    var tapeElmtData = this._paintEventTape(evt, track, startPixel, endPixel,
-        color, theme.event.instant.impreciseOpacity, metrics, theme, 0);
-    var els = [iconElmtData.elmt, labelElmtData.elmt, tapeElmtData.elmt];
-
-    var self = this;
-    var clickHandler = function(elmt, domEvt, target) {
-        return self._onClickInstantEvent(iconElmtData.elmt, domEvt, evt);
-    };
-    SimileAjax.DOM.registerEvent(iconElmtData.elmt, "mousedown", clickHandler);
-    SimileAjax.DOM.registerEvent(tapeElmtData.elmt, "mousedown", clickHandler);
-    SimileAjax.DOM.registerEvent(labelElmtData.elmt, "mousedown", clickHandler);
-
-    var hDiv = this._createHighlightDiv(highlightIndex, iconElmtData, theme, evt);
-    if (hDiv != null) {els.push(hDiv);}
-    this._fireEventPaintListeners('paintedEvent', evt, els);
-
-    this._eventIdToElmt[evt.getID()] = iconElmtData.elmt;
-    this._tracks[track] = iconLeftEdge;
-};
-
-Timeline.OriginalEventPainter.prototype.paintPreciseDurationEvent = function(evt, metrics, theme, highlightIndex) {
-    var doc = this._timeline.getDocument();
-    var text = evt.getText();
-
-    var startDate = evt.getStart();
-    var endDate = evt.getEnd();
-    var startPixel = Math.round(this._band.dateToPixelOffset(startDate));
-    var endPixel = Math.round(this._band.dateToPixelOffset(endDate));
-
-    var labelDivClassName = this._getLabelDivClassName(evt);
-    var labelSize = this._frc.computeSize(text, labelDivClassName);
-    var labelLeft = startPixel;
-    var labelRight = labelLeft + labelSize.width;
-
-    var rightEdge = Math.max(labelRight, endPixel);
-    var track = this._findFreeTrack(evt, rightEdge);
-    var labelTop = Math.round(
-        metrics.trackOffset + track * metrics.trackIncrement + theme.event.tape.height);
-
-    var color = evt.getColor();
-    color = color != null ? color : theme.event.duration.color;
-
-    var tapeElmtData = this._paintEventTape(evt, track, startPixel, endPixel, color, 100, metrics, theme, 0);
-    var labelElmtData = this._paintEventLabel(evt, text, labelLeft, labelTop, labelSize.width,
-      labelSize.height, theme, labelDivClassName, highlightIndex);
-    var els = [tapeElmtData.elmt, labelElmtData.elmt];
-
-    var self = this;
-    var clickHandler = function(elmt, domEvt, target) {
-        return self._onClickDurationEvent(tapeElmtData.elmt, domEvt, evt);
-    };
-    SimileAjax.DOM.registerEvent(tapeElmtData.elmt, "mousedown", clickHandler);
-    SimileAjax.DOM.registerEvent(labelElmtData.elmt, "mousedown", clickHandler);
-
-    var hDiv = this._createHighlightDiv(highlightIndex, tapeElmtData, theme, evt);
-    if (hDiv != null) {els.push(hDiv);}
-    this._fireEventPaintListeners('paintedEvent', evt, els);
-
-    this._eventIdToElmt[evt.getID()] = tapeElmtData.elmt;
-    this._tracks[track] = startPixel;
-};
-
-Timeline.OriginalEventPainter.prototype.paintImpreciseDurationEvent = function(evt, metrics, theme, highlightIndex) {
-    var doc = this._timeline.getDocument();
-    var text = evt.getText();
-
-    var startDate = evt.getStart();
-    var latestStartDate = evt.getLatestStart();
-    var endDate = evt.getEnd();
-    var earliestEndDate = evt.getEarliestEnd();
-
-    var startPixel = Math.round(this._band.dateToPixelOffset(startDate));
-    var latestStartPixel = Math.round(this._band.dateToPixelOffset(latestStartDate));
-    var endPixel = Math.round(this._band.dateToPixelOffset(endDate));
-    var earliestEndPixel = Math.round(this._band.dateToPixelOffset(earliestEndDate));
-
-    var labelDivClassName = this._getLabelDivClassName(evt);
-    var labelSize = this._frc.computeSize(text, labelDivClassName);
-    var labelLeft = latestStartPixel;
-    var labelRight = labelLeft + labelSize.width;
-
-    var rightEdge = Math.max(labelRight, endPixel);
-    var track = this._findFreeTrack(evt, rightEdge);
-    var labelTop = Math.round(
-        metrics.trackOffset + track * metrics.trackIncrement + theme.event.tape.height);
-
-    var color = evt.getColor();
-    color = color != null ? color : theme.event.duration.color;
-
-    // Imprecise events can have two event tapes
-    // The imprecise dates tape, uses opacity to be dimmer than precise dates
-    var impreciseTapeElmtData = this._paintEventTape(evt, track, startPixel, endPixel,
-        theme.event.duration.impreciseColor,
-        theme.event.duration.impreciseOpacity, metrics, theme, 0);
-    // The precise dates tape, regular (100%) opacity
-    var tapeElmtData = this._paintEventTape(evt, track, latestStartPixel,
-        earliestEndPixel, color, 100, metrics, theme, 1);
-
-    var labelElmtData = this._paintEventLabel(evt, text, labelLeft, labelTop,
-        labelSize.width, labelSize.height, theme, labelDivClassName, highlightIndex);
-    var els = [impreciseTapeElmtData.elmt, tapeElmtData.elmt, labelElmtData.elmt];
-
-    var self = this;
-    var clickHandler = function(elmt, domEvt, target) {
-        return self._onClickDurationEvent(tapeElmtData.elmt, domEvt, evt);
-    };
-    SimileAjax.DOM.registerEvent(tapeElmtData.elmt, "mousedown", clickHandler);
-    SimileAjax.DOM.registerEvent(labelElmtData.elmt, "mousedown", clickHandler);
-
-    var hDiv = this._createHighlightDiv(highlightIndex, tapeElmtData, theme, evt);
-    if (hDiv != null) {els.push(hDiv);}
-    this._fireEventPaintListeners('paintedEvent', evt, els);
-
-    this._eventIdToElmt[evt.getID()] = tapeElmtData.elmt;
-    this._tracks[track] = startPixel;
-};
-
-Timeline.OriginalEventPainter.prototype._encodeEventElID = function(elType, evt) {
-    return Timeline.EventUtils.encodeEventElID(this._timeline, this._band, elType, evt);
-};
-
-Timeline.OriginalEventPainter.prototype._findFreeTrack = function(event, rightEdge) {
-    var trackAttribute = event.getTrackNum();
-    if (trackAttribute != null) {
-        return trackAttribute; // early return since event includes track number
-    }
-
-    // normal case: find an open track
-    for (var i = 0; i < this._tracks.length; i++) {
-        var t = this._tracks[i];
-        if (t > rightEdge) {
-            break;
-        }
-    }
-    return i;
-};
-
-Timeline.OriginalEventPainter.prototype._paintEventIcon = function(evt, iconTrack, left, metrics, theme, tapeHeight) {
-    // If no tape, then paint the icon in the middle of the track.
-    // If there is a tape, paint the icon below the tape + impreciseIconMargin
-    var icon = evt.getIcon();
-    icon = icon != null ? icon : metrics.icon;
-
-    var top; // top of the icon
-    if (tapeHeight > 0) {
-        top = metrics.trackOffset + iconTrack * metrics.trackIncrement +
-              tapeHeight + metrics.impreciseIconMargin;
-    } else {
-        var middle = metrics.trackOffset + iconTrack * metrics.trackIncrement +
-                     metrics.trackHeight / 2;
-        top = Math.round(middle - metrics.iconHeight / 2);
-    }
-    var img = SimileAjax.Graphics.createTranslucentImage(icon);
-    var iconDiv = this._timeline.getDocument().createElement("div");
-    iconDiv.className = this._getElClassName('timeline-event-icon', evt, 'icon');
-    iconDiv.id = this._encodeEventElID('icon', evt);
-    iconDiv.style.left = left + "px";
-    iconDiv.style.top = top + "px";
-    iconDiv.appendChild(img);
-
-    if(evt._title != null)
-        iconDiv.title = evt._title;
-
-    this._eventLayer.appendChild(iconDiv);
-
-    return {
-        left:   left,
-        top:    top,
-        width:  metrics.iconWidth,
-        height: metrics.iconHeight,
-        elmt:   iconDiv
-    };
-};
-
-Timeline.OriginalEventPainter.prototype._paintEventLabel = function(evt, text, left, top, width,
-    height, theme, labelDivClassName, highlightIndex) {
-    var doc = this._timeline.getDocument();
-
-    var labelDiv = doc.createElement("div");
-    labelDiv.className = labelDivClassName;
-    labelDiv.id = this._encodeEventElID('label', evt);
-    labelDiv.style.left = left + "px";
-    labelDiv.style.width = width + "px";
-    labelDiv.style.top = top + "px";
-    labelDiv.innerHTML = text;
-
-    if(evt._title != null)
-        labelDiv.title = evt._title;
-
-    var color = evt.getTextColor();
-    if (color == null) {
-        color = evt.getColor();
-    }
-    if (color != null) {
-        labelDiv.style.color = color;
-    }
-    if (theme.event.highlightLabelBackground && highlightIndex >= 0) {
-        labelDiv.style.background = this._getHighlightColor(highlightIndex, theme);
-    }
-
-    this._eventLayer.appendChild(labelDiv);
-
-    return {
-        left:   left,
-        top:    top,
-        width:  width,
-        height: height,
-        elmt:   labelDiv
-    };
-};
-
-Timeline.OriginalEventPainter.prototype._paintEventTape = function(
-    evt, iconTrack, startPixel, endPixel, color, opacity, metrics, theme, tape_index) {
-
-    var tapeWidth = endPixel - startPixel;
-    var tapeHeight = theme.event.tape.height;
-    var top = metrics.trackOffset + iconTrack * metrics.trackIncrement;
-
-    var tapeDiv = this._timeline.getDocument().createElement("div");
-    tapeDiv.className = this._getElClassName('timeline-event-tape', evt, 'tape');
-    tapeDiv.id = this._encodeEventElID('tape' + tape_index, evt);
-    tapeDiv.style.left = startPixel + "px";
-    tapeDiv.style.width = tapeWidth + "px";
-    tapeDiv.style.height = tapeHeight + "px";
-    tapeDiv.style.top = top + "px";
-
-    if(evt._title != null)
-        tapeDiv.title = evt._title;
-
-    if(color != null) {
-        tapeDiv.style.backgroundColor = color;
-    }
-
-    var backgroundImage = evt.getTapeImage();
-    var backgroundRepeat = evt.getTapeRepeat();
-    backgroundRepeat = backgroundRepeat != null ? backgroundRepeat : 'repeat';
-    if(backgroundImage != null) {
-      tapeDiv.style.backgroundImage = "url(" + backgroundImage + ")";
-      tapeDiv.style.backgroundRepeat = backgroundRepeat;
-    }
-
-    SimileAjax.Graphics.setOpacity(tapeDiv, opacity);
-
-    this._eventLayer.appendChild(tapeDiv);
-
-    return {
-        left:   startPixel,
-        top:    top,
-        width:  tapeWidth,
-        height: tapeHeight,
-        elmt:   tapeDiv
-    };
-}
-
-Timeline.OriginalEventPainter.prototype._getLabelDivClassName = function(evt) {
-    return this._getElClassName('timeline-event-label', evt, 'label');
-};
-
-Timeline.OriginalEventPainter.prototype._getElClassName = function(elClassName, evt, prefix) {
-    // Prefix and '_' is added to the event's classname. Set to null for no prefix
-    var evt_classname = evt.getClassName(),
-        pieces = [];
-
-    if (evt_classname) {
-      if (prefix) {pieces.push(prefix + '-' + evt_classname + ' ');}
-      pieces.push(evt_classname + ' ');
-    }
-    pieces.push(elClassName);
-    return(pieces.join(''));
-};
-
-Timeline.OriginalEventPainter.prototype._getHighlightColor = function(highlightIndex, theme) {
-    var highlightColors = theme.event.highlightColors;
-    return highlightColors[Math.min(highlightIndex, highlightColors.length - 1)];
-};
-
-Timeline.OriginalEventPainter.prototype._createHighlightDiv = function(highlightIndex, dimensions, theme, evt) {
-    var div = null;
-    if (highlightIndex >= 0) {
-        var doc = this._timeline.getDocument();
-        var color = this._getHighlightColor(highlightIndex, theme);
-
-        div = doc.createElement("div");
-        div.className = this._getElClassName('timeline-event-highlight', evt, 'highlight');
-        div.id = this._encodeEventElID('highlight0', evt); // in future will have other
-                                                           // highlight divs for tapes + icons
-        div.style.position = "absolute";
-        div.style.overflow = "hidden";
-        div.style.left =    (dimensions.left - 2) + "px";
-        div.style.width =   (dimensions.width + 4) + "px";
-        div.style.top =     (dimensions.top - 2) + "px";
-        div.style.height =  (dimensions.height + 4) + "px";
-        div.style.background = color;
-
-        this._highlightLayer.appendChild(div);
-    }
-    return div;
-};
-
-Timeline.OriginalEventPainter.prototype._onClickInstantEvent = function(icon, domEvt, evt) {
-    var c = SimileAjax.DOM.getPageCoordinates(icon);
-    this._showBubble(
-        c.left + Math.ceil(icon.offsetWidth / 2),
-        c.top + Math.ceil(icon.offsetHeight / 2),
-        evt
-    );
-    this._fireOnSelect(evt.getID());
-
-    domEvt.cancelBubble = true;
-    SimileAjax.DOM.cancelEvent(domEvt);
-    return false;
-};
-
-Timeline.OriginalEventPainter.prototype._onClickDurationEvent = function(target, domEvt, evt) {
-    if ("pageX" in domEvt) {
-        var x = domEvt.pageX;
-        var y = domEvt.pageY;
-    } else {
-        var c = SimileAjax.DOM.getPageCoordinates(target);
-        var x = domEvt.offsetX + c.left;
-        var y = domEvt.offsetY + c.top;
-    }
-    this._showBubble(x, y, evt);
-    this._fireOnSelect(evt.getID());
-
-    domEvt.cancelBubble = true;
-    SimileAjax.DOM.cancelEvent(domEvt);
-    return false;
-};
-
-Timeline.OriginalEventPainter.prototype.showBubble = function(evt) {
-    var elmt = this._eventIdToElmt[evt.getID()];
-    if (elmt) {
-        var c = SimileAjax.DOM.getPageCoordinates(elmt);
-        this._showBubble(c.left + elmt.offsetWidth / 2, c.top + elmt.offsetHeight / 2, evt);
-    }
-};
-
-Timeline.OriginalEventPainter.prototype._showBubble = function(x, y, evt) {
-    var div = document.createElement("div");
-    var themeBubble = this._params.theme.event.bubble;
-    evt.fillInfoBubble(div, this._params.theme, this._band.getLabeller());
-
-    SimileAjax.WindowManager.cancelPopups();
-    SimileAjax.Graphics.createBubbleForContentAndPoint(div, x, y,
-        themeBubble.width, null, themeBubble.maxHeight);
-};
-
-Timeline.OriginalEventPainter.prototype._fireOnSelect = function(eventID) {
-    for (var i = 0; i < this._onSelectListeners.length; i++) {
-        this._onSelectListeners[i](eventID);
-    }
-};
-
-Timeline.OriginalEventPainter.prototype._fireEventPaintListeners = function(op, evt, els) {
-    for (var i = 0; i < this._eventPaintListeners.length; i++) {
-        this._eventPaintListeners[i](this._band, op, evt, els);
-    }
-};
-/*
- *  Detailed Event Painter
- *
- */
-
-// Note: a number of features from original-painter
-//       are not yet implemented in detailed painter.
-//       Eg classname, id attributes for icons, labels, tapes
-
-Timeline.DetailedEventPainter = function(params) {
-    this._params = params;
-    this._onSelectListeners = [];
-
-    this._filterMatcher = null;
-    this._highlightMatcher = null;
-    this._frc = null;
-
-    this._eventIdToElmt = {};
-};
-
-Timeline.DetailedEventPainter.prototype.initialize = function(band, timeline) {
-    this._band = band;
-    this._timeline = timeline;
-
-    this._backLayer = null;
-    this._eventLayer = null;
-    this._lineLayer = null;
-    this._highlightLayer = null;
-
-    this._eventIdToElmt = null;
-};
-
-Timeline.DetailedEventPainter.prototype.getType = function() {
-    return 'detailed';
-};
-
-Timeline.DetailedEventPainter.prototype.addOnSelectListener = function(listener) {
-    this._onSelectListeners.push(listener);
-};
-
-Timeline.DetailedEventPainter.prototype.removeOnSelectListener = function(listener) {
-    for (var i = 0; i < this._onSelectListeners.length; i++) {
-        if (this._onSelectListeners[i] == listener) {
-            this._onSelectListeners.splice(i, 1);
-            break;
-        }
-    }
-};
-
-Timeline.DetailedEventPainter.prototype.getFilterMatcher = function() {
-    return this._filterMatcher;
-};
-
-Timeline.DetailedEventPainter.prototype.setFilterMatcher = function(filterMatcher) {
-    this._filterMatcher = filterMatcher;
-};
-
-Timeline.DetailedEventPainter.prototype.getHighlightMatcher = function() {
-    return this._highlightMatcher;
-};
-
-Timeline.DetailedEventPainter.prototype.setHighlightMatcher = function(highlightMatcher) {
-    this._highlightMatcher = highlightMatcher;
-};
-
-Timeline.DetailedEventPainter.prototype.paint = function() {
-    var eventSource = this._band.getEventSource();
-    if (eventSource == null) {
-        return;
-    }
-
-    this._eventIdToElmt = {};
-    this._prepareForPainting();
-
-    var eventTheme = this._params.theme.event;
-    var trackHeight = Math.max(eventTheme.track.height, this._frc.getLineHeight());
-    var metrics = {
-        trackOffset:    Math.round(this._band.getViewWidth() / 2 - trackHeight / 2),
-        trackHeight:    trackHeight,
-        trackGap:       eventTheme.track.gap,
-        trackIncrement: trackHeight + eventTheme.track.gap,
-        icon:           eventTheme.instant.icon,
-        iconWidth:      eventTheme.instant.iconWidth,
-        iconHeight:     eventTheme.instant.iconHeight,
-        labelWidth:     eventTheme.label.width
-    }
-
-    var minDate = this._band.getMinDate();
-    var maxDate = this._band.getMaxDate();
-
-    var filterMatcher = (this._filterMatcher != null) ?
-        this._filterMatcher :
-        function(evt) { return true; };
-    var highlightMatcher = (this._highlightMatcher != null) ?
-        this._highlightMatcher :
-        function(evt) { return -1; };
-
-    var iterator = eventSource.getEventReverseIterator(minDate, maxDate);
-    while (iterator.hasNext()) {
-        var evt = iterator.next();
-        if (filterMatcher(evt)) {
-            this.paintEvent(evt, metrics, this._params.theme, highlightMatcher(evt));
-        }
-    }
-
-    this._highlightLayer.style.display = "block";
-    this._lineLayer.style.display = "block";
-    this._eventLayer.style.display = "block";
-    // update the band object for max number of tracks in this section of the ether
-    this._band.updateEventTrackInfo(this._lowerTracks.length + this._upperTracks.length,
-                                 metrics.trackIncrement);
-};
-
-Timeline.DetailedEventPainter.prototype.softPaint = function() {
-};
-
-Timeline.DetailedEventPainter.prototype._prepareForPainting = function() {
-    var band = this._band;
-
-    if (this._backLayer == null) {
-        this._backLayer = this._band.createLayerDiv(0, "timeline-band-events");
-        this._backLayer.style.visibility = "hidden";
-
-        var eventLabelPrototype = document.createElement("span");
-        eventLabelPrototype.className = "timeline-event-label";
-        this._backLayer.appendChild(eventLabelPrototype);
-        this._frc = SimileAjax.Graphics.getFontRenderingContext(eventLabelPrototype);
-    }
-    this._frc.update();
-    this._lowerTracks = [];
-    this._upperTracks = [];
-
-    if (this._highlightLayer != null) {
-        band.removeLayerDiv(this._highlightLayer);
-    }
-    this._highlightLayer = band.createLayerDiv(105, "timeline-band-highlights");
-    this._highlightLayer.style.display = "none";
-
-    if (this._lineLayer != null) {
-        band.removeLayerDiv(this._lineLayer);
-    }
-    this._lineLayer = band.createLayerDiv(110, "timeline-band-lines");
-    this._lineLayer.style.display = "none";
-
-    if (this._eventLayer != null) {
-        band.removeLayerDiv(this._eventLayer);
-    }
-    this._eventLayer = band.createLayerDiv(110, "timeline-band-events");
-    this._eventLayer.style.display = "none";
-};
-
-Timeline.DetailedEventPainter.prototype.paintEvent = function(evt, metrics, theme, highlightIndex) {
-    if (evt.isInstant()) {
-        this.paintInstantEvent(evt, metrics, theme, highlightIndex);
-    } else {
-        this.paintDurationEvent(evt, metrics, theme, highlightIndex);
-    }
-};
-
-Timeline.DetailedEventPainter.prototype.paintInstantEvent = function(evt, metrics, theme, highlightIndex) {
-    if (evt.isImprecise()) {
-        this.paintImpreciseInstantEvent(evt, metrics, theme, highlightIndex);
-    } else {
-        this.paintPreciseInstantEvent(evt, metrics, theme, highlightIndex);
-    }
-};
-
-Timeline.DetailedEventPainter.prototype.paintDurationEvent = function(evt, metrics, theme, highlightIndex) {
-    if (evt.isImprecise()) {
-        this.paintImpreciseDurationEvent(evt, metrics, theme, highlightIndex);
-    } else {
-        this.paintPreciseDurationEvent(evt, metrics, theme, highlightIndex);
-    }
-};
-
-Timeline.DetailedEventPainter.prototype.paintPreciseInstantEvent = function(evt, metrics, theme, highlightIndex) {
-    var doc = this._timeline.getDocument();
-    var text = evt.getText();
-
-    var startDate = evt.getStart();
-    var startPixel = Math.round(this._band.dateToPixelOffset(startDate));
-    var iconRightEdge = Math.round(startPixel + metrics.iconWidth / 2);
-    var iconLeftEdge = Math.round(startPixel - metrics.iconWidth / 2);
-
-    var labelSize = this._frc.computeSize(text);
-    var iconTrack = this._findFreeTrackForSolid(iconRightEdge, startPixel);
-    var iconElmtData = this._paintEventIcon(evt, iconTrack, iconLeftEdge, metrics, theme);
-
-    var labelLeft = iconRightEdge + theme.event.label.offsetFromLine;
-    var labelTrack = iconTrack;
-
-    var iconTrackData = this._getTrackData(iconTrack);
-    if (Math.min(iconTrackData.solid, iconTrackData.text) >= labelLeft + labelSize.width) { // label on the same track, to the right of icon
-        iconTrackData.solid = iconLeftEdge;
-        iconTrackData.text = labelLeft;
-    } else { // label on a different track, below icon
-        iconTrackData.solid = iconLeftEdge;
-
-        labelLeft = startPixel + theme.event.label.offsetFromLine;
-        labelTrack = this._findFreeTrackForText(iconTrack, labelLeft + labelSize.width, function(t) { t.line = startPixel - 2; });
-        this._getTrackData(labelTrack).text = iconLeftEdge;
-
-        this._paintEventLine(evt, startPixel, iconTrack, labelTrack, metrics, theme);
-    }
-
-    var labelTop = Math.round(
-        metrics.trackOffset + labelTrack * metrics.trackIncrement +
-        metrics.trackHeight / 2 - labelSize.height / 2);
-
-    var labelElmtData = this._paintEventLabel(evt, text, labelLeft, labelTop, labelSize.width, labelSize.height, theme);
-
-    var self = this;
-    var clickHandler = function(elmt, domEvt, target) {
-        return self._onClickInstantEvent(iconElmtData.elmt, domEvt, evt);
-    };
-    SimileAjax.DOM.registerEvent(iconElmtData.elmt, "mousedown", clickHandler);
-    SimileAjax.DOM.registerEvent(labelElmtData.elmt, "mousedown", clickHandler);
-
-    this._createHighlightDiv(highlightIndex, iconElmtData, theme);
-
-    this._eventIdToElmt[evt.getID()] = iconElmtData.elmt;
-};
-
-Timeline.DetailedEventPainter.prototype.paintImpreciseInstantEvent = function(evt, metrics, theme, highlightIndex) {
-    var doc = this._timeline.getDocument();
-    var text = evt.getText();
-
-    var startDate = evt.getStart();
-    var endDate = evt.getEnd();
-    var startPixel = Math.round(this._band.dateToPixelOffset(startDate));
-    var endPixel = Math.round(this._band.dateToPixelOffset(endDate));
-
-    var iconRightEdge = Math.round(startPixel + metrics.iconWidth / 2);
-    var iconLeftEdge = Math.round(startPixel - metrics.iconWidth / 2);
-
-    var labelSize = this._frc.computeSize(text);
-    var iconTrack = this._findFreeTrackForSolid(endPixel, startPixel);
-
-    var tapeElmtData = this._paintEventTape(evt, iconTrack, startPixel, endPixel,
-        theme.event.instant.impreciseColor, theme.event.instant.impreciseOpacity, metrics, theme);
-    var iconElmtData = this._paintEventIcon(evt, iconTrack, iconLeftEdge, metrics, theme);
-
-    var iconTrackData = this._getTrackData(iconTrack);
-    iconTrackData.solid = iconLeftEdge;
-
-    var labelLeft = iconRightEdge + theme.event.label.offsetFromLine;
-    var labelRight = labelLeft + labelSize.width;
-    var labelTrack;
-    if (labelRight < endPixel) {
-        labelTrack = iconTrack;
-    } else {
-        labelLeft = startPixel + theme.event.label.offsetFromLine;
-        labelRight = labelLeft + labelSize.width;
-
-        labelTrack = this._findFreeTrackForText(iconTrack, labelRight, function(t) { t.line = startPixel - 2; });
-        this._getTrackData(labelTrack).text = iconLeftEdge;
-
-        this._paintEventLine(evt, startPixel, iconTrack, labelTrack, metrics, theme);
-    }
-    var labelTop = Math.round(
-        metrics.trackOffset + labelTrack * metrics.trackIncrement +
-        metrics.trackHeight / 2 - labelSize.height / 2);
-
-    var labelElmtData = this._paintEventLabel(evt, text, labelLeft, labelTop, labelSize.width, labelSize.height, theme);
-
-    var self = this;
-    var clickHandler = function(elmt, domEvt, target) {
-        return self._onClickInstantEvent(iconElmtData.elmt, domEvt, evt);
-    };
-    SimileAjax.DOM.registerEvent(iconElmtData.elmt, "mousedown", clickHandler);
-    SimileAjax.DOM.registerEvent(tapeElmtData.elmt, "mousedown", clickHandler);
-    SimileAjax.DOM.registerEvent(labelElmtData.elmt, "mousedown", clickHandler);
-
-    this._createHighlightDiv(highlightIndex, iconElmtData, theme);
-
-    this._eventIdToElmt[evt.getID()] = iconElmtData.elmt;
-};
-
-Timeline.DetailedEventPainter.prototype.paintPreciseDurationEvent = function(evt, metrics, theme, highlightIndex) {
-    var doc = this._timeline.getDocument();
-    var text = evt.getText();
-
-    var startDate = evt.getStart();
-    var endDate = evt.getEnd();
-    var startPixel = Math.round(this._band.dateToPixelOffset(startDate));
-    var endPixel = Math.round(this._band.dateToPixelOffset(endDate));
-
-    var labelSize = this._frc.computeSize(text);
-    var tapeTrack = this._findFreeTrackForSolid(endPixel);
-    var color = evt.getColor();
-    color = color != null ? color : theme.event.duration.color;
-
-    var tapeElmtData = this._paintEventTape(evt, tapeTrack, startPixel, endPixel, color, 100, metrics, theme);
-
-    var tapeTrackData = this._getTrackData(tapeTrack);
-    tapeTrackData.solid = startPixel;
-
-    var labelLeft = startPixel + theme.event.label.offsetFromLine;
-    var labelTrack = this._findFreeTrackForText(tapeTrack, labelLeft + labelSize.width, function(t) { t.line = startPixel - 2; });
-    this._getTrackData(labelTrack).text = startPixel - 2;
-
-    this._paintEventLine(evt, startPixel, tapeTrack, labelTrack, metrics, theme);
-
-    var labelTop = Math.round(
-        metrics.trackOffset + labelTrack * metrics.trackIncrement +
-        metrics.trackHeight / 2 - labelSize.height / 2);
-
-    var labelElmtData = this._paintEventLabel(evt, text, labelLeft, labelTop, labelSize.width, labelSize.height, theme);
-
-    var self = this;
-    var clickHandler = function(elmt, domEvt, target) {
-        return self._onClickDurationEvent(tapeElmtData.elmt, domEvt, evt);
-    };
-    SimileAjax.DOM.registerEvent(tapeElmtData.elmt, "mousedown", clickHandler);
-    SimileAjax.DOM.registerEvent(labelElmtData.elmt, "mousedown", clickHandler);
-
-    this._createHighlightDiv(highlightIndex, tapeElmtData, theme);
-
-    this._eventIdToElmt[evt.getID()] = tapeElmtData.elmt;
-};
-
-Timeline.DetailedEventPainter.prototype.paintImpreciseDurationEvent = function(evt, metrics, theme, highlightIndex) {
-    var doc = this._timeline.getDocument();
-    var text = evt.getText();
-
-    var startDate = evt.getStart();
-    var latestStartDate = evt.getLatestStart();
-    var endDate = evt.getEnd();
-    var earliestEndDate = evt.getEarliestEnd();
-
-    var startPixel = Math.round(this._band.dateToPixelOffset(startDate));
-    var latestStartPixel = Math.round(this._band.dateToPixelOffset(latestStartDate));
-    var endPixel = Math.round(this._band.dateToPixelOffset(endDate));
-    var earliestEndPixel = Math.round(this._band.dateToPixelOffset(earliestEndDate));
-
-    var labelSize = this._frc.computeSize(text);
-    var tapeTrack = this._findFreeTrackForSolid(endPixel);
-    var color = evt.getColor();
-    color = color != null ? color : theme.event.duration.color;
-
-    var impreciseTapeElmtData = this._paintEventTape(evt, tapeTrack, startPixel, endPixel,
-        theme.event.duration.impreciseColor, theme.event.duration.impreciseOpacity, metrics, theme);
-    var tapeElmtData = this._paintEventTape(evt, tapeTrack, latestStartPixel, earliestEndPixel, color, 100, metrics, theme);
-
-    var tapeTrackData = this._getTrackData(tapeTrack);
-    tapeTrackData.solid = startPixel;
-
-    var labelLeft = latestStartPixel + theme.event.label.offsetFromLine;
-    var labelTrack = this._findFreeTrackForText(tapeTrack, labelLeft + labelSize.width, function(t) { t.line = latestStartPixel - 2; });
-    this._getTrackData(labelTrack).text = latestStartPixel - 2;
-
-    this._paintEventLine(evt, latestStartPixel, tapeTrack, labelTrack, metrics, theme);
-
-    var labelTop = Math.round(
-        metrics.trackOffset + labelTrack * metrics.trackIncrement +
-        metrics.trackHeight / 2 - labelSize.height / 2);
-
-    var labelElmtData = this._paintEventLabel(evt, text, labelLeft, labelTop, labelSize.width, labelSize.height, theme);
-
-    var self = this;
-    var clickHandler = function(elmt, domEvt, target) {
-        return self._onClickDurationEvent(tapeElmtData.elmt, domEvt, evt);
-    };
-    SimileAjax.DOM.registerEvent(tapeElmtData.elmt, "mousedown", clickHandler);
-    SimileAjax.DOM.registerEvent(labelElmtData.elmt, "mousedown", clickHandler);
-
-    this._createHighlightDiv(highlightIndex, tapeElmtData, theme);
-
-    this._eventIdToElmt[evt.getID()] = tapeElmtData.elmt;
-};
-
-Timeline.DetailedEventPainter.prototype._findFreeTrackForSolid = function(solidEdge, softEdge) {
-    for (var i = 0; true; i++) {
-        if (i < this._lowerTracks.length) {
-            var t = this._lowerTracks[i];
-            if (Math.min(t.solid, t.text) > solidEdge && (!(softEdge) || t.line > softEdge)) {
-                return i;
-            }
-        } else {
-            this._lowerTracks.push({
-                solid:  Number.POSITIVE_INFINITY,
-                text:   Number.POSITIVE_INFINITY,
-                line:   Number.POSITIVE_INFINITY
-            });
-
-            return i;
-        }
-
-        if (i < this._upperTracks.length) {
-            var t = this._upperTracks[i];
-            if (Math.min(t.solid, t.text) > solidEdge && (!(softEdge) || t.line > softEdge)) {
-                return -1 - i;
-            }
-        } else {
-            this._upperTracks.push({
-                solid:  Number.POSITIVE_INFINITY,
-                text:   Number.POSITIVE_INFINITY,
-                line:   Number.POSITIVE_INFINITY
-            });
-
-            return -1 - i;
-        }
-    }
-};
-
-Timeline.DetailedEventPainter.prototype._findFreeTrackForText = function(fromTrack, edge, occupiedTrackVisitor) {
-    var extendUp;
-    var index;
-    var firstIndex;
-    var result;
-
-    if (fromTrack < 0) {
-        extendUp = true;
-        firstIndex = -fromTrack;
-
-        index = this._findFreeUpperTrackForText(firstIndex, edge);
-        result = -1 - index;
-    } else if (fromTrack > 0) {
-        extendUp = false;
-        firstIndex = fromTrack + 1;
-
-        index = this._findFreeLowerTrackForText(firstIndex, edge);
-        result = index;
-    } else {
-        var upIndex = this._findFreeUpperTrackForText(0, edge);
-        var downIndex = this._findFreeLowerTrackForText(1, edge);
-
-        if (downIndex - 1 <= upIndex) {
-            extendUp = false;
-            firstIndex = 1;
-            index = downIndex;
-            result = index;
-        } else {
-            extendUp = true;
-            firstIndex = 0;
-            index = upIndex;
-            result = -1 - index;
-        }
-    }
-
-    if (extendUp) {
-        if (index == this._upperTracks.length) {
-            this._upperTracks.push({
-                solid:  Number.POSITIVE_INFINITY,
-                text:   Number.POSITIVE_INFINITY,
-                line:   Number.POSITIVE_INFINITY
-            });
-        }
-        for (var i = firstIndex; i < index; i++) {
-            occupiedTrackVisitor(this._upperTracks[i]);
-        }
-    } else {
-        if (index == this._lowerTracks.length) {
-            this._lowerTracks.push({
-                solid:  Number.POSITIVE_INFINITY,
-                text:   Number.POSITIVE_INFINITY,
-                line:   Number.POSITIVE_INFINITY
-            });
-        }
-        for (var i = firstIndex; i < index; i++) {
-            occupiedTrackVisitor(this._lowerTracks[i]);
-        }
-    }
-    return result;
-};
-
-Timeline.DetailedEventPainter.prototype._findFreeLowerTrackForText = function(index, edge) {
-    for (; index < this._lowerTracks.length; index++) {
-        var t = this._lowerTracks[index];
-        if (Math.min(t.solid, t.text) >= edge) {
-            break;
-        }
-    }
-    return index;
-};
-
-Timeline.DetailedEventPainter.prototype._findFreeUpperTrackForText = function(index, edge) {
-    for (; index < this._upperTracks.length; index++) {
-        var t = this._upperTracks[index];
-        if (Math.min(t.solid, t.text) >= edge) {
-            break;
-        }
-    }
-    return index;
-};
-
-Timeline.DetailedEventPainter.prototype._getTrackData = function(index) {
-    return (index < 0) ? this._upperTracks[-index - 1] : this._lowerTracks[index];
-};
-
-Timeline.DetailedEventPainter.prototype._paintEventLine = function(evt, left, startTrack, endTrack, metrics, theme) {
-    var top = Math.round(metrics.trackOffset + startTrack * metrics.trackIncrement + metrics.trackHeight / 2);
-    var height = Math.round(Math.abs(endTrack - startTrack) * metrics.trackIncrement);
-
-    var lineStyle = "1px solid " + theme.event.label.lineColor;
-    var lineDiv = this._timeline.getDocument().createElement("div");
-    lineDiv.style.position = "absolute";
-    lineDiv.style.left = left + "px";
-    lineDiv.style.width = theme.event.label.offsetFromLine + "px";
-    lineDiv.style.height = height + "px";
-    if (startTrack > endTrack) {
-        lineDiv.style.top = (top - height) + "px";
-        lineDiv.style.borderTop = lineStyle;
-    } else {
-        lineDiv.style.top = top + "px";
-        lineDiv.style.borderBottom = lineStyle;
-    }
-    lineDiv.style.borderLeft = lineStyle;
-    this._lineLayer.appendChild(lineDiv);
-};
-
-Timeline.DetailedEventPainter.prototype._paintEventIcon = function(evt, iconTrack, left, metrics, theme) {
-    var icon = evt.getIcon();
-    icon = icon != null ? icon : metrics.icon;
-
-    var middle = metrics.trackOffset + iconTrack * metrics.trackIncrement + metrics.trackHeight / 2;
-    var top = Math.round(middle - metrics.iconHeight / 2);
-
-    var img = SimileAjax.Graphics.createTranslucentImage(icon);
-    var iconDiv = this._timeline.getDocument().createElement("div");
-    iconDiv.style.position = "absolute";
-    iconDiv.style.left = left + "px";
-    iconDiv.style.top = top + "px";
-    iconDiv.appendChild(img);
-    iconDiv.style.cursor = "pointer";
-
-    if(evt._title != null)
-        iconDiv.title = evt._title;
-
-    this._eventLayer.appendChild(iconDiv);
-
-    return {
-        left:   left,
-        top:    top,
-        width:  metrics.iconWidth,
-        height: metrics.iconHeight,
-        elmt:   iconDiv
-    };
-};
-
-Timeline.DetailedEventPainter.prototype._paintEventLabel = function(evt, text, left, top, width, height, theme) {
-    var doc = this._timeline.getDocument();
-
-    var labelBackgroundDiv = doc.createElement("div");
-    labelBackgroundDiv.style.position = "absolute";
-    labelBackgroundDiv.style.left = left + "px";
-    labelBackgroundDiv.style.width = width + "px";
-    labelBackgroundDiv.style.top = top + "px";
-    labelBackgroundDiv.style.height = height + "px";
-    labelBackgroundDiv.style.backgroundColor = theme.event.label.backgroundColor;
-    SimileAjax.Graphics.setOpacity(labelBackgroundDiv, theme.event.label.backgroundOpacity);
-    this._eventLayer.appendChild(labelBackgroundDiv);
-
-    var labelDiv = doc.createElement("div");
-    labelDiv.style.position = "absolute";
-    labelDiv.style.left = left + "px";
-    labelDiv.style.width = width + "px";
-    labelDiv.style.top = top + "px";
-    labelDiv.innerHTML = text;
-    labelDiv.style.cursor = "pointer";
-
-    if(evt._title != null)
-        labelDiv.title = evt._title;
-
-    var color = evt.getTextColor();
-    if (color == null) {
-        color = evt.getColor();
-    }
-    if (color != null) {
-        labelDiv.style.color = color;
-    }
-
-    this._eventLayer.appendChild(labelDiv);
-
-    return {
-        left:   left,
-        top:    top,
-        width:  width,
-        height: height,
-        elmt:   labelDiv
-    };
-};
-
-Timeline.DetailedEventPainter.prototype._paintEventTape = function(
-    evt, iconTrack, startPixel, endPixel, color, opacity, metrics, theme) {
-
-    var tapeWidth = endPixel - startPixel;
-    var tapeHeight = theme.event.tape.height;
-    var middle = metrics.trackOffset + iconTrack * metrics.trackIncrement + metrics.trackHeight / 2;
-    var top = Math.round(middle - tapeHeight / 2);
-
-    var tapeDiv = this._timeline.getDocument().createElement("div");
-    tapeDiv.style.position = "absolute";
-    tapeDiv.style.left = startPixel + "px";
-    tapeDiv.style.width = tapeWidth + "px";
-    tapeDiv.style.top = top + "px";
-    tapeDiv.style.height = tapeHeight + "px";
-    tapeDiv.style.backgroundColor = color;
-    tapeDiv.style.overflow = "hidden";
-    tapeDiv.style.cursor = "pointer";
-
-    if(evt._title != null)
-        tapeDiv.title = evt._title;
-
-    SimileAjax.Graphics.setOpacity(tapeDiv, opacity);
-
-    this._eventLayer.appendChild(tapeDiv);
-
-    return {
-        left:   startPixel,
-        top:    top,
-        width:  tapeWidth,
-        height: tapeHeight,
-        elmt:   tapeDiv
-    };
-};
-
-Timeline.DetailedEventPainter.prototype._createHighlightDiv = function(highlightIndex, dimensions, theme) {
-    if (highlightIndex >= 0) {
-        var doc = this._timeline.getDocument();
-        var eventTheme = theme.event;
-
-        var color = eventTheme.highlightColors[Math.min(highlightIndex, eventTheme.highlightColors.length - 1)];
-
-        var div = doc.createElement("div");
-        div.style.position = "absolute";
-        div.style.overflow = "hidden";
-        div.style.left =    (dimensions.left - 2) + "px";
-        div.style.width =   (dimensions.width + 4) + "px";
-        div.style.top =     (dimensions.top - 2) + "px";
-        div.style.height =  (dimensions.height + 4) + "px";
-        div.style.background = color;
-
-        this._highlightLayer.appendChild(div);
-    }
-};
-
-Timeline.DetailedEventPainter.prototype._onClickInstantEvent = function(icon, domEvt, evt) {
-    var c = SimileAjax.DOM.getPageCoordinates(icon);
-    this._showBubble(
-        c.left + Math.ceil(icon.offsetWidth / 2),
-        c.top + Math.ceil(icon.offsetHeight / 2),
-        evt
-    );
-    this._fireOnSelect(evt.getID());
-
-    domEvt.cancelBubble = true;
-    SimileAjax.DOM.cancelEvent(domEvt);
-    return false;
-};
-
-Timeline.DetailedEventPainter.prototype._onClickDurationEvent = function(target, domEvt, evt) {
-    if ("pageX" in domEvt) {
-        var x = domEvt.pageX;
-        var y = domEvt.pageY;
-    } else {
-        var c = SimileAjax.DOM.getPageCoordinates(target);
-        var x = domEvt.offsetX + c.left;
-        var y = domEvt.offsetY + c.top;
-    }
-    this._showBubble(x, y, evt);
-    this._fireOnSelect(evt.getID());
-
-    domEvt.cancelBubble = true;
-    SimileAjax.DOM.cancelEvent(domEvt);
-    return false;
-};
-
-Timeline.DetailedEventPainter.prototype.showBubble = function(evt) {
-    var elmt = this._eventIdToElmt[evt.getID()];
-    if (elmt) {
-        var c = SimileAjax.DOM.getPageCoordinates(elmt);
-        this._showBubble(c.left + elmt.offsetWidth / 2, c.top + elmt.offsetHeight / 2, evt);
-    }
-};
-
-Timeline.DetailedEventPainter.prototype._showBubble = function(x, y, evt) {
-    var div = document.createElement("div");
-    var themeBubble = this._params.theme.event.bubble;
-    evt.fillInfoBubble(div, this._params.theme, this._band.getLabeller());
-
-    SimileAjax.WindowManager.cancelPopups();
-    SimileAjax.Graphics.createBubbleForContentAndPoint(div, x, y,
-       themeBubble.width, null, themeBubble.maxHeight);
-};
-
-Timeline.DetailedEventPainter.prototype._fireOnSelect = function(eventID) {
-    for (var i = 0; i < this._onSelectListeners.length; i++) {
-        this._onSelectListeners[i](eventID);
-    }
-};
-/*
- *  Overview Event Painter
- *
- */
-
-Timeline.OverviewEventPainter = function(params) {
-    this._params = params;
-    this._onSelectListeners = [];
-
-    this._filterMatcher = null;
-    this._highlightMatcher = null;
-};
-
-Timeline.OverviewEventPainter.prototype.initialize = function(band, timeline) {
-    this._band = band;
-    this._timeline = timeline;
-
-    this._eventLayer = null;
-    this._highlightLayer = null;
-};
-
-Timeline.OverviewEventPainter.prototype.getType = function() {
-    return 'overview';
-};
-
-Timeline.OverviewEventPainter.prototype.addOnSelectListener = function(listener) {
-    this._onSelectListeners.push(listener);
-};
-
-Timeline.OverviewEventPainter.prototype.removeOnSelectListener = function(listener) {
-    for (var i = 0; i < this._onSelectListeners.length; i++) {
-        if (this._onSelectListeners[i] == listener) {
-            this._onSelectListeners.splice(i, 1);
-            break;
-        }
-    }
-};
-
-Timeline.OverviewEventPainter.prototype.getFilterMatcher = function() {
-    return this._filterMatcher;
-};
-
-Timeline.OverviewEventPainter.prototype.setFilterMatcher = function(filterMatcher) {
-    this._filterMatcher = filterMatcher;
-};
-
-Timeline.OverviewEventPainter.prototype.getHighlightMatcher = function() {
-    return this._highlightMatcher;
-};
-
-Timeline.OverviewEventPainter.prototype.setHighlightMatcher = function(highlightMatcher) {
-    this._highlightMatcher = highlightMatcher;
-};
-
-Timeline.OverviewEventPainter.prototype.paint = function() {
-    var eventSource = this._band.getEventSource();
-    if (eventSource == null) {
-        return;
-    }
-
-    this._prepareForPainting();
-
-    var eventTheme = this._params.theme.event;
-    var metrics = {
-        trackOffset:    eventTheme.overviewTrack.offset,
-        trackHeight:    eventTheme.overviewTrack.height,
-        trackGap:       eventTheme.overviewTrack.gap,
-        trackIncrement: eventTheme.overviewTrack.height + eventTheme.overviewTrack.gap
-    };
-
-    var minDate = this._band.getMinDate();
-    var maxDate = this._band.getMaxDate();
-
-    var filterMatcher = (this._filterMatcher != null) ?
-        this._filterMatcher :
-        function(evt) { return true; };
-    var highlightMatcher = (this._highlightMatcher != null) ?
-        this._highlightMatcher :
-        function(evt) { return -1; };
-
-    var iterator = eventSource.getEventReverseIterator(minDate, maxDate);
-    while (iterator.hasNext()) {
-        var evt = iterator.next();
-        if (filterMatcher(evt)) {
-            this.paintEvent(evt, metrics, this._params.theme, highlightMatcher(evt));
-        }
-    }
-
-    this._highlightLayer.style.display = "block";
-    this._eventLayer.style.display = "block";
-    // update the band object for max number of tracks in this section of the ether
-    this._band.updateEventTrackInfo(this._tracks.length, metrics.trackIncrement);
-};
-
-Timeline.OverviewEventPainter.prototype.softPaint = function() {
-};
-
-Timeline.OverviewEventPainter.prototype._prepareForPainting = function() {
-    var band = this._band;
-
-    this._tracks = [];
-
-    if (this._highlightLayer != null) {
-        band.removeLayerDiv(this._highlightLayer);
-    }
-    this._highlightLayer = band.createLayerDiv(105, "timeline-band-highlights");
-    this._highlightLayer.style.display = "none";
-
-    if (this._eventLayer != null) {
-        band.removeLayerDiv(this._eventLayer);
-    }
-    this._eventLayer = band.createLayerDiv(110, "timeline-band-events");
-    this._eventLayer.style.display = "none";
-};
-
-Timeline.OverviewEventPainter.prototype.paintEvent = function(evt, metrics, theme, highlightIndex) {
-    if (evt.isInstant()) {
-        this.paintInstantEvent(evt, metrics, theme, highlightIndex);
-    } else {
-        this.paintDurationEvent(evt, metrics, theme, highlightIndex);
-    }
-};
-
-Timeline.OverviewEventPainter.prototype.paintInstantEvent = function(evt, metrics, theme, highlightIndex) {
-    var startDate = evt.getStart();
-    var startPixel = Math.round(this._band.dateToPixelOffset(startDate));
-
-    var color = evt.getColor(),
-        klassName = evt.getClassName();
-    if (klassName) {
-      color = null;
-    } else {
-      color = color != null ? color : theme.event.duration.color;
-    }
-
-    var tickElmtData = this._paintEventTick(evt, startPixel, color, 100, metrics, theme);
-
-    this._createHighlightDiv(highlightIndex, tickElmtData, theme);
-};
-
-Timeline.OverviewEventPainter.prototype.paintDurationEvent = function(evt, metrics, theme, highlightIndex) {
-    var latestStartDate = evt.getLatestStart();
-    var earliestEndDate = evt.getEarliestEnd();
-
-    var latestStartPixel = Math.round(this._band.dateToPixelOffset(latestStartDate));
-    var earliestEndPixel = Math.round(this._band.dateToPixelOffset(earliestEndDate));
-
-    var tapeTrack = 0;
-    for (; tapeTrack < this._tracks.length; tapeTrack++) {
-        if (earliestEndPixel < this._tracks[tapeTrack]) {
-            break;
-        }
-    }
-    this._tracks[tapeTrack] = earliestEndPixel;
-
-    var color = evt.getColor(),
-        klassName = evt.getClassName();
-    if (klassName) {
-      color = null;
-    } else {
-      color = color != null ? color : theme.event.duration.color;
-    }
-
-    var tapeElmtData = this._paintEventTape(evt, tapeTrack, latestStartPixel, earliestEndPixel,
-      color, 100, metrics, theme, klassName);
-
-    this._createHighlightDiv(highlightIndex, tapeElmtData, theme);
-};
-
-Timeline.OverviewEventPainter.prototype._paintEventTape = function(
-    evt, track, left, right, color, opacity, metrics, theme, klassName) {
-
-    var top = metrics.trackOffset + track * metrics.trackIncrement;
-    var width = right - left;
-    var height = metrics.trackHeight;
-
-    var tapeDiv = this._timeline.getDocument().createElement("div");
-    tapeDiv.className = 'timeline-small-event-tape';
-    if (klassName) { tapeDiv.className += ' small-' + klassName; }
-    tapeDiv.style.left = left + "px";
-    tapeDiv.style.width = width + "px";
-    tapeDiv.style.top = top + "px";
-    tapeDiv.style.height = height + "px";
-
-    if (color) {
-      tapeDiv.style.backgroundColor = color; // set color here if defined by event. Else use css
-    }
-    // tapeDiv.style.overflow = "hidden";   // now set in css
-    // tapeDiv.style.position = "absolute";
-    if (opacity < 100) SimileAjax.Graphics.setOpacity(tapeDiv, opacity);
-
-    this._eventLayer.appendChild(tapeDiv);
-
-    return {
-        left:   left,
-        top:    top,
-        width:  width,
-        height: height,
-        elmt:   tapeDiv
-    };
-};
-
-Timeline.OverviewEventPainter.prototype._paintEventTick = function(
-    evt, left, color, opacity, metrics, theme) {
-
-    var height = theme.event.overviewTrack.tickHeight;
-    var top = metrics.trackOffset - height;
-    var width = 1;
-
-    var tickDiv = this._timeline.getDocument().createElement("div");
-    tickDiv.className = 'timeline-small-event-icon';
-    tickDiv.style.left = left + "px";
-    tickDiv.style.top = top + "px";
-    // tickDiv.style.width = width + "px";
-    // tickDiv.style.position = "absolute";
-    // tickDiv.style.height = height + "px";
-    // tickDiv.style.backgroundColor = color;
-    // tickDiv.style.overflow = "hidden";
-
-    var klassName = evt.getClassName();
-    if (klassName) { tickDiv.className += ' small-' + klassName; }
-
-    if (opacity < 100) { SimileAjax.Graphics.setOpacity(tickDiv, opacity); }
-
-    this._eventLayer.appendChild(tickDiv);
-
-    return {
-        left:   left,
-        top:    top,
-        width:  width,
-        height: height,
-        elmt:   tickDiv
-    };
-};
-
-Timeline.OverviewEventPainter.prototype._createHighlightDiv = function(highlightIndex, dimensions, theme) {
-    if (highlightIndex >= 0) {
-        var doc = this._timeline.getDocument();
-        var eventTheme = theme.event;
-
-        var color = eventTheme.highlightColors[Math.min(highlightIndex, eventTheme.highlightColors.length - 1)];
-
-        var div = doc.createElement("div");
-        div.style.position = "absolute";
-        div.style.overflow = "hidden";
-        div.style.left =    (dimensions.left - 1) + "px";
-        div.style.width =   (dimensions.width + 2) + "px";
-        div.style.top =     (dimensions.top - 1) + "px";
-        div.style.height =  (dimensions.height + 2) + "px";
-        div.style.background = color;
-
-        this._highlightLayer.appendChild(div);
-    }
-};
-
-Timeline.OverviewEventPainter.prototype.showBubble = function(evt) {
-    // not implemented
-};
-/*
- *  Compact Event Painter
- *
- */
-
-Timeline.CompactEventPainter = function(params) {
-    this._params = params;
-    this._onSelectListeners = [];
-
-    this._filterMatcher = null;
-    this._highlightMatcher = null;
-    this._frc = null;
-
-    this._eventIdToElmt = {};
-};
-
-Timeline.CompactEventPainter.prototype.getType = function() {
-    return 'compact';
-};
-
-Timeline.CompactEventPainter.prototype.initialize = function(band, timeline) {
-    this._band = band;
-    this._timeline = timeline;
-
-    this._backLayer = null;
-    this._eventLayer = null;
-    this._lineLayer = null;
-    this._highlightLayer = null;
-
-    this._eventIdToElmt = null;
-};
-
-Timeline.CompactEventPainter.prototype.addOnSelectListener = function(listener) {
-    this._onSelectListeners.push(listener);
-};
-
-Timeline.CompactEventPainter.prototype.removeOnSelectListener = function(listener) {
-    for (var i = 0; i < this._onSelectListeners.length; i++) {
-        if (this._onSelectListeners[i] == listener) {
-            this._onSelectListeners.splice(i, 1);
-            break;
-        }
-    }
-};
-
-Timeline.CompactEventPainter.prototype.getFilterMatcher = function() {
-    return this._filterMatcher;
-};
-
-Timeline.CompactEventPainter.prototype.setFilterMatcher = function(filterMatcher) {
-    this._filterMatcher = filterMatcher;
-};
-
-Timeline.CompactEventPainter.prototype.getHighlightMatcher = function() {
-    return this._highlightMatcher;
-};
-
-Timeline.CompactEventPainter.prototype.setHighlightMatcher = function(highlightMatcher) {
-    this._highlightMatcher = highlightMatcher;
-};
-
-Timeline.CompactEventPainter.prototype.paint = function() {
-    var eventSource = this._band.getEventSource();
-    if (eventSource == null) {
-        return;
-    }
-
-    this._eventIdToElmt = {};
-    this._prepareForPainting();
-
-    var metrics = this._computeMetrics();
-    var minDate = this._band.getMinDate();
-    var maxDate = this._band.getMaxDate();
-
-    var filterMatcher = (this._filterMatcher != null) ?
-        this._filterMatcher :
-        function(evt) { return true; };
-
-    var highlightMatcher = (this._highlightMatcher != null) ?
-        this._highlightMatcher :
-        function(evt) { return -1; };
-
-    var iterator = eventSource.getEventIterator(minDate, maxDate);
-
-    var stackConcurrentPreciseInstantEvents = "stackConcurrentPreciseInstantEvents" in this._params && typeof this._params.stackConcurrentPreciseInstantEvents == "object";
-    var collapseConcurrentPreciseInstantEvents = "collapseConcurrentPreciseInstantEvents" in this._params && this._params.collapseConcurrentPreciseInstantEvents;
-    if (collapseConcurrentPreciseInstantEvents || stackConcurrentPreciseInstantEvents) {
-        var bufferedEvents = [];
-        var previousInstantEvent = null;
-
-        while (iterator.hasNext()) {
-            var evt = iterator.next();
-            if (filterMatcher(evt)) {
-                if (!evt.isInstant() || evt.isImprecise()) {
-                    this.paintEvent(evt, metrics, this._params.theme, highlightMatcher(evt));
-                } else if (previousInstantEvent != null &&
-                        previousInstantEvent.getStart().getTime() == evt.getStart().getTime()) {
-                    bufferedEvents[bufferedEvents.length - 1].push(evt);
-                } else {
-                    bufferedEvents.push([ evt ]);
-                    previousInstantEvent = evt;
-                }
-            }
-        }
-
-        for (var i = 0; i < bufferedEvents.length; i++) {
-            var compositeEvents = bufferedEvents[i];
-            if (compositeEvents.length == 1) {
-                this.paintEvent(compositeEvents[0], metrics, this._params.theme, highlightMatcher(compositeEvents[0]));
-            } else {
-                var match = -1;
-                for (var j = 0; match < 0 && j < compositeEvents.length; j++) {
-                    match = highlightMatcher(compositeEvents[j]);
-                }
-
-                if (stackConcurrentPreciseInstantEvents) {
-                    this.paintStackedPreciseInstantEvents(compositeEvents, metrics, this._params.theme, match);
-                } else {
-                    this.paintCompositePreciseInstantEvents(compositeEvents, metrics, this._params.theme, match);
-                }
-            }
-        }
-    } else {
-        while (iterator.hasNext()) {
-            var evt = iterator.next();
-            if (filterMatcher(evt)) {
-                this.paintEvent(evt, metrics, this._params.theme, highlightMatcher(evt));
-            }
-        }
-    }
-
-    this._highlightLayer.style.display = "block";
-    this._lineLayer.style.display = "block";
-    this._eventLayer.style.display = "block";
-
-    this._setOrthogonalOffset(metrics);
-};
-
-Timeline.CompactEventPainter.prototype.softPaint = function() {
-    this._setOrthogonalOffset(this._computeMetrics());
-};
-
-Timeline.CompactEventPainter.prototype._setOrthogonalOffset = function(metrics) {
-    var actualViewWidth = 2 * metrics.trackOffset + this._tracks.length * metrics.trackHeight;
-    var minOrthogonalOffset = Math.min(0, this._band.getViewWidth() - actualViewWidth);
-    var orthogonalOffset = Math.max(minOrthogonalOffset, this._band.getViewOrthogonalOffset());
-
-    this._highlightLayer.style.top =
-        this._lineLayer.style.top =
-            this._eventLayer.style.top =
-                orthogonalOffset + "px";
-};
-
-Timeline.CompactEventPainter.prototype._computeMetrics = function() {
-    var theme = this._params.theme;
-    var eventTheme = theme.event;
-
-    var metrics = {
-        trackOffset:            "trackOffset" in this._params ? this._params.trackOffset : 10,
-        trackHeight:            "trackHeight" in this._params ? this._params.trackHeight : 10,
-
-        tapeHeight:             theme.event.tape.height,
-        tapeBottomMargin:       "tapeBottomMargin" in this._params ? this._params.tapeBottomMargin : 2,
-
-        labelBottomMargin:      "labelBottomMargin" in this._params ? this._params.labelBottomMargin : 5,
-        labelRightMargin:       "labelRightMargin" in this._params ? this._params.labelRightMargin : 5,
-
-        defaultIcon:            eventTheme.instant.icon,
-        defaultIconWidth:       eventTheme.instant.iconWidth,
-        defaultIconHeight:      eventTheme.instant.iconHeight,
-
-        customIconWidth:        "iconWidth" in this._params ? this._params.iconWidth : eventTheme.instant.iconWidth,
-        customIconHeight:       "iconHeight" in this._params ? this._params.iconHeight : eventTheme.instant.iconHeight,
-
-        iconLabelGap:           "iconLabelGap" in this._params ? this._params.iconLabelGap : 2,
-        iconBottomMargin:       "iconBottomMargin" in this._params ? this._params.iconBottomMargin : 2
-    };
-    if ("compositeIcon" in this._params) {
-        metrics.compositeIcon = this._params.compositeIcon;
-        metrics.compositeIconWidth = this._params.compositeIconWidth || metrics.customIconWidth;
-        metrics.compositeIconHeight = this._params.compositeIconHeight || metrics.customIconHeight;
-    } else {
-        metrics.compositeIcon = metrics.defaultIcon;
-        metrics.compositeIconWidth = metrics.defaultIconWidth;
-        metrics.compositeIconHeight = metrics.defaultIconHeight;
-    }
-    metrics.defaultStackIcon = "icon" in this._params.stackConcurrentPreciseInstantEvents ?
-        this._params.stackConcurrentPreciseInstantEvents.icon : metrics.defaultIcon;
-    metrics.defaultStackIconWidth = "iconWidth" in this._params.stackConcurrentPreciseInstantEvents ?
-        this._params.stackConcurrentPreciseInstantEvents.iconWidth : metrics.defaultIconWidth;
-    metrics.defaultStackIconHeight = "iconHeight" in this._params.stackConcurrentPreciseInstantEvents ?
-        this._params.stackConcurrentPreciseInstantEvents.iconHeight : metrics.defaultIconHeight;
-
-    return metrics;
-};
-
-Timeline.CompactEventPainter.prototype._prepareForPainting = function() {
-    var band = this._band;
-
-    if (this._backLayer == null) {
-        this._backLayer = this._band.createLayerDiv(0, "timeline-band-events");
-        this._backLayer.style.visibility = "hidden";
-
-        var eventLabelPrototype = document.createElement("span");
-        eventLabelPrototype.className = "timeline-event-label";
-        this._backLayer.appendChild(eventLabelPrototype);
-        this._frc = SimileAjax.Graphics.getFontRenderingContext(eventLabelPrototype);
-    }
-    this._frc.update();
-    this._tracks = [];
-
-    if (this._highlightLayer != null) {
-        band.removeLayerDiv(this._highlightLayer);
-    }
-    this._highlightLayer = band.createLayerDiv(105, "timeline-band-highlights");
-    this._highlightLayer.style.display = "none";
-
-    if (this._lineLayer != null) {
-        band.removeLayerDiv(this._lineLayer);
-    }
-    this._lineLayer = band.createLayerDiv(110, "timeline-band-lines");
-    this._lineLayer.style.display = "none";
-
-    if (this._eventLayer != null) {
-        band.removeLayerDiv(this._eventLayer);
-    }
-    this._eventLayer = band.createLayerDiv(115, "timeline-band-events");
-    this._eventLayer.style.display = "none";
-};
-
-Timeline.CompactEventPainter.prototype.paintEvent = function(evt, metrics, theme, highlightIndex) {
-    if (evt.isInstant()) {
-        this.paintInstantEvent(evt, metrics, theme, highlightIndex);
-    } else {
-        this.paintDurationEvent(evt, metrics, theme, highlightIndex);
-    }
-};
-
-Timeline.CompactEventPainter.prototype.paintInstantEvent = function(evt, metrics, theme, highlightIndex) {
-    if (evt.isImprecise()) {
-        this.paintImpreciseInstantEvent(evt, metrics, theme, highlightIndex);
-    } else {
-        this.paintPreciseInstantEvent(evt, metrics, theme, highlightIndex);
-    }
-};
-
-Timeline.CompactEventPainter.prototype.paintDurationEvent = function(evt, metrics, theme, highlightIndex) {
-    if (evt.isImprecise()) {
-        this.paintImpreciseDurationEvent(evt, metrics, theme, highlightIndex);
-    } else {
-        this.paintPreciseDurationEvent(evt, metrics, theme, highlightIndex);
-    }
-};
-
-Timeline.CompactEventPainter.prototype.paintPreciseInstantEvent = function(evt, metrics, theme, highlightIndex) {
-    var commonData = {
-        tooltip: evt.getProperty("tooltip") || evt.getText()
-    };
-
-    var iconData = {
-        url: evt.getIcon()
-    };
-    if (iconData.url == null) {
-        iconData.url = metrics.defaultIcon;
-        iconData.width = metrics.defaultIconWidth;
-        iconData.height = metrics.defaultIconHeight;
-        iconData.className = "timeline-event-icon-default";
-    } else {
-        iconData.width = evt.getProperty("iconWidth") || metrics.customIconWidth;
-        iconData.height = evt.getProperty("iconHeight") || metrics.customIconHeight;
-    }
-
-    var labelData = {
-        text:       evt.getText(),
-        color:      evt.getTextColor() || evt.getColor(),
-        className:  evt.getClassName()
-    };
-
-    var result = this.paintTapeIconLabel(
-        evt.getStart(),
-        commonData,
-        null, // no tape data
-        iconData,
-        labelData,
-        metrics,
-        theme,
-        highlightIndex
-    );
-
-    var self = this;
-    var clickHandler = function(elmt, domEvt, target) {
-        return self._onClickInstantEvent(result.iconElmtData.elmt, domEvt, evt);
-    };
-    SimileAjax.DOM.registerEvent(result.iconElmtData.elmt, "mousedown", clickHandler);
-    SimileAjax.DOM.registerEvent(result.labelElmtData.elmt, "mousedown", clickHandler);
-
-    this._eventIdToElmt[evt.getID()] = result.iconElmtData.elmt;
-};
-
-Timeline.CompactEventPainter.prototype.paintCompositePreciseInstantEvents = function(events, metrics, theme, highlightIndex) {
-    var evt = events[0];
-
-    var tooltips = [];
-    for (var i = 0; i < events.length; i++) {
-        tooltips.push(events[i].getProperty("tooltip") || events[i].getText());
-    }
-    var commonData = {
-        tooltip: tooltips.join("; ")
-    };
-
-    var iconData = {
-        url: metrics.compositeIcon,
-        width: metrics.compositeIconWidth,
-        height: metrics.compositeIconHeight,
-        className: "timeline-event-icon-composite"
-    };
-
-    var labelData = {
-        text: String.substitute(this._params.compositeEventLabelTemplate, [ events.length ])
-    };
-
-    var result = this.paintTapeIconLabel(
-        evt.getStart(),
-        commonData,
-        null, // no tape data
-        iconData,
-        labelData,
-        metrics,
-        theme,
-        highlightIndex
-    );
-
-    var self = this;
-    var clickHandler = function(elmt, domEvt, target) {
-        return self._onClickMultiplePreciseInstantEvent(result.iconElmtData.elmt, domEvt, events);
-    };
-
-    SimileAjax.DOM.registerEvent(result.iconElmtData.elmt, "mousedown", clickHandler);
-    SimileAjax.DOM.registerEvent(result.labelElmtData.elmt, "mousedown", clickHandler);
-
-    for (var i = 0; i < events.length; i++) {
-        this._eventIdToElmt[events[i].getID()] = result.iconElmtData.elmt;
-    }
-};
-
-Timeline.CompactEventPainter.prototype.paintStackedPreciseInstantEvents = function(events, metrics, theme, highlightIndex) {
-    var limit = "limit" in this._params.stackConcurrentPreciseInstantEvents ?
-        this._params.stackConcurrentPreciseInstantEvents.limit : 10;
-    var moreMessageTemplate = "moreMessageTemplate" in this._params.stackConcurrentPreciseInstantEvents ?
-        this._params.stackConcurrentPreciseInstantEvents.moreMessageTemplate : "%0 More Events";
-    var showMoreMessage = limit <= events.length - 2; // We want at least 2 more events above the limit.
-                                                      // Otherwise we'd need the singular case of "1 More Event"
-
-    var band = this._band;
-    var getPixelOffset = function(date) {
-        return Math.round(band.dateToPixelOffset(date));
-    };
-    var getIconData = function(evt) {
-        var iconData = {
-            url: evt.getIcon()
-        };
-        if (iconData.url == null) {
-            iconData.url = metrics.defaultStackIcon;
-            iconData.width = metrics.defaultStackIconWidth;
-            iconData.height = metrics.defaultStackIconHeight;
-            iconData.className = "timeline-event-icon-stack timeline-event-icon-default";
-        } else {
-            iconData.width = evt.getProperty("iconWidth") || metrics.customIconWidth;
-            iconData.height = evt.getProperty("iconHeight") || metrics.customIconHeight;
-            iconData.className = "timeline-event-icon-stack";
-        }
-        return iconData;
-    };
-
-    var firstIconData = getIconData(events[0]);
-    var horizontalIncrement = 5;
-    var leftIconEdge = 0;
-    var totalLabelWidth = 0;
-    var totalLabelHeight = 0;
-    var totalIconHeight = 0;
-
-    var records = [];
-    for (var i = 0; i < events.length && (!showMoreMessage || i < limit); i++) {
-        var evt = events[i];
-        var text = evt.getText();
-        var iconData = getIconData(evt);
-        var labelSize = this._frc.computeSize(text);
-        var record = {
-            text:       text,
-            iconData:   iconData,
-            labelSize:  labelSize,
-            iconLeft:   firstIconData.width + i * horizontalIncrement - iconData.width
-        };
-        record.labelLeft = firstIconData.width + i * horizontalIncrement + metrics.iconLabelGap;
-        record.top = totalLabelHeight;
-        records.push(record);
-
-        leftIconEdge = Math.min(leftIconEdge, record.iconLeft);
-        totalLabelHeight += labelSize.height;
-        totalLabelWidth = Math.max(totalLabelWidth, record.labelLeft + labelSize.width);
-        totalIconHeight = Math.max(totalIconHeight, record.top + iconData.height);
-    }
-    if (showMoreMessage) {
-        var moreMessage = String.substitute(moreMessageTemplate, [ events.length - limit ]);
-
-        var moreMessageLabelSize = this._frc.computeSize(moreMessage);
-        var moreMessageLabelLeft = firstIconData.width + (limit - 1) * horizontalIncrement + metrics.iconLabelGap;
-        var moreMessageLabelTop = totalLabelHeight;
-
-        totalLabelHeight += moreMessageLabelSize.height;
-        totalLabelWidth = Math.max(totalLabelWidth, moreMessageLabelLeft + moreMessageLabelSize.width);
-    }
-    totalLabelWidth += metrics.labelRightMargin;
-    totalLabelHeight += metrics.labelBottomMargin;
-    totalIconHeight += metrics.iconBottomMargin;
-
-    var anchorPixel = getPixelOffset(events[0].getStart());
-    var newTracks = [];
-
-    var trackCount = Math.ceil(Math.max(totalIconHeight, totalLabelHeight) / metrics.trackHeight);
-    var rightIconEdge = firstIconData.width + (events.length - 1) * horizontalIncrement;
-    for (var i = 0; i < trackCount; i++) {
-        newTracks.push({ start: leftIconEdge, end: rightIconEdge });
-    }
-    var labelTrackCount = Math.ceil(totalLabelHeight / metrics.trackHeight);
-    for (var i = 0; i < labelTrackCount; i++) {
-        var track = newTracks[i];
-        track.end = Math.max(track.end, totalLabelWidth);
-    }
-
-    var firstTrack = this._fitTracks(anchorPixel, newTracks);
-    var verticalPixelOffset = firstTrack * metrics.trackHeight + metrics.trackOffset;
-
-    var iconStackDiv = this._timeline.getDocument().createElement("div");
-    iconStackDiv.className = 'timeline-event-icon-stack';
-    iconStackDiv.style.position = "absolute";
-    iconStackDiv.style.overflow = "visible";
-    iconStackDiv.style.left = anchorPixel + "px";
-    iconStackDiv.style.top = verticalPixelOffset + "px";
-    iconStackDiv.style.width = rightIconEdge + "px";
-    iconStackDiv.style.height = totalIconHeight + "px";
-    iconStackDiv.innerHTML = "<div style='position: relative'></div>";
-    this._eventLayer.appendChild(iconStackDiv);
-
-    var self = this;
-    var onMouseOver = function(domEvt) {
-        try {
-            var n = parseInt(this.getAttribute("index"));
-            var childNodes = iconStackDiv.firstChild.childNodes;
-            for (var i = 0; i < childNodes.length; i++) {
-                var child = childNodes[i];
-                if (i == n) {
-                    child.style.zIndex = childNodes.length;
-                } else {
-                    child.style.zIndex = childNodes.length - i;
-                }
-            }
-        } catch (e) {
-        }
-    };
-    var paintEvent = function(index) {
-        var record = records[index];
-        var evt = events[index];
-        var tooltip = evt.getProperty("tooltip") || evt.getText();
-
-        var labelElmtData = self._paintEventLabel(
-            { tooltip: tooltip },
-            { text: record.text },
-            anchorPixel + record.labelLeft,
-            verticalPixelOffset + record.top,
-            record.labelSize.width,
-            record.labelSize.height,
-            theme
-        );
-        labelElmtData.elmt.setAttribute("index", index);
-        labelElmtData.elmt.onmouseover = onMouseOver;
-
-        var img = SimileAjax.Graphics.createTranslucentImage(record.iconData.url);
-        var iconDiv = self._timeline.getDocument().createElement("div");
-        iconDiv.className = 'timeline-event-icon' + ("className" in record.iconData ? (" " + record.iconData.className) : "");
-        iconDiv.style.left = record.iconLeft + "px";
-        iconDiv.style.top = record.top + "px";
-        iconDiv.style.zIndex = (records.length - index);
-        iconDiv.appendChild(img);
-        iconDiv.setAttribute("index", index);
-        iconDiv.onmouseover = onMouseOver;
-
-        iconStackDiv.firstChild.appendChild(iconDiv);
-
-        var clickHandler = function(elmt, domEvt, target) {
-            return self._onClickInstantEvent(labelElmtData.elmt, domEvt, evt);
-        };
-
-        SimileAjax.DOM.registerEvent(iconDiv, "mousedown", clickHandler);
-        SimileAjax.DOM.registerEvent(labelElmtData.elmt, "mousedown", clickHandler);
-
-        self._eventIdToElmt[evt.getID()] = iconDiv;
-    };
-    for (var i = 0; i < records.length; i++) {
-        paintEvent(i);
-    }
-
-    if (showMoreMessage) {
-        var moreEvents = events.slice(limit);
-        var moreMessageLabelElmtData = this._paintEventLabel(
-            { tooltip: moreMessage },
-            { text: moreMessage },
-            anchorPixel + moreMessageLabelLeft,
-            verticalPixelOffset + moreMessageLabelTop,
-            moreMessageLabelSize.width,
-            moreMessageLabelSize.height,
-            theme
-        );
-
-        var moreMessageClickHandler = function(elmt, domEvt, target) {
-            return self._onClickMultiplePreciseInstantEvent(moreMessageLabelElmtData.elmt, domEvt, moreEvents);
-        };
-        SimileAjax.DOM.registerEvent(moreMessageLabelElmtData.elmt, "mousedown", moreMessageClickHandler);
-
-        for (var i = 0; i < moreEvents.length; i++) {
-            this._eventIdToElmt[moreEvents[i].getID()] = moreMessageLabelElmtData.elmt;
-        }
-    }
-    //this._createHighlightDiv(highlightIndex, iconElmtData, theme);
-};
-
-Timeline.CompactEventPainter.prototype.paintImpreciseInstantEvent = function(evt, metrics, theme, highlightIndex) {
-    var commonData = {
-        tooltip: evt.getProperty("tooltip") || evt.getText()
-    };
-
-    var tapeData = {
-        start:          evt.getStart(),
-        end:            evt.getEnd(),
-        latestStart:    evt.getLatestStart(),
-        earliestEnd:    evt.getEarliestEnd(),
-        isInstant:      true
-    };
-
-    var iconData = {
-        url: evt.getIcon()
-    };
-    if (iconData.url == null) {
-        iconData = null;
-    } else {
-        iconData.width = evt.getProperty("iconWidth") || metrics.customIconWidth;
-        iconData.height = evt.getProperty("iconHeight") || metrics.customIconHeight;
-    }
-
-    var labelData = {
-        text:       evt.getText(),
-        color:      evt.getTextColor() || evt.getColor(),
-        className:  evt.getClassName()
-    };
-
-    var result = this.paintTapeIconLabel(
-        evt.getStart(),
-        commonData,
-        tapeData, // no tape data
-        iconData,
-        labelData,
-        metrics,
-        theme,
-        highlightIndex
-    );
-
-    var self = this;
-    var clickHandler = iconData != null ?
-        function(elmt, domEvt, target) {
-            return self._onClickInstantEvent(result.iconElmtData.elmt, domEvt, evt);
-        } :
-        function(elmt, domEvt, target) {
-            return self._onClickInstantEvent(result.labelElmtData.elmt, domEvt, evt);
-        };
-
-    SimileAjax.DOM.registerEvent(result.labelElmtData.elmt, "mousedown", clickHandler);
-    SimileAjax.DOM.registerEvent(result.impreciseTapeElmtData.elmt, "mousedown", clickHandler);
-
-    if (iconData != null) {
-        SimileAjax.DOM.registerEvent(result.iconElmtData.elmt, "mousedown", clickHandler);
-        this._eventIdToElmt[evt.getID()] = result.iconElmtData.elmt;
-    } else {
-        this._eventIdToElmt[evt.getID()] = result.labelElmtData.elmt;
-    }
-};
-
-Timeline.CompactEventPainter.prototype.paintPreciseDurationEvent = function(evt, metrics, theme, highlightIndex) {
-    var commonData = {
-        tooltip: evt.getProperty("tooltip") || evt.getText()
-    };
-
-    var tapeData = {
-        start:          evt.getStart(),
-        end:            evt.getEnd(),
-        isInstant:      false
-    };
-
-    var iconData = {
-        url: evt.getIcon()
-    };
-    if (iconData.url == null) {
-        iconData = null;
-    } else {
-        iconData.width = evt.getProperty("iconWidth") || metrics.customIconWidth;
-        iconData.height = evt.getProperty("iconHeight") || metrics.customIconHeight;
-    }
-
-    var labelData = {
-        text:       evt.getText(),
-        color:      evt.getTextColor() || evt.getColor(),
-        className:  evt.getClassName()
-    };
-
-    var result = this.paintTapeIconLabel(
-        evt.getLatestStart(),
-        commonData,
-        tapeData, // no tape data
-        iconData,
-        labelData,
-        metrics,
-        theme,
-        highlightIndex
-    );
-
-    var self = this;
-    var clickHandler = iconData != null ?
-        function(elmt, domEvt, target) {
-            return self._onClickInstantEvent(result.iconElmtData.elmt, domEvt, evt);
-        } :
-        function(elmt, domEvt, target) {
-            return self._onClickInstantEvent(result.labelElmtData.elmt, domEvt, evt);
-        };
-
-    SimileAjax.DOM.registerEvent(result.labelElmtData.elmt, "mousedown", clickHandler);
-    SimileAjax.DOM.registerEvent(result.tapeElmtData.elmt, "mousedown", clickHandler);
-
-    if (iconData != null) {
-        SimileAjax.DOM.registerEvent(result.iconElmtData.elmt, "mousedown", clickHandler);
-        this._eventIdToElmt[evt.getID()] = result.iconElmtData.elmt;
-    } else {
-        this._eventIdToElmt[evt.getID()] = result.labelElmtData.elmt;
-    }
-};
-
-Timeline.CompactEventPainter.prototype.paintImpreciseDurationEvent = function(evt, metrics, theme, highlightIndex) {
-    var commonData = {
-        tooltip: evt.getProperty("tooltip") || evt.getText()
-    };
-
-    var tapeData = {
-        start:          evt.getStart(),
-        end:            evt.getEnd(),
-        latestStart:    evt.getLatestStart(),
-        earliestEnd:    evt.getEarliestEnd(),
-        isInstant:      false
-    };
-
-    var iconData = {
-        url: evt.getIcon()
-    };
-    if (iconData.url == null) {
-        iconData = null;
-    } else {
-        iconData.width = evt.getProperty("iconWidth") || metrics.customIconWidth;
-        iconData.height = evt.getProperty("iconHeight") || metrics.customIconHeight;
-    }
-
-    var labelData = {
-        text:       evt.getText(),
-        color:      evt.getTextColor() || evt.getColor(),
-        className:  evt.getClassName()
-    };
-
-    var result = this.paintTapeIconLabel(
-        evt.getLatestStart(),
-        commonData,
-        tapeData, // no tape data
-        iconData,
-        labelData,
-        metrics,
-        theme,
-        highlightIndex
-    );
-
-    var self = this;
-    var clickHandler = iconData != null ?
-        function(elmt, domEvt, target) {
-            return self._onClickInstantEvent(result.iconElmtData.elmt, domEvt, evt);
-        } :
-        function(elmt, domEvt, target) {
-            return self._onClickInstantEvent(result.labelElmtData.elmt, domEvt, evt);
-        };
-
-    SimileAjax.DOM.registerEvent(result.labelElmtData.elmt, "mousedown", clickHandler);
-    SimileAjax.DOM.registerEvent(result.tapeElmtData.elmt, "mousedown", clickHandler);
-
-    if (iconData != null) {
-        SimileAjax.DOM.registerEvent(result.iconElmtData.elmt, "mousedown", clickHandler);
-        this._eventIdToElmt[evt.getID()] = result.iconElmtData.elmt;
-    } else {
-        this._eventIdToElmt[evt.getID()] = result.labelElmtData.elmt;
-    }
-};
-
-Timeline.CompactEventPainter.prototype.paintTapeIconLabel = function(
-    anchorDate,
-    commonData,
-    tapeData,
-    iconData,
-    labelData,
-    metrics,
-    theme,
-    highlightIndex
-) {
-    var band = this._band;
-    var getPixelOffset = function(date) {
-        return Math.round(band.dateToPixelOffset(date));
-    };
-
-    var anchorPixel = getPixelOffset(anchorDate);
-    var newTracks = [];
-
-    var tapeHeightOccupied = 0;         // how many pixels (vertically) the tape occupies, including bottom margin
-    var tapeTrackCount = 0;             // how many tracks the tape takes up, usually just 1
-    var tapeLastTrackExtraSpace = 0;    // on the last track that the tape occupies, how many pixels are left (for icon and label to occupy as well)
-    if (tapeData != null) {
-        tapeHeightOccupied = metrics.tapeHeight + metrics.tapeBottomMargin;
-        tapeTrackCount = Math.ceil(metrics.tapeHeight / metrics.trackHeight);
-
-        var tapeEndPixelOffset = getPixelOffset(tapeData.end) - anchorPixel;
-        var tapeStartPixelOffset = getPixelOffset(tapeData.start) - anchorPixel;
-
-        for (var t = 0; t < tapeTrackCount; t++) {
-            newTracks.push({ start: tapeStartPixelOffset, end: tapeEndPixelOffset });
-        }
-
-        tapeLastTrackExtraSpace = metrics.trackHeight - (tapeHeightOccupied % metrics.tapeHeight);
-    }
-
-    var iconStartPixelOffset = 0;        // where the icon starts compared to the anchor pixel;
-                                         // this can be negative if the icon is center-aligned around the anchor
-    var iconHorizontalSpaceOccupied = 0; // how many pixels the icon take up from the anchor pixel,
-                                         // including the gap between the icon and the label
-    if (iconData != null) {
-        if ("iconAlign" in iconData && iconData.iconAlign == "center") {
-            iconStartPixelOffset = -Math.floor(iconData.width / 2);
-        }
-        iconHorizontalSpaceOccupied = iconStartPixelOffset + iconData.width + metrics.iconLabelGap;
-
-        if (tapeTrackCount > 0) {
-            newTracks[tapeTrackCount - 1].end = Math.max(newTracks[tapeTrackCount - 1].end, iconHorizontalSpaceOccupied);
-        }
-
-        var iconHeight = iconData.height + metrics.iconBottomMargin + tapeLastTrackExtraSpace;
-        while (iconHeight > 0) {
-            newTracks.push({ start: iconStartPixelOffset, end: iconHorizontalSpaceOccupied });
-            iconHeight -= metrics.trackHeight;
-        }
-    }
-
-    var text = labelData.text;
-    var labelSize = this._frc.computeSize(text);
-    var labelHeight = labelSize.height + metrics.labelBottomMargin + tapeLastTrackExtraSpace;
-    var labelEndPixelOffset = iconHorizontalSpaceOccupied + labelSize.width + metrics.labelRightMargin;
-    if (tapeTrackCount > 0) {
-        newTracks[tapeTrackCount - 1].end = Math.max(newTracks[tapeTrackCount - 1].end, labelEndPixelOffset);
-    }
-    for (var i = 0; labelHeight > 0; i++) {
-        if (tapeTrackCount + i < newTracks.length) {
-            var track = newTracks[tapeTrackCount + i];
-            track.end = labelEndPixelOffset;
-        } else {
-            newTracks.push({ start: 0, end: labelEndPixelOffset });
-        }
-        labelHeight -= metrics.trackHeight;
-    }
-
-    /*
-     *  Try to fit the new track on top of the existing tracks, then
-     *  render the various elements.
-     */
-    var firstTrack = this._fitTracks(anchorPixel, newTracks);
-    var verticalPixelOffset = firstTrack * metrics.trackHeight + metrics.trackOffset;
-    var result = {};
-
-    result.labelElmtData = this._paintEventLabel(
-        commonData,
-        labelData,
-        anchorPixel + iconHorizontalSpaceOccupied,
-        verticalPixelOffset + tapeHeightOccupied,
-        labelSize.width,
-        labelSize.height,
-        theme
-    );
-
-    if (tapeData != null) {
-        if ("latestStart" in tapeData || "earliestEnd" in tapeData) {
-            result.impreciseTapeElmtData = this._paintEventTape(
-                commonData,
-                tapeData,
-                metrics.tapeHeight,
-                verticalPixelOffset,
-                getPixelOffset(tapeData.start),
-                getPixelOffset(tapeData.end),
-                theme.event.duration.impreciseColor,
-                theme.event.duration.impreciseOpacity,
-                metrics,
-                theme
-            );
-        }
-        if (!tapeData.isInstant && "start" in tapeData && "end" in tapeData) {
-            result.tapeElmtData = this._paintEventTape(
-                commonData,
-                tapeData,
-                metrics.tapeHeight,
-                verticalPixelOffset,
-                anchorPixel,
-                getPixelOffset("earliestEnd" in tapeData ? tapeData.earliestEnd : tapeData.end),
-                tapeData.color,
-                100,
-                metrics,
-                theme
-            );
-        }
-    }
-
-    if (iconData != null) {
-        result.iconElmtData = this._paintEventIcon(
-            commonData,
-            iconData,
-            verticalPixelOffset + tapeHeightOccupied,
-            anchorPixel + iconStartPixelOffset,
-            metrics,
-            theme
-        );
-    }
-    //this._createHighlightDiv(highlightIndex, iconElmtData, theme);
-
-    return result;
-};
-
-Timeline.CompactEventPainter.prototype._fitTracks = function(anchorPixel, newTracks) {
-    var firstTrack;
-    for (firstTrack = 0; firstTrack < this._tracks.length; firstTrack++) {
-        var fit = true;
-        for (var j = 0; j < newTracks.length && (firstTrack + j) < this._tracks.length; j++) {
-            var existingTrack = this._tracks[firstTrack + j];
-            var newTrack = newTracks[j];
-            if (anchorPixel + newTrack.start < existingTrack) {
-                fit = false;
-                break;
-            }
-        }
-
-        if (fit) {
-            break;
-        }
-    }
-    for (var i = 0; i < newTracks.length; i++) {
-        this._tracks[firstTrack + i] = anchorPixel + newTracks[i].end;
-    }
-
-    return firstTrack;
-};
-
-
-Timeline.CompactEventPainter.prototype._paintEventIcon = function(commonData, iconData, top, left, metrics, theme) {
-    var img = SimileAjax.Graphics.createTranslucentImage(iconData.url);
-    var iconDiv = this._timeline.getDocument().createElement("div");
-    iconDiv.className = 'timeline-event-icon' + ("className" in iconData ? (" " + iconData.className) : "");
-    iconDiv.style.left = left + "px";
-    iconDiv.style.top = top + "px";
-    iconDiv.appendChild(img);
-
-    if ("tooltip" in commonData && typeof commonData.tooltip == "string") {
-        iconDiv.title = commonData.tooltip;
-    }
-
-    this._eventLayer.appendChild(iconDiv);
-
-    return {
-        left:   left,
-        top:    top,
-        width:  metrics.iconWidth,
-        height: metrics.iconHeight,
-        elmt:   iconDiv
-    };
-};
-
-Timeline.CompactEventPainter.prototype._paintEventLabel = function(commonData, labelData, left, top, width, height, theme) {
-    var doc = this._timeline.getDocument();
-
-    var labelDiv = doc.createElement("div");
-    labelDiv.className = 'timeline-event-label';
-
-    labelDiv.style.left = left + "px";
-    labelDiv.style.width = (width + 1) + "px";
-    labelDiv.style.top = top + "px";
-    labelDiv.innerHTML = labelData.text;
-
-    if ("tooltip" in commonData && typeof commonData.tooltip == "string") {
-        labelDiv.title = commonData.tooltip;
-    }
-    if ("color" in labelData && typeof labelData.color == "string") {
-        labelDiv.style.color = labelData.color;
-    }
-    if ("className" in labelData && typeof labelData.className == "string") {
-        labelDiv.className += ' ' + labelData.className;
-    }
-
-    this._eventLayer.appendChild(labelDiv);
-
-    return {
-        left:   left,
-        top:    top,
-        width:  width,
-        height: height,
-        elmt:   labelDiv
-    };
-};
-
-Timeline.CompactEventPainter.prototype._paintEventTape = function(
-    commonData, tapeData, height, top, startPixel, endPixel, color, opacity, metrics, theme) {
-
-    var width = endPixel - startPixel;
-
-    var tapeDiv = this._timeline.getDocument().createElement("div");
-    tapeDiv.className = "timeline-event-tape";
-
-    tapeDiv.style.left = startPixel + "px";
-    tapeDiv.style.top = top + "px";
-    tapeDiv.style.width = width + "px";
-    tapeDiv.style.height = height + "px";
-
-    if ("tooltip" in commonData && typeof commonData.tooltip == "string") {
-        tapeDiv.title = commonData.tooltip;
-    }
-    if (color != null && typeof tapeData.color == "string") {
-        tapeDiv.style.backgroundColor = color;
-    }
-
-    if ("backgroundImage" in tapeData && typeof tapeData.backgroundImage == "string") {
-        tapeDiv.style.backgroundImage = "url(" + tapeData.backgroundImage + ")";
-        tapeDiv.style.backgroundRepeat =
-            ("backgroundRepeat" in tapeData && typeof tapeData.backgroundRepeat == "string")
-                ? tapeData.backgroundRepeat : 'repeat';
-    }
-
-    SimileAjax.Graphics.setOpacity(tapeDiv, opacity);
-
-    if ("className" in tapeData && typeof tapeData.className == "string") {
-        tapeDiv.className += ' ' + tapeData.className;
-    }
-
-    this._eventLayer.appendChild(tapeDiv);
-
-    return {
-        left:   startPixel,
-        top:    top,
-        width:  width,
-        height: height,
-        elmt:   tapeDiv
-    };
-}
-
-Timeline.CompactEventPainter.prototype._createHighlightDiv = function(highlightIndex, dimensions, theme) {
-    if (highlightIndex >= 0) {
-        var doc = this._timeline.getDocument();
-        var eventTheme = theme.event;
-
-        var color = eventTheme.highlightColors[Math.min(highlightIndex, eventTheme.highlightColors.length - 1)];
-
-        var div = doc.createElement("div");
-        div.style.position = "absolute";
-        div.style.overflow = "hidden";
-        div.style.left =    (dimensions.left - 2) + "px";
-        div.style.width =   (dimensions.width + 4) + "px";
-        div.style.top =     (dimensions.top - 2) + "px";
-        div.style.height =  (dimensions.height + 4) + "px";
-//        div.style.background = color;
-
-        this._highlightLayer.appendChild(div);
-    }
-};
-
-Timeline.CompactEventPainter.prototype._onClickMultiplePreciseInstantEvent = function(icon, domEvt, events) {
-    var c = SimileAjax.DOM.getPageCoordinates(icon);
-    this._showBubble(
-        c.left + Math.ceil(icon.offsetWidth / 2),
-        c.top + Math.ceil(icon.offsetHeight / 2),
-        events
-    );
-
-    var ids = [];
-    for (var i = 0; i < events.length; i++) {
-        ids.push(events[i].getID());
-    }
-    this._fireOnSelect(ids);
-
-    domEvt.cancelBubble = true;
-    SimileAjax.DOM.cancelEvent(domEvt);
-
-    return false;
-};
-
-Timeline.CompactEventPainter.prototype._onClickInstantEvent = function(icon, domEvt, evt) {
-    var c = SimileAjax.DOM.getPageCoordinates(icon);
-    this._showBubble(
-        c.left + Math.ceil(icon.offsetWidth / 2),
-        c.top + Math.ceil(icon.offsetHeight / 2),
-        [evt]
-    );
-    this._fireOnSelect([evt.getID()]);
-
-    domEvt.cancelBubble = true;
-    SimileAjax.DOM.cancelEvent(domEvt);
-    return false;
-};
-
-Timeline.CompactEventPainter.prototype._onClickDurationEvent = function(target, domEvt, evt) {
-    if ("pageX" in domEvt) {
-        var x = domEvt.pageX;
-        var y = domEvt.pageY;
-    } else {
-        var c = SimileAjax.DOM.getPageCoordinates(target);
-        var x = domEvt.offsetX + c.left;
-        var y = domEvt.offsetY + c.top;
-    }
-    this._showBubble(x, y, [evt]);
-    this._fireOnSelect([evt.getID()]);
-
-    domEvt.cancelBubble = true;
-    SimileAjax.DOM.cancelEvent(domEvt);
-    return false;
-};
-
-Timeline.CompactEventPainter.prototype.showBubble = function(evt) {
-    var elmt = this._eventIdToElmt[evt.getID()];
-    if (elmt) {
-        var c = SimileAjax.DOM.getPageCoordinates(elmt);
-        this._showBubble(c.left + elmt.offsetWidth / 2, c.top + elmt.offsetHeight / 2, [evt]);
-    }
-};
-
-Timeline.CompactEventPainter.prototype._showBubble = function(x, y, evts) {
-    var div = document.createElement("div");
-
-    evts = ("fillInfoBubble" in evts) ? [evts] : evts;
-    for (var i = 0; i < evts.length; i++) {
-        var div2 = document.createElement("div");
-        div.appendChild(div2);
-
-        evts[i].fillInfoBubble(div2, this._params.theme, this._band.getLabeller());
-    }
-
-    SimileAjax.WindowManager.cancelPopups();
-    SimileAjax.Graphics.createBubbleForContentAndPoint(div, x, y, this._params.theme.event.bubble.width);
-};
-
-Timeline.CompactEventPainter.prototype._fireOnSelect = function(eventIDs) {
-    for (var i = 0; i < this._onSelectListeners.length; i++) {
-        this._onSelectListeners[i](eventIDs);
-    }
-};
-/*
- *  Span Highlight Decorator
- *
- */
-
-Timeline.SpanHighlightDecorator = function(params) {
-    // When evaluating params, test against null. Not "p in params". Testing against
-    // null enables caller to explicitly request the default. Testing against "in" means
-    // that the param has to be omitted to get the default.
-    this._unit = params.unit != null ? params.unit : SimileAjax.NativeDateUnit;
-    this._startDate = (typeof params.startDate == "string") ?
-        this._unit.parseFromObject(params.startDate) : params.startDate;
-    this._endDate = (typeof params.endDate == "string") ?
-        this._unit.parseFromObject(params.endDate) : params.endDate;
-    this._startLabel = params.startLabel != null ? params.startLabel : ""; // not null!
-    this._endLabel   = params.endLabel   != null ? params.endLabel   : ""; // not null!
-    this._color = params.color;
-    this._cssClass = params.cssClass != null ? params.cssClass : null;
-    this._opacity = params.opacity != null ? params.opacity : 100;
-         // Default z is 10, behind everything but background grid.
-         // If inFront, then place just behind events, in front of everything else
-    this._zIndex = (params.inFront != null && params.inFront) ? 113 : 10;
-};
-
-Timeline.SpanHighlightDecorator.prototype.initialize = function(band, timeline) {
-    this._band = band;
-    this._timeline = timeline;
-
-    this._layerDiv = null;
-};
-
-Timeline.SpanHighlightDecorator.prototype.paint = function() {
-    if (this._layerDiv != null) {
-        this._band.removeLayerDiv(this._layerDiv);
-    }
-    this._layerDiv = this._band.createLayerDiv(this._zIndex);
-    this._layerDiv.setAttribute("name", "span-highlight-decorator"); // for debugging
-    this._layerDiv.style.display = "none";
-
-    var minDate = this._band.getMinDate();
-    var maxDate = this._band.getMaxDate();
-
-    if (this._unit.compare(this._startDate, maxDate) < 0 &&
-        this._unit.compare(this._endDate, minDate) > 0) {
-
-        minDate = this._unit.later(minDate, this._startDate);
-        maxDate = this._unit.earlier(maxDate, this._endDate);
-
-        var minPixel = this._band.dateToPixelOffset(minDate);
-        var maxPixel = this._band.dateToPixelOffset(maxDate);
-
-        var doc = this._timeline.getDocument();
-
-        var createTable = function() {
-            var table = doc.createElement("table");
-            table.insertRow(0).insertCell(0);
-            return table;
-        };
-
-        var div = doc.createElement("div");
-        div.className = 'timeline-highlight-decorator';
-        if (this._cssClass) {
-            div.className += ' ' + this._cssClass;
-        }
-        if (this._color != null) {
-            div.style.backgroundColor = this._color;
-        }
-        if (this._opacity < 100) {
-            SimileAjax.Graphics.setOpacity(div, this._opacity);
-        }
-        this._layerDiv.appendChild(div);
-
-        var tableStartLabel = createTable();
-        tableStartLabel.className = 'timeline-highlight-label timeline-highlight-label-start';
-        var tdStart = tableStartLabel.rows[0].cells[0];
-        tdStart.innerHTML = this._startLabel;
-        if (this._cssClass) {
-            tdStart.className = 'label_' + this._cssClass;
-        }
-        this._layerDiv.appendChild(tableStartLabel);
-
-        var tableEndLabel = createTable();
-        tableEndLabel.className = 'timeline-highlight-label timeline-highlight-label-end';
-        var tdEnd = tableEndLabel.rows[0].cells[0];
-        tdEnd.innerHTML = this._endLabel;
-        if (this._cssClass) {
-            tdEnd.className = 'label_' + this._cssClass;
-        }
-        this._layerDiv.appendChild(tableEndLabel);
-
-        if (this._timeline.isHorizontal()){
-            div.style.left = minPixel + "px";
-            div.style.width = (maxPixel - minPixel) + "px";
-
-            tableStartLabel.style.right = (this._band.getTotalViewLength() - minPixel) + "px";
-            tableStartLabel.style.width = (this._startLabel.length) + "em";
-
-            tableEndLabel.style.left = maxPixel + "px";
-            tableEndLabel.style.width = (this._endLabel.length) + "em";
-
-        } else {
-            div.style.top = minPixel + "px";
-            div.style.height = (maxPixel - minPixel) + "px";
-
-            tableStartLabel.style.bottom = minPixel + "px";
-            tableStartLabel.style.height = "1.5px";
-
-            tableEndLabel.style.top = maxPixel + "px";
-            tableEndLabel.style.height = "1.5px";
-        }
-    }
-    this._layerDiv.style.display = "block";
-};
-
-Timeline.SpanHighlightDecorator.prototype.softPaint = function() {
-};
-
-/*
- *  Point Highlight Decorator
- *
- */
-
-Timeline.PointHighlightDecorator = function(params) {
-    this._unit = params.unit != null ? params.unit : SimileAjax.NativeDateUnit;
-    this._date = (typeof params.date == "string") ?
-        this._unit.parseFromObject(params.date) : params.date;
-    this._width = params.width != null ? params.width : 10;
-      // Since the width is used to calculate placements (see minPixel, below), we
-      // specify width here, not in css.
-    this._color = params.color;
-    this._cssClass = params.cssClass != null ? params.cssClass : '';
-    this._opacity = params.opacity != null ? params.opacity : 100;
-};
-
-Timeline.PointHighlightDecorator.prototype.initialize = function(band, timeline) {
-    this._band = band;
-    this._timeline = timeline;
-    this._layerDiv = null;
-};
-
-Timeline.PointHighlightDecorator.prototype.paint = function() {
-    if (this._layerDiv != null) {
-        this._band.removeLayerDiv(this._layerDiv);
-    }
-    this._layerDiv = this._band.createLayerDiv(10);
-    this._layerDiv.setAttribute("name", "span-highlight-decorator"); // for debugging
-    this._layerDiv.style.display = "none";
-
-    var minDate = this._band.getMinDate();
-    var maxDate = this._band.getMaxDate();
-
-    if (this._unit.compare(this._date, maxDate) < 0 &&
-        this._unit.compare(this._date, minDate) > 0) {
-
-        var pixel = this._band.dateToPixelOffset(this._date);
-        var minPixel = pixel - Math.round(this._width / 2);
-
-        var doc = this._timeline.getDocument();
-
-        var div = doc.createElement("div");
-        div.className = 'timeline-highlight-point-decorator';
-        div.className += ' ' + this._cssClass;
-
-        if (this._color != null) {
-            div.style.backgroundColor = this._color;
-        }
-        if (this._opacity < 100) {
-            SimileAjax.Graphics.setOpacity(div, this._opacity);
-        }
-        this._layerDiv.appendChild(div);
-
-        if (this._timeline.isHorizontal()) {
-            div.style.left = minPixel + "px";
-            div.style.width = this._width + "px";
-        } else {
-            div.style.top = minPixel + "px";
-            div.style.height = this._width + "px";
-        }
-    }
-    this._layerDiv.style.display = "block";
-};
-
-Timeline.PointHighlightDecorator.prototype.softPaint = function() {
-};
-/*
- *  Default Unit
- *
- */
-
-Timeline.NativeDateUnit = new Object();
-
-Timeline.NativeDateUnit.createLabeller = function(locale, timeZone) {
-    return new Timeline.GregorianDateLabeller(locale, timeZone);
-};
-
-Timeline.NativeDateUnit.makeDefaultValue = function() {
-    return new Date();
-};
-
-Timeline.NativeDateUnit.cloneValue = function(v) {
-    return new Date(v.getTime());
-};
-
-Timeline.NativeDateUnit.getParser = function(format) {
-    if (typeof format == "string") {
-        format = format.toLowerCase();
-    }
-    return (format == "iso8601" || format == "iso 8601") ?
-        Timeline.DateTime.parseIso8601DateTime : 
-        Timeline.DateTime.parseGregorianDateTime;
-};
-
-Timeline.NativeDateUnit.parseFromObject = function(o) {
-    return Timeline.DateTime.parseGregorianDateTime(o);
-};
-
-Timeline.NativeDateUnit.toNumber = function(v) {
-    return v.getTime();
-};
-
-Timeline.NativeDateUnit.fromNumber = function(n) {
-    return new Date(n);
-};
-
-Timeline.NativeDateUnit.compare = function(v1, v2) {
-    var n1, n2;
-    if (typeof v1 == "object") {
-        n1 = v1.getTime();
-    } else {
-        n1 = Number(v1);
-    }
-    if (typeof v2 == "object") {
-        n2 = v2.getTime();
-    } else {
-        n2 = Number(v2);
-    }
-
-    return n1 - n2;
-};
-
-Timeline.NativeDateUnit.earlier = function(v1, v2) {
-    return Timeline.NativeDateUnit.compare(v1, v2) < 0 ? v1 : v2;
-};
-
-Timeline.NativeDateUnit.later = function(v1, v2) {
-    return Timeline.NativeDateUnit.compare(v1, v2) > 0 ? v1 : v2;
-};
-
-Timeline.NativeDateUnit.change = function(v, n) {
-    return new Date(v.getTime() + n);
-};
-
-/*
- *  Common localization strings
- *
- */
-
-Timeline.strings["fr"] = {
-    wikiLinkLabel:  "Discute"
-};
-
-/*
- *  Localization of labellers.js
- *
- */
-
-Timeline.GregorianDateLabeller.monthNames["fr"] = [
-    "jan", "fev", "mar", "avr", "mai", "jui", "jui", "aou", "sep", "oct", "nov", "dec"
-];
-/*
- *  Common localization strings
- *
- */
-
-Timeline.strings["en"] = {
-    wikiLinkLabel:  "Discuss"
-};
-
-/*
- *  Localization of labellers.js
- *
- */
-
-Timeline.GregorianDateLabeller.monthNames["en"] = [
-    "Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"
-];
-
-Timeline.GregorianDateLabeller.dayNames["en"] = [
-    "Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday"
-];
--- a/web/data/cubicweb.timeline-ext.js	Mon Jun 22 12:51:28 2015 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,108 +0,0 @@
-/**
- *  :organization: Logilab
- *  :copyright: 2008-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
- *  :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
- *
- */
-
-/**
- * provide our own custom date parser since the default
- * one only understands iso8601 and gregorian dates
- */
-SimileAjax.NativeDateUnit.getParser = Timeline.NativeDateUnit.getParser = function(format) {
-    if (typeof format == "string") {
-        if (format.indexOf('%') != - 1) {
-            return function(datestring) {
-                if (datestring) {
-                    return strptime(datestring, format);
-                }
-                return null;
-            };
-        }
-        format = format.toLowerCase();
-    }
-    if (format == "iso8601" || format == "iso 8601") {
-        return Timeline.DateTime.parseIso8601DateTime;
-    }
-    return Timeline.DateTime.parseGregorianDateTime;
-};
-
-/*** CUBICWEB EVENT PAINTER *****************************************************/
-Timeline.CubicWebEventPainter = function(params) {
-    //  Timeline.OriginalEventPainter.apply(this, arguments);
-    this._params = params;
-    this._onSelectListeners = [];
-
-    this._filterMatcher = null;
-    this._highlightMatcher = null;
-    this._frc = null;
-
-    this._eventIdToElmt = {};
-};
-
-Timeline.CubicWebEventPainter.prototype = new Timeline.OriginalEventPainter();
-
-Timeline.CubicWebEventPainter.prototype._paintEventLabel = function(
-evt, text, left, top, width, height, theme) {
-    var doc = this._timeline.getDocument();
-
-    var labelDiv = doc.createElement("div");
-    labelDiv.className = 'timeline-event-label';
-
-    labelDiv.style.left = left + "px";
-    labelDiv.style.width = width + "px";
-    labelDiv.style.top = top + "px";
-
-    if (evt._obj.onclick) {
-        labelDiv.appendChild(A({
-            'href': evt._obj.onclick
-        },
-        text));
-    } else if (evt._obj.image) {
-        labelDiv.appendChild(IMG({
-            src: evt._obj.image,
-            width: '30px',
-            height: '30px'
-        }));
-    } else {
-        labelDiv.innerHTML = text;
-    }
-
-    if (evt._title != null) labelDiv.title = evt._title;
-
-    var color = evt.getTextColor();
-    if (color == null) {
-        color = evt.getColor();
-    }
-    if (color != null) {
-        labelDiv.style.color = color;
-    }
-    var classname = evt.getClassName();
-    if (classname) labelDiv.className += ' ' + classname;
-
-    this._eventLayer.appendChild(labelDiv);
-
-    return {
-        left: left,
-        top: top,
-        width: width,
-        height: height,
-        elmt: labelDiv
-    };
-};
-
-Timeline.CubicWebEventPainter.prototype._showBubble = function(x, y, evt) {
-    var div = DIV({
-        id: 'xxx'
-    });
-    var width = this._params.theme.event.bubble.width;
-    if (!evt._obj.bubbleUrl) {
-        evt.fillInfoBubble(div, this._params.theme, this._band.getLabeller());
-    }
-    SimileAjax.WindowManager.cancelPopups();
-    SimileAjax.Graphics.createBubbleForContentAndPoint(div, x, y, width);
-    if (evt._obj.bubbleUrl) {
-        jQuery('#xxx').loadxhtml(evt._obj.bubbleUrl, null, 'post', 'replace');
-    }
-};
-
--- a/web/data/cubicweb.widgets.js	Mon Jun 22 12:51:28 2015 +0200
+++ b/web/data/cubicweb.widgets.js	Mon Jun 22 14:27:37 2015 +0200
@@ -428,41 +428,6 @@
     }
 }
 
-/**
- * .. class:: Widgets.TimelineWidget
- *
- * widget based on SIMILE's timeline widget
- * http://code.google.com/p/simile-widgets/
- *
- * Beware not to mess with SIMILE's Timeline JS namespace!
- */
-
-Widgets.TimelineWidget = defclass("TimelineWidget", null, {
-    __init__: function(wdgnode) {
-        var tldiv = DIV({
-            id: "tl",
-            style: 'height: 200px; border: 1px solid #ccc;'
-        });
-        wdgnode.appendChild(tldiv);
-        var tlunit = wdgnode.getAttribute('cubicweb:tlunit') || 'YEAR';
-        var eventSource = new Timeline.DefaultEventSource();
-        var bandData = {
-            eventPainter: Timeline.CubicWebEventPainter,
-            eventSource: eventSource,
-            width: "100%",
-            intervalUnit: Timeline.DateTime[tlunit.toUpperCase()],
-            intervalPixels: 100
-        };
-        var bandInfos = [Timeline.createBandInfo(bandData)];
-        this.tl = Timeline.create(tldiv, bandInfos);
-        var loadurl = wdgnode.getAttribute('cubicweb:loadurl');
-        Timeline.loadJSON(loadurl, function(json, url) {
-            eventSource.loadJSON(json, url);
-        });
-
-    }
-});
-
 Widgets.TemplateTextField = defclass("TemplateTextField", null, {
 
     __init__: function(wdgnode) {
--- a/web/data/timeline-bundle.css	Mon Jun 22 12:51:28 2015 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,401 +0,0 @@
-div.simileAjax-bubble-container {
-    margin:     0px;
-    padding:    0px;
-    border:     none;
-    position:   absolute;
-    z-index:    1000;
-}
-
-div.simileAjax-bubble-innerContainer {
-    margin:     0px;
-    padding:    0px;
-    border:     none;
-    position:   relative;
-    width:      100%;
-    height:     100%;
-    overflow:   visible;
-}
-
-div.simileAjax-bubble-contentContainer {
-    margin:     0px;
-    padding:    0px;
-    border:     none;
-    position:   absolute;
-    left:       0px;
-    top:        0px;
-    width:      100%;
-    height:     100%;
-    overflow:   auto;
-    background: white;
-}
-
-div.simileAjax-bubble-border-left {
-    position:   absolute;
-    left:       -50px;
-    top:        0px;
-    width:      50px;
-    height:     100%;
-}
-div.simileAjax-bubble-border-left-pngTranslucent {
-    background: url(../images/bubble-left.png) top right repeat-y;
-}
-
-div.simileAjax-bubble-border-right {
-    position:   absolute;
-    right:      -50px;
-    top:        0px;
-    width:      50px;
-    height:     100%;
-}
-.simileAjax-bubble-border-right-pngTranslucent {
-    background: url(../images/bubble-right.png) top left repeat-y;
-}
-
-div.simileAjax-bubble-border-top {
-    position:   absolute;
-    top:        -50px;
-    left:       0px;
-    width:      100%;
-    height:     50px;
-}
-.simileAjax-bubble-border-top-pngTranslucent {
-    background: url(../images/bubble-top.png) bottom left repeat-x;
-}
-
-div.simileAjax-bubble-border-bottom {
-    position:   absolute;
-    bottom:     -50px;
-    left:       0px;
-    width:      100%;
-    height:     50px;
-}
-.simileAjax-bubble-border-bottom-pngTranslucent {
-    background: url(../images/bubble-bottom.png) top left repeat-x;
-}
-
-div.simileAjax-bubble-border-top-left {
-    position:   absolute;
-    top:        -50px;
-    left:       -50px;
-    width:      50px;
-    height:     50px;
-}
-.simileAjax-bubble-border-top-left-pngTranslucent {
-    background: url(../images/bubble-top-left.png) bottom right no-repeat;
-}
-
-div.simileAjax-bubble-border-top-right {
-    position:   absolute;
-    top:        -50px;
-    right:      -50px;
-    width:      50px;
-    height:     50px;
-}
-.simileAjax-bubble-border-top-right-pngTranslucent {
-    background: url(../images/bubble-top-right.png) bottom left no-repeat;
-}
-
-div.simileAjax-bubble-border-bottom-left {
-    position:   absolute;
-    bottom:     -50px;
-    left:       -50px;
-    width:      50px;
-    height:     50px;
-}
-.simileAjax-bubble-border-bottom-left-pngTranslucent {
-    background: url(../images/bubble-bottom-left.png) top right no-repeat;
-}
-
-div.simileAjax-bubble-border-bottom-right {
-    position:   absolute;
-    bottom:     -50px;
-    right:      -50px;
-    width:      50px;
-    height:     50px;
-}
-.simileAjax-bubble-border-bottom-right-pngTranslucent {
-    background: url(../images/bubble-bottom-right.png) top left no-repeat;
-}
-
-div.simileAjax-bubble-arrow-point-left {
-    position:   absolute;
-    left:       -100px;
-    width:      100px;
-    height:     49px;
-}
-.simileAjax-bubble-arrow-point-left-pngTranslucent {
-    background: url(../images/bubble-arrow-point-left.png) center right no-repeat;
-}
-
-div.simileAjax-bubble-arrow-point-right {
-    position:   absolute;
-    right:      -100px;
-    width:      100px;
-    height:     49px;
-}
-.simileAjax-bubble-arrow-point-right-pngTranslucent {
-    background: url(../images/bubble-arrow-point-right.png) center left no-repeat;
-}
-
-div.simileAjax-bubble-arrow-point-up {
-    position:   absolute;
-    top:        -100px;
-    width:      49px;
-    height:     100px;
-}
-.simileAjax-bubble-arrow-point-up-pngTranslucent {
-    background: url(../images/bubble-arrow-point-up.png) bottom center no-repeat;
-}
-
-div.simileAjax-bubble-arrow-point-down {
-    position:   absolute;
-    bottom:     -100px;
-    width:      49px;
-    height:     100px;
-}
-.simileAjax-bubble-arrow-point-down-pngTranslucent {
-    background: url(../images/bubble-arrow-point-down.png) bottom center no-repeat;
-}
-
-
-div.simileAjax-bubble-close {
-    position:   absolute;
-    right:      -10px;
-    top:        -12px;
-    width:      16px;
-    height:     16px;
-    cursor:     pointer;
-}
-.simileAjax-bubble-close-pngTranslucent {
-    background: url(../images/close-button.png) no-repeat;
-}
-.timeline-container {
-    position: relative;
-    overflow: hidden;
-}
-
-.timeline-copyright {
-    position: absolute;
-    bottom: 0px;
-    left: 0px;
-    z-index: 1000;
-    cursor: pointer;
-}
-
-.timeline-message-container {
-    position:   absolute;
-    top:        30%;
-    left:       35%;
-    right:      35%;
-    z-index:    1000;
-    display:    none;
-}
-.timeline-message {
-    font-size:      120%;
-    font-weight:    bold;
-    text-align:     center;
-}
-.timeline-message img {
-    vertical-align: middle;
-}
-
-.timeline-band {
-    position:   absolute;
-    background: #eee;
-    z-index:    10;
-}
-
-.timeline-band-inner {
-    position: relative;
-    width: 100%;
-    height: 100%;
-}
-
-.timeline-band-input {
-    position:   absolute;
-    width:      1em;
-    height:     1em;
-    overflow:   hidden;
-    z-index:    0;
-}
-.timeline-band-input input{
-    width:      0;
-}
-
-.timeline-band-layer {
-    position:   absolute;
-    width:      100%;
-    height:     100%;
-}
-
-.timeline-band-layer-inner {
-    position:   relative;
-    width:      100%;
-    height:     100%;
-}
-
-
-
-/*------------------- Horizontal / Vertical lines ----------------*/
-
-/* style for ethers */
-.timeline-ether-lines{border-color:#666; border-style:dotted; position:absolute;}
-.timeline-horizontal .timeline-ether-lines{border-width:0 0 0 1px; height:100%; top: 0; width: 1px;}
-.timeline-vertical .timeline-ether-lines{border-width:1px 0 0; height:1px; left: 0; width: 100%;}
-
-
-
-/*---------------- Weekends ---------------------------*/
-.timeline-ether-weekends{
-	position:absolute;
-	background-color:#FFFFE0;
-}
-
-.timeline-vertical .timeline-ether-weekends{left:0;width:100%;}
-.timeline-horizontal .timeline-ether-weekends{top:0; height:100%;}
-
-
-/*-------------------------- HIGHLIGHT DECORATORS -------------------*/
-/* Used for decorators, not used for Timeline Highlight              */
-.timeline-highlight-decorator,
-.timeline-highlight-point-decorator{
-	position:absolute;
-	overflow:hidden;
-}
-
-/* Width of horizontal decorators and Height of vertical decorators is
-   set in the decorator function params */
-.timeline-horizontal .timeline-highlight-point-decorator,
-.timeline-horizontal .timeline-highlight-decorator{
-	top:0;
-  height:100%;
-}
-
-.timeline-vertical .timeline-highlight-point-decorator,
-.timeline-vertical .timeline-highlight-decorator{
-	width:100%;
-	left:0;
-}
-
-.timeline-highlight-decorator{background-color:#FFC080;}
-.timeline-highlight-point-decorator{background-color:#ff5;}
-
-
-/*---------------------------- LABELS -------------------------*/
-.timeline-highlight-label {
-  position:absolute; overflow:hidden; font-size:200%;
-  font-weight:bold; color:#999; }
-
-
-/*---------------- VERTICAL LABEL -------------------*/
-.timeline-horizontal .timeline-highlight-label {top:0; height:100%;}
-.timeline-horizontal .timeline-highlight-label td {vertical-align:middle;}
-.timeline-horizontal .timeline-highlight-label-start {text-align:right;}
-.timeline-horizontal .timeline-highlight-label-end {text-align:left;}
-
-
-/*---------------- HORIZONTAL LABEL -------------------*/
-.timeline-vertical .timeline-highlight-label {left:0;width:100%;}
-.timeline-vertical .timeline-highlight-label td {vertical-align:top;}
-.timeline-vertical .timeline-highlight-label-start {text-align:center;}
-.timeline-vertical .timeline-highlight-label-end {text-align:center;}
-
-
-/*-------------------------------- DATE LABELS --------------------------------*/
-.timeline-date-label {
-  position: absolute;
-  border: solid #aaa;
-  color: #aaa;
-  width: 5em;
-  height: 1.5em;}
-.timeline-date-label-em {color: #000;}
-
-/* horizontal */
-.timeline-horizontal .timeline-date-label{padding-left:2px;}
-.timeline-horizontal .timeline-date-label{border-width:0 0 0 1px;}
-.timeline-horizontal .timeline-date-label-em{height:2em}
-
-/* vertical */
-.timeline-vertical .timeline-date-label{padding-top:2px;}
-.timeline-vertical .timeline-date-label{border-width:1px 0 0;}
-.timeline-vertical .timeline-date-label-em{width:7em}
-
-
-/*------------------------------- Ether.highlight -------------------------*/
-.timeline-ether-highlight{position:absolute; background-color:#fff;}
-.timeline-horizontal .timeline-ether-highlight{top:2px;}
-.timeline-vertical .timeline-ether-highlight{left:2px;}
-
-
-/*------------------------------ EVENTS ------------------------------------*/
-.timeline-event-icon, .timeline-event-label,.timeline-event-tape{
-	position:absolute;
-	cursor:pointer;
-}
-
-.timeline-event-tape,
-.timeline-small-event-tape,
-.timeline-small-event-icon{
-	background-color:#58A0DC;
-	overflow:hidden;
-}
-
-.timeline-small-event-tape,
-.timeline-small-event-icon{
-	position:absolute;
-}
-
-.timeline-small-event-icon{width:1px; height:6px;}
-
-  
-/*--------------------------------- TIMELINE-------------------------*/
-.timeline-ether-bg{width:100%; height:100%;}
-.timeline-band-0 .timeline-ether-bg{background-color:#eee}
-.timeline-band-1 .timeline-ether-bg{background-color:#ddd}
-.timeline-band-2 .timeline-ether-bg{background-color:#ccc}
-.timeline-band-3 .timeline-ether-bg{background-color:#aaa}
-.timeline-duration-event {
-    position: absolute;
-    overflow: hidden;
-    border: 1px solid blue;
-}
-
-.timeline-instant-event2 {
-    position: absolute;
-    overflow: hidden;
-    border-left: 1px solid blue;
-    padding-left: 2px;
-}
-
-.timeline-instant-event {
-    position: absolute;
-    overflow: hidden;
-}
-
-.timeline-event-bubble-title {
-    font-weight: bold;
-    border-bottom: 1px solid #888;
-    margin-bottom: 0.5em;
-}
-
-.timeline-event-bubble-body {
-}
-
-.timeline-event-bubble-wiki {
-    margin:     0.5em;
-    text-align: right;
-    color:      #A0A040;
-}
-.timeline-event-bubble-wiki a {
-    color:      #A0A040;
-}
-
-.timeline-event-bubble-time {
-    color: #aaa;
-}
-
-.timeline-event-bubble-image {
-    float: right;
-    padding-left: 5px;
-    padding-bottom: 5px;
-}
\ No newline at end of file
Binary file web/data/timeline/blue-circle.png has changed
Binary file web/data/timeline/bubble-arrows.png has changed
Binary file web/data/timeline/bubble-body-and-arrows.png has changed
Binary file web/data/timeline/bubble-body.png has changed
Binary file web/data/timeline/bubble-bottom-arrow.png has changed
Binary file web/data/timeline/bubble-bottom-left.png has changed
Binary file web/data/timeline/bubble-bottom-right.png has changed
Binary file web/data/timeline/bubble-bottom.png has changed
Binary file web/data/timeline/bubble-left-arrow.png has changed
Binary file web/data/timeline/bubble-left.png has changed
Binary file web/data/timeline/bubble-right-arrow.png has changed
Binary file web/data/timeline/bubble-right.png has changed
Binary file web/data/timeline/bubble-top-arrow.png has changed
Binary file web/data/timeline/bubble-top-left.png has changed
Binary file web/data/timeline/bubble-top-right.png has changed
Binary file web/data/timeline/bubble-top.png has changed
Binary file web/data/timeline/close-button.png has changed
Binary file web/data/timeline/copyright-vertical.png has changed
Binary file web/data/timeline/copyright.png has changed
Binary file web/data/timeline/dark-blue-circle.png has changed
Binary file web/data/timeline/dark-green-circle.png has changed
Binary file web/data/timeline/dark-red-circle.png has changed
Binary file web/data/timeline/dull-blue-circle.png has changed
Binary file web/data/timeline/dull-green-circle.png has changed
Binary file web/data/timeline/dull-red-circle.png has changed
Binary file web/data/timeline/gray-circle.png has changed
Binary file web/data/timeline/green-circle.png has changed
Binary file web/data/timeline/message-bottom-left.png has changed
Binary file web/data/timeline/message-bottom-right.png has changed
Binary file web/data/timeline/message-left.png has changed
Binary file web/data/timeline/message-right.png has changed
Binary file web/data/timeline/message-top-left.png has changed
Binary file web/data/timeline/message-top-right.png has changed
Binary file web/data/timeline/message.png has changed
Binary file web/data/timeline/progress-running.gif has changed
Binary file web/data/timeline/red-circle.png has changed
Binary file web/data/timeline/sundial.png has changed
Binary file web/data/timeline/top-bubble.png has changed
--- a/web/facet.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/web/facet.py	Mon Jun 22 14:27:37 2015 +0200
@@ -1465,15 +1465,17 @@
 
     def add_rql_restrictions(self):
         """add restriction for this facet into the rql syntax tree"""
-        self.select.set_distinct(True) # XXX
         value = self._cw.form.get(self.__regid__)
         if not value: # no value sent for this facet
             return
+        exists = nodes.Exists()
+        self.select.add_restriction(exists)
         var = self.select.make_variable()
         if self.role == 'subject':
-            self.select.add_relation(self.filtered_variable, self.rtype, var)
+            subj, obj = self.filtered_variable, var
         else:
-            self.select.add_relation(var, self.rtype, self.filtered_variable)
+            subj, obj = var, self.filtered_variable
+        exists.add_relation(subj, self.rtype, obj)
 
 
 class BitFieldFacet(AttributeFacet):
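
For context, the facet change above stops forcing DISTINCT on the whole query and instead wraps the relation restriction in an EXISTS() sub-query; per the updated unittest_facet.py test further down, the generated RQL now reads `DISTINCT Any  WHERE X is CWUser, EXISTS(X in_group A)`. A minimal sketch of that logic as a standalone helper (the helper name is ours; the rql calls are the ones used in the hunk above):

    from rql import nodes

    def add_exists_restriction(select, filtered_variable, rtype, role='subject'):
        # wrap the facet relation in an EXISTS() node instead of adding a
        # top-level relation (which needed DISTINCT to avoid duplicate rows)
        exists = nodes.Exists()
        select.add_restriction(exists)
        var = select.make_variable()
        if role == 'subject':
            subj, obj = filtered_variable, var
        else:
            subj, obj = var, filtered_variable
        exists.add_relation(subj, rtype, obj)
        return exists
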
--- a/web/formwidgets.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/web/formwidgets.py	Mon Jun 22 14:27:37 2015 +0200
@@ -712,7 +712,7 @@
 
 
 class JQueryTimePicker(JQueryDatePicker):
-    """Use jquery.timePicker to define a time picker. Will return the time as an
+    """Use jquery.timePicker to define a time picker. Will return the time as a
     unicode string.
     """
     needs_js = ('jquery.timePicker.js',)
--- a/web/htmlwidgets.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/web/htmlwidgets.py	Mon Jun 22 14:27:37 2015 +0200
@@ -141,7 +141,7 @@
 
 
 class RawBoxItem(HTMLWidget): # XXX deprecated
-    """a simpe box item displaying raw data"""
+    """a simple box item displaying raw data"""
 
     def __init__(self, label, liclass=None):
         self.label = label
--- a/web/httpcache.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/web/httpcache.py	Mon Jun 22 14:27:37 2015 +0200
@@ -22,10 +22,6 @@
 from time import mktime
 from datetime import datetime
 
-# time delta usable to convert localized time to GMT time
-# XXX this become erroneous after a DST transition!!!
-GMTOFFSET = - (datetime.now() - datetime.utcnow())
-
 class NoHTTPCacheManager(object):
     """default cache manager: set no-cache cache control policy"""
     def __init__(self, view):
--- a/web/request.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/web/request.py	Mon Jun 22 14:27:37 2015 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -38,14 +38,14 @@
 from logilab.common.deprecation import deprecated
 from logilab.mtconverter import xml_escape
 
+from cubicweb import AuthenticationError
 from cubicweb.req import RequestSessionBase
-from cubicweb.dbapi import DBAPIRequest
 from cubicweb.uilib import remove_html_tags, js
 from cubicweb.utils import SizeConstrainedList, HTMLHead, make_uid
 from cubicweb.view import TRANSITIONAL_DOCTYPE_NOEXT
 from cubicweb.web import (INTERNAL_FIELD_VALUE, LOGGER, NothingToEdit,
                           RequestError, StatusResponse)
-from cubicweb.web.httpcache import GMTOFFSET, get_validators
+from cubicweb.web.httpcache import get_validators
 from cubicweb.web.http_headers import Headers, Cookie, parseDateTime
 
 _MARKER = object()
@@ -155,9 +155,7 @@
         #: shared among various components used to publish the request (views,
         #: controller, application...)
         self.data = {}
-        #:  search state: 'normal' or 'linksearch' (eg searching for an object
-        #:  to create a relation with another)
-        self.search_state = ('normal',)
+        self._search_state = None
         #: page id, set by htmlheader template
         self.pageid = None
         self._set_pageid()
@@ -354,21 +352,36 @@
             self.session.data.pop(self._msgid, u'')
             del self._msgid
 
+    def _load_search_state(self, searchstate):
+        if searchstate is None or searchstate == 'normal':
+            self._search_state = ('normal',)
+        else:
+            self._search_state = ('linksearch', searchstate.split(':'))
+            assert len(self._search_state[-1]) == 4, 'invalid searchstate'
+
+    @property
+    def search_state(self):
+        """search state: 'normal' or 'linksearch' (i.e. searching for an object
+        to create a relation with another)"""
+        if self._search_state is None:
+            searchstate = self.session.data.get('search_state', 'normal')
+            self._load_search_state(searchstate)
+        return self._search_state
+
+    @search_state.setter
+    def search_state(self, searchstate):
+        self._search_state = searchstate
+
     def update_search_state(self):
-        """update the current search state"""
+        """update the current search state if needed"""
         searchstate = self.form.get('__mode')
-        if not searchstate:
-            searchstate = self.session.data.get('search_state', 'normal')
-        self.set_search_state(searchstate)
+        if searchstate:
+            self.set_search_state(searchstate)
 
     def set_search_state(self, searchstate):
         """set a new search state"""
-        if searchstate is None or searchstate == 'normal':
-            self.search_state = (searchstate or 'normal',)
-        else:
-            self.search_state = ('linksearch', searchstate.split(':'))
-            assert len(self.search_state[-1]) == 4
         self.session.data['search_state'] = searchstate
+        self._load_search_state(searchstate)
 
     def match_search_state(self, rset):
         """when searching an entity to create a relation, return True if entities in
@@ -385,7 +398,7 @@
 
     def update_breadcrumbs(self):
         """stores the last visisted page in session data"""
-        searchstate = self.session.data.get('search_state')
+        searchstate = self.search_state[0]
         if searchstate == 'normal':
             breadcrumbs = self.session.data.get('breadcrumbs')
             if breadcrumbs is None:
@@ -403,67 +416,6 @@
             return breadcrumbs.pop()
         return self.base_url()
 
-    @deprecated('[3.19] use a traditional ajaxfunc / controller')
-    def user_rql_callback(self, rqlargs, *args, **kwargs):
-        """register a user callback to execute some rql query, and return a URL
-        to call that callback which can be inserted in an HTML view.
-
-        `rqlargs` should be a tuple containing argument to give to the execute function.
-
-        The first argument following rqlargs must be the message to be
-        displayed after the callback is called.
-
-        For other allowed arguments, see :meth:`user_callback` method
-        """
-        def rqlexec(req, rql, args=None, key=None):
-            req.execute(rql, args, key)
-        return self.user_callback(rqlexec, rqlargs, *args, **kwargs)
-
-    @deprecated('[3.19] use a traditional ajaxfunc / controller')
-    def user_callback(self, cb, cbargs, *args, **kwargs):
-        """register the given user callback and return a URL which can
-        be inserted in an HTML view. When the URL is accessed, the
-        callback function will be called (as 'cb(req, \*cbargs)', and a
-        message will be displayed in the web interface. The third
-        positional argument must be 'msg', containing the message.
-
-        You can specify the underlying js function to call using a 'jsfunc'
-        named argument, set to one of :func:`userCallback`,
-        :func:`userCallbackThenUpdateUI` or :func:`userCallbackThenReloadPage`
-        (the default). Note that arguments may vary according to the function
-        used.
-        """
-        self.add_js('cubicweb.ajax.js')
-        jsfunc = kwargs.pop('jsfunc', 'userCallbackThenReloadPage')
-        assert not kwargs, 'dunno what to do with remaining kwargs: %s' % kwargs
-        cbname = self.register_onetime_callback(cb, *cbargs)
-        return "javascript: %s" % getattr(js, jsfunc)(cbname, *args)
-
-    @deprecated('[3.19] use a traditional ajaxfunc / controller')
-    def register_onetime_callback(self, func, *args):
-        cbname = build_cb_uid(func.__name__)
-        def _cb(req):
-            try:
-                return func(req, *args)
-            finally:
-                self.unregister_callback(self.pageid, cbname)
-        self.set_page_data(cbname, _cb)
-        return cbname
-
-    @deprecated('[3.19] use a traditional ajaxfunc / controller')
-    def unregister_callback(self, pageid, cbname):
-        assert pageid is not None
-        assert cbname.startswith('cb_')
-        self.info('unregistering callback %s for pageid %s', cbname, pageid)
-        self.del_page_data(cbname)
-
-    @deprecated('[3.19] use a traditional ajaxfunc / controller')
-    def clear_user_callbacks(self):
-        if self.session is not None: # XXX
-            for key in list(self.session.data):
-                if key.startswith('cb_'):
-                    del self.session.data[key]
-
     # web edition helpers #####################################################
 
     @cached # so it's written only once
@@ -584,8 +536,8 @@
             # we don't want to handle times before the EPOCH (it causes a bug
             # on Windows). Also use > and not >= else expires == 0 and Cookie
             # thinks that means no expiry...
-            assert expires + GMTOFFSET > date(1970, 1, 1)
-            expires = timegm((expires + GMTOFFSET).timetuple())
+            assert expires > date(1970, 1, 1)
+            expires = timegm(expires.timetuple())
         else:
             expires = None
         # make sure cookie is set on the correct path
@@ -859,8 +811,7 @@
         """
         mtime = self.get_header('If-modified-since', raw=False)
         if mtime:
-            # :/ twisted is returned a localized time stamp
-            return datetime.fromtimestamp(mtime) + GMTOFFSET
+            return datetime.utcfromtimestamp(mtime)
         return None
 
     ### outcoming headers
@@ -1005,29 +956,36 @@
         self.set_default_language(vreg)
 
 
-class DBAPICubicWebRequestBase(_CubicWebRequestBase, DBAPIRequest):
-
-    def set_session(self, session):
-        """method called by the session handler when the user is authenticated
-        or an anonymous connection is open
-        """
-        super(CubicWebRequestBase, self).set_session(session)
-        # set request language
-        self.set_user_language(session.user)
-
-
 def _cnx_func(name):
     def proxy(req, *args, **kwargs):
         return getattr(req.cnx, name)(*args, **kwargs)
     return proxy
 
+class _NeedAuthAccessMock(object):
+
+    def __getattribute__(self, attr):
+        raise AuthenticationError()
+
+    def __nonzero__(self):
+        return False
+
+class _MockAnonymousSession(object):
+    sessionid = 'thisisnotarealsession'
+
+    @property
+    def data(self):
+        return {}
+
+    @property
+    def anonymous_session(self):
+        return True
 
 class ConnectionCubicWebRequestBase(_CubicWebRequestBase):
+    cnx = None
+    session = None
 
     def __init__(self, vreg, https=False, form=None, headers={}):
         """"""
-        self.cnx = None
-        self.session = None
         self.vreg = vreg
         try:
             # no vreg or config which doesn't handle translations
@@ -1036,8 +994,7 @@
             self.translations = {}
         super(ConnectionCubicWebRequestBase, self).__init__(vreg, https=https,
                                                        form=form, headers=headers)
-        from cubicweb.dbapi import DBAPISession, _NeedAuthAccessMock
-        self.session = DBAPISession(None)
+        self.session = _MockAnonymousSession()
         self.cnx = self.user = _NeedAuthAccessMock()
 
     @property
@@ -1045,8 +1002,10 @@
         return self.cnx.transaction_data
 
     def set_cnx(self, cnx):
+        if 'ecache' in cnx.transaction_data:
+            del cnx.transaction_data['ecache']
         self.cnx = cnx
-        self.session = cnx._session
+        self.session = cnx.session
         self._set_user(cnx.user)
         self.set_user_language(cnx.user)
 
@@ -1056,7 +1015,6 @@
         return rset
 
     def set_default_language(self, vreg):
-        # XXX copy from dbapi
         try:
             lang = vreg.property_value('ui.language')
         except Exception: # property may not be registered
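
Two patterns introduced above are worth spelling out: req.cnx and req.user start out as _NeedAuthAccessMock instances so that any use before authentication fails loudly, and the search state is now read lazily from session data instead of being set at request creation. A self-contained sketch of the mock behaviour (the demo class is illustrative, not CubicWeb API):

    from cubicweb import AuthenticationError

    class AuthRequiredMock(object):
        """Illustrative stand-in for the _NeedAuthAccessMock above."""
        def __getattribute__(self, attr):
            # any attribute access fails loudly until a real connection is set
            raise AuthenticationError()
        def __nonzero__(self):
            # Python 2 truth value: the mock is falsy, so "if req.cnx:" is cheap
            return False

    cnx = AuthRequiredMock()
    assert not cnx          # boolean tests work and report "not connected"
    try:
        cnx.user            # any real use raises instead of returning junk
    except AuthenticationError:
        pass
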
--- a/web/test/data/bootstrap_cubes	Mon Jun 22 12:51:28 2015 +0200
+++ b/web/test/data/bootstrap_cubes	Mon Jun 22 14:27:37 2015 +0200
@@ -1,1 +1,1 @@
-file, blog, tag, folder
+file, blog, tag
--- a/web/test/data/schema.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/web/test/data/schema.py	Mon Jun 22 14:27:37 2015 +0200
@@ -93,9 +93,9 @@
     title = String(maxsize=32, required=True, fulltextindexed=True)
     concerns = SubjectRelation('Project', composite='object')
 
-# used by windmill for `test_edit_relation`
-from cubes.folder.schema import Folder
-
+class Folder(EntityType):
+    name = String(required=True)
+    filed_under = SubjectRelation('Folder', description=_('parent folder'))
 
 class TreeNode(EntityType):
     name = String(required=True)
--- a/web/test/data/views.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/web/test/data/views.py	Mon Jun 22 14:27:37 2015 +0200
@@ -16,7 +16,9 @@
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
 
+from cubicweb.predicates import has_related_entities
 from cubicweb.web.views.ajaxcontroller import ajaxfunc
+from cubicweb.web.views.ibreadcrumbs import IBreadCrumbsAdapter
 
 def _recursive_replace_stream_by_content(tree):
     """ Search for streams (i.e. object that have a 'read' method) in a tree
@@ -46,3 +48,10 @@
     except Exception, ex:
         import traceback as tb
         tb.print_exc(ex)
+
+
+class FolderIBreadCrumbsAdapter(IBreadCrumbsAdapter):
+    __select__ = IBreadCrumbsAdapter.__select__ & has_related_entities('filed_under')
+
+    def parent_entity(self):
+        return self.entity.filed_under[0]
--- a/web/test/unittest_application.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/web/test/unittest_application.py	Mon Jun 22 14:27:37 2015 +0200
@@ -30,7 +30,6 @@
 from cubicweb.web import LogOut, Redirect, INTERNAL_FIELD_VALUE
 from cubicweb.web.views.basecontrollers import ViewController
 from cubicweb.web.application import anonymized_request
-from cubicweb.dbapi import DBAPISession, _NeedAuthAccessMock
 from cubicweb import repoapi
 
 class FakeMapping:
@@ -368,15 +367,11 @@
         req.set_request_header('Cookie', cookie[sessioncookie].OutputString(),
                                raw=True)
         clear_cache(req, 'get_authorization')
-        # reset session as if it was a new incoming request
-        req.session = DBAPISession(None)
-        req.user = req.cnx = _NeedAuthAccessMock
-        
 
     def _test_auth_anon(self, req):
         asession = self.app.get_session(req)
         # important otherwise _reset_cookie will not use the right session
-        req.set_cnx(repoapi.ClientConnection(asession))
+        req.set_cnx(repoapi.Connection(asession))
         self.assertEqual(len(self.open_sessions), 1)
         self.assertEqual(asession.login, 'anon')
         self.assertTrue(asession.anonymous_session)
@@ -386,7 +381,7 @@
         self.assertEqual(1, len(self.open_sessions))
         session = self.app.get_session(req)
         # important otherwise _reset_cookie will not use the right session
-        req.set_cnx(repoapi.ClientConnection(session))
+        req.set_cnx(repoapi.Connection(session))
         self.assertEqual(req.message, 'authentication failure')
         self.assertEqual(req.session.anonymous_session, True)
         self.assertEqual(1, len(self.open_sessions))
--- a/web/test/unittest_facet.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/web/test/unittest_facet.py	Mon Jun 22 14:27:37 2015 +0200
@@ -129,8 +129,6 @@
             self.assertEqual(f.select.as_string(),
                               'DISTINCT Any  WHERE X is CWUser')
 
-
-
     def test_relationattribute(self):
         with self.admin_access.web_request() as req:
             f, (guests, managers) = self._in_group_facet(req, cls=facet.RelationAttributeFacet)
@@ -150,6 +148,20 @@
             self.assertEqual(f.select.as_string(),
                               "DISTINCT Any  WHERE X is CWUser, X in_group E, E name 'guests'")
 
+    def test_hasrelation(self):
+        with self.admin_access.web_request() as req:
+            rset, rqlst, filtered_variable = self.prepare_rqlst(req)
+            f = facet.HasRelationFacet(req, rset=rset,
+                                       select=rqlst.children[0],
+                                       filtered_variable=filtered_variable)
+            f.__regid__ = 'has_group'
+            f.rtype = 'in_group'
+            f.role = 'subject'
+            f._cw.form[f.__regid__] = 'feed me'
+            f.add_rql_restrictions()
+            self.assertEqual(f.select.as_string(),
+                             'DISTINCT Any  WHERE X is CWUser, EXISTS(X in_group A)')
+
     def test_daterange(self):
         with self.admin_access.web_request() as req:
             rset, rqlst, filtered_variable = self.prepare_rqlst(req)
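
The new test_hasrelation above checks that HasRelationFacet always boils down to an
EXISTS restriction on the relation, whatever value the form carries. A hedged sketch
of the kind of facet definition this behaviour supports (the class below is
illustrative and not part of this changeset):

    from cubicweb.web import facet

    class HasGroupFacet(facet.HasRelationFacet):
        """filter CWUser results on whether they belong to any group at all"""
        __regid__ = 'has_group'
        rtype = 'in_group'
        role = 'subject'
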
--- a/web/test/unittest_form.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/web/test/unittest_form.py	Mon Jun 22 14:27:37 2015 +0200
@@ -39,7 +39,7 @@
     def test_form_field_format(self):
         with self.admin_access.web_request() as req:
             form = FieldsForm(req, None)
-            self.assertEqual(StringField().format(form), 'text/html')
+            self.assertEqual(StringField().format(form), 'text/plain')
             req.cnx.execute('INSERT CWProperty X: X pkey "ui.default-text-format", X value "text/rest", X for_user U WHERE U login "admin"')
             req.cnx.commit()
             self.assertEqual(StringField().format(form), 'text/rest')
--- a/web/test/unittest_formfields.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/web/test/unittest_formfields.py	Mon Jun 22 14:27:37 2015 +0200
@@ -150,7 +150,7 @@
             self.assertEqual(description_format_field.internationalizable, True)
             self.assertEqual(description_format_field.sort, True)
             # unlike below, initial is bound to form.form_field_format
-            self.assertEqual(description_format_field.value(form), 'text/html')
+            self.assertEqual(description_format_field.value(form), 'text/plain')
             req.cnx.execute('INSERT CWProperty X: X pkey "ui.default-text-format", X value "text/rest", X for_user U WHERE U login "admin"')
             req.cnx.commit()
             self.assertEqual(description_format_field.value(form), 'text/rest')
--- a/web/test/unittest_propertysheet.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/web/test/unittest_propertysheet.py	Mon Jun 22 14:27:37 2015 +0200
@@ -1,13 +1,21 @@
 import os
 from os.path import join, dirname
 from shutil import rmtree
+import errno
+import tempfile
 
 from logilab.common.testlib import TestCase, unittest_main
 
 from cubicweb.web.propertysheet import PropertySheet, lazystr
 
 DATADIR = join(dirname(__file__), 'data')
-CACHEDIR = join(DATADIR, 'uicache')
+
+try:
+    os.makedirs(join(DATADIR, 'uicache'))
+except OSError as err:
+    if err.errno != errno.EEXIST:
+        raise
+CACHEDIR = tempfile.mkdtemp(dir=join(DATADIR, 'uicache'))
 
 class PropertySheetTC(TestCase):
 
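
The cache directory setup above combines the usual "create unless it already exists"
idiom with a per-run temporary directory, so repeated or concurrent test runs no
longer share the same uicache. The same pattern as a standalone helper, for reference:

    import errno
    import os
    import tempfile

    def make_run_cachedir(base):
        # create the parent directory, ignoring the race where it already exists
        try:
            os.makedirs(base)
        except OSError as err:
            if err.errno != errno.EEXIST:
                raise
        # give each run its own private directory underneath it
        return tempfile.mkdtemp(dir=base)
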
--- a/web/test/unittest_views_basecontrollers.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/web/test/unittest_views_basecontrollers.py	Mon Jun 22 14:27:37 2015 +0200
@@ -156,9 +156,9 @@
 
     def test_user_can_change_its_password(self):
         with self.admin_access.repo_cnx() as cnx:
-            self.create_user(cnx, 'user')
+            self.create_user(cnx, u'user')
             cnx.commit()
-        with self.new_access('user').web_request() as req:
+        with self.new_access(u'user').web_request() as req:
             eid = unicode(req.user.eid)
             req.form = {
                 'eid': eid, '__maineid' : eid,
@@ -287,7 +287,7 @@
 
     def test_edit_multiple_linked(self):
         with self.admin_access.web_request() as req:
-            peid = unicode(self.create_user(req, 'adim').eid)
+            peid = unicode(self.create_user(req, u'adim').eid)
             req.form = {'eid': [peid, 'Y'], '__maineid': peid,
 
                         '__type:'+peid: u'CWUser',
@@ -548,7 +548,7 @@
             self.assertIn('_cwmsgid', params)
             eid = req.create_entity('EmailAddress', address=u'hop@logilab.fr').eid
             req.execute('SET X use_email E WHERE E eid %(e)s, X eid %(x)s',
-                        {'x': self.session.user.eid, 'e': eid})
+                        {'x': req.user.eid, 'e': eid})
             req.cnx.commit()
             req.form = {'eid': unicode(eid), '__type:%s'%eid: 'EmailAddress',
                         '__action_delete': ''}
@@ -692,7 +692,7 @@
 
     def test_nonregr_rollback_on_validation_error(self):
         with self.admin_access.web_request() as req:
-            p = self.create_user(req, "doe")
+            p = self.create_user(req, u"doe")
             # do not try to skip 'primary_email' for this test
             old_skips = p.__class__.skip_copy_for
             p.__class__.skip_copy_for = ()
@@ -754,10 +754,10 @@
 class ReportBugControllerTC(CubicWebTC):
 
     def test_usable_by_guest(self):
-        with self.new_access('anon').web_request() as req:
+        with self.new_access(u'anon').web_request() as req:
             self.assertRaises(NoSelectableObject,
                               self.vreg['controllers'].select, 'reportbug', req)
-        with self.new_access('anon').web_request(description='hop') as req:
+        with self.new_access(u'anon').web_request(description='hop') as req:
             self.vreg['controllers'].select('reportbug', req)
 
 
@@ -836,9 +836,9 @@
             deletes = get_pending_deletes(req)
             self.assertEqual(deletes, [])
             inserts = get_pending_inserts(req)
-            self.assertEqual(inserts, ['12:tags:13', '12:tags:14'])
+            self.assertCountEqual(inserts, ['12:tags:13', '12:tags:14'])
             inserts = get_pending_inserts(req, 12)
-            self.assertEqual(inserts, ['12:tags:13', '12:tags:14'])
+            self.assertCountEqual(inserts, ['12:tags:13', '12:tags:14'])
             inserts = get_pending_inserts(req, 13)
             self.assertEqual(inserts, ['12:tags:13'])
             inserts = get_pending_inserts(req, 14)
@@ -855,9 +855,9 @@
             inserts = get_pending_inserts(req)
             self.assertEqual(inserts, [])
             deletes = get_pending_deletes(req)
-            self.assertEqual(deletes, ['12:tags:13', '12:tags:14'])
+            self.assertCountEqual(deletes, ['12:tags:13', '12:tags:14'])
             deletes = get_pending_deletes(req, 12)
-            self.assertEqual(deletes, ['12:tags:13', '12:tags:14'])
+            self.assertCountEqual(deletes, ['12:tags:13', '12:tags:14'])
             deletes = get_pending_deletes(req, 13)
             self.assertEqual(deletes, ['12:tags:13'])
             deletes = get_pending_deletes(req, 14)
@@ -880,7 +880,7 @@
         with self.remote_calling('add_pending_inserts',
                                  [('12', 'tags', '13'), ('12', 'tags', '14')]) as (_, req):
             inserts = get_pending_inserts(req)
-            self.assertEqual(inserts, ['12:tags:13', '12:tags:14'])
+            self.assertCountEqual(inserts, ['12:tags:13', '12:tags:14'])
             req.remove_pending_operations()
 
 
@@ -1016,8 +1016,8 @@
 
     def setup_database(self):
         with self.admin_access.repo_cnx() as cnx:
-            self.toto = self.create_user(cnx, 'toto',
-                                         password='toto',
+            self.toto = self.create_user(cnx, u'toto',
+                                         password=u'toto',
                                          groups=('users',),
                                          commit=False)
             self.txuuid_toto = cnx.commit()
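
Several assertions above move from assertEqual to assertCountEqual because the
pending insert/delete identifiers come out of an unordered container, so only
membership is guaranteed, not order. A tiny standalone illustration (this uses
Python 3's unittest spelling; Python 2.7 calls the same check assertItemsEqual):

    import unittest

    class OrderInsensitiveTC(unittest.TestCase):
        def test_same_elements_any_order(self):
            # passes: same elements, order ignored
            self.assertCountEqual(['12:tags:13', '12:tags:14'],
                                  ['12:tags:14', '12:tags:13'])
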
--- a/web/test/unittest_views_editforms.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/web/test/unittest_views_editforms.py	Mon Jun 22 14:27:37 2015 +0200
@@ -41,7 +41,7 @@
         with self.admin_access.web_request() as req:
             AFFK.tag_subject_of(('CWUser', 'login', '*'),
                                 {'widget': AutoCompletionWidget(autocomplete_initfunc='get_logins')})
-            form = self.vreg['forms'].select('edition', req, entity=self.user(req))
+            form = self.vreg['forms'].select('edition', req, entity=req.user)
             field = form.field_by_name('login', 'subject')
             self.assertIsInstance(field.widget, AutoCompletionWidget)
             AFFK.del_rtag('CWUser', 'login', '*', 'subject')
@@ -52,18 +52,18 @@
             e = self.vreg['etypes'].etype_class('CWUser')(req)
             # see custom configuration in views.cwuser
             self.assertEqual(rbc(e, 'main', 'attributes'),
-                              [('login', 'subject'),
-                               ('upassword', 'subject'),
-                               ('firstname', 'subject'),
-                               ('surname', 'subject'),
-                               ('in_group', 'subject'),
-                               ])
-            self.assertListEqual(rbc(e, 'muledit', 'attributes'),
+                             [('login', 'subject'),
+                              ('upassword', 'subject'),
+                              ('firstname', 'subject'),
+                              ('surname', 'subject'),
+                              ('in_group', 'subject'),
+                              ])
+            self.assertEqual(rbc(e, 'muledit', 'attributes'),
                                   [('login', 'subject'),
                                    ('upassword', 'subject'),
                                    ('in_group', 'subject'),
                                    ])
-            self.assertListEqual(rbc(e, 'main', 'metadata'),
+            self.assertCountEqual(rbc(e, 'main', 'metadata'),
                                   [('last_login_time', 'subject'),
                                    ('cw_source', 'subject'),
                                    ('creation_date', 'subject'),
@@ -75,7 +75,7 @@
             # XXX skip 'tags' relation here and in the hidden category because
             # of some test interdependancy when pytest is launched on whole cw
             # (appears here while expected in hidden
-            self.assertListEqual([x for x in rbc(e, 'main', 'relations')
+            self.assertCountEqual([x for x in rbc(e, 'main', 'relations')
                                    if x != ('tags', 'object')],
                                   [('connait', 'subject'),
                                    ('custom_workflow', 'subject'),
@@ -123,14 +123,14 @@
             self.assertListEqual(rbc(e, 'muledit', 'attributes'),
                                   [('nom', 'subject'),
                                    ])
-            self.assertListEqual(rbc(e, 'main', 'metadata'),
+            self.assertCountEqual(rbc(e, 'main', 'metadata'),
                                   [('cw_source', 'subject'),
                                    ('creation_date', 'subject'),
                                    ('modification_date', 'subject'),
                                    ('created_by', 'subject'),
                                    ('owned_by', 'subject'),
                                    ])
-            self.assertListEqual(rbc(e, 'main', 'relations'),
+            self.assertCountEqual(rbc(e, 'main', 'relations'),
                                   [('travaille', 'subject'),
                                    ('manager', 'object'),
                                    ('connait', 'object'),
@@ -156,15 +156,15 @@
     def test_attribute_add_permissions(self):
         # https://www.cubicweb.org/ticket/4342844
         with self.admin_access.repo_cnx() as cnx:
-            self.create_user(cnx, 'toto')
+            self.create_user(cnx, u'toto')
             cnx.commit()
-        with self.new_access('toto').web_request() as req:
+        with self.new_access(u'toto').web_request() as req:
             e = self.vreg['etypes'].etype_class('Personne')(req)
             cform = self.vreg['forms'].select('edition', req, entity=e)
             self.assertIn('sexe',
                           [rschema.type
                            for rschema, _ in cform.editable_attributes()])
-            with self.new_access('toto').repo_cnx() as cnx:
+            with self.new_access(u'toto').repo_cnx() as cnx:
                 person_eid = cnx.create_entity('Personne', nom=u'Robert').eid
                 cnx.commit()
             person = req.entity_from_eid(person_eid)
--- a/web/test/unittest_views_xmlrss.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/web/test/unittest_views_xmlrss.py	Mon Jun 22 14:27:37 2015 +0200
@@ -11,13 +11,13 @@
                 req.user.view('xml'),
                 '''\
 <CWUser eid="6" cwuri="http://testing.fr/cubicweb/6" cwsource="system">
+  <creation_date>%(cdate)s</creation_date>
+  <firstname/>
+  <last_login_time/>
   <login>admin</login>
-  <upassword/>
-  <firstname/>
+  <modification_date>%(mdate)s</modification_date>
   <surname/>
-  <last_login_time/>
-  <creation_date>%(cdate)s</creation_date>
-  <modification_date>%(mdate)s</modification_date>
+  <upassword/>
   <tags role="object">
   </tags>
   <in_group role="subject">
--- a/web/test/unittest_viewselector.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/web/test/unittest_viewselector.py	Mon Jun 22 14:27:37 2015 +0200
@@ -33,7 +33,6 @@
                                 cwproperties, cwsources, xmlrss, rdf, csvexport, json,
                                 undohistory)
 
-from cubes.folder import views as folderviews
 
 USERACTIONS = [actions.UserPreferencesAction,
                actions.UserInfoAction,
@@ -49,7 +48,7 @@
                  debug.SiteInfoAction]
 
 if hasattr(rdf, 'RDFView'): # not available if rdflib not installed
-    RDFVIEWS = [('rdf', rdf.RDFView)]
+    RDFVIEWS = [('rdf', rdf.RDFView), ('n3rdf', rdf.RDFN3View)]
 else:
     RDFVIEWS = []
 
@@ -101,7 +100,6 @@
                                   ('schema', schema.SchemaView),
                                   ('siteinfo', debug.SiteInfoView),
                                   ('systempropertiesform', cwproperties.SystemCWPropertiesForm),
-                                  ('tree', folderviews.FolderTreeView),
                                   ('undohistory', undohistory.UndoHistoryView)])
 
     def test_possible_views_noresult(self):
@@ -117,50 +115,51 @@
     def test_possible_views_one_egroup(self):
         with self.admin_access.web_request() as req:
             rset = req.execute('CWGroup X WHERE X name "managers"')
-            self.assertListEqual(self.pviews(req, rset),
-                                 [('csvexport', csvexport.CSVRsetView),
-                                  ('ecsvexport', csvexport.CSVEntityView),
-                                  ('ejsonexport', json.JsonEntityView),
-                                  ('filetree', treeview.FileTreeView),
-                                  ('jsonexport', json.JsonRsetView),
-                                  ('list', baseviews.ListView),
-                                  ('oneline', baseviews.OneLineView),
-                                  ('owlabox', owl.OWLABOXView),
-                                  ('primary', cwuser.CWGroupPrimaryView)] + \
-                                 RDFVIEWS + \
-                                 [('rsetxml', xmlrss.XMLRsetView),
-                                  ('rss', xmlrss.RSSView),
-                                  ('sameetypelist', baseviews.SameETypeListView),
-                                  ('security', management.SecurityManagementView),
-                                  ('table', tableview.RsetTableView),
-                                  ('text', baseviews.TextView),
-                                  ('treeview', treeview.TreeView),
-                                  ('xbel', xbel.XbelView),
-                                  ('xml', xmlrss.XMLView)])
+            self.assertCountEqual(self.pviews(req, rset),
+                                  RDFVIEWS +
+                                  [('csvexport', csvexport.CSVRsetView),
+                                   ('ecsvexport', csvexport.CSVEntityView),
+                                   ('ejsonexport', json.JsonEntityView),
+                                   ('filetree', treeview.FileTreeView),
+                                   ('jsonexport', json.JsonRsetView),
+                                   ('list', baseviews.ListView),
+                                   ('oneline', baseviews.OneLineView),
+                                   ('owlabox', owl.OWLABOXView),
+                                   ('primary', cwuser.CWGroupPrimaryView),
+                                   ('rsetxml', xmlrss.XMLRsetView),
+                                   ('rss', xmlrss.RSSView),
+                                   ('sameetypelist', baseviews.SameETypeListView),
+                                   ('security', management.SecurityManagementView),
+                                   ('table', tableview.RsetTableView),
+                                   ('text', baseviews.TextView),
+                                   ('treeview', treeview.TreeView),
+                                   ('xbel', xbel.XbelView),
+                                   ('xml', xmlrss.XMLView)])
 
     def test_possible_views_multiple_egroups(self):
         with self.admin_access.web_request() as req:
             rset = req.execute('CWGroup X')
-            self.assertListEqual(self.pviews(req, rset),
-                                 [('csvexport', csvexport.CSVRsetView),
-                                  ('ecsvexport', csvexport.CSVEntityView),
-                                  ('ejsonexport', json.JsonEntityView),
-                                  ('filetree', treeview.FileTreeView),
-                                  ('jsonexport', json.JsonRsetView),
-                                  ('list', baseviews.ListView),
-                                  ('oneline', baseviews.OneLineView),
-                                  ('owlabox', owl.OWLABOXView),
-                                  ('primary', cwuser.CWGroupPrimaryView)] + RDFVIEWS + [
-                                  ('rsetxml', xmlrss.XMLRsetView),
-                                  ('rss', xmlrss.RSSView),
-                                  ('sameetypelist', baseviews.SameETypeListView),
-                                  ('security', management.SecurityManagementView),
-                                  ('table', tableview.RsetTableView),
-                                  ('text', baseviews.TextView),
-                                  ('treeview', treeview.TreeView),
-                                  ('xbel', xbel.XbelView),
-                                  ('xml', xmlrss.XMLView),
-                                  ])
+            self.assertCountEqual(self.pviews(req, rset),
+                                  RDFVIEWS +
+                                  [('csvexport', csvexport.CSVRsetView),
+                                   ('ecsvexport', csvexport.CSVEntityView),
+                                   ('ejsonexport', json.JsonEntityView),
+                                   ('filetree', treeview.FileTreeView),
+                                   ('jsonexport', json.JsonRsetView),
+                                   ('list', baseviews.ListView),
+                                   ('oneline', baseviews.OneLineView),
+                                   ('owlabox', owl.OWLABOXView),
+                                   ('primary', cwuser.CWGroupPrimaryView),
+                                   ('rsetxml', xmlrss.XMLRsetView),
+                                   ('rss', xmlrss.RSSView),
+                                   ('sameetypelist', baseviews.SameETypeListView),
+                                   ('security', management.SecurityManagementView),
+                                   ('table', tableview.RsetTableView),
+                                   ('text', baseviews.TextView),
+                                   ('treeview', treeview.TreeView),
+                                   ('xbel', xbel.XbelView),
+                                   ('xml', xmlrss.XMLView),
+                                   ])
 
     def test_propertiesform_admin(self):
         assert self.vreg['views']['propertiesform']
@@ -172,7 +171,7 @@
             self.assertTrue(self.vreg['views'].select('propertiesform', req, rset=rset2))
 
     def test_propertiesform_anon(self):
-        with self.new_access('anon').web_request() as req:
+        with self.new_access(u'anon').web_request() as req:
             rset1 = req.execute('CWUser X WHERE X login "admin"')
             self.assertRaises(NoSelectableObject, self.vreg['views'].select, 'propertiesform', req, rset=None)
             self.assertRaises(NoSelectableObject, self.vreg['views'].select, 'propertiesform', req, rset=rset1)
@@ -181,9 +180,9 @@
 
     def test_propertiesform_jdoe(self):
         with self.admin_access.repo_cnx() as cnx:
-            self.create_user(cnx, 'jdoe')
+            self.create_user(cnx, u'jdoe')
             cnx.commit()
-        with self.new_access('jdoe').web_request() as req:
+        with self.new_access(u'jdoe').web_request() as req:
             rset1 = req.execute('CWUser X WHERE X login "admin"')
             rset2 = req.execute('CWUser X WHERE X login "jdoe"')
             self.assertTrue(self.vreg['views'].select('propertiesform', req, rset=None))
@@ -193,24 +192,25 @@
     def test_possible_views_multiple_different_types(self):
         with self.admin_access.web_request() as req:
             rset = req.execute('Any X')
-            self.assertListEqual(self.pviews(req, rset),
-                                 [('csvexport', csvexport.CSVRsetView),
-                                  ('ecsvexport', csvexport.CSVEntityView),
-                                  ('ejsonexport', json.JsonEntityView),
-                                  ('filetree', treeview.FileTreeView),
-                                  ('jsonexport', json.JsonRsetView),
-                                  ('list', baseviews.ListView),
-                                  ('oneline', baseviews.OneLineView),
-                                  ('owlabox', owl.OWLABOXView),
-                                  ('primary', primary.PrimaryView),] + RDFVIEWS + [
-                                  ('rsetxml', xmlrss.XMLRsetView),
-                                  ('rss', xmlrss.RSSView),
-                                  ('security', management.SecurityManagementView),
-                                  ('table', tableview.RsetTableView),
-                                  ('text', baseviews.TextView),
-                                  ('treeview', treeview.TreeView),
-                                  ('xbel', xbel.XbelView),
-                                  ('xml', xmlrss.XMLView),
+            self.assertCountEqual(self.pviews(req, rset),
+                                  RDFVIEWS +
+                                  [('csvexport', csvexport.CSVRsetView),
+                                   ('ecsvexport', csvexport.CSVEntityView),
+                                   ('ejsonexport', json.JsonEntityView),
+                                   ('filetree', treeview.FileTreeView),
+                                   ('jsonexport', json.JsonRsetView),
+                                   ('list', baseviews.ListView),
+                                   ('oneline', baseviews.OneLineView),
+                                   ('owlabox', owl.OWLABOXView),
+                                   ('primary', primary.PrimaryView),
+                                   ('rsetxml', xmlrss.XMLRsetView),
+                                   ('rss', xmlrss.RSSView),
+                                   ('security', management.SecurityManagementView),
+                                   ('table', tableview.RsetTableView),
+                                   ('text', baseviews.TextView),
+                                   ('treeview', treeview.TreeView),
+                                   ('xbel', xbel.XbelView),
+                                   ('xml', xmlrss.XMLView),
                                   ])
 
     def test_possible_views_any_rset(self):
@@ -226,28 +226,29 @@
     def test_possible_views_multiple_eusers(self):
         with self.admin_access.web_request() as req:
             rset = req.execute('CWUser X')
-            self.assertListEqual(self.pviews(req, rset),
-                                 [('csvexport', csvexport.CSVRsetView),
-                                  ('ecsvexport', csvexport.CSVEntityView),
-                                  ('ejsonexport', json.JsonEntityView),
-                                  ('filetree', treeview.FileTreeView),
-                                  ('foaf', cwuser.FoafView),
-                                  ('jsonexport', json.JsonRsetView),
-                                  ('list', baseviews.ListView),
-                                  ('oneline', baseviews.OneLineView),
-                                  ('owlabox', owl.OWLABOXView),
-                                  ('primary', primary.PrimaryView)] + RDFVIEWS + [
-                                  ('rsetxml', xmlrss.XMLRsetView),
-                                  ('rss', xmlrss.RSSView),
-                                  ('sameetypelist', baseviews.SameETypeListView),
-                                  ('security', management.SecurityManagementView),
-                                  ('table', tableview.RsetTableView),
-                                  ('text', baseviews.TextView),
-                                  ('treeview', treeview.TreeView),
-                                  ('vcard', vcard.VCardCWUserView),
-                                  ('xbel', xbel.XbelView),
-                                  ('xml', xmlrss.XMLView),
-                                  ])
+            self.assertCountEqual(self.pviews(req, rset),
+                                  RDFVIEWS +
+                                  [('csvexport', csvexport.CSVRsetView),
+                                   ('ecsvexport', csvexport.CSVEntityView),
+                                   ('ejsonexport', json.JsonEntityView),
+                                   ('filetree', treeview.FileTreeView),
+                                   ('foaf', cwuser.FoafView),
+                                   ('jsonexport', json.JsonRsetView),
+                                   ('list', baseviews.ListView),
+                                   ('oneline', baseviews.OneLineView),
+                                   ('owlabox', owl.OWLABOXView),
+                                   ('primary', primary.PrimaryView),
+                                   ('rsetxml', xmlrss.XMLRsetView),
+                                   ('rss', xmlrss.RSSView),
+                                   ('sameetypelist', baseviews.SameETypeListView),
+                                   ('security', management.SecurityManagementView),
+                                   ('table', tableview.RsetTableView),
+                                   ('text', baseviews.TextView),
+                                   ('treeview', treeview.TreeView),
+                                   ('vcard', vcard.VCardCWUserView),
+                                   ('xbel', xbel.XbelView),
+                                   ('xml', xmlrss.XMLView),
+                                   ])
 
     def test_possible_actions_none_rset(self):
         with self.admin_access.web_request() as req:
--- a/web/test/unittest_web.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/web/test/unittest_web.py	Mon Jun 22 14:27:37 2015 +0200
@@ -126,17 +126,24 @@
         self.assertIn('HttpOnly', webreq.getheader('set-cookie'))
 
 
-class LogQueriesTC(CubicWebServerTC):
+class MiscOptionsTC(CubicWebServerTC):
     @classmethod
     def init_config(cls, config):
-        super(LogQueriesTC, cls).init_config(config)
+        super(MiscOptionsTC, cls).init_config(config)
         cls.logfile = tempfile.NamedTemporaryFile()
         config.global_set_option('query-log-file', cls.logfile.name)
+        config.global_set_option('datadir-url', '//static.testing.fr/')
+        # call load_configuration again to let the config reset its datadir_url
+        config.load_configuration()
 
     def test_log_queries(self):
         self.web_request()
         self.assertTrue(self.logfile.read())
 
+    def test_datadir_url(self):
+        webreq = self.web_request()
+        self.assertNotIn('/data/', webreq.read())
+
     @classmethod
     def tearDownClass(cls):
         cls.logfile.close()
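
The new test_datadir_url relies on the fact that, once datadir-url points at an
external host, generated pages reference that host instead of the instance-local
/data/ path. A hedged sketch of the same check in isolation (the extra positive
assertion is illustrative, not part of this changeset):

    webreq = self.web_request()
    body = webreq.read()
    self.assertNotIn('/data/', body)
    # with the value set in init_config above, static assets should now be
    # referenced from //static.testing.fr/ instead
    self.assertIn('static.testing.fr', body)
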
--- a/web/views/ajaxcontroller.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/web/views/ajaxcontroller.py	Mon Jun 22 14:27:37 2015 +0200
@@ -427,26 +427,6 @@
     """returns the URL of the external resource named `resource`"""
     return self._cw.uiprops[resource]
 
-@ajaxfunc(output_type='json', check_pageid=True)
-def user_callback(self, cbname):
-    """execute the previously registered user callback `cbname`.
-
-    If matching callback is not found, return None
-    """
-    page_data = self._cw.session.data.get(self._cw.pageid, {})
-    try:
-        cb = page_data[cbname]
-    except KeyError:
-        self.warning('unable to find user callback %s', cbname)
-        return None
-    return cb(self._cw)
-
-
-@ajaxfunc
-def unregister_user_callback(self, cbname):
-    """unregister user callback `cbname`"""
-    self._cw.unregister_callback(self._cw.pageid, cbname)
-
 @ajaxfunc
 def unload_page_data(self):
     """remove user's session data associated to current pageid"""
--- a/web/views/authentication.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/web/views/authentication.py	Mon Jun 22 14:27:37 2015 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -26,9 +26,8 @@
 
 from cubicweb import AuthenticationError, BadConnectionId
 from cubicweb.view import Component
-from cubicweb.dbapi import _repo_connect, ConnectionProperties
 from cubicweb.web import InvalidSession
-from cubicweb.web.application import AbstractAuthenticationManager
+
 
 class NoAuthInfo(Exception): pass
 
@@ -102,6 +101,36 @@
     '("ie" instead of "ei")')
 
 
+class AbstractAuthenticationManager(Component):
+    """authenticate user associated to a request and check session validity"""
+    __abstract__ = True
+    __regid__ = 'authmanager'
+
+    def __init__(self, repo):
+        self.vreg = repo.vreg
+
+    def validate_session(self, req, session):
+        """check session validity, reconnecting it to the repository if the
+        associated connection expired on the repository side (hence the
+        necessity for this method).
+
+        raise :exc:`InvalidSession` if the session is corrupted for one reason
+        or another and should be closed
+        """
+        raise NotImplementedError()
+
+    def authenticate(self, req):
+        """authenticate user using connection information found in the request,
+        and return a corresponding :class:`~cubicweb.dbapi.Connection` instance,
+        as well as the login and authentication information dictionary used to open
+        the connection.
+
+        raise :exc:`cubicweb.AuthenticationError` if authentication failed
+        (no authentication info found or wrong user/password)
+        """
+        raise NotImplementedError()
+
+
 class RepositoryAuthenticationManager(AbstractAuthenticationManager):
     """authenticate user associated to a request and check session validity"""
 
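
With the abstract manager now defined here rather than in cubicweb.web.application,
a custom authentication scheme is a plain subclass registered under the 'authmanager'
regid. A purely illustrative skeleton (the header-trusting logic below is an
assumption made for the sketch, not something this changeset provides or recommends):

    from cubicweb import AuthenticationError

    class HeaderAuthenticationManager(AbstractAuthenticationManager):
        """hypothetical manager trusting a reverse-proxy supplied header"""

        def validate_session(self, req, session):
            # a real manager would check that the session's repository
            # connection is still usable and raise InvalidSession otherwise
            return session

        def authenticate(self, req):
            login = req.get_header('X-Remote-User')
            if not login:
                raise AuthenticationError('missing X-Remote-User header')
            # a real implementation would open and return a repository
            # connection here, along with the authentication info used
            return login
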
--- a/web/views/basecomponents.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/web/views/basecomponents.py	Mon Jun 22 14:27:37 2015 +0200
@@ -55,7 +55,7 @@
         else:
             rset = self.cw_rset
         # display multilines query as one line
-        rql = rset is not None and rset.printable_rql(encoded=False) or req.form.get('rql', '')
+        rql = rset is not None and rset.printable_rql() or req.form.get('rql', '')
         rql = rql.replace(u"\n", u" ")
         rql_suggestion_comp = self._cw.vreg['components'].select_or_none('rql.suggestions', self._cw)
         if rql_suggestion_comp is not None:
--- a/web/views/basecontrollers.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/web/views/basecontrollers.py	Mon Jun 22 14:27:37 2015 +0200
@@ -92,7 +92,7 @@
     def publish(self, rset=None):
         """log in the instance"""
         path = self._cw.form.get('postlogin_path', '')
-        # redirect expect a URL, not a path. Also path may contains a query
+        # Redirect expects a URL, not a path. Also path may contain a query
         # string, hence should not be given to _cw.build_url()
         raise Redirect(self._cw.base_url() + path)
 
--- a/web/views/basetemplates.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/web/views/basetemplates.py	Mon Jun 22 14:27:37 2015 +0200
@@ -514,10 +514,6 @@
         if config['auth-mode'] != 'http':
             self.login_form(id) # Cookie authentication
         w(u'</div>')
-        if self._cw.https and config.anonymous_user()[0] and config['https-deny-anonymous']:
-            path = xml_escape(config['base-url'] + self._cw.relative_path())
-            w(u'<div class="loginMessage"><a href="%s">%s</a></div>\n'
-              % (path, self._cw._('No account? Try public access at %s') % path))
         w(u'</div>\n')
 
     def login_form(self, id):
--- a/web/views/cwsources.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/web/views/cwsources.py	Mon Jun 22 14:27:37 2015 +0200
@@ -231,6 +231,7 @@
     __regid__ = 'cwsource'
     title = _('data sources')
     category = 'manage'
+    order = 100
 
 
 class CWSourcesManagementView(StartupView):
--- a/web/views/debug.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/web/views/debug.py	Mon Jun 22 14:27:37 2015 +0200
@@ -24,7 +24,6 @@
 
 from logilab.mtconverter import xml_escape
 
-from cubicweb import BadConnectionId
 from cubicweb.predicates import none_rset, match_user_groups
 from cubicweb.view import StartupView
 from cubicweb.web.views import actions, tabs
@@ -98,6 +97,13 @@
         w(u'<h3>%s</h3>' % _('resources usage'))
         w(u'<table>')
         stats = self._cw.call_service('repo_stats')
+        stats['looping_tasks'] = ', '.join('%s (%s seconds)' % (n, i) for n, i in stats['looping_tasks'])
+        stats['threads'] = ', '.join(sorted(stats['threads']))
+        for k in stats:
+            if k in ('extid_cache_size', 'type_source_cache_size'):
+                continue
+            if k.endswith('_cache_size'):
+                stats[k] = '%s / %s' % (stats[k]['size'], stats[k]['maxsize'])
         for element in sorted(stats):
             w(u'<tr><th align="left">%s</th><td>%s %s</td></tr>'
                    % (element, xml_escape(unicode(stats[element])),
@@ -179,31 +185,13 @@
     cache_max_age = 0
 
     def call(self, **kwargs):
-        from cubicweb._gcdebug import gc_info
-        from rql.stmts import Union
-        from cubicweb.appobject import AppObject
-        from cubicweb.rset import ResultSet
-        from cubicweb.dbapi import Connection, Cursor
-        from cubicweb.web.request import CubicWebRequestBase
-        lookupclasses = (AppObject,
-                         Union, ResultSet,
-                         Connection, Cursor,
-                         CubicWebRequestBase)
-        try:
-            from cubicweb.server.session import Session, InternalSession
-            lookupclasses += (InternalSession, Session)
-        except ImportError:
-            pass # no server part installed
+        stats = self._cw.call_service('repo_gc_stats')
         self.w(u'<h2>%s</h2>' % _('Garbage collection information'))
-        counters, ocounters, garbage = gc_info(lookupclasses,
-                                               viewreferrersclasses=())
         self.w(u'<h3>%s</h3>' % self._cw._('Looked up classes'))
-        values = sorted(counters.iteritems(), key=lambda x: x[1], reverse=True)
-        self.wview('pyvaltable', pyvalue=values)
+        self.wview('pyvaltable', pyvalue=stats['lookupclasses'])
         self.w(u'<h3>%s</h3>' % self._cw._('Most referenced classes'))
-        values = sorted(ocounters.iteritems(), key=lambda x: x[1], reverse=True)
-        self.wview('pyvaltable', pyvalue=values[:self._cw.form.get('nb', 20)])
-        if garbage:
+        self.wview('pyvaltable', pyvalue=stats['referenced'])
+        if stats['unreachable']:
             self.w(u'<h3>%s</h3>' % self._cw._('Unreachable objects'))
-            values = sorted(xml_escape(repr(o)) for o in garbage)
+            values = [xml_escape(val) for val in stats['unreachable']]
             self.wview('pyvallist', pyvalue=values)
--- a/web/views/management.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/web/views/management.py	Mon Jun 22 14:27:37 2015 +0200
@@ -187,6 +187,13 @@
 
     def call(self):
         stats = self._cw.call_service('repo_stats')
+        stats['looping_tasks'] = ', '.join('%s (%s seconds)' % (n, i) for n, i in stats['looping_tasks'])
+        stats['threads'] = ', '.join(sorted(stats['threads']))
+        for k in stats:
+            if k in ('extid_cache_size', 'type_source_cache_size'):
+                continue
+            if k.endswith('_cache_size'):
+                stats[k] = '%s / %s' % (stats[k]['size'], stats[k]['maxsize'])
         results = []
         for element in stats:
             results.append(u'%s %s' % (element, stats[element]))
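
The same flattening of the repo_stats payload now appears both here and in debug.py;
should it grow further, a shared helper would keep the two views in sync. A hedged
sketch of such a helper, lifted directly from the code above (the function name is
an assumption):

    def flatten_repo_stats(stats):
        """return a copy of the repo_stats dict with display-friendly values"""
        stats = dict(stats)
        stats['looping_tasks'] = ', '.join('%s (%s seconds)' % (n, i)
                                           for n, i in stats['looping_tasks'])
        stats['threads'] = ', '.join(sorted(stats['threads']))
        for k in stats:
            if k in ('extid_cache_size', 'type_source_cache_size'):
                continue
            if k.endswith('_cache_size'):
                stats[k] = '%s / %s' % (stats[k]['size'], stats[k]['maxsize'])
        return stats
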
--- a/web/views/rdf.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/web/views/rdf.py	Mon Jun 22 14:27:37 2015 +0200
@@ -47,6 +47,8 @@
         __regid__ = 'rdf'
         title = _('rdf export')
         templatable = False
+        binary = True
+        format = 'xml'
         content_type = 'text/xml' # +rdf
 
         def call(self):
@@ -57,7 +59,7 @@
             for i in xrange(self.cw_rset.rowcount):
                 entity = self.cw_rset.complete_entity(i, 0)
                 self.entity2graph(graph, entity)
-            self.w(graph.serialize().decode('utf-8'))
+            self.w(graph.serialize(format=self.format))
 
         def entity_call(self, entity):
             self.call()
@@ -100,3 +102,8 @@
                             else:
                                 add( (URIRef(related.cwuri), CW[rtype], cwuri) )
 
+
+    class RDFN3View(RDFView):
+        __regid__ = 'n3rdf'
+        format = 'n3'
+        content_type = 'text/n3'
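
The N3 variant reuses the whole RDF serialisation machinery and only swaps the rdflib
output format and content type; it is reached like any other view through its regid.
A short usage sketch (assumes rdflib is installed, since these views are only defined
in that case; the URL is illustrative):

    # from test code
    with self.admin_access.web_request() as req:
        n3 = req.user.view('n3rdf')   # rendered with rdflib's 'n3' serializer
    # over HTTP, e.g. http://example.org/cwuser/admin?vid=n3rdf
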
--- a/web/views/sessions.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/web/views/sessions.py	Mon Jun 22 14:27:37 2015 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -15,20 +15,83 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""web session component: by dfault the session is actually the db connection
-object :/
-"""
-
+"""web session: by default the session is actually the db connection """
 __docformat__ = "restructuredtext en"
 
 from time import time
 
-from cubicweb import (RepositoryError, Unauthorized, AuthenticationError,
-                      BadConnectionId)
-from cubicweb.web import InvalidSession, Redirect
-from cubicweb.web.application import AbstractSessionManager
-from cubicweb.dbapi import ProgrammingError, DBAPISession
-from cubicweb import repoapi
+from cubicweb import RepositoryError, Unauthorized, BadConnectionId
+from cubicweb.web import InvalidSession, component
+
+
+class AbstractSessionManager(component.Component):
+    """manage session data associated to a session identifier"""
+    __abstract__ = True
+    __regid__ = 'sessionmanager'
+
+    def __init__(self, repo):
+        vreg = repo.vreg
+        self.session_time = vreg.config['http-session-time'] or None
+        self.authmanager = vreg['components'].select('authmanager', repo=repo)
+        interval = (self.session_time or 0) / 2.
+        if vreg.config.anonymous_user()[0] is not None:
+            self.cleanup_anon_session_time = vreg.config['cleanup-anonymous-session-time'] or 5 * 60
+            assert self.cleanup_anon_session_time > 0
+            if self.session_time is not None:
+                self.cleanup_anon_session_time = min(self.session_time,
+                                                     self.cleanup_anon_session_time)
+            interval = self.cleanup_anon_session_time / 2.
+        # we don't want to check sessions more than once every 5 minutes
+        self.clean_sessions_interval = max(5 * 60, interval)
+
+    def clean_sessions(self):
+        """cleanup sessions which has not been unused since a given amount of
+        time. Return the number of sessions which have been closed.
+        """
+        self.debug('cleaning http sessions')
+        session_time = self.session_time
+        closed, total = 0, 0
+        for session in self.current_sessions():
+            total += 1
+            try:
+                last_usage_time = session.cnx.check()
+            except AttributeError:
+                last_usage_time = session.mtime
+            except BadConnectionId:
+                self.close_session(session)
+                closed += 1
+            else:
+                no_use_time = (time() - last_usage_time)
+                if session.anonymous_session:
+                    if no_use_time >= self.cleanup_anon_session_time:
+                        self.close_session(session)
+                        closed += 1
+                elif session_time is not None and no_use_time >= session_time:
+                    self.close_session(session)
+                    closed += 1
+        return closed, total - closed
+
+    def current_sessions(self):
+        """return currently open sessions"""
+        raise NotImplementedError()
+
+    def get_session(self, req, sessionid):
+        """return existing session for the given session identifier"""
+        raise NotImplementedError()
+
+    def open_session(self, req):
+        """open and return a new session for the given request.
+
+        raise :exc:`cubicweb.AuthenticationError` if authentication failed
+        (no authentication info found or wrong user/password)
+        """
+        raise NotImplementedError()
+
+    def close_session(self, session):
+        """close session on logout or on invalid session detected (expired out,
+        corrupted...)
+        """
+        raise NotImplementedError()
 
 
 class InMemoryRepositorySessionManager(AbstractSessionManager):
@@ -97,8 +160,7 @@
         # XXX should properly detect missing permission / non writeable source
         # and avoid "except (RepositoryError, Unauthorized)" below
         try:
-            cnx = repoapi.ClientConnection(session)
-            with cnx:
+            with session.new_cnx() as cnx:
                 cnx.execute('SET X last_login_time NOW WHERE X eid %(x)s',
                            {'x' : session.user.eid})
                 cnx.commit()
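
The last hunk above replaces the repoapi.ClientConnection wrapper with the session's
own new_cnx() context manager, which is the 3.21 way to get a short-lived connection
bound to an existing session. The idiom in isolation (assuming `session` is an open
repository session):

    with session.new_cnx() as cnx:
        rset = cnx.execute('Any X WHERE X is CWUser')
        cnx.commit()
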
--- a/web/views/timeline.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/web/views/timeline.py	Mon Jun 22 14:27:37 2015 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -15,124 +15,20 @@
 #
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""basic support for SIMILE's timeline widgets
 
-cf. http://code.google.com/p/simile-widgets/
-"""
-
-__docformat__ = "restructuredtext en"
-
-from logilab.mtconverter import xml_escape
-from logilab.common.date import ustrftime
-
-from cubicweb.predicates import adaptable
-from cubicweb.view import EntityView, StartupView
-from cubicweb.utils import json_dumps
-
-_ = unicode
-
-class TimelineJsonView(EntityView):
-    """generates a json file to feed Timeline.loadJSON()
-    NOTE: work in progress (image_url, bubbleUrl and so on
-    should be properties of entity classes or subviews)
-    """
-    __regid__ = 'timeline-json'
-    __select__ = adaptable('ICalendarable')
-
-    binary = True
-    templatable = False
-    content_type = 'application/json'
-
-    date_fmt = '%Y/%m/%d'
-
-    def call(self):
-        events = []
-        for entity in self.cw_rset.entities():
-            event = self.build_event(entity)
-            if event is not None:
-                events.append(event)
-        timeline_data = {'dateTimeFormat': self.date_fmt,
-                         'events': events}
-        self.w(json_dumps(timeline_data))
-
-    # FIXME: those properties should be defined by the entity class
-    def onclick_url(self, entity):
-        return entity.absolute_url()
-
-    def onclick(self, entity):
-        url = self.onclick_url(entity)
-        if url:
-            return u"javascript: document.location.href='%s'" % url
-        return None
+try:
+    from cubes.timeline.views import (
+            TimelineJsonView,
+            TimelineViewMixIn,
+            TimelineView,
+            StaticTimelineView)
 
-    def build_event(self, entity):
-        """converts `entity` into a JSON object
-        {'start': '1891',
-        'end': '1915',
-        'title': 'Portrait of Horace Brodsky',
-        'description': 'by Henri Gaudier-Brzeska, French Sculptor, 1891-1915',
-        'image': 'http://imagecache2.allposters.com/images/BRGPOD/102770_b.jpg',
-        'link': 'http://www.allposters.com/-sp/Portrait-of-Horace-Brodsky-Posters_i1584413_.htm'
-        }
-        """
-        icalendarable = entity.cw_adapt_to('ICalendarable')
-        start = icalendarable.start
-        stop = icalendarable.stop
-        start = start or stop
-        if start is None and stop is None:
-            return None
-        event_data = {'start': ustrftime(start, self.date_fmt),
-                      'title': xml_escape(entity.dc_title()),
-                      'description': entity.dc_description(format='text/html'),
-                      'link': entity.absolute_url(),
-                      }
-        onclick = self.onclick(entity)
-        if onclick:
-            event_data['onclick'] = onclick
-        if stop:
-            event_data['end'] = ustrftime(stop, self.date_fmt)
-        return event_data
-
-
-class TimelineViewMixIn(object):
-    widget_class = 'TimelineWidget'
-    jsfiles = ('cubicweb.timeline-bundle.js', 'cubicweb.widgets.js',
-               'cubicweb.timeline-ext.js', 'cubicweb.ajax.js')
+except ImportError:
+    pass
+else:
+    from logilab.common.deprecation import class_moved
 
-    def render_url(self, loadurl, tlunit=None):
-        tlunit = tlunit or self._cw.form.get('tlunit')
-        self._cw.add_js(self.jsfiles)
-        self._cw.add_css('timeline-bundle.css')
-        if tlunit:
-            additional = u' cubicweb:tlunit="%s"' % tlunit
-        else:
-            additional = u''
-        self.w(u'<div class="widget" cubicweb:wdgtype="%s" '
-               u'cubicweb:loadtype="auto" cubicweb:loadurl="%s" %s >' %
-               (self.widget_class, xml_escape(loadurl),
-                additional))
-        self.w(u'</div>')
-
-
-class TimelineView(TimelineViewMixIn, EntityView):
-    """builds a cubicweb timeline widget node"""
-    __regid__ = 'timeline'
-    title = _('timeline')
-    __select__ = adaptable('ICalendarable')
-    paginable = False
-    def call(self, tlunit=None):
-        self._cw.html_headers.define_var('Timeline_urlPrefix', self._cw.datadir_url)
-        rql = self.cw_rset.printable_rql()
-        loadurl = self._cw.build_url(rql=rql, vid='timeline-json')
-        self.render_url(loadurl, tlunit)
-
-
-class StaticTimelineView(TimelineViewMixIn, StartupView):
-    """similar to `TimelineView` but loads data from a static
-    JSON file instead of one after a RQL query.
-    """
-    __regid__ = 'static-timeline'
-
-    def call(self, loadurl, tlunit=None, wdgclass=None):
-        self.widget_class = wdgclass or self.widget_class
-        self.render_url(loadurl, tlunit)
+    TimelineJsonView = class_moved(TimelineJsonView, 'TimelineJsonView')
+    TimelineViewMixIn = class_moved(TimelineViewMixIn, 'TimelineViewMixIn')
+    TimelineView = class_moved(TimelineView, 'TimelineView')
+    StaticTimelineView = class_moved(StaticTimelineView, 'StaticTimelineView')
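
The timeline views now live in the standalone timeline cube; the shim above only
keeps old import paths working, wrapped in class_moved so their use is flagged as
deprecated. In practice (assuming the timeline cube is installed, otherwise the
ImportError branch leaves this module empty):

    from cubes.timeline.views import TimelineView                      # preferred
    from cubicweb.web.views.timeline import TimelineView as OldAlias   # deprecated
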
--- a/web/views/urlpublishing.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/web/views/urlpublishing.py	Mon Jun 22 14:27:37 2015 +0200
@@ -97,7 +97,7 @@
         self.evaluators = sorted(evaluators, key=lambda x: x.priority)
 
     def process(self, req, path):
-        """Given a URL (essentialy caracterized by a path on the
+        """Given a URL (essentially characterized by a path on the
         server, but additional information may be found in the request
         object), return a publishing method identifier
         (e.g. controller) and an optional result set.
--- a/web/views/xmlrss.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/web/views/xmlrss.py	Mon Jun 22 14:27:37 2015 +0200
@@ -79,7 +79,7 @@
         self.w(u'<%s eid="%s" cwuri="%s" cwsource="%s">\n'
                % (entity.cw_etype, entity.eid, xml_escape(entity.cwuri),
                   xml_escape(source)))
-        for rschema, attrschema in entity.e_schema.attribute_definitions():
+        for rschema, attrschema in sorted(entity.e_schema.attribute_definitions()):
             attr = rschema.type
             if attr in ('eid', 'cwuri'):
                 continue
--- a/web/webconfig.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/web/webconfig.py	Mon Jun 22 14:27:37 2015 +0200
@@ -49,7 +49,7 @@
       }),
     # user web ui configuration
     ('fckeditor',
-     {'type' : 'yn', 'default': True,
+     {'type' : 'yn', 'default': False,
       'help': _('should html fields being edited using fckeditor (a HTML '
                 'WYSIWYG editor).  You should also select text/html as default '
                 'text format to actually get fckeditor.'),
@@ -124,16 +124,13 @@
           'where the cubicweb web server is listening on port 8080.',
           'group': 'main', 'level': 3,
           }),
-        ('https-deny-anonymous',
-         {'type': 'yn',
-          'default': False,
-          'help': 'Prevent anonymous user to browse through https version of '
-                  'the site (https-url). Login form will then be displayed '
-                  'until logged',
+        ('datadir-url',
+         {'type': 'string', 'default': None,
+          'help': ('base url for static data, if different from "${base-url}/data/".  '
+                   'If served from a different domain, that domain should allow '
+                   'cross-origin requests.'),
           'group': 'web',
-          'level': 2
-         }
-          ),
+          }),
         ('auth-mode',
          {'type' : 'choice',
           'choices' : ('cookie', 'http'),
@@ -392,6 +389,14 @@
             baseurl += '/'
         if not (self.repairing or self.creating):
             self.global_set_option('base-url', baseurl)
+        self.datadir_url = self['datadir-url']
+        if self.datadir_url:
+            if self.datadir_url[-1] != '/':
+                self.datadir_url += '/'
+            if self.mode != 'test':
+                self.datadir_url += '%s/' % self.instance_md5_version()
+            self.https_datadir_url = self.datadir_url
+            return
         httpsurl = self['https-url']
         data_relpath = self.data_relpath()
         if httpsurl:
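
The hunk above derives datadir_url from the new option: a trailing slash is
guaranteed, and outside test mode an instance-version segment is appended so static
assets are naturally cache-busted on upgrade. A hedged sketch of the resulting
behaviour (`config` is assumed to be a web configuration instance, the host is made
up):

    config.global_set_option('datadir-url', '//static.example.org')
    config.load_configuration()
    # in a regular instance config.datadir_url is now
    # '//static.example.org/<instance_md5_version()>/';
    # when config.mode == 'test' it is simply '//static.example.org/'
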
--- a/web/webctl.py	Mon Jun 22 12:51:28 2015 +0200
+++ b/web/webctl.py	Mon Jun 22 14:27:37 2015 +0200
@@ -46,9 +46,6 @@
         if not automatic:
             print '\n' + underline_title('Generic web configuration')
             config = self.config
-            if config['repository-uri'].startswith('pyro://') or config.pyro_enabled():
-                print '\n' + underline_title('Pyro configuration')
-                config.input_config('pyro', inputlevel)
             config.input_config('web', inputlevel)
             if ASK.confirm('Allow anonymous access ?', False):
                 config.global_set_option('anonymous-user', 'anon')
--- a/zmqclient.py	Mon Jun 22 12:51:28 2015 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,64 +0,0 @@
-# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-#
-# This file is part of CubicWeb.
-#
-# CubicWeb is free software: you can redistribute it and/or modify it under the
-# terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option)
-# any later version.
-#
-# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
-"""Source to query another RQL repository using pyro"""
-
-__docformat__ = "restructuredtext en"
-_ = unicode
-
-from functools import partial
-import zmq
-
-from cubicweb.server.cwzmq import cwproto_to_zmqaddr
-
-# XXX hack to overpass old zmq limitation that force to have
-# only one context per python process
-try:
-    from cubicweb.server.cwzmq import ctx
-except ImportError:
-    ctx = zmq.Context()
-
-class ZMQRepositoryClient(object):
-    """
-    This class delegates the overall repository stuff to a remote source.
-
-    So calling a method of this repository will result on calling the
-    corresponding method of the remote source repository.
-
-    Any raised exception on the remote source is propagated locally.
-
-    ZMQ is used as the transport layer and cPickle is used to serialize data.
-    """
-
-    def __init__(self, zmq_address):
-        """A zmq address provided here will be like
-        `zmqpickle-tcp://127.0.0.1:42000`.  W
-
-        We chop the prefix to get a real zmq address.
-        """
-        self.socket = ctx.socket(zmq.REQ)
-        self.socket.connect(cwproto_to_zmqaddr(zmq_address))
-
-    def __zmqcall__(self, name, *args, **kwargs):
-         self.socket.send_pyobj([name, args, kwargs])
-         result = self.socket.recv_pyobj()
-         if isinstance(result, BaseException):
-             raise result
-         return result
-
-    def __getattr__(self, name):
-        return partial(self.__zmqcall__, name)