[repo] use logilab.db instead of lgc.adbh/lgc.db/lgc.sqlgen/indexer, test new date extraction functions
author Sylvain Thénault <sylvain.thenault@logilab.fr>
Mon, 08 Mar 2010 17:57:29 +0100
changeset 4831 c5aec27c1bf7
parent 4829 3b79a0fc91db
child 4832 98c7760b7e9a
[repo] use logilab.db instead of lgc.adbh/lgc.db/lgc.sqlgen/indexer, test new date extraction functions
cwconfig.py
debian/control
devtools/fake.py
doc/tools/generate_modules.py
entity.py
server/__init__.py
server/checkintegrity.py
server/serverctl.py
server/sources/extlite.py
server/sources/native.py
server/sources/rql2sql.py
server/sources/storages.py
server/sqlutils.py
server/test/data/site_erudi.py
server/test/unittest_querier.py
server/test/unittest_rql2sql.py
server/test/unittest_sqlutils.py
--- a/cwconfig.py	Mon Mar 08 09:51:29 2010 +0100
+++ b/cwconfig.py	Mon Mar 08 17:57:29 2010 +0100
@@ -998,7 +998,7 @@
 
 _EXT_REGISTERED = False
 def register_stored_procedures():
-    from logilab.common.adbh import FunctionDescr
+    from logilab.db import FunctionDescr
     from rql.utils import register_function, iter_funcnode_variables
 
     global _EXT_REGISTERED
@@ -1010,8 +1010,7 @@
         supported_backends = ('postgres', 'sqlite',)
         rtype = 'String'
 
-        @classmethod
-        def st_description(cls, funcnode, mainindex, tr):
+        def st_description(self, funcnode, mainindex, tr):
             return ', '.join(sorted(term.get_description(mainindex, tr)
                                     for term in iter_funcnode_variables(funcnode)))
 
@@ -1023,6 +1022,7 @@
 
     register_function(CONCAT_STRINGS) # XXX bw compat
 
+
     class GROUP_CONCAT(CONCAT_STRINGS):
         supported_backends = ('mysql', 'postgres', 'sqlite',)
 
@@ -1033,8 +1033,7 @@
         supported_backends = ('postgres', 'sqlite',)
         rtype = 'String'
 
-        @classmethod
-        def st_description(cls, funcnode, mainindex, tr):
+        def st_description(self, funcnode, mainindex, tr):
             return funcnode.children[0].get_description(mainindex, tr)
 
     register_function(LIMIT_SIZE)
@@ -1046,7 +1045,6 @@
     register_function(TEXT_LIMIT_SIZE)
 
 
-
     class FSPATH(FunctionDescr):
         supported_backends = ('postgres', 'sqlite',)
         rtype = 'Bytes'
--- a/debian/control	Mon Mar 08 09:51:29 2010 +0100
+++ b/debian/control	Mon Mar 08 17:57:29 2010 +0100
@@ -33,7 +33,7 @@
 Conflicts: cubicweb-multisources
 Replaces: cubicweb-multisources
 Provides: cubicweb-multisources
-Depends: ${python:Depends}, cubicweb-common (= ${source:Version}), cubicweb-ctl (= ${source:Version}), python-indexer (>= 0.6.1), python-psycopg2 | python-mysqldb | python-pysqlite2
+Depends: ${python:Depends}, cubicweb-common (= ${source:Version}), cubicweb-ctl (= ${source:Version}), python-logilab-database, python-psycopg2 | python-mysqldb | python-pysqlite2
 Recommends: pyro, cubicweb-documentation (= ${source:Version})
 Description: server part of the CubicWeb framework
  CubicWeb is a semantic web application framework.
--- a/devtools/fake.py	Mon Mar 08 09:51:29 2010 +0100
+++ b/devtools/fake.py	Mon Mar 08 17:57:29 2010 +0100
@@ -7,9 +7,7 @@
 """
 __docformat__ = "restructuredtext en"
 
-from logilab.common.adbh import get_adv_func_helper
-
-from indexer import get_indexer
+from logilab.db import get_db_helper
 
 from cubicweb.req import RequestSessionBase
 from cubicweb.cwvreg import CubicWebVRegistry
@@ -200,12 +198,7 @@
 
 
 class FakeSource(object):
-    dbhelper = get_adv_func_helper('sqlite')
-    indexer = get_indexer('sqlite', 'UTF8')
-    dbhelper.fti_uid_attr = indexer.uid_attr
-    dbhelper.fti_table = indexer.table
-    dbhelper.fti_restriction_sql = indexer.restriction_sql
-    dbhelper.fti_need_distinct_query = indexer.need_distinct
+    dbhelper = get_db_helper('sqlite')
     def __init__(self, uri):
         self.uri = uri
 
--- a/doc/tools/generate_modules.py	Mon Mar 08 09:51:29 2010 +0100
+++ b/doc/tools/generate_modules.py	Mon Mar 08 17:57:29 2010 +0100
@@ -16,7 +16,7 @@
     cw_gen = ModuleGenerator('cubicweb', '../..')
     cw_gen.generate("../book/en/annexes/api_cubicweb.rst",
                     EXCLUDE_DIRS + ('cwdesklets', 'misc', 'skel', 'skeleton'))
-    for modname in ('indexer', 'logilab', 'rql', 'yams'):
+    for modname in ('logilab', 'rql', 'yams'):
         cw_gen = ModuleGenerator(modname, '../../../' + modname)
         cw_gen.generate("../book/en/annexes/api_%s.rst" % modname,
                         EXCLUDE_DIRS + ('tools',))
--- a/entity.py	Mon Mar 08 09:51:29 2010 +0100
+++ b/entity.py	Mon Mar 08 17:57:29 2010 +0100
@@ -894,12 +894,12 @@
         """used by the full text indexer to get words to index
 
         this method should only be used on the repository side since it depends
-        on the indexer package
+        on the logilab.db package
 
         :rtype: list
         :return: the list of indexable word of this entity
         """
-        from indexer.query_objects import tokenize
+        from logilab.db.fti import tokenize
         # take care to cases where we're modyfying the schema
         pending = self._cw.transaction_data.setdefault('pendingrdefs', set())
         words = []
--- a/server/__init__.py	Mon Mar 08 09:51:29 2010 +0100
+++ b/server/__init__.py	Mon Mar 08 17:57:29 2010 +0100
@@ -145,12 +145,9 @@
     # can't skip entities table even if system source doesn't support them,
     # they are used sometimes by generated sql. Keeping them empty is much
     # simpler than fixing this...
-    if sqlcnx.logged_user != source['db-user']:
-        schemasql = sqlschema(schema, driver, user=source['db-user'])
-    else:
-        schemasql = sqlschema(schema, driver)
-        #skip_entities=[str(e) for e in schema.entities()
-        #               if not repo.system_source.support_entity(str(e))])
+    schemasql = sqlschema(schema, driver)
+    #skip_entities=[str(e) for e in schema.entities()
+    #               if not repo.system_source.support_entity(str(e))])
     sqlexec(schemasql, execute, pbtitle=_title)
     sqlcursor.close()
     sqlcnx.commit()
@@ -237,8 +234,8 @@
     config.set_hooks_mode(oldmode)
 
 
-# sqlite'stored procedures have to be registered at connexion opening time
-SQL_CONNECT_HOOKS = {}
+# sqlite's stored procedures have to be registered at connection opening time
+from logilab.db import SQL_CONNECT_HOOKS
 
 # add to this set relations which should have their add security checking done
 # *BEFORE* adding the actual relation (done after by default)
--- a/server/checkintegrity.py	Mon Mar 08 09:51:29 2010 +0100
+++ b/server/checkintegrity.py	Mon Mar 08 17:57:29 2010 +0100
@@ -70,13 +70,9 @@
     # to be updated due to the reindexation
     repo = session.repo
     cursor = session.pool['system']
-    if not repo.system_source.indexer.has_fti_table(cursor):
-        from indexer import get_indexer
+    if not repo.system_source.dbhelper.has_fti_table(cursor):
         print 'no text index table'
-        indexer = get_indexer(repo.system_source.dbdriver)
-        # XXX indexer.init_fti(cursor) once index 0.7 is out
-        indexer.init_extensions(cursor)
-        cursor.execute(indexer.sql_init_fti())
+        repo.system_source.dbhelper.init_fti(cursor)
     repo.config.disabled_hooks_categories.add('metadata')
     repo.config.disabled_hooks_categories.add('integrity')
     repo.system_source.do_fti = True  # ensure full-text indexation is activated
--- a/server/serverctl.py	Mon Mar 08 09:51:29 2010 +0100
+++ b/server/serverctl.py	Mon Mar 08 17:57:29 2010 +0100
@@ -63,9 +63,11 @@
             password = getpass('password: ')
     extra_args = source.get('db-extra-arguments')
     extra = extra_args and {'extra_args': extra_args} or {}
-    return get_connection(driver, dbhost, dbname, user, password=password,
-                          port=source.get('db-port'),
-                          **extra)
+    cnx = get_connection(driver, dbhost, dbname, user, password=password,
+                         port=source.get('db-port'),
+                         **extra)
+    cnx.logged_user = logged_user
+    return cnx
 
 def system_source_cnx(source, dbms_system_base=False,
                       special_privs='CREATE/DROP DATABASE', verbose=True):
@@ -75,8 +77,8 @@
     create/drop the instance database)
     """
     if dbms_system_base:
-        from logilab.common.adbh import get_adv_func_helper
-        system_db = get_adv_func_helper(source['db-driver']).system_database()
+        from logilab.db import get_db_helper
+        system_db = get_db_helper(source['db-driver']).system_database()
         return source_cnx(source, system_db, special_privs=special_privs, verbose=verbose)
     return source_cnx(source, special_privs=special_privs, verbose=verbose)
 
@@ -85,11 +87,11 @@
     or a database
     """
     import logilab.common as lgp
-    from logilab.common.adbh import get_adv_func_helper
+    from logilab.db import get_db_helper
     lgp.USE_MX_DATETIME = False
     special_privs = ''
     driver = source['db-driver']
-    helper = get_adv_func_helper(driver)
+    helper = get_db_helper(driver)
     if user is not None and helper.users_support:
         special_privs += '%s USER' % what
     if db is not None:
@@ -202,10 +204,10 @@
 
     def cleanup(self):
         """remove instance's configuration and database"""
-        from logilab.common.adbh import get_adv_func_helper
+        from logilab.db import get_db_helper
         source = self.config.sources()['system']
         dbname = source['db-name']
-        helper = get_adv_func_helper(source['db-driver'])
+        helper = get_db_helper(source['db-driver'])
         if ASK.confirm('Delete database %s ?' % dbname):
             user = source['db-user'] or None
             cnx = _db_sys_cnx(source, 'DROP DATABASE', user=user)
@@ -285,8 +287,7 @@
         )
     def run(self, args):
         """run the command with its specific arguments"""
-        from logilab.common.adbh import get_adv_func_helper
-        from indexer import get_indexer
+        from logilab.db import get_db_helper
         verbose = self.get('verbose')
         automatic = self.get('automatic')
         appid = pop_arg(args, msg='No instance specified !')
@@ -295,7 +296,7 @@
         dbname = source['db-name']
         driver = source['db-driver']
         create_db = self.config.create_db
-        helper = get_adv_func_helper(driver)
+        helper = get_db_helper(driver)
         if driver == 'sqlite':
             if os.path.exists(dbname) and automatic or \
                    ASK.confirm('Database %s already exists -- do you want to drop it ?' % dbname):
@@ -330,8 +331,7 @@
                 raise
         cnx = system_source_cnx(source, special_privs='LANGUAGE C', verbose=verbose)
         cursor = cnx.cursor()
-        indexer = get_indexer(driver)
-        indexer.init_extensions(cursor)
+        helper.init_fti_extensions(cursor)
         # postgres specific stuff
         if driver == 'postgres':
             # install plpythonu/plpgsql language if not installed by the cube
--- a/server/sources/extlite.py	Mon Mar 08 09:51:29 2010 +0100
+++ b/server/sources/extlite.py	Mon Mar 08 17:57:29 2010 +0100
@@ -20,12 +20,6 @@
         self.source = source
         self._cnx = None
 
-    @property
-    def logged_user(self):
-        if self._cnx is None:
-            self._cnx = self.source._sqlcnx
-        return self._cnx.logged_user
-
     def cursor(self):
         if self._cnx is None:
             self._cnx = self.source._sqlcnx
--- a/server/sources/native.py	Mon Mar 08 09:51:29 2010 +0100
+++ b/server/sources/native.py	Mon Mar 08 17:57:29 2010 +0100
@@ -21,10 +21,8 @@
 from logilab.common.cache import Cache
 from logilab.common.decorators import cached, clear_cache
 from logilab.common.configuration import Method
-from logilab.common.adbh import get_adv_func_helper
 from logilab.common.shellutils import getlogin
-
-from indexer import get_indexer
+from logilab.db import get_db_helper
 
 from cubicweb import UnknownEid, AuthenticationError, Binary, server
 from cubicweb.cwconfig import CubicWebNoAppConfiguration
@@ -151,16 +149,9 @@
                                 *args, **kwargs)
         # sql generator
         self._rql_sqlgen = self.sqlgen_class(appschema, self.dbhelper,
-                                             self.encoding, ATTR_MAP.copy())
+                                             ATTR_MAP.copy())
         # full text index helper
         self.do_fti = not repo.config['delay-full-text-indexation']
-        if self.do_fti:
-            self.indexer = get_indexer(self.dbdriver, self.encoding)
-            # XXX should go away with logilab.db
-            self.dbhelper.fti_uid_attr = self.indexer.uid_attr
-            self.dbhelper.fti_table = self.indexer.table
-            self.dbhelper.fti_restriction_sql = self.indexer.restriction_sql
-            self.dbhelper.fti_need_distinct_query = self.indexer.need_distinct
         # sql queries cache
         self._cache = Cache(repo.config['rql-cache-size'])
         self._temp_table_data = {}
@@ -207,7 +198,7 @@
         pool.pool_set()
         # check full text index availibility
         if self.do_fti:
-            if not self.indexer.has_fti_table(pool['system']):
+            if not self.dbhelper.has_fti_table(pool['system']):
                 if not self.repo.config.creating:
                     self.critical('no text index table')
                 self.do_fti = False
@@ -321,8 +312,7 @@
         assert isinstance(sql, basestring), repr(sql)
         try:
             cursor = self.doexec(session, sql, args)
-        except (self.dbapi_module.OperationalError,
-                self.dbapi_module.InterfaceError):
+        except (self.OperationalError, self.InterfaceError):
             # FIXME: better detection of deconnection pb
             self.info("request failed '%s' ... retry with a new cursor", sql)
             session.pool.reconnect(self)
@@ -342,7 +332,7 @@
             prefix='ON THE FLY temp data insertion into %s from' % table)
         # generate sql queries if we are able to do so
         sql, query_args = self._rql_sqlgen.generate(union, args, varmap)
-        query = 'INSERT INTO %s %s' % (table, sql.encode(self.encoding))
+        query = 'INSERT INTO %s %s' % (table, sql.encode(self._dbencoding))
         self.doexec(session, query, self.merge_args(args, query_args))
 
     def manual_insert(self, results, table, session):
@@ -359,7 +349,7 @@
             row = tuple(row)
             for index, cell in enumerate(row):
                 if isinstance(cell, Binary):
-                    cell = self.binary(cell.getvalue())
+                    cell = self._binary(cell.getvalue())
                 kwargs[str(index)] = cell
             kwargs_list.append(kwargs)
         self.doexecmany(session, query, kwargs_list)
@@ -614,7 +604,7 @@
         index
         """
         try:
-            self.indexer.cursor_unindex_object(eid, session.pool['system'])
+            self.dbhelper.cursor_unindex_object(eid, session.pool['system'])
         except Exception: # let KeyboardInterrupt / SystemExit propagate
             self.exception('error while unindexing %s', eid)
 
@@ -625,8 +615,8 @@
         try:
             # use cursor_index_object, not cursor_reindex_object since
             # unindexing done in the FTIndexEntityOp
-            self.indexer.cursor_index_object(entity.eid, entity,
-                                             session.pool['system'])
+            self.dbhelper.cursor_index_object(entity.eid, entity,
+                                              session.pool['system'])
         except Exception: # let KeyboardInterrupt / SystemExit propagate
             self.exception('error while reindexing %s', entity)
 
@@ -659,7 +649,7 @@
 
 
 def sql_schema(driver):
-    helper = get_adv_func_helper(driver)
+    helper = get_db_helper(driver)
     tstamp_col_type = helper.TYPE_MAPPING['Datetime']
     schema = """
 /* Create the repository's system database */
@@ -692,7 +682,7 @@
 
 
 def sql_drop_schema(driver):
-    helper = get_adv_func_helper(driver)
+    helper = get_db_helper(driver)
     return """
 %s
 DROP TABLE entities;
--- a/server/sources/rql2sql.py	Mon Mar 08 09:51:29 2010 +0100
+++ b/server/sources/rql2sql.py	Mon Mar 08 17:57:29 2010 +0100
@@ -332,10 +332,10 @@
     protected by a lock
     """
 
-    def __init__(self, schema, dbms_helper, dbencoding='UTF-8', attrmap=None):
+    def __init__(self, schema, dbms_helper, attrmap=None):
         self.schema = schema
         self.dbms_helper = dbms_helper
-        self.dbencoding = dbencoding
+        self.dbencoding = dbms_helper.dbencoding
         self.keyword_map = {'NOW' : self.dbms_helper.sql_current_timestamp,
                             'TODAY': self.dbms_helper.sql_current_date,
                             }
@@ -977,10 +977,9 @@
 
     def visit_function(self, func):
         """generate SQL name for a function"""
-        # function_description will check function is supported by the backend
-        sqlname = self.dbms_helper.func_sqlname(func.name)
-        return '%s(%s)' % (sqlname, ', '.join(c.accept(self)
-                                              for c in func.children))
+        # func_as_sql will check function is supported by the backend
+        return self.dbms_helper.func_as_sql(func.name,
+                                            [c.accept(self) for c in func.children])
 
     def visit_constant(self, constant):
         """generate SQL name for a constant"""
--- a/server/sources/storages.py	Mon Mar 08 09:51:29 2010 +0100
+++ b/server/sources/storages.py	Mon Mar 08 17:57:29 2010 +0100
@@ -92,9 +92,8 @@
         cu = sysource.doexec(entity._cw,
                              'SELECT cw_%s FROM cw_%s WHERE cw_eid=%s' % (
                                  attr, entity.__regid__, entity.eid))
-        dbmod = sysource.dbapi_module
-        return dbmod.process_value(cu.fetchone()[0], [None, dbmod.BINARY],
-                                   binarywrap=str)
+        return sysource._process_value(
+            cu.fetchone()[0], [None, sysource.dbhelper.dbapi_module.BINARY], binarywrap=str)
 
 
 class AddFileOp(Operation):
--- a/server/sqlutils.py	Mon Mar 08 09:51:29 2010 +0100
+++ b/server/sqlutils.py	Mon Mar 08 17:57:29 2010 +0100
@@ -11,21 +11,17 @@
 import subprocess
 from datetime import datetime, date
 
-import logilab.common as lgc
-from logilab.common import db
+from logilab import db, common as lgc
 from logilab.common.shellutils import ProgressBar
-from logilab.common.adbh import get_adv_func_helper
-from logilab.common.sqlgen import SQLGenerator
 from logilab.common.date import todate, todatetime
-
-from indexer import get_indexer
+from logilab.db.sqlgen import SQLGenerator
 
 from cubicweb import Binary, ConfigurationError
 from cubicweb.uilib import remove_html_tags
 from cubicweb.schema import PURE_VIRTUAL_RTYPES
 from cubicweb.server import SQL_CONNECT_HOOKS
 from cubicweb.server.utils import crypt_password
-
+from rql.utils import RQL_FUNCTIONS_REGISTRY
 
 lgc.USE_MX_DATETIME = False
 SQL_PREFIX = 'cw_'
@@ -77,8 +73,8 @@
     w(native.grant_schema(user, set_owner))
     w('')
     if text_index:
-        indexer = get_indexer(driver)
-        w(indexer.sql_grant_user(user))
+        dbhelper = db.get_db_helper(driver)
+        w(dbhelper.sql_grant_user_on_fti(user))
         w('')
     w(grant_schema(schema, user, set_owner, skip_entities=skip_entities, prefix=SQL_PREFIX))
     return '\n'.join(output)
@@ -96,11 +92,10 @@
     w = output.append
     w(native.sql_schema(driver))
     w('')
+    dbhelper = db.get_db_helper(driver)
     if text_index:
-        indexer = get_indexer(driver)
-        w(indexer.sql_init_fti())
+        w(dbhelper.sql_init_fti())
         w('')
-    dbhelper = get_adv_func_helper(driver)
     w(schema2sql(dbhelper, schema, prefix=SQL_PREFIX,
                  skip_entities=skip_entities, skip_relations=skip_relations))
     if dbhelper.users_support and user:
@@ -120,8 +115,8 @@
     w(native.sql_drop_schema(driver))
     w('')
     if text_index:
-        indexer = get_indexer(driver)
-        w(indexer.sql_drop_fti())
+        dbhelper = db.get_db_helper(driver)
+        w(dbhelper.sql_drop_fti())
         w('')
     w(dropschema2sql(schema, prefix=SQL_PREFIX,
                      skip_entities=skip_entities,
@@ -137,55 +132,42 @@
     def __init__(self, source_config):
         try:
             self.dbdriver = source_config['db-driver'].lower()
-            self.dbname = source_config['db-name']
+            dbname = source_config['db-name']
         except KeyError:
             raise ConfigurationError('missing some expected entries in sources file')
-        self.dbhost = source_config.get('db-host')
+        dbhost = source_config.get('db-host')
         port = source_config.get('db-port')
-        self.dbport = port and int(port) or None
-        self.dbuser = source_config.get('db-user')
-        self.dbpasswd = source_config.get('db-password')
-        self.encoding = source_config.get('db-encoding', 'UTF-8')
-        self.dbapi_module = db.get_dbapi_compliant_module(self.dbdriver)
-        self.dbdriver_extra_args = source_config.get('db-extra-arguments')
-        self.binary = self.dbapi_module.Binary
-        self.dbhelper = self.dbapi_module.adv_func_helper
+        dbport = port and int(port) or None
+        dbuser = source_config.get('db-user')
+        dbpassword = source_config.get('db-password')
+        dbencoding = source_config.get('db-encoding', 'UTF-8')
+        dbextraargs = source_config.get('db-extra-arguments')
+        self.dbhelper = db.get_db_helper(self.dbdriver)
+        self.dbhelper.record_connection_info(dbname, dbhost, dbport, dbuser,
+                                             dbpassword, dbextraargs,
+                                             dbencoding)
         self.sqlgen = SQLGenerator()
+        # copy back some commonly accessed attributes
+        dbapi_module = self.dbhelper.dbapi_module
+        self.OperationalError = dbapi_module.OperationalError
+        self.InterfaceError = dbapi_module.InterfaceError
+        self._binary = dbapi_module.Binary
+        self._process_value = dbapi_module.process_value
+        self._dbencoding = dbencoding
 
-    def get_connection(self, user=None, password=None):
+    def get_connection(self):
         """open and return a connection to the database"""
-        if user or self.dbuser:
-            self.info('connecting to %s@%s for user %s', self.dbname,
-                      self.dbhost or 'localhost', user or self.dbuser)
-        else:
-            self.info('connecting to %s@%s', self.dbname,
-                      self.dbhost or 'localhost')
-        extra = {}
-        if self.dbdriver_extra_args:
-            extra = {'extra_args': self.dbdriver_extra_args}
-        cnx = self.dbapi_module.connect(self.dbhost, self.dbname,
-                                        user or self.dbuser,
-                                        password or self.dbpasswd,
-                                        port=self.dbport,
-                                        **extra)
-        init_cnx(self.dbdriver, cnx)
-        #self.dbapi_module.type_code_test(cnx.cursor())
-        return cnx
+        return self.dbhelper.get_connection()
 
     def backup_to_file(self, backupfile):
-        for cmd in self.dbhelper.backup_commands(self.dbname, self.dbhost,
-                                                 self.dbuser, backupfile,
-                                                 dbport=self.dbport,
+        for cmd in self.dbhelper.backup_commands(backupfile,
                                                  keepownership=False):
             if _run_command(cmd):
                 if not confirm('   [Failed] Continue anyway?', default='n'):
                     raise Exception('Failed command: %s' % cmd)
 
     def restore_from_file(self, backupfile, confirm, drop=True):
-        for cmd in self.dbhelper.restore_commands(self.dbname, self.dbhost,
-                                                  self.dbuser, backupfile,
-                                                  self.encoding,
-                                                  dbport=self.dbport,
+        for cmd in self.dbhelper.restore_commands(backupfile,
                                                   keepownership=False,
                                                   drop=drop):
             if _run_command(cmd):
@@ -198,7 +180,7 @@
             for key, val in args.iteritems():
                 # convert cubicweb binary into db binary
                 if isinstance(val, Binary):
-                    val = self.binary(val.getvalue())
+                    val = self._binary(val.getvalue())
                 newargs[key] = val
             # should not collide
             newargs.update(query_args)
@@ -208,10 +190,12 @@
     def process_result(self, cursor):
         """return a list of CubicWeb compliant values from data in the given cursor
         """
+        # begin bind to locals for optimization
         descr = cursor.description
-        encoding = self.encoding
-        process_value = self.dbapi_module.process_value
+        encoding = self._dbencoding
+        process_value = self._process_value
         binary = Binary
+        # /end
         results = cursor.fetchall()
         for i, line in enumerate(results):
             result = []
@@ -242,14 +226,14 @@
                         value = value.getvalue()
                     else:
                         value = crypt_password(value)
-                    value = self.binary(value)
+                    value = self._binary(value)
                 # XXX needed for sqlite but I don't think it is for other backends
                 elif atype == 'Datetime' and isinstance(value, date):
                     value = todatetime(value)
                 elif atype == 'Date' and isinstance(value, datetime):
                     value = todate(value)
                 elif isinstance(value, Binary):
-                    value = self.binary(value.getvalue())
+                    value = self._binary(value.getvalue())
             attrs[SQL_PREFIX+str(attr)] = value
         return attrs
 
@@ -259,12 +243,8 @@
 set_log_methods(SQLAdapterMixIn, getLogger('cubicweb.sqladapter'))
 
 def init_sqlite_connexion(cnx):
-    # XXX should not be publicly exposed
-    #def comma_join(strings):
-    #    return ', '.join(strings)
-    #cnx.create_function("COMMA_JOIN", 1, comma_join)
 
-    class concat_strings(object):
+    class group_concat(object):
         def __init__(self):
             self.values = []
         def step(self, value):
@@ -272,10 +252,7 @@
                 self.values.append(value)
         def finalize(self):
             return ', '.join(self.values)
-    # renamed to GROUP_CONCAT in cubicweb 2.45, keep old name for bw compat for
-    # some time
-    cnx.create_aggregate("CONCAT_STRINGS", 1, concat_strings)
-    cnx.create_aggregate("GROUP_CONCAT", 1, concat_strings)
+    cnx.create_aggregate("GROUP_CONCAT", 1, group_concat)
 
     def _limit_size(text, maxsize, format='text/plain'):
         if len(text) < maxsize:
@@ -293,9 +270,9 @@
     def limit_size2(text, maxsize):
         return _limit_size(text, maxsize)
     cnx.create_function("TEXT_LIMIT_SIZE", 2, limit_size2)
+
     import yams.constraints
-    if hasattr(yams.constraints, 'patch_sqlite_decimal'):
-        yams.constraints.patch_sqlite_decimal()
+    yams.constraints.patch_sqlite_decimal()
 
     def fspath(eid, etype, attr):
         try:
@@ -320,10 +297,5 @@
                 raise
     cnx.create_function('_fsopen', 1, _fsopen)
 
-
 sqlite_hooks = SQL_CONNECT_HOOKS.setdefault('sqlite', [])
 sqlite_hooks.append(init_sqlite_connexion)
-
-def init_cnx(driver, cnx):
-    for hook in SQL_CONNECT_HOOKS.get(driver, ()):
-        hook(cnx)
--- a/server/test/data/site_erudi.py	Mon Mar 08 09:51:29 2010 +0100
+++ b/server/test/data/site_erudi.py	Mon Mar 08 17:57:29 2010 +0100
@@ -5,7 +5,9 @@
 :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
 :license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
 """
-from logilab.common.adbh import FunctionDescr
+
+from logilab.db import FunctionDescr
+from logilab.db.sqlite import register_sqlite_pyfunc
 from rql.utils import register_function
 
 try:
@@ -13,15 +15,9 @@
         supported_backends = ('sqlite',)
 
     register_function(DUMB_SORT)
-
-
-    def init_sqlite_connexion(cnx):
-        def dumb_sort(something):
-            return something
-        cnx.create_function("DUMB_SORT", 1, dumb_sort)
-
-    from cubicweb.server import sqlutils
-    sqlutils.SQL_CONNECT_HOOKS['sqlite'].append(init_sqlite_connexion)
+    def dumb_sort(something):
+        return something
+    register_sqlite_pyfunc(dumb_sort)
 except:
     # already registered
     pass
--- a/server/test/unittest_querier.py	Mon Mar 08 09:51:29 2010 +0100
+++ b/server/test/unittest_querier.py	Mon Mar 08 17:57:29 2010 +0100
@@ -35,7 +35,7 @@
 SQL_CONNECT_HOOKS['sqlite'].append(init_sqlite_connexion)
 
 
-from logilab.common.adbh import _GenericAdvFuncHelper
+from logilab.db import _GenericAdvFuncHelper
 TYPEMAP = _GenericAdvFuncHelper.TYPE_MAPPING
 
 class MakeSchemaTC(TestCase):
@@ -397,6 +397,18 @@
         rset = self.execute('Note X WHERE NOT Y evaluee X')
         self.assertEquals(len(rset.rows), 1, rset.rows)
 
+    def test_select_date_extraction(self):
+        self.execute("INSERT Personne X: X nom 'foo', X datenaiss %(d)s",
+                     {'d': datetime(2001, 2,3, 12,13)})
+        test_data = [('YEAR', 2001), ('MONTH', 2), ('DAY', 3),
+                     ('HOUR', 12), ('MINUTE', 13)]
+        for funcname, result in test_data:
+            rset = self.execute('Any %s(D) WHERE X is Personne, X datenaiss D'
+                                % funcname)
+            self.assertEquals(len(rset.rows), 1)
+            self.assertEquals(rset.rows[0][0], result)
+            self.assertEquals(rset.description, [('Int',)])
+
     def test_select_aggregat_count(self):
         rset = self.execute('Any COUNT(X)')
         self.assertEquals(len(rset.rows), 1)
@@ -430,7 +442,7 @@
         self.assertEquals(rset.description, [('Int',)])
 
     def test_select_custom_aggregat_concat_string(self):
-        rset = self.execute('Any CONCAT_STRINGS(N) WHERE X is CWGroup, X name N')
+        rset = self.execute('Any GROUP_CONCAT(N) WHERE X is CWGroup, X name N')
         self.failUnless(rset)
         self.failUnlessEqual(sorted(rset[0][0].split(', ')), ['guests', 'managers',
                                                              'owners', 'users'])
--- a/server/test/unittest_rql2sql.py	Mon Mar 08 09:51:29 2010 +0100
+++ b/server/test/unittest_rql2sql.py	Mon Mar 08 17:57:29 2010 +0100
@@ -13,7 +13,6 @@
 from logilab.common.testlib import TestCase, unittest_main, mock_object
 
 from rql import BadRQLQuery
-from indexer import get_indexer
 
 #from cubicweb.server.sources.native import remove_unused_solutions
 from cubicweb.server.sources.rql2sql import SQLGenerator, remove_unused_solutions
@@ -1072,7 +1071,7 @@
 WHERE rel_is0.eid_to=2'''),
 
     ]
-from logilab.common.adbh import ADV_FUNC_HELPER_DIRECTORY
+from logilab.db import get_db_helper
 
 class CWRQLTC(RQLGeneratorTC):
     schema = schema
@@ -1106,12 +1105,7 @@
     #capture = True
     def setUp(self):
         RQLGeneratorTC.setUp(self)
-        indexer = get_indexer('postgres', 'utf8')
-        dbms_helper = ADV_FUNC_HELPER_DIRECTORY['postgres']
-        dbms_helper.fti_uid_attr = indexer.uid_attr
-        dbms_helper.fti_table = indexer.table
-        dbms_helper.fti_restriction_sql = indexer.restriction_sql
-        dbms_helper.fti_need_distinct_query = indexer.need_distinct
+        dbms_helper = get_db_helper('postgres')
         self.o = SQLGenerator(schema, dbms_helper)
 
     def _norm_sql(self, sql):
@@ -1212,6 +1206,13 @@
 FROM cw_CWUser AS _X
 WHERE _X.cw_login IS NULL''')
 
+
+    def test_date_extraction(self):
+        self._check("Any MONTH(D) WHERE P is Personne, P creation_date D",
+                    '''SELECT CAST(EXTRACT(MONTH from _P.cw_creation_date) AS INTEGER)
+FROM cw_Personne AS _P''')
+
+
     def test_parser_parse(self):
         for t in self._parse(PARSER):
             yield t
@@ -1409,17 +1410,17 @@
 
     def setUp(self):
         RQLGeneratorTC.setUp(self)
-        indexer = get_indexer('sqlite', 'utf8')
-        dbms_helper = ADV_FUNC_HELPER_DIRECTORY['sqlite']
-        dbms_helper.fti_uid_attr = indexer.uid_attr
-        dbms_helper.fti_table = indexer.table
-        dbms_helper.fti_restriction_sql = indexer.restriction_sql
-        dbms_helper.fti_need_distinct_query = indexer.need_distinct
+        dbms_helper = get_db_helper('sqlite')
         self.o = SQLGenerator(schema, dbms_helper)
 
     def _norm_sql(self, sql):
         return sql.strip().replace(' ILIKE ', ' LIKE ').replace('\nINTERSECT ALL\n', '\nINTERSECT\n')
 
+    def test_date_extraction(self):
+        self._check("Any MONTH(D) WHERE P is Personne, P creation_date D",
+                    '''SELECT MONTH(_P.cw_creation_date)
+FROM cw_Personne AS _P''')
+
     def test_union(self):
         for t in self._parse((
             ('(Any N ORDERBY 1 WHERE X name N, X is State)'
@@ -1517,12 +1518,7 @@
 
     def setUp(self):
         RQLGeneratorTC.setUp(self)
-        indexer = get_indexer('mysql', 'utf8')
-        dbms_helper = ADV_FUNC_HELPER_DIRECTORY['mysql']
-        dbms_helper.fti_uid_attr = indexer.uid_attr
-        dbms_helper.fti_table = indexer.table
-        dbms_helper.fti_restriction_sql = indexer.restriction_sql
-        dbms_helper.fti_need_distinct_query = indexer.need_distinct
+        dbms_helper = get_db_helper('mysql')
         self.o = SQLGenerator(schema, dbms_helper)
 
     def _norm_sql(self, sql):
@@ -1537,6 +1533,11 @@
             latest = firstword
         return '\n'.join(newsql)
 
+    def test_date_extraction(self):
+        self._check("Any MONTH(D) WHERE P is Personne, P creation_date D",
+                    '''SELECT EXTRACT(MONTH from _P.cw_creation_date)
+FROM cw_Personne AS _P''')
+
     def test_from_clause_needed(self):
         queries = [("Any 1 WHERE EXISTS(T is CWGroup, T name 'managers')",
                     '''SELECT 1
--- a/server/test/unittest_sqlutils.py	Mon Mar 08 09:51:29 2010 +0100
+++ b/server/test/unittest_sqlutils.py	Mon Mar 08 17:57:29 2010 +0100
@@ -20,13 +20,13 @@
 
     def test_init(self):
         o = SQLAdapterMixIn(BASE_CONFIG)
-        self.assertEquals(o.encoding, 'UTF-8')
+        self.assertEquals(o.dbhelper.dbencoding, 'UTF-8')
 
     def test_init_encoding(self):
         config = BASE_CONFIG.copy()
         config['db-encoding'] = 'ISO-8859-1'
         o = SQLAdapterMixIn(config)
-        self.assertEquals(o.encoding, 'ISO-8859-1')
+        self.assertEquals(o.dbhelper.dbencoding, 'ISO-8859-1')
 
 if __name__ == '__main__':
     unittest_main()