--- a/server/sources/extlite.py Sun Jun 16 23:36:06 2013 +0200
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,302 +0,0 @@
-# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-#
-# This file is part of CubicWeb.
-#
-# CubicWeb is free software: you can redistribute it and/or modify it under the
-# terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option)
-# any later version.
-#
-# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
-"""provide an abstract class for external sources using a sqlite database helper
-"""
-
-__docformat__ = "restructuredtext en"
-
-
-from os.path import join, exists
-
-from cubicweb import server
-from cubicweb.server.sqlutils import SQL_PREFIX, SQLAdapterMixIn, sqlexec
-from cubicweb.server.sources import native, rql2sql
-from cubicweb.server.sources import AbstractSource, dbg_st_search, dbg_results
-
-class ConnectionWrapper(object):
- def __init__(self, source=None):
- self.source = source
- self._cnx = None
-
- def cursor(self):
- if self._cnx is None:
- self._cnx = self.source._sqlcnx
- if server.DEBUG & server.DBG_SQL:
- print 'sql cnx OPEN', self._cnx
- return self._cnx.cursor()
-
- def commit(self):
- if self._cnx is not None:
- if server.DEBUG & (server.DBG_SQL | server.DBG_RQL):
- print 'sql cnx COMMIT', self._cnx
- self._cnx.commit()
-
- def rollback(self):
- if self._cnx is not None:
- if server.DEBUG & (server.DBG_SQL | server.DBG_RQL):
- print 'sql cnx ROLLBACK', self._cnx
- self._cnx.rollback()
-
- def close(self):
- if self._cnx is not None:
- if server.DEBUG & server.DBG_SQL:
- print 'sql cnx CLOSE', self._cnx
- self._cnx.close()
- self._cnx = None
-
-
-class SQLiteAbstractSource(AbstractSource):
- """an abstract class for external sources using a sqlite database helper
- """
- sqlgen_class = rql2sql.SQLGenerator
- @classmethod
- def set_nonsystem_types(cls):
- # those entities are only in this source, we don't want them in the
- # system source
- for etype in cls.support_entities:
- native.NONSYSTEM_ETYPES.add(etype)
- for rtype in cls.support_relations:
- native.NONSYSTEM_RELATIONS.add(rtype)
-
- options = (
- ('helper-db-path',
- {'type' : 'string',
- 'default': None,
- 'help': 'path to the sqlite database file used to do queries on the \
-repository.',
- 'level': 2,
- }),
- )
-
- def __init__(self, repo, appschema, source_config, *args, **kwargs):
-        # the helper db is used to ease querying and will store everything but
-        # the actual file content
- dbpath = source_config.get('helper-db-path')
- if dbpath is None:
- dbpath = join(repo.config.appdatahome,
- '%(uri)s.sqlite' % source_config)
- self.dbpath = dbpath
- self.sqladapter = SQLAdapterMixIn({'db-driver': 'sqlite',
- 'db-name': dbpath})
- # those attributes have to be initialized before ancestor's __init__
- # which will call set_schema
- self._need_sql_create = not exists(dbpath)
- self._need_full_import = self._need_sql_create
- AbstractSource.__init__(self, repo, appschema, source_config,
- *args, **kwargs)
-
- def backup(self, backupfile, confirm):
- """method called to create a backup of the source's data"""
- self.close_source_connections()
- try:
- self.sqladapter.backup_to_file(backupfile, confirm)
- finally:
- self.open_source_connections()
-
- def restore(self, backupfile, confirm, drop):
- """method called to restore a backup of source's data"""
- self.close_source_connections()
- try:
- self.sqladapter.restore_from_file(backupfile, confirm, drop)
- finally:
- self.open_source_connections()
-
- @property
- def _sqlcnx(self):
- # XXX: sqlite connections can only be used in the same thread, so
- # create a new one each time necessary. If it appears to be time
- # consuming, find another way
- return self.sqladapter.get_connection()
-
- def _is_schema_complete(self):
- for etype in self.support_entities:
- if not etype in self.schema:
- self.warning('not ready to generate %s database, %s support missing from schema',
- self.uri, etype)
- return False
- for rtype in self.support_relations:
- if not rtype in self.schema:
- self.warning('not ready to generate %s database, %s support missing from schema',
- self.uri, rtype)
- return False
- return True
-
- def _create_database(self):
- from yams.schema2sql import eschema2sql, rschema2sql
- from cubicweb.toolsutils import restrict_perms_to_user
- self.warning('initializing sqlite database for %s source' % self.uri)
- cnx = self._sqlcnx
- cu = cnx.cursor()
- schema = self.schema
- for etype in self.support_entities:
- eschema = schema.eschema(etype)
- createsqls = eschema2sql(self.sqladapter.dbhelper, eschema,
- skip_relations=('data',), prefix=SQL_PREFIX)
- sqlexec(createsqls, cu, withpb=False)
- for rtype in self.support_relations:
- rschema = schema.rschema(rtype)
- if not rschema.inlined:
- sqlexec(rschema2sql(rschema), cu, withpb=False)
- cnx.commit()
- cnx.close()
- self._need_sql_create = False
- if self.repo.config['uid']:
- from logilab.common.shellutils import chown
- # database file must be owned by the uid of the server process
- self.warning('set %s as owner of the database file',
- self.repo.config['uid'])
- chown(self.dbpath, self.repo.config['uid'])
- restrict_perms_to_user(self.dbpath, self.info)
-
- def set_schema(self, schema):
- super(SQLiteAbstractSource, self).set_schema(schema)
- if self._need_sql_create and self._is_schema_complete() and self.dbpath:
- self._create_database()
- self.rqlsqlgen = self.sqlgen_class(schema, self.sqladapter.dbhelper)
-
- def get_connection(self):
- return ConnectionWrapper(self)
-
- def check_connection(self, cnx):
- """check connection validity, return None if the connection is still valid
- else a new connection (called when the connections set holding the given connection is
- being attached to a session)
-
- always return the connection to reset eventually cached cursor
- """
- return cnx
-
- def cnxset_freed(self, cnx):
- """the connections set holding the given connection is being freed from its current
- attached session: release the connection lock if the connection wrapper
- has a connection set
- """
- # reset _cnx to ensure next thread using cnx will get a new
- # connection
- cnx.close()
-
- def syntax_tree_search(self, session, union, args=None, cachekey=None,
- varmap=None):
- """return result from this source for a rql query (actually from a rql
- syntax tree and a solution dictionary mapping each used variable to a
- possible type). If cachekey is given, the query necessary to fetch the
- results (but not the results themselves) may be cached using this key.
- """
- if self._need_sql_create:
- return []
- assert dbg_st_search(self.uri, union, varmap, args, cachekey)
- sql, qargs, cbs = self.rqlsqlgen.generate(union, args)
- args = self.sqladapter.merge_args(args, qargs)
- cursor = self.doexec(session, sql, args)
- results = self.sqladapter.process_result(cursor, cbs)
- assert dbg_results(results)
- return results
-
- def local_add_entity(self, session, entity):
- """insert the entity in the local database.
-
-        This is not provided as the add_entity implementation since sources
-        usually don't want to simply do this; add_entity raises NotImplementedError
-        instead, and source implementors may call this method when necessary.
- """
- attrs = self.sqladapter.preprocess_entity(entity)
- sql = self.sqladapter.sqlgen.insert(SQL_PREFIX + str(entity.e_schema), attrs)
- self.doexec(session, sql, attrs)
-
- def add_entity(self, session, entity):
- """add a new entity to the source"""
- raise NotImplementedError()
-
- def local_update_entity(self, session, entity, attrs=None):
- """update an entity in the source
-
-        This is not provided as the update_entity implementation since sources
-        usually don't want to simply do this; update_entity raises NotImplementedError
-        instead, and source implementors may call this method when necessary.
- """
- if attrs is None:
- attrs = self.sqladapter.preprocess_entity(entity)
- sql = self.sqladapter.sqlgen.update(SQL_PREFIX + str(entity.e_schema),
- attrs, [SQL_PREFIX + 'eid'])
- self.doexec(session, sql, attrs)
-
- def update_entity(self, session, entity):
- """update an entity in the source"""
- raise NotImplementedError()
-
- def delete_entity(self, session, entity):
- """delete an entity from the source
-
-        this does not delete a file from the underlying repository (e.g. svn), it
-        deletes entities from the source. Its main usage is to delete repository
-        content when a Repository entity is deleted.
- """
- attrs = {'cw_eid': entity.eid}
- sql = self.sqladapter.sqlgen.delete(SQL_PREFIX + entity.cw_etype, attrs)
- self.doexec(session, sql, attrs)
-
- def local_add_relation(self, session, subject, rtype, object):
- """add a relation to the source
-
-        This is not provided as the add_relation implementation since sources
-        usually don't want to simply do this; add_relation raises NotImplementedError
-        instead, and source implementors may call this method when necessary.
- """
- attrs = {'eid_from': subject, 'eid_to': object}
- sql = self.sqladapter.sqlgen.insert('%s_relation' % rtype, attrs)
- self.doexec(session, sql, attrs)
-
- def add_relation(self, session, subject, rtype, object):
- """add a relation to the source"""
- raise NotImplementedError()
-
- def delete_relation(self, session, subject, rtype, object):
- """delete a relation from the source"""
- rschema = self.schema.rschema(rtype)
- if rschema.inlined:
- if subject in session.transaction_data.get('pendingeids', ()):
- return
- table = SQL_PREFIX + session.describe(subject)[0]
- column = SQL_PREFIX + rtype
- sql = 'UPDATE %s SET %s=NULL WHERE %seid=%%(eid)s' % (table, column, SQL_PREFIX)
- attrs = {'eid' : subject}
- else:
- attrs = {'eid_from': subject, 'eid_to': object}
- sql = self.sqladapter.sqlgen.delete('%s_relation' % rtype, attrs)
- self.doexec(session, sql, attrs)
-
- def doexec(self, session, query, args=None):
- """Execute a query.
-        it is a separate method just so that it shows up in profiling
- """
- if server.DEBUG:
- print 'exec', query, args
- cursor = session.cnxset[self.uri]
- try:
-            # str(query) to avoid an error if it's a unicode string
- cursor.execute(str(query), args)
- except Exception as ex:
- self.critical("sql: %r\n args: %s\ndbms message: %r",
- query, args, ex.args[0])
- try:
- session.cnxset.connection(self.uri).rollback()
- self.critical('transaction has been rolled back')
- except Exception:
- pass
- raise
- return cursor
--- a/server/sources/native.py Sun Jun 16 23:36:06 2013 +0200
+++ b/server/sources/native.py Thu Jun 27 08:30:46 2013 +0200
@@ -59,7 +59,7 @@
from cubicweb.cwconfig import CubicWebNoAppConfiguration
from cubicweb.server import hook
from cubicweb.server.utils import crypt_password, eschema_eid, verify_and_update
-from cubicweb.server.sqlutils import SQL_PREFIX, SQLAdapterMixIn
+from cubicweb.server.sqlutils import SQL_PREFIX, SQLAdapterMixIn, SqliteCnxLoggingWrapper
from cubicweb.server.rqlannotation import set_qdata
from cubicweb.server.hook import CleanupDeletedEidsCacheOp
from cubicweb.server.edition import EditedEntity
@@ -305,9 +305,8 @@
# running unittest_multisources with the wrapping below
if self.dbdriver == 'sqlite' and \
not getattr(repo.config, 'no_sqlite_wrap', False):
- from cubicweb.server.sources.extlite import ConnectionWrapper
self.dbhelper.dbname = abspath(self.dbhelper.dbname)
- self.get_connection = lambda: ConnectionWrapper(self)
+ self.get_connection = lambda: SqliteCnxLoggingWrapper(self)
self.check_connection = lambda cnx: cnx
def cnxset_freed(cnx):
cnx.close()
@@ -317,14 +316,6 @@
self.create_eid = self._create_eid_sqlite
self.binary_to_str = self.dbhelper.dbapi_module.binary_to_str
-
- @property
- def _sqlcnx(self):
- # XXX: sqlite connections can only be used in the same thread, so
- # create a new one each time necessary. If it appears to be time
- # consuming, find another way
- return SQLAdapterMixIn.get_connection(self)
-
def check_config(self, source_entity):
"""check configuration of source entity"""
if source_entity.host_config:
@@ -735,7 +726,7 @@
cursor = session.cnxset[self.uri]
if server.DEBUG & server.DBG_SQL:
cnx = session.cnxset.connection(self.uri)
- # getattr to get the actual connection if cnx is a ConnectionWrapper
+            # getattr to get the actual connection if cnx is a SqliteCnxLoggingWrapper
# instance
print 'exec', query, args, getattr(cnx, '_cnx', cnx)
try:
--- a/server/sqlutils.py Sun Jun 16 23:36:06 2013 +0200
+++ b/server/sqlutils.py Thu Jun 27 08:30:46 2013 +0200
@@ -30,7 +30,7 @@
from logilab.common.date import todate, todatetime, utcdatetime, utctime
from logilab.database.sqlgen import SQLGenerator
-from cubicweb import Binary, ConfigurationError
+from cubicweb import Binary, ConfigurationError, server
from cubicweb.uilib import remove_html_tags
from cubicweb.schema import PURE_VIRTUAL_RTYPES
from cubicweb.server import SQL_CONNECT_HOOKS
@@ -177,6 +177,7 @@
for name in ifilter(_SQL_DROP_ALL_USER_TABLES_FILTER_FUNCTION, dbhelper.list_tables(sqlcursor))]
return '\n'.join(cmds)
+
class SQLAdapterMixIn(object):
"""Mixin for SQL data sources, getting a connection from a configuration
dictionary and handling connection locking
@@ -323,6 +324,43 @@
from cubicweb import set_log_methods
set_log_methods(SQLAdapterMixIn, getLogger('cubicweb.sqladapter'))
+
+class SqliteCnxLoggingWrapper(object):
+ def __init__(self, source=None):
+ self.source = source
+ self._cnx = None
+
+ def cursor(self):
+ # sqlite connections can only be used in the same thread, so
+ # create a new one each time necessary. If it appears to be time
+ # consuming, find another way
+ if self._cnx is None:
+ # direct access to SQLAdapterMixIn to get an unwrapped connection
+ self._cnx = SQLAdapterMixIn.get_connection(self.source)
+ if server.DEBUG & server.DBG_SQL:
+ print 'sql cnx OPEN', self._cnx
+ return self._cnx.cursor()
+
+ def commit(self):
+ if self._cnx is not None:
+ if server.DEBUG & (server.DBG_SQL | server.DBG_RQL):
+ print 'sql cnx COMMIT', self._cnx
+ self._cnx.commit()
+
+ def rollback(self):
+ if self._cnx is not None:
+ if server.DEBUG & (server.DBG_SQL | server.DBG_RQL):
+ print 'sql cnx ROLLBACK', self._cnx
+ self._cnx.rollback()
+
+ def close(self):
+ if self._cnx is not None:
+ if server.DEBUG & server.DBG_SQL:
+ print 'sql cnx CLOSE', self._cnx
+ self._cnx.close()
+ self._cnx = None
+
+
def init_sqlite_connexion(cnx):
class group_concat(object):