[multi-sources-removal] Drop no-longer-used federated ('true') multi-sources related code
Related to #2919300 (almost done!)
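
For callers of the query planner, the practical effect is that ExecutionPlan no longer creates, tracks, or drops temporary tables: execute() now just runs each step in order and returns the result of the last one. A minimal sketch of the resulting control flow, mirroring the new body added in server/querier.py below (names taken from that module):

    def execute(self):
        """execute a plan and return resulting rows"""
        for step in self.steps:
            result = step.execute()
        # the latest executed step contains the full query result
        return result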
--- a/_exceptions.py Wed Jan 22 15:50:03 2014 +0100
+++ b/_exceptions.py Mon Jul 01 16:13:17 2013 +0200
@@ -76,13 +76,6 @@
"""the eid is not defined in the system tables"""
msg = 'No entity with eid %s in the repository'
-class ETypeNotSupportedBySources(RepositoryError, InternalError):
- """no source support an entity type"""
- msg = 'No source supports %r entity\'s type'
-
-class MultiSourcesError(RepositoryError, InternalError):
- """usually due to bad multisources configuration or rql query"""
-
class UniqueTogetherError(RepositoryError):
"""raised when a unique_together constraint caused an IntegrityError"""
def __init__(self, session, **kwargs):
--- a/devtools/repotest.py Wed Jan 22 15:50:03 2014 +0100
+++ b/devtools/repotest.py Mon Jul 01 16:13:17 2013 +0200
@@ -332,7 +332,6 @@
from cubicweb.server.querier import ExecutionPlan
_orig_check_permissions = ExecutionPlan._check_permissions
-_orig_init_temp_table = ExecutionPlan.init_temp_table
def _check_permissions(*args, **kwargs):
res, restricted = _orig_check_permissions(*args, **kwargs)
@@ -342,15 +341,6 @@
def _dummy_check_permissions(self, rqlst):
return {(): rqlst.solutions}, set()
-def _init_temp_table(self, table, selection, solution):
- if self.tablesinorder is None:
- tablesinorder = self.tablesinorder = {}
- else:
- tablesinorder = self.tablesinorder
- if not table in tablesinorder:
- tablesinorder[table] = 'table%s' % len(tablesinorder)
- return _orig_init_temp_table(self, table, selection, solution)
-
from cubicweb.server import rqlannotation
_orig_select_principal = rqlannotation._select_principal
@@ -391,11 +381,9 @@
rqlrewrite.RQLRewriter.build_variantes = _build_variantes
ExecutionPlan._check_permissions = _check_permissions
ExecutionPlan.tablesinorder = None
- ExecutionPlan.init_temp_table = _init_temp_table
def undo_monkey_patch():
rqlrewrite.iter_relations = _orig_iter_relations
rqlrewrite.RQLRewriter.insert_snippets = _orig_insert_snippets
rqlrewrite.RQLRewriter.build_variantes = _orig_build_variantes
ExecutionPlan._check_permissions = _orig_check_permissions
- ExecutionPlan.init_temp_table = _orig_init_temp_table
--- a/server/querier.py Wed Jan 22 15:50:03 2014 +0100
+++ b/server/querier.py Mon Jul 01 16:13:17 2013 +0200
@@ -154,8 +154,6 @@
self.syssource = session.repo.system_source
# execution steps
self.steps = []
- # index of temporary tables created during execution
- self.temp_tables = {}
# various resource accesors
self.querier = querier
self.schema = querier.schema
@@ -170,49 +168,15 @@
"""add a step to the plan"""
self.steps.append(step)
- def clean(self):
- """remove temporary tables"""
- self.syssource.clean_temp_data(self.session, self.temp_tables)
-
def sqlexec(self, sql, args=None):
return self.syssource.sqlexec(self.session, sql, args)
def execute(self):
"""execute a plan and return resulting rows"""
- try:
- for step in self.steps:
- result = step.execute()
- # the latest executed step contains the full query result
- return result
- finally:
- self.clean()
-
- def make_temp_table_name(self, table):
- """
- return a temp table name according to db backend
- """
- return self.syssource.make_temp_table_name(table)
-
-
- def init_temp_table(self, table, selected, sol):
- """initialize sql schema and variable map for a temporary table which
- will be used to store result for the given rqlst
- """
- try:
- outputmap, sqlschema, _ = self.temp_tables[table]
- update_varmap(outputmap, selected, table)
- except KeyError:
- sqlschema, outputmap = self.syssource.temp_table_def(selected, sol,
- table)
- self.temp_tables[table] = [outputmap, sqlschema, False]
- return outputmap
-
- def create_temp_table(self, table):
- """create a temporary table to store result for the given rqlst"""
- if not self.temp_tables[table][-1]:
- sqlschema = self.temp_tables[table][1]
- self.syssource.create_temp_table(self.session, table, sqlschema)
- self.temp_tables[table][-1] = True
+ for step in self.steps:
+ result = step.execute()
+ # the latest executed step contains the full query result
+ return result
def preprocess(self, union, security=True):
"""insert security when necessary then annotate rql st for sql generation
--- a/server/repository.py Wed Jan 22 15:50:03 2014 +0100
+++ b/server/repository.py Mon Jul 01 16:13:17 2013 +0200
@@ -51,7 +51,6 @@
from cubicweb import (CW_SOFTWARE_ROOT, CW_MIGRATION_MAP, QueryError,
UnknownEid, AuthenticationError, ExecutionError,
- ETypeNotSupportedBySources, MultiSourcesError,
BadConnectionId, Unauthorized, ValidationError,
RepositoryError, UniqueTogetherError, onevent)
from cubicweb import cwvreg, schema, server
--- a/server/sources/__init__.py Wed Jan 22 15:50:03 2014 +0100
+++ b/server/sources/__init__.py Mon Jul 01 16:13:17 2013 +0200
@@ -365,15 +365,6 @@
"""
raise NotImplementedError(self)
- def flying_insert(self, table, session, union, args=None, varmap=None):
- """similar as .syntax_tree_search, but inserts data in the temporary
- table (on-the-fly if possible, eg for the system source whose the given
- cursor come from). If not possible, inserts all data by calling
- .executemany().
- """
- res = self.syntax_tree_search(session, union, args, varmap=varmap)
- session.repo.system_source.manual_insert(res, table, session)
-
# write modification api ###################################################
# read-only sources don't have to implement methods below
@@ -452,22 +443,12 @@
"""execute the query and return its result"""
raise NotImplementedError(self)
- def temp_table_def(self, selection, solution, table, basemap):
- raise NotImplementedError(self)
-
def create_index(self, session, table, column, unique=False):
raise NotImplementedError(self)
def drop_index(self, session, table, column, unique=False):
raise NotImplementedError(self)
- def create_temp_table(self, session, table, schema):
- raise NotImplementedError(self)
-
- def clean_temp_data(self, session, temptables):
- """remove temporary data, usually associated to temporary tables"""
- pass
-
@deprecated('[3.13] use repo.eid2extid(source, eid, session)')
def eid2extid(self, eid, session=None):
--- a/server/sources/native.py Wed Jan 22 15:50:03 2014 +0100
+++ b/server/sources/native.py Mon Jul 01 16:13:17 2013 +0200
@@ -95,23 +95,6 @@
return self.cu.fetchone()
-def make_schema(selected, solution, table, typemap):
- """return a sql schema to store RQL query result"""
- sql = []
- varmap = {}
- for i, term in enumerate(selected):
- name = 'C%s' % i
- key = term.as_string()
- varmap[key] = '%s.%s' % (table, name)
- ttype = term.get_type(solution)
- try:
- sql.append('%s %s' % (name, typemap[ttype]))
- except KeyError:
- # assert not schema(ttype).final
- sql.append('%s %s' % (name, typemap['Int']))
- return ','.join(sql), varmap
-
-
def sql_or_clauses(sql, clauses):
select, restr = sql.split(' WHERE ', 1)
restrclauses = restr.split(' AND ')
@@ -280,7 +263,6 @@
self.do_fti = not repo.config['delay-full-text-indexation']
# sql queries cache
self._cache = QueryCache(repo.config['rql-cache-size'])
- self._temp_table_data = {}
# we need a lock to protect eid attribution function (XXX, really?
# explain)
self._eid_cnx_lock = Lock()
@@ -516,52 +498,6 @@
assert dbg_results(results)
return results
- def flying_insert(self, table, session, union, args=None, varmap=None):
- """similar as .syntax_tree_search, but inserts data in the
- temporary table (on-the-fly if possible, eg for the system
- source whose the given cursor come from). If not possible,
- inserts all data by calling .executemany().
- """
- assert dbg_st_search(
- self.uri, union, varmap, args,
- prefix='ON THE FLY temp data insertion into %s from' % table)
- # generate sql queries if we are able to do so
- sql, qargs, cbs = self._rql_sqlgen.generate(union, args, varmap)
- query = 'INSERT INTO %s %s' % (table, sql.encode(self._dbencoding))
- self.doexec(session, query, self.merge_args(args, qargs))
-
- def manual_insert(self, results, table, session):
- """insert given result into a temporary table on the system source"""
- if server.DEBUG & server.DBG_RQL:
- print ' manual insertion of', len(results), 'results into', table
- if not results:
- return
- query_args = ['%%(%s)s' % i for i in xrange(len(results[0]))]
- query = 'INSERT INTO %s VALUES(%s)' % (table, ','.join(query_args))
- kwargs_list = []
- for row in results:
- kwargs = {}
- row = tuple(row)
- for index, cell in enumerate(row):
- if isinstance(cell, Binary):
- cell = self._binary(cell.getvalue())
- kwargs[str(index)] = cell
- kwargs_list.append(kwargs)
- self.doexecmany(session, query, kwargs_list)
-
- def clean_temp_data(self, session, temptables):
- """remove temporary data, usually associated to temporary tables"""
- if temptables:
- for table in temptables:
- try:
- self.doexec(session,'DROP TABLE %s' % table)
- except Exception:
- pass
- try:
- del self._temp_table_data[table]
- except KeyError:
- continue
-
@contextmanager
def _storage_handler(self, entity, event):
# 1/ memorize values as they are before the storage is called.
@@ -865,26 +801,12 @@
pass
return None
- def make_temp_table_name(self, table):
- return self.dbhelper.temporary_table_name(table)
-
- def temp_table_def(self, selected, sol, table):
- return make_schema(selected, sol, table, self.dbhelper.TYPE_MAPPING)
-
- def create_temp_table(self, session, table, schema):
- # we don't want on commit drop, this may cause problem when
- # running with an ldap source, and table will be deleted manually any way
- # on commit
- sql = self.dbhelper.sql_temporary_table(table, schema, False)
- self.doexec(session, sql)
-
def _create_eid_sqlite(self, session):
with self._eid_cnx_lock:
for sql in self.dbhelper.sqls_increment_sequence('entities_id_seq'):
cursor = self.doexec(session, sql)
return cursor.fetchone()[0]
-
def create_eid(self, session): # pylint: disable=E0202
# lock needed to prevent 'Connection is busy with results for another
# command (0)' errors with SQLServer
--- a/server/test/unittest_querier.py Wed Jan 22 15:50:03 2014 +0100
+++ b/server/test/unittest_querier.py Mon Jul 01 16:13:17 2013 +0200
@@ -27,7 +27,6 @@
from cubicweb import QueryError, Unauthorized, Binary
from cubicweb.server.sqlutils import SQL_PREFIX
from cubicweb.server.utils import crypt_password
-from cubicweb.server.sources.native import make_schema
from cubicweb.server.querier import manual_build_descr, _make_description
from cubicweb.devtools import get_test_db_handler, TestServerConfiguration
from cubicweb.devtools.testlib import CubicWebTC
@@ -60,17 +59,6 @@
SQL_CONNECT_HOOKS['sqlite'].append(init_sqlite_connexion)
-from logilab.database import _GenericAdvFuncHelper
-TYPEMAP = _GenericAdvFuncHelper.TYPE_MAPPING
-
-class MakeSchemaTC(TestCase):
- def test_known_values(self):
- solution = {'A': 'String', 'B': 'CWUser'}
- self.assertEqual(make_schema((Variable('A'), Variable('B')), solution,
- 'table0', TYPEMAP),
- ('C0 text,C1 integer', {'A': 'table0.C0', 'B': 'table0.C1'}))
-
-
def setUpClass(cls, *args):
global repo, cnx
config = TestServerConfiguration(apphome=UtilsTC.datadir)
--- a/server/utils.py Wed Jan 22 15:50:03 2014 +0100
+++ b/server/utils.py Mon Jul 01 16:13:17 2013 +0200
@@ -72,24 +72,6 @@
# wrong password
return ''
-def cartesian_product(seqin):
- """returns a generator which returns the cartesian product of `seqin`
-
- for more details, see :
- http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/302478
- """
- def rloop(seqin, comb):
- """recursive looping function"""
- if seqin: # any more sequences to process?
- for item in seqin[0]:
- newcomb = comb + [item] # add next item to current combination
- # call rloop w/ remaining seqs, newcomb
- for item in rloop(seqin[1:], newcomb):
- yield item # seqs and newcomb
- else: # processing last sequence
- yield comb # comb finished, add to list
- return rloop(seqin, [])
-
def eschema_eid(session, eschema):
"""get eid of the CWEType entity for the given yams type. You should use