--- a/dbapi.py Fri Mar 05 09:39:34 2010 +0100
+++ b/dbapi.py Fri Mar 05 12:18:22 2010 +0100
@@ -398,14 +398,20 @@
def check(self):
"""raise `BadSessionId` if the connection is no more valid"""
+ if self._closed is not None:
+ raise ProgrammingError('Closed connection')
self._repo.check_session(self.sessionid)
def set_session_props(self, **props):
"""raise `BadSessionId` if the connection is no more valid"""
+ if self._closed is not None:
+ raise ProgrammingError('Closed connection')
self._repo.set_session_props(self.sessionid, props)
def get_shared_data(self, key, default=None, pop=False):
"""return value associated to `key` in shared data"""
+ if self._closed is not None:
+ raise ProgrammingError('Closed connection')
return self._repo.get_shared_data(self.sessionid, key, default, pop)
def set_shared_data(self, key, value, querydata=False):
@@ -416,6 +422,8 @@
transaction, and won't be available through the connexion, only on the
repository side.
"""
+ if self._closed is not None:
+ raise ProgrammingError('Closed connection')
return self._repo.set_shared_data(self.sessionid, key, value, querydata)
def get_schema(self):
@@ -501,6 +509,8 @@
def user(self, req=None, props=None):
"""return the User object associated to this connection"""
# cnx validity is checked by the call to .user_info
+ if self._closed is not None:
+ raise ProgrammingError('Closed connection')
eid, login, groups, properties = self._repo.user_info(self.sessionid,
props)
if req is None:
@@ -521,6 +531,8 @@
pass
def describe(self, eid):
+ if self._closed is not None:
+ raise ProgrammingError('Closed connection')
return self._repo.describe(self.sessionid, eid)
def close(self):
@@ -535,6 +547,7 @@
if self._closed:
raise ProgrammingError('Connection is already closed')
self._repo.close(self.sessionid)
+ del self._repo # necessary for proper garbage collection
self._closed = 1
def commit(self):
--- a/devtools/repotest.py Fri Mar 05 09:39:34 2010 +0100
+++ b/devtools/repotest.py Fri Mar 05 12:18:22 2010 +0100
@@ -95,6 +95,31 @@
def __iter__(self):
return iter(sorted(self.origdict, key=self.sortkey))
+def schema_eids_idx(schema):
+ """return a dictionary mapping schema types to their eids so we can reread
+ it from the fs instead of the db (too costly) between tests
+ """
+ schema_eids = {}
+ for x in schema.entities():
+ schema_eids[x] = x.eid
+ for x in schema.relations():
+ schema_eids[x] = x.eid
+ for rdef in x.rdefs.itervalues():
+ schema_eids[(rdef.subject, rdef.rtype, rdef.object)] = rdef.eid
+ return schema_eids
+
+def restore_schema_eids_idx(schema, schema_eids):
+ """rebuild schema eid index"""
+ for x in schema.entities():
+ x.eid = schema_eids[x]
+ schema._eid_index[x.eid] = x
+ for x in schema.relations():
+ x.eid = schema_eids[x]
+ schema._eid_index[x.eid] = x
+ for rdef in x.rdefs.itervalues():
+ rdef.eid = schema_eids[(rdef.subject, rdef.rtype, rdef.object)]
+ schema._eid_index[rdef.eid] = rdef
+
from logilab.common.testlib import TestCase
from rql import RQLHelper
@@ -150,6 +175,7 @@
self.pool = self.session.set_pool()
self.maxeid = self.get_max_eid()
do_monkey_patch()
+ self._dumb_sessions = []
def get_max_eid(self):
return self.session.unsafe_execute('Any MAX(X)')[0][0]
@@ -161,6 +187,10 @@
self.session.rollback()
self.cleanup()
self.commit()
+ # properly close dumb sessions
+ for session in self._dumb_sessions:
+ session.rollback()
+ session.close()
self.repo._free_pool(self.pool)
assert self.session.user.eid != -1
@@ -198,6 +228,8 @@
u._groups = set(groups)
s = Session(u, self.repo)
s._threaddata.pool = self.pool
+ # register session to ensure it gets closed
+ self._dumb_sessions.append(s)
return s
def execute(self, rql, args=None, eid_key=None, build_descr=True):
@@ -223,6 +255,7 @@
self.sources = self.o._repo.sources
self.system = self.sources[-1]
do_monkey_patch()
+        self._dumb_sessions = [] # not set by the hi-jacked parent setup
def add_source(self, sourcecls, uri):
self.sources.append(sourcecls(self.repo, self.o.schema,
@@ -237,6 +270,9 @@
del self.repo.sources_by_uri[source.uri]
self.newsources -= 1
undo_monkey_patch()
+ for session in self._dumb_sessions:
+ session._threaddata.pool = None
+ session.close()
def _prepare_plan(self, rql, kwargs=None):
rqlst = self.o.parse(rql, annotate=True)
--- a/devtools/test/unittest_testlib.py Fri Mar 05 09:39:34 2010 +0100
+++ b/devtools/test/unittest_testlib.py Fri Mar 05 12:18:22 2010 +0100
@@ -9,12 +9,12 @@
from cStringIO import StringIO
from unittest import TestSuite
-
-from logilab.common.testlib import (TestCase, unittest_main,
+from logilab.common.testlib import (TestCase, unittest_main,
SkipAwareTextTestRunner)
from cubicweb.devtools import htmlparser
from cubicweb.devtools.testlib import CubicWebTC
+from cubicweb.pytestconf import clean_repo_test_cls
class WebTestTC(TestCase):
@@ -37,7 +37,7 @@
self.assertEquals(result.testsRun, 2)
self.assertEquals(len(result.errors), 0)
self.assertEquals(len(result.failures), 1)
-
+ clean_repo_test_cls(MyWebTest)
HTML_PAGE = u"""<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
--- a/etwist/server.py Fri Mar 05 09:39:34 2010 +0100
+++ b/etwist/server.py Fri Mar 05 12:18:22 2010 +0100
@@ -350,32 +350,131 @@
set_log_methods(CubicWebRootResource, getLogger('cubicweb.twisted'))
+listiterator = type(iter([]))
-def _gc_debug():
+def _gc_debug(all=True):
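+    """print a gc-based report of objects still tracked in memory: cubicweb
+    requests/sessions, app objects, connections, result sets, schemas and open
+    files, plus (when `all` is true) the 20 most common remaining classes
+    """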
import gc
from pprint import pprint
from cubicweb.appobject import AppObject
gc.collect()
count = 0
acount = 0
+ fcount = 0
+ rcount = 0
+ ccount = 0
+ scount = 0
ocount = {}
+ from rql.stmts import Union
+ from cubicweb.schema import CubicWebSchema
+ from cubicweb.rset import ResultSet
+ from cubicweb.dbapi import Connection, Cursor
+ from cubicweb.req import RequestSessionBase
+ from cubicweb.server.repository import Repository
+ from cubicweb.server.sources.native import NativeSQLSource
+ from cubicweb.server.session import Session
+ from cubicweb.devtools.testlib import CubicWebTC
+ from logilab.common.testlib import TestSuite
+ from optparse import Values
+ import types, weakref
for obj in gc.get_objects():
- if isinstance(obj, CubicWebTwistedRequestAdapter):
+ if isinstance(obj, RequestSessionBase):
count += 1
+ if isinstance(obj, Session):
+ print ' session', obj, referrers(obj, True)
elif isinstance(obj, AppObject):
acount += 1
- else:
+ elif isinstance(obj, ResultSet):
+ rcount += 1
+ #print ' rset', obj, referrers(obj)
+ elif isinstance(obj, Repository):
+ print ' REPO', obj, referrers(obj, True)
+ #elif isinstance(obj, NativeSQLSource):
+ # print ' SOURCe', obj, referrers(obj)
+ elif isinstance(obj, CubicWebTC):
+ print ' TC', obj, referrers(obj)
+ elif isinstance(obj, TestSuite):
+ print ' SUITE', obj, referrers(obj)
+ #elif isinstance(obj, Values):
+ # print ' values', '%#x' % id(obj), referrers(obj, True)
+ elif isinstance(obj, Connection):
+ ccount += 1
+ #print ' cnx', obj, referrers(obj)
+ #elif isinstance(obj, Cursor):
+ # ccount += 1
+ # print ' cursor', obj, referrers(obj)
+ elif isinstance(obj, file):
+ fcount += 1
+            # print ' open file', obj.name, obj.fileno()
+ elif isinstance(obj, CubicWebSchema):
+ scount += 1
+ print ' schema', obj, referrers(obj)
+ elif not isinstance(obj, (type, tuple, dict, list, set, frozenset,
+ weakref.ref, weakref.WeakKeyDictionary,
+ listiterator,
+ property, classmethod,
+ types.ModuleType, types.MemberDescriptorType,
+ types.FunctionType, types.MethodType)):
try:
ocount[obj.__class__] += 1
except KeyError:
ocount[obj.__class__] = 1
except AttributeError:
pass
- print 'IN MEM REQUESTS', count
- print 'IN MEM APPOBJECTS', acount
- ocount = sorted(ocount.items(), key=lambda x: x[1], reverse=True)[:20]
- pprint(ocount)
- print 'UNREACHABLE', gc.garbage
+ if count:
+ print ' NB REQUESTS/SESSIONS', count
+ if acount:
+ print ' NB APPOBJECTS', acount
+ if ccount:
+ print ' NB CONNECTIONS', ccount
+ if rcount:
+ print ' NB RSETS', rcount
+ if scount:
+ print ' NB SCHEMAS', scount
+ if fcount:
+ print ' NB FILES', fcount
+ if all:
+ ocount = sorted(ocount.items(), key=lambda x: x[1], reverse=True)[:20]
+ pprint(ocount)
+ if gc.garbage:
+ print 'UNREACHABLE', gc.garbage
+
+def referrers(obj, showobj=False):
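+    """return a sorted description of `obj` referrers as (type, name or object)
+    pairs; if those pairs turn out unhashable, fall back to returning the
+    referrer objects themselves, unhashable ones last
+    """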
+ try:
+ return sorted(set((type(x), showobj and x or getattr(x, '__name__', '%#x' % id(x)))
+ for x in _referrers(obj)))
+ except TypeError:
+ s = set()
+ unhashable = []
+ for x in _referrers(obj):
+ try:
+ s.add(x)
+ except TypeError:
+ unhashable.append(x)
+ return sorted(s) + unhashable
+
+def _referrers(obj, seen=None, level=0):
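+    """recursively collect "interesting" referrers of `obj`, looking through
+    containers (lists, tuples, sets, dicts, list iterators) up to 5 levels deep
+    and skipping frames and schema objects
+    """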
+ import gc, types
+ from cubicweb.schema import CubicWebRelationSchema, CubicWebEntitySchema
+ interesting = []
+ if seen is None:
+ seen = set()
+ for x in gc.get_referrers(obj):
+ if id(x) in seen:
+ continue
+ seen.add(id(x))
+ if isinstance(x, types.FrameType):
+ continue
+ if isinstance(x, (CubicWebRelationSchema, CubicWebEntitySchema)):
+ continue
+ if isinstance(x, (list, tuple, set, dict, listiterator)):
+ if level >= 5:
+ pass
+ #interesting.append(x)
+ else:
+ interesting += _referrers(x, seen, level+1)
+ else:
+ interesting.append(x)
+ return interesting
def run(config, debug):
# create the site
--- a/ext/html4zope.py Fri Mar 05 09:39:34 2010 +0100
+++ b/ext/html4zope.py Fri Mar 05 12:18:22 2010 +0100
@@ -24,12 +24,13 @@
__docformat__ = 'reStructuredText'
+import os
+
from logilab.mtconverter import xml_escape
from docutils import nodes
from docutils.writers.html4css1 import Writer as CSS1Writer
from docutils.writers.html4css1 import HTMLTranslator as CSS1HTMLTranslator
-import os
default_level = int(os.environ.get('STX_DEFAULT_LEVEL', 3))
--- a/ext/rest.py Fri Mar 05 09:39:34 2010 +0100
+++ b/ext/rest.py Fri Mar 05 12:18:22 2010 +0100
@@ -25,7 +25,7 @@
from os.path import join
from docutils import statemachine, nodes, utils, io
-from docutils.core import publish_string
+from docutils.core import Publisher
from docutils.parsers.rst import Parser, states, directives
from docutils.parsers.rst.roles import register_canonical_role, set_classes
@@ -92,14 +92,15 @@
in `docutils.parsers.rst.directives.misc`
"""
context = state.document.settings.context
+ cw = context._cw
source = state_machine.input_lines.source(
lineno - state_machine.input_offset - 1)
#source_dir = os.path.dirname(os.path.abspath(source))
fid = arguments[0]
- for lang in chain((context._cw.lang, context.vreg.property_value('ui.language')),
- context.config.available_languages()):
+ for lang in chain((cw.lang, cw.vreg.property_value('ui.language')),
+ cw.vreg.config.available_languages()):
rid = '%s_%s.rst' % (fid, lang)
- resourcedir = context.config.locate_doc_file(rid)
+ resourcedir = cw.vreg.config.locate_doc_file(rid)
if resourcedir:
break
else:
@@ -196,6 +197,15 @@
self.finish_parse()
+# XXX docutils keeps a ref on the context, can't find a correct way to remove it
+class CWReSTPublisher(Publisher):
+ def __init__(self, context, settings, **kwargs):
+ Publisher.__init__(self, **kwargs)
+ self.set_components('standalone', 'restructuredtext', 'pseudoxml')
+ self.process_programmatic_settings(None, settings, None)
+ self.settings.context = context
+
+
def rest_publish(context, data):
"""publish a string formatted as ReStructured Text to HTML
@@ -218,7 +228,7 @@
# remove unprintable characters unauthorized in xml
data = data.translate(ESC_CAR_TABLE)
settings = {'input_encoding': encoding, 'output_encoding': 'unicode',
- 'warning_stream': StringIO(), 'context': context,
+ 'warning_stream': StringIO(),
# dunno what's the max, severe is 4, and we never want a crash
# (though try/except may be a better option...)
'halt_level': 10,
@@ -233,9 +243,17 @@
else:
base_url = None
try:
- return publish_string(writer=Writer(base_url=base_url),
- parser=CubicWebReSTParser(), source=data,
- settings_overrides=settings)
+ pub = CWReSTPublisher(context, settings,
+ parser=CubicWebReSTParser(),
+ writer=Writer(base_url=base_url),
+ source_class=io.StringInput,
+ destination_class=io.StringOutput)
+ pub.set_source(data)
+ pub.set_destination()
+ res = pub.publish(enable_exit_status=None)
+ # necessary for proper garbage collection, else a ref is kept somewhere in docutils...
+ del pub.settings.context
+ return res
except Exception:
LOGGER.exception('error while publishing ReST text')
if not isinstance(data, unicode):
--- a/hooks/syncschema.py Fri Mar 05 09:39:34 2010 +0100
+++ b/hooks/syncschema.py Fri Mar 05 12:18:22 2010 +0100
@@ -12,11 +12,12 @@
"""
__docformat__ = "restructuredtext en"
+from copy import copy
from yams.schema import BASE_TYPES, RelationSchema, RelationDefinitionSchema
-from yams.buildobjs import EntityType, RelationType, RelationDefinition
-from yams.schema2sql import eschema2sql, rschema2sql, type_from_constraints
+from yams import buildobjs as ybo, schema2sql as y2sql
from logilab.common.decorators import clear_cache
+from logilab.common.testlib import mock_object
from cubicweb import ValidationError
from cubicweb.selectors import implements
@@ -246,7 +247,7 @@
# need to create the relation if it has not been already done by
# another event of the same transaction
if not rschema.type in session.transaction_data.get('createdtables', ()):
- tablesql = rschema2sql(rschema)
+ tablesql = y2sql.rschema2sql(rschema)
# create the necessary table
for sql in tablesql.split(';'):
if sql.strip():
@@ -314,13 +315,13 @@
rtype = entity.rtype.name
obj = str(entity.otype.name)
constraints = get_constraints(self.session, entity)
- rdef = RelationDefinition(subj, rtype, obj,
- description=entity.description,
- cardinality=entity.cardinality,
- constraints=constraints,
- order=entity.ordernum,
- eid=entity.eid,
- **kwargs)
+ rdef = ybo.RelationDefinition(subj, rtype, obj,
+ description=entity.description,
+ cardinality=entity.cardinality,
+ constraints=constraints,
+ order=entity.ordernum,
+ eid=entity.eid,
+ **kwargs)
MemSchemaRDefAdd(self.session, rdef)
return rdef
@@ -338,8 +339,8 @@
'internationalizable': entity.internationalizable}
rdef = self.init_rdef(**props)
sysource = session.pool.source('system')
- attrtype = type_from_constraints(sysource.dbhelper, rdef.object,
- rdef.constraints)
+ attrtype = y2sql.type_from_constraints(
+ sysource.dbhelper, rdef.object, rdef.constraints)
# XXX should be moved somehow into lgc.adbh: sqlite doesn't support to
# add a new column with UNIQUE, it should be added after the ALTER TABLE
# using ADD INDEX
@@ -370,12 +371,13 @@
self.error('error while creating index for %s.%s: %s',
table, column, ex)
# final relations are not infered, propagate
+ schema = session.vreg.schema
try:
- eschema = session.vreg.schema.eschema(rdef.subject)
+ eschema = schema.eschema(rdef.subject)
except KeyError:
return # entity type currently being added
# propagate attribute to children classes
- rschema = session.vreg.schema.rschema(rdef.name)
+ rschema = schema.rschema(rdef.name)
# if relation type has been inserted in the same transaction, its final
# attribute is still set to False, so we've to ensure it's False
rschema.final = True
@@ -385,15 +387,19 @@
'cardinality': rdef.cardinality,
'constraints': rdef.constraints,
'permissions': rdef.get_permissions(),
- 'order': rdef.order})
+ 'order': rdef.order,
+ 'infered': False, 'eid': None
+ })
+ cstrtypemap = ss.cstrtype_mapping(session)
groupmap = group_mapping(session)
+ object = schema.eschema(rdef.object)
for specialization in eschema.specialized_by(False):
if (specialization, rdef.object) in rschema.rdefs:
continue
- sperdef = RelationDefinitionSchema(specialization, rschema, rdef.object, props)
- for rql, args in ss.rdef2rql(rschema, str(specialization),
- rdef.object, sperdef, groupmap=groupmap):
- session.execute(rql, args)
+ sperdef = RelationDefinitionSchema(specialization, rschema,
+ object, props)
+ ss.execschemarql(session.execute, sperdef,
+ ss.rdef2rql(sperdef, cstrtypemap, groupmap))
# set default value, using sql for performance and to avoid
# modification_date update
if default:
@@ -442,13 +448,13 @@
rtype in session.transaction_data.get('createdtables', ())):
try:
rschema = schema.rschema(rtype)
- tablesql = rschema2sql(rschema)
+ tablesql = y2sql.rschema2sql(rschema)
except KeyError:
# fake we add it to the schema now to get a correctly
# initialized schema but remove it before doing anything
# more dangerous...
rschema = schema.add_relation_type(rdef)
- tablesql = rschema2sql(rschema)
+ tablesql = y2sql.rschema2sql(rschema)
schema.del_relation_type(rtype)
# create the necessary table
for sql in tablesql.split(';'):
@@ -481,11 +487,11 @@
return
atype = self.rschema.objects(etype)[0]
constraints = self.rschema.rdef(etype, atype).constraints
- coltype = type_from_constraints(adbh, atype, constraints,
- creating=False)
+ coltype = y2sql.type_from_constraints(adbh, atype, constraints,
+ creating=False)
# XXX check self.values['cardinality'][0] actually changed?
- sql = adbh.sql_set_null_allowed(table, column, coltype,
- self.values['cardinality'][0] != '1')
+ notnull = self.values['cardinality'][0] != '1'
+ sql = adbh.sql_set_null_allowed(table, column, coltype, notnull)
session.system_sql(sql)
if 'fulltextindexed' in self.values:
UpdateFTIndexOp(session)
@@ -524,8 +530,8 @@
oldcstr is None or oldcstr.max != newcstr.max):
adbh = self.session.pool.source('system').dbhelper
card = rtype.rdef(subjtype, objtype).cardinality
- coltype = type_from_constraints(adbh, objtype, [newcstr],
- creating=False)
+ coltype = y2sql.type_from_constraints(adbh, objtype, [newcstr],
+ creating=False)
sql = adbh.sql_change_col_type(table, column, coltype, card != '1')
try:
session.system_sql(sql, rollback_on_failure=False)
@@ -826,23 +832,26 @@
return
schema = self._cw.vreg.schema
name = entity['name']
- etype = EntityType(name=name, description=entity.get('description'),
- meta=entity.get('meta')) # don't care about final
+ etype = ybo.EntityType(name=name, description=entity.get('description'),
+ meta=entity.get('meta')) # don't care about final
# fake we add it to the schema now to get a correctly initialized schema
# but remove it before doing anything more dangerous...
schema = self._cw.vreg.schema
eschema = schema.add_entity_type(etype)
# generate table sql and rql to add metadata
- tablesql = eschema2sql(self._cw.pool.source('system').dbhelper, eschema,
- prefix=SQL_PREFIX)
- relrqls = []
+ tablesql = y2sql.eschema2sql(self._cw.pool.source('system').dbhelper,
+ eschema, prefix=SQL_PREFIX)
+ rdefrqls = []
+ gmap = group_mapping(self._cw)
+ cmap = ss.cstrtype_mapping(self._cw)
for rtype in (META_RTYPES - VIRTUAL_RTYPES):
rschema = schema[rtype]
sampletype = rschema.subjects()[0]
desttype = rschema.objects()[0]
- props = rschema.rdef(sampletype, desttype)
- relrqls += list(ss.rdef2rql(rschema, name, desttype, props,
- groupmap=group_mapping(self._cw)))
+ rdef = copy(rschema.rdef(sampletype, desttype))
+ rdef.subject = mock_object(eid=entity.eid)
+ mock = mock_object(eid=None)
+ rdefrqls.append( (mock, tuple(ss.rdef2rql(rdef, cmap, gmap))) )
# now remove it !
schema.del_entity_type(name)
# create the necessary table
@@ -855,8 +864,8 @@
etype.eid = entity.eid
MemSchemaCWETypeAdd(self._cw, etype)
# add meta relations
- for rql, kwargs in relrqls:
- self._cw.execute(rql, kwargs)
+ for rdef, relrqls in rdefrqls:
+ ss.execschemarql(self._cw.execute, rdef, relrqls)
class BeforeUpdateCWETypeHook(DelCWETypeHook):
@@ -913,12 +922,12 @@
def __call__(self):
entity = self.entity
- rtype = RelationType(name=entity.name,
- description=entity.get('description'),
- meta=entity.get('meta', False),
- inlined=entity.get('inlined', False),
- symmetric=entity.get('symmetric', False),
- eid=entity.eid)
+ rtype = ybo.RelationType(name=entity.name,
+ description=entity.get('description'),
+ meta=entity.get('meta', False),
+ inlined=entity.get('inlined', False),
+ symmetric=entity.get('symmetric', False),
+ eid=entity.eid)
MemSchemaCWRTypeAdd(self._cw, rtype)
--- a/hooks/test/unittest_syncschema.py Fri Mar 05 09:39:34 2010 +0100
+++ b/hooks/test/unittest_syncschema.py Fri Mar 05 12:18:22 2010 +0100
@@ -3,9 +3,11 @@
from cubicweb import ValidationError
from cubicweb.devtools.testlib import CubicWebTC
from cubicweb.server.sqlutils import SQL_PREFIX
-
+from cubicweb.devtools.repotest import schema_eids_idx, restore_schema_eids_idx
-SCHEMA_EIDS = {}
+def teardown_module(*args):
+ del SchemaModificationHooksTC.schema_eids
+
class SchemaModificationHooksTC(CubicWebTC):
reset_schema = True
@@ -15,29 +17,12 @@
# we have to read schema from the database to get eid for schema entities
config._cubes = None
cls.repo.fill_schema()
- # remember them so we can reread it from the fs instead of the db (too
- # costly) between tests
- for x in cls.repo.schema.entities():
- SCHEMA_EIDS[x] = x.eid
- for x in cls.repo.schema.relations():
- SCHEMA_EIDS[x] = x.eid
- for rdef in x.rdefs.itervalues():
- SCHEMA_EIDS[(rdef.subject, rdef.rtype, rdef.object)] = rdef.eid
+ cls.schema_eids = schema_eids_idx(cls.repo.schema)
@classmethod
def _refresh_repo(cls):
super(SchemaModificationHooksTC, cls)._refresh_repo()
- # rebuild schema eid index
- schema = cls.repo.schema
- for x in schema.entities():
- x.eid = SCHEMA_EIDS[x]
- schema._eid_index[x.eid] = x
- for x in cls.repo.schema.relations():
- x.eid = SCHEMA_EIDS[x]
- schema._eid_index[x.eid] = x
- for rdef in x.rdefs.itervalues():
- rdef.eid = SCHEMA_EIDS[(rdef.subject, rdef.rtype, rdef.object)]
- schema._eid_index[rdef.eid] = rdef
+ restore_schema_eids_idx(cls.repo.schema, cls.schema_eids)
def index_exists(self, etype, attr, unique=False):
self.session.set_pool()
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/pytestconf.py Fri Mar 05 12:18:22 2010 +0100
@@ -0,0 +1,34 @@
+"""pytest configuration file: we need this to properly remove ressources
+cached on test classes, at least until we've proper support for teardown_class
+"""
+import sys
+from os.path import split, splitext
+from logilab.common.pytest import PyTester
+
+from cubicweb.etwist.server import _gc_debug
+
+class CustomPyTester(PyTester):
+ def testfile(self, filename, batchmode=False):
+ try:
+ return super(CustomPyTester, self).testfile(filename, batchmode)
+ finally:
+ modname = splitext(split(filename)[1])[0]
+ try:
+ module = sys.modules[modname]
+ except KeyError:
+ # error during test module import
+ return
+ for cls in vars(module).values():
+ if getattr(cls, '__module__', None) != modname:
+ continue
+ clean_repo_test_cls(cls)
+ #_gc_debug()
+
+def clean_repo_test_cls(cls):
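+    """shutdown the repository cached on a test class and drop the class
+    attributes (cnx, config, vreg, schema...) holding references to it, so
+    everything can be garbage collected between test modules
+    """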
+ if 'repo' in cls.__dict__:
+ if not cls.repo._shutting_down:
+ cls.repo.shutdown()
+ del cls.repo
+ for clsattr in ('cnx', '_orig_cnx', 'config', '_config', 'vreg', 'schema'):
+ if clsattr in cls.__dict__:
+ delattr(cls, clsattr)
--- a/server/__init__.py Fri Mar 05 09:39:34 2010 +0100
+++ b/server/__init__.py Fri Mar 05 12:18:22 2010 +0100
@@ -200,6 +200,7 @@
cnx.commit()
cnx.close()
session.close()
+ repo.shutdown()
# restore initial configuration
config.creating = False
config.read_instance_schema = read_instance_schema
--- a/server/migractions.py Fri Mar 05 09:39:34 2010 +0100
+++ b/server/migractions.py Fri Mar 05 12:18:22 2010 +0100
@@ -25,10 +25,12 @@
import os.path as osp
from datetime import datetime
from glob import glob
+from copy import copy
from warnings import warn
from logilab.common.deprecation import deprecated
from logilab.common.decorators import cached, clear_cache
+from logilab.common.testlib import mock_object
from yams.constraints import SizeConstraint
from yams.schema2sql import eschema2sql, rschema2sql
@@ -283,6 +285,11 @@
"""cached group mapping"""
return ss.group_mapping(self._cw)
+ @cached
+ def cstrtype_mapping(self):
+ """cached constraint types mapping"""
+ return ss.cstrtype_mapping(self._cw)
+
def exec_event_script(self, event, cubepath=None, funcname=None,
*args, **kwargs):
if cubepath:
@@ -400,14 +407,17 @@
return
self._synchronized.add(rtype)
rschema = self.fs_schema.rschema(rtype)
+ reporschema = self.repo.schema.rschema(rtype)
if syncprops:
- self.rqlexecall(ss.updaterschema2rql(rschema),
+ assert reporschema.eid, reporschema
+ self.rqlexecall(ss.updaterschema2rql(rschema, reporschema.eid),
ask_confirm=self.verbosity>=2)
if syncrdefs:
- reporschema = self.repo.schema.rschema(rtype)
for subj, obj in rschema.rdefs:
if (subj, obj) not in reporschema.rdefs:
continue
+ if rschema in VIRTUAL_RTYPES:
+ continue
self._synchronize_rdef_schema(subj, rschema, obj,
syncprops=syncprops,
syncperms=syncperms)
@@ -440,9 +450,11 @@
'Y is CWEType, Y name %(y)s',
{'x': str(repoeschema), 'y': str(espschema)},
ask_confirm=False)
- self.rqlexecall(ss.updateeschema2rql(eschema),
+ self.rqlexecall(ss.updateeschema2rql(eschema, repoeschema.eid),
ask_confirm=self.verbosity >= 2)
for rschema, targettypes, role in eschema.relation_definitions(True):
+ if rschema in VIRTUAL_RTYPES:
+ continue
if role == 'subject':
if not rschema in repoeschema.subject_relations():
continue
@@ -480,11 +492,11 @@
confirm = self.verbosity >= 2
if syncprops:
# properties
- self.rqlexecall(ss.updaterdef2rql(rschema, subjtype, objtype),
+ rdef = rschema.rdef(subjtype, objtype)
+ repordef = reporschema.rdef(subjtype, objtype)
+ self.rqlexecall(ss.updaterdef2rql(rdef, repordef.eid),
ask_confirm=confirm)
# constraints
- rdef = rschema.rdef(subjtype, objtype)
- repordef = reporschema.rdef(subjtype, objtype)
newconstraints = list(rdef.constraints)
# 1. remove old constraints and update constraints of the same type
# NOTE: don't use rschema.constraint_by_type because it may be
@@ -510,10 +522,10 @@
self.rqlexec('SET X value %(v)s WHERE X eid %(x)s',
values, 'x', ask_confirm=confirm)
# 2. add new constraints
- for newcstr in newconstraints:
- self.rqlexecall(ss.constraint2rql(rschema, subjtype, objtype,
- newcstr),
- ask_confirm=confirm)
+ cstrtype_map = self.cstrtype_mapping()
+ self.rqlexecall(ss.constraints2rql(cstrtype_map, newconstraints,
+ repordef.eid),
+ ask_confirm=confirm)
if syncperms and not rschema in VIRTUAL_RTYPES:
self._synchronize_permissions(rdef, repordef.eid)
@@ -674,18 +686,23 @@
targeted type is known
"""
instschema = self.repo.schema
- if etype in instschema:
- # XXX (syt) plz explain: if we're adding an entity type, it should
- # not be there...
- eschema = instschema[etype]
- if eschema.final:
- instschema.del_entity_type(etype)
- else:
- eschema = self.fs_schema.eschema(etype)
+ assert not etype in instschema
+ # # XXX (syt) plz explain: if we're adding an entity type, it should
+ # # not be there...
+ # eschema = instschema[etype]
+ # if eschema.final:
+ # instschema.del_entity_type(etype)
+ # else:
+ eschema = self.fs_schema.eschema(etype)
confirm = self.verbosity >= 2
groupmap = self.group_mapping()
+ cstrtypemap = self.cstrtype_mapping()
# register the entity into CWEType
- self.rqlexecall(ss.eschema2rql(eschema, groupmap), ask_confirm=confirm)
+ try:
+ execute = self._cw.unsafe_execute
+ except AttributeError:
+ execute = self._cw.execute
+ ss.execschemarql(execute, eschema, ss.eschema2rql(eschema, groupmap))
# add specializes relation if needed
self.rqlexecall(ss.eschemaspecialize2rql(eschema), ask_confirm=confirm)
# register entity's attributes
@@ -698,9 +715,8 @@
# actually in the schema
self.cmd_add_relation_type(rschema.type, False, commit=True)
# register relation definition
- self.rqlexecall(ss.rdef2rql(rschema, etype, attrschema.type,
- groupmap=groupmap),
- ask_confirm=confirm)
+ rdef = self._get_rdef(rschema, eschema, eschema.destination(rschema))
+        ss.execschemarql(execute, rdef, ss.rdef2rql(rdef, cstrtypemap, groupmap))
# take care to newly introduced base class
# XXX some part of this should probably be under the "if auto" block
for spschema in eschema.specialized_by(recursive=False):
@@ -760,10 +776,12 @@
# remember this two avoid adding twice non symmetric relation
# such as "Emailthread forked_from Emailthread"
added.append((etype, rschema.type, targettype))
- self.rqlexecall(ss.rdef2rql(rschema, etype, targettype,
- groupmap=groupmap),
- ask_confirm=confirm)
+ rdef = self._get_rdef(rschema, eschema, targetschema)
+ ss.execschemarql(execute, rdef,
+ ss.rdef2rql(rdef, cstrtypemap, groupmap))
for rschema in eschema.object_relations():
+ if rschema.type in META_RTYPES:
+ continue
rtypeadded = rschema.type in instschema or rschema.type in added
for targetschema in rschema.subjects(etype):
# ignore relations where the targeted type is not in the
@@ -781,9 +799,9 @@
elif (targettype, rschema.type, etype) in added:
continue
# register relation definition
- self.rqlexecall(ss.rdef2rql(rschema, targettype, etype,
- groupmap=groupmap),
- ask_confirm=confirm)
+ rdef = self._get_rdef(rschema, targetschema, eschema)
+ ss.execschemarql(execute, rdef,
+ ss.rdef2rql(rdef, cstrtypemap, groupmap))
if commit:
self.commit()
@@ -822,15 +840,26 @@
committing depends on the `commit` argument value).
"""
+ reposchema = self.repo.schema
rschema = self.fs_schema.rschema(rtype)
+ try:
+ execute = self._cw.unsafe_execute
+ except AttributeError:
+ execute = self._cw.execute
# register the relation into CWRType and insert necessary relation
# definitions
- self.rqlexecall(ss.rschema2rql(rschema, addrdef=False),
- ask_confirm=self.verbosity>=2)
+ ss.execschemarql(execute, rschema, ss.rschema2rql(rschema, addrdef=False))
if addrdef:
self.commit()
- self.rqlexecall(ss.rdef2rql(rschema, groupmap=self.group_mapping()),
- ask_confirm=self.verbosity>=2)
+ gmap = self.group_mapping()
+ cmap = self.cstrtype_mapping()
+ for rdef in rschema.rdefs.itervalues():
+ if not (reposchema.has_entity(rdef.subject)
+ and reposchema.has_entity(rdef.object)):
+ continue
+ self._set_rdef_eid(rdef)
+ ss.execschemarql(execute, rdef,
+ ss.rdef2rql(rdef, cmap, gmap))
if rtype in META_RTYPES:
# if the relation is in META_RTYPES, ensure we're adding it for
# all entity types *in the persistent schema*, not only those in
@@ -839,15 +868,14 @@
if not etype in self.fs_schema:
# get sample object type and rproperties
objtypes = rschema.objects()
- assert len(objtypes) == 1
+ assert len(objtypes) == 1, objtypes
objtype = objtypes[0]
- props = rschema.rproperties(
- rschema.subjects(objtype)[0], objtype)
- assert props
- self.rqlexecall(ss.rdef2rql(rschema, etype, objtype, props,
- groupmap=self.group_mapping()),
- ask_confirm=self.verbosity>=2)
-
+ rdef = copy(rschema.rdef(rschema.subjects(objtype)[0], objtype))
+ rdef.subject = etype
+ rdef.rtype = self.repo.schema.rschema(rschema)
+                    rdef.object = self.repo.schema.eschema(objtype)
+ ss.execschemarql(execute, rdef,
+ ss.rdef2rql(rdef, cmap, gmap))
if commit:
self.commit()
@@ -877,12 +905,28 @@
rschema = self.fs_schema.rschema(rtype)
if not rtype in self.repo.schema:
self.cmd_add_relation_type(rtype, addrdef=False, commit=True)
- self.rqlexecall(ss.rdef2rql(rschema, subjtype, objtype,
- groupmap=self.group_mapping()),
- ask_confirm=self.verbosity>=2)
+ try:
+ execute = self._cw.unsafe_execute
+ except AttributeError:
+ execute = self._cw.execute
+ rdef = self._get_rdef(rschema, subjtype, objtype)
+ ss.execschemarql(execute, rdef,
+ ss.rdef2rql(rdef, self.cstrtype_mapping(),
+ self.group_mapping()))
if commit:
self.commit()
+ def _get_rdef(self, rschema, subjtype, objtype):
+ return self._set_rdef_eid(rschema.rdefs[(subjtype, objtype)])
+
+ def _set_rdef_eid(self, rdef):
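+        """ensure rdef's rtype, subject and object schema objects have their
+        eid set (taken from the in-repository schema) so they can be referenced
+        by eid in the generated rql queries
+        """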
+ for attr in ('rtype', 'subject', 'object'):
+ schemaobj = getattr(rdef, attr)
+ if getattr(schemaobj, 'eid', None) is None:
+ schemaobj.eid = self.repo.schema[schemaobj].eid
+ assert schemaobj.eid is not None
+ return rdef
+
def cmd_drop_relation_definition(self, subjtype, rtype, objtype, commit=True):
"""unregister an existing relation definition"""
rschema = self.repo.schema.rschema(rtype)
--- a/server/querier.py Fri Mar 05 09:39:34 2010 +0100
+++ b/server/querier.py Fri Mar 05 12:18:22 2010 +0100
@@ -22,9 +22,8 @@
from cubicweb.server.utils import cleanup_solutions
from cubicweb.server.rqlannotation import SQLGenAnnotator, set_qdata
-from cubicweb.server.ssplanner import add_types_restriction
+from cubicweb.server.ssplanner import READ_ONLY_RTYPES, add_types_restriction
-READ_ONLY_RTYPES = set(('eid', 'has_text', 'is', 'is_instance_of', 'identity'))
def empty_rset(rql, args, rqlst=None):
"""build an empty result set object"""
@@ -377,39 +376,6 @@
self._r_obj_index = {}
self._expanded_r_defs = {}
- def relation_definitions(self, rqlst, to_build):
- """add constant values to entity def, mark variables to be selected
- """
- to_select = {}
- for relation in rqlst.main_relations:
- lhs, rhs = relation.get_variable_parts()
- rtype = relation.r_type
- if rtype in READ_ONLY_RTYPES:
- raise QueryError("can't assign to %s" % rtype)
- try:
- edef = to_build[str(lhs)]
- except KeyError:
- # lhs var is not to build, should be selected and added as an
- # object relation
- edef = to_build[str(rhs)]
- to_select.setdefault(edef, []).append((rtype, lhs, 1))
- else:
- if isinstance(rhs, Constant) and not rhs.uid:
- # add constant values to entity def
- value = rhs.eval(self.args)
- eschema = edef.e_schema
- attrtype = eschema.subjrels[rtype].objects(eschema)[0]
- if attrtype == 'Password' and isinstance(value, unicode):
- value = value.encode('UTF8')
- edef[rtype] = value
- elif to_build.has_key(str(rhs)):
- # create a relation between two newly created variables
- self.add_relation_def((edef, rtype, to_build[rhs.name]))
- else:
- to_select.setdefault(edef, []).append( (rtype, rhs, 0) )
- return to_select
-
-
def add_entity_def(self, edef):
"""add an entity definition to build"""
edef.querier_pending_relations = {}
--- a/server/repository.py Fri Mar 05 09:39:34 2010 +0100
+++ b/server/repository.py Fri Mar 05 12:18:22 2010 +0100
@@ -327,6 +327,7 @@
"""called on server stop event to properly close opened sessions and
connections
"""
+ assert not self._shutting_down, 'already shutting down'
self._shutting_down = True
if isinstance(self._looping_tasks, tuple): # if tasks have been started
for looptask in self._looping_tasks:
--- a/server/schemaserial.py Fri Mar 05 09:39:34 2010 +0100
+++ b/server/schemaserial.py Fri Mar 05 12:18:22 2010 +0100
@@ -50,6 +50,10 @@
continue
return res
+def cstrtype_mapping(cursor):
+ """cached constraint types mapping"""
+ return dict(cursor.execute('Any T, X WHERE X is CWConstraintType, X name T'))
+
# schema / perms deserialization ##############################################
def deserialize_schema(schema, session):
@@ -229,14 +233,15 @@
eschemas.remove(schema.eschema('CWEType'))
eschemas.insert(0, schema.eschema('CWEType'))
for eschema in eschemas:
- for rql, kwargs in eschema2rql(eschema, groupmap):
- execute(rql, kwargs, build_descr=False)
+ execschemarql(execute, eschema, eschema2rql(eschema, groupmap))
if pb is not None:
pb.update()
# serialize constraint types
+ cstrtypemap = {}
rql = 'INSERT CWConstraintType X: X name %(ct)s'
for cstrtype in CONSTRAINTS:
- execute(rql, {'ct': unicode(cstrtype)}, build_descr=False)
+ cstrtypemap[cstrtype] = execute(rql, {'ct': unicode(cstrtype)},
+ build_descr=False)[0][0]
if pb is not None:
pb.update()
# serialize relations
@@ -246,8 +251,15 @@
if pb is not None:
pb.update()
continue
- for rql, kwargs in rschema2rql(rschema, groupmap=groupmap):
- execute(rql, kwargs, build_descr=False)
+ execschemarql(execute, rschema, rschema2rql(rschema, addrdef=False))
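+        # for symmetric relations, each rdef is indexed twice in rschema.rdefs
+        # (once per (subject, object) ordering) but points to the same object:
+        # keep a single occurrence so it's not serialized twice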
+ if rschema.symmetric:
+ rdefs = [rdef for k, rdef in rschema.rdefs.iteritems()
+ if (rdef.subject, rdef.object) == k]
+ else:
+ rdefs = rschema.rdefs.itervalues()
+ for rdef in rdefs:
+ execschemarql(execute, rdef,
+ rdef2rql(rdef, cstrtypemap, groupmap))
if pb is not None:
pb.update()
for rql, kwargs in specialize2rql(schema):
@@ -258,6 +270,55 @@
print
+# high level serialization functions
+
+def execschemarql(execute, schema, rqls):
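+    """execute rql statements generated to serialize `schema`, binding its eid
+    as %(x)s; if the schema object has no eid yet, grab it from the first
+    statement's result
+    """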
+ for rql, kwargs in rqls:
+ kwargs['x'] = schema.eid
+ rset = execute(rql, kwargs, build_descr=False)
+ if schema.eid is None:
+ schema.eid = rset[0][0]
+ else:
+ assert rset
+
+def erschema2rql(erschema, groupmap):
+ if isinstance(erschema, schemamod.EntitySchema):
+ return eschema2rql(erschema, groupmap=groupmap)
+ return rschema2rql(erschema, groupmap=groupmap)
+
+def specialize2rql(schema):
+ for eschema in schema.entities():
+ if eschema.final:
+ continue
+ for rql, kwargs in eschemaspecialize2rql(eschema):
+ yield rql, kwargs
+
+# etype serialization
+
+def eschema2rql(eschema, groupmap=None):
+ """return a list of rql insert statements to enter an entity schema
+ in the database as an CWEType entity
+ """
+ relations, values = eschema_relations_values(eschema)
+ # NOTE: 'specializes' relation can't be inserted here since there's no
+ # way to make sure the parent type is inserted before the child type
+ yield 'INSERT CWEType X: %s' % ','.join(relations) , values
+ # entity permissions
+ if groupmap is not None:
+ for rql, args in _erperms2rql(eschema, groupmap):
+ yield rql, args
+
+def eschema_relations_values(eschema):
+ values = _ervalues(eschema)
+ relations = ['X %s %%(%s)s' % (attr, attr) for attr in sorted(values)]
+ return relations, values
+
+def eschemaspecialize2rql(eschema):
+ specialized_type = eschema.specializes()
+ if specialized_type:
+ values = {'x': eschema.eid, 'et': specialized_type.eid}
+ yield 'SET X specializes ET WHERE X eid %(x)s, ET eid %(et)s', values
+
def _ervalues(erschema):
try:
type_ = unicode(erschema.type)
@@ -273,10 +334,23 @@
'description': desc,
}
-def eschema_relations_values(eschema):
- values = _ervalues(eschema)
- relations = ['X %s %%(%s)s' % (attr, attr) for attr in sorted(values)]
- return relations, values
+# rtype serialization
+
+def rschema2rql(rschema, cstrtypemap=None, addrdef=True, groupmap=None):
+ """return a list of rql insert statements to enter a relation schema
+ in the database as an CWRType entity
+ """
+ if rschema.type == 'has_text':
+ return
+ relations, values = rschema_relations_values(rschema)
+ yield 'INSERT CWRType X: %s' % ','.join(relations), values
+ if addrdef:
+ assert cstrtypemap
+ # sort for testing purpose
+ for rdef in sorted(rschema.rdefs.itervalues(),
+ key=lambda x: (x.subject, x.object)):
+ for rql, values in rdef2rql(rdef, cstrtypemap, groupmap):
+ yield rql, values
def rschema_relations_values(rschema):
values = _ervalues(rschema)
@@ -290,169 +364,58 @@
relations = ['X %s %%(%s)s' % (attr, attr) for attr in sorted(values)]
return relations, values
-def _rdef_values(objtype, props):
- amap = {'order': 'ordernum'}
+# rdef serialization
+
+def rdef2rql(rdef, cstrtypemap, groupmap=None):
+ # don't serialize infered relations
+ if rdef.infered:
+ return
+ relations, values = _rdef_values(rdef)
+ relations.append('X relation_type ER,X from_entity SE,X to_entity OE')
+ values.update({'se': rdef.subject.eid, 'rt': rdef.rtype.eid, 'oe': rdef.object.eid})
+ if rdef.final:
+ etype = 'CWAttribute'
+ else:
+ etype = 'CWRelation'
+ yield 'INSERT %s X: %s WHERE SE eid %%(se)s,ER eid %%(rt)s,OE eid %%(oe)s' % (
+ etype, ','.join(relations), ), values
+ for rql, values in constraints2rql(cstrtypemap, rdef.constraints):
+ yield rql, values
+ # no groupmap means "no security insertion"
+ if groupmap:
+ for rql, args in _erperms2rql(rdef, groupmap):
+ yield rql, args
+
+def _rdef_values(rdef):
+ amap = {'order': 'ordernum', 'default': 'defaultval'}
values = {}
- for prop, default in schemamod.RelationDefinitionSchema.rproperty_defs(objtype).iteritems():
+ for prop, default in rdef.rproperty_defs(rdef.object).iteritems():
if prop in ('eid', 'constraints', 'uid', 'infered', 'permissions'):
continue
- value = props.get(prop, default)
+ value = getattr(rdef, prop)
+ # XXX type cast really necessary?
if prop in ('indexed', 'fulltextindexed', 'internationalizable'):
value = bool(value)
elif prop == 'ordernum':
value = int(value)
elif isinstance(value, str):
value = unicode(value)
+ if value is not None and prop == 'default':
+ if value is False:
+ value = u''
+ if not isinstance(value, unicode):
+ value = unicode(value)
values[amap.get(prop, prop)] = value
- return values
-
-def nfrdef_relations_values(objtype, props):
- values = _rdef_values(objtype, props)
- relations = ['X %s %%(%s)s' % (attr, attr) for attr in sorted(values)]
- return relations, values
-
-def frdef_relations_values(objtype, props):
- values = _rdef_values(objtype, props)
- default = values['default']
- del values['default']
- if default is not None:
- if default is False:
- default = u''
- elif not isinstance(default, unicode):
- default = unicode(default)
- values['defaultval'] = default
relations = ['X %s %%(%s)s' % (attr, attr) for attr in sorted(values)]
return relations, values
-
-def __rdef2rql(genmap, rschema, subjtype=None, objtype=None, props=None,
- groupmap=None):
- if subjtype is None:
- assert objtype is None
- assert props is None
- targets = sorted(rschema.rdefs)
- else:
- assert not objtype is None
- targets = [(subjtype, objtype)]
- # relation schema
- if rschema.final:
- etype = 'CWAttribute'
- else:
- etype = 'CWRelation'
- for subjtype, objtype in targets:
- if props is None:
- _props = rschema.rdef(subjtype, objtype)
- else:
- _props = props
- # don't serialize infered relations
- if _props.get('infered'):
- continue
- gen = genmap[rschema.final]
- for rql, values in gen(rschema, subjtype, objtype, _props):
- yield rql, values
- # no groupmap means "no security insertion"
- if groupmap:
- for rql, args in _erperms2rql(_props, groupmap):
- args['st'] = str(subjtype)
- args['rt'] = str(rschema)
- args['ot'] = str(objtype)
- yield rql + 'X is %s, X from_entity ST, X to_entity OT, '\
- 'X relation_type RT, RT name %%(rt)s, ST name %%(st)s, '\
- 'OT name %%(ot)s' % etype, args
-
-
-def schema2rql(schema, skip=None, allow=None):
- """return a list of rql insert statements to enter the schema in the
- database as CWRType and CWEType entities
- """
- assert not (skip is not None and allow is not None), \
- 'can\'t use both skip and allow'
- all = schema.entities() + schema.relations()
- if skip is not None:
- return chain(*[erschema2rql(schema[t]) for t in all if not t in skip])
- elif allow is not None:
- return chain(*[erschema2rql(schema[t]) for t in all if t in allow])
- return chain(*[erschema2rql(schema[t]) for t in all])
-
-def erschema2rql(erschema, groupmap):
- if isinstance(erschema, schemamod.EntitySchema):
- return eschema2rql(erschema, groupmap=groupmap)
- return rschema2rql(erschema, groupmap=groupmap)
-
-def eschema2rql(eschema, groupmap=None):
- """return a list of rql insert statements to enter an entity schema
- in the database as an CWEType entity
- """
- relations, values = eschema_relations_values(eschema)
- # NOTE: 'specializes' relation can't be inserted here since there's no
- # way to make sure the parent type is inserted before the child type
- yield 'INSERT CWEType X: %s' % ','.join(relations) , values
- # entity permissions
- if groupmap is not None:
- for rql, args in _erperms2rql(eschema, groupmap):
- args['name'] = str(eschema)
- yield rql + 'X is CWEType, X name %(name)s', args
-
-def specialize2rql(schema):
- for eschema in schema.entities():
- for rql, kwargs in eschemaspecialize2rql(eschema):
- yield rql, kwargs
-
-def eschemaspecialize2rql(eschema):
- specialized_type = eschema.specializes()
- if specialized_type:
- values = {'x': eschema.type, 'et': specialized_type.type}
- yield 'SET X specializes ET WHERE X name %(x)s, ET name %(et)s', values
-
-def rschema2rql(rschema, addrdef=True, groupmap=None):
- """return a list of rql insert statements to enter a relation schema
- in the database as an CWRType entity
- """
- if rschema.type == 'has_text':
- return
- relations, values = rschema_relations_values(rschema)
- yield 'INSERT CWRType X: %s' % ','.join(relations), values
- if addrdef:
- for rql, values in rdef2rql(rschema, groupmap=groupmap):
- yield rql, values
-
-def rdef2rql(rschema, subjtype=None, objtype=None, props=None, groupmap=None):
- genmap = {True: frdef2rql, False: nfrdef2rql}
- return __rdef2rql(genmap, rschema, subjtype, objtype, props, groupmap)
-
-
-_LOCATE_RDEF_RQL0 = 'X relation_type ER,X from_entity SE,X to_entity OE'
-_LOCATE_RDEF_RQL1 = 'SE name %(se)s,ER name %(rt)s,OE name %(oe)s'
-
-def frdef2rql(rschema, subjtype, objtype, props):
- relations, values = frdef_relations_values(objtype, props)
- relations.append(_LOCATE_RDEF_RQL0)
- values.update({'se': str(subjtype), 'rt': str(rschema), 'oe': str(objtype)})
- yield 'INSERT CWAttribute X: %s WHERE %s' % (','.join(relations), _LOCATE_RDEF_RQL1), values
- for rql, values in rdefrelations2rql(rschema, subjtype, objtype, props):
- yield rql + ', EDEF is CWAttribute', values
-
-def nfrdef2rql(rschema, subjtype, objtype, props):
- relations, values = nfrdef_relations_values(objtype, props)
- relations.append(_LOCATE_RDEF_RQL0)
- values.update({'se': str(subjtype), 'rt': str(rschema), 'oe': str(objtype)})
- yield 'INSERT CWRelation X: %s WHERE %s' % (','.join(relations), _LOCATE_RDEF_RQL1), values
- for rql, values in rdefrelations2rql(rschema, subjtype, objtype, props):
- yield rql + ', EDEF is CWRelation', values
-
-def rdefrelations2rql(rschema, subjtype, objtype, props):
- iterators = []
- for constraint in props.constraints:
- iterators.append(constraint2rql(rschema, subjtype, objtype, constraint))
- return chain(*iterators)
-
-def constraint2rql(rschema, subjtype, objtype, constraint):
- values = {'ctname': unicode(constraint.type()),
- 'value': unicode(constraint.serialize()),
- 'rt': str(rschema), 'se': str(subjtype), 'oe': str(objtype)}
- yield 'INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE \
-CT name %(ctname)s, EDEF relation_type ER, EDEF from_entity SE, EDEF to_entity OE, \
-ER name %(rt)s, SE name %(se)s, OE name %(oe)s', values
+def constraints2rql(cstrtypemap, constraints, rdefeid=None):
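+    """yield rql statements inserting the given constraints, attached to the
+    relation definition whose eid is `rdefeid` (or bound later by the caller)
+    """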
+ for constraint in constraints:
+ values = {'ct': cstrtypemap[constraint.type()],
+ 'value': unicode(constraint.serialize()),
+ 'x': rdefeid} # when not specified, will have to be set by the caller
+ yield 'INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE \
+CT eid %(ct)s, EDEF eid %(x)s', values
def _erperms2rql(erschema, groupmap):
@@ -471,7 +434,7 @@
if isinstance(group_or_rqlexpr, basestring):
# group
try:
- yield ('SET X %s_permission Y WHERE Y eid %%(g)s, ' % action,
+ yield ('SET X %s_permission Y WHERE Y eid %%(g)s, X eid %%(x)s' % action,
{'g': groupmap[group_or_rqlexpr]})
except KeyError:
continue
@@ -479,36 +442,24 @@
# rqlexpr
rqlexpr = group_or_rqlexpr
yield ('INSERT RQLExpression E: E expression %%(e)s, E exprtype %%(t)s, '
- 'E mainvars %%(v)s, X %s_permission E WHERE ' % action,
+ 'E mainvars %%(v)s, X %s_permission E WHERE X eid %%(x)s' % action,
{'e': unicode(rqlexpr.expression),
'v': unicode(rqlexpr.mainvars),
't': unicode(rqlexpr.__class__.__name__)})
+# update functions
-def updateeschema2rql(eschema):
+def updateeschema2rql(eschema, eid):
relations, values = eschema_relations_values(eschema)
- values['et'] = eschema.type
- yield 'SET %s WHERE X is CWEType, X name %%(et)s' % ','.join(relations), values
-
-def updaterschema2rql(rschema):
- relations, values = rschema_relations_values(rschema)
- values['rt'] = rschema.type
- yield 'SET %s WHERE X is CWRType, X name %%(rt)s' % ','.join(relations), values
+ values['x'] = eid
+ yield 'SET %s WHERE X eid %%(x)s' % ','.join(relations), values
-def updaterdef2rql(rschema, subjtype=None, objtype=None, props=None):
- genmap = {True: updatefrdef2rql, False: updatenfrdef2rql}
- return __rdef2rql(genmap, rschema, subjtype, objtype, props)
+def updaterschema2rql(rschema, eid):
+ relations, values = rschema_relations_values(rschema)
+ values['x'] = eid
+ yield 'SET %s WHERE X eid %%(x)s' % ','.join(relations), values
-def updatefrdef2rql(rschema, subjtype, objtype, props):
- relations, values = frdef_relations_values(objtype, props)
- values.update({'se': subjtype, 'rt': str(rschema), 'oe': objtype})
- yield 'SET %s WHERE %s, %s, X is CWAttribute' % (','.join(relations),
- _LOCATE_RDEF_RQL0,
- _LOCATE_RDEF_RQL1), values
-
-def updatenfrdef2rql(rschema, subjtype, objtype, props):
- relations, values = nfrdef_relations_values(objtype, props)
- values.update({'se': subjtype, 'rt': str(rschema), 'oe': objtype})
- yield 'SET %s WHERE %s, %s, X is CWRelation' % (','.join(relations),
- _LOCATE_RDEF_RQL0,
- _LOCATE_RDEF_RQL1), values
+def updaterdef2rql(rdef, eid):
+ relations, values = _rdef_values(rdef)
+ values['x'] = eid
+ yield 'SET %s WHERE X eid %%(x)s' % ','.join(relations), values
--- a/server/session.py Fri Mar 05 09:39:34 2010 +0100
+++ b/server/session.py Fri Mar 05 12:18:22 2010 +0100
@@ -585,10 +585,10 @@
def add_operation(self, operation, index=None):
"""add an observer"""
assert self.commit_state != 'commit'
- if index is not None:
+ if index is None:
+ self.pending_operations.append(operation)
+ else:
self.pending_operations.insert(index, operation)
- else:
- self.pending_operations.append(operation)
# querier helpers #########################################################
--- a/server/ssplanner.py Fri Mar 05 09:39:34 2010 +0100
+++ b/server/ssplanner.py Fri Mar 05 12:18:22 2010 +0100
@@ -10,12 +10,104 @@
from copy import copy
from rql.stmts import Union, Select
-from rql.nodes import Constant
+from rql.nodes import Constant, Relation
from cubicweb import QueryError, typed_eid
from cubicweb.schema import VIRTUAL_RTYPES
from cubicweb.rqlrewrite import add_types_restriction
+READ_ONLY_RTYPES = set(('eid', 'has_text', 'is', 'is_instance_of', 'identity'))
+
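+# markers used in insert/update step definitions:
+# - _CONSTANT: the value is already known (constant or eid found in the query)
+# - _FROM_SUBSTEP: the value has to be fetched by the substep select query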
+_CONSTANT = object()
+_FROM_SUBSTEP = object()
+
+def _extract_const_attributes(plan, rqlst, to_build):
+ """add constant values to entity def, mark variables to be selected
+ """
+ to_select = {}
+ for relation in rqlst.main_relations:
+ lhs, rhs = relation.get_variable_parts()
+ rtype = relation.r_type
+ if rtype in READ_ONLY_RTYPES:
+ raise QueryError("can't assign to %s" % rtype)
+ try:
+ edef = to_build[str(lhs)]
+ except KeyError:
+ # lhs var is not to build, should be selected and added as an
+ # object relation
+ edef = to_build[str(rhs)]
+ to_select.setdefault(edef, []).append((rtype, lhs, 1))
+ else:
+ if isinstance(rhs, Constant) and not rhs.uid:
+ # add constant values to entity def
+ value = rhs.eval(plan.args)
+ eschema = edef.e_schema
+ attrtype = eschema.subjrels[rtype].objects(eschema)[0]
+ if attrtype == 'Password' and isinstance(value, unicode):
+ value = value.encode('UTF8')
+ edef[rtype] = value
+ elif to_build.has_key(str(rhs)):
+ # create a relation between two newly created variables
+ plan.add_relation_def((edef, rtype, to_build[rhs.name]))
+ else:
+ to_select.setdefault(edef, []).append( (rtype, rhs, 0) )
+ return to_select
+
+def _extract_eid_consts(plan, rqlst):
+ """return a dict mapping rqlst variable object to their eid if specified in
+ the syntax tree
+ """
+ session = plan.session
+ eschema = session.vreg.schema.eschema
+ if rqlst.where is None:
+ return {}
+ eidconsts = {}
+ neweids = session.transaction_data.get('neweids', ())
+ for rel in rqlst.where.get_nodes(Relation):
+ if rel.r_type == 'eid' and not rel.neged(strict=True):
+ lhs, rhs = rel.get_variable_parts()
+ if isinstance(rhs, Constant):
+ eid = typed_eid(rhs.eval(plan.args))
+ # check read permission here since it may not be done by
+                # the generated select substep if not emitted (e.g. nothing
+ # to be selected)
+ if eid not in neweids:
+ eschema(session.describe(eid)[0]).check_perm(session, 'read')
+ eidconsts[lhs.variable] = eid
+ return eidconsts
+
+def _build_substep_query(select, origrqlst):
+ """Finalize substep select query that should be executed to get proper
+ selection of stuff to insert/update.
+
+ Return None when no query actually needed, else the given select node that
+ will be used as substep query.
+
+ When select has nothing selected, search in origrqlst for restriction that
+ should be considered.
+ """
+ if select.selection:
+ if origrqlst.where is not None:
+ select.set_where(origrqlst.where.copy(select))
+ return select
+ if origrqlst.where is None:
+ return
+ for rel in origrqlst.where.iget_nodes(Relation):
+ # search for a relation which is neither a type restriction (is) nor an
+        # eid specification (a non-negated eid relation with a constant node)
+ if rel.neged(strict=True) or not (
+ rel.is_types_restriction() or
+ (rel.r_type == 'eid'
+ and isinstance(rel.get_variable_parts()[1], Constant))):
+ break
+ else:
+ return
+ select.set_where(origrqlst.where.copy(select))
+ if not select.selection:
+        # nothing selected yet, pick one arbitrarily
+ select.append_selected(rel.children[0].copy(select))
+ return select
+
class SSPlanner(object):
"""SingleSourcePlanner: build execution plan for rql queries
@@ -56,34 +148,37 @@
to_build[var.name] = etype_class(etype)(session)
plan.add_entity_def(to_build[var.name])
# add constant values to entity def, mark variables to be selected
- to_select = plan.relation_definitions(rqlst, to_build)
+ to_select = _extract_const_attributes(plan, rqlst, to_build)
# add necessary steps to add relations and update attributes
step = InsertStep(plan) # insert each entity and its relations
- step.children += self._compute_relation_steps(plan, rqlst.solutions,
- rqlst.where, to_select)
+ step.children += self._compute_relation_steps(plan, rqlst, to_select)
return (step,)
- def _compute_relation_steps(self, plan, solutions, restriction, to_select):
+ def _compute_relation_steps(self, plan, rqlst, to_select):
"""handle the selection of relations for an insert query"""
+ eidconsts = _extract_eid_consts(plan, rqlst)
for edef, rdefs in to_select.items():
# create a select rql st to fetch needed data
select = Select()
eschema = edef.e_schema
- for i in range(len(rdefs)):
- rtype, term, reverse = rdefs[i]
- select.append_selected(term.copy(select))
+ for i, (rtype, term, reverse) in enumerate(rdefs):
+ if getattr(term, 'variable', None) in eidconsts:
+ value = eidconsts[term.variable]
+ else:
+ select.append_selected(term.copy(select))
+ value = _FROM_SUBSTEP
if reverse:
- rdefs[i] = rtype, RelationsStep.REVERSE_RELATION
+ rdefs[i] = (rtype, InsertRelationsStep.REVERSE_RELATION, value)
else:
rschema = eschema.subjrels[rtype]
if rschema.final or rschema.inlined:
- rdefs[i] = rtype, RelationsStep.FINAL
+ rdefs[i] = (rtype, InsertRelationsStep.FINAL, value)
else:
- rdefs[i] = rtype, RelationsStep.RELATION
- if restriction is not None:
- select.set_where(restriction.copy(select))
- step = RelationsStep(plan, edef, rdefs)
- step.children += self._select_plan(plan, select, solutions)
+ rdefs[i] = (rtype, InsertRelationsStep.RELATION, value)
+ step = InsertRelationsStep(plan, edef, rdefs)
+ select = _build_substep_query(select, rqlst)
+ if select is not None:
+ step.children += self._select_plan(plan, select, rqlst.solutions)
yield step
def build_delete_plan(self, plan, rqlst):
@@ -127,37 +222,61 @@
def build_set_plan(self, plan, rqlst):
"""get an execution plan from an SET RQL query"""
- select = Select()
- # extract variables to add to the selection
- selected_index = {}
- index = 0
- relations, attrrelations = [], []
getrschema = self.schema.rschema
- for relation in rqlst.main_relations:
+ select = Select() # potential substep query
+        selectedidx = {} # variable/constant key -> (source marker, value or substep index, result index)
+        attributes = set() # edited attributes
+        updatedefs = [] # definitions of the attributes / relations to update
+ selidx = residx = 0 # substep selection / resulting rset indexes
+ # search for eid const in the WHERE clause
+ eidconsts = _extract_eid_consts(plan, rqlst)
+ # build `updatedefs` describing things to update and add necessary
+ # variables to the substep selection
+ for i, relation in enumerate(rqlst.main_relations):
if relation.r_type in VIRTUAL_RTYPES:
raise QueryError('can not assign to %r relation'
% relation.r_type)
lhs, rhs = relation.get_variable_parts()
- if not lhs.as_string('utf-8') in selected_index:
- select.append_selected(lhs.copy(select))
- selected_index[lhs.as_string('utf-8')] = index
- index += 1
- if not rhs.as_string('utf-8') in selected_index:
- select.append_selected(rhs.copy(select))
- selected_index[rhs.as_string('utf-8')] = index
- index += 1
+ lhskey = lhs.as_string('utf-8')
+ if not lhskey in selectedidx:
+ if lhs.variable in eidconsts:
+ eid = eidconsts[lhs.variable]
+ lhsinfo = (_CONSTANT, eid, residx)
+ else:
+ select.append_selected(lhs.copy(select))
+ lhsinfo = (_FROM_SUBSTEP, selidx, residx)
+ selidx += 1
+ residx += 1
+ selectedidx[lhskey] = lhsinfo
+ else:
+ lhsinfo = selectedidx[lhskey][:-1] + (None,)
+ rhskey = rhs.as_string('utf-8')
+ if not rhskey in selectedidx:
+ if isinstance(rhs, Constant):
+ rhsinfo = (_CONSTANT, rhs.eval(plan.args), residx)
+ elif getattr(rhs, 'variable', None) in eidconsts:
+ eid = eidconsts[rhs.variable]
+ rhsinfo = (_CONSTANT, eid, residx)
+ else:
+ select.append_selected(rhs.copy(select))
+ rhsinfo = (_FROM_SUBSTEP, selidx, residx)
+ selidx += 1
+ residx += 1
+ selectedidx[rhskey] = rhsinfo
+ else:
+ rhsinfo = selectedidx[rhskey][:-1] + (None,)
rschema = getrschema(relation.r_type)
+ updatedefs.append( (lhsinfo, rhsinfo, rschema) )
if rschema.final or rschema.inlined:
- attrrelations.append(relation)
- else:
- relations.append(relation)
- # add step necessary to fetch all selected variables values
- if rqlst.where is not None:
- select.set_where(rqlst.where.copy(select))
- # set distinct to avoid potential duplicate key error
- select.distinct = True
- step = UpdateStep(plan, attrrelations, relations, selected_index)
- step.children += self._select_plan(plan, select, rqlst.solutions)
+ attributes.add(relation.r_type)
+ # the update step
+ step = UpdateStep(plan, updatedefs, attributes)
+ # when necessary, add a substep to fetch the values not yet known
+ select = _build_substep_query(select, rqlst)
+ if select is not None:
+ # set distinct to avoid potential duplicate key error
+ select.distinct = True
+ step.children += self._select_plan(plan, select, rqlst.solutions)
return (step,)
# internal methods ########################################################
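For illustration only (this sketch is not part of the patch): for a query such as SET X in_state S WHERE X eid %(x)s, S name "deactivated", assuming _extract_eid_consts resolves X to a made-up eid 5, the loop above ends up with roughly the following, while the substep query reduces to DISTINCT Any S WHERE S name "deactivated":

    # standalone sketch; the sentinels stand in for this module's _CONSTANT / _FROM_SUBSTEP
    _CONSTANT, _FROM_SUBSTEP = object(), object()
    lhsinfo = (_CONSTANT, 5, 0)      # X: eid taken from the WHERE clause, result column 0
    rhsinfo = (_FROM_SUBSTEP, 0, 1)  # S: column 0 of the substep query, result column 1
    updatedefs = [(lhsinfo, rhsinfo, 'in_state')]  # the rschema is shown by name for brevity
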
@@ -308,7 +427,7 @@
# UPDATE/INSERT/DELETE steps ##################################################
-class RelationsStep(Step):
+class InsertRelationsStep(Step):
"""step consisting in adding attributes/relations to entity defs from a
previous FetchStep
@@ -334,33 +453,38 @@
"""execute this step"""
base_edef = self.edef
edefs = []
- result = self.execute_child()
+ if self.children:
+ result = self.execute_child()
+ else:
+ result = [[]]
for row in result:
# get a new entity definition for this row
edef = copy(base_edef)
# complete this entity def using row values
- for i in range(len(self.rdefs)):
- rtype, rorder = self.rdefs[i]
- if rorder == RelationsStep.FINAL:
- edef[rtype] = row[i]
- elif rorder == RelationsStep.RELATION:
- self.plan.add_relation_def( (edef, rtype, row[i]) )
- edef.querier_pending_relations[(rtype, 'subject')] = row[i]
+ index = 0
+ for rtype, rorder, value in self.rdefs:
+ if value is _FROM_SUBSTEP:
+ value = row[index]
+ index += 1
+ if rorder == InsertRelationsStep.FINAL:
+ edef[rtype] = value
+ elif rorder == InsertRelationsStep.RELATION:
+ self.plan.add_relation_def( (edef, rtype, value) )
+ edef.querier_pending_relations[(rtype, 'subject')] = value
else:
- self.plan.add_relation_def( (row[i], rtype, edef) )
- edef.querier_pending_relations[(rtype, 'object')] = row[i]
+ self.plan.add_relation_def( (value, rtype, edef) )
+ edef.querier_pending_relations[(rtype, 'object')] = value
edefs.append(edef)
self.plan.substitute_entity_def(base_edef, edefs)
return result
-
class InsertStep(Step):
"""step consisting in inserting new entities / relations"""
def execute(self):
"""execute this step"""
for step in self.children:
- assert isinstance(step, RelationsStep)
+ assert isinstance(step, InsertRelationsStep)
step.plan = self.plan
step.execute()
# insert entities first
@@ -408,40 +532,46 @@
definitions and from results fetched in previous step
"""
- def __init__(self, plan, attribute_relations, relations, selected_index):
+ def __init__(self, plan, updatedefs, attributes):
Step.__init__(self, plan)
- self.attribute_relations = attribute_relations
- self.relations = relations
- self.selected_index = selected_index
+ self.updatedefs = updatedefs
+ self.attributes = attributes
def execute(self):
"""execute this step"""
- plan = self.plan
session = self.plan.session
repo = session.repo
edefs = {}
# insert relations
- attributes = set([relation.r_type for relation in self.attribute_relations])
- result = self.execute_child()
- for row in result:
- for relation in self.attribute_relations:
- lhs, rhs = relation.get_variable_parts()
- eid = typed_eid(row[self.selected_index[str(lhs)]])
- try:
- edef = edefs[eid]
- except KeyError:
- edefs[eid] = edef = session.entity_from_eid(eid)
- if isinstance(rhs, Constant):
- # add constant values to entity def
- value = rhs.eval(plan.args)
- edef[relation.r_type] = value
+ if self.children:
+ result = self.execute_child()
+ else:
+ result = [[]]
+ for i, row in enumerate(result):
+ newrow = []
+ for (lhsinfo, rhsinfo, rschema) in self.updatedefs:
+ lhsval = _handle_relterm(lhsinfo, row, newrow)
+ rhsval = _handle_relterm(rhsinfo, row, newrow)
+ if rschema.final or rschema.inlined:
+ eid = typed_eid(lhsval)
+ try:
+ edef = edefs[eid]
+ except KeyError:
+ edefs[eid] = edef = session.entity_from_eid(eid)
+ edef[str(rschema)] = rhsval
else:
- edef[relation.r_type] = row[self.selected_index[str(rhs)]]
- for relation in self.relations:
- subj = row[self.selected_index[str(relation.children[0])]]
- obj = row[self.selected_index[str(relation.children[1])]]
- repo.glob_add_relation(session, subj, relation.r_type, obj)
+ repo.glob_add_relation(session, lhsval, str(rschema), rhsval)
+ result[i] = newrow
# update entities
for eid, edef in edefs.iteritems():
- repo.glob_update_entity(session, edef, attributes)
+ repo.glob_update_entity(session, edef, self.attributes)
return result
+
+def _handle_relterm(info, row, newrow):
+ if info[0] is _CONSTANT:
+ val = info[1]
+ else: # _FROM_SUBSTEP
+ val = row[info[1]]
+ if info[-1] is not None:
+ newrow.append(val)
+ return val
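
As a quick, standalone illustration of how these info tuples are resolved (the sentinels, the helper copy and the sample data below are made up for the example, not taken from the patch):

    # mirrors _handle_relterm above, with local stand-ins for the module sentinels
    _CONSTANT, _FROM_SUBSTEP = object(), object()

    def handle_relterm(info, row, newrow):
        if info[0] is _CONSTANT:
            val = info[1]            # value carried by the plan itself
        else:                        # _FROM_SUBSTEP
            val = row[info[1]]       # value fetched by the substep query
        if info[-1] is not None:     # expose it in the resulting rset row
            newrow.append(val)
        return val

    row, newrow = ['fetched'], []
    assert handle_relterm((_CONSTANT, 1234, 0), row, newrow) == 1234
    assert handle_relterm((_FROM_SUBSTEP, 0, 1), row, newrow) == 'fetched'
    assert newrow == [1234, 'fetched']
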
--- a/server/test/unittest_checkintegrity.py Fri Mar 05 09:39:34 2010 +0100
+++ b/server/test/unittest_checkintegrity.py Fri Mar 05 12:18:22 2010 +0100
@@ -13,10 +13,9 @@
from cubicweb.server.checkintegrity import check
-repo, cnx = init_test_database()
-
class CheckIntegrityTC(TestCase):
def test(self):
+ repo, cnx = init_test_database()
sys.stderr = sys.stdout = StringIO()
try:
check(repo, cnx, ('entities', 'relations', 'text_index', 'metadata'),
@@ -24,6 +23,7 @@
finally:
sys.stderr = sys.__stderr__
sys.stdout = sys.__stdout__
+ repo.shutdown()
if __name__ == '__main__':
unittest_main()
--- a/server/test/unittest_hook.py Fri Mar 05 09:39:34 2010 +0100
+++ b/server/test/unittest_hook.py Fri Mar 05 12:18:22 2010 +0100
@@ -69,6 +69,10 @@
config.bootstrap_cubes()
schema = config.load_schema()
+def teardown_module(*args):
+ global config, schema
+ del config, schema
+
class AddAnyHook(hook.Hook):
__regid__ = 'addany'
category = 'cat1'
--- a/server/test/unittest_ldapuser.py Fri Mar 05 09:39:34 2010 +0100
+++ b/server/test/unittest_ldapuser.py Fri Mar 05 12:18:22 2010 +0100
@@ -370,6 +370,11 @@
LDAPUserSourceTC._init_repo()
repo = LDAPUserSourceTC.repo
+def teardown_module(*args):
+ global repo
+ del repo
+ del RQL2LDAPFilterTC.schema
+
class RQL2LDAPFilterTC(RQLGeneratorTC):
schema = repo.schema
--- a/server/test/unittest_migractions.py Fri Mar 05 09:39:34 2010 +0100
+++ b/server/test/unittest_migractions.py Fri Mar 05 12:18:22 2010 +0100
@@ -14,6 +14,11 @@
from cubicweb.server.sqlutils import SQL_PREFIX
from cubicweb.server.migractions import *
+migrschema = None
+def teardown_module(*args):
+ global migrschema
+ del migrschema
+ del MigrationCommandsTC.origschema
class MigrationCommandsTC(CubicWebTC):
@@ -35,6 +40,13 @@
def _refresh_repo(cls):
super(MigrationCommandsTC, cls)._refresh_repo()
cls.repo.set_schema(deepcopy(cls.origschema), resetvreg=False)
+ # reset migration schema eids
+ for eschema in migrschema.entities():
+ eschema.eid = None
+ for rschema in migrschema.relations():
+ rschema.eid = None
+ for rdef in rschema.rdefs.values():
+ rdef.eid = None
def setUp(self):
CubicWebTC.setUp(self)
@@ -44,7 +56,6 @@
assert self.cnx is self.mh._cnx
assert self.session is self.mh.session, (self.session.id, self.mh.session.id)
-
def test_add_attribute_int(self):
self.failIf('whatever' in self.schema)
self.request().create_entity('Note')
--- a/server/test/unittest_msplanner.py Fri Mar 05 09:39:34 2010 +0100
+++ b/server/test/unittest_msplanner.py Fri Mar 05 12:18:22 2010 +0100
@@ -60,6 +60,11 @@
# keep cnx so it's not garbage collected and the associated session is closed
repo, cnx = init_test_database()
+def teardown_module(*args):
+ global repo, cnx
+ del repo, cnx
+
+
class BaseMSPlannerTC(BasePlannerTC):
"""test planner related feature on a 3-sources repository:
@@ -87,10 +92,10 @@
self.add_source(FakeCardSource, 'cards')
def tearDown(self):
- super(BaseMSPlannerTC, self).tearDown()
# restore hijacked security
self.restore_orig_affaire_security()
self.restore_orig_cwuser_security()
+ super(BaseMSPlannerTC, self).tearDown()
def restore_orig_affaire_security(self):
affreadperms = list(self.schema['Affaire'].permissions['read'])
@@ -1520,15 +1525,11 @@
repo._type_source_cache[999999] = ('Note', 'cards', 999999)
repo._type_source_cache[999998] = ('State', 'system', None)
self._test('INSERT Note X: X in_state S, X type T WHERE S eid %(s)s, N eid %(n)s, N type T',
- [('FetchStep', [('Any T WHERE N eid 999999, N type T, N is Note',
- [{'N': 'Note', 'T': 'String'}])],
- [self.cards], None, {'N.type': 'table0.C0', 'T': 'table0.C0'}, []),
- ('InsertStep',
- [('RelationsStep',
- [('OneFetchStep', [('Any 999998,T WHERE N type T, N is Note',
+ [('InsertStep',
+ [('InsertRelationsStep',
+ [('OneFetchStep', [('Any T WHERE N eid 999999, N type T, N is Note',
[{'N': 'Note', 'T': 'String'}])],
- None, None, [self.system],
- {'N.type': 'table0.C0', 'T': 'table0.C0'}, [])])
+ None, None, [self.cards], {}, [])])
])
],
{'n': 999999, 's': 999998})
@@ -1537,15 +1538,11 @@
repo._type_source_cache[999999] = ('Note', 'cards', 999999)
repo._type_source_cache[999998] = ('State', 'system', None)
self._test('INSERT Note X: X in_state S, X type T, X migrated_from N WHERE S eid %(s)s, N eid %(n)s, N type T',
- [('FetchStep', [('Any T,N WHERE N eid 999999, N type T, N is Note',
- [{'N': 'Note', 'T': 'String'}])],
- [self.cards], None, {'N': 'table0.C1', 'N.type': 'table0.C0', 'T': 'table0.C0'}, []),
- ('InsertStep',
- [('RelationsStep',
- [('OneFetchStep', [('Any 999998,T,N WHERE N type T, N is Note',
+ [('InsertStep',
+ [('InsertRelationsStep',
+ [('OneFetchStep', [('Any T WHERE N eid 999999, N type T, N is Note',
[{'N': 'Note', 'T': 'String'}])],
- None, None, [self.system],
- {'N': 'table0.C1', 'N.type': 'table0.C0', 'T': 'table0.C0'}, [])
+ None, None, [self.cards], {}, [])
])
])
],
@@ -1556,8 +1553,8 @@
repo._type_source_cache[999998] = ('State', 'cards', 999998)
self._test('INSERT Note X: X in_state S, X type T WHERE S eid %(s)s, N eid %(n)s, N type T',
[('InsertStep',
- [('RelationsStep',
- [('OneFetchStep', [('Any 999998,T WHERE N eid 999999, N type T, N is Note',
+ [('InsertRelationsStep',
+ [('OneFetchStep', [('Any T WHERE N eid 999999, N type T, N is Note',
[{'N': 'Note', 'T': 'String'}])],
None, None, [self.cards], {}, [])]
)]
@@ -1569,10 +1566,7 @@
repo._type_source_cache[999998] = ('State', 'system', None)
self._test('INSERT Note X: X in_state S, X type "bla", X migrated_from N WHERE S eid %(s)s, N eid %(n)s',
[('InsertStep',
- [('RelationsStep',
- [('OneFetchStep', [('Any 999998,999999', [{}])],
- None, None, [self.system], {}, [])]
- )]
+ [('InsertRelationsStep', [])]
)],
{'n': 999999, 's': 999998})
@@ -1581,11 +1575,7 @@
repo._type_source_cache[999998] = ('State', 'system', None)
self._test('INSERT Note X: X in_state S, X type "bla", X migrated_from N WHERE S eid %(s)s, N eid %(n)s, A concerne N',
[('InsertStep',
- [('RelationsStep',
- [('OneFetchStep', [('Any 999998,999999 WHERE A concerne 999999, A is Affaire',
- [{'A': 'Affaire'}])],
- None, None, [self.system], {}, [])]
- )]
+ [('InsertRelationsStep', [])]
)],
{'n': 999999, 's': 999998})
@@ -1667,7 +1657,7 @@
# source, states should only be searched in the system source as well
self._test('SET X in_state S WHERE X eid %(x)s, S name "deactivated"',
[('UpdateStep', [
- ('OneFetchStep', [('DISTINCT Any 5,S WHERE S name "deactivated", S is State',
+ ('OneFetchStep', [('DISTINCT Any S WHERE S name "deactivated", S is State',
[{'S': 'State'}])],
None, None, [self.system], {}, []),
]),
@@ -1817,7 +1807,7 @@
[('FetchStep', [('Any Y WHERE Y multisource_rel 999998, Y is Note', [{'Y': 'Note'}])],
[self.cards], None, {'Y': u'table0.C0'}, []),
('UpdateStep',
- [('OneFetchStep', [('DISTINCT Any 999999,Y WHERE Y migrated_from 999998, Y is Note',
+ [('OneFetchStep', [('DISTINCT Any Y WHERE Y migrated_from 999998, Y is Note',
[{'Y': 'Note'}])],
None, None, [self.system],
{'Y': u'table0.C0'}, [])])],
@@ -1844,14 +1834,9 @@
def test_nonregr11(self):
repo._type_source_cache[999999] = ('Bookmark', 'system', 999999)
self._test('SET X bookmarked_by Y WHERE X eid %(x)s, Y login "hop"',
- [('FetchStep',
- [('Any Y WHERE Y login "hop", Y is CWUser', [{'Y': 'CWUser'}])],
- [self.ldap, self.system],
- None, {'Y': 'table0.C0'}, []),
- ('UpdateStep',
- [('OneFetchStep', [('DISTINCT Any 999999,Y WHERE Y is CWUser', [{'Y': 'CWUser'}])],
- None, None, [self.system], {'Y': 'table0.C0'},
- [])]
+ [('UpdateStep',
+ [('OneFetchStep', [('DISTINCT Any Y WHERE Y login "hop", Y is CWUser', [{'Y': 'CWUser'}])],
+ None, None, [self.ldap, self.system], {}, [])]
)],
{'x': 999999})
--- a/server/test/unittest_multisources.py Fri Mar 05 09:39:34 2010 +0100
+++ b/server/test/unittest_multisources.py Fri Mar 05 12:18:22 2010 +0100
@@ -48,7 +48,12 @@
def teardown_module(*args):
PyroRQLSource.get_connection = PyroRQLSource_get_connection
Connection.close = Connection_close
-
+ global repo2, cnx2, repo3, cnx3
+ repo2.shutdown()
+ repo3.shutdown()
+ del repo2, cnx2, repo3, cnx3
+ #del TwoSourcesTC.config.vreg
+ #del TwoSourcesTC.config
class TwoSourcesTC(CubicWebTC):
config = TwoSourcesConfiguration('data')
@@ -130,7 +135,7 @@
cu = cnx.cursor()
rset = cu.execute('Any X WHERE X has_text "card"')
self.assertEquals(len(rset), 5, zip(rset.rows, rset.description))
- cnx.close()
+ Connection_close(cnx)
def test_synchronization(self):
cu = cnx2.cursor()
--- a/server/test/unittest_querier.py Fri Mar 05 09:39:34 2010 +0100
+++ b/server/test/unittest_querier.py Fri Mar 05 12:18:22 2010 +0100
@@ -48,6 +48,11 @@
repo, cnx = init_test_database()
+def teardown_module(*args):
+ global repo, cnx
+ cnx.close()
+ repo.shutdown()
+ del repo, cnx
class UtilsTC(BaseQuerierTC):
@@ -1023,6 +1028,10 @@
{'x': str(eid1), 'y': str(eid2)})
rset = self.execute('Any X, Y WHERE X travaille Y')
self.assertEqual(len(rset.rows), 1)
+ # test adding an already existing relation, but with NOT X rel Y protection
+ self.failIf(self.execute("SET X travaille Y WHERE X eid %(x)s, Y eid %(y)s,"
+ "NOT X travaille Y",
+ {'x': str(eid1), 'y': str(eid2)}))
def test_update_2ter(self):
rset = self.execute("INSERT Personne X, Societe Y: X nom 'bidule', Y nom 'toto'")
--- a/server/test/unittest_repository.py Fri Mar 05 09:39:34 2010 +0100
+++ b/server/test/unittest_repository.py Fri Mar 05 12:18:22 2010 +0100
@@ -21,7 +21,7 @@
from cubicweb import (BadConnectionId, RepositoryError, ValidationError,
UnknownEid, AuthenticationError)
from cubicweb.schema import CubicWebSchema, RQLConstraint
-from cubicweb.dbapi import connect, repo_connect, multiple_connections_unfix
+from cubicweb.dbapi import connect, multiple_connections_unfix
from cubicweb.devtools.testlib import CubicWebTC
from cubicweb.devtools.repotest import tuplify
from cubicweb.server import repository, hook
@@ -38,25 +38,29 @@
"""
def test_fill_schema(self):
- self.repo.schema = CubicWebSchema(self.repo.config.appid)
- self.repo.config._cubes = None # avoid assertion error
- self.repo.config.repairing = True # avoid versions checking
- self.repo.fill_schema()
- table = SQL_PREFIX + 'CWEType'
- namecol = SQL_PREFIX + 'name'
- finalcol = SQL_PREFIX + 'final'
- self.session.set_pool()
- cu = self.session.system_sql('SELECT %s FROM %s WHERE %s is NULL' % (
- namecol, table, finalcol))
- self.assertEquals(cu.fetchall(), [])
- cu = self.session.system_sql('SELECT %s FROM %s WHERE %s=%%(final)s ORDER BY %s'
- % (namecol, table, finalcol, namecol), {'final': 'TRUE'})
- self.assertEquals(cu.fetchall(), [(u'Boolean',), (u'Bytes',),
- (u'Date',), (u'Datetime',),
- (u'Decimal',),(u'Float',),
- (u'Int',),
- (u'Interval',), (u'Password',),
- (u'String',), (u'Time',)])
+ origschema = self.repo.schema
+ try:
+ self.repo.schema = CubicWebSchema(self.repo.config.appid)
+ self.repo.config._cubes = None # avoid assertion error
+ self.repo.config.repairing = True # avoid versions checking
+ self.repo.fill_schema()
+ table = SQL_PREFIX + 'CWEType'
+ namecol = SQL_PREFIX + 'name'
+ finalcol = SQL_PREFIX + 'final'
+ self.session.set_pool()
+ cu = self.session.system_sql('SELECT %s FROM %s WHERE %s is NULL' % (
+ namecol, table, finalcol))
+ self.assertEquals(cu.fetchall(), [])
+ cu = self.session.system_sql('SELECT %s FROM %s WHERE %s=%%(final)s ORDER BY %s'
+ % (namecol, table, finalcol, namecol), {'final': 'TRUE'})
+ self.assertEquals(cu.fetchall(), [(u'Boolean',), (u'Bytes',),
+ (u'Date',), (u'Datetime',),
+ (u'Decimal',),(u'Float',),
+ (u'Int',),
+ (u'Interval',), (u'Password',),
+ (u'String',), (u'Time',)])
+ finally:
+ self.repo.set_schema(origschema)
def test_schema_has_owner(self):
repo = self.repo
@@ -263,6 +267,8 @@
self.fail('something went wrong, thread still alive')
finally:
repository.pyro_unregister(self.repo.config)
+ from logilab.common import pyro_ext
+ pyro_ext._DAEMONS.clear()
def _pyro_client(self, done):
cnx = connect(self.repo.config.appid, u'admin', password='gingkow')
@@ -470,13 +476,6 @@
u'system.version.tag'])
CALLED = []
-class EcritParHook(hook.Hook):
- __regid__ = 'inlinedrelhook'
- __select__ = hook.Hook.__select__ & hook.match_rtype('ecrit_par')
- events = ('before_add_relation', 'after_add_relation',
- 'before_delete_relation', 'after_delete_relation')
- def __call__(self):
- CALLED.append((self.event, self.eidfrom, self.rtype, self.eidto))
class InlineRelHooksTC(CubicWebTC):
"""test relation hooks are called for inlined relations
@@ -491,6 +490,14 @@
def test_inline_relation(self):
"""make sure <event>_relation hooks are called for inlined relation"""
+ class EcritParHook(hook.Hook):
+ __regid__ = 'inlinedrelhook'
+ __select__ = hook.Hook.__select__ & hook.match_rtype('ecrit_par')
+ events = ('before_add_relation', 'after_add_relation',
+ 'before_delete_relation', 'after_delete_relation')
+ def __call__(self):
+ CALLED.append((self.event, self.eidfrom, self.rtype, self.eidto))
+
self.hm.register(EcritParHook)
eidp = self.execute('INSERT Personne X: X nom "toto"')[0][0]
eidn = self.execute('INSERT Note X: X type "T"')[0][0]
--- a/server/test/unittest_rql2sql.py Fri Mar 05 09:39:34 2010 +0100
+++ b/server/test/unittest_rql2sql.py Fri Mar 05 12:18:22 2010 +0100
@@ -37,6 +37,10 @@
schema['state_of'].inlined = False
schema['comments'].inlined = False
+def teardown_module(*args):
+ global config, schema
+ del config, schema
+
PARSER = [
(r"Personne P WHERE P nom 'Zig\'oto';",
'''SELECT _P.cw_eid
--- a/server/test/unittest_rqlannotation.py Fri Mar 05 09:39:34 2010 +0100
+++ b/server/test/unittest_rqlannotation.py Fri Mar 05 12:18:22 2010 +0100
@@ -8,6 +8,11 @@
repo, cnx = init_test_database()
+def teardown_module(*args):
+ global repo, cnx
+ del repo, cnx
+
+
class SQLGenAnnotatorTC(BaseQuerierTC):
repo = repo
--- a/server/test/unittest_schemaserial.py Fri Mar 05 09:39:34 2010 +0100
+++ b/server/test/unittest_schemaserial.py Fri Mar 05 12:18:22 2010 +0100
@@ -15,9 +15,19 @@
config.bootstrap_cubes()
schema = loader.load(config)
+def teardown_module(*args):
+ global schema, config, loader
+ del schema, config, loader
+
from cubicweb.server.schemaserial import *
from cubicweb.server.schemaserial import _erperms2rql as erperms2rql
+cstrtypemap = {'RQLConstraint': 'RQLConstraint_eid',
+ 'SizeConstraint': 'SizeConstraint_eid',
+ 'StaticVocabularyConstraint': 'StaticVocabularyConstraint_eid',
+ 'FormatConstraint': 'FormatConstraint_eid',
+ }
+
class Schema2RQLTC(TestCase):
def test_eschema2rql1(self):
@@ -34,104 +44,124 @@
{'description': u'', 'final': True, 'name': u'String'})])
def test_eschema2rql_specialization(self):
+ # x: None since the eschema eids are None
self.assertListEquals(sorted(specialize2rql(schema)),
- [('SET X specializes ET WHERE X name %(x)s, ET name %(et)s',
- {'et': 'BaseTransition', 'x': 'Transition'}),
- ('SET X specializes ET WHERE X name %(x)s, ET name %(et)s',
- {'et': 'BaseTransition', 'x': 'WorkflowTransition'}),
- ('SET X specializes ET WHERE X name %(x)s, ET name %(et)s',
- {'et': 'Division', 'x': 'SubDivision'}),
- # ('SET X specializes ET WHERE X name %(x)s, ET name %(et)s',
+ [('SET X specializes ET WHERE X eid %(x)s, ET eid %(et)s',
+ {'et': None, 'x': None}),
+ ('SET X specializes ET WHERE X eid %(x)s, ET eid %(et)s',
+ {'et': None, 'x': None}),
+ ('SET X specializes ET WHERE X eid %(x)s, ET eid %(et)s',
+ {'et': None, 'x': None}),
+ # ('SET X specializes ET WHERE X eid %(x)s, ET eid %(et)s',
# {'et': 'File', 'x': 'Image'}),
- ('SET X specializes ET WHERE X name %(x)s, ET name %(et)s',
- {'et': 'Societe', 'x': 'Division'})])
+ ('SET X specializes ET WHERE X eid %(x)s, ET eid %(et)s',
+ {'et': None, 'x': None})])
def test_rschema2rql1(self):
- self.assertListEquals(list(rschema2rql(schema.rschema('relation_type'))),
+ self.assertListEquals(list(rschema2rql(schema.rschema('relation_type'), cstrtypemap)),
[
('INSERT CWRType X: X description %(description)s,X final %(final)s,X fulltext_container %(fulltext_container)s,X inlined %(inlined)s,X name %(name)s,X symmetric %(symmetric)s',
{'description': u'link a relation definition to its relation type', 'symmetric': False, 'name': u'relation_type', 'final' : False, 'fulltext_container': None, 'inlined': True}),
- ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE name %(se)s,ER name %(rt)s,OE name %(oe)s',
- {'rt': 'relation_type', 'description': u'', 'composite': u'object', 'oe': 'CWRType',
- 'ordernum': 1, 'cardinality': u'1*', 'se': 'CWAttribute'}),
- ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE CT name %(ctname)s, EDEF relation_type ER, EDEF from_entity SE, EDEF to_entity OE, ER name %(rt)s, SE name %(se)s, OE name %(oe)s, EDEF is CWRelation',
- {'rt': 'relation_type', 'oe': 'CWRType', 'ctname': u'RQLConstraint', 'se': 'CWAttribute', 'value': u';O;O final TRUE\n'}),
+ ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s',
+ {'se': None, 'rt': None, 'oe': None,
+ 'description': u'', 'composite': u'object', 'cardinality': u'1*',
+ 'ordernum': 1}),
+ ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE CT eid %(ct)s, EDEF eid %(x)s',
+ {'x': None, 'ct': u'RQLConstraint_eid', 'value': u';O;O final TRUE\n'}),
- ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE name %(se)s,ER name %(rt)s,OE name %(oe)s',
- {'rt': 'relation_type', 'description': u'', 'composite': u'object', 'oe': 'CWRType',
- 'ordernum': 1, 'cardinality': u'1*', 'se': 'CWRelation'}),
- ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE CT name %(ctname)s, EDEF relation_type ER, EDEF from_entity SE, EDEF to_entity OE, ER name %(rt)s, SE name %(se)s, OE name %(oe)s, EDEF is CWRelation',
- {'rt': 'relation_type', 'oe': 'CWRType', 'ctname': u'RQLConstraint', 'se': 'CWRelation', 'value': u';O;O final FALSE\n'}),
+ ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s',
+ {'se': None, 'rt': None, 'oe': None,
+ 'description': u'', 'composite': u'object',
+ 'ordernum': 1, 'cardinality': u'1*'}),
+ ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE CT eid %(ct)s, EDEF eid %(x)s',
+ {'x': None, 'ct': u'RQLConstraint_eid', 'value': u';O;O final FALSE\n'}),
])
def test_rschema2rql2(self):
- self.assertListEquals(list(rschema2rql(schema.rschema('add_permission'))),
+ self.assertListEquals(list(rschema2rql(schema.rschema('add_permission'), cstrtypemap)),
[
('INSERT CWRType X: X description %(description)s,X final %(final)s,X fulltext_container %(fulltext_container)s,X inlined %(inlined)s,X name %(name)s,X symmetric %(symmetric)s', {'description': u'', 'symmetric': False, 'name': u'add_permission', 'final': False, 'fulltext_container': None, 'inlined': False}),
- ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE name %(se)s,ER name %(rt)s,OE name %(oe)s',
- {'rt': 'add_permission', 'description': u'groups allowed to add entities/relations of this type', 'composite': None, 'oe': 'CWGroup', 'ordernum': 9999, 'cardinality': u'**', 'se': 'CWEType'}),
- ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE name %(se)s,ER name %(rt)s,OE name %(oe)s',
- {'rt': 'add_permission', 'description': u'rql expression allowing to add entities/relations of this type', 'composite': 'subject', 'oe': 'RQLExpression', 'ordernum': 9999, 'cardinality': u'*?', 'se': 'CWEType'}),
+ ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s',
+ {'se': None, 'rt': None, 'oe': None,
+ 'description': u'groups allowed to add entities/relations of this type', 'composite': None, 'ordernum': 9999, 'cardinality': u'**'}),
+ ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s',
+ {'se': None, 'rt': None, 'oe': None,
+ 'description': u'rql expression allowing to add entities/relations of this type', 'composite': 'subject', 'ordernum': 9999, 'cardinality': u'*?'}),
- ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE name %(se)s,ER name %(rt)s,OE name %(oe)s',
- {'rt': 'add_permission', 'description': u'groups allowed to add entities/relations of this type', 'composite': None, 'oe': 'CWGroup', 'ordernum': 9999, 'cardinality': u'**', 'se': 'CWRelation'}),
- ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE name %(se)s,ER name %(rt)s,OE name %(oe)s',
- {'rt': 'add_permission', 'description': u'rql expression allowing to add entities/relations of this type', 'composite': 'subject', 'oe': 'RQLExpression', 'ordernum': 9999, 'cardinality': u'*?', 'se': 'CWRelation'}),
+ ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s',
+ {'se': None, 'rt': None, 'oe': None,
+ 'description': u'groups allowed to add entities/relations of this type', 'composite': None, 'ordernum': 9999, 'cardinality': u'**'}),
+ ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s',
+ {'se': None, 'rt': None, 'oe': None,
+ 'description': u'rql expression allowing to add entities/relations of this type', 'composite': 'subject', 'ordernum': 9999, 'cardinality': u'*?'}),
])
def test_rschema2rql3(self):
- self.assertListEquals(list(rschema2rql(schema.rschema('cardinality'))),
+ self.assertListEquals(list(rschema2rql(schema.rschema('cardinality'), cstrtypemap)),
[
('INSERT CWRType X: X description %(description)s,X final %(final)s,X fulltext_container %(fulltext_container)s,X inlined %(inlined)s,X name %(name)s,X symmetric %(symmetric)s',
{'description': u'', 'symmetric': False, 'name': u'cardinality', 'final': True, 'fulltext_container': None, 'inlined': False}),
- ('INSERT CWAttribute X: X cardinality %(cardinality)s,X defaultval %(defaultval)s,X description %(description)s,X fulltextindexed %(fulltextindexed)s,X indexed %(indexed)s,X internationalizable %(internationalizable)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE name %(se)s,ER name %(rt)s,OE name %(oe)s',
- {'rt': 'cardinality', 'description': u'subject/object cardinality', 'internationalizable': True, 'fulltextindexed': False, 'ordernum': 5, 'defaultval': None, 'indexed': False, 'cardinality': u'?1', 'oe': 'String', 'se': 'CWAttribute'}),
- ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE CT name %(ctname)s, EDEF relation_type ER, EDEF from_entity SE, EDEF to_entity OE, ER name %(rt)s, SE name %(se)s, OE name %(oe)s, EDEF is CWAttribute',
- {'rt': 'cardinality', 'oe': 'String', 'ctname': u'SizeConstraint', 'se': 'CWAttribute', 'value': u'max=2'}),
- ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE CT name %(ctname)s, EDEF relation_type ER, EDEF from_entity SE, EDEF to_entity OE, ER name %(rt)s, SE name %(se)s, OE name %(oe)s, EDEF is CWAttribute',
- {'rt': 'cardinality', 'oe': 'String', 'ctname': u'StaticVocabularyConstraint', 'se': 'CWAttribute', 'value': u"u'?1', u'11'"}),
+ ('INSERT CWAttribute X: X cardinality %(cardinality)s,X defaultval %(defaultval)s,X description %(description)s,X fulltextindexed %(fulltextindexed)s,X indexed %(indexed)s,X internationalizable %(internationalizable)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s',
+ {'se': None, 'rt': None, 'oe': None,
+ 'description': u'subject/object cardinality', 'internationalizable': True, 'fulltextindexed': False, 'ordernum': 5, 'defaultval': None, 'indexed': False, 'cardinality': u'?1'}),
+ ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE CT eid %(ct)s, EDEF eid %(x)s',
+ {'x': None, 'ct': u'SizeConstraint_eid', 'value': u'max=2'}),
+ ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE CT eid %(ct)s, EDEF eid %(x)s',
+ {'x': None, 'ct': u'StaticVocabularyConstraint_eid', 'value': u"u'?1', u'11'"}),
- ('INSERT CWAttribute X: X cardinality %(cardinality)s,X defaultval %(defaultval)s,X description %(description)s,X fulltextindexed %(fulltextindexed)s,X indexed %(indexed)s,X internationalizable %(internationalizable)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE name %(se)s,ER name %(rt)s,OE name %(oe)s',
- {'rt': 'cardinality', 'description': u'subject/object cardinality', 'internationalizable': True, 'fulltextindexed': False, 'ordernum': 5, 'defaultval': None, 'indexed': False, 'cardinality': u'?1', 'oe': 'String', 'se': 'CWRelation'}),
- ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE CT name %(ctname)s, EDEF relation_type ER, EDEF from_entity SE, EDEF to_entity OE, ER name %(rt)s, SE name %(se)s, OE name %(oe)s, EDEF is CWAttribute',
- {'rt': 'cardinality', 'oe': 'String', 'ctname': u'SizeConstraint', 'se': 'CWRelation', 'value': u'max=2'}),
- ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE CT name %(ctname)s, EDEF relation_type ER, EDEF from_entity SE, EDEF to_entity OE, ER name %(rt)s, SE name %(se)s, OE name %(oe)s, EDEF is CWAttribute',
- {'rt': 'cardinality', 'oe': 'String', 'ctname': u'StaticVocabularyConstraint', 'se': 'CWRelation', 'value': u"u'?*', u'1*', u'+*', u'**', u'?+', u'1+', u'++', u'*+', u'?1', u'11', u'+1', u'*1', u'??', u'1?', u'+?', u'*?'"}),
+ ('INSERT CWAttribute X: X cardinality %(cardinality)s,X defaultval %(defaultval)s,X description %(description)s,X fulltextindexed %(fulltextindexed)s,X indexed %(indexed)s,X internationalizable %(internationalizable)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s',
+ {'se': None, 'rt': None, 'oe': None,
+ 'description': u'subject/object cardinality', 'internationalizable': True, 'fulltextindexed': False, 'ordernum': 5, 'defaultval': None, 'indexed': False, 'cardinality': u'?1'}),
+ ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE CT eid %(ct)s, EDEF eid %(x)s',
+ {'x': None, 'ct': u'SizeConstraint_eid', 'value': u'max=2'}),
+ ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE CT eid %(ct)s, EDEF eid %(x)s',
+ {'x': None, 'ct': u'StaticVocabularyConstraint_eid', 'value': u"u'?*', u'1*', u'+*', u'**', u'?+', u'1+', u'++', u'*+', u'?1', u'11', u'+1', u'*1', u'??', u'1?', u'+?', u'*?'"}),
])
+ def test_rdef2rql(self):
+ self.assertListEquals(list(rdef2rql(schema['description_format'].rdefs[('CWRType', 'String')], cstrtypemap)),
+ [
+ ('INSERT CWAttribute X: X cardinality %(cardinality)s,X defaultval %(defaultval)s,X description %(description)s,X fulltextindexed %(fulltextindexed)s,X indexed %(indexed)s,X internationalizable %(internationalizable)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s',
+ {'se': None, 'rt': None, 'oe': None,
+ 'description': u'', 'internationalizable': True, 'fulltextindexed': False, 'ordernum': 7, 'defaultval': u'text/plain', 'indexed': False, 'cardinality': u'?1'}),
+ ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE CT eid %(ct)s, EDEF eid %(x)s',
+ {'x': None, 'value': u'None', 'ct': 'FormatConstraint_eid'}),
+ ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE CT eid %(ct)s, EDEF eid %(x)s',
+ {'x': None, 'value': u'max=50', 'ct': 'SizeConstraint_eid'})])
+
def test_updateeschema2rql1(self):
- self.assertListEquals(list(updateeschema2rql(schema.eschema('CWAttribute'))),
- [('SET X description %(description)s,X final %(final)s,X name %(name)s WHERE X is CWEType, X name %(et)s',
- {'description': u'define a final relation: link a final relation type from a non final entity to a final entity type. used to build the instance schema', 'et': 'CWAttribute', 'final': False, 'name': u'CWAttribute'}),
+ self.assertListEquals(list(updateeschema2rql(schema.eschema('CWAttribute'), 1)),
+ [('SET X description %(description)s,X final %(final)s,X name %(name)s WHERE X eid %(x)s',
+ {'description': u'define a final relation: link a final relation type from a non final entity to a final entity type. used to build the instance schema', 'x': 1, 'final': False, 'name': u'CWAttribute'}),
])
def test_updateeschema2rql2(self):
- self.assertListEquals(list(updateeschema2rql(schema.eschema('String'))),
- [('SET X description %(description)s,X final %(final)s,X name %(name)s WHERE X is CWEType, X name %(et)s',
- {'description': u'', 'et': 'String', 'final': True, 'name': u'String'})
+ self.assertListEquals(list(updateeschema2rql(schema.eschema('String'), 1)),
+ [('SET X description %(description)s,X final %(final)s,X name %(name)s WHERE X eid %(x)s',
+ {'description': u'', 'x': 1, 'final': True, 'name': u'String'})
])
def test_updaterschema2rql1(self):
- self.assertListEquals(list(updaterschema2rql(schema.rschema('relation_type'))),
+ self.assertListEquals(list(updaterschema2rql(schema.rschema('relation_type'), 1)),
[
- ('SET X description %(description)s,X final %(final)s,X fulltext_container %(fulltext_container)s,X inlined %(inlined)s,X name %(name)s,X symmetric %(symmetric)s WHERE X is CWRType, X name %(rt)s',
- {'rt': 'relation_type', 'symmetric': False,
+ ('SET X description %(description)s,X final %(final)s,X fulltext_container %(fulltext_container)s,X inlined %(inlined)s,X name %(name)s,X symmetric %(symmetric)s WHERE X eid %(x)s',
+ {'x': 1, 'symmetric': False,
'description': u'link a relation definition to its relation type',
'final': False, 'fulltext_container': None, 'inlined': True, 'name': u'relation_type'})
])
def test_updaterschema2rql2(self):
expected = [
- ('SET X description %(description)s,X final %(final)s,X fulltext_container %(fulltext_container)s,X inlined %(inlined)s,X name %(name)s,X symmetric %(symmetric)s WHERE X is CWRType, X name %(rt)s',
- {'rt': 'add_permission', 'symmetric': False,
+ ('SET X description %(description)s,X final %(final)s,X fulltext_container %(fulltext_container)s,X inlined %(inlined)s,X name %(name)s,X symmetric %(symmetric)s WHERE X eid %(x)s',
+ {'x': 1, 'symmetric': False,
'description': u'', 'final': False, 'fulltext_container': None,
'inlined': False, 'name': u'add_permission'})
]
- for i, (rql, args) in enumerate(updaterschema2rql(schema.rschema('add_permission'))):
+ for i, (rql, args) in enumerate(updaterschema2rql(schema.rschema('add_permission'), 1)):
yield self.assertEquals, (rql, args), expected[i]
class Perms2RQLTC(TestCase):
@@ -144,29 +174,29 @@
def test_eperms2rql1(self):
self.assertListEquals([(rql, kwargs) for rql, kwargs in erperms2rql(schema.eschema('CWEType'), self.GROUP_MAPPING)],
- [('SET X read_permission Y WHERE Y eid %(g)s, ', {'g': 0}),
- ('SET X read_permission Y WHERE Y eid %(g)s, ', {'g': 1}),
- ('SET X read_permission Y WHERE Y eid %(g)s, ', {'g': 2}),
- ('SET X add_permission Y WHERE Y eid %(g)s, ', {'g': 0}),
- ('SET X update_permission Y WHERE Y eid %(g)s, ', {'g': 0}),
- ('SET X delete_permission Y WHERE Y eid %(g)s, ', {'g': 0}),
+ [('SET X read_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 0}),
+ ('SET X read_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 1}),
+ ('SET X read_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 2}),
+ ('SET X add_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 0}),
+ ('SET X update_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 0}),
+ ('SET X delete_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 0}),
])
def test_rperms2rql2(self):
self.assertListEquals([(rql, kwargs) for rql, kwargs in erperms2rql(schema.rschema('read_permission').rdef('CWEType', 'CWGroup'), self.GROUP_MAPPING)],
- [('SET X read_permission Y WHERE Y eid %(g)s, ', {'g': 0}),
- ('SET X read_permission Y WHERE Y eid %(g)s, ', {'g': 1}),
- ('SET X read_permission Y WHERE Y eid %(g)s, ', {'g': 2}),
- ('SET X add_permission Y WHERE Y eid %(g)s, ', {'g': 0}),
- ('SET X delete_permission Y WHERE Y eid %(g)s, ', {'g': 0}),
+ [('SET X read_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 0}),
+ ('SET X read_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 1}),
+ ('SET X read_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 2}),
+ ('SET X add_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 0}),
+ ('SET X delete_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 0}),
])
def test_rperms2rql3(self):
self.assertListEquals([(rql, kwargs) for rql, kwargs in erperms2rql(schema.rschema('name').rdef('CWEType', 'String'), self.GROUP_MAPPING)],
- [('SET X read_permission Y WHERE Y eid %(g)s, ', {'g': 0}),
- ('SET X read_permission Y WHERE Y eid %(g)s, ', {'g': 1}),
- ('SET X read_permission Y WHERE Y eid %(g)s, ', {'g': 2}),
- ('SET X update_permission Y WHERE Y eid %(g)s, ', {'g': 0}),
+ [('SET X read_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 0}),
+ ('SET X read_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 1}),
+ ('SET X read_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 2}),
+ ('SET X update_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 0}),
])
#def test_perms2rql(self):
--- a/server/test/unittest_security.py Fri Mar 05 09:39:34 2010 +0100
+++ b/server/test/unittest_security.py Fri Mar 05 12:18:22 2010 +0100
@@ -456,8 +456,8 @@
rset = cu.execute('CWUser X WHERE X eid %(x)s', {'x': anon.eid}, 'x')
self.assertEquals(rset.rows, [[anon.eid]])
# but can't modify it
- cu.execute('SET X login "toto" WHERE X eid %(x)s', {'x': anon.eid})
- self.assertRaises(Unauthorized, cnx.commit)
+ self.assertRaises(Unauthorized,
+ cu.execute, 'SET X login "toto" WHERE X eid %(x)s', {'x': anon.eid})
def test_in_group_relation(self):
cnx = self.login('iaminusersgrouponly')
--- a/server/test/unittest_ssplanner.py Fri Mar 05 09:39:34 2010 +0100
+++ b/server/test/unittest_ssplanner.py Fri Mar 05 12:18:22 2010 +0100
@@ -12,6 +12,10 @@
# keep cnx so it's not garbage collected and the associated session closed
repo, cnx = init_test_database()
+def teardown_module(*args):
+ global repo, cnx
+ del repo, cnx
+
class SSPlannerTC(BasePlannerTC):
repo = repo
_test = test_plan
--- a/test/unittest_dbapi.py Fri Mar 05 09:39:34 2010 +0100
+++ b/test/unittest_dbapi.py Fri Mar 05 12:18:22 2010 +0100
@@ -35,8 +35,8 @@
self.assertEquals(cnx.user(None).login, 'anon')
self.assertEquals(cnx.describe(1), (u'CWGroup', u'system', None))
self.restore_connection() # proper way to close cnx
- self.assertRaises(ConnectionError, cnx.user, None)
- self.assertRaises(ConnectionError, cnx.describe, 1)
+ self.assertRaises(ProgrammingError, cnx.user, None)
+ self.assertRaises(ProgrammingError, cnx.describe, 1)
def test_session_data_api(self):
cnx = self.login('anon')
@@ -64,9 +64,9 @@
cnx.set_shared_data('data', 4)
self.assertEquals(cnx.get_shared_data('data'), 4)
self.restore_connection() # proper way to close cnx
- self.assertRaises(ConnectionError, cnx.check)
- self.assertRaises(ConnectionError, cnx.set_shared_data, 'data', 0)
- self.assertRaises(ConnectionError, cnx.get_shared_data, 'data')
+ self.assertRaises(ProgrammingError, cnx.check)
+ self.assertRaises(ProgrammingError, cnx.set_shared_data, 'data', 0)
+ self.assertRaises(ProgrammingError, cnx.get_shared_data, 'data')
if __name__ == '__main__':
from logilab.common.testlib import unittest_main