# HG changeset patch # User Sylvain Thénault # Date 1245756998 -7200 # Node ID 51c84d585456724f9615f7becc96cc4fd0e8baea # Parent 0072247db207565757a405fd03744aa04261aa8d# Parent d4c2fb633062780fa20e1cc320f3746e49701561 backport stable branch diff -r 0072247db207 -r 51c84d585456 .hgtags --- a/.hgtags Tue Jun 23 13:31:35 2009 +0200 +++ b/.hgtags Tue Jun 23 13:36:38 2009 +0200 @@ -40,3 +40,4 @@ 4003d24974f15f17bd03b7efd6a5047cad4e4c41 cubicweb-debian-version-3_2_3-1 2d7d3062ca03d4b4144100013dc4ab7f9d9cb25e cubicweb-version-3_3_0 07214e923e75c8f0490e609e9bee0f4964b87114 cubicweb-debian-version-3_3_0-1 +a356da3e725bfcb59d8b48a89d04be05ea261fd3 3.3.1 diff -r 0072247db207 -r 51c84d585456 __pkginfo__.py --- a/__pkginfo__.py Tue Jun 23 13:31:35 2009 +0200 +++ b/__pkginfo__.py Tue Jun 23 13:36:38 2009 +0200 @@ -7,7 +7,7 @@ distname = "cubicweb" modname = "cubicweb" -numversion = (3, 3, 0) +numversion = (3, 3, 1) version = '.'.join(str(num) for num in numversion) license = 'LGPL v2' diff -r 0072247db207 -r 51c84d585456 appobject.py --- a/appobject.py Tue Jun 23 13:31:35 2009 +0200 +++ b/appobject.py Tue Jun 23 13:36:38 2009 +0200 @@ -25,8 +25,9 @@ class Cache(dict): def __init__(self): super(Cache, self).__init__() - self.cache_creation_date = None - self.latest_cache_lookup = datetime.now() + _now = datetime.now() + self.cache_creation_date = _now + self.latest_cache_lookup = _now CACHE_REGISTRY = {} @@ -127,8 +128,7 @@ if cachename in CACHE_REGISTRY: cache = CACHE_REGISTRY[cachename] else: - cache = Cache() - CACHE_REGISTRY[cachename] = cache + cache = CACHE_REGISTRY[cachename] = Cache() _now = datetime.now() if _now > cache.latest_cache_lookup + ONESECOND: ecache = self.req.execute('Any C,T WHERE C is CWCache, C name %(name)s, C timestamp T', diff -r 0072247db207 -r 51c84d585456 common/migration.py --- a/common/migration.py Tue Jun 23 13:31:35 2009 +0200 +++ b/common/migration.py Tue Jun 23 13:36:38 2009 +0200 @@ -17,6 +17,8 @@ from logilab.common.decorators import cached from logilab.common.configuration import REQUIRED, read_old_config +from cubicweb import ConfigurationError + def migration_files(config, toupgrade): """return an orderer list of path of scripts to execute to upgrade @@ -328,18 +330,18 @@ self.config.add_cubes(newcubes) return newcubes - def cmd_remove_cube(self, cube): + def cmd_remove_cube(self, cube, removedeps=False): + if removedeps: + toremove = self.config.expand_cubes([cube]) + else: + toremove = (cube,) origcubes = self.config._cubes - basecubes = list(origcubes) - for pkg in self.config.expand_cubes([cube]): - try: - basecubes.remove(pkg) - except ValueError: - continue + basecubes = [c for c in origcubes if not c in toremove] self.config._cubes = tuple(self.config.expand_cubes(basecubes)) removed = [p for p in origcubes if not p in self.config._cubes] - assert cube in removed, \ - "can't remove cube %s, used as a dependancy" % cube + if not cube in removed: + raise ConfigurationError("can't remove cube %s, " + "used as a dependency" % cube) return removed def rewrite_configuration(self): diff -r 0072247db207 -r 51c84d585456 common/mixins.py --- a/common/mixins.py Tue Jun 23 13:31:35 2009 +0200 +++ b/common/mixins.py Tue Jun 23 13:36:38 2009 +0200 @@ -191,11 +191,18 @@ return rset.get_entity(0, 0) return None - def change_state(self, stateeid, trcomment=None, trcommentformat=None): + def change_state(self, state, trcomment=None, trcommentformat=None): """change the entity's state according to a state defined in given parameters """ - assert not isinstance(stateeid, 
basestring), 'change_state wants a state eid' + if isinstance(state, basestring): + state = self.wf_state(state) + assert state is not None, 'not a %s state: %s' % (self.id, state) + if hasattr(state, 'eid'): + stateeid = state.eid + else: + stateeid = state + stateeid = typed_eid(stateeid) if trcomment: self.req.set_shared_data('trcomment', trcomment) if trcommentformat: diff -r 0072247db207 -r 51c84d585456 cwctl.py --- a/cwctl.py Tue Jun 23 13:31:35 2009 +0200 +++ b/cwctl.py Tue Jun 23 13:36:38 2009 +0200 @@ -273,7 +273,8 @@ cwcfg.creating = True config = cwcfg.config_for(appid, configname) config.set_language = False - config.init_cubes(config.expand_cubes(cubes)) + cubes = config.expand_cubes(cubes) + config.init_cubes(cubes) helper = self.config_helper(config) # check the cube exists try: diff -r 0072247db207 -r 51c84d585456 dbapi.py --- a/dbapi.py Tue Jun 23 13:31:35 2009 +0200 +++ b/dbapi.py Tue Jun 23 13:36:38 2009 +0200 @@ -176,7 +176,7 @@ except KeyError: # this occurs usually during test execution self._ = self.__ = unicode - self.debug('request language: %s', self.lang) + self.debug('request default language: %s', self.lang) def decorate_rset(self, rset): rset.vreg = self.vreg diff -r 0072247db207 -r 51c84d585456 debian/changelog --- a/debian/changelog Tue Jun 23 13:31:35 2009 +0200 +++ b/debian/changelog Tue Jun 23 13:36:38 2009 +0200 @@ -1,3 +1,9 @@ +cubicweb (3.3.1-1) unstable; urgency=low + + * new upstream release + + -- Aurélien Campéas Mon, 22 Jun 2009 12:00:00 +0200 + cubicweb (3.3.0-1) unstable; urgency=low * new upstream release diff -r 0072247db207 -r 51c84d585456 devtools/devctl.py --- a/devtools/devctl.py Tue Jun 23 13:31:35 2009 +0200 +++ b/devtools/devctl.py Tue Jun 23 13:36:38 2009 +0200 @@ -572,22 +572,25 @@ req = requests.setdefault(rql, []) time.strip() chunks = time.split() + clocktime = float(chunks[0][1:]) cputime = float(chunks[-3]) - req.append( cputime ) + req.append( (clocktime, cputime) ) except Exception, exc: sys.stderr.write('Line %s: %s (%s)\n' % (lineno, exc, line)) stat = [] for rql, times in requests.items(): - stat.append( (sum(times), len(times), rql) ) + stat.append( (sum(time[0] for time in times), + sum(time[1] for time in times), + len(times), rql) ) stat.sort() stat.reverse() - total_time = sum(time for time, occ, rql in stat)*0.01 - print 'Percentage;Cumulative Time;Occurences;Query' - for time, occ, rql in stat: - print '%.2f;%.2f;%s;%s' % (time/total_time, time, occ, rql) + total_time = sum(clocktime for clocktime, cputime, occ, rql in stat)*0.01 + print 'Percentage;Cumulative Time (clock);Cumulative Time (CPU);Occurences;Query' + for clocktime, cputime, occ, rql in stat: + print '%.2f;%.2f;%.2f;%s;%s' % (clocktime/total_time, clocktime, cputime, occ, rql) register_commands((UpdateCubicWebCatalogCommand, UpdateTemplateCatalogCommand, diff -r 0072247db207 -r 51c84d585456 devtools/fake.py --- a/devtools/fake.py Tue Jun 23 13:31:35 2009 +0200 +++ b/devtools/fake.py Tue Jun 23 13:36:38 2009 +0200 @@ -188,12 +188,12 @@ self.user = user or FakeUser() self.is_internal_session = False self.is_super_session = self.user.eid == -1 - self._query_data = {} + self.transaction_data = {} def execute(self, *args): pass def commit(self, *args): - self._query_data.clear() + self.transaction_data.clear() def close(self, *args): pass def system_sql(self, sql, args=None): diff -r 0072247db207 -r 51c84d585456 devtools/fill.py --- a/devtools/fill.py Tue Jun 23 13:31:35 2009 +0200 +++ b/devtools/fill.py Tue Jun 23 13:36:38 2009 +0200 @@ -10,7 +10,7 @@ from 
random import randint, choice from copy import deepcopy -from datetime import datetime, date, timedelta +from datetime import datetime, date, time#timedelta from decimal import Decimal from yams.constraints import (SizeConstraint, StaticVocabularyConstraint, @@ -163,7 +163,7 @@ def generate_time(self, attrname, index): """generates a random time (format is ' HH:MM')""" - return timedelta(0, 11, index%60) #'11:%02d' % (index % 60) + return time(11, index%60) #'11:%02d' % (index % 60) def generate_datetime(self, attrname, index): """generates a random date (format is 'yyyy-mm-dd HH:MM')""" diff -r 0072247db207 -r 51c84d585456 devtools/migrtest.py --- a/devtools/migrtest.py Tue Jun 23 13:31:35 2009 +0200 +++ b/devtools/migrtest.py Tue Jun 23 13:36:38 2009 +0200 @@ -42,14 +42,14 @@ from logilab.common.shellutils import cp, rm from cubicweb.toolsutils import read_config -from cubicweb.server.serverctl import generate_sources_file +from cubicweb.server.utils import generate_sources_file # XXXX use db-copy instead # test environment configuration chrootpath = '/sandbox/cubicwebtest' tmpdbhost = 'crater' -tmpdbuser = 'syt' +tmpdbuser = 'syt' tmpdbpasswd = 'syt' def play_migration(applhome, applhost='', sudo=False): diff -r 0072247db207 -r 51c84d585456 devtools/repotest.py --- a/devtools/repotest.py Tue Jun 23 13:31:35 2009 +0200 +++ b/devtools/repotest.py Tue Jun 23 13:36:38 2009 +0200 @@ -11,6 +11,8 @@ from pprint import pprint +from logilab.common.decorators import clear_cache + def tuplify(list): for i in range(len(list)): if type(list[i]) is not type(()): @@ -115,6 +117,9 @@ def tearDown(self): ExecutionPlan._check_permissions = _orig_check_permissions rqlannotation._select_principal = _orig_select_principal + + def set_debug(self, debug): + set_debug(debug) def _prepare(self, rql): #print '******************** prepare', rql @@ -203,6 +208,34 @@ class BasePlannerTC(BaseQuerierTC): + def setup(self): + clear_cache(self.repo, 'rel_type_sources') + clear_cache(self.repo, 'rel_type_sources') + clear_cache(self.repo, 'can_cross_relation') + clear_cache(self.repo, 'is_multi_sources_relation') + # XXX source_defs + self.o = self.repo.querier + self.session = self.repo._sessions.values()[0] + self.pool = self.session.set_pool() + self.schema = self.o.schema + self.sources = self.o._repo.sources + self.system = self.sources[-1] + self.newsources = 0 + do_monkey_patch() + + def add_source(self, sourcecls, uri): + self.sources.append(sourcecls(self.repo, self.o.schema, + {'uri': uri})) + self.repo.sources_by_uri[uri] = self.sources[-1] + setattr(self, uri, self.sources[-1]) + self.newsources += 1 + + def tearDown(self): + while self.newsources: + source = self.sources.pop(-1) + del self.repo.sources_by_uri[source.uri] + self.newsources -= 1 + undo_monkey_patch() def _prepare_plan(self, rql, kwargs=None): rqlst = self.o.parse(rql, annotate=True) diff -r 0072247db207 -r 51c84d585456 devtools/testlib.py diff -r 0072247db207 -r 51c84d585456 doc/book/en/annexes/faq.rst --- a/doc/book/en/annexes/faq.rst Tue Jun 23 13:31:35 2009 +0200 +++ b/doc/book/en/annexes/faq.rst Tue Jun 23 13:36:38 2009 +0200 @@ -273,14 +273,26 @@ --------------------------------------------------------------- You just need to put the appropriate context manager around view/component - selection: :: + selection (one standard place is in vreg.py): :: + + def possible_objects(self, registry, *args, **kwargs): + """return an iterator on possible objects in a registry for this result set - from cubicweb.common.selectors import 
traced_selection - with traced_selection(): - comp = self.vreg.select_object('contentnavigation', 'wfhistory', - self.req, rset, context='navcontentbottom') + actions returned are classes, not instances + """ + from cubicweb.selectors import traced_selection + with traced_selection(): + for vobjects in self.registry(registry).values(): + try: + yield self.select(vobjects, *args, **kwargs) + except NoSelectableObject: + continue - This will yield additional WARNINGs, like this: :: + Don't forget the 'from __future__ import with_statement' at the + module top-level. + + This will yield additional WARNINGs, like this: + :: 2009-01-09 16:43:52 - (cubicweb.selectors) WARNING: selector one_line_rset returned 0 for @@ -351,4 +363,4 @@ $ psql mydb mydb=> update cw_cwuser set cw_upassword='qHO8282QN5Utg' where cw_login='joe'; - UPDATE 1 \ No newline at end of file + UPDATE 1 diff -r 0072247db207 -r 51c84d585456 doc/book/en/development/devrepo/sessions.rst --- a/doc/book/en/development/devrepo/sessions.rst Tue Jun 23 13:31:35 2009 +0200 +++ b/doc/book/en/development/devrepo/sessions.rst Tue Jun 23 13:36:38 2009 +0200 @@ -3,7 +3,24 @@ Sessions ======== +There are three kinds of sessions. + +* user sessions are the most common: they are related to users and + carry security checks coming with user credentials + +* super sessions are children of ordinary user sessions and allow one to + bypass security checks (they are created by calling unsafe_execute + on a user session); this is often convenient in hooks which may + touch data that is not directly updatable by users + +* internal sessions have all the powers; they are used in only a + few situations where you don't already have an adequate session at + hand, like: user authentication, data synchronisation in + multi-source contexts + +Do not confuse the session type with its connection mode, for +instance: 'in memory' or 'pyro'.
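To make the super-session case concrete, here is a minimal sketch of a hook body (the hook itself and its RQL are hypothetical; execute and unsafe_execute are the session methods used elsewhere in this changeset)::

    from datetime import datetime

    def after_add_entity(session, entity):
        # plain execute(): runs with the security checks of the user session
        session.execute('Any X WHERE X eid %(x)s', {'x': entity.eid}, 'x')
        # unsafe_execute(): delegates to a super session child of this user
        # session, bypassing security checks; convenient when the hook must
        # touch data the user may not update directly
        session.unsafe_execute('SET X modification_date %(d)s WHERE X eid %(x)s',
                               {'x': entity.eid, 'd': datetime.now()}, 'x')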
+ [WRITE ME] * authentication and management of sessions - diff -r 0072247db207 -r 51c84d585456 entities/lib.py --- a/entities/lib.py Tue Jun 23 13:31:35 2009 +0200 +++ b/entities/lib.py Tue Jun 23 13:36:38 2009 +0200 @@ -141,4 +141,7 @@ {'t': datetime.now(), 'x': self.eid}, 'x') def valid(self, date): - return date < self.timestamp + if date: + return date > self.timestamp + return False + diff -r 0072247db207 -r 51c84d585456 entity.py --- a/entity.py Tue Jun 23 13:31:35 2009 +0200 +++ b/entity.py Tue Jun 23 13:36:38 2009 +0200 @@ -405,7 +405,7 @@ path = etype.lower() if mainattr != 'eid': value = getattr(self, mainattr) - if value is None: + if value is None or unicode(value) == u'': mainattr = 'eid' path += '/eid' elif needcheck: diff -r 0072247db207 -r 51c84d585456 goa/__init__.py --- a/goa/__init__.py Tue Jun 23 13:31:35 2009 +0200 +++ b/goa/__init__.py Tue Jun 23 13:36:38 2009 +0200 @@ -101,36 +101,36 @@ # activate entity caching on the server side def set_entity_cache(self, entity): - self._query_data.setdefault('_eid_cache', {})[entity.eid] = entity + self.transaction_data.setdefault('_eid_cache', {})[entity.eid] = entity def entity_cache(self, eid): - return self._query_data['_eid_cache'][eid] + return self.transaction_data['_eid_cache'][eid] def drop_entity_cache(self, eid=None): if eid is None: - self._query_data['_eid_cache'] = {} - elif '_eid_cache' in self._query_data: - self._query_data['_eid_cache'].pop(eid, None) + self.transaction_data['_eid_cache'] = {} + elif '_eid_cache' in self.transaction_data: + self.transaction_data['_eid_cache'].pop(eid, None) def datastore_get(self, key): if isinstance(key, basestring): key = Key(key) try: - gentity = self._query_data['_key_cache'][key] + gentity = self.transaction_data['_key_cache'][key] #self.critical('cached %s', gentity) except KeyError: gentity = Get(key) #self.critical('Get %s', gentity) - self._query_data.setdefault('_key_cache', {})[key] = gentity + self.transaction_data.setdefault('_key_cache', {})[key] = gentity return gentity def clear_datastore_cache(self, key=None): if key is None: - self._query_data['_key_cache'] = {} + self.transaction_data['_key_cache'] = {} else: if isinstance(key, basestring): key = Key(key) - self._query_data['_key_cache'].pop(key, None) + self.transaction_data['_key_cache'].pop(key, None) from cubicweb.server.session import Session Session.set_entity_cache = set_entity_cache diff -r 0072247db207 -r 51c84d585456 goa/gaesource.py --- a/goa/gaesource.py Tue Jun 23 13:31:35 2009 +0200 +++ b/goa/gaesource.py Tue Jun 23 13:36:38 2009 +0200 @@ -47,7 +47,7 @@ asession.user.clear_related_cache(rtype, 'object') def _mark_modified(session, gaeentity): - modified = session.query_data('modifiedentities', {}, setdefault=True) + modified = session.transaction_data.setdefault('modifiedentities', {}) modified[str(gaeentity.key())] = gaeentity DatastorePutOp(session) @@ -98,8 +98,8 @@ return 0 def _put_entities(self): - pending = self.session.query_data('pendingeids', ()) - modified = self.session.query_data('modifiedentities', {}) + pending = self.session.transaction_data.get('pendingeids', ()) + modified = self.session.transaction_data.get('modifiedentities', {}) for eid, gaeentity in modified.iteritems(): assert not eid in pending Put(gaeentity) @@ -263,7 +263,7 @@ Delete(key) session.clear_datastore_cache(key) session.drop_entity_cache(eid) - session.query_data('modifiedentities', {}).pop(eid, None) + session.transaction_data.get('modifiedentities', {}).pop(eid, None) def add_relation(self, session, 
subject, rtype, object): """add a relation to the source""" @@ -275,7 +275,7 @@ def delete_relation(self, session, subject, rtype, object): """delete a relation from the source""" gaesubj, gaeobj, cards = _rinfo(session, subject, rtype, object) - pending = session.query_data('pendingeids', set(), setdefault=True) + pending = session.transaction_data.setdefault('pendingeids', set()) if not subject in pending: _rdel(session, gaesubj, gaeobj.key(), 's_' + rtype, cards[0]) if not object in pending: diff -r 0072247db207 -r 51c84d585456 interfaces.py --- a/interfaces.py Tue Jun 23 13:31:35 2009 +0200 +++ b/interfaces.py Tue Jun 23 13:36:38 2009 +0200 @@ -197,6 +197,14 @@ """interface for items that do have a begin date 'start' and an end date 'stop' """ + @property + def start(self): + """return start date""" + + @property + def stop(self): + """return stop date""" + class ICalendarViews(Interface): """calendar views interface""" def matching_dates(self, begin, end): diff -r 0072247db207 -r 51c84d585456 schema.py --- a/schema.py Tue Jun 23 13:31:35 2009 +0200 +++ b/schema.py Tue Jun 23 13:36:38 2009 +0200 @@ -897,9 +897,9 @@ def _load_definition_files(self, cubes=None): # bootstraping, ignore cubes - for filepath in self.include_schema_files('bootstrap'): - self.info('loading %s', filepath) - self.handle_file(filepath) + filepath = join(self.lib_directory, 'bootstrap.py') + self.info('loading %s', filepath) + self.handle_file(filepath) def unhandled_file(self, filepath): """called when a file without handler associated has been found""" @@ -923,10 +923,10 @@ return super(CubicWebSchemaLoader, self).load(config, path=path, **kwargs) def _load_definition_files(self, cubes): - for filepath in (self.include_schema_files('bootstrap') - + self.include_schema_files('base') - + self.include_schema_files('workflow') - + self.include_schema_files('Bookmark')): + for filepath in (join(self.lib_directory, 'bootstrap.py'), + join(self.lib_directory, 'base.py'), + join(self.lib_directory, 'workflow.py'), + join(self.lib_directory, 'Bookmark.py')): self.info('loading %s', filepath) self.handle_file(filepath) for cube in cubes: diff -r 0072247db207 -r 51c84d585456 schemas/Bookmark.py --- a/schemas/Bookmark.py Tue Jun 23 13:31:35 2009 +0200 +++ b/schemas/Bookmark.py Tue Jun 23 13:36:38 2009 +0200 @@ -1,4 +1,4 @@ -""" +"""the Bookmark entity type for internal links :organization: Logilab :copyright: 2001-2009 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
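Returning to the calendar interface earlier in this changeset: the start/stop properties only document the expected attributes, and a concrete entity class supplies the values. A minimal sketch (the Task entity and its date attributes are hypothetical, and the interface is assumed to be cubicweb's ICalendarable)::

    from cubicweb.interfaces import ICalendarable
    from cubicweb.entities import AnyEntity

    class Task(AnyEntity):
        """hypothetical entity type spanning a period of time"""
        id = 'Task'
        __implements__ = AnyEntity.__implements__ + (ICalendarable,)

        @property
        def start(self):
            return self.start_date  # hypothetical schema attribute

        @property
        def stop(self):
            return self.end_date  # hypothetical schema attribute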
@@ -11,14 +11,15 @@ from yams.buildobjs import EntityType, RelationType, String class Bookmark(EntityType): - """define an entity type, used to build the application schema""" + """bookmarks are used to have user's specific internal links""" permissions = { 'read': ('managers', 'users', 'guests',), 'add': ('managers', 'users',), 'delete': ('managers', 'owners',), 'update': ('managers', 'owners',), } - title = String(required=True, maxsize=128) + + title = String(required=True, maxsize=128, internationalizable=True) path = String(maxsize=512, required=True, description=_("relative url of the bookmarked page")) diff -r 0072247db207 -r 51c84d585456 server/hookhelper.py --- a/server/hookhelper.py Tue Jun 23 13:31:35 2009 +0200 +++ b/server/hookhelper.py Tue Jun 23 13:36:38 2009 +0200 @@ -100,7 +100,8 @@ relation hooks, the relation may has been deleted at this point, so we have handle that """ - for eidfrom, rtype, eidto in reversed(session.query_data('pendingrelations', ())): + pending = session.transaction_data.get('pendingrelations', ()) + for eidfrom, rtype, eidto in reversed(pending): if rtype == 'in_state' and eidfrom == eid: rset = session.execute('Any S,N WHERE S eid %(x)s, S name N', {'x': eidto}, 'x') diff -r 0072247db207 -r 51c84d585456 server/hooks.py --- a/server/hooks.py Tue Jun 23 13:31:35 2009 +0200 +++ b/server/hooks.py Tue Jun 23 13:36:38 2009 +0200 @@ -18,7 +18,8 @@ from cubicweb.server.repository import FTIndexEntityOp def relation_deleted(session, eidfrom, rtype, eidto): - session.add_query_data('pendingrelations', (eidfrom, rtype, eidto)) + session.transaction_data.setdefault('pendingrelations', []).append( + (eidfrom, rtype, eidto)) # base meta-data handling ##################################################### @@ -41,7 +42,7 @@ class SetCreatorOp(PreCommitOperation): def precommit_event(self): - if self.eid in self.session.query_data('pendingeids', ()): + if self.eid in self.session.transaction_data.get('pendingeids', ()): # entity have been created and deleted in the same transaction return ueid = self.session.user.eid @@ -138,7 +139,7 @@ def precommit_event(self): session = self.session - if not self.eid in session.query_data('pendingeids', ()): + if not self.eid in session.transaction_data.get('pendingeids', ()): etype = session.describe(self.eid)[0] session.unsafe_execute('DELETE %s X WHERE X eid %%(x)s, NOT %s' % (etype, self.relation), @@ -166,7 +167,7 @@ eidfrom, rtype, eidto = self.rdef # first check related entities have not been deleted in the same # transaction - pending = self.session.query_data('pendingeids', ()) + pending = self.session.transaction_data.get('pendingeids', ()) if eidfrom in pending: return if eidto in pending: @@ -217,7 +218,7 @@ def precommit_event(self): # recheck pending eids - if self.eid in self.session.query_data('pendingeids', ()): + if self.eid in self.session.transaction_data.get('pendingeids', ()): return if self.session.unsafe_execute(*self._rql()).rowcount < 1: etype = self.session.describe(self.eid)[0] @@ -274,7 +275,7 @@ def cardinalitycheck_before_del_relation(session, eidfrom, rtype, eidto): """check cardinalities are satisfied""" card = rproperty(session, rtype, eidfrom, eidto, 'cardinality') - pendingeids = session.query_data('pendingeids', ()) + pendingeids = session.transaction_data.get('pendingeids', ()) if card[0] in '1+' and not eidfrom in pendingeids: checkrel_if_necessary(session, CheckSRelationOp, rtype, eidfrom) if card[1] in '1+' and not eidto in pendingeids: @@ -423,7 +424,7 @@ {'name': str(entity.e_schema)}) # 
if there is an initial state and the entity's state is not set, # use the initial state as a default state - pendingeids = session.query_data('pendingeids', ()) + pendingeids = session.transaction_data.get('pendingeids', ()) if rset and not entity.eid in pendingeids and not entity.in_state: session.unsafe_execute('SET X in_state S WHERE X eid %(x)s, S eid %(s)s', {'x' : entity.eid, 's' : rset[0][0]}, 'x') @@ -505,7 +506,7 @@ key=key, value=value) def before_del_eproperty(session, eid): - for eidfrom, rtype, eidto in session.query_data('pendingrelations', ()): + for eidfrom, rtype, eidto in session.transaction_data.get('pendingrelations', ()): if rtype == 'for_user' and eidfrom == eid: # if for_user was set, delete has already been handled break diff -r 0072247db207 -r 51c84d585456 server/migractions.py --- a/server/migractions.py Tue Jun 23 13:31:35 2009 +0200 +++ b/server/migractions.py Tue Jun 23 13:36:38 2009 +0200 @@ -23,7 +23,7 @@ from datetime import datetime from logilab.common.deprecation import deprecated_function, obsolete -from logilab.common.decorators import cached +from logilab.common.decorators import cached, clear_cache from logilab.common.adbh import get_adv_func_helper from yams.constraints import SizeConstraint @@ -35,8 +35,8 @@ from cubicweb.common.migration import MigrationHelper, yes try: - from cubicweb.server import schemaserial as ss - from cubicweb.server.utils import manager_userpasswd + from cubicweb.server import SOURCE_TYPES, schemaserial as ss + from cubicweb.server.utils import manager_userpasswd, ask_source_config from cubicweb.server.sqlutils import sqlexec, SQL_PREFIX except ImportError: # LAX pass @@ -77,7 +77,8 @@ """write current installed versions (of cubicweb software and of each used cube) into the database """ - self.cmd_set_property('system.version.cubicweb', self.config.cubicweb_version()) + self.cmd_set_property('system.version.cubicweb', + self.config.cubicweb_version()) for pkg in self.config.cubes(): pkgversion = self.config.cube_version(pkg) self.cmd_set_property('system.version.%s' % pkg.lower(), pkgversion) @@ -479,9 +480,16 @@ newcubes = super(ServerMigrationHelper, self).cmd_add_cubes(cubes) if not newcubes: return - for pack in newcubes: - self.cmd_set_property('system.version.'+pack, - self.config.cube_version(pack)) + for cube in newcubes: + self.cmd_set_property('system.version.'+cube, + self.config.cube_version(cube)) + if cube in SOURCE_TYPES: + # don't use config.sources() in case some sources have been + # disabled for migration + sourcescfg = self.config.read_sources_file() + sourcescfg[cube] = ask_source_config(cube) + self.config.write_sources_file(sourcescfg) + clear_cache(self.config, 'read_sources_file') if not update_database: self.commit() return @@ -515,8 +523,9 @@ self.exec_event_script('postcreate', self.config.cube_dir(pack)) self.commit() - def cmd_remove_cube(self, cube): - removedcubes = super(ServerMigrationHelper, self).cmd_remove_cube(cube) + def cmd_remove_cube(self, cube, removedeps=False): + removedcubes = super(ServerMigrationHelper, self).cmd_remove_cube( + cube, removedeps) if not removedcubes: return fsschema = self.fs_schema @@ -688,7 +697,8 @@ `newname` is a string giving the name of the renamed entity type """ self.rqlexec('SET ET name %(newname)s WHERE ET is CWEType, ET name %(oldname)s', - {'newname' : unicode(newname), 'oldname' : oldname}) + {'newname' : unicode(newname), 'oldname' : oldname}, + ask_confirm=False) if commit: self.commit() diff -r 0072247db207 -r 51c84d585456 server/msplanner.py --- 
a/server/msplanner.py Tue Jun 23 13:31:35 2009 +0200 +++ b/server/msplanner.py Tue Jun 23 13:36:38 2009 +0200 @@ -158,6 +158,7 @@ # XXX move functions below to rql ############################################## def is_ancestor(n1, n2): + """return True if n2 is a parent scope of n1""" p = n1.parent while p is not None: if p is n2: @@ -171,17 +172,14 @@ newnode.append(part) return newnode -def same_scope(var): - """return true if the variable is always used in the same scope""" - try: - return var.stinfo['samescope'] - except KeyError: - for rel in var.stinfo['relations']: - if not rel.scope is var.scope: - var.stinfo['samescope'] = False - return False - var.stinfo['samescope'] = True - return True +def used_in_outer_scope(var, scope): + """return true if the variable is used in an outer scope of the given scope + """ + for rel in var.stinfo['relations']: + rscope = rel.scope + if not rscope is scope and is_ancestor(scope, rscope): + return True + return False ################################################################################ @@ -354,6 +352,8 @@ if source is self.system_source: for const in vconsts: self._set_source_for_term(source, const) + elif not self._sourcesterms: + self._set_source_for_term(source, const) elif source in self._sourcesterms: source_scopes = frozenset(t.scope for t in self._sourcesterms[source]) for const in vconsts: @@ -361,7 +361,7 @@ self._set_source_for_term(source, const) # if system source is used, add every rewritten constant # to its supported terms even when associated entity - # doesn't actually comes from it so we get a changes + # doesn't actually come from it so we get a changes # that allequals will return True as expected when # computing needsplit # check const is used in a relation restriction @@ -555,7 +555,12 @@ self.needsplit = False elif not self.needsplit: if not allequals(self._sourcesterms.itervalues()): - self.needsplit = True + for terms in self._sourcesterms.itervalues(): + if any(x for x in terms if not isinstance(x, Constant)): + self.needsplit = True + return + self._sourcesterms = {self.system_source: {}} + self.needsplit = False else: sample = self._sourcesterms.itervalues().next() if len(sample) > 1: @@ -1216,6 +1221,7 @@ self.terms = terms self.solindices = solindices self.final = final + self._pending_vrefs = [] # terms which appear in unsupported branches needsel |= self.extneedsel self.needsel = needsel @@ -1227,6 +1233,7 @@ self.mayneedvar, self.hasvar = {}, {} self.use_only_defined = False self.scopes = {rqlst: newroot} + self.current_scope = rqlst if rqlst.where: rqlst = self._rqlst_accept(rqlst, rqlst.where, newroot, terms, newroot.set_where) @@ -1368,9 +1375,14 @@ else: raise UnsupportedBranch() rschema = self.schema.rschema(node.r_type) + self._pending_vrefs = [] try: res = self.visit_default(node, newroot, terms)[0] - except Exception, ex: + except: + # when a relation isn't supported, we should dereference potentially + # introduced variable refs + for vref in self._pending_vrefs: + vref.unregister_reference() raise ored = node.ored() if rschema.is_final() or rschema.inlined: @@ -1397,7 +1409,7 @@ return False if var.name in self.extneedsel or var.stinfo['selected']: return False - if not same_scope(var): + if not var in terms or used_in_outer_scope(var, self.current_scope): return False if any(v for v, _ in var.stinfo['attrvars'] if not v in terms): return False @@ -1433,7 +1445,9 @@ # set scope so we can insert types restriction properly newvar = newroot.get_variable(node.name) newvar.stinfo['scope'] = 
self.scopes.get(node.variable.scope, newroot) - return VariableRef(newvar), node + vref = VariableRef(newvar) + self._pending_vrefs.append(vref) + return vref, node def visit_constant(self, node, newroot, terms): return copy_node(newroot, node), node diff -r 0072247db207 -r 51c84d585456 server/pool.py --- a/server/pool.py Tue Jun 23 13:31:35 2009 +0200 +++ b/server/pool.py Tue Jun 23 13:36:38 2009 +0200 @@ -68,11 +68,11 @@ # internals ############################################################### - def pool_set(self, session): + def pool_set(self): """pool is being set""" self.check_connections() - def pool_reset(self, session): + def pool_reset(self): """pool is being reseted""" for source, cnx in self.source_cnxs.values(): source.pool_reset(cnx) diff -r 0072247db207 -r 51c84d585456 server/querier.py --- a/server/querier.py Tue Jun 23 13:31:35 2009 +0200 +++ b/server/querier.py Tue Jun 23 13:36:38 2009 +0200 @@ -298,7 +298,7 @@ localchecks = {} if rqlst.where is not None: varkwargs = var_kwargs(rqlst.where, self.args) - neweids = self.session.query_data('neweids', ()) + neweids = self.session.transaction_data.get('neweids', ()) else: varkwargs = None restricted_vars = set() diff -r 0072247db207 -r 51c84d585456 server/repository.py --- a/server/repository.py Tue Jun 23 13:31:35 2009 +0200 +++ b/server/repository.py Tue Jun 23 13:36:38 2009 +0200 @@ -60,13 +60,19 @@ """the observed connections pool has been rollbacked, remove inserted eid from repository type/source cache """ - self.repo.clear_caches(self.session.query_data('pendingeids', ())) + try: + self.repo.clear_caches(self.session.transaction_data['pendingeids']) + except KeyError: + pass def rollback_event(self): """the observed connections pool has been rollbacked, remove inserted eid from repository type/source cache """ - self.repo.clear_caches(self.session.query_data('neweids', ())) + try: + self.repo.clear_caches(self.session.transaction_data['neweids']) + except KeyError: + pass class FTIndexEntityOp(LateOperation): @@ -80,7 +86,7 @@ def precommit_event(self): session = self.session entity = self.entity - if entity.eid in session.query_data('pendingeids', ()): + if entity.eid in session.transaction_data.get('pendingeids', ()): return # entity added and deleted in the same transaction session.repo.system_source.fti_unindex_entity(session, entity.eid) for container in entity.fti_containers(): @@ -864,7 +870,8 @@ self.system_source.add_info(session, entity, source, extid) if complete: entity.complete(entity.e_schema.indexable_attributes()) - session.add_query_data('neweids', entity.eid) + new = session.transaction_data.setdefault('neweids', set()) + new.add(entity.eid) # now we can update the full text index if self.do_fti: FTIndexEntityOp(session, entity=entity) @@ -881,7 +888,7 @@ * setup cache update operation """ self.system_source.fti_unindex_entity(session, eid) - pending = session.query_data('pendingeids', set(), setdefault=True) + pending = session.transaction_data.setdefault('pendingeids', set()) pending.add(eid) CleanupEidTypeCacheOp(session) @@ -918,7 +925,7 @@ def index_entity(self, session, entity): """full text index a modified entity""" - alreadydone = session.query_data('indexedeids', set(), setdefault=True) + alreadydone = session.transaction_data.setdefault('indexedeids', set()) if entity.eid in alreadydone: self.info('skipping reindexation of %s, already done', entity.eid) return diff -r 0072247db207 -r 51c84d585456 server/schemahooks.py --- a/server/schemahooks.py Tue Jun 23 13:31:35 2009 +0200 +++ 
b/server/schemahooks.py Tue Jun 23 13:36:38 2009 +0200 @@ -37,7 +37,7 @@ def get_constraints(session, entity): constraints = [] - for cstreid in session.query_data(entity.eid, ()): + for cstreid in session.transaction_data.get(entity.eid, ()): cstrent = session.entity(cstreid) cstr = CONSTRAINTS[cstrent.type].deserialize(cstrent.value) cstr.eid = cstreid @@ -62,7 +62,8 @@ # is done by the dbhelper) session.pool.source('system').create_index(session, table, column) session.info('added index on %s(%s)', table, column) - session.add_query_data('createdattrs', '%s.%s' % (etype, rtype)) + session.transaction_data.setdefault('createdattrs', []).append( + '%s.%s' % (etype, rtype)) class SchemaOperation(Operation): @@ -107,8 +108,8 @@ """actually remove a database from the application's schema""" table = None # make pylint happy def precommit_event(self): - dropped = self.session.query_data('droppedtables', - default=set(), setdefault=True) + dropped = self.session.transaction_data.setdefault('droppedtables', + set()) if self.table in dropped: return # already processed dropped.add(self.table) @@ -208,7 +209,7 @@ * delete the associated relation type when necessary """ subjschema, rschema, objschema = session.repo.schema.schema_by_eid(rdefeid) - pendings = session.query_data('pendingeids', ()) + pendings = session.transaction_data.get('pendingeids', ()) # first delete existing relation if necessary if rschema.is_final(): rdeftype = 'CWAttribute' @@ -472,14 +473,14 @@ except KeyError: alreadythere = False if not (alreadythere or - key in session.query_data('createdattrs', ())): + key in session.transaction_data.get('createdattrs', ())): add_inline_relation_column(session, subj, rtype) else: # need to create the relation if no relation definition in the # schema and if it has not been added during other event of the same # transaction if not (rschema.subjects() or - rtype in session.query_data('createdtables', ())): + rtype in session.transaction_data.get('createdtables', ())): try: rschema = schema[rtype] tablesql = rschema2sql(rschema) @@ -494,7 +495,8 @@ for sql in tablesql.split(';'): if sql.strip(): self.session.system_sql(sql) - session.add_query_data('createdtables', rtype) + session.transaction_data.setdefault('createdtables', []).append( + rtype) def after_add_enfrdef(session, entity): AddCWRelationPreCommitOp(session, entity=entity) @@ -572,6 +574,7 @@ constraints = self.rschema.rproperty(etype, atype, 'constraints') coltype = type_from_constraints(adbh, atype, constraints, creating=False) + # XXX check self.values['cardinality'][0] actually changed? 
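# The recurring rewrite in this changeset maps the deprecated query_data
# helpers onto plain dict operations on session.transaction_data; the
# equivalences, taken from the hunks above and below, are:
#
#   session.query_data('pendingeids', ())                      # before
#   session.transaction_data.get('pendingeids', ())            # after
#
#   session.query_data('pendingeids', set(), setdefault=True)  # before
#   session.transaction_data.setdefault('pendingeids', set())  # after
#
#   session.add_query_data('pendingrelations', rel)            # before
#   session.transaction_data.setdefault('pendingrelations', []).append(rel)  # after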
sql = adbh.sql_set_null_allowed(table, column, coltype, self.values['cardinality'][0] != '1') self.session.system_sql(sql) @@ -619,13 +622,14 @@ if not inlined: # need to create the relation if it has not been already done by another # event of the same transaction - if not rschema.type in session.query_data('createdtables', ()): + if not rschema.type in session.transaction_data.get('createdtables', ()): tablesql = rschema2sql(rschema) # create the necessary table for sql in tablesql.split(';'): if sql.strip(): sqlexec(sql) - session.add_query_data('createdtables', rschema.type) + session.transaction_data.setdefault('createdtables', []).append( + rschema.type) # copy existant data column = SQL_PREFIX + rtype for etype in rschema.subjects(): @@ -696,7 +700,7 @@ session = self.session # when the relation is added in the same transaction, the constraint object # is created by AddEN?FRDefPreCommitOp, there is nothing to do here - if rdef.eid in session.query_data('neweids', ()): + if rdef.eid in session.transaction_data.get('neweids', ()): self.cancelled = True return self.cancelled = False @@ -772,7 +776,7 @@ def before_delete_constrained_by(session, fromeid, rtype, toeid): - if not fromeid in session.query_data('pendingeids', ()): + if not fromeid in session.transaction_data.get('pendingeids', ()): schema = session.repo.schema entity = session.eid_rset(toeid).get_entity(0, 0) subjtype, rtype, objtype = schema.schema_by_eid(fromeid) @@ -785,8 +789,8 @@ def after_add_constrained_by(session, fromeid, rtype, toeid): - if fromeid in session.query_data('neweids', ()): - session.add_query_data(fromeid, toeid) + if fromeid in session.transaction_data.get('neweids', ()): + session.transaction_data.setdefault(fromeid, []).append(toeid) # schema permissions synchronization ########################################## @@ -907,7 +911,7 @@ skip the operation if the related type is being deleted """ - if subject in session.query_data('pendingeids', ()): + if subject in session.transaction_data.get('pendingeids', ()): return perm = rtype.split('_', 1)[0] if session.describe(object)[0] == 'CWGroup': diff -r 0072247db207 -r 51c84d585456 server/schemaserial.py --- a/server/schemaserial.py Tue Jun 23 13:31:35 2009 +0200 +++ b/server/schemaserial.py Tue Jun 23 13:36:38 2009 +0200 @@ -109,7 +109,7 @@ ETYPE_NAME_MAP[etype]) print sql sqlcu.execute(sql) - # other table renaming done once schema has been readen + # other table renaming done once schema has been read # print 'reading schema from the database...' 
index = {} permsdict = deserialize_ertype_permissions(session) diff -r 0072247db207 -r 51c84d585456 server/serverconfig.py --- a/server/serverconfig.py Tue Jun 23 13:31:35 2009 +0200 +++ b/server/serverconfig.py Tue Jun 23 13:36:38 2009 +0200 @@ -10,12 +10,66 @@ import os from os.path import join, exists -from logilab.common.configuration import Method +from logilab.common.configuration import REQUIRED, Method, Configuration, \ + ini_format_section from logilab.common.decorators import wproperty, cached, clear_cache from cubicweb import CW_SOFTWARE_ROOT, RegistryNotFound -from cubicweb.toolsutils import env_path, read_config +from cubicweb.toolsutils import env_path, read_config, restrict_perms_to_user from cubicweb.cwconfig import CubicWebConfiguration, merge_options +from cubicweb.server import SOURCE_TYPES + + +USER_OPTIONS = ( + ('login', {'type' : 'string', + 'default': REQUIRED, + 'help': "cubicweb manager account's login " + '(this user will be created)', + 'inputlevel': 0, + }), + ('password', {'type' : 'password', + 'help': "cubicweb manager account's password", + 'inputlevel': 0, + }), + ) + +def generate_sources_file(sourcesfile, sourcescfg, keys=None): + """serialize the repository's sources configuration into an INI-like file + + the `keys` parameter may be used to sort sections + """ + if keys is None: + keys = sourcescfg.keys() + else: + for key in sourcescfg: + if not key in keys: + keys.append(key) + stream = open(sourcesfile, 'w') + for uri in keys: + sconfig = sourcescfg[uri] + if isinstance(sconfig, dict): + # get a Configuration object + if uri == 'admin': + options = USER_OPTIONS + else: + options = SOURCE_TYPES[sconfig['adapter']].options + _sconfig = Configuration(options=options) + for attr, val in sconfig.items(): + if attr == 'uri': + continue + if attr == 'adapter': + _sconfig.adapter = val + else: + _sconfig.set_option(attr, val) + sconfig = _sconfig + optsbysect = list(sconfig.options_by_section()) + assert len(optsbysect) == 1, 'all options for a source should be in the same group' + ini_format_section(stream, uri, optsbysect[0][1]) + if hasattr(sconfig, 'adapter'): + print >> stream + print >> stream, '# adapter for this source (YOU SHOULD NOT CHANGE THIS)' + print >> stream, 'adapter=%s' % sconfig.adapter + print >> stream class ServerConfiguration(CubicWebConfiguration): @@ -188,16 +242,27 @@ # restricted user, this user usually don't have access to the sources # configuration file (#16102) @cached + def read_sources_file(self): + return read_config(self.sources_file()) + def sources(self): """return a dictionnaries containing sources definitions indexed by sources'uri """ - allsources = read_config(self.sources_file()) + allsources = self.read_sources_file() if self._enabled_sources is None: return allsources return dict((uri, config) for uri, config in allsources.items() if uri in self._enabled_sources or uri == 'admin') + def write_sources_file(self, sourcescfg): + sourcesfile = self.sources_file() + if exists(sourcesfile): + import shutil + shutil.copy(sourcesfile, sourcesfile + '.bak') + generate_sources_file(sourcesfile, sourcescfg, ['admin', 'system']) + restrict_perms_to_user(sourcesfile) + def pyro_enabled(self): """pyro is always enabled in standalone repository configuration""" return True diff -r 0072247db207 -r 51c84d585456 server/serverctl.py --- a/server/serverctl.py Tue Jun 23 13:31:35 2009 +0200 +++ b/server/serverctl.py Tue Jun 23 13:36:38 2009 +0200 @@ -10,13 +10,14 @@ import sys import os -from logilab.common.configuration import REQUIRED, 
Configuration, ini_format_section +from logilab.common.configuration import Configuration from logilab.common.clcommands import register_commands, cmd_run, pop_arg from cubicweb import AuthenticationError, ExecutionError, ConfigurationError -from cubicweb.toolsutils import (Command, CommandHandler, confirm, - restrict_perms_to_user) -from cubicweb.server.serverconfig import ServerConfiguration +from cubicweb.toolsutils import Command, CommandHandler, confirm +from cubicweb.server import SOURCE_TYPES +from cubicweb.server.utils import ask_source_config +from cubicweb.server.serverconfig import USER_OPTIONS, ServerConfiguration # utility functions ########################################################### @@ -92,41 +93,6 @@ pass return cnx -def generate_sources_file(sourcesfile, sourcescfg, keys=None): - """serialize repository'sources configuration into a INI like file - - the `keys` parameter may be used to sort sections - """ - from cubicweb.server.sources import SOURCE_TYPES - if keys is None: - keys = sourcescfg.keys() - else: - for key in sourcescfg: - if not key in keys: - keys.append(key) - stream = open(sourcesfile, 'w') - for uri in keys: - sconfig = sourcescfg[uri] - if isinstance(sconfig, dict): - # get a Configuration object - _sconfig = Configuration(options=SOURCE_TYPES[sconfig['adapter']].options) - for attr, val in sconfig.items(): - if attr == 'uri': - continue - if attr == 'adapter': - _sconfig.adapter = val - else: - _sconfig.set_option(attr, val) - sconfig = _sconfig - optsbysect = list(sconfig.options_by_section()) - assert len(optsbysect) == 1, 'all options for a source should be in the same group' - ini_format_section(stream, uri, optsbysect[0][1]) - if hasattr(sconfig, 'adapter'): - print >> stream - print >> stream, '# adapter for this source (YOU SHOULD NOT CHANGE THIS)' - print >> stream, 'adapter=%s' % sconfig.adapter - print >> stream - def repo_cnx(config): """return a in-memory repository and a db api connection it""" from cubicweb.dbapi import in_memory_cnx @@ -155,7 +121,6 @@ """create an application by copying files from the given cube and by asking information necessary to build required configuration files """ - from cubicweb.server.sources import SOURCE_TYPES config = self.config print 'application\'s repository configuration' print '-' * 72 @@ -170,25 +135,37 @@ sconfig.adapter = 'native' sconfig.input_config(inputlevel=inputlevel) sourcescfg = {'system': sconfig} + for cube in cubes: + # if a source is named as the cube containing it, we need the + # source to use the cube, so add it. 
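# A sketch of the sources-file round trip introduced above (the password
# value is illustrative): read_sources_file() is @cached, and
# write_sources_file() backs up the old file, rewrites it with the 'admin'
# and 'system' sections first, then restricts its permissions to the unix
# user, so callers drop the cached value afterwards:
#
#   sourcescfg = config.read_sources_file()
#   sourcescfg['admin']['password'] = 'newsecret'
#   config.write_sources_file(sourcescfg)
#   clear_cache(config, 'read_sources_file')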
+ if cube in SOURCE_TYPES: + sourcescfg[cube] = ask_source_config(cube, inputlevel) while raw_input('enter another source [y/N]: ').strip().lower() == 'y': - sourcetype = raw_input('source type (%s): ' % ', '.join(SOURCE_TYPES.keys())) - sconfig = Configuration(options=SOURCE_TYPES[sourcetype].options) - sconfig.adapter = sourcetype - sourceuri = raw_input('source uri: ').strip() - assert not sourceuri in sourcescfg - sconfig.input_config(inputlevel=inputlevel) - sourcescfg[sourceuri] = sconfig - # module names look like cubes.mycube.themodule - sourcecube = SOURCE_TYPES[sourcetype].module.split('.', 2)[1] - # if the source adapter is coming from an external component, ensure - # it's specified in used cubes - if sourcecube != 'cubicweb' and not sourcecube in cubes: - cubes.append(sourcecube) + available = sorted(stype for stype in SOURCE_TYPES + if not stype in cubes) + while True: + sourcetype = raw_input('source type (%s): ' % ', '.join(available)) + if sourcetype in available: + break + print 'unknown source type, use one of the available type' + while True: + sourceuri = raw_input('source uri: ').strip() + if sourceuri != 'admin' and sourceuri not in sourcescfg: + break + print 'uri already used, choose another one' + sourcescfg[sourceuri] = ask_source_config(sourcetype) + sourcemodule = SOURCE_TYPES[sourcetype].module + if not sourcemodule.startswith('cubicweb.'): + # module names look like cubes.mycube.themodule + sourcecube = SOURCE_TYPES[sourcetype].module.split('.', 2)[1] + # if the source adapter is coming from an external component, + # ensure it's specified in used cubes + if not sourcecube in cubes: + cubes.append(sourcecube) sconfig = Configuration(options=USER_OPTIONS) sconfig.input_config(inputlevel=inputlevel) sourcescfg['admin'] = sconfig - generate_sources_file(sourcesfile, sourcescfg, ['admin', 'system']) - restrict_perms_to_user(sourcesfile) + config.write_sources_file(sourcescfg) # remember selected cubes for later initialization of the database config.write_bootstrap_cubes_file(cubes) @@ -199,19 +176,6 @@ else: print 'nevermind, you can do it later using the db-create command' -USER_OPTIONS = ( - ('login', {'type' : 'string', - 'default': REQUIRED, - 'help': "cubicweb manager account's login " - '(this user will be created)', - 'inputlevel': 0, - }), - ('password', {'type' : 'password', - 'help': "cubicweb manager account's password", - 'inputlevel': 0, - }), - ) - class RepositoryDeleteHandler(CommandHandler): cmdname = 'delete' @@ -435,7 +399,7 @@ from cubicweb.server.utils import crypt_password, manager_userpasswd appid = pop_arg(args, 1, msg="No application specified !") config = ServerConfiguration.config_for(appid) - sourcescfg = config.sources() + sourcescfg = config.read_sources_file() try: adminlogin = sourcescfg['admin']['login'] except KeyError: @@ -454,9 +418,7 @@ sconfig['login'] = adminlogin sconfig['password'] = passwd sourcescfg['admin'] = sconfig - sourcesfile = config.sources_file() - generate_sources_file(sourcesfile, sourcescfg) - restrict_perms_to_user(sourcesfile) + config.write_sources_file(sourcescfg) except Exception, ex: cnx.rollback() import traceback diff -r 0072247db207 -r 51c84d585456 server/session.py --- a/server/session.py Tue Jun 23 13:31:35 2009 +0200 +++ b/server/session.py Tue Jun 23 13:36:38 2009 +0200 @@ -11,6 +11,7 @@ import threading from time import time +from logilab.common.deprecation import obsolete from rql.nodes import VariableRef, Function, ETYPE_PYOBJ_MAP, etype_from_pyobj from yams import BASE_TYPES @@ -65,54 +66,35 
@@ self.data = {} # i18n initialization self.set_language(cnxprops.lang) + # internals self._threaddata = threading.local() self._threads_in_transaction = set() self._closed = False - def get_mode(self): - return getattr(self._threaddata, 'mode', 'read') - def set_mode(self, value): - self._threaddata.mode = value - # transaction mode (read/write), resetted to read on commit / rollback - mode = property(get_mode, set_mode) + def __str__(self): + return '<%ssession %s (%s 0x%x)>' % (self.cnxtype, self.user.login, + self.id, id(self)) + # resource accessors ###################################################### - def get_commit_state(self): - return getattr(self._threaddata, 'commit_state', None) - def set_commit_state(self, value): - self._threaddata.commit_state = value - commit_state = property(get_commit_state, set_commit_state) - - # set according to transaction mode for each query - @property - def pool(self): - return getattr(self._threaddata, 'pool', None) + def actual_session(self): + """return the original parent session if any, else self""" + return self - # pending transaction operations - @property - def pending_operations(self): - try: - return self._threaddata.pending_operations - except AttributeError: - self._threaddata.pending_operations = [] - return self._threaddata.pending_operations + def etype_class(self, etype): + """return an entity class for the given entity type""" + return self.vreg.etype_class(etype) + + def entity(self, eid): + """return a result set for the given eid""" + return self.eid_rset(eid).get_entity(0, 0) - # rql rewriter - @property - def rql_rewriter(self): - try: - return self._threaddata._rewriter - except AttributeError: - self._threaddata._rewriter = RQLRewriter(self.repo.querier, self) - return self._threaddata._rewriter - - # transaction queries data - @property - def _query_data(self): - try: - return self._threaddata._query_data - except AttributeError: - self._threaddata._query_data = {} - return self._threaddata._query_data + def system_sql(self, sql, args=None): + """return a sql cursor on the system database""" + if not sql.split(None, 1)[0].upper() == 'SELECT': + self.mode = 'write' + cursor = self.pool['system'] + self.pool.source('system').doexec(cursor, sql, args) + return cursor def set_language(self, language): """i18n configuration for translation""" @@ -132,63 +114,61 @@ assert prop == 'lang' # this is the only one changeable property for now self.set_language(value) - def __str__(self): - return '<%ssession %s (%s 0x%x)>' % (self.cnxtype, self.user.login, - self.id, id(self)) + # connection management ################################################### - def etype_class(self, etype): - """return an entity class for the given entity type""" - return self.vreg.etype_class(etype) + def get_mode(self): + return getattr(self._threaddata, 'mode', 'read') + def set_mode(self, value): + self._threaddata.mode = value + mode = property(get_mode, set_mode, + doc='transaction mode (read/write), resetted to read on ' + 'commit / rollback') - def entity(self, eid): - """return a result set for the given eid""" - return self.eid_rset(eid).get_entity(0, 0) + def get_commit_state(self): + return getattr(self._threaddata, 'commit_state', None) + def set_commit_state(self, value): + self._threaddata.commit_state = value + commit_state = property(get_commit_state, set_commit_state) - def _touch(self): - """update latest session usage timestamp and reset mode to read - """ - self.timestamp = time() - self.local_perm_cache.clear() - 
self._threaddata.mode = 'read' + @property + def pool(self): + """connections pool, set according to transaction mode for each query""" + return getattr(self._threaddata, 'pool', None) def set_pool(self): """the session need a pool to execute some queries""" if self._closed: raise Exception('try to set pool on a closed session') if self.pool is None: + # get pool first to avoid race-condition self._threaddata.pool = self.repo._get_pool() try: - self._threaddata.pool.pool_set(self) + self._threaddata.pool.pool_set() except: + self._threaddata.pool = None self.repo._free_pool(self.pool) - self._threaddata.pool = None raise self._threads_in_transaction.add(threading.currentThread()) return self._threaddata.pool def reset_pool(self): - """the session has no longer using its pool, at least for some time - """ + """the session has no longer using its pool, at least for some time""" # pool may be none if no operation has been done since last commit # or rollback if self.pool is not None and self.mode == 'read': # even in read mode, we must release the current transaction + pool = self.pool self._threads_in_transaction.remove(threading.currentThread()) - self.repo._free_pool(self.pool) - self.pool.pool_reset(self) + pool.pool_reset() self._threaddata.pool = None + # free pool once everything is done to avoid race-condition + self.repo._free_pool(pool) - def system_sql(self, sql, args=None): - """return a sql cursor on the system database""" - if not sql.split(None, 1)[0].upper() == 'SELECT': - self.mode = 'write' - cursor = self.pool['system'] - self.pool.source('system').doexec(cursor, sql, args) - return cursor - - def actual_session(self): - """return the original parent session if any, else self""" - return self + def _touch(self): + """update latest session usage timestamp and reset mode to read""" + self.timestamp = time() + self.local_perm_cache.clear() + self._threaddata.mode = 'read' # shared data handling ################################################### @@ -202,7 +182,7 @@ def set_shared_data(self, key, value, querydata=False): """set value associated to `key` in session data""" if querydata: - self.set_query_data(key, value) + self.transaction_data[key] = value else: self.data[key] = value @@ -288,7 +268,7 @@ """commit the current session's transaction""" if self.pool is None: assert not self.pending_operations - self._query_data.clear() + self.transaction_data.clear() self._touch() return if self.commit_state: @@ -321,7 +301,7 @@ self._touch() self.commit_state = None self.pending_operations[:] = [] - self._query_data.clear() + self.transaction_data.clear() if reset_pool: self.reset_pool() @@ -329,7 +309,7 @@ """rollback the current session's transaction""" if self.pool is None: assert not self.pending_operations - self._query_data.clear() + self.transaction_data.clear() self._touch() return try: @@ -344,7 +324,7 @@ finally: self._touch() self.pending_operations[:] = [] - self._query_data.clear() + self.transaction_data.clear() if reset_pool: self.reset_pool() @@ -371,20 +351,32 @@ # transaction data/operations management ################################## - def add_query_data(self, key, value): - self._query_data.setdefault(key, []).append(value) + @property + def transaction_data(self): + try: + return self._threaddata.transaction_data + except AttributeError: + self._threaddata.transaction_data = {} + return self._threaddata.transaction_data - def set_query_data(self, key, value): - self._query_data[key] = value - + @obsolete('use direct access to session.transaction_data') def 
query_data(self, key, default=None, setdefault=False, pop=False): if setdefault: assert not pop - return self._query_data.setdefault(key, default) + return self.transaction_data.setdefault(key, default) if pop: - return self._query_data.pop(key, default) + return self.transaction_data.pop(key, default) else: - return self._query_data.get(key, default) + return self.transaction_data.get(key, default) + + @property + def pending_operations(self): + try: + return self._threaddata.pending_operations + except AttributeError: + self._threaddata.pending_operations = [] + return self._threaddata.pending_operations + def add_operation(self, operation, index=None): """add an observer""" @@ -396,6 +388,14 @@ # querier helpers ######################################################### + @property + def rql_rewriter(self): + try: + return self._threaddata._rewriter + except AttributeError: + self._threaddata._rewriter = RQLRewriter(self.repo.querier, self) + return self._threaddata._rewriter + def build_description(self, rqlst, args, result): """build a description for a given result""" if len(rqlst.children) == 1 and len(rqlst.children[0].solutions) == 1: @@ -502,8 +502,8 @@ def pending_operations(self): return self.parent_session.pending_operations @property - def _query_data(self): - return self.parent_session._query_data + def transaction_data(self): + return self.parent_session.transaction_data def set_pool(self): """the session need a pool to execute some queries""" diff -r 0072247db207 -r 51c84d585456 server/sources/extlite.py --- a/server/sources/extlite.py Tue Jun 23 13:31:35 2009 +0200 +++ b/server/sources/extlite.py Tue Jun 23 13:36:38 2009 +0200 @@ -8,8 +8,6 @@ __docformat__ = "restructuredtext en" -import time -import threading from os.path import join, exists from cubicweb import server @@ -17,24 +15,21 @@ from cubicweb.server.sources import AbstractSource, native from cubicweb.server.sources.rql2sql import SQLGenerator -def timeout_acquire(lock, timeout): - while not lock.acquire(False): - time.sleep(0.2) - timeout -= 0.2 - if timeout <= 0: - raise RuntimeError("svn source is busy, can't acquire connection lock") - class ConnectionWrapper(object): def __init__(self, source=None): self.source = source self._cnx = None @property - def cnx(self): + def logged_user(self): if self._cnx is None: - timeout_acquire(self.source._cnxlock, 5) self._cnx = self.source._sqlcnx - return self._cnx + return self._cnx.logged_user + + def cursor(self): + if self._cnx is None: + self._cnx = self.source._sqlcnx + return self._cnx.cursor() def commit(self): if self._cnx is not None: @@ -44,8 +39,10 @@ if self._cnx is not None: self._cnx.rollback() - def cursor(self): - return self.cnx.cursor() + def close(self): + if self._cnx is not None: + self._cnx.close() + self._cnx = None class SQLiteAbstractSource(AbstractSource): @@ -87,11 +84,6 @@ self._need_full_import = self._need_sql_create AbstractSource.__init__(self, repo, appschema, source_config, *args, **kwargs) - # sql database can only be accessed by one connection at a time, and a - # connection can only be used by the thread which created it so: - # * create the connection when needed - # * use a lock to be sure only one connection is used - self._cnxlock = threading.Lock() @property def _sqlcnx(self): @@ -164,11 +156,10 @@ has a connection set """ if cnx._cnx is not None: - try: - cnx._cnx.close() - cnx._cnx = None - finally: - self._cnxlock.release() + cnx._cnx.close() + # reset _cnx to ensure next thread using cnx will get a new + # connection + 
cnx._cnx = None def syntax_tree_search(self, session, union, args=None, cachekey=None, varmap=None, debug=0): @@ -182,11 +173,13 @@ sql, query_args = self.rqlsqlgen.generate(union, args) if server.DEBUG: print self.uri, 'SOURCE RQL', union.as_string() - print 'GENERATED SQL', sql args = self.sqladapter.merge_args(args, query_args) cursor = session.pool[self.uri] - cursor.execute(sql, args) - return self.sqladapter.process_result(cursor) + self.doexec(cursor, sql, args) + res = self.sqladapter.process_result(cursor) + if server.DEBUG: + print '------>', res + return res def local_add_entity(self, session, entity): """insert the entity in the local database. @@ -195,10 +188,9 @@ don't want to simply do this, so let raise NotImplementedError and the source implementor may use this method if necessary """ - cu = session.pool[self.uri] attrs = self.sqladapter.preprocess_entity(entity) sql = self.sqladapter.sqlgen.insert(SQL_PREFIX + str(entity.e_schema), attrs) - cu.execute(sql, attrs) + self.doexec(session.pool[self.uri], sql, attrs) def add_entity(self, session, entity): """add a new entity to the source""" @@ -211,12 +203,11 @@ source don't want to simply do this, so let raise NotImplementedError and the source implementor may use this method if necessary """ - cu = session.pool[self.uri] if attrs is None: attrs = self.sqladapter.preprocess_entity(entity) sql = self.sqladapter.sqlgen.update(SQL_PREFIX + str(entity.e_schema), attrs, [SQL_PREFIX + 'eid']) - cu.execute(sql, attrs) + self.doexec(session.pool[self.uri], sql, attrs) def update_entity(self, session, entity): """update an entity in the source""" @@ -229,16 +220,30 @@ source. Main usage is to delete repository content when a Repository entity is deleted. """ - sqlcursor = session.pool[self.uri] attrs = {SQL_PREFIX + 'eid': eid} sql = self.sqladapter.sqlgen.delete(SQL_PREFIX + etype, attrs) - sqlcursor.execute(sql, attrs) + self.doexec(session.pool[self.uri], sql, attrs) + + def local_add_relation(self, session, subject, rtype, object): + """add a relation to the source + + This is not provided as add_relation implementation since usually + source don't want to simply do this, so let raise NotImplementedError + and the source implementor may use this method if necessary + """ + attrs = {'eid_from': subject, 'eid_to': object} + sql = self.sqladapter.sqlgen.insert('%s_relation' % rtype, attrs) + self.doexec(session.pool[self.uri], sql, attrs) + + def add_relation(self, session, subject, rtype, object): + """add a relation to the source""" + raise NotImplementedError() def delete_relation(self, session, subject, rtype, object): """delete a relation from the source""" rschema = self.schema.rschema(rtype) if rschema.inlined: - if subject in session.query_data('pendingeids', ()): + if subject in session.transaction_data.get('pendingeids', ()): return table = SQL_PREFIX + session.describe(subject)[0] column = SQL_PREFIX + rtype @@ -247,5 +252,21 @@ else: attrs = {'eid_from': subject, 'eid_to': object} sql = self.sqladapter.sqlgen.delete('%s_relation' % rtype, attrs) - sqlcursor = session.pool[self.uri] - sqlcursor.execute(sql, attrs) + self.doexec(session.pool[self.uri], sql, attrs) + + def doexec(self, cursor, query, args=None): + """Execute a query. 
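
# `local_add_relation` above inserts a relation row from just two eids: every
# relation named `rtype` lives in a `<rtype>_relation` table with
# `eid_from`/`eid_to` columns.  A hedged sketch of the statement this builds
# (the real SQL comes from sqladapter.sqlgen, not this helper):

def relation_insert(rtype, subject, object):
    # returns (sql, args) in DB-API "pyformat" style
    attrs = {'eid_from': subject, 'eid_to': object}
    sql = ('INSERT INTO %s_relation ( eid_from, eid_to ) '
           'VALUES ( %%(eid_from)s, %%(eid_to)s )' % rtype)
    return sql, attrs
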
+ it's a function just so that it shows up in profiling + """ + #t1 = time() + if server.DEBUG: + print 'exec', query, args + #import sys + #sys.stdout.flush() + # str(query) to avoid error if it's an unicode string + try: + cursor.execute(str(query), args) + except Exception, ex: + self.critical("sql: %r\n args: %s\ndbms message: %r", + query, args, ex.args[0]) + raise diff -r 0072247db207 -r 51c84d585456 server/sources/native.py --- a/server/sources/native.py Tue Jun 23 13:31:35 2009 +0200 +++ b/server/sources/native.py Tue Jun 23 13:36:38 2009 +0200 @@ -5,7 +5,7 @@ from which it comes from) are stored in a varchar column encoded as a base64 string. This is because it should actually be Bytes but we want an index on it for fast querying. - + :organization: Logilab :copyright: 2001-2009 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr @@ -154,6 +154,25 @@ self._cache = Cache(repo.config['rql-cache-size']) self._temp_table_data = {} self._eid_creation_lock = Lock() + # XXX no_sqlite_wrap trick since we've a sqlite locking pb when + # running unittest_multisources with the wrapping below + if self.dbdriver == 'sqlite' and \ + not getattr(repo.config, 'no_sqlite_wrap', False): + from cubicweb.server.sources.extlite import ConnectionWrapper + self.get_connection = lambda: ConnectionWrapper(self) + self.check_connection = lambda cnx: cnx + def pool_reset(cnx): + if cnx._cnx is not None: + cnx._cnx.close() + cnx._cnx = None + self.pool_reset = pool_reset + + @property + def _sqlcnx(self): + # XXX: sqlite connections can only be used in the same thread, so + # create a new one each time necessary. If it appears to be time + # consuming, find another way + return SQLAdapterMixIn.get_connection(self) def reset_caches(self): """method called during test to reset potential source caches""" @@ -171,21 +190,25 @@ return self.process_result(cursor) def init_creating(self): + pool = self.repo._get_pool() + pool.pool_set() # check full text index availibility - pool = self.repo._get_pool() if not self.indexer.has_fti_table(pool['system']): self.error('no text index table') self.indexer = None + pool.pool_reset() self.repo._free_pool(pool) def init(self): self.init_creating() pool = self.repo._get_pool() + pool.pool_set() # XXX cubicweb < 2.42 compat if 'deleted_entities' in self.dbhelper.list_tables(pool['system']): self.has_deleted_entitites_table = True else: self.has_deleted_entitites_table = False + pool.pool_reset() self.repo._free_pool(pool) # ISource interface ####################################################### @@ -316,42 +339,22 @@ query = 'INSERT INTO %s %s' % (table, sql.encode(self.encoding)) self.doexec(session.pool[self.uri], query, self.merge_args(args, query_args)) -# XXX commented until it's proved to be necessary -# # XXX probably inefficient -# tempdata = self._temp_table_data.setdefault(table, set()) -# cursor = session.pool[self.uri] -# cursor.execute('select * from %s' % table) -# for row in cursor.fetchall(): -# print 'data', row -# tempdata.add(tuple(row)) else: super(NativeSQLSource, self).flying_insert(table, session, union, args, varmap) def _manual_insert(self, results, table, session): """insert given result into a temporary table on the system source""" - #print 'manual insert', table, results if not results: return - #cursor.execute('select * from %s'%table) - #assert len(cursor.fetchall())== 0 - encoding = self.encoding - # added chr to be sqlite compatible query_args = ['%%(%s)s' % i for i in 
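
# `doexec`, defined just above, becomes the single funnel for raw SQL in this
# source: it logs the query, its arguments and the DBMS error before
# re-raising, which beats a bare cursor.execute() when debugging.  A minimal
# version of such a helper (stdlib logging stands in for the source's logger):

import logging

sql_logger = logging.getLogger('sql')

def doexec(cursor, query, args=None):
    """execute a query, logging full context on failure"""
    try:
        # str(query) avoids driver errors when the query is a unicode string
        cursor.execute(str(query), args or ())
    except Exception as ex:
        sql_logger.critical('sql: %r\n args: %s\ndbms message: %r',
                            query, args, ex)
        raise
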
xrange(len(results[0]))] query = 'INSERT INTO %s VALUES(%s)' % (table, ','.join(query_args)) kwargs_list = [] -# tempdata = self._temp_table_data.setdefault(table, set()) for row in results: kwargs = {} row = tuple(row) -# XXX commented until it's proved to be necessary -# if row in tempdata: -# continue -# tempdata.add(row) for index, cell in enumerate(row): - if type(cell) is unicode: - cell = cell.encode(encoding) - elif isinstance(cell, Binary): + if isinstance(cell, Binary): cell = self.binary(cell.getvalue()) kwargs[str(index)] = cell kwargs_list.append(kwargs) diff -r 0072247db207 -r 51c84d585456 server/sources/rql2sql.py --- a/server/sources/rql2sql.py Tue Jun 23 13:31:35 2009 +0200 +++ b/server/sources/rql2sql.py Tue Jun 23 13:36:38 2009 +0200 @@ -827,6 +827,9 @@ contextrels[var.name] = attrvars[var.name] except KeyError: attrvars[var.name] = relation + if var.name in self._varmap: + # ensure table is added + self._var_info(var.variable) if not contextrels: relation.children[1].accept(self, contextrels) return '' diff -r 0072247db207 -r 51c84d585456 server/ssplanner.py --- a/server/ssplanner.py Tue Jun 23 13:31:35 2009 +0200 +++ b/server/ssplanner.py Tue Jun 23 13:36:38 2009 +0200 @@ -440,7 +440,7 @@ session = self.plan.session delete = session.repo.glob_delete_entity # register pending eids first to avoid multiple deletion - pending = session.query_data('pendingeids', set(), setdefault=True) + pending = session.transaction_data.setdefault('pendingeids', set()) actual = todelete - pending pending |= actual for eid in actual: diff -r 0072247db207 -r 51c84d585456 server/test/unittest_extlite.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/server/test/unittest_extlite.py Tue Jun 23 13:36:38 2009 +0200 @@ -0,0 +1,62 @@ +import threading, os, time + +from logilab.common.testlib import TestCase, unittest_main +from logilab.common.db import get_connection + +class SQLiteTC(TestCase): + sqlite_file = '_extlite_test.sqlite' + def setUp(self): + cnx1 = get_connection('sqlite', database=self.sqlite_file) + print 'SET IP' + cu = cnx1.cursor() + cu.execute('CREATE TABLE toto(name integer);') + cnx1.commit() + cnx1.close() + + def tearDown(self): + try: + os.remove(self.sqlite_file) + except: + pass + def test(self): + lock = threading.Lock() + + def run_thread(): + print 'run_thread' + cnx2 = get_connection('sqlite', database=self.sqlite_file) + lock.acquire() + print 't2 sel1' + cu = cnx2.cursor() + cu.execute('SELECT name FROM toto') + self.failIf(cu.fetchall()) + cnx2.commit() + print 'done' + lock.release() + time.sleep(0.1) + lock.acquire() + print 't2 sel2' + cu.execute('SELECT name FROM toto') + self.failUnless(cu.fetchall()) + print 'done' + lock.release() + + cnx1 = get_connection('sqlite', database=self.sqlite_file) + lock.acquire() + thread = threading.Thread(target=run_thread) + thread.start() + cu = cnx1.cursor() + print 't1 sel' + cu.execute('SELECT name FROM toto') + print 'done' + lock.release() + time.sleep(0.1) + cnx1.commit() + lock.acquire() + print 't1 insert' + cu.execute("INSERT INTO toto(name) VALUES ('toto')") + cnx1.commit() + print 'done' + lock.release() + +if __name__ == '__main__': + unittest_main() diff -r 0072247db207 -r 51c84d585456 server/test/unittest_migractions.py --- a/server/test/unittest_migractions.py Tue Jun 23 13:31:35 2009 +0200 +++ b/server/test/unittest_migractions.py Tue Jun 23 13:36:38 2009 +0200 @@ -7,6 +7,7 @@ from logilab.common.testlib import TestCase, unittest_main from cubicweb.devtools.apptest import RepositoryBasedTC, get_versions 
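
# The ssplanner hunk above swaps `session.query_data('pendingeids', set(),
# setdefault=True)` for the plain dict idiom on `transaction_data`:
# `setdefault` reads and initialises the per-transaction set in one step, so
# repeated deletion passes in the same transaction skip eids already being
# deleted.  The idiom reduced to its essentials:

def register_pending(transaction_data, todelete):
    # `transaction_data` stands in for session.transaction_data;
    # `todelete` is a set of eids scheduled for deletion
    pending = transaction_data.setdefault('pendingeids', set())
    actual = todelete - pending        # only eids not already pending
    pending |= actual
    return actual
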
+from cubicweb import ConfigurationError from cubicweb.schema import CubicWebSchemaLoader from cubicweb.server.sqlutils import SQL_PREFIX from cubicweb.server.repository import Repository @@ -365,7 +366,7 @@ finally: self.mh.cmd_set_size_constraint('CWEType', 'description', None) - def test_add_remove_cube(self): + def test_add_remove_cube_and_deps(self): cubes = set(self.config.cubes()) schema = self.repo.schema self.assertEquals(sorted(schema['see_also']._rproperties.keys()), @@ -374,11 +375,10 @@ ('Note', 'Note'), ('Note', 'Bookmark')])) try: try: - self.mh.cmd_remove_cube('email') + self.mh.cmd_remove_cube('email', removedeps=True) # file was there because it's an email dependancy, should have been removed - cubes.remove('email') - cubes.remove('file') - self.assertEquals(set(self.config.cubes()), cubes) + self.failIf('email' in self.config.cubes()) + self.failIf('file' in self.config.cubes()) for ertype in ('Email', 'EmailThread', 'EmailPart', 'File', 'Image', 'sender', 'in_thread', 'reply_to', 'data_format'): self.failIf(ertype in schema, ertype) @@ -392,17 +392,14 @@ self.assertEquals(sorted(schema['see_also'].objects()), ['Bookmark', 'Folder', 'Note']) self.assertEquals(self.execute('Any X WHERE X pkey "system.version.email"').rowcount, 0) self.assertEquals(self.execute('Any X WHERE X pkey "system.version.file"').rowcount, 0) - self.failIf('email' in self.config.cubes()) - self.failIf('file' in self.config.cubes()) except : import traceback traceback.print_exc() raise finally: self.mh.cmd_add_cube('email') - cubes.add('email') - cubes.add('file') - self.assertEquals(set(self.config.cubes()), cubes) + self.failUnless('email' in self.config.cubes()) + self.failUnless('file' in self.config.cubes()) for ertype in ('Email', 'EmailThread', 'EmailPart', 'File', 'Image', 'sender', 'in_thread', 'reply_to', 'data_format'): self.failUnless(ertype in schema, ertype) @@ -420,8 +417,6 @@ email_version) self.assertEquals(self.execute('Any V WHERE X value V, X pkey "system.version.file"')[0][0], file_version) - self.failUnless('email' in self.config.cubes()) - self.failUnless('file' in self.config.cubes()) # trick: overwrite self.maxeid to avoid deletion of just reintroduced # types (and their associated tables!) self.maxeid = self.execute('Any MAX(X)')[0][0] @@ -429,6 +424,38 @@ # next test may fail complaining of missing tables self.commit() + + def test_add_remove_cube_no_deps(self): + cubes = set(self.config.cubes()) + schema = self.repo.schema + try: + try: + self.mh.cmd_remove_cube('email') + cubes.remove('email') + self.failIf('email' in self.config.cubes()) + self.failUnless('file' in self.config.cubes()) + for ertype in ('Email', 'EmailThread', 'EmailPart', + 'sender', 'in_thread', 'reply_to'): + self.failIf(ertype in schema, ertype) + except : + import traceback + traceback.print_exc() + raise + finally: + self.mh.cmd_add_cube('email') + self.failUnless('email' in self.config.cubes()) + # trick: overwrite self.maxeid to avoid deletion of just reintroduced + # types (and their associated tables!) 
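
# `test_remove_dep_cube` below exercises the new error path: removing a cube
# that another installed cube still depends on must raise ConfigurationError
# instead of tripping an assert.  logilab's testlib assertRaises returns the
# exception; with stdlib unittest (2.7+) the same check would look roughly
# like this (remove_cube is a hypothetical stand-in for cmd_remove_cube):

import unittest

class ConfigurationError(Exception):
    """stand-in for cubicweb.ConfigurationError"""

class RemoveDepCubeTC(unittest.TestCase):
    def remove_cube(self, cube):
        # pretend the installed 'email' cube still requires `cube`
        raise ConfigurationError("can't remove cube %s, "
                                 "used as a dependency" % cube)

    def test_remove_dep_cube(self):
        with self.assertRaises(ConfigurationError) as cm:
            self.remove_cube('file')
        self.assertEqual(str(cm.exception),
                         "can't remove cube file, used as a dependency")
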
+ self.maxeid = self.execute('Any MAX(X)')[0][0] + # why this commit is necessary is unclear to me (though without it + # next test may fail complaining of missing tables + self.commit() + + def test_remove_dep_cube(self): + ex = self.assertRaises(ConfigurationError, self.mh.cmd_remove_cube, 'file') + self.assertEquals(str(ex), "can't remove cube file, used as a dependency") + + def test_set_state(self): user = self.session.user self.mh.set_state(user.eid, 'deactivated') diff -r 0072247db207 -r 51c84d585456 server/test/unittest_msplanner.py --- a/server/test/unittest_msplanner.py Tue Jun 23 13:31:35 2009 +0200 +++ b/server/test/unittest_msplanner.py Tue Jun 23 13:36:38 2009 +0200 @@ -7,7 +7,7 @@ """ from logilab.common.decorators import clear_cache from cubicweb.devtools import init_test_database -from cubicweb.devtools.repotest import BasePlannerTC, do_monkey_patch, undo_monkey_patch, test_plan +from cubicweb.devtools.repotest import BasePlannerTC, test_plan class _SetGenerator(object): """singleton to easily create set using "s[0]" or "s[0,1,2]" for instance @@ -55,11 +55,6 @@ {'X': 'Societe'}, {'X': 'State'}, {'X': 'SubDivision'}, {'X': 'Tag'}, {'X': 'TrInfo'}, {'X': 'Transition'}]) -def clear_ms_caches(repo): - clear_cache(repo, 'rel_type_sources') - clear_cache(repo, 'can_cross_relation') - clear_cache(repo, 'is_multi_sources_relation') - # XXX source_defs # keep cnx so it's not garbage collected and the associated session is closed repo, cnx = init_test_database('sqlite') @@ -75,11 +70,7 @@ def setUp(self): #_QuerierTC.setUp(self) - clear_cache(repo, 'rel_type_sources') - self.o = repo.querier - self.session = repo._sessions.values()[0] - self.pool = self.session.set_pool() - self.schema = self.o.schema + self.setup() # hijack Affaire security affreadperms = list(self.schema['Affaire']._groups['read']) self.prevrqlexpr_affaire = affreadperms[-1] @@ -91,26 +82,11 @@ self.prevrqlexpr_user = userreadperms[-1] userreadperms[-1] = ERQLExpression('X owned_by U') self.schema['CWUser']._groups['read'] = tuple(userreadperms) - - self.sources = self.o._repo.sources - self.system = self.sources[-1] - self.sources.append(FakeUserROSource(self.o._repo, self.o.schema, - {'uri': 'ldapuser'})) - repo.sources_by_uri['ldapuser'] = self.sources[-1] - self.ldap = self.sources[-1] - self.sources.append(FakeCardSource(self.o._repo, self.o.schema, - {'uri': 'cards'})) - repo.sources_by_uri['cards'] = self.sources[-1] - self.rql = self.sources[-1] - do_monkey_patch() - clear_ms_caches(repo) + self.add_source(FakeUserROSource, 'ldap') + self.add_source(FakeCardSource, 'cards') def tearDown(self): - undo_monkey_patch() - del self.sources[-1] - del self.sources[-1] - del repo.sources_by_uri['ldapuser'] - del repo.sources_by_uri['cards'] + super(BaseMSPlannerTC, self).tearDown() # restore hijacked security self.restore_orig_affaire_security() self.restore_orig_euser_security() @@ -163,7 +139,7 @@ """retrieve Card X from both sources and return concatenation of results """ self._test('Any X, XT WHERE X is Card, X title XT', - {self.system: {'X': s[0]}, self.rql: {'X': s[0]}}, False) + {self.system: {'X': s[0]}, self.cards: {'X': s[0]}}, False) def test_simple_eid_specified(self): """retrieve CWUser X from system source (eid is specified, can locate the entity) @@ -264,7 +240,7 @@ """ State S could come from both rql source and system source, but since X cannot come from the rql source, the solution - {self.rql : 'S'} must be removed + {self.cards : 'S'} must be removed """ self._test('Any G,L WHERE X in_group G, 
X login L, G name "managers", ' 'EXISTS(X copain T, T login L, T login in ("comme", "cochon")) OR ' @@ -276,12 +252,12 @@ def test_relation_need_split(self): self._test('Any X, S WHERE X in_state S', {self.system: {'X': s[0, 1, 2], 'S': s[0, 1, 2]}, - self.rql: {'X': s[2], 'S': s[2]}}, + self.cards: {'X': s[2], 'S': s[2]}}, True) def test_not_relation_need_split(self): self._test('Any SN WHERE NOT X in_state S, S name SN', - {self.rql: {'X': s[2], 'S': s[0, 1, 2]}, + {self.cards: {'X': s[2], 'S': s[0, 1, 2]}, self.system: {'X': s[0, 1, 2], 'S': s[0, 1, 2]}}, True) @@ -292,14 +268,14 @@ # linking 9999999 to a state self._test('Any SN WHERE NOT X in_state S, X eid %(x)s, S name SN', {'x': 999999}, - {self.rql: {'x': s[0], 'S': s[0]}, + {self.cards: {'x': s[0], 'S': s[0]}, self.system: {'x': s[0], 'S': s[0]}}, False) def test_relation_restriction_ambigous_need_split(self): self._test('Any X,T WHERE X in_state S, S name "pending", T tags X', {self.system: {'X': s[0, 1, 2], 'S': s[0, 1, 2], 'T': s[0, 1, 2], 'tags': s[0, 1, 2]}, - self.rql: {'X': s[2], 'S': s[2]}}, + self.cards: {'X': s[2], 'S': s[2]}}, True) def test_simplified_var(self): @@ -323,7 +299,7 @@ ueid = self.session.user.eid self._test('Any Y,T WHERE X eid %(x)s, X multisource_crossed_rel Y, Y type T', {'x': 999999,}, - {self.rql: {'Y': s[0]}, self.system: {'Y': s[0], 'x': s[0]}}, + {self.cards: {'Y': s[0]}, self.system: {'Y': s[0], 'x': s[0]}}, True) def test_crossed_relation_eid_1_invariant(self): @@ -337,7 +313,7 @@ repo._type_source_cache[999999] = ('Note', 'cards', 999999) self._test('Any Y WHERE X eid %(x)s, X multisource_crossed_rel Y', {'x': 999999,}, - {self.rql: {'Y': s[0], 'multisource_crossed_rel': s[0], 'x': s[0]}, + {self.cards: {'Y': s[0], 'multisource_crossed_rel': s[0], 'x': s[0]}, self.system: {'Y': s[0], 'multisource_crossed_rel': s[0], 'x': s[0]}}, False) @@ -345,7 +321,7 @@ repo._type_source_cache[999999] = ('Note', 'cards', 999999) self._test('Any X,AD,AE WHERE E eid %(x)s, E multisource_crossed_rel X, X in_state AD, AD name AE', {'x': 999999}, - {self.rql: {'X': s[0], 'AD': s[0], 'multisource_crossed_rel': s[0], 'x': s[0]}, + {self.cards: {'X': s[0], 'AD': s[0], 'multisource_crossed_rel': s[0], 'x': s[0]}, self.system: {'X': s[0], 'AD': s[0], 'multisource_crossed_rel': s[0], 'x': s[0]}}, True) @@ -353,7 +329,7 @@ repo._type_source_cache[999999] = ('Note', 'system', 999999) self._test('Any X,AD,AE WHERE E eid %(x)s, E multisource_crossed_rel X, X in_state AD, AD name AE', {'x': 999999}, - {self.rql: {'X': s[0], 'AD': s[0]}, + {self.cards: {'X': s[0], 'AD': s[0]}, self.system: {'X': s[0], 'AD': s[0], 'x': s[0]}}, True) @@ -362,7 +338,7 @@ repo._type_source_cache[999998] = ('State', 'cards', 999998) self._test('Any S,T WHERE S eid %(s)s, N eid %(n)s, N type T, N is Note, S is State', {'n': 999999, 's': 999998}, - {self.rql: {'s': s[0], 'N': s[0]}}, False) + {self.cards: {'s': s[0], 'N': s[0]}}, False) @@ -441,7 +417,7 @@ """ self._test('Any X, XT WHERE X is Card, X title XT', [('OneFetchStep', [('Any X,XT WHERE X is Card, X title XT', [{'X': 'Card', 'XT': 'String'}])], - None, None, [self.rql, self.system], {}, [])]) + None, None, [self.cards, self.system], {}, [])]) def test_simple_eid_specified(self): """retrieve CWUser X from system source (eid is specified, can locate the entity) @@ -668,7 +644,7 @@ [('AggrStep', 'Any MAX(X)', None, None, 'table0', None, [('FetchStep', [('Any MAX(X) WHERE X is Card', [{'X': 'Card'}])], - [self.rql, self.system], {}, {'MAX(X)': 'table0.C0'}, []) + [self.cards, self.system], 
{}, {'MAX(X)': 'table0.C0'}, []) ]) ]) @@ -706,7 +682,7 @@ def test_3sources_ambigous(self): self._test('Any X,T WHERE X owned_by U, U login "syt", X title T', [('FetchStep', [('Any X,T WHERE X title T, X is Card', [{'X': 'Card', 'T': 'String'}])], - [self.rql, self.system], None, + [self.cards, self.system], None, {'T': 'table0.C1', 'X': 'table0.C0', 'X.title': 'table0.C1'}, []), ('FetchStep', [('Any U WHERE U login "syt", U is CWUser', [{'U': 'CWUser'}])], [self.ldap, self.system], None, @@ -730,11 +706,11 @@ 'X is Card, X creation_date < TODAY, X creation_date >= VR)))', [('FetchStep', [('Any VR WHERE X creation_date < TODAY, X creation_date >= VR, X is Card', [{'X': 'Card', 'VR': 'Datetime'}])], - [self.rql, self.system], None, + [self.cards, self.system], None, {'VR': 'table0.C0', 'X.creation_date': 'table0.C0'}, []), ('FetchStep', [('Any V,VR WHERE V creation_date VR, V is Card', [{'VR': 'Datetime', 'V': 'Card'}])], - [self.rql, self.system], None, + [self.cards, self.system], None, {'VR': 'table1.C1', 'V': 'table1.C0', 'V.creation_date': 'table1.C1'}, []), ('OneFetchStep', [('Any V,MAX(VR) WHERE V creation_date VR, (V creation_date TODAY) OR (V creation_date < TODAY, NOT EXISTS(X creation_date >= VR, X is Card)), V is Card', [{'X': 'Card', 'VR': 'Datetime', 'V': 'Card'}])], @@ -749,7 +725,7 @@ [('OneFetchStep', [('Any X,R WHERE X is Note, X in_state S, X type R, NOT EXISTS(Y is Note, Y in_state S, Y type R, X identity Y), S is State', [{'Y': 'Note', 'X': 'Note', 'S': 'State', 'R': 'String'}])], None, None, - [self.rql, self.system], {}, []) + [self.cards, self.system], {}, []) ]) def test_not_identity(self): @@ -766,7 +742,7 @@ 'NOT EXISTS(Y is Note, Y in_state S, Y type R)', [('FetchStep', [('Any A,R WHERE Y in_state A, Y type R, A is State, Y is Note', [{'Y': 'Note', 'A': 'State', 'R': 'String'}])], - [self.rql, self.system], None, + [self.cards, self.system], None, {'A': 'table0.C0', 'R': 'table0.C1', 'Y.type': 'table0.C1'}, []), ('FetchStep', [('Any X,R WHERE X login R, X is CWUser', [{'X': 'CWUser', 'R': 'String'}])], [self.ldap, self.system], None, @@ -782,7 +758,7 @@ self.session = self._user_session()[1] self._test('Any X WHERE X has_text "bla"', [('FetchStep', [('Any E WHERE E type "X", E is Note', [{'E': 'Note'}])], - [self.rql, self.system], None, {'E': 'table0.C0'}, []), + [self.cards, self.system], None, {'E': 'table0.C0'}, []), ('UnionStep', None, None, [('OneFetchStep', [(u'Any X WHERE X has_text "bla", (EXISTS(X owned_by 5)) OR ((((EXISTS(D concerne C?, C owned_by 5, C type "X", X identity D, C is Division, D is Affaire)) OR (EXISTS(H concerne G?, G owned_by 5, G type "X", X identity H, G is SubDivision, H is Affaire))) OR (EXISTS(I concerne F?, F owned_by 5, F type "X", X identity I, F is Societe, I is Affaire))) OR (EXISTS(J concerne E?, E owned_by 5, X identity J, E is Note, J is Affaire))), X is Affaire', @@ -809,7 +785,7 @@ # note: same as the above query but because of the subquery usage, the display differs (not printing solutions for each union) self._test('Any X LIMIT 10 OFFSET 10 WHERE X has_text "bla"', [('FetchStep', [('Any E WHERE E type "X", E is Note', [{'E': 'Note'}])], - [self.rql, self.system], None, {'E': 'table1.C0'}, []), + [self.cards, self.system], None, {'E': 'table1.C0'}, []), ('UnionFetchStep', [ ('FetchStep', [('Any X WHERE X has_text "bla", (EXISTS(X owned_by 5)) OR ((((EXISTS(D concerne C?, C owned_by 5, C type "X", X identity D, C is Division, D is Affaire)) OR (EXISTS(H concerne G?, G owned_by 5, G type "X", X identity H, G is 
SubDivision, H is Affaire))) OR (EXISTS(I concerne F?, F owned_by 5, F type "X", X identity I, F is Societe, I is Affaire))) OR (EXISTS(J concerne E?, E owned_by 5, X identity J, E is Note, J is Affaire))), X is Affaire', [{'C': 'Division', 'E': 'Note', 'D': 'Affaire', 'G': 'SubDivision', 'F': 'Societe', 'I': 'Affaire', 'H': 'Affaire', 'J': 'Affaire', 'X': 'Affaire'}])], @@ -886,7 +862,7 @@ self.session = self._user_session()[1] self._test('Any MAX(X)', [('FetchStep', [('Any E WHERE E type "X", E is Note', [{'E': 'Note'}])], - [self.rql, self.system], None, {'E': 'table1.C0'}, []), + [self.cards, self.system], None, {'E': 'table1.C0'}, []), ('FetchStep', [('Any X WHERE X is CWUser', [{'X': 'CWUser'}])], [self.ldap, self.system], None, {'X': 'table2.C0'}, []), ('UnionFetchStep', [ @@ -895,7 +871,7 @@ ('UnionFetchStep', [('FetchStep', [('Any X WHERE X is IN(Card, Note, State)', [{'X': 'Card'}, {'X': 'Note'}, {'X': 'State'}])], - [self.rql, self.system], {}, {'X': 'table0.C0'}, []), + [self.cards, self.system], {}, {'X': 'table0.C0'}, []), ('FetchStep', [('Any X WHERE X is IN(Bookmark, CWAttribute, CWCache, CWConstraint, CWConstraintType, CWEType, CWGroup, CWPermission, CWProperty, CWRType, CWRelation, Comment, Division, Email, EmailAddress, EmailPart, EmailThread, File, Folder, Image, Personne, RQLExpression, Societe, SubDivision, Tag, TrInfo, Transition)', sorted([{'X': 'Bookmark'}, {'X': 'Comment'}, {'X': 'Division'}, @@ -925,9 +901,9 @@ self._test('Any ET, COUNT(X) GROUPBY ET ORDERBY ET WHERE X is ET', [('FetchStep', [('Any X WHERE X is IN(Card, Note, State)', [{'X': 'Card'}, {'X': 'Note'}, {'X': 'State'}])], - [self.rql, self.system], None, {'X': 'table1.C0'}, []), + [self.cards, self.system], None, {'X': 'table1.C0'}, []), ('FetchStep', [('Any E WHERE E type "X", E is Note', [{'E': 'Note'}])], - [self.rql, self.system], None, {'E': 'table2.C0'}, []), + [self.cards, self.system], None, {'E': 'table2.C0'}, []), ('FetchStep', [('Any X WHERE X is CWUser', [{'X': 'CWUser'}])], [self.ldap, self.system], None, {'X': 'table3.C0'}, []), ('UnionFetchStep', @@ -998,7 +974,7 @@ self._test('Any X, XT WHERE X is Card, X owned_by U, X title XT, U login "syt"', [('FetchStep', [('Any X,XT WHERE X title XT, X is Card', [{'X': 'Card', 'XT': 'String'}])], - [self.rql, self.system], None, {'X': 'table0.C0', 'X.title': 'table0.C1', 'XT': 'table0.C1'}, []), + [self.cards, self.system], None, {'X': 'table0.C0', 'X.title': 'table0.C1', 'XT': 'table0.C1'}, []), ('FetchStep', [('Any U WHERE U login "syt", U is CWUser', [{'U': 'CWUser'}])], [self.ldap, self.system], None, {'U': 'table1.C0'}, []), @@ -1016,7 +992,7 @@ self._test('Any X, XT WHERE X is Card, X owned_by U, X title XT, U login "syt"', [('FetchStep', [('Any X,XT WHERE X title XT, X is Card', [{'X': 'Card', 'XT': 'String'}])], - [self.rql, self.system], None, {'X': 'table0.C0', 'X.title': 'table0.C1', 'XT': 'table0.C1'}, []), + [self.cards, self.system], None, {'X': 'table0.C0', 'X.title': 'table0.C1', 'XT': 'table0.C1'}, []), ('OneFetchStep', [('Any X,XT WHERE X owned_by U, X title XT, U login "syt", EXISTS(U identity 5), U is CWUser, X is Card', [{'U': 'CWUser', 'X': 'Card', 'XT': 'String'}])], @@ -1034,7 +1010,7 @@ [self.system], {}, {'L': 'table0.C1', 'U': 'table0.C0', 'U.login': 'table0.C1'}, []), ('FetchStep', [('Any X,XT WHERE X title XT, X is Card', [{'X': 'Card', 'XT': 'String'}])], - [self.rql, self.system], None, {'X': 'table1.C0', 'X.title': 'table1.C1', 'XT': 'table1.C1'}, []), + [self.cards, self.system], None, {'X': 'table1.C0', 'X.title': 
'table1.C1', 'XT': 'table1.C1'}, []), ('OneFetchStep', [('Any X,XT,U WHERE X owned_by U?, X title XT, X is Card', [{'X': 'Card', 'XT': 'String'}])], @@ -1051,7 +1027,7 @@ self._test('Any X, XT LIMIT 10 OFFSET 10 WHERE X is Card, X owned_by U, X title XT, U login "syt"', [('FetchStep', [('Any X,XT WHERE X title XT, X is Card', [{'X': 'Card', 'XT': 'String'}])], - [self.rql, self.system], None, {'X': 'table0.C0', 'X.title': 'table0.C1', 'XT': 'table0.C1'}, []), + [self.cards, self.system], None, {'X': 'table0.C0', 'X.title': 'table0.C1', 'XT': 'table0.C1'}, []), ('FetchStep', [('Any U WHERE U login "syt", U is CWUser', [{'U': 'CWUser'}])], [self.ldap, self.system], None, {'U': 'table1.C0'}, []), @@ -1170,14 +1146,14 @@ None, None, [self.system], {}, []), ('OneFetchStep', [('Any X,S WHERE X in_state S, S is State, X is Note', [{'X': 'Note', 'S': 'State'}])], - None, None, [self.rql, self.system], {}, []), + None, None, [self.cards, self.system], {}, []), ])]) def test_relation_selection_need_split(self): self._test('Any X,S,U WHERE X in_state S, X todo_by U', [('FetchStep', [('Any X,S WHERE X in_state S, S is State, X is Note', [{'X': 'Note', 'S': 'State'}])], - [self.rql, self.system], None, {'X': 'table0.C0', 'S': 'table0.C1'}, []), + [self.cards, self.system], None, {'X': 'table0.C0', 'S': 'table0.C1'}, []), ('UnionStep', None, None, [('OneFetchStep', [('Any X,S,U WHERE X in_state S, X todo_by U, S is State, U is CWUser, X is Note', [{'X': 'Note', 'S': 'State', 'U': 'CWUser'}])], @@ -1192,7 +1168,7 @@ self._test('Any X,U WHERE X in_state S, S name "pending", X todo_by U', [('FetchStep', [('Any X WHERE X in_state S, S name "pending", S is State, X is Note', [{'X': 'Note', 'S': 'State'}])], - [self.rql, self.system], None, {'X': 'table0.C0'}, []), + [self.cards, self.system], None, {'X': 'table0.C0'}, []), ('UnionStep', None, None, [('OneFetchStep', [('Any X,U WHERE X todo_by U, U is CWUser, X is Note', [{'X': 'Note', 'U': 'CWUser'}])], @@ -1207,7 +1183,7 @@ self._test('Any X,T WHERE X in_state S, S name "pending", T tags X', [('FetchStep', [('Any X WHERE X in_state S, S name "pending", S is State, X is Note', [{'X': 'Note', 'S': 'State'}])], - [self.rql, self.system], None, {'X': 'table0.C0'}, []), + [self.cards, self.system], None, {'X': 'table0.C0'}, []), ('UnionStep', None, None, [ ('OneFetchStep', [('Any X,T WHERE T tags X, T is Tag, X is Note', [{'X': 'Note', 'T': 'Tag'}])], @@ -1231,7 +1207,7 @@ self._test('Any SN WHERE NOT X in_state S, X eid %(x)s, S name SN', [('OneFetchStep', [('Any SN WHERE NOT 5 in_state S, S name SN, S is State', [{'S': 'State', 'SN': 'String'}])], - None, None, [self.rql, self.system], {}, [])], + None, None, [self.cards, self.system], {}, [])], {'x': ueid}) def test_not_relation_no_split_external(self): @@ -1242,20 +1218,20 @@ self._test('Any SN WHERE NOT X in_state S, X eid %(x)s, S name SN', [('OneFetchStep', [('Any SN WHERE NOT 999999 in_state S, S name SN, S is State', [{'S': 'State', 'SN': 'String'}])], - None, None, [self.rql, self.system], {}, [])], + None, None, [self.cards, self.system], {}, [])], {'x': 999999}) def test_not_relation_need_split(self): self._test('Any SN WHERE NOT X in_state S, S name SN', [('FetchStep', [('Any SN,S WHERE S name SN, S is State', [{'S': 'State', 'SN': 'String'}])], - [self.rql, self.system], None, {'S': 'table0.C1', 'S.name': 'table0.C0', 'SN': 'table0.C0'}, + [self.cards, self.system], None, {'S': 'table0.C1', 'S.name': 'table0.C0', 'SN': 'table0.C0'}, []), ('IntersectStep', None, None, [('OneFetchStep', [('Any SN 
WHERE NOT X in_state S, S name SN, S is State, X is Note', [{'S': 'State', 'SN': 'String', 'X': 'Note'}])], - None, None, [self.rql, self.system], {}, + None, None, [self.cards, self.system], {}, []), ('OneFetchStep', [('Any SN WHERE NOT X in_state S, S name SN, S is State, X is IN(Affaire, CWUser)', @@ -1270,7 +1246,7 @@ self._test('Any A,B,C,D WHERE A eid %(x)s,A creation_date B,A modification_date C, A todo_by D?', [('FetchStep', [('Any A,B,C WHERE A eid 999999, A creation_date B, A modification_date C, A is Note', [{'A': 'Note', 'C': 'Datetime', 'B': 'Datetime'}])], - [self.rql], None, + [self.cards], None, {'A': 'table0.C0', 'A.creation_date': 'table0.C1', 'A.modification_date': 'table0.C2', 'C': 'table0.C2', 'B': 'table0.C1'}, []), #('FetchStep', [('Any D WHERE D is CWUser', [{'D': 'CWUser'}])], # [self.ldap, self.system], None, {'D': 'table1.C0'}, []), @@ -1299,7 +1275,7 @@ self._test('Any X WHERE X has_text "toto", X title "zoubidou"', [('FetchStep', [(u'Any X WHERE X title "zoubidou", X is Card', [{'X': 'Card'}])], - [self.rql, self.system], None, {'X': 'table0.C0'}, []), + [self.cards, self.system], None, {'X': 'table0.C0'}, []), ('UnionStep', None, None, [ ('OneFetchStep', [(u'Any X WHERE X has_text "toto", X is Card', [{'X': 'Card'}])], @@ -1315,7 +1291,7 @@ [('AggrStep', 'Any X ORDERBY DUMB_SORT(RF)', None, None, 'table0', None, [ ('FetchStep', [('Any X,RF WHERE X type RF, X is Note', [{'X': 'Note', 'RF': 'String'}])], - [self.rql, self.system], {}, {'X': 'table0.C0', 'X.type': 'table0.C1', 'RF': 'table0.C1'}, []), + [self.cards, self.system], {}, {'X': 'table0.C0', 'X.type': 'table0.C1', 'RF': 'table0.C1'}, []), ]) ]) @@ -1325,7 +1301,7 @@ None, None, 'table0', None, [('FetchStep', [('Any X,RF WHERE X title RF, X is Card', [{'X': 'Card', 'RF': 'String'}])], - [self.rql, self.system], {}, + [self.cards, self.system], {}, {'X': 'table0.C0', 'X.title': 'table0.C1', 'RF': 'table0.C1'}, []), ('FetchStep', [('Any X,RF WHERE X title RF, X is IN(Bookmark, EmailThread)', [{'RF': 'String', 'X': 'Bookmark'}, @@ -1339,7 +1315,7 @@ self._test('Any X,Y WHERE X is Bookmark, Y is Card, X title T, Y title T', [('FetchStep', [('Any Y,T WHERE Y title T, Y is Card', [{'T': 'String', 'Y': 'Card'}])], - [self.rql, self.system], None, + [self.cards, self.system], None, {'T': 'table0.C1', 'Y': 'table0.C0', 'Y.title': 'table0.C1'}, []), ('OneFetchStep', [('Any X,Y WHERE X title T, Y title T, X is Bookmark, Y is Card', @@ -1352,11 +1328,11 @@ self._test('Any X,Y WHERE X is Note, Y is Card, X type T, Y title T', [('FetchStep', [('Any X,T WHERE X type T, X is Note', [{'T': 'String', 'X': 'Note'}])], - [self.rql, self.system], None, + [self.cards, self.system], None, {'T': 'table0.C1', 'X': 'table0.C0', 'X.type': 'table0.C1'}, []), ('FetchStep', [('Any Y,T WHERE Y title T, Y is Card', [{'T': 'String', 'Y': 'Card'}])], - [self.rql, self.system], None, + [self.cards, self.system], None, {'T': 'table1.C1', 'Y': 'table1.C0', 'Y.title': 'table1.C1'}, []), ('OneFetchStep', [('Any X,Y WHERE X type T, Y title T, X is Note, Y is Card', @@ -1372,7 +1348,7 @@ [('FetchStep', [('Any Y,D WHERE Y creation_date > D, Y is Card', [{'D': 'Datetime', 'Y': 'Card'}])], - [self.rql,self.system], None, + [self.cards,self.system], None, {'D': 'table0.C1', 'Y': 'table0.C0', 'Y.creation_date': 'table0.C1'}, []), ('OneFetchStep', [('Any X,Y WHERE X creation_date D, Y creation_date > D, X is Bookmark, Y is Card', @@ -1415,7 +1391,7 @@ 'X.title': 'table0.C1'}, []), ('FetchStep', [('Any X,T WHERE X is Card, X title T', [{'T': 
'String', 'X': 'Card'}])], - [self.rql, self.system], {}, + [self.cards, self.system], {}, {'N': 'table0.C1', 'T': 'table0.C1', 'X': 'table0.C0', @@ -1459,7 +1435,7 @@ repo._type_source_cache[999999] = ('Note', 'system', 999999) self._test('Any Y,T WHERE X eid %(x)s, X multisource_crossed_rel Y, Y type T', [('FetchStep', [('Any Y,T WHERE Y type T, Y is Note', [{'T': 'String', 'Y': 'Note'}])], - [self.rql, self.system], None, + [self.cards, self.system], None, {'T': 'table0.C1', 'Y': 'table0.C0', 'Y.type': 'table0.C1'}, []), ('OneFetchStep', [('Any Y,T WHERE 999999 multisource_crossed_rel Y, Y type T, Y is Note', [{'T': 'String', 'Y': 'Note'}])], @@ -1472,7 +1448,7 @@ repo._type_source_cache[999999] = ('Note', 'cards', 999999) self._test('Any Y WHERE X eid %(x)s, X multisource_crossed_rel Y', [('OneFetchStep', [('Any Y WHERE 999999 multisource_crossed_rel Y, Y is Note', [{'Y': 'Note'}])], - None, None, [self.rql, self.system], {}, []) + None, None, [self.cards, self.system], {}, []) ], {'x': 999999,}) @@ -1480,12 +1456,12 @@ repo._type_source_cache[999999] = ('Note', 'cards', 999999) self._test('Any Y,T WHERE X eid %(x)s, X multisource_crossed_rel Y, Y type T', [('FetchStep', [('Any Y,T WHERE Y type T, Y is Note', [{'T': 'String', 'Y': 'Note'}])], - [self.rql, self.system], None, + [self.cards, self.system], None, {'T': 'table0.C1', 'Y': 'table0.C0', 'Y.type': 'table0.C1'}, []), ('UnionStep', None, None, [('OneFetchStep', [('Any Y,T WHERE 999999 multisource_crossed_rel Y, Y type T, Y is Note', [{'T': 'String', 'Y': 'Note'}])], - None, None, [self.rql], None, + None, None, [self.cards], None, []), ('OneFetchStep', [('Any Y,T WHERE 999999 multisource_crossed_rel Y, Y type T, Y is Note', [{'T': 'String', 'Y': 'Note'}])], @@ -1499,7 +1475,7 @@ repo._type_source_cache[999999] = ('Note', 'system', 999999) self._test('Any Y WHERE X eid %(x)s, NOT X multisource_crossed_rel Y', [('FetchStep', [('Any Y WHERE Y is Note', [{'Y': 'Note'}])], - [self.rql, self.system], None, {'Y': 'table0.C0'}, []), + [self.cards, self.system], None, {'Y': 'table0.C0'}, []), ('OneFetchStep', [('Any Y WHERE NOT 999999 multisource_crossed_rel Y, Y is Note', [{'Y': 'Note'}])], None, None, [self.system], @@ -1516,15 +1492,15 @@ repo._type_source_cache[999999] = ('Note', 'system', 999999) self._test('Any X,Y,T WHERE X multisource_crossed_rel Y, Y type T, X type T', [('FetchStep', [('Any X,T WHERE X type T, X is Note', [{'T': 'String', 'X': 'Note'}])], - [self.rql, self.system], None, + [self.cards, self.system], None, {'T': 'table0.C1', 'X': 'table0.C0', 'X.type': 'table0.C1'}, []), ('FetchStep', [('Any Y,T WHERE Y type T, Y is Note', [{'T': 'String', 'Y': 'Note'}])], - [self.rql, self.system], None, + [self.cards, self.system], None, {'T': 'table1.C1', 'Y': 'table1.C0', 'Y.type': 'table1.C1'}, []), ('UnionStep', None, None, [('OneFetchStep', [('Any X,Y,T WHERE X multisource_crossed_rel Y, Y type T, X type T, X is Note, Y is Note', [{'T': 'String', 'X': 'Note', 'Y': 'Note'}])], - None, None, [self.rql], None, + None, None, [self.cards], None, []), ('OneFetchStep', [('Any X,Y,T WHERE X multisource_crossed_rel Y, Y type T, X type T, X is Note, Y is Note', [{'T': 'String', 'X': 'Note', 'Y': 'Note'}])], @@ -1543,7 +1519,7 @@ self._test('INSERT Note X: X in_state S, X type T WHERE S eid %(s)s, N eid %(n)s, N type T', [('FetchStep', [('Any T WHERE N eid 999999, N type T, N is Note', [{'N': 'Note', 'T': 'String'}])], - [self.rql], None, {'N.type': 'table0.C0', 'T': 'table0.C0'}, []), + [self.cards], None, {'N.type': 'table0.C0', 
'T': 'table0.C0'}, []), ('InsertStep', [('RelationsStep', [('OneFetchStep', [('Any 999998,T WHERE N type T, N is Note', @@ -1560,7 +1536,7 @@ self._test('INSERT Note X: X in_state S, X type T, X migrated_from N WHERE S eid %(s)s, N eid %(n)s, N type T', [('FetchStep', [('Any T,N WHERE N eid 999999, N type T, N is Note', [{'N': 'Note', 'T': 'String'}])], - [self.rql], None, {'N': 'table0.C1', 'N.type': 'table0.C0', 'T': 'table0.C0'}, []), + [self.cards], None, {'N': 'table0.C1', 'N.type': 'table0.C0', 'T': 'table0.C0'}, []), ('InsertStep', [('RelationsStep', [('OneFetchStep', [('Any 999998,T,N WHERE N type T, N is Note', @@ -1580,7 +1556,7 @@ [('RelationsStep', [('OneFetchStep', [('Any 999998,T WHERE N eid 999999, N type T, N is Note', [{'N': 'Note', 'T': 'String'}])], - None, None, [self.rql], {}, [])] + None, None, [self.cards], {}, [])] )] )], {'n': 999999, 's': 999998}) @@ -1730,7 +1706,7 @@ self._test('Any X ORDERBY D DESC WHERE E eid %(x)s, E wf_info_for X, X modification_date D', [('FetchStep', [('Any X,D WHERE X modification_date D, X is Note', [{'X': 'Note', 'D': 'Datetime'}])], - [self.rql, self.system], None, {'X': 'table0.C0', 'X.modification_date': 'table0.C1', 'D': 'table0.C1'}, []), + [self.cards, self.system], None, {'X': 'table0.C0', 'X.modification_date': 'table0.C1', 'D': 'table0.C1'}, []), ('FetchStep', [('Any X,D WHERE X modification_date D, X is CWUser', [{'X': 'CWUser', 'D': 'Datetime'}])], [self.ldap, self.system], None, {'X': 'table1.C0', 'X.modification_date': 'table1.C1', 'D': 'table1.C1'}, []), @@ -1786,7 +1762,7 @@ 'MB depends_on B, B documented_by V, V multisource_rel P, NOT P eid %(p)s', [('FetchStep', [('Any V WHERE V multisource_rel P, NOT P eid %s, P is Note, V is Card'%noteeid, [{'P': 'Note', 'V': 'Card'}])], - [self.rql, self.system], None, {'V': 'table0.C0'}, []), + [self.cards, self.system], None, {'V': 'table0.C0'}, []), ('OneFetchStep', [('DISTINCT Any V WHERE MB documented_by %s, MB depends_on B, B documented_by V, B is Affaire, MB is Affaire, V is Card'%cardeid, [{'B': 'Affaire', 'MB': 'Affaire', 'V': 'Card'}])], None, None, [self.system], {'V': 'table0.C0'}, [])], @@ -1804,7 +1780,7 @@ ]) self._test('Any X WHERE X concerne Y, Y is Note', [('FetchStep', [('Any Y WHERE Y is Note', [{'Y': 'Note'}])], - [self.rql, self.system], None, {'Y': 'table0.C0'}, []), + [self.cards, self.system], None, {'Y': 'table0.C0'}, []), ('OneFetchStep', [('Any X WHERE X concerne Y, X is Affaire, Y is Note', [{'X': 'Affaire', 'Y': 'Note'}])], None, None, [self.system], {'Y': 'table0.C0'}, []) @@ -1814,7 +1790,7 @@ repo._type_source_cache[999999] = ('Note', 'cards', 999999) self._test('Any S,SUM(DUR),SUM(I),(SUM(I) - SUM(DUR)),MIN(DI),MAX(DI) GROUPBY S ORDERBY S WHERE A is Affaire, A duration DUR, A invoiced I, A modification_date DI, A in_state S, S name SN, (EXISTS(A concerne WP, W multisource_rel WP)) OR (EXISTS(A concerne W)), W eid %(n)s', [('FetchStep', [('Any WP WHERE 999999 multisource_rel WP, WP is Note', [{'WP': 'Note'}])], - [self.rql], None, {'WP': u'table0.C0'}, []), + [self.cards], None, {'WP': u'table0.C0'}, []), ('OneFetchStep', [('Any S,SUM(DUR),SUM(I),(SUM(I) - SUM(DUR)),MIN(DI),MAX(DI) GROUPBY S ORDERBY S WHERE A duration DUR, A invoiced I, A modification_date DI, A in_state S, S name SN, (EXISTS(A concerne WP, WP is Note)) OR (EXISTS(A concerne 999999)), A is Affaire, S is State', [{'A': 'Affaire', 'DI': 'Datetime', 'DUR': 'Int', 'I': 'Int', 'S': 'State', 'SN': 'String', 'WP': 'Note'}])], None, None, [self.system], {'WP': u'table0.C0'}, [])], @@ -1824,7 
+1800,7 @@ repo._type_source_cache[999999] = ('Note', 'cards', 999999) self._test('Any X,Z WHERE X eid %(x)s, X multisource_rel Y, Z concerne X', [('FetchStep', [('Any WHERE 999999 multisource_rel Y, Y is Note', [{'Y': 'Note'}])], - [self.rql], None, {}, []), + [self.cards], None, {}, []), ('OneFetchStep', [('Any 999999,Z WHERE Z concerne 999999, Z is Affaire', [{'Z': 'Affaire'}])], None, None, [self.system], {}, [])], @@ -1835,7 +1811,7 @@ repo._type_source_cache[999998] = ('Note', 'cards', 999998) self._test('SET X migrated_from Y WHERE X eid %(x)s, Y multisource_rel Z, Z eid %(z)s, Y migrated_from Z', [('FetchStep', [('Any Y WHERE Y multisource_rel 999998, Y is Note', [{'Y': 'Note'}])], - [self.rql], None, {'Y': u'table0.C0'}, []), + [self.cards], None, {'Y': u'table0.C0'}, []), ('UpdateStep', [('OneFetchStep', [('DISTINCT Any 999999,Y WHERE Y migrated_from 999998, Y is Note', [{'Y': 'Note'}])], @@ -1844,7 +1820,7 @@ {'x': 999999, 'z': 999998}) def test_nonregr10(self): - repo._type_source_cache[999999] = ('CWUser', 'ldapuser', 999999) + repo._type_source_cache[999999] = ('CWUser', 'ldap', 999999) self._test('Any X,AA,AB ORDERBY AA WHERE E eid %(x)s, E owned_by X, X login AA, X modification_date AB', [('FetchStep', [('Any X,AA,AB WHERE X login AA, X modification_date AB, X is CWUser', @@ -1880,7 +1856,7 @@ self._test('Any X ORDERBY Z DESC WHERE X modification_date Z, E eid %(x)s, E see_also X', [('FetchStep', [('Any X,Z WHERE X modification_date Z, X is Note', [{'X': 'Note', 'Z': 'Datetime'}])], - [self.rql, self.system], None, {'X': 'table0.C0', 'X.modification_date': 'table0.C1', 'Z': 'table0.C1'}, + [self.cards, self.system], None, {'X': 'table0.C0', 'X.modification_date': 'table0.C1', 'Z': 'table0.C1'}, []), ('AggrStep', 'Any X ORDERBY Z DESC', None, None, 'table1', None, @@ -1967,42 +1943,23 @@ repo = repo def setUp(self): - self.o = repo.querier - self.session = repo._sessions.values()[0] - self.pool = self.session.set_pool() - self.schema = self.o.schema - self.sources = self.o._repo.sources - self.system = self.sources[-1] - self.sources.append(FakeCardSource(self.o._repo, self.o.schema, - {'uri': 'cards'})) - repo.sources_by_uri['cards'] = self.sources[-1] - self.rql = self.sources[-1] - self.sources.append(FakeCardSource(self.o._repo, self.o.schema, - {'uri': 'cards2'})) - repo.sources_by_uri['cards2'] = self.sources[-1] - self.rql2 = self.sources[-1] - do_monkey_patch() + self.setup() + self.add_source(FakeCardSource, 'cards') + self.add_source(FakeCardSource, 'cards2') self.planner = MSPlanner(self.o.schema, self.o._rqlhelper) assert repo.sources_by_uri['cards2'].support_relation('multisource_crossed_rel') assert 'multisource_crossed_rel' in repo.sources_by_uri['cards2'].cross_relations assert repo.sources_by_uri['cards'].support_relation('multisource_crossed_rel') assert 'multisource_crossed_rel' in repo.sources_by_uri['cards'].cross_relations - clear_ms_caches(repo) _test = test_plan - def tearDown(self): - undo_monkey_patch() - del self.sources[-1] - del self.sources[-1] - del repo.sources_by_uri['cards'] - del repo.sources_by_uri['cards2'] def test_linked_external_entities(self): repo._type_source_cache[999999] = ('Tag', 'system', 999999) self._test('Any X,XT WHERE X is Card, X title XT, T tags X, T eid %(t)s', [('FetchStep', [('Any X,XT WHERE X title XT, X is Card', [{'X': 'Card', 'XT': 'String'}])], - [self.rql, self.rql2, self.system], + [self.cards, self.cards2, self.system], None, {'X': 'table0.C0', 'X.title': 'table0.C1', 'XT': 'table0.C1'}, []), ('OneFetchStep', 
@@ -2018,7 +1975,7 @@ self._test('Any X,AD,AE WHERE E eid %(x)s, E migrated_from X, X in_state AD, AD name AE', [('FetchStep', [('Any X,AD,AE WHERE X in_state AD, AD name AE, AD is State, X is Note', [{'AD': 'State', 'AE': 'String', 'X': 'Note'}])], - [self.rql, self.rql2, self.system], + [self.cards, self.cards2, self.system], None, {'AD': 'table0.C1', 'AD.name': 'table0.C2', 'AE': 'table0.C2', 'X': 'table0.C0'}, []), @@ -2034,14 +1991,14 @@ self._test('Any X,AD,AE WHERE E eid %(x)s, E multisource_crossed_rel X, X in_state AD, AD name AE', [('FetchStep', [('Any X,AD,AE WHERE X in_state AD, AD name AE, AD is State, X is Note', [{'AD': 'State', 'AE': 'String', 'X': 'Note'}])], - [self.rql, self.rql2, self.system], + [self.cards, self.cards2, self.system], None, {'AD': 'table0.C1', 'AD.name': 'table0.C2', 'AE': 'table0.C2', 'X': 'table0.C0'}, []), ('UnionStep', None, None, [('OneFetchStep', [('Any X,AD,AE WHERE 999999 multisource_crossed_rel X, AD name AE, AD is State, X is Note', [{'AD': 'State', 'AE': 'String', 'X': 'Note'}])], - None, None, [self.rql], None, + None, None, [self.cards], None, []), ('OneFetchStep', [('Any X,AD,AE WHERE 999999 multisource_crossed_rel X, AD name AE, AD is State, X is Note', [{'AD': 'State', 'AE': 'String', 'X': 'Note'}])], @@ -2057,7 +2014,7 @@ self._test('Any X,AD,AE WHERE E eid %(x)s, E multisource_crossed_rel X, X in_state AD, AD name AE', [('FetchStep', [('Any X,AD,AE WHERE X in_state AD, AD name AE, AD is State, X is Note', [{'AD': 'State', 'AE': 'String', 'X': 'Note'}])], - [self.rql, self.rql2, self.system], + [self.cards, self.cards2, self.system], None, {'AD': 'table0.C1', 'AD.name': 'table0.C2', 'AE': 'table0.C2', 'X': 'table0.C0'}, []), @@ -2072,18 +2029,18 @@ self._test('Any X,AD,AE WHERE E multisource_crossed_rel X, X in_state AD, AD name AE, E is Note', [('FetchStep', [('Any X,AD,AE WHERE X in_state AD, AD name AE, AD is State, X is Note', [{'AD': 'State', 'AE': 'String', 'X': 'Note'}])], - [self.rql, self.rql2, self.system], + [self.cards, self.cards2, self.system], None, {'AD': 'table0.C1', 'AD.name': 'table0.C2', 'AE': 'table0.C2', 'X': 'table0.C0'}, []), ('FetchStep', [('Any E WHERE E is Note', [{'E': 'Note'}])], - [self.rql, self.rql2, self.system], + [self.cards, self.cards2, self.system], None, {'E': 'table1.C0'}, []), ('UnionStep', None, None, [('OneFetchStep', [('Any X,AD,AE WHERE E multisource_crossed_rel X, AD name AE, AD is State, E is Note, X is Note', [{'AD': 'State', 'AE': 'String', 'E': 'Note', 'X': 'Note'}])], - None, None, [self.rql, self.rql2], None, + None, None, [self.cards, self.cards2], None, []), ('OneFetchStep', [('Any X,AD,AE WHERE E multisource_crossed_rel X, AD name AE, AD is State, E is Note, X is Note', [{'AD': 'State', 'AE': 'String', 'E': 'Note', 'X': 'Note'}])], @@ -2102,7 +2059,7 @@ self._test('Any S WHERE E eid %(x)s, E in_state S, NOT S name "moved"', [('OneFetchStep', [('Any S WHERE 999999 in_state S, NOT S name "moved", S is State', [{'S': 'State'}])], - None, None, [self.rql], {}, [] + None, None, [self.cards], {}, [] )], {'x': 999999}) @@ -2111,11 +2068,66 @@ self._test('Any X,AA,AB WHERE E eid %(x)s, E in_state X, X name AA, X modification_date AB', [('OneFetchStep', [('Any X,AA,AB WHERE 999999 in_state X, X name AA, X modification_date AB, X is State', [{'AA': 'String', 'AB': 'Datetime', 'X': 'State'}])], - None, None, [self.rql], {}, [] + None, None, [self.cards], {}, [] )], {'x': 999999}) + +class FakeVCSSource(AbstractSource): + uri = 'ccc' + support_entities = {'Card': True, 'Note': True} + 
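
# FakeVCSSource, being defined in this hunk, is the whole recipe for planner
# unit tests: declare which entity and relation types the source claims to
# handle and stub the search entry point so plans can be built without a real
# backend.  Reduced to its essentials (a plain object here, not the actual
# AbstractSource contract):

class PlannerStubSource(object):
    uri = 'stub'
    support_entities = {'Card': True, 'Note': True}
    support_relations = {'multisource_rel': True}

    def syntax_tree_search(self, *args, **kwargs):
        return []        # the planner only needs the call to succeed
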
support_relations = {'multisource_inlined_rel': True, + 'multisource_rel': True} + #dont_cross_relations = set(('fiche', 'in_state')) + #cross_relations = set(('multisource_crossed_rel',)) + + def syntax_tree_search(self, *args, **kwargs): + return [] + +class MSPlannerVCSSource(BasePlannerTC): + repo = repo + + def setUp(self): + self.setup() + self.add_source(FakeVCSSource, 'vcs') + self.planner = MSPlanner(self.o.schema, self.o._rqlhelper) + _test = test_plan + + def test_multisource_inlined_rel_skipped(self): + self._test('Any MAX(VC) ' + 'WHERE VC multisource_inlined_rel R2, R para %(branch)s, VC in_state S, S name "published", ' + '(EXISTS(R identity R2)) OR (EXISTS(R multisource_rel R2))', + [('FetchStep', [('Any VC WHERE VC multisource_inlined_rel R2, R para "???", (EXISTS(R identity R2)) OR (EXISTS(R multisource_rel R2)), R is Note, R2 is Note, VC is Note', + [{'R': 'Note', 'R2': 'Note', 'VC': 'Note'}])], + [self.vcs, self.system], None, + {'VC': 'table0.C0'}, + []), + ('OneFetchStep', [(u'Any MAX(VC) WHERE VC in_state S, S name "published", S is State, VC is Note', + [{'S': 'State', 'VC': 'Note'}])], + None, None, [self.system], + {'VC': 'table0.C0'}, + []) + ]) + + def test_fully_simplified_extsource(self): + self.repo._type_source_cache[999998] = ('Note', 'vcs', 999998) + self.repo._type_source_cache[999999] = ('Note', 'vcs', 999999) + self._test('Any X, Y WHERE NOT X multisource_rel Y, X eid 999998, Y eid 999999', + [('OneFetchStep', [('Any 999998,999999 WHERE NOT 999998 multisource_rel 999999', [{}])], + None, None, [self.vcs], {}, []) + ]) + + def test_nonregr_fully_simplified_extsource(self): + self.repo._type_source_cache[999998] = ('Note', 'vcs', 999998) + self.repo._type_source_cache[999999] = ('Note', 'vcs', 999999) + self.repo._type_source_cache[1000000] = ('Note', 'system', 1000000) + self._test('DISTINCT Any T,FALSE,L,M WHERE L eid 1000000, M eid 999999, T eid 999998', + [('OneFetchStep', [('DISTINCT Any 999998,FALSE,1000000,999999', [{}])], + None, None, [self.system], {}, []) + ]) + + if __name__ == '__main__': from logilab.common.testlib import unittest_main unittest_main() diff -r 0072247db207 -r 51c84d585456 server/test/unittest_multisources.py --- a/server/test/unittest_multisources.py Tue Jun 23 13:31:35 2009 +0200 +++ b/server/test/unittest_multisources.py Tue Jun 23 13:36:38 2009 +0200 @@ -14,6 +14,8 @@ from cubicweb.devtools.apptest import RepositoryBasedTC from cubicweb.devtools.repotest import do_monkey_patch, undo_monkey_patch +TestServerConfiguration.no_sqlite_wrap = True + class TwoSourcesConfiguration(TestServerConfiguration): sourcefile = 'sources_multi' @@ -114,6 +116,7 @@ cu = cnx.cursor() rset = cu.execute('Any X WHERE X has_text "card"') self.assertEquals(len(rset), 5, zip(rset.rows, rset.description)) + cnx.close() def test_synchronization(self): cu = cnx2.cursor() diff -r 0072247db207 -r 51c84d585456 server/test/unittest_rql2sql.py --- a/server/test/unittest_rql2sql.py Tue Jun 23 13:31:35 2009 +0200 +++ b/server/test/unittest_rql2sql.py Tue Jun 23 13:36:38 2009 +0200 @@ -1138,16 +1138,22 @@ def _norm_sql(self, sql): return sql.strip() - def _check(self, rql, sql, varmap=None): + def _check(self, rql, sql, varmap=None, args=None): + if args is None: + args = {'text': 'hip hop momo'} try: union = self._prepare(rql) - r, args = self.o.generate(union, {'text': 'hip hop momo'}, + r, nargs = self.o.generate(union, args, varmap=varmap) + args.update(nargs) self.assertLinesEquals((r % args).strip(), self._norm_sql(sql)) except Exception, ex: - print 
rql if 'r' in locals(): - print (r%args).strip() + try: + print (r%args).strip() + except KeyError: + print 'strange, missing substitution' + print r, nargs print '!=' print sql.strip() raise @@ -1207,18 +1213,27 @@ FROM in_basket_relation AS rel_in_basket0 WHERE rel_in_basket0.eid_to=12''') - def test_varmap(self): + def test_varmap1(self): self._check('Any X,L WHERE X is CWUser, X in_group G, X login L, G name "users"', '''SELECT T00.x, T00.l FROM T00, cw_CWGroup AS G, in_group_relation AS rel_in_group0 WHERE rel_in_group0.eid_from=T00.x AND rel_in_group0.eid_to=G.cw_eid AND G.cw_name=users''', varmap={'X': 'T00.x', 'X.login': 'T00.l'}) + + def test_varmap2(self): self._check('Any X,L,GN WHERE X is CWUser, X in_group G, X login L, G name GN', '''SELECT T00.x, T00.l, G.cw_name FROM T00, cw_CWGroup AS G, in_group_relation AS rel_in_group0 WHERE rel_in_group0.eid_from=T00.x AND rel_in_group0.eid_to=G.cw_eid''', varmap={'X': 'T00.x', 'X.login': 'T00.l'}) + def test_varmap3(self): + self.set_debug(True) + self._check('Any %(x)s,D WHERE F data D, F is File', + 'SELECT 728, _TDF0.C0\nFROM _TDF0', + args={'x': 728}, + varmap={'F.data': '_TDF0.C0', 'D': '_TDF0.C0'}) + def test_is_null_transform(self): union = self._prepare('Any X WHERE X login %(login)s') r, args = self.o.generate(union, {'login': None}) diff -r 0072247db207 -r 51c84d585456 server/utils.py --- a/server/utils.py Tue Jun 23 13:31:35 2009 +0200 +++ b/server/utils.py Tue Jun 23 13:36:38 2009 +0200 @@ -13,6 +13,10 @@ from getpass import getpass from random import choice +from logilab.common.configuration import Configuration + +from cubicweb.server import SOURCE_TYPES + try: from crypt import crypt except ImportError: @@ -83,6 +87,13 @@ return user, passwd +def ask_source_config(sourcetype, inputlevel=0): + sconfig = Configuration(options=SOURCE_TYPES[sourcetype].options) + sconfig.adapter = sourcetype + sconfig.input_config(inputlevel=inputlevel) + return sconfig + + class LoopTask(object): """threaded task restarting itself once executed""" def __init__(self, interval, func): diff -r 0072247db207 -r 51c84d585456 sobjects/hooks.py --- a/sobjects/hooks.py Tue Jun 23 13:31:35 2009 +0200 +++ b/sobjects/hooks.py Tue Jun 23 13:36:38 2009 +0200 @@ -26,7 +26,7 @@ beid = None # make pylint happy def precommit_event(self): session = self.session - if not self.beid in session.query_data('pendingeids', ()): + if not self.beid in session.transaction_data.get('pendingeids', ()): if not session.unsafe_execute('Any X WHERE X bookmarked_by U, X eid %(x)s', {'x': self.beid}, 'x'): session.unsafe_execute('DELETE Bookmark X WHERE X eid %(x)s', diff -r 0072247db207 -r 51c84d585456 sobjects/notification.py --- a/sobjects/notification.py Tue Jun 23 13:31:35 2009 +0200 +++ b/sobjects/notification.py Tue Jun 23 13:36:38 2009 +0200 @@ -62,7 +62,7 @@ class RenderAndSendNotificationView(PreCommitOperation): """delay rendering of notification view until precommit""" def precommit_event(self): - if self.view.rset[0][0] in self.session.query_data('pendingeids', ()): + if self.view.rset[0][0] in self.session.transaction_data.get('pendingeids', ()): return # entity added and deleted in the same transaction self.view.render_and_send(**getattr(self, 'viewargs', {})) diff -r 0072247db207 -r 51c84d585456 sobjects/supervising.py --- a/sobjects/supervising.py Tue Jun 23 13:31:35 2009 +0200 +++ b/sobjects/supervising.py Tue Jun 23 13:36:38 2009 +0200 @@ -36,7 +36,8 @@ # don't record last_login_time update which are done # automatically at login time return False - 
self.session.add_query_data('pendingchanges', (self._event(), args)) + self.session.transaction_data.setdefault('pendingchanges', []).append( + (self._event(), args)) return True def _event(self): @@ -54,10 +55,8 @@ # may raise an error during deletion process, for instance due to # missing required relation title = '#%s' % eid - self.session.add_query_data('pendingchanges', - ('delete_entity', - (eid, str(entity.e_schema), - title))) + self.session.transaction_data.setdefault('pendingchanges', []).append( + ('delete_entity', (eid, str(entity.e_schema), title))) return True @@ -227,7 +226,7 @@ uinfo = {'email': config['sender-addr'], 'name': config['sender-name']} view = self._get_view() - content = view.render(changes=session.query_data('pendingchanges')) + content = view.render(changes=session.transaction_data.get('pendingchanges')) recipients = view.recipients() msg = format_mail(uinfo, recipients, content, view.subject(), config=config) self.to_send = [(msg, recipients)] diff -r 0072247db207 -r 51c84d585456 sobjects/test/unittest_supervising.py --- a/sobjects/test/unittest_supervising.py Tue Jun 23 13:31:35 2009 +0200 +++ b/sobjects/test/unittest_supervising.py Tue Jun 23 13:36:38 2009 +0200 @@ -45,7 +45,7 @@ view = sentops[0]._get_view() self.assertEquals(view.recipients(), ['test@logilab.fr']) self.assertEquals(view.subject(), '[data supervision] changes summary') - data = view.render(changes=session.query_data('pendingchanges')).strip() + data = view.render(changes=session.transaction_data.get('pendingchanges')).strip() data = re.sub('#\d+', '#EID', data) data = re.sub('/\d+', '/EID', data) self.assertTextEquals('''user admin has made the following change(s): diff -r 0072247db207 -r 51c84d585456 test/unittest_entity.py --- a/test/unittest_entity.py Tue Jun 23 13:31:35 2009 +0200 +++ b/test/unittest_entity.py Tue Jun 23 13:36:38 2009 +0200 @@ -390,6 +390,11 @@ metainf['extid'] = 1234 self.assertEquals(note.absolute_url(), 'http://cubicweb2.com/note/1234') + def test_absolute_url_empty_field(self): + card = self.add_entity('Card', wikiid=u'', title=u'test') + self.assertEquals(card.absolute_url(), + 'http://testing.fr/cubicweb/card/eid/%s' % card.eid) + if __name__ == '__main__': from logilab.common.testlib import unittest_main unittest_main() diff -r 0072247db207 -r 51c84d585456 vregistry.py --- a/vregistry.py Tue Jun 23 13:31:35 2009 +0200 +++ b/vregistry.py Tue Jun 23 13:36:38 2009 +0200 @@ -283,7 +283,8 @@ # registered() is technically a classmethod but is not declared # as such because we need to compose registered in some cases vobject = obj.registered.im_func(obj, self) - assert not vobject in vobjects, vobject + assert not vobject in vobjects, \ + 'object %s is already registered' % vobject assert callable(vobject.__select__), vobject vobjects.append(vobject) try: @@ -299,24 +300,16 @@ registryname = registryname or obj.__registry__ registry = self.registry(registryname) removed_id = obj.classid() - for registered in registry[obj.id]: + for registered in registry.get(obj.id, ()): # use classid() to compare classes because vreg will probably # have its own version of the class, loaded through execfile if registered.classid() == removed_id: # XXX automatic reloading management - try: - registry[obj.id].remove(registered) - except KeyError: - self.warning('can\'t remove %s, no id %s in the %s registry', - removed_id, obj.id, registryname) - except ValueError: - self.warning('can\'t remove %s, not in the %s registry with id %s', - removed_id, registryname, obj.id) -# else: -# # if 
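
# The rewritten unregister loop here relies on Python's for/else: the else
# branch runs only when the loop ends without `break`, i.e. when no registered
# class matched, which is exactly when the "can't remove" warning applies.
# The idiom in isolation:

def remove_first(objects, target):
    for obj in objects:
        if obj == target:
            objects.remove(obj)
            break
    else:
        print('nothing to remove: %s not registered' % target)
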
diff -r 0072247db207 -r 51c84d585456 test/unittest_entity.py
--- a/test/unittest_entity.py Tue Jun 23 13:31:35 2009 +0200
+++ b/test/unittest_entity.py Tue Jun 23 13:36:38 2009 +0200
@@ -390,6 +390,11 @@
         metainf['extid'] = 1234
         self.assertEquals(note.absolute_url(), 'http://cubicweb2.com/note/1234')

+    def test_absolute_url_empty_field(self):
+        card = self.add_entity('Card', wikiid=u'', title=u'test')
+        self.assertEquals(card.absolute_url(),
+                          'http://testing.fr/cubicweb/card/eid/%s' % card.eid)
+
 if __name__ == '__main__':
     from logilab.common.testlib import unittest_main
     unittest_main()
diff -r 0072247db207 -r 51c84d585456 vregistry.py
--- a/vregistry.py Tue Jun 23 13:31:35 2009 +0200
+++ b/vregistry.py Tue Jun 23 13:36:38 2009 +0200
@@ -283,7 +283,8 @@
             # registered() is technically a classmethod but is not declared
             # as such because we need to compose registered in some cases
             vobject = obj.registered.im_func(obj, self)
-            assert not vobject in vobjects, vobject
+            assert not vobject in vobjects, \
+                'object %s is already registered' % vobject
             assert callable(vobject.__select__), vobject
             vobjects.append(vobject)
         try:
@@ -299,24 +300,16 @@
         registryname = registryname or obj.__registry__
         registry = self.registry(registryname)
         removed_id = obj.classid()
-        for registered in registry[obj.id]:
+        for registered in registry.get(obj.id, ()):
             # use classid() to compare classes because vreg will probably
             # have its own version of the class, loaded through execfile
             if registered.classid() == removed_id:
                 # XXX automatic reloading management
-                try:
-                    registry[obj.id].remove(registered)
-                except KeyError:
-                    self.warning('can\'t remove %s, no id %s in the %s registry',
-                                 removed_id, obj.id, registryname)
-                except ValueError:
-                    self.warning('can\'t remove %s, not in the %s registry with id %s',
-                                 removed_id, registryname, obj.id)
-#             else:
-#                 # if objects is empty, remove oid from registry
-#                 if not registry[obj.id]:
-#                     del regcontent[oid]
+                registry[obj.id].remove(registered)
                 break
+        else:
+            self.warning('can\'t remove %s, no id %s in the %s registry',
+                         removed_id, obj.id, registryname)

     def register_and_replace(self, obj, replaced, registryname=None):
         if hasattr(replaced, 'classid'):
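The rewritten unregister() above replaces try/except bookkeeping with a for/else loop: the else clause runs only when the loop finished without hitting break, that is, when no registered class matched. A standalone sketch of that control flow (hypothetical helper, not cubicweb code):

    def remove_first(items, predicate, warn):
        # `else` on a for loop fires only if no `break` occurred
        for i, item in enumerate(items):
            if predicate(item):
                del items[i]
                break
        else:
            warn("can't remove, no matching item")

    def warn(msg):
        print msg

    registry = ['a', 'b', 'c']
    remove_first(registry, lambda x: x == 'b', warn)  # removes 'b' silently
    remove_first(registry, lambda x: x == 'z', warn)  # triggers the warning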
diff -r 0072247db207 -r 51c84d585456 web/data/cubicweb.css
--- a/web/data/cubicweb.css Tue Jun 23 13:31:35 2009 +0200
+++ b/web/data/cubicweb.css Tue Jun 23 13:36:38 2009 +0200
@@ -1,6 +1,6 @@
 /*
  *  :organization: Logilab
- *  :copyright: 2003-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+ *  :copyright: 2003-2009 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
  *  :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
  */

 /***************************************/
@@ -12,7 +12,7 @@
   padding :0px;
 }

-html, body { 
+html, body {
   background: #e2e2e2;
 }

@@ -277,8 +277,8 @@
   position: relative;
   min-height: 800px;
 }
- 
-table#mainLayout{ 
+
+table#mainLayout{
  margin:0px 3px;
 }

@@ -321,7 +321,7 @@
 /* boxes */

 div.navboxes {
- margin-top: 8px; 
+ margin-top: 8px;
 }

 div.boxFrame {
@@ -459,9 +459,9 @@
   max-width: 50em;
 }

-ul.sideBox li{ 
+ul.sideBox li{
  list-style: none;
- background: none; 
+ background: none;
  padding: 0px 0px 1px 1px;
 }

@@ -576,7 +576,7 @@

 div.primaryRight{
  float:right;
- 
+
 }

 div.metadata {
@@ -603,7 +603,7 @@
  padding-bottom:0.4px
 }

-div.row span.label{ 
+div.row span.label{
  padding-right:1em
 }

@@ -772,20 +772,20 @@
 /* addcombobox */
 /***************************************/

-input#newopt{ 
- width:120px ; 
+input#newopt{
+ width:120px ;
 display:block;
 float:left;
 }

-div#newvalue{ 
+div#newvalue{
 margin-top:2px;
 }

 #add_newopt{
 background: #fffff8 url("go.png") 50% 50% no-repeat;
 width: 20px;
-line-height: 20px; 
+line-height: 20px;
 display:block;
 float:left;
 }
@@ -794,7 +794,7 @@
 /* buttons */
 /***************************************/

-input.button{ 
+input.button{
   margin: 1em 1em 0px 0px;
   border: 1px solid #edecd2;
   border-color:#edecd2 #cfceb7 #cfceb7 #edecd2;
@@ -838,7 +838,7 @@
  font-weight: bold;
 }

-input.validateButton { 
+input.validateButton {
   margin: 1em 1em 0px 0px;
   border: 1px solid #edecd2;
   border-color:#edecd2 #cfceb7 #cfceb7 #edecd2;
diff -r 0072247db207 -r 51c84d585456 web/data/cubicweb.facets.css
--- a/web/data/cubicweb.facets.css Tue Jun 23 13:31:35 2009 +0200
+++ b/web/data/cubicweb.facets.css Tue Jun 23 13:36:38 2009 +0200
@@ -1,7 +1,7 @@
 #filterbox fieldset{
   margin: 0px;
   padding: 0px;
-} 
+}

 div.facet {
   margin-bottom: 8px;
@@ -14,11 +14,11 @@
   font-size: 80%;
   color: #000;
   margin-bottom: 2px;
-  cursor: pointer; 
+  cursor: pointer;
   font: bold 100% Georgia;
 }

-div.facetTitle a { 
+div.facetTitle a {
   padding-left: 10px;
   background: transparent url("puce.png") 0% 50% no-repeat;
 }
@@ -26,12 +26,12 @@
 div.facetBody {
 }

-.opened{ 
- color: #000 !important; 
+.opened{
+ color: #000 !important;
 }

 div.overflowed{
- height: 12em; 
+ height: 12em;
  overflow-y: auto;
 }

@@ -50,12 +50,12 @@
 }

 div.facetValue img{
- float: left; 
+ float: left;
  background: #fff;
 }

 div.facetValue a {
- margin-left: 20px; 
+ margin-left: 20px;
  display: block;
  margin-top: -6px; /* FIXME why do we need this ? */
 }

@@ -78,11 +78,11 @@
 }

-div.facetCheckBox{ 
+div.facetCheckBox{
  line-height:0.8em;
 }

-.facet input{ 
+.facet input{
  margin-top:3px;
  border:1px solid #ccc;
  font-size:11px;
diff -r 0072247db207 -r 51c84d585456 web/data/cubicweb.htmlhelpers.js
--- a/web/data/cubicweb.htmlhelpers.js Tue Jun 23 13:31:35 2009 +0200
+++ b/web/data/cubicweb.htmlhelpers.js Tue Jun 23 13:36:38 2009 +0200
@@ -247,6 +247,11 @@
     }
 }

+function limitTextAreaSize(textarea, size) {
+    var $area = jQuery(textarea);
+    $area.val($area.val().slice(0, size));
+}
+
 //============= page loading events ==========================================//
 function roundedCornersOnLoad() {
     jQuery('div.sideBox').corner('bottom 6px');
diff -r 0072247db207 -r 51c84d585456 web/data/cubicweb.login.css
--- a/web/data/cubicweb.login.css Tue Jun 23 13:31:35 2009 +0200
+++ b/web/data/cubicweb.login.css Tue Jun 23 13:36:38 2009 +0200
@@ -1,7 +1,7 @@
 /* styles for the login popup and login form
  *
  *  :organization: Logilab
- *  :copyright: 2003-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+ *  :copyright: 2003-2009 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
  *  :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
  */

@@ -12,7 +12,7 @@
   width: 26em;
   padding: 0px 1px 1px;
   font-weight: bold;
-  background: #E4EAD8; 
+  background: #E4EAD8;
 }

 div#popupLoginBox div#loginContent {
@@ -20,7 +20,7 @@
   padding: 5px 3px 4px;
 }

-div#loginBox { 
+div#loginBox {
   position : absolute;
   top: 15%;
   left : 50%;
@@ -32,12 +32,12 @@
   text-align: center;
 }

-div#loginBox h1 { 
+div#loginBox h1 {
   color: #FF7700;
   font-size: 140%;
 }

-div#loginTitle { 
+div#loginTitle {
   color: #fff;
   font-weight: bold;
   font-size: 140%;
@@ -46,32 +46,32 @@
   background: #ff7700 url("banner.png") left top repeat-x;
 }

-div#loginBox div#loginContent form { 
+div#loginBox div#loginContent form {
   padding-top: 1em;
-  width: 90%; 
-  margin: auto; 
+  width: 90%;
+  margin: auto;
 }

-#popupLoginBox table td { 
-  padding: 0px 3px; 
+#popupLoginBox table td {
+  padding: 0px 3px;
   white-space: nowrap;
 }

-#loginContent table { 
+#loginContent table {
   padding: 0px 0.5em;
   margin: auto;
 }

-#loginBox table td { 
-  padding: 0px 3px 0.6em; 
+#loginBox table td {
+  padding: 0px 3px 0.6em;
   white-space: nowrap;
 }

-#loginBox .loginButton { 
+#loginBox .loginButton {
   margin-top: 0.6em;
 }

-#loginContent input.data { 
+#loginContent input.data {
   width:12em;
 }

@@ -79,5 +79,5 @@
   border: 1px solid #edecd2;
   border-color:#edecd2 #cfceb7 #cfceb7 #edecd2;
   margin: 2px 0px 0px;
-  background: #f0eff0 url("gradient-grey-up.png") left top repeat-x; 
+  background: #f0eff0 url("gradient-grey-up.png") left top repeat-x;
 }
diff -r 0072247db207 -r 51c84d585456 web/data/cubicweb.preferences.js
--- a/web/data/cubicweb.preferences.js Tue Jun 23 13:31:35 2009 +0200
+++ b/web/data/cubicweb.preferences.js Tue Jun 23 13:36:38 2009 +0200
@@ -4,6 +4,8 @@
  * move me in a more appropriate place
  */

+var prefsValues = {};
+
 function togglePrefVisibility(elemId) {
     clearPreviousMessages();
     jQuery('#' + elemId).toggleClass('hidden');
@@ -21,7 +23,6 @@
     _toggleFieldset(fieldsetid, 0, linklabel, linkhref);
 }

-
 function _toggleFieldset(fieldsetid, closeaction, linklabel, linkhref){
     jQuery('#'+fieldsetid).find('div.openlink').each(function(){
        var link = A({'href' : "javascript:noop();",
@@ -75,7 +76,6 @@
     jQuery('#err-value:' + formid).remove();
 }

-
 function checkValues(form, success){
     var unfreezeButtons = false;
     jQuery(form).find('select').each(function () {
@@ -100,8 +100,8 @@
 }

 function _checkValue(input, unfreezeButtons){
-    var currentValueInput = jQuery("input[name=current-" + input.attr('name') + "]");
-    if (currentValueInput.val() != input.val()){
+    var currentValue = prefsValues[input.attr('name')];
+    if (currentValue != input.val()){
        input.addClass('changed');
        unfreezeButtons = true;
     }else{
@@ -112,27 +112,22 @@
     return unfreezeButtons;
 }

-
 function setCurrentValues(form){
-    jQuery(form).find('input[name^=current-value]').each(function () {
-       var currentValueInput = jQuery(this);
-       var name = currentValueInput.attr('name').split('-')[1];
-       jQuery(form).find("[name=" + name + "]").each(function (){
-           var input = jQuery(this);
-           if(input.attr('type') == 'radio'){
-               // NOTE: there seems to be a bug with jQuery(input).attr('checked')
-               //       in our case, we can't rely on its value, we use
-               //       the DOM API instead.
-               if(input[0].checked){
-                   currentValueInput.val(input.val());
-               }
-           }else{
-               currentValueInput.val(input.val());
-           }
-       });
-    });
+    jQuery(form).find('input[name^=value]').each(function () {
+       var input = jQuery(this);
+       if(input.attr('type') == 'radio'){
+           // NOTE: there seems to be a bug with jQuery(input).attr('checked')
+           //       in our case, we can't rely on its value, we use
+           //       the DOM API instead.
+           if(input[0].checked){
+               prefsValues[input.attr('name')] = input.val();
+           }
+       }else{
+           prefsValues[input.attr('name')] = input.val();
+       }
+    });
 }
- 
+
 function initEvents(){
     jQuery('form').each(function() {
        var form = jQuery(this);
@@ -153,4 +148,3 @@
 $(document).ready(function() {
     initEvents();
 });
-
diff -r 0072247db207 -r 51c84d585456 web/data/cubicweb.timeline-bundle.js
--- a/web/data/cubicweb.timeline-bundle.js Tue Jun 23 13:31:35 2009 +0200
+++ b/web/data/cubicweb.timeline-bundle.js Tue Jun 23 13:36:38 2009 +0200
@@ -80,7 +80,7 @@
         SimileAjax.includeJavascriptFile(doc, urlPrefix + filenames[i]);
     }
     SimileAjax.loadingScriptsCount += filenames.length;
-    SimileAjax.includeJavascriptFile(doc, SimileAjax.urlPrefix + "scripts/signal.js?" + filenames.length);
+    // XXX adim SimileAjax.includeJavascriptFile(doc, SimileAjax.urlPrefix + "scripts/signal.js?" + filenames.length);
 };

 SimileAjax.includeCssFile = function(doc, url) {
     if (doc.body == null) {
@@ -208,7 +208,7 @@
 //     } else {
 //         SimileAjax.includeJavascriptFiles(document, SimileAjax.urlPrefix + "scripts/", javascriptFiles);
 //     }
-    SimileAjax.includeCssFiles(document, SimileAjax.urlPrefix + "styles/", cssFiles);
+//    SimileAjax.includeCssFiles(document, SimileAjax.urlPrefix + "styles/", cssFiles);
     SimileAjax.loaded = true;
 })();

@@ -3830,8 +3830,8 @@
         includeJavascriptFiles(Timeline.urlPrefix, [ "timeline-bundle.js" ]);
         includeCssFiles(Timeline.urlPrefix, [ "timeline-bundle.css" ]);
     } else {
-        includeJavascriptFiles(Timeline.urlPrefix + "scripts/", javascriptFiles);
-        includeCssFiles(Timeline.urlPrefix + "styles/", cssFiles);
+        // XXX adim includeJavascriptFiles(Timeline.urlPrefix + "scripts/", javascriptFiles);
+        // XXX adim includeCssFiles(Timeline.urlPrefix + "styles/", cssFiles);
     }

 /*
@@ -3879,8 +3879,8 @@
     for (var l = 0; l < supportedLocales.length; l++) {
         var locale = supportedLocales[l];
         if (loadLocale[locale]) {
-            includeJavascriptFiles(Timeline.urlPrefix + "scripts/l10n/" + locale + "/", localizedJavascriptFiles);
-            includeCssFiles(Timeline.urlPrefix + "styles/l10n/" + locale + "/", localizedCssFiles);
+            // XXX adim includeJavascriptFiles(Timeline.urlPrefix + "scripts/l10n/" + locale + "/", localizedJavascriptFiles);
+            // XXX adim includeCssFiles(Timeline.urlPrefix + "styles/l10n/" + locale + "/", localizedCssFiles);
         }
     }
diff -r 0072247db207 -r 51c84d585456 web/facet.py
--- a/web/facet.py Tue Jun 23 13:31:35 2009 +0200
+++ b/web/facet.py Tue Jun 23 13:36:38 2009 +0200
@@ -744,7 +744,7 @@
                  % (cssclass, html_escape(unicode(self.value))))
         self.w(u'')
         self.w(u'%s ' % (imgsrc, imgalt))
-        self.w(u'' % (facetid,title))
+        self.w(u'' % (facetid, title))
         self.w(u'\n')
         self.w(u'\n')
         self.w(u'\n')
diff -r 0072247db207 -r 51c84d585456 web/formfields.py
--- a/web/formfields.py Tue Jun 23 13:31:35 2009 +0200
+++ b/web/formfields.py Tue Jun 23 13:36:38 2009 +0200
@@ -101,10 +101,10 @@
         return self.__unicode__().encode('utf-8')

     def init_widget(self, widget):
-        if widget is None and self.choices:
-            widget = Select()
         if widget is not None:
             self.widget = widget
+        elif self.choices and not self.widget.vocabulary_widget:
+            self.widget = Select()
         if isinstance(self.widget, type):
             self.widget = self.widget()

@@ -376,7 +376,7 @@
         return formatstr % float(value)

     def render_example(self, req):
-        return self.format_value(req, 1.234)
+        return self.format_single_value(req, 1.234)


 class DateField(StringField):
@@ -387,7 +387,7 @@
         return value and ustrftime(value, req.property_value(self.format_prop)) or u''

     def render_example(self, req):
-        return self.format_value(req, datetime.now())
+        return self.format_single_value(req, datetime.now())


 class DateTimeField(DateField):
@@ -395,7 +395,7 @@

 class TimeField(DateField):
-    format_prop = 'ui.datetime-format'
+    format_prop = 'ui.time-format'
     widget = TextInput


@@ -455,17 +455,17 @@
         help = rschema.rproperty(eschema, targetschema, 'description')
     if rschema.is_final():
         if rschema.rproperty(eschema, targetschema, 'internationalizable'):
-            kwargs['internationalizable'] = True
+            kwargs.setdefault('internationalizable', True)
         def get_default(form, es=eschema, rs=rschema):
             return es.default(rs)
-        kwargs['initial'] = get_default
+        kwargs.setdefault('initial', get_default)
     else:
         targetschema = rschema.subjects(eschema)[0]
         card = rschema.rproperty(targetschema, eschema, 'cardinality')[1]
         help = rschema.rproperty(targetschema, eschema, 'description')
     kwargs['required'] = card in '1+'
     kwargs['name'] = rschema.type
-    kwargs['help'] = help
+    kwargs.setdefault('help', help)
     if rschema.is_final():
         if skip_meta_attr and rschema in eschema.meta_attributes():
             return None
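The guess_field() hunk above turns several unconditional assignments into kwargs.setdefault() calls, so schema-derived defaults no longer clobber values the caller passed explicitly. A tiny illustration of the difference (made-up values):

    # plain assignment always overwrites; setdefault only fills a hole
    kwargs = {'help': u'help text supplied by the caller'}
    kwargs.setdefault('help', u'description coming from the schema')
    kwargs.setdefault('internationalizable', True)
    assert kwargs['help'] == u'help text supplied by the caller'
    assert kwargs['internationalizable'] is True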
diff -r 0072247db207 -r 51c84d585456 web/formwidgets.py
--- a/web/formwidgets.py Tue Jun 23 13:31:35 2009 +0200
+++ b/web/formwidgets.py Tue Jun 23 13:36:38 2009 +0200
@@ -22,6 +22,8 @@
     # automatically set id and tabindex attributes ?
     setdomid = True
     settabindex = True
+    # does this widget expect a vocabulary
+    vocabulary_widget = False

     def __init__(self, attrs=None, setdomid=None, settabindex=None):
         if attrs is None:
@@ -146,7 +148,7 @@
     """
''' % {'eid': file.eid})
+''' % {'eid': file.eid})

     def test_passwordfield(self):
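Together with the init_widget() change in formfields.py above, the new vocabulary_widget class attribute stops a field with a choices vocabulary from having a Select forced onto an explicitly chosen widget that already renders vocabularies itself (Radio, typically). A simplified sketch with toy classes, not the real cubicweb hierarchy:

    class Widget(object):
        vocabulary_widget = False

    class TextInput(Widget):
        pass

    class Select(Widget):
        vocabulary_widget = True

    class Radio(Select):
        pass

    class Field(object):
        widget = TextInput

        def __init__(self, choices=(), widget=None):
            self.choices = choices
            if widget is not None:
                self.widget = widget          # explicit widget always wins
            elif self.choices and not self.widget.vocabulary_widget:
                self.widget = Select          # fall back for vocabularies
            if isinstance(self.widget, type):
                self.widget = self.widget()

    assert isinstance(Field().widget, TextInput)
    assert isinstance(Field(choices=['a', 'b']).widget, Select)
    assert isinstance(Field(choices=['a'], widget=Radio).widget, Radio)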
diff -r 0072247db207 -r 51c84d585456 web/test/unittest_formfields.py
--- a/web/test/unittest_formfields.py Tue Jun 23 13:31:35 2009 +0200
+++ b/web/test/unittest_formfields.py Tue Jun 23 13:36:38 2009 +0200
@@ -6,13 +6,13 @@
 :license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
 """

-from logilab.common.testlib import TestCase, unittest_main
+from logilab.common.testlib import TestCase, unittest_main, mock_object as mock
 from yams.constraints import StaticVocabularyConstraint, SizeConstraint

 from cubicweb.devtools import TestServerConfiguration
 from cubicweb.devtools.testlib import EnvBasedTC
-from cubicweb.web.formwidgets import PasswordInput, TextArea, Select
+from cubicweb.web.formwidgets import PasswordInput, TextArea, Select, Radio
 from cubicweb.web.formfields import *
 from cubicweb.web.views.forms import EntityFieldsForm

@@ -21,91 +21,104 @@
 config = TestServerConfiguration('data')
 config.bootstrap_cubes()
 schema = config.load_schema()
-state_schema = schema['State']
-cwuser_schema = schema['CWUser']
-file_schema = schema['File']
-salesterm_schema = schema['Salesterm']

 class GuessFieldTC(TestCase):

     def test_state_fields(self):
-        title_field = guess_field(state_schema, schema['name'])
+        title_field = guess_field(schema['State'], schema['name'])
         self.assertIsInstance(title_field, StringField)
         self.assertEquals(title_field.required, True)

-#         synopsis_field = guess_field(state_schema, schema['synopsis'])
+#         synopsis_field = guess_field(schema['State'], schema['synopsis'])
 #         self.assertIsInstance(synopsis_field, StringField)
 #         self.assertIsInstance(synopsis_field.widget, TextArea)
 #         self.assertEquals(synopsis_field.required, False)
 #         self.assertEquals(synopsis_field.help, 'an abstract for this state')

-        description_field = guess_field(state_schema, schema['description'])
+        description_field = guess_field(schema['State'], schema['description'])
         self.assertIsInstance(description_field, RichTextField)
         self.assertEquals(description_field.required, False)
         self.assertEquals(description_field.format_field, None)

-        description_format_field = guess_field(state_schema, schema['description_format'])
+        description_format_field = guess_field(schema['State'], schema['description_format'])
         self.assertEquals(description_format_field, None)

-        description_format_field = guess_field(state_schema, schema['description_format'], skip_meta_attr=False)
+        description_format_field = guess_field(schema['State'], schema['description_format'], skip_meta_attr=False)
         self.assertEquals(description_format_field.internationalizable, True)
         self.assertEquals(description_format_field.sort, True)
         self.assertEquals(description_format_field.initial(None), 'text/rest')

-
-#         wikiid_field = guess_field(state_schema, schema['wikiid'])
+#         wikiid_field = guess_field(schema['State'], schema['wikiid'])
 #         self.assertIsInstance(wikiid_field, StringField)
 #         self.assertEquals(wikiid_field.required, False)

     def test_cwuser_fields(self):
-        upassword_field = guess_field(cwuser_schema, schema['upassword'])
+        upassword_field = guess_field(schema['CWUser'], schema['upassword'])
         self.assertIsInstance(upassword_field, StringField)
         self.assertIsInstance(upassword_field.widget, PasswordInput)
         self.assertEquals(upassword_field.required, True)

-        last_login_time_field = guess_field(cwuser_schema, schema['last_login_time'])
+        last_login_time_field = guess_field(schema['CWUser'], schema['last_login_time'])
         self.assertIsInstance(last_login_time_field, DateTimeField)
         self.assertEquals(last_login_time_field.required, False)

-        in_group_field = guess_field(cwuser_schema, schema['in_group'])
+        in_group_field = guess_field(schema['CWUser'], schema['in_group'])
         self.assertIsInstance(in_group_field, RelationField)
         self.assertEquals(in_group_field.required, True)
         self.assertEquals(in_group_field.role, 'subject')
         self.assertEquals(in_group_field.help, 'groups grant permissions to the user')

-        owned_by_field = guess_field(cwuser_schema, schema['owned_by'], 'object')
+        owned_by_field = guess_field(schema['CWUser'], schema['owned_by'], 'object')
         self.assertIsInstance(owned_by_field, RelationField)
         self.assertEquals(owned_by_field.required, False)
         self.assertEquals(owned_by_field.role, 'object')

     def test_file_fields(self):
-        data_format_field = guess_field(file_schema, schema['data_format'])
+        data_format_field = guess_field(schema['File'], schema['data_format'])
         self.assertEquals(data_format_field, None)
-        data_encoding_field = guess_field(file_schema, schema['data_encoding'])
+        data_encoding_field = guess_field(schema['File'], schema['data_encoding'])
         self.assertEquals(data_encoding_field, None)

-        data_field = guess_field(file_schema, schema['data'])
+        data_field = guess_field(schema['File'], schema['data'])
         self.assertIsInstance(data_field, FileField)
         self.assertEquals(data_field.required, True)
         self.assertIsInstance(data_field.format_field, StringField)
         self.assertIsInstance(data_field.encoding_field, StringField)

     def test_constraints_priority(self):
-        salesterm_field = guess_field(salesterm_schema, schema['reason'])
+        salesterm_field = guess_field(schema['Salesterm'], schema['reason'])
         constraints = schema['reason'].rproperty('Salesterm', 'String', 'constraints')
         self.assertEquals([c.__class__ for c in constraints],
                           [SizeConstraint, StaticVocabularyConstraint])
         self.assertIsInstance(salesterm_field.widget, Select)
+
+    def test_bool_field_base(self):
+        field = guess_field(schema['CWAttribute'], schema['indexed'])
+        self.assertIsInstance(field, BooleanField)
+        self.assertEquals(field.required, False)
+        self.assertEquals(field.initial(None), None)
+        self.assertIsInstance(field.widget, Radio)
+        self.assertEquals(field.vocabulary(mock(req=mock(_=unicode))),
+                          [(u'yes', '1'), (u'no', '')])
+
+    def test_bool_field_explicit_choices(self):
+        field = guess_field(schema['CWAttribute'], schema['indexed'],
+                            choices=[(u'maybe', '1'), (u'no', '')])
+        self.assertIsInstance(field.widget, Radio)
+        self.assertEquals(field.vocabulary(mock(req=mock(_=unicode))),
+                          [(u'maybe', '1'), (u'no', '')])
+
+
 class MoreFieldsTC(EnvBasedTC):
     def test_rtf_format_field(self):
         req = self.request()
         req.use_fckeditor = lambda: False
         e = self.etype_instance('State')
         form = EntityFieldsForm(req, entity=e)
-        description_field = guess_field(state_schema, schema['description'])
+        description_field = guess_field(schema['State'], schema['description'])
         description_format_field = description_field.get_format_field(form)
         self.assertEquals(description_format_field.internationalizable, True)
         self.assertEquals(description_format_field.sort, True)
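The new boolean-field tests fake the request with logilab's mock_object, imported as mock: every keyword argument simply becomes an attribute, which is all field.vocabulary() needs. A hand-rolled stand-in showing the idea (not the real helper):

    class mock(object):
        # minimal imitation of logilab.common.testlib.mock_object
        def __init__(self, **kwargs):
            self.__dict__.update(kwargs)

    fake_form = mock(req=mock(_=unicode))
    # vocabulary() can now call form.req._(u'yes') as if translating
    assert fake_form.req._(u'yes') == u'yes'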
diff -r 0072247db207 -r 51c84d585456 web/views/actions.py
--- a/web/views/actions.py Tue Jun 23 13:31:35 2009 +0200
+++ b/web/views/actions.py Tue Jun 23 13:36:38 2009 +0200
@@ -6,6 +6,7 @@
 :license: GNU Lesser General Public License, v2.1 - http://www.gnu.org/licenses
 """
 __docformat__ = "restructuredtext en"
+_ = unicode

 from cubicweb.vregistry import objectify_selector
 from cubicweb.selectors import (EntitySelector,
@@ -18,8 +19,6 @@
 from cubicweb.web.views import linksearch_select_url, vid_from_rset
 from cubicweb.web.views.autoform import AutomaticEntityForm

-_ = unicode
-

 class has_editable_relation(EntitySelector):
     """accept if some relations for an entity found in the result set is
diff -r 0072247db207 -r 51c84d585456 web/views/autoform.py
--- a/web/views/autoform.py Tue Jun 23 13:31:35 2009 +0200
+++ b/web/views/autoform.py Tue Jun 23 13:36:38 2009 +0200
@@ -137,8 +137,10 @@
         rschema = cls_or_self.schema.rschema(name)
         # XXX use a sample target type. Document this.
         tschemas = rschema.targets(eschema, role)
-        fieldcls = cls_or_self.rfields.etype_get(eschema, rschema, role, tschemas[0])
-        kwargs = cls_or_self.rfields_kwargs.etype_get(eschema, rschema, role, tschemas[0])
+        fieldcls = cls_or_self.rfields.etype_get(eschema, rschema, role,
+                                                 tschemas[0])
+        kwargs = cls_or_self.rfields_kwargs.etype_get(eschema, rschema,
+                                                      role, tschemas[0])
         if kwargs is None:
             kwargs = {}
         if fieldcls:
@@ -290,7 +292,7 @@

         by default true if there is no related entity and we need at least one
         """
-        return not existant and card in '1+'
+        return not existant and card in '1+' or self.req.form.has_key('force_%s_display' % rschema)

     def should_display_add_new_relation_link(self, rschema, existant, card):
         """return true if we should add a link to add a new creation form
diff -r 0072247db207 -r 51c84d585456 web/views/basecomponents.py
--- a/web/views/basecomponents.py Tue Jun 23 13:31:35 2009 +0200
+++ b/web/views/basecomponents.py Tue Jun 23 13:36:38 2009 +0200
@@ -29,7 +29,8 @@
     """build the rql input form, usually displayed in the header"""
     id = 'rqlinput'
     property_defs = VISIBLE_PROP_DEF
-    
+    visible = False
+
     def call(self, view=None):
         if hasattr(view, 'filter_box_context_info'):
             rset = view.filter_box_context_info()[0]
@@ -148,8 +149,10 @@
     site_wide = True

     def call(self):
-        self.w(u'%s' % (
-            self.req.base_url(), self.req.property_value('ui.site-title')))
+        title = self.req.property_value('ui.site-title')
+        if title:
+            self.w(u'%s' % (
+                self.req.base_url(), title))


 class SeeAlsoVComponent(component.RelatedObjectsVComponent):
diff -r 0072247db207 -r 51c84d585456 web/views/basecontrollers.py
--- a/web/views/basecontrollers.py Tue Jun 23 13:31:35 2009 +0200
+++ b/web/views/basecontrollers.py Tue Jun 23 13:36:38 2009 +0200
@@ -239,9 +239,11 @@
         response content type
         """
         self.req.pageid = self.req.form.get('pageid')
-        fname = self.req.form['fname']
         try:
+            fname = self.req.form['fname']
             func = getattr(self, 'js_%s' % fname)
+        except KeyError:
+            raise RemoteCallFailed('no method specified')
         except AttributeError:
             raise RemoteCallFailed('no %s method' % fname)
         # no attribute means the callback takes no argument
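The basecontrollers.py hunk above moves the form['fname'] lookup inside the try block, so a missing parameter (KeyError) is reported distinctly from an unknown callback (AttributeError). A self-contained sketch of that dispatch (stand-in classes, not the real request machinery):

    class RemoteCallFailed(Exception):
        pass

    class Controller(object):
        def js_reload(self):
            return 'reloaded'

        def dispatch(self, form):
            try:
                fname = form['fname']
                func = getattr(self, 'js_%s' % fname)
            except KeyError:
                raise RemoteCallFailed('no method specified')
            except AttributeError:
                raise RemoteCallFailed('no %s method' % fname)
            return func()

    ctl = Controller()
    assert ctl.dispatch({'fname': 'reload'}) == 'reloaded'
    # dispatch({}) raises RemoteCallFailed('no method specified')
    # dispatch({'fname': 'x'}) raises RemoteCallFailed('no x method')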
diff -r 0072247db207 -r 51c84d585456 web/views/basetemplates.py
--- a/web/views/basetemplates.py Tue Jun 23 13:31:35 2009 +0200
+++ b/web/views/basetemplates.py Tue Jun 23 13:36:38 2009 +0200
@@ -12,7 +12,7 @@
 from cubicweb.vregistry import objectify_selector
 from cubicweb.selectors import match_kwargs
-from cubicweb.view import View, MainTemplate, NOINDEX, NOFOLLOW, STRICT_DOCTYPE
+from cubicweb.view import View, MainTemplate, NOINDEX, NOFOLLOW
 from cubicweb.utils import make_uid, UStringIO

 # main templates ##############################################################
@@ -86,7 +86,7 @@
         xhtml_wrap = (self.req.form.has_key('__notemplate') and view.templatable
                       and view.content_type == self.req.html_content_type())
         if xhtml_wrap:
-            view.w(u'\n' + STRICT_DOCTYPE)
+            view.w(u'\n' + self.doctype)
             view.w(u'')
         # have to replace our unicode stream using view's binary stream
         view.render()
@@ -441,7 +441,7 @@
         self.w(u'' % (id, klass))
         if title:
             self.w(u'%s'
-                   % self.req.property_value('ui.site-title'))
+                   % (self.req.property_value('ui.site-title') or u' '))
         self.w(u'\n')
         if message:
diff -r 0072247db207 -r 51c84d585456 web/views/cwproperties.py
--- a/web/views/cwproperties.py Tue Jun 23 13:31:35 2009 +0200
+++ b/web/views/cwproperties.py Tue Jun 23 13:36:38 2009 +0200
@@ -19,7 +19,7 @@
 from cubicweb.web import uicfg, stdmsgs
 from cubicweb.web.form import FormViewMixIn
 from cubicweb.web.formfields import FIELDS, StringField
-from cubicweb.web.formwidgets import Select, Button, SubmitButton
+from cubicweb.web.formwidgets import Select, TextInput, Button, SubmitButton
 from cubicweb.web.views import primary, formrenderers

@@ -139,7 +139,6 @@
         w(u'%s\n'
           % (make_togglable_link('fieldset_' + group, label.capitalize())))
         w(u'' % (group, status))
-        # create selection
         sorted_objects = sorted((self.req.__('%s_%s' % (group, o)), o, f)
                                 for o, f in objects.iteritems())
@@ -217,10 +216,9 @@
                                                       eidparam=True))
         subform.vreg = self.vreg
         subform.form_add_hidden('pkey', key, eidparam=True)
-        subform.form_add_hidden("current-value:%s" % entity.eid,)
         form.form_add_subform(subform)
         return subform
-    
+

 def is_user_prefs(cls, req, rset, row=None, col=0, **kwargs):
     return req.user.eid == rset[row or 0][col]
@@ -303,7 +301,7 @@
         choices = entity.vreg.user_property_keys()
         return [(u'', u'')] + sorted(zip((_(v) for v in choices), choices))
-    
+

 class PropertyValueField(StringField):
     """specific field for CWProperty.value which will be different according
     to the selected key type and vocabulary information
@@ -346,15 +344,15 @@
             else:
                 self.choices = vocab
                 wdg = Select()
+        elif pdef['type'] == 'String': # else we'll get a TextArea by default
+            wdg = TextInput()
         else:
-            wdg = FIELDS[pdef['type']].widget()
+            field = FIELDS[pdef['type']]()
+            wdg = field.widget
             if pdef['type'] == 'Boolean':
-                self.choices = [(form.req._('yes'), '1'), (form.req._('no'), '')]
-            elif pdef['type'] in ('Float', 'Int'):
-                wdg.attrs.setdefault('size', 3)
+                self.choices = field.vocabulary(form)
         self.widget = wdg

-
 uicfg.autoform_field.tag_attribute(('CWProperty', 'pkey'), PropertyKeyField)
 uicfg.autoform_field.tag_attribute(('CWProperty', 'value'), PropertyValueField)
diff -r 0072247db207 -r 51c84d585456 web/views/formrenderers.py
--- a/web/views/formrenderers.py Tue Jun 23 13:31:35 2009 +0200
+++ b/web/views/formrenderers.py Tue Jun 23 13:36:38 2009 +0200
@@ -94,6 +94,8 @@
     def render_help(self, form, field):
         help = []
         descr = field.help
+        if callable(descr):
+            descr = descr(form)
         if descr:
             help.append('%s' % self.req._(descr))
         example = field.example_format(self.req)
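The render_help() hunk above lets field.help be either a plain string or a callable evaluated against the form. A sketch of the pattern (render_help_text is a hypothetical name):

    def render_help_text(descr, form):
        if callable(descr):
            descr = descr(form)   # late-bound, form-dependent help text
        return descr or u''

    assert render_help_text(u'static help', None) == u'static help'
    assert render_help_text(lambda form: u'help for %s' % form,
                            'myform') == u'help for myform'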
diff -r 0072247db207 -r 51c84d585456 web/views/forms.py
--- a/web/views/forms.py Tue Jun 23 13:31:35 2009 +0200
+++ b/web/views/forms.py Tue Jun 23 13:36:38 2009 +0200
@@ -267,7 +267,7 @@
         self.edited_entity = self.complete_entity(self.row or 0, self.col or 0)
         self.form_add_hidden('__type', eidparam=True)
         self.form_add_hidden('eid')
-        if kwargs.get('mainform'):
+        if kwargs.get('mainform', True): # mainform default to true in parent
             self.form_add_hidden(u'__maineid', self.edited_entity.eid)
         # If we need to directly attach the new object to another one
         if self.req.list_form_param('__linkto'):
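The forms.py hunk above matters because dict.get() without a default returns None for a missing key: the old test treated an unspecified mainform as false even though the parent class defaults it to true. A tiny illustration:

    def old_check(**kwargs):
        return bool(kwargs.get('mainform'))

    def new_check(**kwargs):
        return bool(kwargs.get('mainform', True))  # parent defaults to True

    assert old_check() is False                # implicit default was lost
    assert new_check() is True                 # matches the parent default
    assert new_check(mainform=False) is False  # explicit value still wins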
diff -r 0072247db207 -r 51c84d585456 web/views/idownloadable.py
--- a/web/views/idownloadable.py Tue Jun 23 13:31:35 2009 +0200
+++ b/web/views/idownloadable.py Tue Jun 23 13:36:38 2009 +0200
@@ -125,7 +125,7 @@
         """the secondary view is a link to download the file"""
         entity = self.entity(row, col)
         url = html_escape(entity.absolute_url())
-        name = html_escape(entity.download_file_name())
+        name = html_escape(title or entity.download_file_name())
         durl = html_escape(entity.download_url())
         self.w(u'%s [%s]' % (url, name, durl, self.req._('download')))
diff -r 0072247db207 -r 51c84d585456 web/views/management.py
--- a/web/views/management.py Tue Jun 23 13:31:35 2009 +0200
+++ b/web/views/management.py Tue Jun 23 13:36:38 2009 +0200
@@ -186,7 +186,7 @@
         field = guess_field(cwpermschema, self.schema.rschema('require_group'))
         form.append_field(field)
         renderer = self.vreg.select('formrenderers', 'htable', self.req,
-                                    display_progress_div=False)
+                                    rset=None, display_progress_div=False)
         self.w(form.form_render(renderer=renderer))

@@ -243,7 +243,7 @@
         submiturl = self.config['submit-url']
         submitmail = self.config['submit-mail']
         if submiturl or submitmail:
-            form = self.vreg.select('forms', 'base', self.req,
+            form = self.vreg.select('forms', 'base', self.req, rset=None,
                                     mainform=False)
             binfo = text_error_description(ex, excinfo, req, eversion, cversions)
             form.form_add_hidden('description', binfo)
diff -r 0072247db207 -r 51c84d585456 web/views/primary.py
--- a/web/views/primary.py Tue Jun 23 13:31:35 2009 +0200
+++ b/web/views/primary.py Tue Jun 23 13:36:38 2009 +0200
@@ -54,6 +54,7 @@
         self.w(u'')
         self.w(u'')
        self.w(u'')
+        self.content_navigation_components('navcontenttop')
         try:
             self.render_entity_attributes(entity)
         except TypeError: # XXX bw compat
@@ -61,7 +62,6 @@
                          'deprecated (%s)' % self.__class__)
             self.render_entity_attributes(entity, [])
         self.w(u'')
-        self.content_navigation_components('navcontenttop')
         if self.main_related_section:
             try:
                 self.render_entity_relations(entity)
diff -r 0072247db207 -r 51c84d585456 web/views/tableview.py
--- a/web/views/tableview.py Tue Jun 23 13:31:35 2009 +0200
+++ b/web/views/tableview.py Tue Jun 23 13:36:38 2009 +0200
@@ -140,6 +140,7 @@
             actions += self.form_filter(divid, displaycols, displayfilter,
                                         displayactions)
         elif displayfilter:
+            req.add_css('cubicweb.facets.css')
             actions += self.show_hide_actions(divid, True)
         self.w(u'