--- a/dbapi.py Fri Oct 07 11:56:04 2011 +0200
+++ b/dbapi.py Fri Oct 07 11:59:20 2011 +0200
@@ -273,6 +273,8 @@
def anonymous_session(self):
return not self.cnx or self.cnx.anonymous_connection
+ def __repr__(self):
+ return '<DBAPISession %r>' % self.sessionid
class DBAPIRequest(RequestSessionBase):
--- a/entity.py Fri Oct 07 11:56:04 2011 +0200
+++ b/entity.py Fri Oct 07 11:59:20 2011 +0200
@@ -836,12 +836,21 @@
self.cw_attr_cache[name] = value = None
return value
- def related(self, rtype, role='subject', limit=None, entities=False): # XXX .cw_related
+ def related(self, rtype, role='subject', limit=None, entities=False, # XXX .cw_related
+ safe=False):
"""returns a resultset of related entities
- :param role: is the role played by 'self' in the relation ('subject' or 'object')
- :param limit: resultset's maximum size
- :param entities: if True, the entites are returned; if False, a result set is returned
+ :param rtype:
+ the name of the relation, aka relation type
+ :param role:
+ the role played by 'self' in the relation ('subject' or 'object')
+ :param limit:
+ resultset's maximum size
+ :param entities:
+ if True, the entities are returned; if False, a result set is returned
+ :param safe:
+ if True, an empty rset/list of entities will be returned in case of
+ :exc:`Unauthorized`, else (the default), the exception is propagated
"""
rtype = str(rtype)
try:
@@ -853,7 +862,12 @@
return []
return self._cw.empty_rset()
rql = self.cw_related_rql(rtype, role)
- rset = self._cw.execute(rql, {'x': self.eid})
+ try:
+ rset = self._cw.execute(rql, {'x': self.eid})
+ except Unauthorized:
+ if not safe:
+ raise
+ rset = self._cw.empty_rset()
self.cw_set_relation_cache(rtype, role, rset)
return self.related(rtype, role, limit, entities)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/misc/scripts/pyroforge2datafeed.py Fri Oct 07 11:59:20 2011 +0200
@@ -0,0 +1,131 @@
+"""turn a pyro source into a datafeed source
+
+Once this script is run, execute c-c db-check to cleanup relation tables.
+"""
+import sys
+
+try:
+ source_name, = __args__
+ source = repo.sources_by_uri[source_name]
+except ValueError:
+ print('you should specify the source name as script argument (i.e. after --'
+ ' on the command line)')
+ sys.exit(1)
+except KeyError:
+ print '%s is not an active source' % source_name
+ sys.exit(1)
+
+# check source is reachable before doing anything
+try:
+ source.get_connection()._repo
+except AttributeError:
+ print '%s is not reachable. Fix this before running this script' % source_name
+ sys.exit(1)
+
+raw_input('Ensure you have shutdown all instances of this application before continuing.'
+ ' Type enter when ready.')
+
+system_source = repo.system_source
+
+from base64 import b64encode
+from cubicweb.server.edition import EditedEntity
+
+DONT_GET_BACK_ETYPES = set(( # XXX edit as desired
+ 'State',
+ 'RecipeStep', 'RecipeStepInput', 'RecipeStepOutput',
+ 'RecipeTransition', 'RecipeTransitionCondition',
+ 'NarvalConditionExpression', 'Recipe',
+ # XXX TestConfig
+ ))
+
+
+session.mode = 'write' # hold on the connections set
+
+print '******************** backport entity content ***************************'
+
+from cubicweb.server import debugged
+todelete = {}
+for entity in rql('Any X WHERE X cw_source S, S eid %(s)s', {'s': source.eid}).entities():
+ with debugged('DBG_SQL'):
+ etype = entity.__regid__
+ if not source.support_entity(etype):
+ print "source doesn't support %s, delete %s" % (etype, entity.eid)
+ elif etype in DONT_GET_BACK_ETYPES:
+ print 'ignore %s, delete %s' % (etype, entity.eid)
+ else:
+ try:
+ entity.complete()
+ except:
+ print '%s %s much probably deleted, delete it (extid %s)' % (
+ etype, entity.eid, entity.cw_metainformation()['extid'])
+ else:
+ print 'get back', etype, entity.eid
+ entity.cw_edited = EditedEntity(entity, **entity.cw_attr_cache)
+ system_source.add_entity(session, entity)
+ sql("UPDATE entities SET asource=%(asource)s, source='system', extid=%(extid)s "
+ "WHERE eid=%(eid)s", {'asource': source_name,
+ 'extid': b64encode(entity.cwuri),
+ 'eid': entity.eid})
+ continue
+ todelete.setdefault(etype, []).append(entity)
+
+# only cleanup entities table, remaining stuff should be cleaned by a c-c
+# db-check to be run after this script
+for entities in todelete.values():
+ system_source.delete_info_multi(session, entities, source_name)
+
+
+print '******************** backport mapping **********************************'
+session.disable_hook_categories('cw.sources')
+mapping = []
+for mappart in rql('Any X,SCH WHERE X cw_schema SCH, X cw_for_source S, S eid %(s)s',
+ {'s': source.eid}).entities():
+ schemaent = mappart.cw_schema[0]
+ if schemaent.__regid__ != 'CWEType':
+ assert schemaent.__regid__ == 'CWRType'
+ sch = schema._eid_index[schemaent.eid]
+ for rdef in sch.rdefs.values():
+ if not source.support_entity(rdef.subject) \
+ or not source.support_entity(rdef.object):
+ continue
+ if rdef.subject in DONT_GET_BACK_ETYPES \
+ and rdef.object in DONT_GET_BACK_ETYPES:
+ print 'dont map', rdef
+ continue
+ if rdef.subject in DONT_GET_BACK_ETYPES:
+ options = u'action=link\nlinkattr=name'
+ roles = 'object',
+ elif rdef.object in DONT_GET_BACK_ETYPES:
+ options = u'action=link\nlinkattr=name'
+ roles = 'subject',
+ else:
+ options = u'action=copy'
+ if rdef.rtype in ('use_environment',):
+ roles = 'object',
+ else:
+ roles = 'subject',
+ print 'map', rdef, options, roles
+ for role in roles:
+ mapping.append( (
+ (str(rdef.subject), str(rdef.rtype), str(rdef.object)),
+ options + '\nrole=%s' % role) )
+ mappart.cw_delete()
+
+source_ent = rql('CWSource S WHERE S eid %(s)s', {'s': source.eid}).get_entity(0, 0)
+source_ent.init_mapping(mapping)
+
+# change source properties
+config = u'''synchronize=yes
+synchronization-interval=10min
+delete-entities=no
+'''
+rql('SET X type "datafeed", X parser "cw.entityxml", X url %(url)s, X config %(config)s '
+ 'WHERE X eid %(x)s',
+ {'x': source.eid, 'config': config,
+ 'url': source.config['base-url']+'/project'})
+
+
+commit()
+
+from cubes.apycot import recipes
+recipes.create_quick_recipe(session)
--- a/server/__init__.py Fri Oct 07 11:56:04 2011 +0200
+++ b/server/__init__.py Fri Oct 07 11:59:20 2011 +0200
@@ -211,9 +211,9 @@
handler = config.migration_handler(schema, interactive=False,
cnx=cnx, repo=repo)
# install additional driver specific sql files
- handler.install_custom_sql_scripts(join(CW_SOFTWARE_ROOT, 'schemas'), driver)
- for directory in reversed(config.cubes_path()):
- handler.install_custom_sql_scripts(join(directory, 'schema'), driver)
+ handler.cmd_install_custom_sql_scripts()
+ for cube in reversed(config.cubes()):
+ handler.cmd_install_custom_sql_scripts(cube)
# serialize the schema
initialize_schema(config, schema, handler)
# yoo !
@@ -232,8 +232,7 @@
from cubicweb.server.schemaserial import serialize_schema
from cubicweb.server.session import hooks_control
session = mhandler.session
- paths = [p for p in config.cubes_path() + [config.apphome]
- if exists(join(p, 'migration'))]
+ cubes = config.cubes()
# deactivate every hooks but those responsible to set metadata
# so, NO INTEGRITY CHECKS are done, to have quicker db creation.
# Active integrity is kept else we may pb such as two default
@@ -241,18 +240,18 @@
with hooks_control(session, session.HOOKS_DENY_ALL, 'metadata',
'activeintegrity'):
# execute cubicweb's pre<event> script
- mhandler.exec_event_script('pre%s' % event)
+ mhandler.cmd_exec_event_script('pre%s' % event)
# execute cubes pre<event> script if any
- for path in reversed(paths):
- mhandler.exec_event_script('pre%s' % event, path)
+ for cube in reversed(cubes):
+ mhandler.cmd_exec_event_script('pre%s' % event, cube)
# enter instance'schema into the database
session.set_cnxset()
serialize_schema(session, schema)
# execute cubicweb's post<event> script
- mhandler.exec_event_script('post%s' % event)
+ mhandler.cmd_exec_event_script('post%s' % event)
# execute cubes'post<event> script if any
- for path in reversed(paths):
- mhandler.exec_event_script('post%s' % event, path)
+ for cube in reversed(cubes):
+ mhandler.cmd_exec_event_script('post%s' % event, cube)
# sqlite'stored procedures have to be registered at connection opening time
--- a/server/migractions.py Fri Oct 07 11:56:04 2011 +0200
+++ b/server/migractions.py Fri Oct 07 11:59:20 2011 +0200
@@ -50,7 +50,7 @@
from yams.schema2sql import eschema2sql, rschema2sql
from yams.schema import RelationDefinitionSchema
-from cubicweb import AuthenticationError, ExecutionError
+from cubicweb import CW_SOFTWARE_ROOT, AuthenticationError, ExecutionError
from cubicweb.selectors import is_instance
from cubicweb.schema import (ETYPE_NAME_MAP, META_RTYPES, VIRTUAL_RTYPES,
PURE_VIRTUAL_RTYPES,
@@ -358,9 +358,14 @@
"""cached constraint types mapping"""
return ss.cstrtype_mapping(self._cw)
- def exec_event_script(self, event, cubepath=None, funcname=None,
- *args, **kwargs):
- if cubepath:
+ def cmd_exec_event_script(self, event, cube=None, funcname=None,
+ *args, **kwargs):
+ """execute a cube's event script `migration/<event>.py` where event
+ is one of 'precreate', 'postcreate', 'preremove' and 'postremove'.
+ """
+ assert event in ('precreate', 'postcreate', 'preremove', 'postremove')
+ if cube:
+ cubepath = self.config.cube_dir(cube)
apc = osp.join(cubepath, 'migration', '%s.py' % event)
else:
apc = osp.join(self.config.migration_scripts_dir(), '%s.py' % event)
@@ -380,7 +385,16 @@
if self.config.free_wheel:
self.cmd_reactivate_verification_hooks()
- def install_custom_sql_scripts(self, directory, driver):
+ def cmd_install_custom_sql_scripts(self, cube=None):
+ """install a cube's custom sql scripts `schema/*.sql.<driver>` where
+ <driver> depends on the instance main database backend (eg 'postgres',
+ 'mysql'...)
+ """
+ driver = self.repo.system_source.dbdriver
+ if cube is None:
+ directory = osp.join(CW_SOFTWARE_ROOT, 'schemas')
+ else:
+ directory = self.config.cube_dir(cube)
sql_scripts = []
for fpath in glob(osp.join(directory, '*.sql.%s' % driver)):
newname = osp.basename(fpath).replace('.sql.%s' % driver,
@@ -667,10 +681,9 @@
new = set()
# execute pre-create files
driver = self.repo.system_source.dbdriver
- for pack in reversed(newcubes):
- cubedir = self.config.cube_dir(pack)
- self.install_custom_sql_scripts(osp.join(cubedir, 'schema'), driver)
- self.exec_event_script('precreate', cubedir)
+ for cube in reversed(newcubes):
+ self.cmd_install_custom_sql_scripts(cube)
+ self.cmd_exec_event_script('precreate', cube)
# add new entity and relation types
for rschema in newcubes_schema.relations():
if not rschema in self.repo.schema:
@@ -693,8 +706,8 @@
self.cmd_add_relation_definition(str(fromtype), rschema.type,
str(totype))
# execute post-create files
- for pack in reversed(newcubes):
- self.exec_event_script('postcreate', self.config.cube_dir(pack))
+ for cube in reversed(newcubes):
+ self.cmd_exec_event_script('postcreate', cube)
self.commit()
def cmd_remove_cube(self, cube, removedeps=False):
@@ -706,8 +719,8 @@
removedcubes_schema = self.config.load_schema(construction_mode='non-strict')
reposchema = self.repo.schema
# execute pre-remove files
- for pack in reversed(removedcubes):
- self.exec_event_script('preremove', self.config.cube_dir(pack))
+ for cube in reversed(removedcubes):
+ self.cmd_exec_event_script('preremove', cube)
# remove cubes'entity and relation types
for rschema in fsschema.relations():
if not rschema in removedcubes_schema and rschema in reposchema:
@@ -728,7 +741,7 @@
str(fromtype), rschema.type, str(totype))
# execute post-remove files
for cube in reversed(removedcubes):
- self.exec_event_script('postremove', self.config.cube_dir(cube))
+ self.cmd_exec_event_script('postremove', cube)
self.rqlexec('DELETE CWProperty X WHERE X pkey %(pk)s',
{'pk': u'system.version.'+cube}, ask_confirm=False)
self.commit()
--- a/sobjects/parsers.py Fri Oct 07 11:56:04 2011 +0200
+++ b/sobjects/parsers.py Fri Oct 07 11:59:20 2011 +0200
@@ -193,6 +193,13 @@
yield builder.build_item()
def process_item(self, item, rels):
+ """
+ item and rels are what's returned by the item builder `build_item` method:
+
+ * `item` is an {attribute: value} dictionary
+ * `rels` is for relations and structured as
+ {role: {relation: [(related item, related rels)...]}}
+ """
entity = self.extid2entity(str(item.pop('cwuri')), item.pop('cwtype'),
cwsource=item.pop('cwsource'), item=item)
if entity is None:
@@ -206,12 +213,17 @@
# XXX check modification date
attrs = extract_typed_attrs(entity.e_schema, item)
entity.set_attributes(**attrs)
- for (rtype, role, action), rules in self.source.mapping.get(entity.__regid__, {}).iteritems():
+ self.process_relations(entity, rels)
+ return entity
+
+ def process_relations(self, entity, rels):
+ etype = entity.__regid__
+ for (rtype, role, action), rules in self.source.mapping.get(etype, {}).iteritems():
try:
related_items = rels[role][rtype]
except KeyError:
self.source.error('relation %s-%s not found in xml export of %s',
- rtype, role, entity.__regid__)
+ rtype, role, etype)
continue
try:
linker = self.select_linker(action, rtype, role, entity)
@@ -219,20 +231,22 @@
self.source.error('no linker for action %s', action)
else:
linker.link_items(related_items, rules)
- return entity
def before_entity_copy(self, entity, sourceparams):
"""IDataFeedParser callback"""
attrs = extract_typed_attrs(entity.e_schema, sourceparams['item'])
entity.cw_edited.update(attrs)
- def complete_url(self, url, etype=None, add_relations=True):
+ def complete_url(self, url, etype=None, known_relations=None):
"""append to the url's query string information about relation that should
be included in the resulting xml, according to source mapping.
If etype is not specified, try to guess it using the last path part of
the url, i.e. the format used by default in cubicweb to map all entities
of a given type as in 'http://mysite.org/EntityType'.
+
+ If `known_relations` is given, it should be a dictionary of already
+ known relations, so they don't get queried again.
"""
try:
url, qs = url.split('?', 1)
@@ -250,24 +264,29 @@
etype = self._cw.vreg.case_insensitive_etypes[etype.lower()]
except KeyError:
return url + '?' + self._cw.build_url_params(**params)
- if add_relations:
- relations = params.setdefault('relation', [])
- for rtype, role, _ in self.source.mapping.get(etype, ()):
- reldef = '%s-%s' % (rtype, role)
- if not reldef in relations:
- relations.append(reldef)
+ relations = params.setdefault('relation', [])
+ for rtype, role, _ in self.source.mapping.get(etype, ()):
+ if known_relations and rtype in known_relations.get(role, ()):
+ continue
+ reldef = '%s-%s' % (rtype, role)
+ if not reldef in relations:
+ relations.append(reldef)
return url + '?' + self._cw.build_url_params(**params)
- def complete_item(self, item, add_relations=True):
+ def complete_item(self, item, rels):
try:
- return self._parsed_urls[(item['cwuri'], add_relations)]
+ return self._parsed_urls[item['cwuri']]
except KeyError:
- itemurl = self.complete_url(item['cwuri'], item['cwtype'],
- add_relations)
+ itemurl = self.complete_url(item['cwuri'], item['cwtype'], rels)
item_rels = list(self.parse(itemurl))
assert len(item_rels) == 1, 'url %s expected to bring back one '\
'and only one entity, got %s' % (itemurl, len(item_rels))
- self._parsed_urls[(item['cwuri'], add_relations)] = item_rels[0]
+ self._parsed_urls[item['cwuri']] = item_rels[0]
+ if rels:
+ # XXX (do it better) merge relations
+ new_rels = item_rels[0][1]
+ new_rels.setdefault('subject', {}).update(rels.get('subject', {}))
+ new_rels.setdefault('object', {}).update(rels.get('object', {}))
return item_rels[0]
@@ -280,6 +299,12 @@
self.node = node
def build_item(self):
+ """parse a XML document node and return two dictionaries defining (part
+ of) an entity:
+
+ - {attribute: value}
+ - {role: {relation: [(related item, related rels)...]}}
+ """
node = self.node
item = dict(node.attrib.items())
item['cwtype'] = unicode(node.tag)
@@ -296,7 +321,7 @@
if role:
# relation
related = rels.setdefault(role, {}).setdefault(child.tag, [])
- related += [ritem for ritem, _ in self.parser.parse_etree(child)]
+ related += self.parser.parse_etree(child)
elif child.text:
# attribute
item[child.tag] = unicode(child.text)
@@ -337,10 +362,10 @@
assert not any(x[1] for x in rules), "'copy' action takes no option"
ttypes = frozenset([x[0] for x in rules])
eids = [] # local eids
- for item in others:
+ for item, rels in others:
if item['cwtype'] in ttypes:
- item = self.parser.complete_item(item)[0]
- other_entity = self.parser.process_item(item, [])
+ item, rels = self.parser.complete_item(item, rels)
+ other_entity = self.parser.process_item(item, rels)
if other_entity is not None:
eids.append(other_entity.eid)
if eids:
@@ -395,11 +420,11 @@
return all(z in y for z in x)
eids = [] # local eids
source = self.parser.source
- for item in others:
+ for item, rels in others:
if item['cwtype'] != ttype:
continue
if not issubset(searchattrs, item):
- item = self.parser.complete_item(item, False)[0]
+ item, rels = self.parser.complete_item(item, rels)
if not issubset(searchattrs, item):
source.error('missing attribute, got %s expected keys %s',
item, searchattrs)
@@ -407,16 +432,20 @@
# XXX str() needed with python < 2.6
kwargs = dict((str(attr), item[attr]) for attr in searchattrs)
targets = self._find_entities(item, kwargs)
- if len(targets) > 1:
- source.error('ambiguous link: found %s entity %s with attributes %s',
- len(targets), item['cwtype'], kwargs)
- elif len(targets) == 1:
- eids.append(targets[0].eid)
- elif self.create_when_not_found:
- eids.append(self._cw.create_entity(item['cwtype'], **kwargs).eid)
+ if len(targets) == 1:
+ entity = targets[0]
+ elif not targets and self.create_when_not_found:
+ entity = self._cw.create_entity(item['cwtype'], **kwargs)
else:
- source.error('can not find %s entity with attributes %s',
- item['cwtype'], kwargs)
+ if len(targets) > 1:
+ source.error('ambiguous link: found %s entity %s with attributes %s',
+ len(targets), item['cwtype'], kwargs)
+ else:
+ source.error('can not find %s entity with attributes %s',
+ item['cwtype'], kwargs)
+ continue
+ eids.append(entity.eid)
+ self.parser.process_relations(entity, rels)
if eids:
self._set_relation(eids)
else:
--- a/sobjects/test/data/schema.py Fri Oct 07 11:56:04 2011 +0200
+++ b/sobjects/test/data/schema.py Fri Oct 07 11:59:20 2011 +0200
@@ -25,4 +25,4 @@
class Tag(EntityType):
name = String(unique=True)
- tags = SubjectRelation('CWUser')
+ tags = SubjectRelation(('CWUser', 'CWGroup', 'EmailAddress'))
--- a/sobjects/test/unittest_parsers.py Fri Oct 07 11:56:04 2011 +0200
+++ b/sobjects/test/unittest_parsers.py Fri Oct 07 11:59:20 2011 +0200
@@ -71,6 +71,9 @@
<address>syt@logilab.fr</address>
<modification_date>2010-04-13 14:35:56</modification_date>
<creation_date>2010-04-13 14:35:56</creation_date>
+ <tags role="object">
+ <Tag cwuri="http://pouet.org/9" eid="9"/>
+ </tags>
</EmailAddress>
</rset>
''',
@@ -78,6 +81,9 @@
<rset size="1">
<CWGroup eid="7" cwuri="http://pouet.org/7">
<name>users</name>
+ <tags role="object">
+ <Tag cwuri="http://pouet.org/9" eid="9"/>
+ </tags>
</CWGroup>
</rset>
''',
@@ -140,7 +146,7 @@
u'role=subject\naction=link\nlinkattr=name'),
(('CWUser', 'in_state', '*'),
u'role=subject\naction=link\nlinkattr=name'),
- (('*', 'tags', 'CWUser'),
+ (('*', 'tags', '*'),
u'role=object\naction=link-or-create\nlinkattr=name'),
])
myotherfeed.init_mapping([(('CWUser', 'in_group', '*'),
@@ -177,7 +183,15 @@
(u'Tag', {u'linkattr': u'name'})],
(u'use_email', u'subject', u'copy'): [
(u'EmailAddress', {})]
- }
+ },
+ u'CWGroup': {
+ (u'tags', u'object', u'link-or-create'): [
+ (u'Tag', {u'linkattr': u'name'})],
+ },
+ u'EmailAddress': {
+ (u'tags', u'object', u'link-or-create'): [
+ (u'Tag', {u'linkattr': u'name'})],
+ },
})
session = self.repo.internal_session(safe=True)
stats = dfsource.pull_data(session, force=True, raise_on_error=True)
@@ -198,17 +212,21 @@
self.assertEqual(email.cwuri, 'http://pouet.org/6')
self.assertEqual(email.absolute_url(), 'http://pouet.org/6')
self.assertEqual(email.cw_source[0].name, 'myfeed')
+ self.assertEqual(len(email.reverse_tags), 1)
+ self.assertEqual(email.reverse_tags[0].name, 'hop')
# link action
self.assertFalse(self.execute('CWGroup X WHERE X name "unknown"'))
groups = sorted([g.name for g in user.in_group])
self.assertEqual(groups, ['users'])
+ group = user.in_group[0]
+ self.assertEqual(len(group.reverse_tags), 1)
+ self.assertEqual(group.reverse_tags[0].name, 'hop')
# link or create action
- tags = sorted([t.name for t in user.reverse_tags])
- self.assertEqual(tags, ['hop', 'unknown'])
- tag = self.execute('Tag X WHERE X name "unknown"').get_entity(0, 0)
- self.assertEqual(tag.cwuri, 'http://testing.fr/cubicweb/%s' % tag.eid)
- self.assertEqual(tag.cw_source[0].name, 'system')
-
+ tags = set([(t.name, t.cwuri.replace(str(t.eid), ''), t.cw_source[0].name)
+ for t in user.reverse_tags])
+ self.assertEqual(tags, set((('hop', 'http://testing.fr/cubicweb/', 'system'),
+ ('unknown', 'http://testing.fr/cubicweb/', 'system')))
+ )
session.set_cnxset()
stats = dfsource.pull_data(session, force=True, raise_on_error=True)
self.assertEqual(stats['created'], set())
--- a/web/data/cubicweb.facets.js Fri Oct 07 11:56:04 2011 +0200
+++ b/web/data/cubicweb.facets.js Fri Oct 07 11:59:20 2011 +0200
@@ -111,17 +111,17 @@
'ctxcomponents', 'breadcrumbs'));
}
}
- var mainvar = null;
- var index = jQuery.inArray('mainvar', zipped[0]);
- if (index > - 1) {
+ var mainvar = null;
+ var index = jQuery.inArray('mainvar', zipped[0]);
+ if (index > - 1) {
mainvar = zipped[1][index];
- }
+ }
var d = loadRemote('json', ajaxFuncArgs('filter_select_content', null, toupdate, rql, mainvar));
d.addCallback(function(updateMap) {
for (facetName in updateMap) {
var values = updateMap[facetName];
- // XXX fine with jquery 1.6
+ // XXX fine with jquery 1.6
//$form.find('div[cubicweb\\:facetName="' + facetName + '"] ~ div .facetCheckBox').each(function() {
$form.find('div').filter(function () {return $(this).attr('cubicweb:facetName') == facetName}).parent().find('.facetCheckBox').each(function() {
var value = this.getAttribute('cubicweb:value');
@@ -151,19 +151,19 @@
// called, not when the page is initialized
var facetargs = form.attr('cubicweb:facetargs');
if (facetargs != undefined && !form.attr('cubicweb:initialized')) {
- form.attr('cubicweb:initialized', '1');
- var jsfacetargs = cw.evalJSON(form.attr('cubicweb:facetargs'));
+ form.attr('cubicweb:initialized', '1');
+ var jsfacetargs = cw.evalJSON(form.attr('cubicweb:facetargs'));
form.submit(function() {
buildRQL.apply(null, jsfacetargs);
return false;
});
- var divid = jsfacetargs[0];
- if (jQuery('#'+divid).length) {
- var $loadingDiv = $(DIV({id:'facetLoading'},
- facetLoadingMsg));
- $loadingDiv.corner();
- $(jQuery('#'+divid).get(0).parentNode).append($loadingDiv);
- }
+ var divid = jsfacetargs[0];
+ if (jQuery('#'+divid).length) {
+ var $loadingDiv = $(DIV({id:'facetLoading'},
+ facetLoadingMsg));
+ $loadingDiv.corner();
+ $(jQuery('#'+divid).get(0).parentNode).append($loadingDiv);
+ }
form.find('div.facet').each(function() {
var facet = jQuery(this);
facet.find('div.facetCheckBox').each(function(i) {
--- a/web/views/authentication.py Fri Oct 07 11:56:04 2011 +0200
+++ b/web/views/authentication.py Fri Oct 07 11:59:20 2011 +0200
@@ -65,6 +65,14 @@
"""
raise NotImplementedError()
+ def cleanup_authentication_information(self, req):
+ """called when the retriever has returned some authentication
+ information but we get an authentication error when using them, so it
+ gets a chance to clean up things (e.g. remove a cookie)
+ """
+ pass
+
+
class LoginPasswordRetreiver(WebAuthInfoRetreiver):
__regid__ = 'loginpwdauth'
order = 10
@@ -144,6 +152,7 @@
try:
cnx = self._authenticate(login, authinfo)
except AuthenticationError:
+ retriever.cleanup_authentication_information(req)
continue # the next one may succeed
for retriever_ in self.authinforetrievers:
retriever_.authenticated(retriever, req, cnx, login, authinfo)
--- a/web/views/rdf.py Fri Oct 07 11:56:04 2011 +0200
+++ b/web/views/rdf.py Fri Oct 07 11:59:20 2011 +0200
@@ -89,15 +89,17 @@
except xy.UnsupportedVocabulary:
pass
else:
- for related in entity.related(rtype, role, entities=True):
- if role == 'subject':
- add( (cwuri, CW[rtype], URIRef(related.cwuri)) )
- try:
- for item in xy.xeq('%s %s' % (entity.e_schema.type, rtype)):
- add( (cwuri, urijoin(item), URIRef(related.cwuri)) )
- except xy.UnsupportedVocabulary:
- pass
- else:
- add( (URIRef(related.cwuri), CW[rtype], cwuri) )
+ try:
+ for related in entity.related(rtype, role, entities=True, safe=True):
+ if role == 'subject':
+ add( (cwuri, CW[rtype], URIRef(related.cwuri)) )
+ try:
+ for item in xy.xeq('%s %s' % (entity.e_schema.type, rtype)):
+ add( (cwuri, urijoin(item), URIRef(related.cwuri)) )
+ except xy.UnsupportedVocabulary:
+ pass
+ else:
+ add( (URIRef(related.cwuri), CW[rtype], cwuri) )
+ except Unauthorized:
+ pass
-
--- a/web/views/sessions.py Fri Oct 07 11:56:04 2011 +0200
+++ b/web/views/sessions.py Fri Oct 07 11:59:20 2011 +0200
@@ -25,7 +25,7 @@
BadConnectionId)
from cubicweb.web import InvalidSession, Redirect
from cubicweb.web.application import AbstractSessionManager
-from cubicweb.dbapi import DBAPISession
+from cubicweb.dbapi import ProgrammingError, DBAPISession
class InMemoryRepositorySessionManager(AbstractSessionManager):
@@ -133,6 +133,6 @@
if session.cnx:
try:
session.cnx.close()
- except BadConnectionId: # expired on the repository side
+ except (ProgrammingError, BadConnectionId): # expired on the repository side
pass
session.cnx = None
--- a/web/views/xmlrss.py Fri Oct 07 11:56:04 2011 +0200
+++ b/web/views/xmlrss.py Fri Oct 07 11:59:20 2011 +0200
@@ -115,14 +115,14 @@
self.error('unexisting relation %r', relstr)
continue
self.w(u' <%s role="%s">\n' % (rtype, role))
- for related in entity.related(rtype, role, entities=True):
- related.view('xmlrelateditem', w=self.w)
+ self.wview('xmlrelateditem', entity.related(rtype, role, safe=True), 'null')
self.w(u' </%s>\n' % rtype)
self.w(u'</%s>\n' % (entity.e_schema))
class XMLRelatedItemView(EntityView):
__regid__ = 'xmlrelateditem'
+ add_div_section = False
def entity_call(self, entity):
# XXX put unique attributes as xml attribute, they are much probably