fix interface_selector bug
If a view using this selector defines an `accepts` attribute, the
view should be selectable only if the entity is of one of the
accepted types (schema inheritance has to be taken into account as
well).
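For illustration, a minimal, self-contained sketch of the matching rule the patch below implements; the `FakeSchema`/`FakeEntityClass` stand-ins and the `accepts_score` helper are hypothetical, only the attribute names (`accepts`, `e_schema`, `ancestors()`) come from the patched code:

```python
# Sketch of the accepts / schema-inheritance rule implemented by the patch.
# FakeSchema, FakeEntityClass and accepts_score are hypothetical stand-ins;
# only the attribute names (accepts, e_schema, ancestors) come from the patch.

class FakeSchema(str):
    """Stand-in for a yams entity schema: compares equal to its type name."""
    def __new__(cls, name, parents=()):
        self = str.__new__(cls, name)
        self.parents = list(parents)
        return self
    def ancestors(self):
        return list(self.parents)

class FakeEntityClass:
    """Stand-in for an entity class exposing an `e_schema` attribute."""
    def __init__(self, e_schema):
        self.e_schema = e_schema

def accepts_score(vobject_class, eclass):
    """Return the score contribution of the `accepts` check: 2 for a direct
    or inherited type match, 1 when there is no constraint (no `accepts`, or
    'Any'), 0 when `accepts` is defined but does not match, i.e. the view
    must not be selected."""
    accepts = set(getattr(vobject_class, 'accepts', ()))
    if not accepts:
        return 1
    eschema = eclass.e_schema
    # the entity type itself plus its parent types (schema inheritance)
    etypes = set([eschema] + eschema.ancestors())
    if accepts & etypes:
        return 2
    if 'Any' in accepts:
        return 1
    return 0

# a view accepting 'Person' also matches a 'Student' entity whose schema
# inherits from 'Person', but is no longer selectable for unrelated types
person = FakeSchema('Person')
student = FakeSchema('Student', parents=[person])

class PersonView:
    accepts = ('Person',)

assert accepts_score(PersonView, FakeEntityClass(student)) == 2
assert accepts_score(PersonView, FakeEntityClass(FakeSchema('Note'))) == 0
```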
--- a/common/selectors.py Mon Dec 08 08:56:08 2008 +0100
+++ b/common/selectors.py Mon Dec 08 10:12:32 2008 +0100
@@ -234,6 +234,8 @@
     * if row is specified, return the value returned by the method with
       the entity's class of this row
     """
+    # XXX this selector can be refactored : extract the code testing
+    # for entity schema / interface compliance
     score = 0
     # check 'accepts' to give priority to more specific classes
     if row is None:
@@ -245,8 +247,15 @@
             if not escore:
                 return 0
             score += escore
-            if eclass.id in getattr(cls, 'accepts', ()):
-                score += 2
+            accepts = set(getattr(cls, 'accepts', ()))
+            # if accepts is defined on the vobject, eclass must match
+            if accepts:
+                eschema = eclass.e_schema
+                etypes = set([eschema] + eschema.ancestors())
+                if accepts & etypes:
+                    score += 2
+                elif 'Any' not in accepts:
+                    return 0
         return score + 1
     etype = rset.description[row][col or 0]
     if etype is None: # outer join
@@ -255,10 +264,16 @@
     for iface in cls.accepts_interfaces:
         score += iface.is_implemented_by(eclass)
     if score:
-        if eclass.id in getattr(cls, 'accepts', ()):
-            score += 2
-        else:
-            score += 1
+        accepts = set(getattr(cls, 'accepts', ()))
+        # if accepts is defined on the vobject, eclass must match
+        if accepts:
+            eschema = eclass.e_schema
+            etypes = set([eschema] + eschema.ancestors())
+            if accepts & etypes:
+                score += 1
+            elif 'Any' not in accepts:
+                return 0
+        score += 1
     return score
 
 @lltrace
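The XXX note in the first hunk suggests extracting the duplicated `accepts`/schema test; a minimal sketch of one possible extraction, assuming a module-level helper is acceptable (the name `accepts_compliant` is hypothetical, its body is simply the block duplicated in both branches of the patch):

```python
def accepts_compliant(cls, eclass):
    """Hypothetical helper extracting the `accepts` test duplicated in the
    patch.  Returns True when the entity type matches `accepts` directly or
    through schema inheritance, False when there is no match but the view
    stays selectable ('Any', or no `accepts` at all), and None when
    `accepts` is defined and not matched, i.e. the selector must return 0."""
    accepts = set(getattr(cls, 'accepts', ()))
    if not accepts:
        return False
    eschema = eclass.e_schema
    etypes = set([eschema] + eschema.ancestors())
    if accepts & etypes:
        return True
    if 'Any' in accepts:
        return False
    return None

# each call site would keep its own scoring and only delegate the test,
# e.g. in the first branch of the patched selector:
#     compliant = accepts_compliant(cls, eclass)
#     if compliant is None:
#         return 0
#     if compliant:
#         score += 2
```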