[datafeed] Add a raise_on_error parameter to DataFeedParser.extid2entity
Pass the option down from the various `process_*` methods of the existing parsers.
This makes debugging in tests easier.
Closes #4601191.
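
For illustration, a minimal sketch (not part of the patch) of how a parser can forward the new option; `MyParser`, the `Card` entity type and the item dict are made up, while extid2entity, created_during_pull and notify_updated are the existing DataFeedParser API exercised by the changes below:

    from cubicweb.server.sources import datafeed

    class MyParser(datafeed.DataFeedParser):
        __regid__ = 'myfeed'

        def process(self, url, raise_on_error=False):
            # With raise_on_error=True, a ValidationError raised while
            # importing propagates to the caller (handy in tests);
            # otherwise it is only recorded in the import log, as before.
            entity = self.extid2entity(url, 'Card',
                                       item={'title': u'example'},
                                       raise_on_error=raise_on_error)
            if entity is not None and not self.created_during_pull(entity):
                self.notify_updated(entity)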
--- a/server/sources/datafeed.py Tue Dec 02 11:37:01 2014 +0100
+++ b/server/sources/datafeed.py Wed Nov 19 11:57:55 2014 +0100
@@ -342,9 +342,13 @@
raise ValidationError(schemacfg.eid, {None: msg})
def extid2entity(self, uri, etype, **sourceparams):
- """return an entity for the given uri. May return None if it should be
- skipped
+ """Return an entity for the given uri. May return None if it should be
+ skipped.
+
+        If the `raise_on_error` keyword parameter is true, a ValidationError
+        will be raised instead of being recorded in the import log.
"""
+ raise_on_error = sourceparams.pop('raise_on_error', False)
cnx = self._cw
# if cwsource is specified and repository has a source with the same
# name, call extid2eid on that source so entity will be properly seen as
@@ -361,8 +365,8 @@
eid = cnx.repo.extid2eid(source, str(uri), etype, cnx,
sourceparams=sourceparams)
except ValidationError as ex:
- # XXX use critical so they are seen during tests. Should consider
- # raise_on_error instead?
+ if raise_on_error:
+ raise
self.source.critical('error while creating %s: %s', etype, ex)
self.import_log.record_error('error while creating %s: %s'
% (etype, ex))
@@ -453,7 +457,7 @@
rollback = self._cw.rollback
for args in parsed:
try:
- self.process_item(*args)
+ self.process_item(*args, raise_on_error=raise_on_error)
# commit+set_cnxset instead of commit(free_cnxset=False) to let
# other a chance to get our connections set
commit()
@@ -473,7 +477,7 @@
def parse_etree(self, document):
return [(document,)]
- def process_item(self, *args):
+ def process_item(self, *args, **kwargs):
raise NotImplementedError
def is_deleted(self, extid, etype, eid):
--- a/server/test/unittest_datafeed.py Tue Dec 02 11:37:01 2014 +0100
+++ b/server/test/unittest_datafeed.py Wed Nov 19 11:57:55 2014 +0100
@@ -40,7 +40,8 @@
def process(self, url, raise_on_error=False):
entity = self.extid2entity('http://www.cubicweb.org/', 'Card',
item={'title': u'cubicweb.org',
- 'content': u'the cw web site'})
+ 'content': u'the cw web site'},
+ raise_on_error=raise_on_error)
if not self.created_during_pull(entity):
self.notify_updated(entity)
def before_entity_copy(self, entity, sourceparams):
--- a/sobjects/cwxmlparser.py Tue Dec 02 11:37:01 2014 +0100
+++ b/sobjects/cwxmlparser.py Wed Nov 19 11:57:55 2014 +0100
@@ -195,7 +195,7 @@
parser=self)
yield builder.build_item()
- def process_item(self, item, rels):
+ def process_item(self, item, rels, raise_on_error=False):
"""
item and rels are what's returned by the item builder `build_item` method:
@@ -204,7 +204,8 @@
{role: {relation: [(related item, related rels)...]}
"""
entity = self.extid2entity(str(item['cwuri']), item['cwtype'],
- cwsource=item['cwsource'], item=item)
+ cwsource=item['cwsource'], item=item,
+ raise_on_error=raise_on_error)
if entity is None:
return None
if entity.eid in self._processed_entities:
--- a/sobjects/ldapparser.py Tue Dec 02 11:37:01 2014 +0100
+++ b/sobjects/ldapparser.py Wed Nov 19 11:57:55 2014 +0100
@@ -70,10 +70,11 @@
attrs))
return {}
- def _process(self, etype, sdict):
+ def _process(self, etype, sdict, raise_on_error=False):
self.debug('fetched %s %s', etype, sdict)
extid = sdict['dn']
- entity = self.extid2entity(extid, etype, **sdict)
+ entity = self.extid2entity(extid, etype,
+ raise_on_error=raise_on_error, **sdict)
if entity is not None and not self.created_during_pull(entity):
self.notify_updated(entity)
attrs = self.ldap2cwattrs(sdict, etype)
@@ -90,7 +91,7 @@
self._process('CWUser', userdict)
self.debug('processing ldapfeed source %s %s', self.source, self.searchgroupfilterstr)
for groupdict in self.group_source_entities_by_extid.itervalues():
- self._process('CWGroup', groupdict)
+ self._process('CWGroup', groupdict, raise_on_error=raise_on_error)
def handle_deletion(self, config, cnx, myuris):
if config['delete-entities']:
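
Usage note (not part of the patch): with the new keyword a test can let import errors surface directly instead of digging through the import log. A minimal sketch, assuming a parser instance set up as in unittest_datafeed.py:

    from cubicweb import ValidationError

    try:
        # raise_on_error=True lets the original error propagate, so a
        # failing test shows the real traceback rather than a logged message.
        parser.process('http://www.cubicweb.org/', raise_on_error=True)
    except ValidationError as exc:
        pass  # the error is now directly available for inspection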