--- a/server/test/unittest_datafeed.py Fri Oct 21 09:23:43 2011 +0200
+++ b/server/test/unittest_datafeed.py Fri Oct 21 09:23:43 2011 +0200
@@ -37,19 +37,21 @@
         self.assertEqual(dfsource.synchro_interval, timedelta(seconds=60))
         self.assertFalse(dfsource.fresh())
+
         class AParser(datafeed.DataFeedParser):
             __regid__ = 'testparser'
             def process(self, url, raise_on_error=False):
                 entity = self.extid2entity('http://www.cubicweb.org/', 'Card',
-                                           item={'title': u'cubicweb.org',
-                                                 'content': u'the cw web site'})
+                                           item={'title': u'cubicweb.org',
+                                                 'content': u'the cw web site'})
                 if not self.created_during_pull(entity):
                     self.notify_updated(entity)
             def before_entity_copy(self, entity, sourceparams):
                 entity.cw_edited.update(sourceparams['item'])
 
         with self.temporary_appobjects(AParser):
-            stats = dfsource.pull_data(self.session, force=True)
+            session = self.repo.internal_session()
+            stats = dfsource.pull_data(session, force=True)
         self.commit()
         # test import stats
         self.assertEqual(sorted(stats.keys()), ['created', 'updated'])
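
The first hunk stops running the pull with the test's user session (self.session) and uses a repository internal session instead, matching how the repository's own scheduled tasks perform datafeed pulls. Below is a minimal sketch of that pattern, assuming the CubicWeb 3.13+ API used in the patch (repo.internal_session(), dfsource.pull_data()); the try/finally with session.close() is this sketch's assumption about caller cleanup, not part of the patch:

    # Hedged sketch: run a datafeed pull on a privileged internal session.
    # `repo` is assumed to be a cubicweb Repository and `dfsource` a datafeed
    # source taken from repo.sources_by_uri, as in the surrounding test.
    def pull(repo, dfsource):
        session = repo.internal_session()  # repository-level, full privileges
        try:
            # pull_data() commits and releases the session's connection set
            # before returning, so the session must not be reused as-is
            return dfsource.pull_data(session, force=True)
        finally:
            session.close()
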
@@ -74,13 +76,15 @@
         self.assertEqual(self.repo._extid_cache[('http://www.cubicweb.org/', 'system')],
                          entity.eid)
         # test repull
-        stats = dfsource.pull_data(self.session, force=True)
+        session.set_cnxset()
+        stats = dfsource.pull_data(session, force=True)
         self.assertEqual(stats['created'], set())
         self.assertEqual(stats['updated'], set((entity.eid,)))
         # test repull with caches reset
         self.repo._type_source_cache.clear()
         self.repo._extid_cache.clear()
-        stats = dfsource.pull_data(self.session, force=True)
+        session.set_cnxset()
+        stats = dfsource.pull_data(session, force=True)
         self.assertEqual(stats['created'], set())
         self.assertEqual(stats['updated'], set((entity.eid,)))
         self.assertEqual(self.repo._type_source_cache[entity.eid],
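
The second hunk is the consequence of the first: pull_data() releases the session's connection set when it finishes, so the internal session created earlier cannot be reused directly, and each repull first re-acquires a connection set with session.set_cnxset(). A hedged sketch of the repull sequence, under the same API assumptions as above:

    # set_cnxset() re-acquires the connection set that the previous
    # pull_data() call released, allowing the same session to be reused.
    session = repo.internal_session()
    stats = dfsource.pull_data(session, force=True)  # frees the cnxset on return
    session.set_cnxset()                             # re-acquire before reuse
    stats = dfsource.pull_data(session, force=True)  # repull on the same session
    session.close()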