@@ -205,18 +205,18 @@
         for url in urls:
             self.info('pulling data from %s', url)
             try:
                 if parser.process(url, raise_on_error):
                     error = True
-            except IOError, exc:
+            except IOError as exc:
                 if raise_on_error:
                     raise
                 parser.import_log.record_error(
                     'could not pull data while processing %s: %s'
                     % (url, exc))
                 error = True
-            except Exception, exc:
+            except Exception as exc:
                 if raise_on_error:
                     raise
                 self.exception('error while processing %s: %s',
                                url, exc)
                 error = True
@@ -316,11 +316,11 @@
         uri = uri.encode('utf-8')
         try:
             eid = session.repo.extid2eid(source, str(uri), etype, session,
                                          complete=False, commit=False,
                                          sourceparams=sourceparams)
-        except ValidationError, ex:
+        except ValidationError as ex:
             # XXX use critical so they are seen during tests. Should consider
             # raise_on_error instead?
             self.source.critical('error while creating %s: %s', etype, ex)
             self.import_log.record_error('error while creating %s: %s'
                                          % (etype, ex))
@@ -400,11 +400,11 @@
 
     def process(self, url, raise_on_error=False):
         """IDataFeedParser main entry point"""
         try:
             parsed = self.parse(url)
-        except Exception, ex:
+        except Exception as ex:
             if raise_on_error:
                 raise
             self.import_log.record_error(str(ex))
             return True
         error = False
@@ -422,11 +422,11 @@
                 self.process_item(*args)
                 # commit+set_cnxset instead of commit(free_cnxset=False) to let
                 # other a chance to get our connections set
                 commit()
                 set_cnxset()
-            except ValidationError, exc:
+            except ValidationError as exc:
                 if raise_on_error:
                     raise
                 self.source.error('Skipping %s because of validation error %s'
                                   % (args, exc))
                 rollback()
@@ -453,11 +453,11 @@
 
     def is_deleted(self, extid, etype, eid):
         if extid.startswith('http'):
            try:
                 _OPENER.open(self.normalize_url(extid)) # XXX HTTP HEAD request
-            except urllib2.HTTPError, ex:
+            except urllib2.HTTPError as ex:
                 if ex.code == 404:
                     return True
         elif extid.startswith('file://'):
             return exists(extid[7:])
         return False
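
Note: every hunk above makes the same purely syntactic change, replacing the legacy "except SomeError, exc:" spelling with "except SomeError as exc:", which Python 2.6+ accepts and which is the only form Python 3 allows. A minimal, self-contained sketch of the before/after (parse_int is a hypothetical function used only for illustration, not part of the patched module):

    def parse_int(text):
        """Illustrative only: the handler body stays the same, only the way
        the exception object is bound to a name changes."""
        try:
            return int(text)
        # legacy Python 2 spelling, removed in Python 3:
        #   except ValueError, exc:
        except ValueError as exc:  # accepted by Python 2.6+ and Python 3
            raise RuntimeError('could not parse %r: %s' % (text, exc))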