# HG changeset patch # User David Douard # Date 1374646822 -7200 # Node ID 75493f6ca586820a2d728c1861d8d84142b3009a # Parent 2eac0aa1d3f6e1d9bfae656ba1345095bba03094 [datafeed] add a timeout config option (closes #2745677) So an HTTP GET does not hang forever in the datafeed looping task. diff -r 2eac0aa1d3f6 -r 75493f6ca586 server/sources/datafeed.py --- a/server/sources/datafeed.py Wed Jul 24 11:54:35 2013 +0200 +++ b/server/sources/datafeed.py Wed Jul 24 08:20:22 2013 +0200 @@ -78,6 +78,12 @@ 'help': ('Time before logs from datafeed imports are deleted.'), 'group': 'datafeed-source', 'level': 2, }), + ('http-timeout', + {'type': 'time', + 'default': '1min', + 'help': ('Timeout of HTTP GET requests, when synchronizing a source.'), + 'group': 'datafeed-source', 'level': 2, + }), ) def check_config(self, source_entity): @@ -101,6 +107,7 @@ super(DataFeedSource, self).update_config(source_entity, typed_config) self.synchro_interval = timedelta(seconds=typed_config['synchronization-interval']) self.max_lock_lifetime = timedelta(seconds=typed_config['max-lock-lifetime']) + self.http_timeout = typed_config['http-timeout'] def init(self, activated, source_entity): super(DataFeedSource, self).init(activated, source_entity) @@ -438,7 +445,7 @@ if url.startswith('http'): url = self.normalize_url(url) self.source.info('GET %s', url) - stream = _OPENER.open(url) + stream = _OPENER.open(url, timeout=self.http_timeout) elif url.startswith('file://'): stream = open(url[7:]) else: @@ -454,7 +461,8 @@ def is_deleted(self, extid, etype, eid): if extid.startswith('http'): try: - _OPENER.open(self.normalize_url(extid)) # XXX HTTP HEAD request + _OPENER.open(self.normalize_url(extid), # XXX HTTP HEAD request + timeout=self.http_timeout) except urllib2.HTTPError as ex: if ex.code == 404: return True