cubicweb/hooks/__init__.py
author Philippe Pepiot <ph@itsalwaysdns.eu>
Tue, 31 Mar 2020 18:22:05 +0200
changeset 12966 6cd938c29ca3
parent 12145 752b94ed9748
permissions -rw-r--r--
[server] Make connection pooler configurable and set better default values

Drop the configuration option connections-pool-size and add new configuration options:

* connections-pool-min-size. Set to 0 by default, so we open connections only when needed. This avoids opening min-size * processes connections at startup, which is, I think, a good default.
* connections-pool-max-size. Set to 0 (unlimited) by default, so we move the bottleneck to postgresql.
* connections-idle-timeout. Set to 10 minutes. I have no particular argument for this value, other than it being the default in pgbouncer.
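
As a rough sketch (not part of this file), the three options described above could be declared with the usual logilab.common.configuration option tuples along these lines; only the names and defaults come from the message above, while the types, help strings and exact declaration site are illustrative assumptions:

    options = (
        ('connections-pool-min-size',
         {'type': 'int', 'default': 0,   # open connections lazily, on demand
          'help': 'minimum number of connections kept in the pool',
          }),
        ('connections-pool-max-size',
         {'type': 'int', 'default': 0,   # 0 means no upper bound
          'help': 'maximum number of connections in the pool (0 for unlimited)',
          }),
        ('connections-idle-timeout',
         {'type': 'int', 'default': 600,  # 10 minutes, pgbouncer's default
          'help': 'number of seconds before an idle connection is closed',
          }),
    )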

# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
#
# CubicWeb is free software: you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 2.1 of the License, or (at your option)
# any later version.
#
# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb.  If not, see <http://www.gnu.org/licenses/>.
"""core hooks registering some maintainance tasks as server startup time"""

from datetime import timedelta, datetime

from cubicweb.server import hook


class TransactionsCleanupStartupHook(hook.Hook):
    """start task to cleanup transaction data"""
    __regid__ = 'cw.looping-tasks.transactions-cleanup'
    events = ('server_startup',)

    def __call__(self):
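        # only the process running the repository scheduler should register
        # looping tasks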
        if not self.repo.has_scheduler():
            return
        # XXX use named args and inner functions to avoid referencing globals,
        # which may cause problems when this module is reloaded
        lifetime = timedelta(days=self.repo.config['keep-transaction-lifetime'])
        def cleanup_old_transactions(repo=self.repo, lifetime=lifetime):
            mindate = datetime.utcnow() - lifetime
            with repo.internal_cnx() as cnx:
                cnx.system_sql(
                    'DELETE FROM transactions WHERE tx_time < %(time)s',
                    {'time': mindate})
                cnx.commit()
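        # schedule the cleanup to run once a day, and only when undo support
        # (which feeds the transactions table) is enabled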
        if self.repo.config['undo-enabled']:
            self.repo.looping_task(60*60*24, cleanup_old_transactions,
                                   self.repo)


class UpdateFeedsStartupHook(hook.Hook):
    """start task to update datafeed based sources"""
    __regid__ = 'cw.looping-tasks.update-feeds'
    events = ('server_startup',)

    def __call__(self):
        if not self.repo.has_scheduler():
            return
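        # pull data for every enabled datafeed source whose 'synchronize'
        # option is set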
        def update_feeds(repo):
            # iterate over a list copy to avoid iterating on a dictionary
            # whose size may change during the loop
            for uri, source in list(repo.sources_by_uri.items()):
                if (uri == 'system'
                    or not repo.config.source_enabled(source)
                    or not source.config['synchronize']):
                    continue
                with repo.internal_cnx() as cnx:
                    try:
                        source.pull_data(cnx)
                    except Exception:
                        cnx.exception('while trying to update feed %s', source)
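        # run the feed update check every minute (interval given in seconds)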
        self.repo.looping_task(60, update_feeds, self.repo)


class DataImportsCleanupStartupHook(hook.Hook):
    """start task to cleanup old data imports (ie datafeed import logs)"""
    __regid__ = 'cw.looping-tasks.dataimports-cleanup'
    events = ('server_startup',)

    def __call__(self):
        if not self.repo.has_scheduler():
            return
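        # drop CWDataImport entities (datafeed import logs) older than each
        # source's logs-lifetime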
        def expire_dataimports(repo=self.repo):
            for uri, source in repo.sources_by_uri.items():
                if (uri == 'system'
                    or not repo.config.source_enabled(source)):
                    continue
                with repo.internal_cnx() as cnx:
                    mindate = datetime.utcnow() - timedelta(
                        seconds=source.config['logs-lifetime'])
                    cnx.execute(
                        'DELETE CWDataImport X WHERE X start_timestamp < %(time)s',
                        {'time': mindate})
                    cnx.commit()
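        # purge outdated import logs once a day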
        self.repo.looping_task(60*60*24, expire_dataimports, self.repo)