--- a/.hgignore Mon Mar 20 11:16:11 2017 +0100
+++ b/.hgignore Fri Apr 14 15:40:15 2017 +0200
@@ -13,6 +13,7 @@
^doc/book/en/apidoc$
\.old$
\.pybuild
+cubicweb/server/test/data/slapd.conf
debian/python-cubicweb
debian/*.log
debian/*.substvars
--- a/.hgtags Mon Mar 20 11:16:11 2017 +0100
+++ b/.hgtags Fri Apr 14 15:40:15 2017 +0200
@@ -599,3 +599,6 @@
4029fb83a67d0b1ac0e73985426a83373721a4b2 3.24.6
4029fb83a67d0b1ac0e73985426a83373721a4b2 debian/3.24.6-1
4029fb83a67d0b1ac0e73985426a83373721a4b2 centos/3.24.6-1
+dacc5b168e29b33515cee5940de1e392dc9d522a 3.25.0
+dacc5b168e29b33515cee5940de1e392dc9d522a debian/3.25.0-1
+dacc5b168e29b33515cee5940de1e392dc9d522a centos/3.25.0-1
--- a/README Mon Mar 20 11:16:11 2017 +0100
+++ b/README Fri Apr 14 15:40:15 2017 +0200
@@ -14,7 +14,7 @@
Install
-------
-More details at https://docs.cubicweb.org/book/admin/setup
+More details at https://cubicweb.readthedocs.io/en/3.25/book/admin/setup
Getting started
---------------
@@ -26,12 +26,25 @@
cubicweb-ctl start -D myblog
sensible-browser http://localhost:8080/
-Details at https://docs.cubicweb.org/tutorials/base/blog-in-five-minutes
+Details at https://cubicweb.readthedocs.io/en/3.25/tutorials/base/blog-in-five-minutes
Documentation
-------------
-Look in the doc/ subdirectory or read https://docs.cubicweb.org/
+Look in the doc/ subdirectory or read https://cubicweb.readthedocs.io/en/3.25/
-CubicWeb includes the Entypo pictograms by Daniel Bruce — www.entypo.com
+CubicWeb includes the Entypo pictograms by Daniel Bruce — http://www.entypo.com
+
+Contributing
+------------
+
+Patches should be submitted by email to the cubicweb-devel@lists.cubicweb.org
+mailing list in order to be reviewed by project integrators or any community
+member.
+The simplest way to send patches is to use the ``hg email`` command available
+through the *patchbomb* extension of Mercurial. Preferably, patches should be
+*in the message body* of emails. When submitting a revised version of a patch
+series, a prefix indicating the iteration number ``<n>`` of the series should
+be added to email subject prefixes; this can be achieved by specifying a
+``--flag v<n>`` option to ``hg email`` command.
--- a/README.pyramid.rst Mon Mar 20 11:16:11 2017 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,170 +0,0 @@
-
-pyramid_cubicweb_ is one specific way of integrating CubicWeb_ with a
-Pyramid_ web application.
-
-Features
-========
-
-* provides a default route that let a cubicweb instance handle the request.
-
-Usage
-=====
-
-To use, install ``pyramid_cubicweb`` in your python environment, and
-then include_ the package::
-
- config.include('pyramid_cubicweb')
-
-
-Configuration
-=============
-
-Requires the following `INI setting / environment variable`_:
-
-* `cubicweb.instance` / `CW_INSTANCE`: the cubicweb instance name
-
-Authentication cookies
-----------------------
-
-When using the `pyramid_cubicweb.auth` (CubicWeb AuthTkt
-authentication policy), which is the default in most cases, you may
-have to configure the behaviour of these authentication policies using
-standard's Pyramid configuration. You may want to configure in your
-``pyramid.ini``:
-
-:Session Authentication:
-
- This is a `AuthTktAuthenticationPolicy`_ so you may overwrite default
- configuration values by adding configuration entries using the prefix
- ``cubicweb.auth.authtkt.session``. Default values are:
-
- ::
-
- cubicweb.auth.authtkt.session.hashalg = sha512
- cubicweb.auth.authtkt.session.cookie_name = auth_tkt
- cubicweb.auth.authtkt.session.timeout = 1200
- cubicweb.auth.authtkt.session.reissue_time = 120
- cubicweb.auth.authtkt.session.http_only = True
- cubicweb.auth.authtkt.session.secure = True
-
-
-:Persistent Authentication:
-
- This is also a `AuthTktAuthenticationPolicy`_. It is used when persistent
- sessions are activated (typically when using the cubicweb-rememberme_
- cube). You may overwrite default configuration values by adding
- configuration entries using the prefix
- ``cubicweb.auth.authtkt.persistent``. Default values are:
-
- ::
-
- cubicweb.auth.authtkt.persistent.hashalg = sha512
- cubicweb.auth.authtkt.persistent.cookie_name = pauth_tkt
- cubicweb.auth.authtkt.persistent.max_age = 3600*24*30
- cubicweb.auth.authtkt.persistent.reissue_time = 3600*24
- cubicweb.auth.authtkt.persistent.http_only = True
- cubicweb.auth.authtkt.persistent.secure = True
-
-
-.. Warning:: Legacy timeout values from the instance's
- ``all-in-one.conf`` are **not** used at all (``
- http-session-time`` and ``cleanup-session-time``)
-
-Please refer to the documentation_ for more details (available in the
-``docs`` directory of the source code).
-
-.. _pyramid_cubicweb: https://www.cubicweb.org/project/pyramid-cubicweb
-.. _CubicWeb: https://www.cubicweb.org/
-.. _`cubicweb-rememberme`: \
- https://www.cubicweb.org/project/cubicweb-rememberme
-.. _Pyramid: http://pypi.python.org/pypi/pyramid
-.. _include: http://docs.pylonsproject.org/projects/pyramid/en/latest/api/config.html#pyramid.config.Configurator.include
-.. _`INI setting / environment variable`: http://docs.pylonsproject.org/projects/pyramid/en/latest/narr/environment.html#adding-a-custom-setting
-.. _documentation: http://pyramid-cubicweb.readthedocs.org/
-.. _AuthTktAuthenticationPolicy: \
- http://docs.pylonsproject.org/projects/pyramid/en/latest/api/authentication.html#pyramid.authentication.AuthTktAuthenticationPolicy
-
-Command
-=======
-
-Summary
--------
-
-Add the 'pyramid' command to cubicweb-ctl".
-
-This cube also add a ``CWSession`` entity type so that sessions can be
-stored in the database, which allows to run a Cubicweb instance
-without having to set up a session storage (like redis or memcache)
-solution.
-
-However, for production systems, it is greatly advised to use such a
-storage solution for the sessions.
-
-The handling of the sessions is made by pyramid (see the
-`pyramid's documentation on sessions`_ for more details).
-
-For example, to set up a redis based session storage, you need the
-`pyramid-redis-session`_ package, then you must configure pyramid to
-use this backend, by configuring the ``pyramid.ini`` file in the instance's
-config directory (near the ``all-in-one.conf`` file):
-
-
-.. code-block:: ini
-
- [main]
- cubicweb.defaults = no # we do not want to load the default cw session handling
-
- cubicweb.auth.authtkt.session.secret = <secret1>
- cubicweb.auth.authtkt.persistent.secret = <secret2>
- cubicweb.auth.authtkt.session.secure = yes
- cubicweb.auth.authtkt.persistent.secure = yes
-
- redis.sessions.secret = <secret3>
- redis.sessions.prefix = <my-app>:
-
- redis.sessions.url = redis://localhost:6379/0
-
- pyramid.includes =
- pyramid_redis_sessions
- pyramid_cubicweb.auth
- pyramid_cubicweb.login
-
-
-See the documentation of `Pyramid Cubicweb`_ for more details.
-
-.. Warning:: If you want to be able to log in a CubicWeb application
- served by pyramid on a unsecured stream (typically when
- you start an instance in dev mode using a simple
- ``cubicweb-ctl pyramid -D -linfo myinstance``), you
- **must** set ``cubicweb.auth.authtkt.session.secure`` to
- ``no``.
-
-Secrets
-~~~~~~~
-
-There are a number of secrets to configure in ``pyramid.ini``. They
-should be different one from each other, as explained in `Pyramid's
-documentation`_.
-
-For the record:
-
-:cubicweb.session.secret: This secret is used to encrypt the session's
- data ID (data themselved are stored in the backend, database or
- redis) when using the integrated (``CWSession`` based) session data
- storage.
-
-:redis.session.secret: This secret is used to encrypt the session's
- data ID (data themselved are stored in the backend, database or
- redis) when using redis as backend.
-
-:cubicweb.auth.authtkt.session.secret: This secret is used to encrypt
- the authentication cookie.
-
-:cubicweb.auth.authtkt.persistent.secret: This secret is used to
- encrypt the persistent authentication cookie.
-
-
-.. _`Pyramid Cubicweb`: http://pyramid-cubicweb.readthedocs.org/
-.. _`pyramid's documentation on sessions`: http://docs.pylonsproject.org/projects/pyramid/en/latest/narr/sessions.html
-.. _`pyramid-redis-session`: http://pyramid-redis-sessions.readthedocs.org/en/latest/index.html
-.. _`Pyramid's documentation`: http://docs.pylonsproject.org/projects/pyramid/en/latest/narr/security.html#admonishment-against-secret-sharing
--- a/cubicweb.spec Mon Mar 20 11:16:11 2017 +0100
+++ b/cubicweb.spec Fri Apr 14 15:40:15 2017 +0200
@@ -8,7 +8,7 @@
%{!?python_sitelib: %define python_sitelib %(%{__python} -c "from distutils.sysconfig import get_python_lib; print get_python_lib()")}
Name: cubicweb
-Version: 3.24.6
+Version: 3.25.0
Release: logilab.1%{?dist}
Summary: CubicWeb is a semantic web application framework
Source0: https://pypi.python.org/packages/source/c/cubicweb/cubicweb-%{version}.tar.gz
--- a/cubicweb/__pkginfo__.py Mon Mar 20 11:16:11 2017 +0100
+++ b/cubicweb/__pkginfo__.py Fri Apr 14 15:40:15 2017 +0200
@@ -28,7 +28,7 @@
modname = distname = "cubicweb"
numversion = (3, 25, 0)
-version = '.'.join(str(num) for num in numversion) + '.dev0'
+version = '.'.join(str(num) for num in numversion)
description = "a repository of entities / relations for knowledge management"
author = "Logilab"
@@ -71,6 +71,7 @@
# data files that shall be copied into the main package directory
package_data = {
'cubicweb.web.views': ['*.pt'],
+ 'cubicweb.pyramid': ['development.ini.tmpl'],
}
try:
--- a/cubicweb/cwconfig.py Mon Mar 20 11:16:11 2017 +0100
+++ b/cubicweb/cwconfig.py Fri Apr 14 15:40:15 2017 +0200
@@ -276,6 +276,15 @@
return cube
+def _cube_modname(cube):
+ modname = _cube_pkgname(cube)
+ loader = pkgutil.find_loader(modname)
+ if loader:
+ return modname
+ else:
+ return 'cubes.' + cube
+
+
def _expand_modname(modname):
"""expand modules names `modname` if exists by walking non package submodules
and yield (submodname, filepath) including `modname` itself
@@ -502,8 +511,11 @@
@classmethod
def available_cubes(cls):
+ """Return a list of available cube names.
+
+    For a cube distributed as a Python package, the name is the package name.
+ """
cubes = set()
- prefix = 'cubicweb_'
for entry_point in pkg_resources.iter_entry_points(
group='cubicweb.cubes', name=None):
try:
@@ -512,11 +524,11 @@
continue
else:
modname = module.__name__
- if not modname.startswith(prefix):
+ if not modname.startswith('cubicweb_'):
cls.warning('entry point %s does not appear to be a cube',
entry_point)
continue
- cubes.add(modname[len(prefix):])
+ cubes.add(modname)
# Legacy cubes.
for directory in cls.cubes_search_path():
if not exists(directory):
@@ -751,17 +763,6 @@
else:
cls.warning('no __init__ file in cube %s', cube)
- @classmethod
- def init_available_cubes(cls):
- """cubes may register some sources (svnfile for instance) in their
- __init__ file, so they should be loaded early in the startup process
- """
- for cube in cls.available_cubes():
- try:
- __import__('cubes.%s' % cube)
- except Exception as ex:
- cls.warning("can't init cube %s: %s", cube, ex)
-
cubicweb_appobject_path = set(['entities'])
cube_appobject_path = set(['entities'])
@@ -817,7 +818,8 @@
for name in ('bootstrap', 'base', 'workflow', 'Bookmark'):
modnames.append(('cubicweb', 'cubicweb.schemas.' + name))
for cube in reversed(self.cubes()):
- for modname, filepath in _expand_modname('cubes.{0}.schema'.format(cube)):
+ for modname, filepath in _expand_modname(
+ '{0}.schema'.format(_cube_modname(cube))):
modnames.append((cube, modname))
if self.apphome:
apphome = realpath(self.apphome)
@@ -1058,6 +1060,7 @@
MODES = ('common', 'repository', 'Any')
MCOMPAT = {'all-in-one': MODES,
+ 'pyramid': MODES,
'repository': ('common', 'repository', 'Any')}
@classmethod
def accept_mode(cls, mode):
@@ -1335,9 +1338,10 @@
def appobjects_cube_modnames(self, cube):
modnames = []
+ cube_modname = _cube_modname(cube)
cube_submodnames = self._sorted_appobjects(self.cube_appobject_path)
for name in cube_submodnames:
- for modname, filepath in _expand_modname('.'.join(['cubes', cube, name])):
+ for modname, filepath in _expand_modname('.'.join([cube_modname, name])):
modnames.append(modname)
return modnames
--- a/cubicweb/cwctl.py Mon Mar 20 11:16:11 2017 +0100
+++ b/cubicweb/cwctl.py Fri Apr 14 15:40:15 2017 +0200
@@ -89,6 +89,18 @@
return modes
+def available_cube_names(cwcfg):
+ """Return a list of available cube names, with 'cubicweb_' prefix dropped.
+ """
+ def drop_prefix(cube):
+ prefix = 'cubicweb_'
+ if cube.startswith(prefix):
+ cube = cube[len(prefix):]
+ return cube
+
+ return [drop_prefix(cube) for cube in cwcfg.available_cubes()]
+
+
class InstanceCommand(Command):
"""base class for command taking 0 to n instance id as arguments
(0 meaning all registered instances)
@@ -220,14 +232,15 @@
cfgpb = ConfigurationProblem(cwcfg)
try:
cubesdir = pathsep.join(cwcfg.cubes_search_path())
- namesize = max(len(x) for x in cwcfg.available_cubes())
+ cube_names = available_cube_names(cwcfg)
+ namesize = max(len(x) for x in cube_names)
except ConfigurationError as ex:
print('No cubes available:', ex)
except ValueError:
print('No cubes available in %s' % cubesdir)
else:
print('Available cubes (%s):' % cubesdir)
- for cube in cwcfg.available_cubes():
+ for cube in cube_names:
try:
tinfo = cwcfg.cube_pkginfo(cube)
tversion = tinfo.version
@@ -360,7 +373,7 @@
except ConfigurationError as ex:
print(ex)
print('\navailable cubes:', end=' ')
- print(', '.join(cwcfg.available_cubes()))
+ print(', '.join(available_cube_names(cwcfg)))
return
# create the registry directory for this instance
print('\n'+underline_title('Creating the instance %s' % appid))
@@ -677,6 +690,11 @@
'default': False,
'help': 'only upgrade files on the file system, not the database.'}),
+ ('no-config-update',
+ {'short': 'C', 'action': 'store_true',
+ 'default': False,
+ 'help': 'do NOT update config file if set.'}),
+
('nostartstop',
{'short': 'n', 'action' : 'store_true',
'default': False,
@@ -755,7 +773,8 @@
else:
print('-> no data migration needed for instance %s.' % appid)
# rewrite main configuration file
- mih.rewrite_configuration()
+ if not self.config.no_config_update:
+ mih.rewrite_configuration()
mih.shutdown()
# handle i18n upgrade
if not self.i18nupgrade(config):
--- a/cubicweb/dataimport/test/test_massive_store.py Mon Mar 20 11:16:11 2017 +0100
+++ b/cubicweb/dataimport/test/test_massive_store.py Fri Apr 14 15:40:15 2017 +0200
@@ -45,7 +45,7 @@
def store_impl(self, cnx):
source = cnx.create_entity('CWSource', type=u'datafeed', name=u'test', url=u'test')
cnx.commit()
- metagen = stores.MetadataGenerator(cnx, source=cnx.repo.sources_by_eid[source.eid])
+ metagen = stores.MetadataGenerator(cnx, source=cnx.repo.source_by_eid(source.eid))
return MassiveObjectStore(cnx, metagen=metagen)
--- a/cubicweb/dataimport/test/test_stores.py Mon Mar 20 11:16:11 2017 +0100
+++ b/cubicweb/dataimport/test/test_stores.py Fri Apr 14 15:40:15 2017 +0200
@@ -80,7 +80,7 @@
def store_impl(self, cnx):
source = cnx.create_entity('CWSource', type=u'datafeed', name=u'test', url=u'test')
cnx.commit()
- metagen = stores.MetadataGenerator(cnx, source=cnx.repo.sources_by_eid[source.eid])
+ metagen = stores.MetadataGenerator(cnx, source=cnx.repo.source_by_eid(source.eid))
return stores.NoHookRQLObjectStore(cnx, metagen)
--- a/cubicweb/devtools/devctl.py Mon Mar 20 11:16:11 2017 +0100
+++ b/cubicweb/devtools/devctl.py Fri Apr 14 15:40:15 2017 +0200
@@ -28,7 +28,7 @@
import tempfile
import sys
from datetime import datetime, date
-from os import mkdir, chdir, path as osp
+from os import getcwd, mkdir, chdir, path as osp
import pkg_resources
from warnings import warn
@@ -691,13 +691,8 @@
verbose = self.get('verbose')
destdir = self.get('directory')
if not destdir:
- cubespath = ServerConfiguration.cubes_search_path()
- if len(cubespath) > 1:
- raise BadCommandUsage(
- "can't guess directory where to put the new cube."
- " Please specify it using the --directory option")
- destdir = cubespath[0]
- if not osp.isdir(destdir):
+ destdir = getcwd()
+ elif not osp.isdir(destdir):
print("-> creating cubes directory", destdir)
try:
mkdir(destdir)
--- a/cubicweb/devtools/repotest.py Mon Mar 20 11:16:11 2017 +0100
+++ b/cubicweb/devtools/repotest.py Fri Apr 14 15:40:15 2017 +0200
@@ -251,7 +251,7 @@
# use cnx.user.eid to get correct owned_by relation, unless explicit eid
with self.admin_access.cnx() as cnx:
user_eid = cnx.user.eid
- cnx.user._cw.data[user_session_cache_key(user_eid, 'groups')] = set(groups)
+ cnx.user._cw.transaction_data[user_session_cache_key(user_eid, 'groups')] = set(groups)
yield cnx
def qexecute(self, rql, args=None, build_descr=True):
--- a/cubicweb/entities/adapters.py Mon Mar 20 11:16:11 2017 +0100
+++ b/cubicweb/entities/adapters.py Fri Apr 14 15:40:15 2017 +0200
@@ -49,7 +49,9 @@
def long_title(self):
"""Return a more detailled title for entity"""
- return self.title()
+        # go through entity.dc_title for bw compat purposes: if an entity defines
+        # dc_title but not dc_long_title, we still want it to be considered.
+ return self.entity.dc_title()
def description(self, format='text/plain'):
"""Return a suitable description for entity"""
--- a/cubicweb/entities/authobjs.py Mon Mar 20 11:16:11 2017 +0100
+++ b/cubicweb/entities/authobjs.py Fri Apr 14 15:40:15 2017 +0200
@@ -62,26 +62,26 @@
def groups(self):
key = user_session_cache_key(self.eid, 'groups')
try:
- return self._cw.data[key]
+ return self._cw.transaction_data[key]
except KeyError:
with self._cw.security_enabled(read=False):
groups = set(group for group, in self._cw.execute(
'Any GN WHERE U in_group G, G name GN, U eid %(userid)s',
{'userid': self.eid}))
- self._cw.data[key] = groups
+ self._cw.transaction_data[key] = groups
return groups
@property
def properties(self):
key = user_session_cache_key(self.eid, 'properties')
try:
- return self._cw.data[key]
+ return self._cw.transaction_data[key]
except KeyError:
with self._cw.security_enabled(read=False):
properties = dict(self._cw.execute(
'Any K, V WHERE P for_user U, U eid %(userid)s, '
'P pkey K, P value V', {'userid': self.eid}))
- self._cw.data[key] = properties
+ self._cw.transaction_data[key] = properties
return properties
def prefered_language(self, language=None):
--- a/cubicweb/entities/sources.py Mon Mar 20 11:16:11 2017 +0100
+++ b/cubicweb/entities/sources.py Fri Apr 14 15:40:15 2017 +0200
@@ -21,6 +21,8 @@
from socket import gethostname
import logging
+from six import text_type
+
from logilab.common.textutils import text_to_dict
from logilab.common.configuration import OptionError
from logilab.mtconverter import xml_escape
@@ -29,27 +31,12 @@
class _CWSourceCfgMixIn(object):
+
@property
def dictconfig(self):
- return self.config and text_to_dict(self.config) or {}
-
- def update_config(self, skip_unknown=False, **config):
- from cubicweb.server import SOURCE_TYPES
- from cubicweb.server.serverconfig import (SourceConfiguration,
- generate_source_config)
- cfg = self.dictconfig
- cfg.update(config)
- options = SOURCE_TYPES[self.type].options
- sconfig = SourceConfiguration(self._cw.vreg.config, options=options)
- for opt, val in cfg.items():
- try:
- sconfig.set_option(opt, val)
- except OptionError:
- if skip_unknown:
- continue
- raise
- cfgstr = unicode(generate_source_config(sconfig), self._cw.encoding)
- self.cw_set(config=cfgstr)
+ if not self.config:
+ return {}
+ return text_to_dict(self.config)
class CWSource(_CWSourceCfgMixIn, AnyEntity):
@@ -76,7 +63,7 @@
"""repository only property, not available from the web side (eg
self._cw is expected to be a server session)
"""
- return self._cw.repo.sources_by_eid[self.eid]
+ return self._cw.repo.source_by_eid(self.eid)
class CWSourceHostConfig(_CWSourceCfgMixIn, AnyEntity):
--- a/cubicweb/hooks/__init__.py Mon Mar 20 11:16:11 2017 +0100
+++ b/cubicweb/hooks/__init__.py Fri Apr 14 15:40:15 2017 +0200
@@ -29,7 +29,7 @@
events = ('server_startup',)
def __call__(self):
- if self.repo._scheduler is None:
+ if not self.repo.has_scheduler():
return
# XXX use named args and inner functions to avoid referencing globals
# which may cause reloading pb
@@ -51,12 +51,12 @@
events = ('server_startup',)
def __call__(self):
- if self.repo._scheduler is None:
+ if not self.repo.has_scheduler():
return
def update_feeds(repo):
# take a list to avoid iterating on a dictionary whose size may
# change
- for uri, source in list(repo.sources_by_uri.items()):
+ for uri, source in repo.sources_by_uri.items():
if (uri == 'system'
or not repo.config.source_enabled(source)
or not source.config['synchronize']):
@@ -75,7 +75,7 @@
events = ('server_startup',)
def __call__(self):
- if self.repo._scheduler is None:
+ if not self.repo.has_scheduler():
return
def expire_dataimports(repo=self.repo):
for uri, source in repo.sources_by_uri.items():
--- a/cubicweb/hooks/logstats.py Mon Mar 20 11:16:11 2017 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,59 +0,0 @@
-# copyright 2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-#
-# This file is part of CubicWeb.
-#
-# CubicWeb is free software: you can redistribute it and/or modify it under the
-# terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option)
-# any later version.
-#
-# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
-
-"""looping task for dumping instance's stats in a file
-"""
-
-
-
-from datetime import datetime
-import json
-
-from cubicweb.server import hook
-
-class LogStatsStartHook(hook.Hook):
- """register task to regularly dump instance's stats in a file
-
- data are stored as one json entry per row
- """
- __regid__ = 'cubicweb.hook.logstats.start'
- events = ('server_startup',)
-
- def __call__(self):
- interval = self.repo.config.get('logstat-interval', 0)
- if interval <= 0:
- return
-
- def dump_stats(repo):
- statsfile = repo.config.get('logstat-file')
- with repo.internal_cnx() as cnx:
- stats = cnx.call_service('repo_stats')
- gcstats = cnx.call_service('repo_gc_stats', nmax=5)
-
- allstats = {'resources': stats,
- 'memory': gcstats,
- 'timestamp': datetime.utcnow().isoformat(),
- }
- try:
- with open(statsfile, 'ab') as ofile:
- json.dump(allstats, ofile)
- ofile.write('\n')
- except IOError:
- repo.warning('Cannot open stats file for writing: %s', statsfile)
-
- self.repo.looping_task(interval, dump_stats, self.repo)
--- a/cubicweb/hooks/syncsession.py Mon Mar 20 11:16:11 2017 +0100
+++ b/cubicweb/hooks/syncsession.py Fri Apr 14 15:40:15 2017 +0200
@@ -18,36 +18,9 @@
"""Core hooks: synchronize living session on persistent data changes"""
from cubicweb import _
-from cubicweb import UnknownProperty, BadConnectionId, validation_error
+from cubicweb import UnknownProperty, validation_error
from cubicweb.predicates import is_instance
from cubicweb.server import hook
-from cubicweb.entities.authobjs import user_session_cache_key
-
-
-def get_user_sessions(cnx, user_eid):
- if cnx.user.eid == user_eid:
- yield cnx
-
-
-class CachedValueMixin(object):
- """Mixin class providing methods to retrieve some value, specified through
- `value_name` attribute, in session data.
- """
- value_name = None
- session = None # make pylint happy
-
- @property
- def cached_value(self):
- """Return cached value for the user, or None"""
- key = user_session_cache_key(self.session.user.eid, self.value_name)
- return self.session.data.get(key, None)
-
- def update_cached_value(self, value):
- """Update cached value for the user (modifying the set returned by cached_value may not be
- necessary depending on session data implementation, e.g. redis)
- """
- key = user_session_cache_key(self.session.user.eid, self.value_name)
- self.session.data[key] = value
class SyncSessionHook(hook.Hook):
@@ -55,112 +28,6 @@
category = 'syncsession'
-# user/groups synchronisation #################################################
-
-class _GroupOperation(CachedValueMixin, hook.Operation):
- """Base class for group operation"""
- value_name = 'groups'
-
- def __init__(self, cnx, *args, **kwargs):
- """Override to get the group name before actual groups manipulation
-
- we may temporarily loose right access during a commit event, so
- no query should be emitted while comitting
- """
- rql = 'Any N WHERE G eid %(x)s, G name N'
- result = cnx.execute(rql, {'x': kwargs['group_eid']}, build_descr=False)
- hook.Operation.__init__(self, cnx, *args, **kwargs)
- self.group = result[0][0]
-
-
-class _DeleteGroupOp(_GroupOperation):
- """Synchronize user when a in_group relation has been deleted"""
-
- def postcommit_event(self):
- cached_groups = self.cached_value
- if cached_groups is not None:
- cached_groups.remove(self.group)
- self.update_cached_value(cached_groups)
-
-
-class _AddGroupOp(_GroupOperation):
- """Synchronize user when a in_group relation has been added"""
-
- def postcommit_event(self):
- cached_groups = self.cached_value
- if cached_groups is not None:
- cached_groups.add(self.group)
- self.update_cached_value(cached_groups)
-
-
-class SyncInGroupHook(SyncSessionHook):
- """Watch addition/removal of in_group relation to synchronize living sessions accordingly"""
- __regid__ = 'syncingroup'
- __select__ = SyncSessionHook.__select__ & hook.match_rtype('in_group')
- events = ('after_delete_relation', 'after_add_relation')
-
- def __call__(self):
- if self.event == 'after_delete_relation':
- opcls = _DeleteGroupOp
- else:
- opcls = _AddGroupOp
- for session in get_user_sessions(self._cw, self.eidfrom):
- opcls(self._cw, session=session, group_eid=self.eidto)
-
-
-class _CloseSessionOp(hook.Operation):
- """Close user's session when it has been deleted"""
-
- def postcommit_event(self):
- try:
- # remove cached groups for the user
- key = user_session_cache_key(self.session.user.eid, 'groups')
- self.session.data.pop(key, None)
- except BadConnectionId:
- pass # already closed
-
-
-class UserDeletedHook(SyncSessionHook):
- """Watch deletion of user to close its opened session"""
- __regid__ = 'closession'
- __select__ = SyncSessionHook.__select__ & is_instance('CWUser')
- events = ('after_delete_entity',)
-
- def __call__(self):
- for session in get_user_sessions(self._cw, self.entity.eid):
- _CloseSessionOp(self._cw, session=session)
-
-
-# CWProperty hooks #############################################################
-
-
-class _UserPropertyOperation(CachedValueMixin, hook.Operation):
- """Base class for property operation"""
- value_name = 'properties'
- key = None # make pylint happy
-
-
-class _ChangeUserCWPropertyOp(_UserPropertyOperation):
- """Synchronize cached user's properties when one has been added/updated"""
- value = None # make pylint happy
-
- def postcommit_event(self):
- cached_props = self.cached_value
- if cached_props is not None:
- cached_props[self.key] = self.value
- self.update_cached_value(cached_props)
-
-
-class _DelUserCWPropertyOp(_UserPropertyOperation):
- """Synchronize cached user's properties when one has been deleted"""
-
- def postcommit_event(self):
- cached_props = self.cached_value
- if cached_props is not None:
- cached_props.pop(self.key, None)
- self.update_cached_value(cached_props)
-
-
class _ChangeSiteWideCWPropertyOp(hook.Operation):
"""Synchronize site wide properties when one has been added/updated"""
cwprop = None # make pylint happy
@@ -223,10 +90,7 @@
return
except ValueError as ex:
raise validation_error(entity, {('value', 'subject'): str(ex)})
- if entity.for_user:
- for session in get_user_sessions(cnx, entity.for_user[0].eid):
- _ChangeUserCWPropertyOp(cnx, session=session, key=key, value=value)
- else:
+ if not entity.for_user:
_ChangeSiteWideCWPropertyOp(cnx, cwprop=self.entity)
@@ -238,7 +102,7 @@
cnx = self._cw
for eidfrom, rtype, eidto in cnx.transaction_data.get('pendingrelations', ()):
if rtype == 'for_user' and eidfrom == self.entity.eid:
- # if for_user was set, delete already handled by hook on for_user deletion
+            # no need to sync user-specific properties
break
else:
_DelSiteWideCWPropertyOp(cnx, key=self.entity.pkey)
@@ -259,8 +123,6 @@
if cnx.vreg.property_info(key)['sitewide']:
msg = _("site-wide property can't be set for user")
raise validation_error(eidfrom, {('for_user', 'subject'): msg})
- for session in get_user_sessions(cnx, self.eidto):
- _ChangeUserCWPropertyOp(cnx, session=session, key=key, value=value)
class DelForUserRelationHook(AddForUserRelationHook):
@@ -269,8 +131,5 @@
def __call__(self):
cnx = self._cw
- key = cnx.execute('Any K WHERE P eid %(x)s, P pkey K', {'x': self.eidfrom})[0][0]
cnx.transaction_data.setdefault('pendingrelations', []).append(
(self.eidfrom, self.rtype, self.eidto))
- for session in get_user_sessions(cnx, self.eidto):
- _DelUserCWPropertyOp(cnx, session=session, key=key)
--- a/cubicweb/hooks/syncsources.py Mon Mar 20 11:16:11 2017 +0100
+++ b/cubicweb/hooks/syncsources.py Fri Apr 14 15:40:15 2017 +0200
@@ -19,10 +19,6 @@
from cubicweb import _
-from socket import gethostname
-
-from logilab.common.decorators import clear_cache
-
from cubicweb import validation_error
from cubicweb.predicates import is_instance
from cubicweb.server import SOURCE_TYPES, hook
@@ -32,106 +28,53 @@
__abstract__ = True
category = 'cw.sources'
-
-# repo sources synchronization #################################################
+ def get_source(self, source_entity):
+ if source_entity.name == 'system':
+ return self._cw.repo.system_source
+ return self._cw.repo.get_source(source_entity.type, source_entity.name,
+ {}, source_entity.eid)
-class SourceAddedOp(hook.Operation):
- entity = None # make pylint happy
- def postcommit_event(self):
- self.cnx.repo.add_source(self.entity)
class SourceAddedHook(SourceHook):
__regid__ = 'cw.sources.added'
__select__ = SourceHook.__select__ & is_instance('CWSource')
events = ('after_add_entity',)
+
def __call__(self):
- try:
- sourcecls = SOURCE_TYPES[self.entity.type]
- except KeyError:
+ if self.entity.type not in SOURCE_TYPES:
msg = _('Unknown source type')
raise validation_error(self.entity, {('type', 'subject'): msg})
- # ignore creation of the system source done during database
- # initialisation, as config for this source is in a file and handling
- # is done separatly (no need for the operation either)
- if self.entity.name != 'system':
- sourcecls.check_conf_dict(self.entity.eid, self.entity.host_config,
- fail_if_unknown=not self._cw.vreg.config.repairing)
- SourceAddedOp(self._cw, entity=self.entity)
+ source = self.get_source(self.entity)
+ source.check_urls(self.entity)
+ source.check_config(self.entity)
-class SourceRemovedOp(hook.Operation):
- uri = None # make pylint happy
- def postcommit_event(self):
- self.cnx.repo.remove_source(self.uri)
class SourceRemovedHook(SourceHook):
__regid__ = 'cw.sources.removed'
__select__ = SourceHook.__select__ & is_instance('CWSource')
events = ('before_delete_entity',)
+
def __call__(self):
if self.entity.name == 'system':
msg = _("You cannot remove the system source")
raise validation_error(self.entity, {None: msg})
- SourceRemovedOp(self._cw, uri=self.entity.name)
-
-
-class SourceConfigUpdatedOp(hook.DataOperationMixIn, hook.Operation):
-
- def precommit_event(self):
- self.__processed = []
- for source in self.get_data():
- if not self.cnx.deleted_in_transaction(source.eid):
- conf = source.repo_source.check_config(source)
- self.__processed.append( (source, conf) )
-
- def postcommit_event(self):
- for source, conf in self.__processed:
- source.repo_source.update_config(source, conf)
-
-
-class SourceRenamedOp(hook.LateOperation):
- oldname = newname = None # make pylint happy
-
- def postcommit_event(self):
- repo = self.cnx.repo
- # XXX race condition
- source = repo.sources_by_uri.pop(self.oldname)
- source.uri = self.newname
- source.public_config['uri'] = self.newname
- repo.sources_by_uri[self.newname] = source
- clear_cache(repo, 'source_defs')
class SourceUpdatedHook(SourceHook):
__regid__ = 'cw.sources.configupdate'
__select__ = SourceHook.__select__ & is_instance('CWSource')
events = ('before_update_entity',)
+
def __call__(self):
if 'name' in self.entity.cw_edited:
oldname, newname = self.entity.cw_edited.oldnewvalue('name')
if oldname == 'system':
msg = _("You cannot rename the system source")
raise validation_error(self.entity, {('name', 'subject'): msg})
- SourceRenamedOp(self._cw, oldname=oldname, newname=newname)
- if 'config' in self.entity.cw_edited or 'url' in self.entity.cw_edited:
- if self.entity.name == 'system' and self.entity.config:
- msg = _("Configuration of the system source goes to "
- "the 'sources' file, not in the database")
- raise validation_error(self.entity, {('config', 'subject'): msg})
- SourceConfigUpdatedOp.get_instance(self._cw).add_data(self.entity)
-
-class SourceHostConfigUpdatedHook(SourceHook):
- __regid__ = 'cw.sources.hostconfigupdate'
- __select__ = SourceHook.__select__ & is_instance('CWSourceHostConfig')
- events = ('after_add_entity', 'after_update_entity', 'before_delete_entity',)
- def __call__(self):
- if self.entity.match(gethostname()):
- if self.event == 'after_update_entity' and \
- not 'config' in self.entity.cw_edited:
- return
- try:
- SourceConfigUpdatedOp.get_instance(self._cw).add_data(self.entity.cwsource)
- except IndexError:
- # XXX no source linked to the host config yet
- pass
+ source = self.get_source(self.entity)
+ if 'url' in self.entity.cw_edited:
+ source.check_urls(self.entity)
+ if 'config' in self.entity.cw_edited:
+ source.check_config(self.entity)
--- a/cubicweb/hooks/test/unittest_hooks.py Mon Mar 20 11:16:11 2017 +0100
+++ b/cubicweb/hooks/test/unittest_hooks.py Fri Apr 14 15:40:15 2017 +0200
@@ -30,6 +30,7 @@
from cubicweb import ValidationError
from cubicweb.devtools.testlib import CubicWebTC
+from cubicweb.server.hook import Operation
class CoreHooksTC(CubicWebTC):
@@ -217,6 +218,21 @@
'login': u'login is part of violated unicity constraint'})
+class OperationTC(CubicWebTC):
+
+ def test_bad_postcommit_event(self):
+
+ class BadOp(Operation):
+ def postcommit_event(self):
+ raise RuntimeError('this is bad')
+
+ with self.admin_access.cnx() as cnx:
+ BadOp(cnx)
+ with self.assertRaises(RuntimeError) as cm:
+ cnx.commit()
+ self.assertEqual(str(cm.exception), 'this is bad')
+
+
if __name__ == '__main__':
import unittest
unittest.main()
--- a/cubicweb/hooks/test/unittest_syncsession.py Mon Mar 20 11:16:11 2017 +0100
+++ b/cubicweb/hooks/test/unittest_syncsession.py Fri Apr 14 15:40:15 2017 +0200
@@ -80,23 +80,6 @@
cnx.commit()
self.assertEqual(self.vreg.property_value('test.int'), 42)
- def test_sync_user_props(self):
- with self.admin_access.client_cnx() as cnx:
- self.assertNotIn('ui.language', cnx.user.properties)
- cnx.user.set_property(u'ui.language', u'fr')
- self.assertNotIn('ui.language', cnx.user.properties)
- cnx.commit()
- self.assertEqual(cnx.user.properties['ui.language'], 'fr')
- cnx.user.set_property(u'ui.language', u'en')
- self.assertEqual(cnx.user.properties['ui.language'], 'fr')
- cnx.commit()
- self.assertEqual(cnx.user.properties['ui.language'], 'en')
- cnx.execute('DELETE CWProperty X WHERE X for_user U, U eid %(u)s',
- {'u': cnx.user.eid})
- self.assertEqual(cnx.user.properties['ui.language'], 'en')
- cnx.commit()
- self.assertNotIn('ui.language', cnx.user.properties)
-
def test_sync_sitewide_props(self):
with self.admin_access.client_cnx() as cnx:
self.assertNotIn('ui.language', cnx.vreg['propertyvalues'])
@@ -114,22 +97,6 @@
self.assertNotIn('ui.language', cnx.vreg['propertyvalues'])
-class UserGroupsSyncTC(CubicWebTC):
-
- def test_sync_groups(self):
- with self.admin_access.client_cnx() as cnx:
- cnx.execute('SET U in_group G WHERE G name "users", U eid %(u)s',
- {'u': cnx.user.eid})
- self.assertEqual(cnx.user.groups, set(['managers']))
- cnx.commit()
- self.assertEqual(cnx.user.groups, set(['managers', 'users']))
- cnx.execute('DELETE U in_group G WHERE G name "users", U eid %(u)s',
- {'u': cnx.user.eid})
- self.assertEqual(cnx.user.groups, set(['managers', 'users']))
- cnx.commit()
- self.assertEqual(cnx.user.groups, set(['managers']))
-
-
if __name__ == '__main__':
import unittest
unittest.main()
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/hooks/test/unittest_syncsources.py Fri Apr 14 15:40:15 2017 +0200
@@ -0,0 +1,64 @@
+# copyright 2017 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+
+from cubicweb import ValidationError
+from cubicweb.devtools.testlib import CubicWebTC
+
+
+class SyncSourcesTC(CubicWebTC):
+
+ def test_source_type_unknown(self):
+ with self.admin_access.cnx() as cnx:
+ with self.assertRaises(ValidationError) as cm:
+ cnx.create_entity(
+ 'CWSource', name=u'source',
+ type=u'doesnotexit',
+ parser=u'doestnotmatter',
+ )
+ self.assertIn('Unknown source type', str(cm.exception))
+
+ def test_cant_delete_system_source(self):
+ with self.admin_access.cnx() as cnx:
+ with self.assertRaises(ValidationError) as cm:
+ cnx.execute('DELETE CWSource X')
+ self.assertIn('You cannot remove the system source', str(cm.exception))
+
+ def test_cant_rename_system_source(self):
+ with self.admin_access.cnx() as cnx:
+ with self.assertRaises(ValidationError) as cm:
+ cnx.find('CWSource').one().cw_set(name=u'sexy name')
+ self.assertIn('You cannot rename the system source', str(cm.exception))
+
+ def test_cant_add_config_system_source(self):
+ with self.admin_access.cnx() as cnx:
+ source = cnx.find('CWSource').one()
+
+ with self.assertRaises(ValidationError) as cm:
+ source.cw_set(url=u'whatever')
+ self.assertIn("Configuration of the system source goes to the 'sources' file",
+ str(cm.exception))
+
+ with self.assertRaises(ValidationError) as cm:
+ source.cw_set(config=u'whatever')
+ self.assertIn("Configuration of the system source goes to the 'sources' file",
+ str(cm.exception))
+
+
+if __name__ == '__main__':
+ import unittest
+ unittest.main()
--- a/cubicweb/misc/migration/3.10.0_Any.py Mon Mar 20 11:16:11 2017 +0100
+++ b/cubicweb/misc/migration/3.10.0_Any.py Fri Apr 14 15:40:15 2017 +0200
@@ -1,10 +1,5 @@
from six import text_type
-for uri, cfg in config.read_sources_file().items():
- if uri in ('system', 'admin'):
- continue
- repo.sources_by_uri[uri] = repo.get_source(cfg['adapter'], uri, cfg.copy())
-
add_entity_type('CWSource')
add_relation_definition('CWSource', 'cw_source', 'CWSource')
add_entity_type('CWSourceHostConfig')
@@ -21,7 +16,6 @@
for uri, cfg in config.read_sources_file().items():
if uri in ('system', 'admin'):
continue
- repo.sources_by_uri.pop(uri)
config = u'\n'.join('%s=%s' % (key, value) for key, value in cfg.items()
if key != 'adapter' and value is not None)
create_entity('CWSource', name=text_type(uri), type=text_type(cfg['adapter']),
--- a/cubicweb/misc/migration/3.15.0_Any.py Mon Mar 20 11:16:11 2017 +0100
+++ b/cubicweb/misc/migration/3.15.0_Any.py Fri Apr 14 15:40:15 2017 +0200
@@ -1,10 +1,30 @@
+from cubicweb.server import SOURCE_TYPES
+from cubicweb.server.serverconfig import (SourceConfiguration,
+ generate_source_config)
+
+
sync_schema_props_perms('EmailAddress')
+
+def update_config(source, **config):
+ cfg = source.dictconfig
+ cfg.update(config)
+ options = SOURCE_TYPES[source.type].options
+ sconfig = SourceConfiguration(source._cw.vreg.config, options=options)
+ for opt, val in cfg.items():
+ try:
+ sconfig.set_option(opt, val)
+ except OptionError:
+ continue
+ cfgstr = text_type(generate_source_config(sconfig), source._cw.encoding)
+ source.cw_set(config=cfgstr)
+
+
for source in rql('CWSource X WHERE X type "ldapuser"').entities():
config = source.dictconfig
host = config.pop('host', u'ldap')
protocol = config.pop('protocol', u'ldap')
source.cw_set(url=u'%s://%s' % (protocol, host))
- source.update_config(skip_unknown=True, **config)
+ update_config(source, **config)
commit()
--- a/cubicweb/misc/scripts/ldap_change_base_dn.py Mon Mar 20 11:16:11 2017 +0100
+++ b/cubicweb/misc/scripts/ldap_change_base_dn.py Fri Apr 14 15:40:15 2017 +0200
@@ -8,7 +8,7 @@
print()
print('you should not have updated your sources file yet')
-olddn = repo.sources_by_uri[uri].config['user-base-dn']
+olddn = repo.source_by_uri(uri).config['user-base-dn']
assert olddn != newdn
--- a/cubicweb/pyramid/__init__.py Mon Mar 20 11:16:11 2017 +0100
+++ b/cubicweb/pyramid/__init__.py Fri Apr 14 15:40:15 2017 +0200
@@ -22,7 +22,6 @@
import atexit
import os
-from warnings import warn
import wsgicors
@@ -226,7 +225,12 @@
repo = config.registry['cubicweb.repository'] = cwconfig.repository()
config.registry['cubicweb.registry'] = repo.vreg
- atexit.register(repo.shutdown)
+ if cwconfig.mode != 'test':
+ @atexit.register
+ def shutdown_repo():
+ if repo.shutting_down:
+ return
+ repo.shutdown()
if asbool(config.registry.settings.get('cubicweb.defaults', True)):
config.include('cubicweb.pyramid.defaults')
@@ -238,10 +242,3 @@
if asbool(config.registry.settings.get('cubicweb.bwcompat', True)):
config.include('cubicweb.pyramid.bwcompat')
-
- if cwconfig.debugmode:
- try:
- config.include('pyramid_debugtoolbar')
- except ImportError:
- warn('pyramid_debugtoolbar package not available, install it to '
- 'get UI debug features', RuntimeWarning)
--- a/cubicweb/pyramid/auth.py Mon Mar 20 11:16:11 2017 +0100
+++ b/cubicweb/pyramid/auth.py Fri Apr 14 15:40:15 2017 +0200
@@ -17,8 +17,74 @@
#
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+"""
+CubicWeb AuthTkt authentication policy
+--------------------------------------
-"""Authentication policies for cubicweb.pyramid."""
+When using the `cubicweb.pyramid.auth` module, which is the default in most
+cases, you may have to configure the behaviour of these authentication
+policies using standard Pyramid configuration. You may want to configure in
+your pyramid configuration file:
+
+:Session Authentication:
+
+ This is an `AuthTktAuthenticationPolicy`_ so you may overwrite default
+ configuration values by adding configuration entries using the prefix
+ ``cubicweb.auth.authtkt.session``. Default values are:
+
+ ::
+
+ cubicweb.auth.authtkt.session.hashalg = sha512
+ cubicweb.auth.authtkt.session.cookie_name = auth_tkt
+ cubicweb.auth.authtkt.session.timeout = 1200
+ cubicweb.auth.authtkt.session.reissue_time = 120
+ cubicweb.auth.authtkt.session.http_only = True
+ cubicweb.auth.authtkt.session.secure = True
+
+
+:Persistent Authentication:
+
+ This is also an `AuthTktAuthenticationPolicy`_. It is used when persistent
+ sessions are activated (typically when using the cubicweb-rememberme_
+ cube). You may overwrite default configuration values by adding
+ configuration entries using the prefix
+ ``cubicweb.auth.authtkt.persistent``. Default values are:
+
+ ::
+
+ cubicweb.auth.authtkt.persistent.hashalg = sha512
+ cubicweb.auth.authtkt.persistent.cookie_name = pauth_tkt
+ cubicweb.auth.authtkt.persistent.max_age = 3600*24*30
+ cubicweb.auth.authtkt.persistent.reissue_time = 3600*24
+ cubicweb.auth.authtkt.persistent.http_only = True
+ cubicweb.auth.authtkt.persistent.secure = True
+
+
+.. Warning:: Legacy timeout values from the instance's
+ ``all-in-one.conf`` are **not** used at all
+ (``http-session-time`` and ``cleanup-session-time``)
+
+.. _CubicWeb: https://www.cubicweb.org/
+.. _`cubicweb-rememberme`: \
+ https://www.cubicweb.org/project/cubicweb-rememberme
+.. _AuthTktAuthenticationPolicy: \
+ http://docs.pylonsproject.org/projects/pyramid/en/latest/api/authentication.html#pyramid.authentication.AuthTktAuthenticationPolicy
+
+
+Secrets
+~~~~~~~
+There are a number of secrets to configure in ``pyramid.ini``. They
+should be different one from each other, as explained in `Pyramid's
+documentation`_.
+
+For the record, regarding authentication:
+
+:cubicweb.auth.authtkt.session.secret: This secret is used to encrypt
+ the authentication cookie.
+
+:cubicweb.auth.authtkt.persistent.secret: This secret is used to
+ encrypt the persistent authentication cookie.
+"""
import datetime
import logging
--- a/cubicweb/pyramid/development.ini.tmpl Mon Mar 20 11:16:11 2017 +0100
+++ b/cubicweb/pyramid/development.ini.tmpl Fri Apr 14 15:40:15 2017 +0200
@@ -12,7 +12,6 @@
pyramid.debug_routematch = false
pyramid.default_locale_name = en
pyramid.includes =
- pyramid_debugtoolbar
cubicweb_%(cubename)s
# By default, the toolbar only appears for clients from IP addresses
--- a/cubicweb/pyramid/session.py Mon Mar 20 11:16:11 2017 +0100
+++ b/cubicweb/pyramid/session.py Fri Apr 14 15:40:15 2017 +0200
@@ -17,8 +17,72 @@
#
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+"""
+Web session when using pyramid
+------------------------------
-"""Pyramid session factories for CubicWeb."""
+CubicWeb provides a ``CWSession`` entity type so that sessions can be
+stored in the database, which allows running a CubicWeb instance
+without having to set up a separate session storage solution
+(like redis or memcache).
+
+However, for production systems, it is greatly advised to use such a
+storage solution for the sessions.
+
+The handling of the sessions is made by pyramid (see the
+`pyramid's documentation on sessions`_ for more details).
+
+For example, to set up a redis based session storage, you need the
+`pyramid-redis-session`_ package, then you must configure pyramid to
+use this backend, by configuring the pyramid configuration file:
+
+
+.. code-block:: ini
+
+ [main]
+ cubicweb.defaults = no # we do not want to load the default cw session handling
+
+ cubicweb.auth.authtkt.session.secret = <secret1>
+ cubicweb.auth.authtkt.persistent.secret = <secret2>
+ cubicweb.auth.authtkt.session.secure = yes
+ cubicweb.auth.authtkt.persistent.secure = yes
+
+ redis.sessions.secret = <secret3>
+ redis.sessions.prefix = <my-app>:
+
+ redis.sessions.url = redis://localhost:6379/0
+
+ pyramid.includes =
+ pyramid_redis_sessions
+ cubicweb.pyramid.auth
+ cubicweb.pyramid.login
+
+
+.. Warning:: If you want to be able to log in a CubicWeb application
+ served by pyramid on an unsecured stream (typically when
+ you start an instance in dev mode using a simple
+ ``cubicweb-ctl pyramid -D -linfo myinstance``), you
+ **must** set ``cubicweb.auth.authtkt.session.secure`` to
+ ``no``.
+
+Secrets
+~~~~~~~
+
+There are a number of secrets to configure in ``pyramid.ini``. They
+should be different one from each other, as explained in `Pyramid's
+documentation`_.
+
+For the record, regarding session handling:
+
+:cubicweb.session.secret: This secret is used to encrypt the session's
+ data ID (data themselves are stored in the backend, database or
+ redis) when using the integrated (``CWSession`` based) session data
+ storage.
+
+:redis.session.secret: This secret is used to encrypt the session's
+ data ID (data themselves are stored in the backend, database or
+ redis) when using redis as backend.
+"""
import warnings
import logging
--- a/cubicweb/pyramid/test/__init__.py Mon Mar 20 11:16:11 2017 +0100
+++ b/cubicweb/pyramid/test/__init__.py Fri Apr 14 15:40:15 2017 +0200
@@ -15,7 +15,9 @@
def setUp(self):
# Skip CubicWebTestTC setUp
super(CubicWebTestTC, self).setUp()
- config = Configurator(settings=self.settings)
+ settings = {'cubicweb.bwcompat': False}
+ settings.update(self.settings)
+ config = Configurator(settings=settings)
config.registry['cubicweb.repository'] = self.repo
config.include('cubicweb.pyramid')
self.includeme(config)
--- a/cubicweb/pyramid/test/test_bw_request.py Mon Mar 20 11:16:11 2017 +0100
+++ b/cubicweb/pyramid/test/test_bw_request.py Fri Apr 14 15:40:15 2017 +0200
@@ -10,6 +10,8 @@
class WSGIAppTest(PyramidCWTest):
+ settings = {'cubicweb.bwcompat': True}
+
def make_request(self, path, environ=None, **kw):
r = webtest.app.TestRequest.blank(path, environ, **kw)
--- a/cubicweb/pyramid/test/test_core.py Mon Mar 20 11:16:11 2017 +0100
+++ b/cubicweb/pyramid/test/test_core.py Fri Apr 14 15:40:15 2017 +0200
@@ -24,6 +24,7 @@
class CoreTest(PyramidCWTest):
anonymous_allowed = True
+ settings = {'cubicweb.bwcompat': True}
def includeme(self, config):
config.add_route('uncommitable', '/uncommitable')
--- a/cubicweb/pyramid/test/test_login.py Mon Mar 20 11:16:11 2017 +0100
+++ b/cubicweb/pyramid/test/test_login.py Fri Apr 14 15:40:15 2017 +0200
@@ -21,6 +21,7 @@
class LoginTest(PyramidCWTest):
+ settings = {'cubicweb.bwcompat': True}
def test_login_form(self):
res = self.webapp.get('/login')
--- a/cubicweb/rqlrewrite.py Mon Mar 20 11:16:11 2017 +0100
+++ b/cubicweb/rqlrewrite.py Fri Apr 14 15:40:15 2017 +0200
@@ -21,7 +21,6 @@
This is used for instance for read security checking in the repository.
"""
-
from six import text_type, string_types
from rql import nodes as n, stmts, TypeResolverException
@@ -33,7 +32,7 @@
from logilab.common.graph import has_path
from cubicweb import Unauthorized
-from cubicweb.schema import RRQLExpression
+
def cleanup_solutions(rqlst, solutions):
for sol in solutions:
@@ -66,7 +65,7 @@
var = newroot.defined_vars[varname]
stinfo = var.stinfo
if stinfo.get('uidrel') is not None:
- continue # eid specified, no need for additional type specification
+ continue # eid specified, no need for additional type specification
try:
typerel = rqlst.defined_vars[varname].stinfo.get('typerel')
except KeyError:
@@ -103,7 +102,7 @@
# possible types can only be a subset of existing ones, so only
# remove no more possible types
for cst in mytyperel.get_nodes(n.Constant):
- if not cst.value in possibletypes:
+ if cst.value not in possibletypes:
cst.parent.remove(cst)
else:
# we have to add types restriction
@@ -159,12 +158,13 @@
def _expand_selection(terms, selected, aliases, select, newselect):
for term in terms:
for vref in term.iget_nodes(n.VariableRef):
- if not vref.name in selected:
+ if vref.name not in selected:
select.append_selected(vref)
colalias = newselect.get_variable(vref.name, len(aliases))
aliases.append(n.VariableRef(colalias))
selected.add(vref.name)
+
def _has_multiple_cardinality(etypes, rdef, ttypes_func, cardindex):
"""return True if relation definitions from entity types (`etypes`) to
target types returned by the `ttypes_func` function all have single (1 or ?)
@@ -176,6 +176,7 @@
return True
return False
+
def _compatible_relation(relations, stmt, sniprel):
"""Search among given rql relation nodes if there is one 'compatible' with the
snippet relation, and return it if any, else None.
@@ -210,6 +211,7 @@
because it create an unresolvable query (eg no solutions found)
"""
+
class VariableFromSubQuery(Exception):
"""flow control exception to indicate that a variable is coming from a
subquery, and let parent act accordingly
@@ -302,15 +304,15 @@
newvar.stinfo.setdefault('relations', set()).add(rel)
newselect.set_orderby(sortterms)
_expand_selection(select.orderby, selected, aliases, select, newselect)
- select.orderby = () # XXX dereference?
+ select.orderby = () # XXX dereference?
if select.groupby:
newselect.set_groupby([g.copy(newselect) for g in select.groupby])
_expand_selection(select.groupby, selected, aliases, select, newselect)
- select.groupby = () # XXX dereference?
+ select.groupby = () # XXX dereference?
if select.having:
newselect.set_having([g.copy(newselect) for g in select.having])
_expand_selection(select.having, selected, aliases, select, newselect)
- select.having = () # XXX dereference?
+ select.having = () # XXX dereference?
if select.limit:
newselect.limit = select.limit
select.limit = None
@@ -338,7 +340,7 @@
myrqlst = select.copy(solutions=lchecksolutions)
myunion.append(myrqlst)
# in-place rewrite + annotation / simplification
- lcheckdef = [({var: 'X'}, rqlexprs) for var, rqlexprs in lcheckdef]
+ lcheckdef = [({v: 'X'}, rqlexprs) for v, rqlexprs in lcheckdef]
self.rewrite(myrqlst, lcheckdef, kwargs)
_add_noinvariant(noinvariant, restricted, myrqlst, nbtrees)
if () in localchecks:
@@ -376,7 +378,7 @@
'something wrong in your schema permission (for instance using a '
'RQLExpression which inserts a relation which doesn\'t exist in '
'the schema)\nOrig solutions: %s\nnew solutions: %s' % (
- select, solutions, newsolutions))
+ select, solutions, newsolutions))
if len(newsolutions) > len(solutions):
newsolutions = self.remove_ambiguities(snippets, newsolutions)
assert newsolutions
@@ -390,7 +392,7 @@
varmap = tuple(sorted(varmap.items()))
else:
assert isinstance(varmap, tuple), varmap
- if varexistsmap is not None and not varmap in varexistsmap:
+ if varexistsmap is not None and varmap not in varexistsmap:
continue
self.insert_varmap_snippets(varmap, rqlexprs, varexistsmap)
@@ -418,7 +420,7 @@
vi['rhs_rels'].setdefault(rel.r_type, []).append(rel)
vi['lhs_rels'] = {}
for rel in sti.get('relations', []):
- if not rel in sti.get('rhsrelations', []):
+ if rel not in sti.get('rhsrelations', []):
vi['lhs_rels'].setdefault(rel.r_type, []).append(rel)
else:
vi['rhs_rels'] = vi['lhs_rels'] = {}
@@ -460,7 +462,7 @@
self.insert_snippet(varmap, rqlexpr.snippet_rqlst, exists)
if varexistsmap is None and not inserted:
# no rql expression found matching rql solutions. User has no access right
- raise Unauthorized() # XXX may also be because of bad constraints in schema definition
+ raise Unauthorized() # XXX may also be because of bad constraints in schema definition
def insert_snippet(self, varmap, snippetrqlst, previous=None):
new = snippetrqlst.where.accept(self)
@@ -498,7 +500,6 @@
assert previous is None
self._insert_scope, new = self.snippet_subquery(varmap, new)
self.insert_pending()
- #self._insert_scope = None
return new
new = self._inserted_root(new)
if previous is None:
@@ -548,7 +549,7 @@
# XXX dunno how to handle this
self.session.error(
'cant check security of %s, ambigous type for %s in %s',
- stmt, varname, key[0]) # key[0] == the rql expression
+ stmt, varname, key[0]) # key[0] == the rql expression
raise Unauthorized()
etype = next(iter(ptypes))
eschema = self.schema.eschema(etype)
@@ -581,7 +582,7 @@
rschema = get_rschema(rel.r_type)
if rschema.final or rschema.inlined:
subselect_vrefs = []
- rel.children[0].name = varname # XXX explain why
+ rel.children[0].name = varname # XXX explain why
subselect.add_restriction(rel.copy(subselect))
for vref in rel.children[1].iget_nodes(n.VariableRef):
if isinstance(vref.variable, n.ColumnAlias):
@@ -611,7 +612,7 @@
for orel in iter_relations(ostinfo):
orschema = get_rschema(orel.r_type)
if orschema.final or orschema.inlined:
- todo.append( (vref.name, ostinfo) )
+ todo.append((vref.name, ostinfo))
break
if need_null_test:
snippetrqlst = n.Or(
@@ -680,10 +681,10 @@
for sol in newsolutions:
variante = []
for key, newvar in self.rewritten.items():
- variante.append( (key, sol[newvar]) )
+ variante.append((key, sol[newvar]))
variantes.add(tuple(variante))
# rebuild variantes as dict
- variantes = [dict(variante) for variante in variantes]
+ variantes = [dict(v) for v in variantes]
# remove variable which have always the same type
for key in self.rewritten:
it = iter(variantes)
@@ -709,13 +710,12 @@
if newvar in removed:
del self.rewritten[key]
-
def _may_be_shared_with(self, sniprel, target):
"""if the snippet relation can be skipped to use a relation from the
original query, return that relation node
"""
if sniprel.neged(strict=True):
- return None # no way
+ return None # no way
rschema = self.schema.rschema(sniprel.r_type)
stmt = self.current_statement()
for vi in self.varinfos:
@@ -725,11 +725,13 @@
cardindex = 0
ttypes_func = rschema.objects
rdef = rschema.rdef
- else: # target == 'subject':
+ else: # target == 'subject':
orels = vi['rhs_rels'][sniprel.r_type]
cardindex = 1
ttypes_func = rschema.subjects
- rdef = lambda x, y: rschema.rdef(y, x)
+
+ def rdef(x, y):
+ return rschema.rdef(y, x)
except KeyError:
# may be raised by vi['xhs_rels'][sniprel.r_type]
continue
@@ -817,8 +819,7 @@
return True
vargraph = self.current_expr.vargraph
for existingvar in self.existingvars:
- #path = has_path(vargraph, varname, existingvar)
- if not varname in vargraph or has_path(vargraph, varname, existingvar):
+ if varname not in vargraph or has_path(vargraph, varname, existingvar):
return True
# no path from this variable to an existing variable
return False
@@ -835,7 +836,7 @@
assert lhs.name == 'U'
action = node.r_type.split('_')[1]
key = (self.current_expr, self.varmap, rhs.name)
- self.pending_keys.append( (key, action) )
+ self.pending_keys.append((key, action))
return
if isinstance(rhs, n.VariableRef):
if self.existingvars and not self.keep_var(rhs.name):
@@ -905,22 +906,18 @@
This class *isn't thread safe*.
"""
- def __init__(self, session):
- super(RQLRelationRewriter, self).__init__(session)
- self.rules = {}
- for rschema in self.schema.iter_computed_relations():
- self.rules[rschema.type] = RRQLExpression(rschema.rule)
def rewrite(self, union, kwargs=None):
self.kwargs = kwargs
self.removing_ambiguity = False
self.existingvars = None
self.pending_keys = None
+ rules = self.schema.rules_rqlexpr_mapping
for relation in union.iget_nodes(n.Relation):
- if relation.r_type in self.rules:
+ if relation.r_type in rules:
self.select = relation.stmt
- self.solutions = solutions = self.select.solutions[:]
- self.current_expr = self.rules[relation.r_type]
+ self.solutions = self.select.solutions[:]
+ self.current_expr = rules[relation.r_type]
self._insert_scope = relation.scope
self.rewritten = {}
lhs, rhs = relation.get_variable_parts()
--- a/cubicweb/schema.py Mon Mar 20 11:16:11 2017 +0100
+++ b/cubicweb/schema.py Fri Apr 14 15:40:15 2017 +0200
@@ -1008,6 +1008,16 @@
etype_name_re = r'[A-Z][A-Za-z0-9]*[a-z]+[A-Za-z0-9]*$'
+ @cachedproperty
+ def rules_rqlexpr_mapping(self):
+ """Return a dictionary mapping rtype to RRQLExpression for computed
+ relations.
+ """
+ rules = {}
+ for rschema in self.iter_computed_relations():
+ rules[rschema.type] = RRQLExpression(rschema.rule)
+ return rules
+
def add_entity_type(self, edef):
edef.name = str(edef.name)
edef.name = bw_normalize_etype(edef.name)
@@ -1148,6 +1158,8 @@
super(CubicWebSchema, self).rebuild_infered_relations()
self.finalize_computed_attributes()
self.finalize_computed_relations()
+ # remove @cachedproperty cache
+ self.__dict__.pop('rules_rqlexpr_mapping', None)
# additional cw specific constraints ###########################################
--- a/cubicweb/server/repository.py Mon Mar 20 11:16:11 2017 +0100
+++ b/cubicweb/server/repository.py Fri Apr 14 15:40:15 2017 +0200
@@ -211,7 +211,6 @@
def __init__(self, config, scheduler=None, vreg=None):
self.config = config
- self.sources_by_eid = {}
if vreg is None:
vreg = cwvreg.CWRegistryStore(config)
self.vreg = vreg
@@ -230,7 +229,6 @@
# sources (additional sources info in the system database)
self.system_source = self.get_source('native', 'system',
config.system_source_config.copy())
- self.sources_by_uri = {'system': self.system_source}
# querier helper, need to be created after sources initialization
self.querier = querier.QuerierHelper(self, self.schema)
# cache eid -> type
@@ -294,7 +292,7 @@
# configurate tsearch according to postgres version
self.system_source.init_creating()
else:
- self.init_sources_from_database()
+ self._init_system_source()
if 'CWProperty' in self.schema:
self.vreg.init_properties(self.properties())
# 4. close initialization connection set and reopen fresh ones for
@@ -304,49 +302,70 @@
# 5. call instance level initialisation hooks
self.hm.call_hooks('server_startup', repo=self)
- # internals ###############################################################
+ def source_by_uri(self, uri):
+ with self.internal_cnx() as cnx:
+ rset = cnx.find('CWSource', name=uri)
+ if not rset:
+ raise ValueError('no source with uri %s found' % uri)
+ return self._source_from_cwsource(rset.one())
+
+ def source_by_eid(self, eid):
+ with self.internal_cnx() as cnx:
+ rset = cnx.find('CWSource', eid=eid)
+ if not rset:
+ raise ValueError('no source with eid %d found' % eid)
+ return self._source_from_cwsource(rset.one())
- def init_sources_from_database(self):
- if self.config.quick_start or 'CWSource' not in self.schema: # 3.10 migration
- self.system_source.init_creating()
+ @property
+ def sources_by_uri(self):
+ mapping = {'system': self.system_source}
+ mapping.update((sourceent.name, source)
+ for sourceent, source in self._sources())
+ return mapping
+
+ @property
+ @deprecated("[3.25] use source_by_eid(<eid>)")
+ def sources_by_eid(self):
+ mapping = {self.system_source.eid: self.system_source}
+ mapping.update((sourceent.eid, source)
+ for sourceent, source in self._sources())
+ return mapping
+
+ def _sources(self):
+ if self.config.quick_start:
return
with self.internal_cnx() as cnx:
- # FIXME: sources should be ordered (add_entity priority)
for sourceent in cnx.execute(
'Any S, SN, SA, SC WHERE S is_instance_of CWSource, '
- 'S name SN, S type SA, S config SC').entities():
- if sourceent.name == 'system':
- self.system_source.eid = sourceent.eid
- self.sources_by_eid[sourceent.eid] = self.system_source
- self.system_source.init(True, sourceent)
- continue
- self.add_source(sourceent)
+ 'S name SN, S type SA, S config SC, S name != "system"').entities():
+ source = self._source_from_cwsource(sourceent)
+ yield sourceent, source
+ self._clear_source_defs_caches()
- def add_source(self, sourceent):
- try:
- source = self.get_source(sourceent.type, sourceent.name,
- sourceent.host_config, sourceent.eid)
- except RuntimeError:
- if self.config.repairing:
- self.exception('cant setup source %s, skipped', sourceent.name)
- return
- raise
- self.sources_by_eid[sourceent.eid] = source
- self.sources_by_uri[sourceent.name] = source
+ def _source_from_cwsource(self, sourceent):
+ source = self.get_source(sourceent.type, sourceent.name,
+ sourceent.host_config, sourceent.eid)
if self.config.source_enabled(source):
# call source's init method to complete their initialisation if
# needed (for instance looking for persistent configuration using an
# internal session, which is not possible until connections sets have been
# initialized)
- source.init(True, sourceent)
- else:
- source.init(False, sourceent)
- self._clear_source_defs_caches()
+ source.init(sourceent)
+ return source
+
+ # internals ###############################################################
- def remove_source(self, uri):
- source = self.sources_by_uri.pop(uri)
- del self.sources_by_eid[source.eid]
- self._clear_source_defs_caches()
+ def _init_system_source(self):
+ if self.config.quick_start:
+ self.system_source.init_creating()
+ return
+ with self.internal_cnx() as cnx:
+ sourceent = cnx.execute(
+ 'Any S, SA, SC WHERE S is_instance_of CWSource,'
+ ' S name "system", S type SA, S config SC'
+ ).one()
+ self.system_source.eid = sourceent.eid
+ self.system_source.init(sourceent)
def get_source(self, type, uri, source_config, eid=None):
# set uri and type in source config so it's available through
@@ -363,8 +382,7 @@
else:
self.vreg._set_schema(schema)
self.querier.set_schema(schema)
- for source in self.sources_by_uri.values():
- source.set_schema(schema)
+ self.system_source.set_schema(schema)
self.schema = schema
def deserialize_schema(self):
@@ -383,6 +401,12 @@
raise Exception('Is the database initialised ? (cause: %s)' % ex)
return appschema
+ def has_scheduler(self):
+ """Return True if the repository has a scheduler attached and is able
+ to register looping tasks.
+ """
+ return self._scheduler is not None
+
def run_scheduler(self):
"""Start repository scheduler after preparing the repository for that.
@@ -392,7 +416,7 @@
XXX Other startup related stuffs are done elsewhere. In Repository
XXX __init__ or in external codes (various server managers).
"""
- assert self._scheduler is not None, \
+ assert self.has_scheduler(), \
"This Repository is not intended to be used as a server"
self.info(
'starting repository scheduler with tasks: %s',
@@ -405,8 +429,14 @@
looping tasks can only be registered during repository initialization,
once done this method will fail.
"""
- assert self._scheduler is not None, \
- "This Repository is not intended to be used as a server"
+ if self.config.repairing:
+ return
+ if not self.has_scheduler():
+ self.warning(
+ 'looping task %s will not run in this process where repository '
+ 'has no scheduler; use "cubicweb-ctl scheduler <appid>" to '
+ 'have it running', func)
+ return
event = utils.schedule_periodic_task(
self._scheduler, interval, func, *args)
self.info('scheduled periodic task %s (interval: %.2fs)',
--- a/cubicweb/server/serverctl.py Mon Mar 20 11:16:11 2017 +0100
+++ b/cubicweb/server/serverctl.py Fri Apr 14 15:40:15 2017 +0200
@@ -1056,14 +1056,14 @@
if len(args) >= 2:
for name in args[1:]:
try:
- source = repo.sources_by_uri[name]
- except KeyError:
+ source = repo.source_by_uri(name)
+ except ValueError:
cnx.error('no source named %r' % name)
errors = True
else:
sources.append(source)
else:
- for uri, source in list(repo.sources_by_uri.items()):
+ for uri, source in repo.sources_by_uri.items():
if (uri != 'system' and
repo.config.source_enabled(source) and
source.config['synchronize']):
--- a/cubicweb/server/session.py Mon Mar 20 11:16:11 2017 +0100
+++ b/cubicweb/server/session.py Fri Apr 14 15:40:15 2017 +0200
@@ -398,28 +398,6 @@
def rql_rewriter(self):
return self._rewriter
- @_open_only
- @deprecated('[3.19] use session or transaction data', stacklevel=3)
- def get_shared_data(self, key, default=None, pop=False, txdata=False):
- """return value associated to `key` in session data"""
- if txdata:
- data = self.transaction_data
- else:
- data = self.data
- if pop:
- return data.pop(key, default)
- else:
- return data.get(key, default)
-
- @_open_only
- @deprecated('[3.19] use session or transaction data', stacklevel=3)
- def set_shared_data(self, key, value, txdata=False):
- """set value associated to `key` in session data"""
- if txdata:
- self.transaction_data[key] = value
- else:
- self.data[key] = value
-
def clear(self):
"""reset internal data"""
self.transaction_data = {}
@@ -852,6 +830,8 @@
try:
operation.handle_event('postcommit_event')
except BaseException:
+ if self.repo.config.mode == 'test':
+ raise
self.critical('error while postcommit',
exc_info=sys.exc_info())
self.debug('postcommit transaction %s done', self)
--- a/cubicweb/server/sources/__init__.py Mon Mar 20 11:16:11 2017 +0100
+++ b/cubicweb/server/sources/__init__.py Fri Apr 14 15:40:15 2017 +0200
@@ -29,7 +29,7 @@
from yams.schema import role_name
-from cubicweb import ValidationError, set_log_methods, server
+from cubicweb import ValidationError, set_log_methods, server, _
from cubicweb.server import SOURCE_TYPES
@@ -85,7 +85,7 @@
# these are overridden by set_log_methods below
# only defining here to prevent pylint from complaining
- info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None
+ info = warning = error = critical = exception = debug = lambda msg, *a, **kw: None
def __init__(self, repo, source_config, eid=None):
self.repo = repo
@@ -99,8 +99,8 @@
# on logging
set_log_methods(self, getLogger('cubicweb.sources.' + unormalize(text_type(self.uri))))
source_config.pop('type')
- self.update_config(None, self.check_conf_dict(eid, source_config,
- fail_if_unknown=False))
+ self.config = self._check_config_dict(
+ eid, source_config, raise_on_error=False)
def __repr__(self):
return '<%s %s source %s @%#x>' % (self.uri, self.__class__.__name__,
@@ -132,25 +132,30 @@
"""method called to restore a backup of source's data"""
pass
- @classmethod
- def check_conf_dict(cls, eid, confdict, _=text_type, fail_if_unknown=True):
- """check configuration of source entity. Return config dict properly
+ def _check_config_dict(self, eid, confdict, raise_on_error=True):
+ """Check configuration of source entity and return config dict properly
typed with defaults set.
+
+ If `raise_on_error` is True (the default), a ValidationError will be
+ raised if some error is encountered, else the problem will be ignored.
"""
processed = {}
- for optname, optdict in cls.options:
+ for optname, optdict in self.options:
value = confdict.pop(optname, optdict.get('default'))
if value is configuration.REQUIRED:
- if not fail_if_unknown:
+ if not raise_on_error:
continue
- msg = _('specifying %s is mandatory' % optname)
- raise ValidationError(eid, {role_name('config', 'subject'): msg})
+ msg = _('specifying %s is mandatory')
+ msgargs = optname
+ raise ValidationError(eid, {role_name('config', 'subject'): msg}, msgargs)
elif value is not None:
# type check
try:
value = configuration._validate(value, optdict, optname)
except Exception as ex:
- msg = text_type(ex) # XXX internationalization
+ if not raise_on_error:
+ continue
+ msg = text_type(ex)
raise ValidationError(eid, {role_name('config', 'subject'): msg})
processed[optname] = value
# cw < 3.10 bw compat
@@ -160,37 +165,32 @@
pass
# check for unknown options
if confdict and tuple(confdict) != ('adapter',):
- if fail_if_unknown:
- msg = _('unknown options %s') % ', '.join(confdict)
- raise ValidationError(eid, {role_name('config', 'subject'): msg})
+ if raise_on_error:
+ msg = _('unknown options %s')
+ msgargs = ', '.join(confdict)
+ raise ValidationError(eid, {role_name('config', 'subject'): msg}, msgargs)
else:
- logger = getLogger('cubicweb.sources')
- logger.warning('unknown options %s', ', '.join(confdict))
+ self.warning('unknown options %s', ', '.join(confdict))
# add options to processed, they may be necessary during migration
processed.update(confdict)
return processed
- @classmethod
- def check_config(cls, source_entity):
- """check configuration of source entity"""
- return cls.check_conf_dict(source_entity.eid, source_entity.host_config,
- _=source_entity._cw._)
-
- def update_config(self, source_entity, typedconfig):
- """update configuration from source entity. `typedconfig` is config
- properly typed with defaults set
+ def check_config(self, source_entity):
+ """Check configuration of source entity, raise ValidationError if some
+ errors are detected.
"""
- if source_entity is not None:
- self._entity_update(source_entity)
- self.config = typedconfig
+ return self._check_config_dict(source_entity.eid, source_entity.dictconfig)
- def _entity_update(self, source_entity):
- source_entity.complete()
- if source_entity.url:
- self.urls = [url.strip() for url in source_entity.url.splitlines()
- if url.strip()]
- else:
- self.urls = []
+ def check_urls(self, source_entity):
+ """Check URL of source entity: `urls` is a string that may contain one
+ URL per line), and return a list of at least one validated URL.
+ """
+ urls = source_entity.url if source_entity.url else ''
+ urls = [url.strip() for url in urls.splitlines() if url.strip()]
+ if not urls:
+ msg = _('specifying an URL is mandatory')
+ raise ValidationError(source_entity.eid, {role_name('url', 'subject'): msg})
+ return urls
# source initialization / finalization #####################################
@@ -202,20 +202,24 @@
"""method called by the repository once ready to create a new instance"""
pass
- def init(self, activated, source_entity):
+ def init(self, source_entity):
"""method called by the repository once ready to handle request.
`activated` is a boolean flag telling if the source is activated or not.
"""
- if activated:
- self._entity_update(source_entity)
+ source_entity.complete()
+ if source_entity.url:
+ self.urls = self.check_urls(source_entity)
+ else:
+ self.urls = []
PUBLIC_KEYS = ('type', 'uri', 'use-cwuri-as-url')
+
def remove_sensitive_information(self, sourcedef):
"""remove sensitive information such as login / password from source
definition
"""
for key in list(sourcedef):
- if not key in self.PUBLIC_KEYS:
+ if key not in self.PUBLIC_KEYS:
sourcedef.pop(key)
# connections handling #####################################################
@@ -279,7 +283,7 @@
"""add a relation to the source"""
raise NotImplementedError(self)
- def add_relations(self, cnx, rtype, subj_obj_list):
+ def add_relations(self, cnx, rtype, subj_obj_list):
"""add a relations to the source"""
# override in derived classes if you feel you can
# optimize
--- a/cubicweb/server/sources/datafeed.py Mon Mar 20 11:16:11 2017 +0100
+++ b/cubicweb/server/sources/datafeed.py Fri Apr 14 15:40:15 2017 +0200
@@ -34,7 +34,7 @@
from logilab.common.deprecation import deprecated
-from cubicweb import ObjectNotFound, ValidationError, SourceException
+from cubicweb import ObjectNotFound, ValidationError, SourceException, _
from cubicweb.server.sources import AbstractSource
from cubicweb.appobject import AppObject
@@ -102,21 +102,15 @@
"""check configuration of source entity"""
typed_config = super(DataFeedSource, self).check_config(source_entity)
if typed_config['synchronization-interval'] < 60:
- _ = source_entity._cw._
msg = _('synchronization-interval must be greater than 1 minute')
raise ValidationError(source_entity.eid, {'config': msg})
return typed_config
- def _entity_update(self, source_entity):
- super(DataFeedSource, self)._entity_update(source_entity)
+ def init(self, source_entity):
+ super(DataFeedSource, self).init(source_entity)
self.parser_id = source_entity.parser
self.latest_retrieval = source_entity.latest_retrieval
-
- def update_config(self, source_entity, typed_config):
- """update configuration from source entity. `typed_config` is config
- properly typed with defaults set
- """
- super(DataFeedSource, self).update_config(source_entity, typed_config)
+ typed_config = self.config
self.synchro_interval = timedelta(seconds=typed_config['synchronization-interval'])
self.max_lock_lifetime = timedelta(seconds=typed_config['max-lock-lifetime'])
self.http_timeout = typed_config['http-timeout']
@@ -127,10 +121,6 @@
self.use_cwuri_as_url = typed_config['use-cwuri-as-url']
self.public_config['use-cwuri-as-url'] = self.use_cwuri_as_url
- def init(self, activated, source_entity):
- super(DataFeedSource, self).init(activated, source_entity)
- self.parser_id = source_entity.parser
-
def _get_parser(self, cnx, **kwargs):
if self.parser_id is None:
self.warning('No parser defined on source %r', self)
@@ -200,7 +190,7 @@
def _synchronize_source(repo, source_eid, import_log_eid):
with repo.internal_cnx() as cnx:
- source = repo.sources_by_eid[source_eid]
+ source = repo.source_by_eid(source_eid)
source._pull_data(cnx, force, raise_on_error, import_log_eid=import_log_eid)
sync = partial(_synchronize_source, cnx.repo, self.eid, import_log.eid)
--- a/cubicweb/server/sources/ldapfeed.py Mon Mar 20 11:16:11 2017 +0100
+++ b/cubicweb/server/sources/ldapfeed.py Fri Apr 14 15:40:15 2017 +0200
@@ -176,11 +176,24 @@
_conn = None
- def update_config(self, source_entity, typedconfig):
- """update configuration from source entity. `typedconfig` is config
- properly typed with defaults set
- """
- super(LDAPFeedSource, self).update_config(source_entity, typedconfig)
+ def check_urls(self, source_entity):
+ urls = super(LDAPFeedSource, self).check_urls(source_entity)
+
+ if len(urls) > 1:
+ raise ValidationError(source_entity.eid, {'url': _('can only have one url')})
+
+ try:
+ protocol, hostport = urls[0].split('://')
+ except ValueError:
+ raise ValidationError(source_entity.eid, {'url': _('badly formatted url')})
+ if protocol not in PROTO_PORT:
+ raise ValidationError(source_entity.eid, {'url': _('unsupported protocol')})
+
+ return urls
+
+ def init(self, source_entity):
+ super(LDAPFeedSource, self).init(source_entity)
+ typedconfig = self.config
self.authmode = typedconfig['auth-mode']
self._authenticate = getattr(self, '_auth_%s' % self.authmode)
self.cnx_dn = typedconfig['data-cnx-dn']
@@ -208,18 +221,6 @@
self.group_base_filters.append(typedconfig['group-filter'])
self._conn = None
- def _entity_update(self, source_entity):
- super(LDAPFeedSource, self)._entity_update(source_entity)
- if self.urls:
- if len(self.urls) > 1:
- raise ValidationError(source_entity.eid, {'url': _('can only have one url')})
- try:
- protocol, hostport = self.urls[0].split('://')
- except ValueError:
- raise ValidationError(source_entity.eid, {'url': _('badly formatted url')})
- if protocol not in PROTO_PORT:
- raise ValidationError(source_entity.eid, {'url': _('unsupported protocol')})
-
def connection_info(self):
assert len(self.urls) == 1, self.urls
protocol, hostport = self.urls[0].split('://')
--- a/cubicweb/server/sources/native.py Mon Mar 20 11:16:11 2017 +0100
+++ b/cubicweb/server/sources/native.py Fri Apr 14 15:40:15 2017 +0200
@@ -41,7 +41,7 @@
from cubicweb import (UnknownEid, AuthenticationError, ValidationError, Binary,
UniqueTogetherError, UndoTransactionException, ViolatedConstraint)
-from cubicweb import transaction as tx, server, neg_role
+from cubicweb import transaction as tx, server, neg_role, _
from cubicweb.utils import QueryCache
from cubicweb.schema import VIRTUAL_RTYPES
from cubicweb.cwconfig import CubicWebNoAppConfiguration
@@ -229,7 +229,7 @@
return self._create_eid(count)
else:
raise
- except Exception: # WTF?
+ except Exception: # WTF?
cnx.rollback()
self.cnx = None
source.exception('create eid failed in an unforeseen way on SQL statement %s', sql)
@@ -350,10 +350,15 @@
self.create_eid = self.eid_generator.create_eid
def check_config(self, source_entity):
- """check configuration of source entity"""
- if source_entity.host_config:
- msg = source_entity._cw._('the system source has its configuration '
- 'stored on the file-system')
+ if source_entity.config:
+ msg = _("Configuration of the system source goes to "
+ "the 'sources' file, not in the database")
+ raise ValidationError(source_entity.eid, {role_name('config', 'subject'): msg})
+
+ def check_urls(self, source_entity):
+ if source_entity.url:
+ msg = _("Configuration of the system source goes to "
+ "the 'sources' file, not in the database")
raise ValidationError(source_entity.eid, {role_name('config', 'subject'): msg})
def add_authentifier(self, authentifier):
@@ -427,8 +432,8 @@
else:
raise ValueError('Unknown format %r' % format)
- def init(self, activated, source_entity):
- super(NativeSQLSource, self).init(activated, source_entity)
+ def init(self, source_entity):
+ super(NativeSQLSource, self).init(source_entity)
self.init_creating(source_entity._cw.cnxset)
def shutdown(self):
@@ -699,7 +704,7 @@
raise UniqueTogetherError(cnx, cstrname=mo.group(0))
# old sqlite
mo = re.search('columns? (.*) (?:is|are) not unique', arg)
- if mo is not None: # sqlite in use
+ if mo is not None: # sqlite in use
# we left chop the 'cw_' prefix of attribute names
rtypes = [c.strip()[3:]
for c in mo.group(1).split(',')]
@@ -1644,12 +1649,12 @@
self.logger.critical('Restore warning: versions do not match')
new_cubes = db_versions - archive_versions
if new_cubes:
- self.logger.critical('In the db:\n%s', '\n'.join('%s: %s' % (cube, ver)
- for cube, ver in sorted(new_cubes)))
+ self.logger.critical('In the db:\n%s', '\n'.join(
+ '%s: %s' % (cube, ver) for cube, ver in sorted(new_cubes)))
old_cubes = archive_versions - db_versions
if old_cubes:
- self.logger.critical('In the archive:\n%s', '\n'.join('%s: %s' % (cube, ver)
- for cube, ver in sorted(old_cubes)))
+ self.logger.critical('In the archive:\n%s', '\n'.join(
+ '%s: %s' % (cube, ver) for cube, ver in sorted(old_cubes)))
if not ASK.confirm('Versions mismatch: continue anyway ?', False):
raise ValueError('Unable to restore: versions do not match')
table_chunks = {}
--- a/cubicweb/server/test/unittest_datafeed.py Mon Mar 20 11:16:11 2017 +0100
+++ b/cubicweb/server/test/unittest_datafeed.py Fri Apr 14 15:40:15 2017 +0200
@@ -20,6 +20,7 @@
from datetime import timedelta
from contextlib import contextmanager
+from cubicweb import ValidationError
from cubicweb.devtools.testlib import CubicWebTC
from cubicweb.server.sources import datafeed
from cubicweb.dataimport.stores import NoHookRQLObjectStore, MetaGenerator
@@ -50,18 +51,19 @@
store.commit()
with self.temporary_appobjects(AParser):
- if u'ô myfeed' in self.repo.sources_by_uri:
- yield self.repo.sources_by_uri[u'ô myfeed']._get_parser(session)
+ try:
+ source = self.repo.source_by_uri(u'ô myfeed')
+ except ValueError:
+ yield
else:
- yield
+ yield source._get_parser(session)
# vreg.unregister just pops appobjects from their regid entry,
# completely remove the entry to ensure we have no side effect with
# this empty entry.
del self.vreg['parsers'][AParser.__regid__]
def test(self):
- self.assertIn(u'ô myfeed', self.repo.sources_by_uri)
- dfsource = self.repo.sources_by_uri[u'ô myfeed']
+ dfsource = self.repo.source_by_uri(u'ô myfeed')
self.assertNotIn('use_cwuri_as_url', dfsource.__dict__)
self.assertEqual({'type': u'datafeed', 'uri': u'ô myfeed', 'use-cwuri-as-url': True},
dfsource.public_config)
@@ -113,15 +115,16 @@
self.assertEqual('a string', value.geturl())
def test_update_url(self):
- dfsource = self.repo.sources_by_uri[u'ô myfeed']
+ dfsource = self.repo.source_by_uri(u'ô myfeed')
with self.admin_access.repo_cnx() as cnx:
cnx.entity_from_eid(dfsource.eid).cw_set(url=u"http://pouet.com\nhttp://pouet.org")
- self.assertEqual(dfsource.urls, [u'ignored'])
cnx.commit()
+ self.assertEqual(dfsource.urls, [u'ignored'])
+ dfsource = self.repo.source_by_uri(u'ô myfeed')
self.assertEqual(dfsource.urls, [u"http://pouet.com", u"http://pouet.org"])
def test_parser_not_found(self):
- dfsource = self.repo.sources_by_uri[u'ô myfeed']
+ dfsource = self.repo.source_by_uri(u'ô myfeed')
with self.assertLogs('cubicweb.sources.o myfeed', level='ERROR') as cm:
with self.repo.internal_cnx() as cnx:
stats = dfsource.pull_data(cnx, force=True)
@@ -131,6 +134,36 @@
self.assertIn(u'failed to load parser for source "ô myfeed"',
importlog)
+ def test_bad_config(self):
+ with self.admin_access.repo_cnx() as cnx:
+ with self.base_parser(cnx):
+ with self.assertRaises(ValidationError) as cm:
+ cnx.create_entity(
+ 'CWSource', name=u'error', type=u'datafeed', parser=u'testparser',
+ url=u'ignored',
+ config=u'synchronization-interval=1s')
+ self.assertIn('synchronization-interval must be greater than 1 minute',
+ str(cm.exception))
+ cnx.rollback()
+
+ with self.assertRaises(ValidationError) as cm:
+ cnx.create_entity(
+ 'CWSource', name=u'error', type=u'datafeed', parser=u'testparser',
+ url=None,
+ config=u'synchronization-interval=1min')
+ self.assertIn('specifying an URL is mandatory',
+ str(cm.exception))
+ cnx.rollback()
+
+ with self.assertRaises(ValidationError) as cm:
+ cnx.create_entity(
+ 'CWSource', name=u'error', type=u'datafeed', parser=u'testparser',
+ url=u'ignored',
+ config=u'synch-interval=1min')
+ self.assertIn('unknown options synch-interval',
+ str(cm.exception))
+ cnx.rollback()
+
class DataFeedConfigTC(CubicWebTC):
@@ -140,11 +173,12 @@
parser=u'testparser', url=u'ignored',
config=u'use-cwuri-as-url=no')
cnx.commit()
- dfsource = self.repo.sources_by_uri['myfeed']
+ dfsource = self.repo.source_by_uri('myfeed')
self.assertEqual(dfsource.use_cwuri_as_url, False)
self.assertEqual({'type': u'datafeed', 'uri': u'myfeed', 'use-cwuri-as-url': False},
dfsource.public_config)
+
if __name__ == '__main__':
- from logilab.common.testlib import unittest_main
- unittest_main()
+ import unittest
+ unittest.main()
--- a/cubicweb/server/test/unittest_ldapsource.py Mon Mar 20 11:16:11 2017 +0100
+++ b/cubicweb/server/test/unittest_ldapsource.py Fri Apr 14 15:40:15 2017 +0200
@@ -34,7 +34,7 @@
from six import string_types
from six.moves import range
-from cubicweb import AuthenticationError
+from cubicweb import AuthenticationError, ValidationError
from cubicweb.devtools.testlib import CubicWebTC
from cubicweb.devtools.httptest import get_available_port
@@ -111,6 +111,16 @@
pass
+def ldapsource(cnx):
+ return cnx.find('CWSource', type=u'ldapfeed').one()
+
+
+def update_source_config(source, options):
+ config = source.dictconfig
+ config.update(options)
+ source.cw_set(config=u'\n'.join('%s=%s' % x for x in config.items()))
+
+
class LDAPFeedTestBase(CubicWebTC):
test_db_id = 'ldap-feed'
loglevel = 'ERROR'
@@ -147,9 +157,9 @@
cnx.commit()
return cls.pull(cnx)
- @classmethod
- def pull(self, cnx):
- lfsource = cnx.repo.sources_by_uri['ldap']
+ @staticmethod
+ def pull(cnx):
+ lfsource = cnx.repo.source_by_uri('ldap')
stats = lfsource.pull_data(cnx, force=True, raise_on_error=True)
cnx.commit()
return stats
@@ -198,7 +208,7 @@
self._ldapmodify(modcmd)
def _ldapmodify(self, modcmd):
- uri = self.repo.sources_by_uri['ldap'].urls[0]
+ uri = self.repo.source_by_uri('ldap').urls[0]
updatecmd = ['ldapmodify', '-H', uri, '-v', '-x', '-D',
'cn=admin,dc=cubicweb,dc=test', '-w', 'cw']
PIPE = subprocess.PIPE
@@ -217,11 +227,10 @@
def test_wrong_group(self):
with self.admin_access.repo_cnx() as cnx:
- source = cnx.execute('CWSource S WHERE S type="ldapfeed"').get_entity(0, 0)
- config = source.repo_source.check_config(source)
+ source = ldapsource(cnx)
# inject a bogus group here, along with at least a valid one
- config['user-default-group'] = ('thisgroupdoesnotexists', 'users')
- source.repo_source.update_config(source, config)
+ options = {'user-default-group': 'thisgroupdoesnotexists,users'}
+ update_source_config(source, options)
cnx.commit()
# here we emitted an error log entry
source.repo_source.pull_data(cnx, force=True, raise_on_error=True)
@@ -238,7 +247,7 @@
self.assertTrue(entity.modification_date)
def test_authenticate(self):
- source = self.repo.sources_by_uri['ldap']
+ source = self.repo.source_by_uri('ldap')
with self.admin_access.repo_cnx() as cnx:
# ensure we won't be logged against
self.assertRaises(AuthenticationError,
@@ -273,7 +282,7 @@
def test_copy_to_system_source(self):
"make sure we can 'convert' an LDAP user into a system one"
with self.admin_access.repo_cnx() as cnx:
- source = self.repo.sources_by_uri['ldap']
+ source = self.repo.source_by_uri('ldap')
eid = cnx.execute('CWUser X WHERE X login %(login)s', {'login': 'syt'})[0][0]
cnx.execute('SET X cw_source S WHERE X eid %(x)s, S name "system"', {'x': eid})
cnx.commit()
@@ -298,6 +307,33 @@
self.assertIsNotNone(pwd)
self.assertTrue(str(pwd))
+ def test_bad_config(self):
+ with self.admin_access.cnx() as cnx:
+
+ with self.assertRaises(ValidationError) as cm:
+ cnx.create_entity(
+ 'CWSource', name=u'erroneous', type=u'ldapfeed', parser=u'ldapfeed',
+ url=u'ldap.com', config=CONFIG_LDAPFEED)
+ self.assertIn('badly formatted url',
+ str(cm.exception))
+ cnx.rollback()
+
+ with self.assertRaises(ValidationError) as cm:
+ cnx.create_entity(
+ 'CWSource', name=u'erroneous', type=u'ldapfeed', parser=u'ldapfeed',
+ url=u'http://ldap.com', config=CONFIG_LDAPFEED)
+ self.assertIn('unsupported protocol',
+ str(cm.exception))
+ cnx.rollback()
+
+ with self.assertRaises(ValidationError) as cm:
+ cnx.create_entity(
+ 'CWSource', name=u'erroneous', type=u'ldapfeed', parser=u'ldapfeed',
+ url=u'ldap://host1\nldap://host2', config=CONFIG_LDAPFEED)
+ self.assertIn('can only have one url',
+ str(cm.exception))
+ cnx.rollback()
+
class LDAPGeneratePwdTC(LDAPFeedTestBase):
"""
@@ -306,7 +342,7 @@
def setup_database(self):
with self.admin_access.repo_cnx() as cnx:
- lfsource = cnx.repo.sources_by_uri['ldap']
+ lfsource = cnx.repo.source_by_uri('ldap')
del lfsource.user_attrs['userPassword']
super(LDAPGeneratePwdTC, self).setup_database()
@@ -325,16 +361,15 @@
def test_a_filter_inactivate(self):
""" filtered out people should be deactivated, unable to authenticate """
- repo_source = self.repo.sources_by_uri['ldap']
with self.admin_access.repo_cnx() as cnx:
- source = cnx.execute('CWSource S WHERE S type="ldapfeed"').get_entity(0, 0)
- config = repo_source.check_config(source)
+ source = ldapsource(cnx)
# filter with adim's phone number
- config['user-filter'] = u'(%s=%s)' % ('telephoneNumber', '109')
- repo_source.update_config(source, config)
+ options = {'user-filter': '(%s=%s)' % ('telephonenumber', '109')}
+ update_source_config(source, options)
cnx.commit()
with self.repo.internal_cnx() as cnx:
self.pull(cnx)
+ repo_source = self.repo.source_by_uri('ldap')
self.assertRaises(AuthenticationError,
repo_source.authenticate, cnx, 'syt', 'syt')
with self.admin_access.repo_cnx() as cnx:
@@ -345,8 +380,9 @@
'U in_state S, S name N').rows[0][0],
'activated')
# unfilter, syt should be activated again
- config['user-filter'] = u''
- repo_source.update_config(source, config)
+ source = ldapsource(cnx)
+ options = {'user-filter': u''}
+ update_source_config(source, options)
cnx.commit()
with self.repo.internal_cnx() as cnx:
self.pull(cnx)
@@ -365,7 +401,7 @@
self.delete_ldap_entry('uid=syt,ou=People,dc=cubicweb,dc=test')
with self.repo.internal_cnx() as cnx:
self.pull(cnx)
- source = self.repo.sources_by_uri['ldap']
+ source = self.repo.source_by_uri('ldap')
self.assertRaises(AuthenticationError,
source.authenticate, cnx, 'syt', 'syt')
with self.admin_access.repo_cnx() as cnx:
@@ -404,7 +440,7 @@
# test reactivating BY HAND the user isn't enough to
# authenticate, as the native source refuse to authenticate
# user from other sources
- repo_source = self.repo.sources_by_uri['ldap']
+ repo_source = self.repo.source_by_uri('ldap')
self.delete_ldap_entry('uid=syt,ou=People,dc=cubicweb,dc=test')
with self.repo.internal_cnx() as cnx:
self.pull(cnx)
--- a/cubicweb/server/test/unittest_postgres.py Mon Mar 20 11:16:11 2017 +0100
+++ b/cubicweb/server/test/unittest_postgres.py Fri Apr 14 15:40:15 2017 +0200
@@ -52,7 +52,7 @@
def test_eid_range(self):
# concurrent allocation of eid ranges
- source = self.repo.sources_by_uri['system']
+ source = self.repo.system_source
range1 = []
range2 = []
def allocate_eid_ranges(session, target):
--- a/cubicweb/server/test/unittest_undo.py Mon Mar 20 11:16:11 2017 +0100
+++ b/cubicweb/server/test/unittest_undo.py Fri Apr 14 15:40:15 2017 +0200
@@ -378,7 +378,7 @@
txuuid = cnx.commit()
p = cnx.create_entity('Personne', nom=u'louis', fiche=c)
cnx.commit()
- integrityerror = self.repo.sources_by_uri['system'].dbhelper.dbapi_module.IntegrityError
+ integrityerror = self.repo.system_source.dbhelper.dbapi_module.IntegrityError
with self.assertRaises(integrityerror):
cnx.undo_transaction(txuuid)
--- a/cubicweb/skeleton/tox.ini.tmpl Mon Mar 20 11:16:11 2017 +0100
+++ b/cubicweb/skeleton/tox.ini.tmpl Fri Apr 14 15:40:15 2017 +0200
@@ -2,7 +2,6 @@
envlist = py27,py34,flake8
[testenv]
-sitepackages = true
deps =
pytest
commands =
--- a/cubicweb/sobjects/services.py Mon Mar 20 11:16:11 2017 +0100
+++ b/cubicweb/sobjects/services.py Fri Apr 14 15:40:15 2017 +0200
@@ -142,6 +142,6 @@
__select__ = Service.__select__ & match_user_groups('managers')
def call(self, source_eid):
- source = self._cw.repo.sources_by_eid[source_eid]
+ source = self._cw.repo.source_by_eid(source_eid)
result = source.pull_data(self._cw, force=True, async=True)
return result['import_log_eid']
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/test/data/libpython/cubicweb_mycube/ccplugin.py Fri Apr 14 15:40:15 2017 +0200
@@ -0,0 +1,1 @@
+# simply there to test ccplugin module autoloading
--- a/cubicweb/test/unittest_cwconfig.py Mon Mar 20 11:16:11 2017 +0100
+++ b/cubicweb/test/unittest_cwconfig.py Fri Apr 14 15:40:15 2017 +0200
@@ -23,7 +23,7 @@
import sys
import os
import pkgutil
-from os.path import dirname, join, abspath
+from os.path import dirname, join
from pkg_resources import EntryPoint, Distribution
import unittest
@@ -31,7 +31,6 @@
from six import PY3
from logilab.common.modutils import cleanup_sys_modules
-from logilab.common.changelog import Version
from cubicweb.devtools import ApptestConfiguration
from cubicweb.devtools.testlib import BaseTestCase, TemporaryDirectory
@@ -43,9 +42,9 @@
parts = path.split(os.sep)
for i, part in reversed(tuple(enumerate(parts))):
if part.startswith('cubicweb_'):
- return os.sep.join([part[len('cubicweb_'):]] + parts[i+1:])
+ return os.sep.join([part[len('cubicweb_'):]] + parts[i + 1:])
if part.startswith('cubicweb') or part == 'legacy_cubes':
- return os.sep.join(parts[i+1:])
+ return os.sep.join(parts[i + 1:])
raise Exception('duh? %s' % path)
@@ -93,6 +92,21 @@
del sys.modules[module]
+def iter_entry_points(group, name):
+ """Mock pkg_resources.iter_entry_points to yield EntryPoint from
+ packages found in test/data/libpython even though these are not
+ installed.
+ """
+ libpython = CubicWebConfigurationTC.datapath('libpython')
+ prefix = 'cubicweb_'
+ for pkgname in os.listdir(libpython):
+ if not pkgname.startswith(prefix):
+ continue
+ location = join(libpython, pkgname)
+ yield EntryPoint(pkgname[len(prefix):], pkgname,
+ dist=Distribution(location))
+
+
class CubicWebConfigurationTC(BaseTestCase):
@classmethod
@@ -109,27 +123,14 @@
def tearDown(self):
ApptestConfiguration.CUBES_PATH = []
-
- def iter_entry_points(group, name):
- """Mock pkg_resources.iter_entry_points to yield EntryPoint from
- packages found in test/data/libpython even though these are not
- installed.
- """
- libpython = CubicWebConfigurationTC.datapath('libpython')
- prefix = 'cubicweb_'
- for pkgname in os.listdir(libpython):
- if not pkgname.startswith(prefix):
- continue
- location = join(libpython, pkgname)
- yield EntryPoint(pkgname[len(prefix):], pkgname,
- dist=Distribution(location))
+ cleanup_sys_modules([self.datapath('libpython')])
@patch('pkg_resources.iter_entry_points', side_effect=iter_entry_points)
def test_available_cubes(self, mock_iter_entry_points):
expected_cubes = [
- 'card', 'comment', 'email', 'file',
- 'forge', 'localperms',
- 'mycube', 'tag',
+ 'card', 'comment', 'cubicweb_comment', 'cubicweb_email', 'file',
+ 'cubicweb_file', 'cubicweb_forge', 'localperms',
+ 'cubicweb_mycube', 'tag',
]
self._test_available_cubes(expected_cubes)
mock_iter_entry_points.assert_called_once_with(
@@ -142,17 +143,17 @@
# forge depends on email and file and comment
# email depends on file
self.assertEqual(self.config.reorder_cubes(['file', 'email', 'forge']),
- ('forge', 'email', 'file'))
+ ('forge', 'email', 'file'))
self.assertEqual(self.config.reorder_cubes(['email', 'file', 'forge']),
- ('forge', 'email', 'file'))
+ ('forge', 'email', 'file'))
self.assertEqual(self.config.reorder_cubes(['email', 'forge', 'file']),
- ('forge', 'email', 'file'))
+ ('forge', 'email', 'file'))
self.assertEqual(self.config.reorder_cubes(['file', 'forge', 'email']),
- ('forge', 'email', 'file'))
+ ('forge', 'email', 'file'))
self.assertEqual(self.config.reorder_cubes(['forge', 'file', 'email']),
- ('forge', 'email', 'file'))
+ ('forge', 'email', 'file'))
self.assertEqual(self.config.reorder_cubes(('forge', 'email', 'file')),
- ('forge', 'email', 'file'))
+ ('forge', 'email', 'file'))
def test_reorder_cubes_recommends(self):
from cubicweb_comment import __pkginfo__ as comment_pkginfo
@@ -164,20 +165,19 @@
# email recommends comment
# comment recommends file
self.assertEqual(self.config.reorder_cubes(('forge', 'email', 'file', 'comment')),
- ('forge', 'email', 'comment', 'file'))
+ ('forge', 'email', 'comment', 'file'))
self.assertEqual(self.config.reorder_cubes(('forge', 'email', 'comment', 'file')),
- ('forge', 'email', 'comment', 'file'))
+ ('forge', 'email', 'comment', 'file'))
self.assertEqual(self.config.reorder_cubes(('forge', 'comment', 'email', 'file')),
- ('forge', 'email', 'comment', 'file'))
+ ('forge', 'email', 'comment', 'file'))
self.assertEqual(self.config.reorder_cubes(('comment', 'forge', 'email', 'file')),
- ('forge', 'email', 'comment', 'file'))
+ ('forge', 'email', 'comment', 'file'))
finally:
comment_pkginfo.__recommends_cubes__ = {}
def test_expand_cubes(self):
self.assertEqual(self.config.expand_cubes(('email', 'comment')),
- ['email', 'comment', 'file'])
-
+ ['email', 'comment', 'file'])
def test_init_cubes_ignore_pyramid_cube(self):
warning_msg = 'cubicweb-pyramid got integrated into CubicWeb'
@@ -186,6 +186,15 @@
self.assertIn(warning_msg, cm.output[0])
self.assertNotIn('pyramid', self.config._cubes)
+ @patch('pkg_resources.iter_entry_points', side_effect=iter_entry_points)
+ def test_ccplugin_modname(self, mock_iter_entry_points):
+ self.config.load_cwctl_plugins()
+ mock_iter_entry_points.assert_called_once_with(
+ group='cubicweb.cubes', name=None)
+ self.assertNotIn('cubes.mycube.ccplugin', sys.modules, sorted(sys.modules))
+ self.assertIn('cubicweb_mycube.ccplugin', sys.modules, sorted(sys.modules))
+
+
class CubicWebConfigurationWithLegacyCubesTC(CubicWebConfigurationTC):
@classmethod
@@ -225,13 +234,13 @@
self.assertNotEqual(dirname(email.__file__), self.config.CUBES_DIR)
self.config.__class__.CUBES_PATH = [self.custom_cubes_dir]
self.assertEqual(self.config.cubes_search_path(),
- [self.custom_cubes_dir, self.config.CUBES_DIR])
+ [self.custom_cubes_dir, self.config.CUBES_DIR])
self.config.__class__.CUBES_PATH = [self.custom_cubes_dir,
self.config.CUBES_DIR, 'unexistant']
# filter out unexistant and duplicates
self.assertEqual(self.config.cubes_search_path(),
- [self.custom_cubes_dir,
- self.config.CUBES_DIR])
+ [self.custom_cubes_dir,
+ self.config.CUBES_DIR])
self.assertIn('mycube', self.config.available_cubes())
# test cubes python path
self.config.adjust_sys_path()
@@ -273,7 +282,12 @@
self.assertEqual(self.config['allow-email-login'], result)
finally:
del os.environ['CW_ALLOW_EMAIL_LOGIN']
-
+
+ def test_ccplugin_modname(self):
+ self.config.load_cwctl_plugins()
+ self.assertIn('cubes.mycube.ccplugin', sys.modules, sorted(sys.modules))
+ self.assertNotIn('cubicweb_mycube.ccplugin', sys.modules, sorted(sys.modules))
+
class FindPrefixTC(unittest.TestCase):
@@ -350,7 +364,7 @@
def test_upper_candidate_prefix(self):
with TemporaryDirectory() as prefix:
self.make_dirs(prefix, 'share', 'cubicweb')
- self.make_dirs(prefix, 'bell', 'bob', 'share', 'cubicweb')
+ self.make_dirs(prefix, 'bell', 'bob', 'share', 'cubicweb')
file_path = self.make_file(prefix, 'bell', 'toto.py')
self.assertEqual(_find_prefix(file_path), prefix)
@@ -450,9 +464,9 @@
('cubicweb', 'cubicweb.schemas.workflow'),
('cubicweb', 'cubicweb.schemas.Bookmark'),
('bar', 'cubes.bar.schema'),
- ('foo', 'cubes.foo.schema'),
- ('foo', 'cubes.foo.schema.a'),
- ('foo', 'cubes.foo.schema.b'),
+ ('foo', 'cubicweb_foo.schema'),
+ ('foo', 'cubicweb_foo.schema.a'),
+ ('foo', 'cubicweb_foo.schema.b'),
]
# app has schema file
instance_dir, cubes_dir = (
@@ -497,9 +511,9 @@
'cubicweb.entities.sources',
'cubicweb.entities.wfobjs',
'cubes.bar.hooks',
- 'cubes.foo.entities',
- 'cubes.foo.entities.a',
- 'cubes.foo.hooks',
+ 'cubicweb_foo.entities',
+ 'cubicweb_foo.entities.a',
+ 'cubicweb_foo.hooks',
]
# data1 has entities
with temp_config('data1', instance_dir, cubes_dir,
--- a/cubicweb/test/unittest_cwctl.py Mon Mar 20 11:16:11 2017 +0100
+++ b/cubicweb/test/unittest_cwctl.py Fri Apr 14 15:40:15 2017 +0200
@@ -23,14 +23,20 @@
from six import PY2
-from cubicweb.cwconfig import CubicWebConfiguration
+from mock import patch
+
from cubicweb.cwctl import ListCommand
from cubicweb.devtools.testlib import CubicWebTC
from cubicweb.server.migractions import ServerMigrationHelper
+import unittest_cwconfig
+
class CubicWebCtlTC(unittest.TestCase):
+ setUpClass = unittest_cwconfig.CubicWebConfigurationTC.setUpClass
+ tearDownClass = unittest_cwconfig.CubicWebConfigurationTC.tearDownClass
+
def setUp(self):
self.stream = BytesIO() if PY2 else StringIO()
sys.stdout = self.stream
@@ -38,8 +44,12 @@
def tearDown(self):
sys.stdout = sys.__stdout__
- def test_list(self):
+ @patch('pkg_resources.iter_entry_points', side_effect=unittest_cwconfig.iter_entry_points)
+ def test_list(self, mock_iter_entry_points):
ListCommand(None).run([])
+ self.assertNotIn('cubicweb_', self.stream.getvalue())
+ mock_iter_entry_points.assert_called_once_with(
+ group='cubicweb.cubes', name=None)
def test_list_configurations(self):
ListCommand(None).run(['configurations'])
@@ -58,10 +68,11 @@
interactive=False,
# hack so it don't try to load fs schema
schema=1)
- scripts = {'script1.py': list(),
- 'script2.py': ['-v'],
- 'script3.py': ['-vd', '-f', 'FILE.TXT'],
- }
+ scripts = {
+ 'script1.py': list(),
+ 'script2.py': ['-v'],
+ 'script3.py': ['-vd', '-f', 'FILE.TXT'],
+ }
mih.cmd_process_script(join(self.datadir, 'scripts', 'script1.py'),
funcname=None)
for script, args in scripts.items():
--- a/cubicweb/web/request.py Mon Mar 20 11:16:11 2017 +0100
+++ b/cubicweb/web/request.py Fri Apr 14 15:40:15 2017 +0200
@@ -961,8 +961,6 @@
entity_metas = _cnx_func('entity_metas') # XXX deprecated
entity_type = _cnx_func('entity_type')
source_defs = _cnx_func('source_defs')
- get_shared_data = _cnx_func('get_shared_data')
- set_shared_data = _cnx_func('set_shared_data')
# security #################################################################
--- a/cubicweb/web/test/unittest_views_cwsources.py Mon Mar 20 11:16:11 2017 +0100
+++ b/cubicweb/web/test/unittest_views_cwsources.py Fri Apr 14 15:40:15 2017 +0200
@@ -35,7 +35,7 @@
with self.temporary_appobjects(AParser):
source = req.create_entity('CWSource', name=u'ext', type=u'datafeed',
- parser=u'cw.entityxml')
+ parser=u'cw.entityxml', url=u'whatever')
req.cnx.commit()
self.threads = 0
--- a/cubicweb/web/test/unittest_views_editforms.py Mon Mar 20 11:16:11 2017 +0100
+++ b/cubicweb/web/test/unittest_views_editforms.py Fri Apr 14 15:40:15 2017 +0200
@@ -294,6 +294,14 @@
req.find('Directory', name='child1').one().eid)
self.assertIn(expected, source)
+ # No composite entities
+ rset = req.execute('Directory X WHERE X name "dtest4"')
+ source = self.view('deleteconf', rset,
+ template=None, req=req).source.decode('utf-8')
+ expected = ('<li><a href="http://testing.fr/cubicweb/directory/%s">'
+ 'dtest4</a></li>') % (d4.eid,)
+ self.assertIn(expected, source)
+
def test_automatic_edition_formview(self):
with self.admin_access.web_request() as req:
rset = req.execute('CWUser X')
--- a/cubicweb/web/views/editforms.py Mon Mar 20 11:16:11 2017 +0100
+++ b/cubicweb/web/views/editforms.py Fri Apr 14 15:40:15 2017 +0200
@@ -76,13 +76,16 @@
show_composite_skip_rtypes = set('wf_info_for',)
def _iter_composite_entities(self, entity, limit=None):
+ eids = set()
for rdef, role in entity.e_schema.composite_rdef_roles:
if rdef.rtype in self.show_composite_skip_rtypes:
continue
for centity in entity.related(
rdef.rtype, role, limit=limit
).entities():
- yield centity
+ if centity.eid not in eids:
+ eids.add(centity.eid)
+ yield centity
def call(self, onsubmit=None):
"""ask for confirmation before real deletion"""
@@ -120,7 +123,8 @@
content = tags.a(centity.view('textoutofcontext'),
href=centity.absolute_url())
else:
- w(u'<li class="last">%s</li></ul>' % content)
+ if content is not None:
+ w(u'<li class="last">%s</li></ul>' % content)
w(u'</li>\n')
w(u'</ul>\n')
form.render(w=self.w)
--- a/cubicweb/web/webctl.py Mon Mar 20 11:16:11 2017 +0100
+++ b/cubicweb/web/webctl.py Fri Apr 14 15:40:15 2017 +0200
@@ -39,6 +39,17 @@
from shutil import copytree as linkdir
+def rmtreecontent(dst):
+ """Delete the content of the dst directory (but NOT the directory
+ itself)"""
+ for fname in os.listdir(dst):
+ fpath = osp.join(dst, fname)
+ if osp.isfile(fpath) or osp.islink(fpath):
+ os.unlink(fpath)
+ else:
+ rmtree(fpath)
+
+
class WebCreateHandler(CommandHandler):
cmdname = 'create'
@@ -70,7 +81,7 @@
ASK.confirm('Remove existing data directory %s?' % dest))):
raise ExecutionError('Directory %s already exists. '
'Remove it first.' % dest)
- rmtree(dest)
+ rmtreecontent(dest)
config.quick_start = True # notify this is not a regular start
# list all resources (no matter their order)
resources = set()
--- a/debian/changelog Mon Mar 20 11:16:11 2017 +0100
+++ b/debian/changelog Fri Apr 14 15:40:15 2017 +0200
@@ -1,3 +1,9 @@
+cubicweb (3.25.0-1) unstable; urgency=medium
+
+ * New upstream release.
+
+ -- Denis Laxalde <denis.laxalde@logilab.fr> Fri, 14 Apr 2017 15:15:00 +0200
+
cubicweb (3.24.6-1) unstable; urgency=medium
* New upstream release.
--- a/doc/api/pyramid.rst Mon Mar 20 11:16:11 2017 +0100
+++ b/doc/api/pyramid.rst Fri Apr 14 15:40:15 2017 +0200
@@ -3,8 +3,6 @@
.. automodule:: cubicweb.pyramid
- .. autofunction:: make_cubicweb_application
-
.. autofunction:: wsgi_application_from_cwconfig
.. autofunction:: wsgi_application
--- a/doc/api/pyramid/tools.rst Mon Mar 20 11:16:11 2017 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,13 +0,0 @@
-.. _tools_module:
-
-:mod:`cubicweb.pyramid.tools`
------------------------------
-
-.. automodule:: cubicweb.pyramid.tools
-
- .. autofunction:: includeme
-
- .. autofunction:: clone_user
- .. autofunction:: cnx_attach_entity
- .. autofunction:: cached_build_user
- .. autofunction:: clear_cache
--- a/doc/book/pyramid/index.rst Mon Mar 20 11:16:11 2017 +0100
+++ b/doc/book/pyramid/index.rst Fri Apr 14 15:40:15 2017 +0200
@@ -1,25 +1,29 @@
-================
-Pyramid Cubicweb
-================
+Pyramid
+=======
-Pyramid Cubicweb is an attempt to rebase the CubicWeb framework on pyramid.
+:mod:`cubicweb.pyramid` provides a way to bind a CubicWeb data repository to a
+Pyramid WSGI web application.
It can be used in two different ways:
-- Within CubicWeb, through the 'pyramid' cube and the
- :ref:`pyramid command <cubicweb-ctl_pyramid>`.
- In this mode, the Pyramid CubicWeb replaces some parts of
- CubicWeb and make the pyramid api available to the cubes.
+- Through the :ref:`pyramid command <cubicweb-ctl_pyramid>` or through
+ :func:`cubicweb.pyramid.wsgi_application` WSGI application factory, one can
+ run an ``all-in-one`` CubicWeb instance with the web part served by a
+ Pyramid application. This is referred to as the *backwards compatible
+ mode*.
-- Within a pyramid application, it provides easy access to a CubicWeb
- instance and registry.
+- Through the ``pyramid`` configuration type, one can set up a CubicWeb
+ instance whose repository can be used from within a Pyramid application.
+ Such an instance may be launched through ``pserve`` or any WSGI server as
+ would any plain Pyramid application.
+
Narrative Documentation
=======================
.. toctree::
:maxdepth: 2
-
+
quickstart
ctl
settings
--- a/doc/book/pyramid/quickstart.rst Mon Mar 20 11:16:11 2017 +0100
+++ b/doc/book/pyramid/quickstart.rst Fri Apr 14 15:40:15 2017 +0200
@@ -6,37 +6,53 @@
Prerequites
-----------
-- Install everything (here with pip, possibly in a virtualenv)::
+Install the *pyramid* flavour of CubicWeb (here with pip, possibly in a
+virtualenv):
- pip install pyramid-cubicweb cubicweb-pyramid pyramid_debugtoolbar
+::
-- Have a working Cubicweb instance, for example:
+ pip install cubicweb[pyramid]
- - Make sure CubicWeb is in user mode::
+Instance creation and running
+-----------------------------
- export CW_MODE=user
+In *backwards compatible* mode
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- - Create a CubicWeb instance, and install the 'pyramid' cube on it (see
- :ref:`configenv` for more details on this step)::
+In this mode, you can simply create an instance of kind ``all-in-one`` with
+the ``cubicweb-ctl create`` command. You'll then need to add a ``pyramid.ini``
+file in your instance directory, see :ref:`pyramid_settings` for details about the
+content of this file.
- cubicweb-ctl create pyramid myinstance
+Start the instance with the :ref:`'pyramid' command <cubicweb-ctl_pyramid>`
+instead of 'start':
+
+::
+
+ cubicweb-ctl pyramid --debug myinstance
-- Edit your ``~/etc/cubicweb.d/myinstance/all-in-one.conf`` and set values for
- :confval:`pyramid-auth-secret` and :confval:`pyramid-session-secret`.
- *required if cubicweb.pyramid.auth and pyramid_cubiweb.session get
- included, which is the default*
+
+Without *backwards compatibility*
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+In this mode, you can create an instance of kind ``pyramid`` as follows:
+
+::
-From CubicWeb
--------------
+ cubicweb-ctl create -c pyramid <cube_name> <instance_name>
-- Start the instance with the :ref:`'pyramid' command <cubicweb-ctl_pyramid>`
- instead of 'start'::
+This will bootstrap a ``development.ini`` file typical of a Pyramid
+application in the instance's directory. The new instance may then be launched
+by any WSGI server, for instance with pserve_:
- cubicweb-ctl pyramid --debug myinstance
+::
+
+ pserve etc/cubicweb.d/<instance_name>/development.ini
+
In a pyramid application
-------------------------
+~~~~~~~~~~~~~~~~~~~~~~~~
- Create a pyramid application
@@ -55,18 +71,8 @@
cubicweb.instance = myinstance
-- Configure the base-url and https-url in all-in-one.conf to match the ones
- of the pyramid configuration (this is a temporary limitation).
-
-
-Usage with pserve
------------------
-
-To run a Pyramid application using pserve_:
-
-::
-
- pserve /path/to/development.ini instance=<appid>
+- Configure the base-url in all-in-one.conf to match the ones of the pyramid
+ configuration (this is a temporary limitation).
.. _pserve: \
--- a/doc/book/pyramid/settings.rst Mon Mar 20 11:16:11 2017 +0100
+++ b/doc/book/pyramid/settings.rst Fri Apr 14 15:40:15 2017 +0200
@@ -13,12 +13,13 @@
.. _pyramid_settings:
-Pyramid Settings
-----------------
+Pyramid Settings file
+---------------------
-If a ``pyramid.ini`` file is found in the instance home directory (where the
-``all-in-one.conf`` file is), its ``[main]`` section will be read and used as the
-``settings`` of the pyramid Configurator.
+In *backwards compatibility* mode, Pyramid settings will be looked for in a
+``pyramid.ini`` file in the instance home directory (where the
+``all-in-one.conf`` file is), its ``[main]`` section will be read and used as
+the ``settings`` of the pyramid Configurator.
This configuration file is almost the same as the one read by ``pserve``, which
allow to easily add any pyramid extension and configure it.
@@ -43,6 +44,14 @@
redis.sessions.host = mywheezy
+
+Without *backwards compatibility* a standard ``development.ini`` file can be
+used with any useful CubicWeb-specific settings added.
+
+
+Pyramid CubicWeb configuration entries
+--------------------------------------
+
The Pyramid CubicWeb specific configuration entries are:
.. confval:: cubicweb.instance (string)
--- a/doc/changes/3.25.rst Mon Mar 20 11:16:11 2017 +0100
+++ b/doc/changes/3.25.rst Fri Apr 14 15:40:15 2017 +0200
@@ -10,7 +10,85 @@
.. _pgbouncer: https://pgbouncer.github.io/
+* In `deleteconf` view (confirmation before deletion), the list of first-level
+ composite objects that would be deleted along with the primary entity is
+ displayed (01eeea97e549).
+
+* The ``cubicweb.pyramid`` module now provides a Paste application factory
+ registered as an entry point named ``pyramid_main`` and that can be used to
+ run a Pyramid WSGI application bound to a CubicWeb repository.
+
+* A new configuration type ``pyramid`` has been added to create CubicWeb's
+ instances (through ``cubicweb-ctl create -c pyramid <basecube> <appid>``).
+ This configuration bootstraps a CubicWeb instance that is essentially a
+ repository plus the minimal setup to run a Pyramid WSGI application on top
+ of it. Noticeably, it does not ship all *web* configuration but rather
+ relies on configuration declared in a ``development.ini`` file for any
+ Pyramid application.
* A new way to declare workflows as simple data structure (dict/list) has been
introduced. Respective utility functions live in ``cubicweb.wfutils``
module. This handles both the creation and migration of workflows.
+
+* A new IDublinCore adapter has been introduced to control the generation of
+ Dublin Core metadata that are used in several base views.
+
+* It is now possible to *derive* rtags using their ``derive`` method
+ (0849a5eb57b8). Derived rtags keep a reference to the original rtag and only
+ hold custom rules, allowing changes which are done in the original rtag after
+ derivation to be still considered.
+
+* A new ``cubicweb-ctl scheduler <appid>`` command has been introduced to run
+ background and periodic tasks of the repository (previously called *looping
+ tasks*). In a production environment, a process with this command should be
+ run alongside with a WSGI server process (possibly running multiple
+ processes itself).
+
+
+Backwards incompatible changes
+------------------------------
+
+* As a consequence of the replacement of the old looping tasks manager by a
+ scheduler, all cubicweb-ctl's "start" commands (i.e. ``start``, ``pyramid``,
+ ``wsgi``) do not start repository *looping tasks manager* anymore, nor do
+ they start the scheduler. Site administrators are thus expected to start
+ this scheduler as a separate process. Also, registering looping tasks (i.e.
+ calling ``repo.looping_tasks()``) is a no-op when the repository has no
+ scheduler set; a warning is issued in such cases. Application developers may
+ rely on repository's ``has_scheduler`` method to determine if they should
+ register a looping task or not.
+
+* In ``cubicweb.pyramid``, function ``make_cubicweb_application`` got renamed
+ into ``config_from_cwconfig`` (950ce7d9f642).
+
+* Several cleanups in repository's session management have been conducted
+ resulting from changes introduced in 3.19 release. Among others, the
+ ``cubicweb.server.session.Session`` class has been dropped, and request
+ ``session`` attribute is now tied to a web session whose implementation
+ depends on the front-end used (twisted or pyramid). Hence this attribute
+ should not be accessed from "repository side" code (e.g. hooks or operations)
+ and has lost some of its former attributes like ``repo``, which used to
+ reference the repository instance. Due to this, you no longer have access
+ to the session's data through the connection, which led to deprecation of the
+ ``data`` attribute and removal of ``get_shared_data`` and ``set_shared_data``
+ methods which are deprecated since 3.19.
+
+* Support for 'https-url' configuration option has been removed
+ (4516c3956d46).
+
+* The `next_tabindex` method of request class has been removed (011730a4af73).
+
+* The `cubicweb.hook.logstats.start` hook was dropped because it's looping
+ task would not be run in a web instance (see first point about repository
+ scheduler).
+
+* ``uicfg`` rules to hide the opposite relation of an inlined form are no longer
+ automatically added, because this was actually done randomly and was thus not
+ reliable; you'll have to add them manually:
+
+ ::
+
+ autoform_section.tag_subject_of(('CWUser', 'use_email', 'EmailAddress'),
+ 'main', 'inlined')
+ autoform_section.tag_object_of(('CWUser', 'use_email', 'EmailAddress'),
+ 'inlined', 'hidden')
--- a/flake8-ok-files.txt Mon Mar 20 11:16:11 2017 +0100
+++ b/flake8-ok-files.txt Fri Apr 14 15:40:15 2017 +0200
@@ -26,12 +26,15 @@
cubicweb/etwist/request.py
cubicweb/etwist/service.py
cubicweb/ext/__init__.py
+cubicweb/hooks/syncsources.py
cubicweb/hooks/test/data/hooks.py
cubicweb/hooks/test/unittest_notificationhooks.py
cubicweb/hooks/test/unittest_security.py
cubicweb/hooks/test/unittest_syncsession.py
+cubicweb/hooks/test/unittest_syncsources.py
cubicweb/pylintext.py
cubicweb/repoapi.py
+cubicweb/rqlrewrite.py
cubicweb/rset.py
cubicweb/rtags.py
cubicweb/server/__init__.py
@@ -51,6 +54,7 @@
cubicweb/server/test/data-migractions/migratedapp/__init__.py
cubicweb/server/test/data-schema2sql/__init__.py
cubicweb/server/test/unittest_checkintegrity.py
+cubicweb/server/test/unittest_datafeed.py
cubicweb/server/test/unittest_ldapsource.py
cubicweb/server/test/unittest_serverctl.py
cubicweb/server/test/unittest_session.py
@@ -61,6 +65,8 @@
cubicweb/sobjects/test/unittest_notification.py
cubicweb/sobjects/test/unittest_register_user.py
cubicweb/sobjects/textparsers.py
+cubicweb/sources/__init__.py
+cubicweb/sources/native.py
cubicweb/test/data/libpython/cubicweb_comment/__init__.py
cubicweb/test/data/libpython/cubicweb_comment/__pkginfo__.py
cubicweb/test/data/libpython/cubicweb_email/entities.py
@@ -83,6 +89,8 @@
cubicweb/test/data/server_migration/bootstrapmigration_repository.py
cubicweb/test/data/views.py
cubicweb/test/unittest_binary.py
+cubicweb/test/unittest_cwconfig.py
+cubicweb/test/unittest_cwctl.py
cubicweb/test/unittest_mail.py
cubicweb/test/unittest_repoapi.py
cubicweb/test/unittest_req.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/requirements/doc.txt Fri Apr 14 15:40:15 2017 +0200
@@ -0,0 +1,1 @@
+.[pyramid]
--- a/tox.ini Mon Mar 20 11:16:11 2017 +0100
+++ b/tox.ini Fri Apr 14 15:40:15 2017 +0200
@@ -26,11 +26,13 @@
commands = /bin/sh -c "flake8 `xargs -a {toxinidir}/flake8-ok-files.txt`"
[testenv:doc]
+skip_install = true
changedir = doc
deps =
sphinx
+ -r{toxinidir}/requirements/doc.txt
commands =
- {envpython} -c 'import sphinx; sphinx.main()' -b html -d {envtmpdir}/doctrees . {envtmpdir}/html
+ {envpython} -m sphinx -b html -d {envtmpdir}/doctrees . {envtmpdir}/html
[testenv:check-manifest]
skip_install = true