--- a/.hgtags Mon Jan 23 15:35:19 2017 +0100
+++ b/.hgtags Fri Jan 27 16:26:09 2017 +0100
@@ -584,3 +584,12 @@
da52fda6f15b76823361bb64c9b69686707137c2 3.24.1
da52fda6f15b76823361bb64c9b69686707137c2 debian/3.24.1-1
da52fda6f15b76823361bb64c9b69686707137c2 centos/3.24.1-1
+29607751378210609cd716b5c28f42e7431ba73c 3.24.2
+29607751378210609cd716b5c28f42e7431ba73c debian/3.24.2-1
+29607751378210609cd716b5c28f42e7431ba73c centos/3.24.2-1
+bb5904cd284ee08f74bc655834b493438f8e819f 3.24.3
+bb5904cd284ee08f74bc655834b493438f8e819f debian/3.24.3-1
+bb5904cd284ee08f74bc655834b493438f8e819f centos/3.24.3-1
+35fd54c0065d622647f2d1fffc9874a455a1b9be 3.24.4
+35fd54c0065d622647f2d1fffc9874a455a1b9be debian/3.24.4-1
+35fd54c0065d622647f2d1fffc9874a455a1b9be centos/3.24.4-1
--- a/cubicweb.spec Mon Jan 23 15:35:19 2017 +0100
+++ b/cubicweb.spec Fri Jan 27 16:26:09 2017 +0100
@@ -8,7 +8,7 @@
%{!?python_sitelib: %define python_sitelib %(%{__python} -c "from distutils.sysconfig import get_python_lib; print get_python_lib()")}
Name: cubicweb
-Version: 3.24.1
+Version: 3.24.4
Release: logilab.1%{?dist}
Summary: CubicWeb is a semantic web application framework
Source0: https://pypi.python.org/packages/source/c/cubicweb/cubicweb-%{version}.tar.gz
--- a/cubicweb/_exceptions.py Mon Jan 23 15:35:19 2017 +0100
+++ b/cubicweb/_exceptions.py Fri Jan 27 16:26:09 2017 +0100
@@ -21,7 +21,7 @@
from warnings import warn
-from six import PY3, text_type
+from six import PY2, text_type
from logilab.common.decorators import cachedproperty
@@ -40,7 +40,13 @@
return self.msg
else:
return u' '.join(text_type(arg) for arg in self.args)
- __str__ = __unicode__ if PY3 else lambda self: self.__unicode__().encode('utf-8')
+
+ def __str__(self):
+ res = self.__unicode__()
+ if PY2:
+ res = res.encode('utf-8')
+ return res
+
class ConfigurationError(CubicWebException):
"""a misconfiguration error"""
--- a/cubicweb/cwconfig.py Mon Jan 23 15:35:19 2017 +0100
+++ b/cubicweb/cwconfig.py Fri Jan 27 16:26:09 2017 +0100
@@ -201,7 +201,8 @@
from logilab.common.deprecation import deprecated
from logilab.common.logging_ext import set_log_methods, init_log
from logilab.common.configuration import (Configuration, Method,
- ConfigurationMixIn, merge_options)
+ ConfigurationMixIn, merge_options,
+ _validate as lgc_validate)
from cubicweb import (CW_SOFTWARE_ROOT, CW_MIGRATION_MAP,
ConfigurationError, Binary, _)
@@ -413,7 +414,7 @@
mode = _forced_mode or 'system'
_CUBES_DIR = join(_INSTALL_PREFIX, 'share', 'cubicweb', 'cubes')
- CUBES_DIR = abspath(os.environ.get('CW_CUBES_DIR', _CUBES_DIR))
+ CUBES_DIR = realpath(abspath(os.environ.get('CW_CUBES_DIR', _CUBES_DIR)))
CUBES_PATH = os.environ.get('CW_CUBES_PATH', '').split(os.pathsep)
options = (
@@ -456,7 +457,11 @@
def __getitem__(self, key):
"""Get configuration option, by first looking at environmnent."""
file_value = super(CubicWebNoAppConfiguration, self).__getitem__(key)
- return option_value_from_env(key, file_value)
+ value = option_value_from_env(key, file_value)
+ if value is not None:
+ option_def = self.get_option_def(key)
+ value = lgc_validate(value, option_def)
+ return value
# static and class methods used to get instance independant resources ##
@staticmethod
@@ -544,7 +549,7 @@
@classmethod
def cubes_search_path(cls):
"""return the path of directories where cubes should be searched"""
- path = [abspath(normpath(directory)) for directory in cls.CUBES_PATH
+ path = [realpath(abspath(normpath(directory))) for directory in cls.CUBES_PATH
if directory.strip() and exists(directory.strip())]
if not cls.CUBES_DIR in path and exists(cls.CUBES_DIR):
path.append(cls.CUBES_DIR)
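
A minimal sketch of the environment-override behaviour the validation above is protecting, following the CW_<OPTION-NAME> convention exercised by the unittest_cwconfig.py tests further down; env_option_name and option_from_env are simplified stand-ins for option_value_from_env and the lgc_validate call, not the real implementations:

import os


def env_option_name(key):
    # assumed CW_<OPTION> naming, matching the CW_BASE_URL / CW_CONNECTIONS_POOL_SIZE
    # variables used in the tests below
    return 'CW_' + key.upper().replace('-', '_')


def option_from_env(key, file_value, coerce):
    # prefer the environment over the file value and coerce the raw string,
    # which is what the lgc_validate() call above is responsible for
    raw = os.environ.get(env_option_name(key))
    if raw is None:
        return file_value
    return coerce(raw)


os.environ['CW_CONNECTIONS_POOL_SIZE'] = '6'
print(option_from_env('connections-pool-size', 4, int))  # -> 6 as an int, not '6'
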
--- a/cubicweb/devtools/testlib.py Mon Jan 23 15:35:19 2017 +0100
+++ b/cubicweb/devtools/testlib.py Fri Jan 27 16:26:09 2017 +0100
@@ -413,11 +413,6 @@
config.global_set_option('sender-addr', 'cubicweb-test@logilab.fr')
# default_base_url on config class isn't enough for TestServerConfiguration
config.global_set_option('base-url', config.default_base_url())
- # web resources
- try:
- config.global_set_option('embed-allowed', re.compile('.*'))
- except Exception: # not in server only configuration
- pass
@property
def vreg(self):
@@ -1290,7 +1285,7 @@
self._test_action(action)
for box in self.list_boxes_for(rset):
w = [].append
- with self.subTest(self._testname(rset, box.__regid__, 'box')):
+ with self.subTest(name=self._testname(rset, box.__regid__, 'box')):
box.render(w)
@staticmethod
--- a/cubicweb/hooks/syncschema.py Mon Jan 23 15:35:19 2017 +0100
+++ b/cubicweb/hooks/syncschema.py Fri Jan 27 16:26:09 2017 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2016 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2017 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -778,7 +778,7 @@
syssource.update_rdef_unique(self.cnx, self.rdef)
-class CWConstraintAddOp(CWConstraintDelOp):
+class CWConstraintAddOp(hook.LateOperation, CWConstraintDelOp):
"""actually update constraint of a relation definition"""
entity = None # make pylint happy
@@ -886,12 +886,10 @@
class MemSchemaCWRTypeAdd(MemSchemaOperation):
- """actually add the relation type to the instance's schema"""
+ """Revert addition of the relation type from the instance's schema if something goes wrong.
+ """
rtypedef = None # make pylint happy
- def precommit_event(self):
- self.cnx.vreg.schema.add_relation_type(self.rtypedef)
-
def revertprecommit_event(self):
self.cnx.vreg.schema.del_relation_type(self.rtypedef.name)
@@ -1088,44 +1086,45 @@
MemSchemaCWRTypeDel(self._cw, rtype=name)
-class AfterAddCWComputedRTypeHook(SyncSchemaHook):
- """after a CWComputedRType entity has been added:
- * register an operation to add the relation type to the instance's
- schema on commit
-
- We don't know yet this point if a table is necessary
- """
- __regid__ = 'syncaddcwcomputedrtype'
- __select__ = SyncSchemaHook.__select__ & is_instance('CWComputedRType')
- events = ('after_add_entity',)
+class AfterAddCWRTypeHook(SyncSchemaHook):
+ """After a CWRType entity has been added, register an operation to add the
+ relation type to the instance's schema on commit.
- def __call__(self):
- entity = self.entity
- rtypedef = ybo.ComputedRelation(name=entity.name,
- eid=entity.eid,
- rule=entity.rule)
- MemSchemaCWRTypeAdd(self._cw, rtypedef=rtypedef)
-
-
-class AfterAddCWRTypeHook(SyncSchemaHook):
- """after a CWRType entity has been added:
- * register an operation to add the relation type to the instance's
- schema on commit
-
- We don't know yet this point if a table is necessary
+    We don't know yet at this point whether a table is necessary; that will
+    depend on further additions of relation definitions.
"""
__regid__ = 'syncaddcwrtype'
__select__ = SyncSchemaHook.__select__ & is_instance('CWRType')
events = ('after_add_entity',)
def __call__(self):
+ rtypedef = self.rtype_def()
+        # modify the instance's schema now since we'll usually need the type definition to do
+        # further things (e.g. add relation defs of this type), but register an operation to
+        # revert this if necessary
+ self._cw.vreg.schema.add_relation_type(rtypedef)
+ MemSchemaCWRTypeAdd(self._cw, rtypedef=rtypedef)
+
+ def rtype_def(self):
entity = self.entity
- rtypedef = ybo.RelationType(name=entity.name,
- description=entity.description,
- inlined=entity.cw_edited.get('inlined', False),
- symmetric=entity.cw_edited.get('symmetric', False),
- eid=entity.eid)
- MemSchemaCWRTypeAdd(self._cw, rtypedef=rtypedef)
+ return ybo.RelationType(name=entity.name,
+ description=entity.description,
+ inlined=entity.cw_edited.get('inlined', False),
+ symmetric=entity.cw_edited.get('symmetric', False),
+ eid=entity.eid)
+
+
+class AfterAddCWComputedRTypeHook(AfterAddCWRTypeHook):
+ """After a CWComputedRType entity has been added, register an operation to
+ add the relation type to the instance's schema on commit.
+ """
+ __select__ = SyncSchemaHook.__select__ & is_instance('CWComputedRType')
+
+ def rtype_def(self):
+ entity = self.entity
+ return ybo.ComputedRelation(name=entity.name,
+ eid=entity.eid,
+ rule=entity.rule)
class BeforeUpdateCWRTypeHook(SyncSchemaHook):
--- a/cubicweb/misc/migration/3.23.0_Any.py Mon Jan 23 15:35:19 2017 +0100
+++ b/cubicweb/misc/migration/3.23.0_Any.py Fri Jan 27 16:26:09 2017 +0100
@@ -16,18 +16,18 @@
SELECT DISTINCT tc.table_name, tc.constraint_name
FROM information_schema.table_constraints tc,
information_schema.key_column_usage kc
- WHERE tc.constraint_type IN 'PRIMARY KEY'
+ WHERE tc.constraint_type = 'PRIMARY KEY'
AND kc.table_name = tc.table_name
AND kc.table_name LIKE '%\_relation'
AND kc.table_schema = tc.table_schema
AND kc.constraint_name = tc.constraint_name;
'''):
- sql('ALTER TABLE %s DROP CONSTRAINT' % (table, cstr))
+ sql('ALTER TABLE %s DROP CONSTRAINT %s' % (table, cstr))
for table, cstr in sql("""
SELECT DISTINCT table_name, constraint_name FROM information_schema.constraint_column_usage
WHERE table_name LIKE 'cw\_%' AND constraint_name LIKE '%\_key'"""):
- sql("ALTER TABLE %(table)s DROP CONSTRAINT %(cstr)s" % locals())
+ sql('ALTER TABLE %s DROP CONSTRAINT %s' % (table, cstr))
for rschema in schema.relations():
if rschema.rule or rschema in PURE_VIRTUAL_RTYPES:
@@ -36,10 +36,6 @@
for rdef in rschema.rdefs.values():
table = 'cw_{0}'.format(rdef.subject)
column = 'cw_{0}'.format(rdef.rtype)
- if any(isinstance(cstr, UniqueConstraint) for cstr in rdef.constraints):
- old_name = '%s_%s_key' % (table.lower(), column.lower())
- sql('ALTER TABLE %s DROP CONSTRAINT %s' % (table, old_name))
- source.create_index(cnx, table, column, unique=True)
if rschema.inlined or rdef.indexed:
old_name = '%s_%s_idx' % (table.lower(), column.lower())
sql('DROP INDEX IF EXISTS %s' % old_name)
--- a/cubicweb/misc/migration/3.24.0_Any.py Mon Jan 23 15:35:19 2017 +0100
+++ b/cubicweb/misc/migration/3.24.0_Any.py Fri Jan 27 16:26:09 2017 +0100
@@ -1,3 +1,6 @@
+from base64 import b64decode
+
+
# Check the CW versions and add the entity only if needed ?
add_entity_type('CWSession')
rql('DELETE CWProperty X WHERE X pkey "system.version.pyramid"',
@@ -5,6 +8,12 @@
sql('DROP TABLE moved_entities')
+# before dropping extid and asource, make cwuri coherent with extid for ldapfeed entities
+for eid, etype, encoded_extid in sql(
+    "SELECT eid, type, extid FROM entities, cw_CWSource "
+    "WHERE cw_CWSource.cw_name=entities.asource AND cw_CWSource.cw_type='ldapfeed'"):
+    sql('UPDATE cw_{} SET cw_cwuri=%(cwuri)s WHERE cw_eid=%(eid)s'.format(etype),
+        {'eid': eid, 'cwuri': b64decode(encoded_extid)})
 sql('ALTER TABLE entities DROP COLUMN asource')
 sql('ALTER TABLE entities DROP COLUMN extid')
sql('DROP INDEX entities_type_idx')
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cubicweb/misc/migration/3.24.4_Any.py Fri Jan 27 16:26:09 2017 +0100
@@ -0,0 +1,38 @@
+
+from yams.constraints import UniqueConstraint
+from cubicweb.schema import PURE_VIRTUAL_RTYPES
+from cubicweb.server.checkintegrity import expected_indexes, database_indexes
+
+source = repo.system_source
+
+for rschema in schema.relations():
+ if rschema.rule or rschema in PURE_VIRTUAL_RTYPES:
+ continue
+ if rschema.final or rschema.inlined:
+ for rdef in rschema.rdefs.values():
+ table = 'cw_{0}'.format(rdef.subject)
+ column = 'cw_{0}'.format(rdef.rtype)
+ if any(isinstance(cstr, UniqueConstraint) for cstr in rdef.constraints):
+ source.create_index(cnx, table, column, unique=True)
+ commit()
+ if rschema.inlined or rdef.indexed:
+ source.create_index(cnx, table, column)
+ commit()
+
+schema_indices = expected_indexes(cnx)
+db_indices = database_indexes(cnx)
+for additional_index in (db_indices - set(schema_indices)):
+ try:
+ sql('DROP INDEX %s' % additional_index)
+ commit()
+    except Exception:
+ # ignore if this is not an index but a constraint
+ pass
+
+if source.dbdriver == 'postgres' and 'appears_words_idx' not in db_indices:
+ sql('CREATE INDEX appears_words_idx ON appears USING gin(words)')
+ db_indices.add('appears_words_idx')
+
+for missing_index in (set(schema_indices) - db_indices):
+ print('WARNING: missing index', missing_index)
+
--- a/cubicweb/pyramid/core.py Mon Jan 23 15:35:19 2017 +0100
+++ b/cubicweb/pyramid/core.py Fri Jan 27 16:26:09 2017 +0100
@@ -157,7 +157,7 @@
self.form[param] = val
def relative_path(self, includeparams=True):
- path = self._request.path[1:]
+ path = self._request.path_info[1:]
if includeparams and self._request.query_string:
return '%s?%s' % (path, self._request.query_string)
return path
--- a/cubicweb/pyramid/login.py Mon Jan 23 15:35:19 2017 +0100
+++ b/cubicweb/pyramid/login.py Fri Jan 27 16:26:09 2017 +0100
@@ -81,5 +81,13 @@
def includeme(config):
""" Create the 'login' route ('/login') and load this module views"""
+ cwconfig = config.registry['cubicweb.config']
config.add_route('login', '/login')
+ if cwconfig.get('language-mode') == 'url-prefix':
+ config.add_route('login-lang', '/{lang}/login')
+ config.add_view(login_already_loggedin, route_name='login-lang',
+ effective_principals=security.Authenticated)
+ config.add_view(login_form, route_name='login-lang')
+ config.add_view(login_password_login, route_name='login-lang',
+ request_param=('__login', '__password'))
config.scan('cubicweb.pyramid.login')
--- a/cubicweb/pyramid/session.py Mon Jan 23 15:35:19 2017 +0100
+++ b/cubicweb/pyramid/session.py Fri Jan 27 16:26:09 2017 +0100
@@ -28,6 +28,8 @@
If request has an attached connection, its security will be deactived in the context manager's
scope, else a new internal connection is returned.
+
+    This should only be used for read-only queries, not if you intend to commit or roll back data.
"""
cnx = request.cw_cnx
if cnx is None:
@@ -134,7 +136,7 @@
data = Binary(pickle.dumps(dict(self)))
sessioneid = self.sessioneid
- with unsafe_cnx_context_manager(self.request) as cnx:
+ with self.request.registry['cubicweb.repository'].internal_cnx() as cnx:
if not sessioneid:
session = cnx.create_entity(
'CWSession', cwsessiondata=data)
--- a/cubicweb/pyramid/test/test_login.py Mon Jan 23 15:35:19 2017 +0100
+++ b/cubicweb/pyramid/test/test_login.py Fri Jan 27 16:26:09 2017 +0100
@@ -4,7 +4,24 @@
from cubicweb.pyramid.test import PyramidCWTest
+class LoginTestLangUrlPrefix(PyramidCWTest):
+
+ @classmethod
+ def setUpClass(cls):
+ super(LoginTestLangUrlPrefix, cls).setUpClass()
+ cls.config.global_set_option('language-mode', 'url-prefix')
+
+ def test_login_password_login_lang_prefix(self):
+ res = self.webapp.post('/fr/login', {
+ '__login': self.admlogin, '__password': self.admpassword})
+ self.assertEqual(res.status_int, 303)
+
+ res = self.webapp.get('/fr/login')
+ self.assertEqual(res.status_int, 303)
+
+
class LoginTest(PyramidCWTest):
+
def test_login_form(self):
res = self.webapp.get('/login')
self.assertIn('__login', res.text)
--- a/cubicweb/server/checkintegrity.py Mon Jan 23 15:35:19 2017 +0100
+++ b/cubicweb/server/checkintegrity.py Fri Jan 27 16:26:09 2017 +0100
@@ -411,7 +411,7 @@
cnx.commit()
-SYSTEM_INDICES = {
+SYSTEM_INDEXES = {
# see cw/server/sources/native.py
'transactions_tx_time_idx': ('transactions', 'tx_time'),
'transactions_tx_user_idx': ('transactions', 'tx_user'),
@@ -428,24 +428,20 @@
}
-def check_indexes(cnx):
- """Check indexes of a system database: output missing expected indexes as well as unexpected ones.
+def expected_indexes(cnx):
+ """Return a dictionary describing indexes expected by the schema {index name: (table, column)}.
- Return 0 if there is no differences, else 1.
+ This doesn't include primary key indexes.
"""
source = cnx.repo.system_source
dbh = source.dbhelper
schema = cnx.repo.schema
- schema_indices = SYSTEM_INDICES.copy()
+ schema_indexes = SYSTEM_INDEXES.copy()
if source.dbdriver == 'postgres':
- schema_indices.update({'appears_words_idx': ('appears', 'words')})
- index_filter = lambda idx: not (idx.startswith('pg_') or idx.endswith('_pkey'))
+ schema_indexes.update({'appears_words_idx': ('appears', 'words')})
else:
- schema_indices.update({'appears_uid': ('appears', 'uid'),
+ schema_indexes.update({'appears_uid': ('appears', 'uid'),
'appears_word_id': ('appears', 'word_id')})
- index_filter = lambda idx: not idx.startswith('sqlite_')
- db_indices = set(idx for idx in dbh.list_indices(cnx.cnxset.cu)
- if index_filter(idx))
for rschema in schema.relations():
if rschema.rule or rschema in PURE_VIRTUAL_RTYPES:
continue # computed relation
@@ -454,44 +450,75 @@
table = 'cw_{0}'.format(rdef.subject)
column = 'cw_{0}'.format(rdef.rtype)
if any(isinstance(cstr, UniqueConstraint) for cstr in rdef.constraints):
- schema_indices[dbh._index_name(table, column, unique=True)] = (
+ schema_indexes[dbh._index_name(table, column, unique=True)] = (
table, [column])
if rschema.inlined or rdef.indexed:
- schema_indices[dbh._index_name(table, column)] = (table, [column])
+ schema_indexes[dbh._index_name(table, column)] = (table, [column])
else:
table = '{0}_relation'.format(rschema)
if source.dbdriver == 'postgres':
# index built after the primary key constraint
- schema_indices[build_index_name(table, ['eid_from', 'eid_to'], 'key_')] = (
+ schema_indexes[build_index_name(table, ['eid_from', 'eid_to'], 'key_')] = (
table, ['eid_from', 'eid_to'])
- schema_indices[build_index_name(table, ['eid_from'], 'idx_')] = (
+ schema_indexes[build_index_name(table, ['eid_from'], 'idx_')] = (
table, ['eid_from'])
- schema_indices[build_index_name(table, ['eid_to'], 'idx_')] = (
+ schema_indexes[build_index_name(table, ['eid_to'], 'idx_')] = (
table, ['eid_to'])
for eschema in schema.entities():
if eschema.final:
continue
table = 'cw_{0}'.format(eschema)
for columns, index_name in iter_unique_index_names(eschema):
- schema_indices[index_name] = (table, columns)
+ schema_indexes[index_name] = (table, columns)
+
+ return schema_indexes
+
+
+def database_indexes(cnx):
+ """Return a set of indexes found in the database, excluding primary key indexes."""
+ source = cnx.repo.system_source
+ dbh = source.dbhelper
+ if source.dbdriver == 'postgres':
+
+ def index_filter(idx):
+ return not (idx.startswith('pg_') or '_pkey' in idx or '_p_key' in idx
+ or idx.endswith('_key'))
+ else:
+
+ def index_filter(idx):
+ return not idx.startswith('sqlite_')
- missing_indices = set(schema_indices) - db_indices
- if missing_indices:
- print(underline_title('Missing indices'))
+ return set(idx for idx in dbh.list_indices(cnx.cnxset.cu)
+ if index_filter(idx))
+
+
+def check_indexes(cnx):
+ """Check indexes of a system database: output missing expected indexes as well as unexpected ones.
+
+    Return 0 if there are no differences, else 1.
+ """
+ schema_indexes = expected_indexes(cnx)
+ db_indexes = database_indexes(cnx)
+
+ missing_indexes = set(schema_indexes) - db_indexes
+ if missing_indexes:
+ print(underline_title('Missing indexes'))
print('index expected by the schema but not found in the database:\n')
- missing = ['{0} ON {1[0]} {1[1]}'.format(idx, schema_indices[idx])
- for idx in missing_indices]
+ missing = ['{0} ON {1[0]} {1[1]}'.format(idx, schema_indexes[idx])
+ for idx in missing_indexes]
print('\n'.join(sorted(missing)))
print()
status = 1
- additional_indices = db_indices - set(schema_indices)
- if additional_indices:
- print(underline_title('Additional indices'))
+
+ additional_indexes = db_indexes - set(schema_indexes)
+ if additional_indexes:
+ print(underline_title('Additional indexes'))
print('index in the database but not expected by the schema:\n')
- print('\n'.join(sorted(additional_indices)))
+ print('\n'.join(sorted(additional_indexes)))
print()
status = 1
- if not (missing_indices or additional_indices):
+
+ if not (missing_indexes or additional_indexes):
print('Everything is Ok')
status = 0
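
The helpers split out of check_indexes() above can also be combined by other callers, as the 3.24.4 migration script earlier in this changeset does. A minimal usage sketch, assuming cnx is an open repository connection:

from cubicweb.server.checkintegrity import expected_indexes, database_indexes

expected = expected_indexes(cnx)   # {index name: (table, columns)}
present = database_indexes(cnx)    # index names actually found in the database

for name in sorted(set(expected) - present):
    table, columns = expected[name]
    print('missing index %s on %s %s' % (name, table, columns))

for name in sorted(present - set(expected)):
    print('unexpected index %s' % name)
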
--- a/cubicweb/server/migractions.py Mon Jan 23 15:35:19 2017 +0100
+++ b/cubicweb/server/migractions.py Fri Jan 27 16:26:09 2017 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2016 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2017 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -666,7 +666,7 @@
# add new entity and relation types
for rschema in newcubes_schema.relations():
if rschema not in self.repo.schema:
- self.cmd_add_relation_type(rschema.type)
+ self.cmd_add_relation_type(rschema.type, commit=False)
new.add(rschema.type)
toadd = [eschema for eschema in newcubes_schema.entities()
if eschema not in self.repo.schema]
@@ -675,6 +675,8 @@
new.add(eschema.type)
# check if attributes has been added to existing entities
for rschema in newcubes_schema.relations():
+ if rschema.type in VIRTUAL_RTYPES:
+ continue
existingschema = self.repo.schema.rschema(rschema.type)
for (fromtype, totype) in rschema.rdefs:
# if rdef already exists or is infered from inheritance,
@@ -814,12 +816,12 @@
if attrschema.type not in instschema:
self.cmd_add_entity_type(attrschema.type, False, False)
if rschema.type not in instschema:
- # need to add the relation type and to commit to get it
- # actually in the schema
- self.cmd_add_relation_type(rschema.type, False, commit=True)
+ # need to add the relation type
+ self.cmd_add_relation_type(rschema.type, False, commit=False)
# register relation definition
rdef = self._get_rdef(rschema, eschema, eschema.destination(rschema))
ss.execschemarql(execute, rdef, ss.rdef2rql(rdef, cstrtypemap, groupmap),)
+ self.commit()
# take care to newly introduced base class
# XXX some part of this should probably be under the "if auto" block
for spschema in eschema.specialized_by(recursive=False):
@@ -873,7 +875,7 @@
# need to add the relation type and to commit to get it
# actually in the schema
added.append(rschema.type)
- self.cmd_add_relation_type(rschema.type, False, commit=True)
+ self.cmd_add_relation_type(rschema.type, False, commit=False)
rtypeadded = True
# register relation definition
# remember this two avoid adding twice non symmetric relation
@@ -897,7 +899,7 @@
if not rtypeadded:
# need to add the relation type and to commit to get it
# actually in the schema
- self.cmd_add_relation_type(rschema.type, False, commit=True)
+ self.cmd_add_relation_type(rschema.type, False, commit=False)
rtypeadded = True
elif (targettype, rschema.type, etype) in added:
continue
@@ -1096,7 +1098,7 @@
' do you really want to drop it?' % oldname,
default='n'):
return
- self.cmd_add_relation_type(newname, commit=True)
+ self.cmd_add_relation_type(newname, commit=False)
if not self.repo.schema[oldname].rule:
self.rqlexec('SET X %s Y WHERE X %s Y' % (newname, oldname),
ask_confirm=self.verbosity >= 2)
@@ -1111,7 +1113,7 @@
raise ExecutionError('Cannot add a relation definition for a '
'computed relation (%s)' % rschema)
if rtype not in self.repo.schema:
- self.cmd_add_relation_type(rtype, addrdef=False, commit=True)
+ self.cmd_add_relation_type(rtype, addrdef=False, commit=False)
if (subjtype, objtype) in self.repo.schema.rschema(rtype).rdefs:
print('warning: relation %s %s %s is already known, skip addition' % (
subjtype, rtype, objtype))
@@ -1131,7 +1133,8 @@
schemaobj = getattr(rdef, attr)
if getattr(schemaobj, 'eid', None) is None:
schemaobj.eid = self.repo.schema[schemaobj].eid
- assert schemaobj.eid is not None, schemaobj
+ assert schemaobj.eid is not None, \
+ '%s has no eid while adding %s' % (schemaobj, rdef)
return rdef
def cmd_drop_relation_definition(self, subjtype, rtype, objtype, commit=True):
--- a/cubicweb/server/repository.py Mon Jan 23 15:35:19 2017 +0100
+++ b/cubicweb/server/repository.py Fri Jan 27 16:26:09 2017 +0100
@@ -204,6 +204,10 @@
def init_cnxset_pool(self):
"""should be called bootstrap_repository, as this is what it does"""
config = self.config
+ # copy pool size here since config.init_cube() and config.load_schema()
+ # reload configuration from file and could reset a manually set pool
+ # size.
+ pool_size = config['connections-pool-size']
self._cnxsets_pool = queue.Queue()
# 0. init a cnxset that will be used to fetch bootstrap information from
# the database
@@ -224,7 +228,7 @@
config.cube_appobject_path = set(('hooks', 'entities'))
config.cubicweb_appobject_path = set(('hooks', 'entities'))
# limit connections pool to 1
- config['connections-pool-size'] = 1
+ pool_size = 1
if config.quick_start or config.creating or not config.read_instance_schema:
# load schema from the file system
if not config.creating:
@@ -258,7 +262,7 @@
self._get_cnxset().close(True)
# list of available cnxsets (can't iterate on a Queue)
self.cnxsets = []
- for i in range(config['connections-pool-size']):
+ for i in range(pool_size):
self.cnxsets.append(self.system_source.wrapped_connection())
self._cnxsets_pool.put_nowait(self.cnxsets[-1])
--- a/cubicweb/server/session.py Mon Jan 23 15:35:19 2017 +0100
+++ b/cubicweb/server/session.py Fri Jan 27 16:26:09 2017 +0100
@@ -43,6 +43,8 @@
NO_UNDO_TYPES = schema.SCHEMA_TYPES.copy()
NO_UNDO_TYPES.add('CWCache')
+NO_UNDO_TYPES.add('CWSession')
+NO_UNDO_TYPES.add('CWDataImport')
# is / is_instance_of are usually added by sql hooks except when using
# dataimport.NoHookRQLObjectStore, and we don't want to record them
# anyway in the later case
--- a/cubicweb/server/sources/native.py Mon Jan 23 15:35:19 2017 +0100
+++ b/cubicweb/server/sources/native.py Fri Jan 27 16:26:09 2017 +0100
@@ -716,10 +716,10 @@
rtypes = [c.split('.', 1)[1].strip()[3:] for c in columns]
raise UniqueTogetherError(cnx, rtypes=rtypes)
- mo = re.search('"cstr[a-f0-9]{32}"', arg)
+ mo = re.search(r'\bcstr[a-f0-9]{32}\b', arg)
if mo is not None:
# postgresql
- raise ViolatedConstraint(cnx, cstrname=mo.group(0)[1:-1])
+ raise ViolatedConstraint(cnx, cstrname=mo.group(0))
if arg.startswith('CHECK constraint failed:'):
# sqlite3 (new)
raise ViolatedConstraint(cnx, cstrname=arg.split(':', 1)[1].strip())
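
A small self-contained illustration of why the pattern was loosened: the word-boundary form matches the cstr<md5> constraint name whether or not the backend quotes it, and group(0) no longer needs its quotes stripped. The error strings below are made up for the example:

import re

messages = [
    'new row violates check constraint "cstr0123456789abcdef0123456789abcdef"',
    'CHECK cstr0123456789abcdef0123456789abcdef failed',  # hypothetical unquoted variant
]
for msg in messages:
    mo = re.search(r'\bcstr[a-f0-9]{32}\b', msg)
    if mo is not None:
        print(mo.group(0))  # bare constraint name, nothing to strip
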
--- a/cubicweb/server/test/data-migractions/migratedapp/schema.py Mon Jan 23 15:35:19 2017 +0100
+++ b/cubicweb/server/test/data-migractions/migratedapp/schema.py Fri Jan 27 16:26:09 2017 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2017 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -20,7 +20,7 @@
from yams.buildobjs import (EntityType, RelationType, RelationDefinition,
SubjectRelation, Bytes,
RichString, String, Int, Boolean, Datetime, Date, Float)
-from yams.constraints import SizeConstraint, UniqueConstraint
+from yams.constraints import SizeConstraint, UniqueConstraint, BoundaryConstraint, Attribute
from cubicweb import _
from cubicweb.schema import (WorkflowableEntityType, RQLConstraint,
RQLVocabularyConstraint,
@@ -216,3 +216,10 @@
subject = object = 'Folder2'
inlined = True
cardinality = '??'
+
+
+class Activity(EntityType):
+ start = Datetime(constraints=[BoundaryConstraint('<=', Attribute('end'))],
+ description=_('when the activity started'))
+ end = Datetime(constraints=[BoundaryConstraint('>=', Attribute('start'))],
+ description=_('when the activity ended'))
--- a/cubicweb/server/test/unittest_migractions.py Mon Jan 23 15:35:19 2017 +0100
+++ b/cubicweb/server/test/unittest_migractions.py Fri Jan 27 16:26:09 2017 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2016 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2017 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -326,6 +326,12 @@
for cstr in eschema.rdef('name').constraints:
self.assertTrue(hasattr(cstr, 'eid'))
+ def test_add_entity_type_with_constraint(self):
+ with self.mh() as (cnx, mh):
+ mh.cmd_add_entity_type('Activity')
+ constraints = self.table_constraints(mh, 'cw_Activity')
+ self.assertEqual(len(constraints), 2, constraints)
+
def test_add_cube_with_custom_final_type(self):
with self.mh() as (cnx, mh):
try:
--- a/cubicweb/server/test/unittest_rqlannotation.py Mon Jan 23 15:35:19 2017 +0100
+++ b/cubicweb/server/test/unittest_rqlannotation.py Fri Jan 27 16:26:09 2017 +0100
@@ -67,8 +67,8 @@
self.assertTrue(rqlst.defined_vars['B'].stinfo['attrvar'])
self.assertEqual(rqlst.defined_vars['C']._q_invariant, False)
self.assertEqual(rqlst.solutions, [{'A': 'TrInfo', 'B': 'String', 'C': 'Affaire'},
- {'A': 'TrInfo', 'B': 'String', 'C': 'CWUser'},
- {'A': 'TrInfo', 'B': 'String', 'C': 'Note'}])
+ {'A': 'TrInfo', 'B': 'String', 'C': 'CWUser'},
+ {'A': 'TrInfo', 'B': 'String', 'C': 'Note'}])
def test_0_5(self):
with self.session.new_cnx() as cnx:
@@ -93,7 +93,6 @@
with self.session.new_cnx() as cnx:
rqlst = self._prepare(cnx, 'Any P WHERE X eid 0, NOT X connait P')
self.assertEqual(rqlst.defined_vars['P']._q_invariant, False)
- #self.assertEqual(rqlst.defined_vars['X']._q_invariant, True)
self.assertEqual(len(rqlst.solutions), 1, rqlst.solutions)
def test_0_10(self):
@@ -400,11 +399,13 @@
with self.session.new_cnx() as cnx:
rqlst = self._prepare(cnx, 'Any COUNT(S),CS GROUPBY CS ORDERBY 1 DESC LIMIT 10 '
'WHERE C is Societe, S concerne C, C nom CS, '
- '(EXISTS(S owned_by D)) OR (EXISTS(S documented_by N, N title "published"))')
+ '(EXISTS(S owned_by D)) '
+ 'OR (EXISTS(S documented_by N, N title "published"))')
self.assertEqual(rqlst.defined_vars['S']._q_invariant, True)
rqlst = self._prepare(cnx, 'Any COUNT(S),CS GROUPBY CS ORDERBY 1 DESC LIMIT 10 '
'WHERE S is Affaire, C is Societe, S concerne C, C nom CS, '
- '(EXISTS(S owned_by D)) OR (EXISTS(S documented_by N, N title "published"))')
+ '(EXISTS(S owned_by D)) '
+ 'OR (EXISTS(S documented_by N, N title "published"))')
self.assertEqual(rqlst.defined_vars['S']._q_invariant, True)
def test_nonregr_ambiguity(self):
@@ -416,7 +417,8 @@
def test_nonregr_ambiguity_2(self):
with self.session.new_cnx() as cnx:
- rqlst = self._prepare(cnx, 'Any S,SN WHERE X has_text "tot", X in_state S, S name SN, X is CWUser')
+ rqlst = self._prepare(cnx, 'Any S,SN WHERE X has_text "tot", '
+ 'X in_state S, S name SN, X is CWUser')
# X use has_text but should not be invariant as ambiguous, and has_text
# may not be its principal
self.assertEqual(rqlst.defined_vars['X']._q_invariant, False)
@@ -425,7 +427,7 @@
def test_remove_from_deleted_source_1(self):
with self.session.new_cnx() as cnx:
rqlst = self._prepare(cnx, 'Note X WHERE X eid 999998, NOT X cw_source Y')
- self.assertNotIn('X', rqlst.defined_vars) # simplified
+ self.assertNotIn('X', rqlst.defined_vars) # simplified
self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True)
def test_remove_from_deleted_source_2(self):
@@ -440,6 +442,7 @@
'(Any C WHERE C is Societe, C nom CS)')
self.assertTrue(rqlst.parent.has_text_query)
+
if __name__ == '__main__':
- from logilab.common.testlib import unittest_main
- unittest_main()
+ import unittest
+ unittest.main()
--- a/cubicweb/test/unittest_cwconfig.py Mon Jan 23 15:35:19 2017 +0100
+++ b/cubicweb/test/unittest_cwconfig.py Fri Jan 27 16:26:09 2017 +0100
@@ -247,7 +247,7 @@
from cubes import file
self.assertEqual(file.__path__, [join(self.custom_cubes_dir, 'file')])
- def test_config_value_from_environment(self):
+ def test_config_value_from_environment_str(self):
self.assertIsNone(self.config['base-url'])
os.environ['CW_BASE_URL'] = 'https://www.cubicweb.org'
try:
@@ -256,6 +256,24 @@
finally:
del os.environ['CW_BASE_URL']
+ def test_config_value_from_environment_int(self):
+ self.assertEqual(self.config['connections-pool-size'], 4)
+ os.environ['CW_CONNECTIONS_POOL_SIZE'] = '6'
+ try:
+ self.assertEqual(self.config['connections-pool-size'], 6)
+ finally:
+ del os.environ['CW_CONNECTIONS_POOL_SIZE']
+
+ def test_config_value_from_environment_yn(self):
+ self.assertEqual(self.config['allow-email-login'], False)
+ try:
+ for val, result in (('yes', True), ('no', False),
+ ('y', True), ('n', False),):
+ os.environ['CW_ALLOW_EMAIL_LOGIN'] = val
+ self.assertEqual(self.config['allow-email-login'], result)
+ finally:
+ del os.environ['CW_ALLOW_EMAIL_LOGIN']
+
class FindPrefixTC(unittest.TestCase):
--- a/cubicweb/test/unittest_uilib.py Mon Jan 23 15:35:19 2017 +0100
+++ b/cubicweb/test/unittest_uilib.py Fri Jan 27 16:26:09 2017 +0100
@@ -174,8 +174,10 @@
'cw.pouet(1,"2").pouet(null)')
self.assertEqual(str(uilib.js.cw.pouet(1, cwutils.JSString("$")).pouet(None)),
'cw.pouet(1,$).pouet(null)')
- self.assertEqual(str(uilib.js.cw.pouet(1, {'callback': cwutils.JSString("cw.cb")}).pouet(None)),
- 'cw.pouet(1,{callback: cw.cb}).pouet(null)')
+ self.assertEqual(
+ str(uilib.js.cw.pouet(
+ 1, {'call back': cwutils.JSString("cw.cb")}).pouet(None)),
+ 'cw.pouet(1,{"call back": cw.cb}).pouet(null)')
def test_embedded_css(self):
--- a/cubicweb/test/unittest_utils.py Mon Jan 23 15:35:19 2017 +0100
+++ b/cubicweb/test/unittest_utils.py Fri Jan 27 16:26:09 2017 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2017 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -27,9 +27,10 @@
except ImportError: # Python3
from unittest import TestCase
+from six import PY2
from six.moves import range
-from cubicweb import Binary
+from cubicweb import Binary, Unauthorized
from cubicweb.devtools.testlib import CubicWebTC
from cubicweb.utils import (make_uid, UStringIO, RepeatList, HTMLHead,
QueryCache, parse_repo_uri)
@@ -40,6 +41,7 @@
except ImportError:
json = None
+
class MakeUidTC(TestCase):
def test_1(self):
self.assertNotEqual(make_uid('xyz'), make_uid('abcd'))
@@ -325,6 +327,26 @@
self.config.global_set_option('concat-resources', True)
+class UnauthorizedTC(TestCase):
+
+ def _test(self, func):
+ self.assertEqual(func(Unauthorized()),
+ 'You are not allowed to perform this operation')
+ self.assertEqual(func(Unauthorized('a')),
+ 'a')
+ self.assertEqual(func(Unauthorized('a', 'b')),
+ 'You are not allowed to perform a operation on b')
+ self.assertEqual(func(Unauthorized('a', 'b', 'c')),
+ 'a b c')
+
+ def test_str(self):
+ self._test(str)
+
+ if PY2:
+ def test_unicode(self):
+ self._test(unicode)
+
+
def load_tests(loader, tests, ignore):
import cubicweb.utils
tests.addTests(doctest.DocTestSuite(cubicweb.utils))
--- a/cubicweb/utils.py Mon Jan 23 15:35:19 2017 +0100
+++ b/cubicweb/utils.py Fri Jan 27 16:26:09 2017 +0100
@@ -534,7 +534,7 @@
it = sorted(d.items())
else:
it = d.items()
- res = [key + ': ' + js_dumps(val, predictable)
+ res = [js_dumps(key, predictable) + ': ' + js_dumps(val, predictable)
for key, val in it]
return '{%s}' % ', '.join(res)
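
For illustration, assuming js_dumps is imported from cubicweb.utils as edited above: keys are now serialized like values and therefore come out JSON-quoted, which is what the unittest_uilib.py and unittest_web.py changes below assert.

from cubicweb.utils import js_dumps

# before this change the key was emitted bare, as '{call back: 1}'
print(js_dumps({'call back': 1}))  # '{"call back": 1}'
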
--- a/cubicweb/web/formfields.py Mon Jan 23 15:35:19 2017 +0100
+++ b/cubicweb/web/formfields.py Fri Jan 27 16:26:09 2017 +0100
@@ -1027,6 +1027,8 @@
def _ensure_correctly_typed(self, form, value):
tz_naive = super(TZDatetimeField, self)._ensure_correctly_typed(
form, value)
+ if not tz_naive:
+ return None
return tz_naive.replace(tzinfo=pytz.utc)
--- a/cubicweb/web/formwidgets.py Mon Jan 23 15:35:19 2017 +0100
+++ b/cubicweb/web/formwidgets.py Fri Jan 27 16:26:09 2017 +0100
@@ -790,8 +790,12 @@
def process_field_data(self, form, field):
req = form._cw
- datestr = req.form.get(field.input_name(form, 'date')).strip() or None
- timestr = req.form.get(field.input_name(form, 'time')).strip() or None
+ datestr = req.form.get(field.input_name(form, 'date'))
+ if datestr:
+ datestr = datestr.strip() or None
+ timestr = req.form.get(field.input_name(form, 'time'))
+ if timestr:
+ timestr = timestr.strip() or None
if datestr is None:
return None
try:
--- a/cubicweb/web/http_headers.py Mon Jan 23 15:35:19 2017 +0100
+++ b/cubicweb/web/http_headers.py Fri Jan 27 16:26:09 2017 +0100
@@ -164,7 +164,7 @@
"""Convert an HTTP date string (one of three formats) to seconds since epoch."""
parts = dateString.split()
- if not parts[0][0:3].lower() in weekdayname_lower:
+ if parts and not parts[0][0:3].lower() in weekdayname_lower:
# Weekday is stupid. Might have been omitted.
try:
return parseDateTime("Sun, "+dateString)
--- a/cubicweb/web/test/unittest_form.py Mon Jan 23 15:35:19 2017 +0100
+++ b/cubicweb/web/test/unittest_form.py Fri Jan 27 16:26:09 2017 +0100
@@ -265,9 +265,12 @@
<p><b>You can either submit a new file using the browse button above, or choose to remove already uploaded file by checking the "detach attached file" check-box, or edit file content online with the widget below.</b></p>
<textarea cols="80" name="data-subject:%(eid)s" onkeyup="autogrow(this)" rows="3">new widgets system</textarea>''' % {'eid': file.eid})
- def _modified_tzdatenaiss(self, eid, datestr, timestr):
- ctx = {'tzdatenaiss-subjectdate:%d' % eid: datestr,
- 'tzdatenaiss-subjecttime:%d' % eid: timestr}
+ def _modified_tzdatenaiss(self, eid, date_and_time_str=None):
+ ctx = {}
+ if date_and_time_str:
+ datestr, timestr = date_and_time_str
+ ctx['tzdatenaiss-subjectdate:%d' % eid] = datestr
+ ctx['tzdatenaiss-subjecttime:%d' % eid] = timestr
with self.admin_access.web_request(**ctx) as req:
form = EntityFieldsForm(req, None, entity=req.entity_from_eid(eid))
field = TZDatetimeField(name='tzdatenaiss', eidparam=True,
@@ -285,10 +288,13 @@
eid = req.create_entity('Personne', nom=u'Flo', tzdatenaiss=tzd).eid
req.cnx.commit()
- modified = self._modified_tzdatenaiss(eid, datestr, timestr)
+ modified = self._modified_tzdatenaiss(eid, (datestr, timestr))
self.assertFalse(modified)
- modified = self._modified_tzdatenaiss(eid, '2016/05/04', '15:07')
+ modified = self._modified_tzdatenaiss(eid, ('2016/05/04', '15:07'))
+ self.assertTrue(modified)
+
+ modified = self._modified_tzdatenaiss(eid, None)
self.assertTrue(modified)
def test_passwordfield(self):
--- a/cubicweb/web/test/unittest_web.py Mon Jan 23 15:35:19 2017 +0100
+++ b/cubicweb/web/test/unittest_web.py Fri Jan 27 16:26:09 2017 +0100
@@ -51,7 +51,7 @@
cbname = url.split()[1][:-2]
self.assertMultiLineEqual(
'function %s() { $("#foo").loadxhtml("http://testing.fr/cubicweb/ajax?%s",'
- '{pageid: "%s"},"get","replace"); }' %
+ '{"pageid": "%s"},"get","replace"); }' %
(cbname, qs, req.pageid),
req.html_headers.post_inlined_scripts[0])
--- a/cubicweb/web/webconfig.py Mon Jan 23 15:35:19 2017 +0100
+++ b/cubicweb/web/webconfig.py Fri Jan 27 16:26:09 2017 +0100
@@ -149,14 +149,6 @@
'transparent to the user. Default to 5min.',
'group': 'web', 'level': 3,
}),
- ('embed-allowed',
- {'type' : 'regexp',
- 'default': None,
- 'help': 'regular expression matching URLs that may be embeded. \
-leave it blank if you don\'t want the embedding feature, or set it to ".*" \
-if you want to allow everything',
- 'group': 'web', 'level': 3,
- }),
('submit-mail',
{'type' : 'string',
'default': None,
--- a/debian/changelog Mon Jan 23 15:35:19 2017 +0100
+++ b/debian/changelog Fri Jan 27 16:26:09 2017 +0100
@@ -1,3 +1,21 @@
+cubicweb (3.24.4-1) unstable; urgency=medium
+
+ * New upstream release.
+
+ -- Sylvain Thenault <sylvain.thenault@logilab.fr> Fri, 27 Jan 2017 13:28:56 +0100
+
+cubicweb (3.24.3-1) unstable; urgency=medium
+
+ * New upstream release.
+
+ -- David Douard <david.douard@logilab.fr> Wed, 23 Nov 2016 18:36:14 +0100
+
+cubicweb (3.24.2-1) unstable; urgency=medium
+
+  * New upstream release.
+
+ -- Denis Laxalde <denis.laxalde@logilab.fr> Mon, 21 Nov 2016 13:46:43 +0100
+
cubicweb (3.24.1-1) unstable; urgency=medium
* New upstream release.
--- a/debian/control Mon Jan 23 15:35:19 2017 +0100
+++ b/debian/control Fri Jan 27 16:26:09 2017 +0100
@@ -14,7 +14,7 @@
python-docutils,
python-sphinx,
python-logilab-common (>= 1.2.2),
- python-unittest2,
+ python-unittest2 (>= 0.7.0),
python-logilab-mtconverter,
python-markdown,
python-tz,
@@ -40,10 +40,11 @@
${python:Depends},
python-six (>= 1.4.0),
python-logilab-mtconverter (>= 0.8.0),
- python-logilab-common (>= 1.2.0),
+ python-logilab-common (>= 1.2.2),
python-logilab-database (>= 1.15.0),
python-yams (>= 0.44.0),
python-rql (>= 0.34.0),
+ python-unittest2 (>= 0.7.0),
python-lxml,
python-markdown,
python-passlib,
@@ -67,7 +68,6 @@
python-rdflib,
python-werkzeug,
# dev recommends
- python-unittest2 (>= 0.7.0),
python-pysqlite2,
Suggests:
python-zmq,
--- a/doc/book/annexes/rql/language.rst Mon Jan 23 15:35:19 2017 +0100
+++ b/doc/book/annexes/rql/language.rst Fri Jan 27 16:26:09 2017 +0100
@@ -318,8 +318,8 @@
``owned_by``".
* ``Any X WHERE NOT X owned_by U, U login "syt"`` means "the entity have no
- relation ``owned_by`` with the user syt". They may have a relation "owned_by"
- with another user.
+ relation ``owned_by`` with the user syt". They may have a relation "owned_by"
+ with another user.
In this clause, you can also use ``EXISTS`` when you want to know if some
expression is true and do not need the complete set of elements that make it
--- a/doc/changes/3.24.rst Mon Jan 23 15:35:19 2017 +0100
+++ b/doc/changes/3.24.rst Fri Jan 27 16:26:09 2017 +0100
@@ -1,5 +1,5 @@
-3.24
-====
+3.24 (2 November 2016)
+======================
New features
------------
--- a/doc/changes/changelog.rst Mon Jan 23 15:35:19 2017 +0100
+++ b/doc/changes/changelog.rst Fri Jan 27 16:26:09 2017 +0100
@@ -2,6 +2,7 @@
Changelog history
===================
+.. include:: 3.24.rst
.. include:: 3.23.rst
.. include:: 3.22.rst
.. include:: 3.21.rst
--- a/doc/changes/index.rst Mon Jan 23 15:35:19 2017 +0100
+++ b/doc/changes/index.rst Fri Jan 27 16:26:09 2017 +0100
@@ -4,6 +4,7 @@
.. toctree::
:maxdepth: 1
+ 3.24
3.23
3.22
3.21
--- a/flake8-ok-files.txt Mon Jan 23 15:35:19 2017 +0100
+++ b/flake8-ok-files.txt Fri Jan 27 16:26:09 2017 +0100
@@ -48,6 +48,7 @@
cubicweb/server/test/unittest_checkintegrity.py
cubicweb/server/test/unittest_ldapsource.py
cubicweb/server/test/unittest_session.py
+cubicweb/server/test/unittest_rqlannotation.py
cubicweb/sobjects/test/unittest_notification.py
cubicweb/sobjects/test/unittest_register_user.py
cubicweb/sobjects/textparsers.py